repo_name stringlengths 6 100 | path stringlengths 4 294 | copies stringlengths 1 5 | size stringlengths 4 6 | content stringlengths 606 896k | license stringclasses 15
values |
|---|---|---|---|---|---|
bowlofstew/Impala | testdata/bin/cache_tables.py | 15 | 2845 | #!/usr/bin/env impala-python
#
# This script will warm up the buffer cache with the tables required to run the input
# query. This only works on a mini-dfs cluster. This is remarkably difficult to do
# since hdfs which tries to hide the details of the block locations from users.
# The only way to do this is to
# 1. use the java APIs (deprecated, of course) to extract the block ids.
# 2. find the files with those block ids on the file system and read them
#
# First run testdata/bin/generate-block-ids.sh. This will output the block locations
# to testdata/block-ids. This file is good as long as the mini-dfs cluster does not
# get new files. If the block-ids file is not there, this script will run
# generate-block-ids.sh.
#
# Run this script, passing it the query and it will go read every replica of every
# block of every table in the query.
import math
import os
import re
import sys
import subprocess
import tempfile
from optparse import OptionParser
# Options
# Command line options: the query whose tables should be pre-read.
parser = OptionParser()
parser.add_option("-q", "--query", dest="query", default = "",
                  help="Query to run. If none specified, runs all queries.")
(options, args) = parser.parse_args()

# Location of the cached <table>: <block ids> mapping, relative to IMPALA_HOME.
block_ids_file = 'testdata/block-ids'
# Root directory under which the mini-dfs data nodes store their block files.
data_node_root = os.environ['MINI_DFS_BASE_DATA_DIR'] + '/dfs/data'
# table name -> list of block id strings; populated by parse_block_ids().
block_ids = {}
# Parse the block ids file to all the block ids for all the tables
# the format of the file is:
# <table name>: <block_id1> <block_id2> <etc>
def parse_block_ids():
    """Populate the global `block_ids` dict from the block-ids file.

    The file contains one line per table, formatted as:
        <table name>: <block_id1> <block_id2> <etc>
    If the file does not exist, generate-block-ids.sh is run once to create
    it; an exception is raised if it still cannot be found afterwards.
    """
    full_path = os.environ['IMPALA_HOME'] + "/" + block_ids_file;
    # Generate the block-ids file on demand if it is not there yet.
    if not os.path.isfile(full_path):
        cmd = os.environ['IMPALA_HOME'] + '/testdata/bin/generate-block-ids.sh'
        os.system(cmd)
    # The generation script may fail silently; verify the file exists now.
    if not os.path.isfile(full_path):
        raise Exception("Could not find/generate block id files: " + full_path)
    f = open(full_path);
    for line in f:
        # Each line: "<table>: <id> <id> ...". Split the name from the ids.
        tokens = line.split(':')
        blocks = tokens[1].strip().split(' ')
        block_ids[tokens[0].strip()] = blocks
# Parse for the tables used in this query
def parse_tables(query):
table_predecessor = ['from', 'join']
tokens = query.split(' ')
tables = []
next_is_table = False
for t in tokens:
t = t.lower()
if next_is_table:
tables.append(t)
next_is_table = False
if t in table_predecessor:
next_is_table = True
return tables
# Warm the buffer cache by cat-ing all the blocks to /dev/null
def warm_buffer_cache(table):
    """Read every replica of every block of `table` to warm the OS cache.

    Block files are located by id under the mini-dfs data-node directories
    and cat-ed to /dev/null, pulling their contents into the kernel buffer
    cache. Raises if the table is not present in the block-ids mapping.
    """
    if table not in block_ids:
        raise Exception("Table not found: " + table)
    blocks = block_ids[table]
    for block in blocks:
        # find matches every replica file for this block id; the shell sends
        # cat's output to /dev/null, so the read exists purely for caching.
        cmd = 'find %s -type f -name blk_%s* -exec cat {} > /dev/null \;' % \
              (data_node_root, block)
        os.system(cmd)
# --- Script entry point ---
# Determine the tables referenced by the query, then read all their blocks.
tables = parse_tables(options.query)
parse_block_ids()
if len(tables) == 0:
    raise Exception("Could not parse tables in: " + options.query)
for table in tables:
    warm_buffer_cache(table)
| apache-2.0 |
nhoffman/opiates | opiate/utils.py | 1 | 2361 | from collections import Iterable
import os
from os import path
import shutil
import logging
from __init__ import __version__
log = logging.getLogger(__name__)
def flatten(seq):
    """
    Poached from http://stackoverflow.com/questions/2158395/flatten-an-irregular-list-of-lists-in-python

    Don't flatten strings or dict-like objects.
    """
    for el in seq:
        # Recurse into nested iterables, but treat strings (basestring,
        # Python 2) and dict-like objects (anything with a .get method)
        # as atomic values and yield them unchanged.
        if isinstance(el, Iterable) and not (isinstance(el, basestring) or hasattr(el, 'get')):
            for sub in flatten(el):
                yield sub
        else:
            yield el
def get_outfile(args, label = None, ext = None, include_version = True):
    """
    Return a file-like object open for writing. `args` is expected to
    have attributes 'infile' (None or a string specifying a file
    path), 'outfile' (None or a file-like object open for writing),
    and 'outdir' (None or a string defining a dir-path). If
    `args.outfile` is None, the name of the outfile is derived from
    the basename of `args.infile` and is written either in the same
    directory or in `args.outdir` if provided.

    `label` and `ext` are optional extra name components; the package
    version is included unless `include_version` is False.
    """
    version = __version__ if include_version else None
    if args.outfile is None:
        dirname, basename = path.split(args.infile)
        # Drop empty components (None / '') before joining with dots.
        parts = filter(lambda x: x,
                       [path.splitext(basename)[0], version, label, ext])
        outname = path.join(args.outdir or dirname,
                            '.'.join(parts))
        # Refuse to clobber the input file with the derived output name.
        if path.abspath(outname) == path.abspath(args.infile):
            raise OSError('Input and output file names are identical')
        outfile = open(outname, 'w')
    else:
        outfile = args.outfile
    # Whichever path produced it, the handle must be open for writing.
    if not (hasattr(outfile, 'write') and not outfile.closed and 'w' in outfile.mode):
        raise OSError('`args.outfile` must be a file-like object open for writing')
    log.debug(outfile)
    return outfile
def mkdir(dirpath, clobber = False):
    """
    Create a (potentially existing) directory without errors. Raise
    OSError if directory can't be created. If clobber is True, remove
    dirpath if it exists.
    """
    if clobber:
        # Remove any existing tree first; ignore_errors makes a missing
        # directory a no-op rather than an error.
        shutil.rmtree(dirpath, ignore_errors = True)
    try:
        os.mkdir(dirpath)
    except OSError, msg:
        # Most likely the directory already exists; existence is verified
        # explicitly below, so the error can be swallowed here.
        pass
    if not path.exists(dirpath):
        raise OSError('Failed to create %s' % dirpath)
    return dirpath
| gpl-3.0 |
Seekatar/pcg | Games/FineControl.py | 1 | 2743 | import base
import datetime
import random
from FixedRandomGame import FixedRandomGame as __base
# use __base, otherwise when searching for games, FixedRandomGame shows up multiple times
class FineControl(__base):
    """
    Touch four plates in patterns as fast as you can.
    Level 1: tight, clockwise 5 times
    Level 2: tight, anti clockwise 5 times
    Level 3: tight, repeat 4 each: clockwise, anticlockwise, diagonal1, diagonal2
    Level 4: wide, clockwise 5 times
    Level 5: wide, anti clockwise 5 times
    Level 6: wide, repeat 4 each: clockwise, anticlockwise, diagonal1, diagonal2
    """
    def GameInfo():
        """
        return tuple of (name,desc,levels,author,date,version)
        """
        return ("FineControl",
                "Tight patterns of plates",
                6, #levels
                "Jim Wallace",
                datetime.date(2015,6,19),
                '0.1')
    # Pre-decorator idiom: make GameInfo callable on the class itself.
    GameInfo = staticmethod(GameInfo)

    # patterns -- plate numbers; presumably laid out on a 3x3 grid (1..9)
    # where "tight" patterns use adjacent plates and "wide" ones use the
    # corners -- TODO confirm against the hardware layout.
    _clockwise = (1,2,5,4)
    _anticlockwise = _clockwise[::-1] #reverse
    _diagonal1 = (1,5)
    _diagonal2 = (2,4)
    _wclockwise = (1,3,9,7)
    _wanticlockwise = _wclockwise[::-1] #reverse
    _wdiagonal1 = (1,9)
    _wdiagonal2 = (3,7)

    def __init__(self):
        super(FineControl,self).__init__()
        self._timeout_sec = 10       # seconds allowed to hit each plate
        self._interval_sec = 0       # no pause between plates
        # Active pattern and cursor into it; both set up in initialize().
        self._pattern = None
        self._pattern_index = -1
        self.LOOP_CNT = 0

    def initialize(self,hardware,user,level):
        """
        Initialize
        """
        super(FineControl,self).initialize(hardware,user,level)
        # Select the plate sequence for the requested level (see class doc).
        if self.level == 1:
            self._pattern = FineControl._clockwise*5
        elif self.level == 2:
            self._pattern = FineControl._anticlockwise*5
        elif self.level == 3:
            repeat = 4
            self._pattern = FineControl._clockwise*repeat+FineControl._anticlockwise*repeat+FineControl._diagonal1*repeat+FineControl._diagonal2*repeat
        elif self.level == 4:
            self._pattern = FineControl._wclockwise*5
        elif self.level == 5:
            self._pattern = FineControl._wanticlockwise*5
        else:
            repeat = 4
            self._pattern = FineControl._wclockwise*repeat+FineControl._wanticlockwise*repeat+FineControl._wdiagonal1*repeat+FineControl._wdiagonal2*repeat
        # index for next plate
        self._pattern_index = -1
        self.LOOP_CNT = len(self._pattern)

    def get_next_plate(self):
        """
        override to change number of plates, etc.
        """
        # Advance the cursor and return the next plate in the pattern.
        self._pattern_index += 1
        return self._pattern[self._pattern_index]
| mit |
avinassh/praw | praw/errors.py | 6 | 14613 | # This file is part of PRAW.
#
# PRAW is free software: you can redistribute it and/or modify it under the
# terms of the GNU General Public License as published by the Free Software
# Foundation, either version 3 of the License, or (at your option) any later
# version.
#
# PRAW is distributed in the hope that it will be useful, but WITHOUT ANY
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
# A PARTICULAR PURPOSE. See the GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along with
# PRAW. If not, see <http://www.gnu.org/licenses/>.
"""
Error classes.
Includes two main exceptions: ClientException, when something goes
wrong on our end, and APIException for when something goes wrong on the
server side. A number of classes extend these two main exceptions for more
specific exceptions.
"""
from __future__ import print_function, unicode_literals
import inspect
import six
import sys
class PRAWException(Exception):
    """The base PRAW Exception class.

    Ideally, this can be caught to handle any exception from PRAW.

    """


class ClientException(PRAWException):
    """Base exception class for errors that don't involve the remote API."""

    def __init__(self, message=None):
        """Construct a ClientException.

        :param message: The error message to display.

        """
        if not message:
            message = 'Clientside error'
        super(ClientException, self).__init__()
        # Stored on the instance (not passed to Exception.__init__) so that
        # __str__ can return exactly this text.
        self.message = message

    def __str__(self):
        """Return the message of the error."""
        return self.message
class OAuthScopeRequired(ClientException):
    """Indicates that an OAuth2 scope is required to make the function call.

    The attribute `scope` will contain the name of the necessary scope.

    """

    def __init__(self, function, scope, message=None):
        """Construct an OAuthScopeRequired exception.

        :param function: The function that requires a scope.
        :param scope: The scope required for the function.
        :param message: A custom message to associate with the
            exception. Default: `function` requires the OAuth2 scope `scope`

        """
        if not message:
            message = '`{0}` requires the OAuth2 scope `{1}`'.format(function,
                                                                     scope)
        super(OAuthScopeRequired, self).__init__(message)
        self.scope = scope


class LoginRequired(ClientException):
    """Indicates that a logged in session is required.

    This exception is raised on a preemptive basis, whereas NotLoggedIn occurs
    in response to a lack of credentials on a privileged API call.

    """

    def __init__(self, function, message=None):
        """Construct a LoginRequired exception.

        :param function: The function that requires login-based authentication.
        :param message: A custom message to associate with the exception.
            Default: `function` requires a logged in session

        """
        if not message:
            message = '`{0}` requires a logged in session'.format(function)
        super(LoginRequired, self).__init__(message)


class LoginOrScopeRequired(OAuthScopeRequired, LoginRequired):
    """Indicates that either a logged in session or OAuth2 scope is required.

    The attribute `scope` will contain the name of the necessary scope.

    """

    def __init__(self, function, scope, message=None):
        """Construct a LoginOrScopeRequired exception.

        :param function: The function that requires authentication.
        :param scope: The scope that is required if not logged in.
        :param message: A custom message to associate with the exception.
            Default: `function` requires a logged in session or the OAuth2
            scope `scope`

        """
        if not message:
            message = ('`{0}` requires a logged in session or the '
                       'OAuth2 scope `{1}`').format(function, scope)
        # Cooperative multiple inheritance: OAuthScopeRequired.__init__ runs
        # first per the MRO and records the scope.
        super(LoginOrScopeRequired, self).__init__(function, scope, message)
class ModeratorRequired(LoginRequired):
    """Indicates that a moderator of the subreddit is required."""

    def __init__(self, function):
        """Construct a ModeratorRequired exception.

        :param function: The function that requires moderator access.

        """
        message = ('`{0}` requires a moderator '
                   'of the subreddit').format(function)
        super(ModeratorRequired, self).__init__(message)


class ModeratorOrScopeRequired(LoginOrScopeRequired, ModeratorRequired):
    """Indicates that a moderator of the sub or OAuth2 scope is required.

    The attribute `scope` will contain the name of the necessary scope.

    """

    def __init__(self, function, scope):
        """Construct a ModeratorOrScopeRequired exception.

        :param function: The function that requires moderator authentication
            or a moderator scope.
        :param scope: The scope that is required if not logged in with
            moderator access.

        """
        message = ('`{0}` requires a moderator of the subreddit or the '
                   'OAuth2 scope `{1}`').format(function, scope)
        super(ModeratorOrScopeRequired, self).__init__(function, scope,
                                                       message)


class OAuthAppRequired(ClientException):
    """Raised when an OAuth client cannot be initialized.

    This occurs when any one of the OAuth config values are not set.

    """
class HTTPException(PRAWException):
    """Base class for HTTP related exceptions."""

    def __init__(self, _raw, message=None):
        """Construct a HTTPException.

        :params _raw: The internal request library response object. This object
            is mapped to attribute `_raw` whose format may change at any time.

        """
        if not message:
            message = 'HTTP error'
        super(HTTPException, self).__init__()
        self._raw = _raw
        self.message = message

    def __str__(self):
        """Return the message of the error."""
        return self.message


class Forbidden(HTTPException):
    """Raised when the user does not have permission to the entity."""


class NotFound(HTTPException):
    """Raised when the requested entity is not found."""


class InvalidComment(PRAWException):
    """Indicate that the comment is no longer available on reddit."""

    # reddit's error code for a deleted comment; also used as str(exc).
    ERROR_TYPE = 'DELETED_COMMENT'

    def __str__(self):
        """Return the message of the error."""
        return self.ERROR_TYPE


class InvalidSubmission(PRAWException):
    """Indicates that the submission is no longer available on reddit."""

    ERROR_TYPE = 'DELETED_LINK'

    def __str__(self):
        """Return the message of the error."""
        return self.ERROR_TYPE


class InvalidSubreddit(PRAWException):
    """Indicates that an invalid subreddit name was supplied."""

    ERROR_TYPE = 'SUBREDDIT_NOEXIST'

    def __str__(self):
        """Return the message of the error."""
        return self.ERROR_TYPE
class RedirectException(PRAWException):
    """Raised when a redirect response occurs that is not expected."""

    def __init__(self, request_url, response_url, message=None):
        """Construct a RedirectException.

        :param request_url: The url requested.
        :param response_url: The url being redirected to.
        :param message: A custom message to associate with the exception.

        """
        if not message:
            message = ('Unexpected redirect '
                       'from {0} to {1}').format(request_url, response_url)
        super(RedirectException, self).__init__()
        self.request_url = request_url
        self.response_url = response_url
        self.message = message

    def __str__(self):
        """Return the message of the error."""
        return self.message


class OAuthException(PRAWException):
    """Base exception class for OAuth API calls.

    Attribute `message` contains the error message.
    Attribute `url` contains the url that resulted in the error.

    """

    def __init__(self, message, url):
        """Construct a OAuthException.

        :param message: The message associated with the exception.
        :param url: The url that resulted in error.

        """
        super(OAuthException, self).__init__()
        self.message = message
        self.url = url

    def __str__(self):
        """Return the message along with the url."""
        return self.message + " on url {0}".format(self.url)


class OAuthInsufficientScope(OAuthException):
    """Raised when the current OAuth scope is not sufficient for the action.

    This indicates the access token is valid, but not for the desired action.

    """


class OAuthInvalidGrant(OAuthException):
    """Raised when the code to retrieve access information is not valid."""


class OAuthInvalidToken(OAuthException):
    """Raised when the current OAuth access token is not valid."""
class APIException(PRAWException):
    """Base exception class for the reddit API error message exceptions.

    All exceptions of this type should have their own subclass.

    """

    def __init__(self, error_type, message, field='', response=None):
        """Construct an APIException.

        :param error_type: The error type set on reddit's end.
        :param message: The associated message for the error.
        :param field: The input field associated with the error, or ''.
        :param response: The HTTP response that resulted in the exception.

        """
        super(APIException, self).__init__()
        self.error_type = error_type
        self.message = message
        self.field = field
        self.response = response

    def __str__(self):
        """Return a string containing the error message and field."""
        # Subclasses declare a class-level ERROR_TYPE, which makes repeating
        # error_type in the string redundant; only the generic base shows it.
        if hasattr(self, 'ERROR_TYPE'):
            return '`%s` on field `%s`' % (self.message, self.field)
        else:
            return '(%s) `%s` on field `%s`' % (self.error_type, self.message,
                                                self.field)


class ExceptionList(APIException):
    """Raised when more than one exception occurred."""

    def __init__(self, errors):
        """Construct an ExceptionList.

        :param errors: The list of errors.

        """
        super(ExceptionList, self).__init__(None, None)
        self.errors = errors

    def __str__(self):
        """Return a string representation for all the errors."""
        ret = '\n'
        for i, error in enumerate(self.errors):
            # six.text_type keeps this working for both str and unicode
            # error messages under Python 2.
            ret += '\tError %d) %s\n' % (i, six.text_type(error))
        return ret
# --- Specific reddit API errors ---
# Each subclass pins the ERROR_TYPE string reddit returns; ERROR_MAPPING
# (built at the bottom of this module) dispatches on these values.


class AlreadySubmitted(APIException):
    """An exception to indicate that a URL was previously submitted."""

    ERROR_TYPE = 'ALREADY_SUB'


class AlreadyModerator(APIException):
    """Used to indicate that a user is already a moderator of a subreddit."""

    ERROR_TYPE = 'ALREADY_MODERATOR'


class BadCSS(APIException):
    """An exception to indicate bad CSS (such as invalid) was used."""

    ERROR_TYPE = 'BAD_CSS'


class BadCSSName(APIException):
    """An exception to indicate a bad CSS name (such as invalid) was used."""

    ERROR_TYPE = 'BAD_CSS_NAME'


class BadUsername(APIException):
    """An exception to indicate an invalid username was used."""

    ERROR_TYPE = 'BAD_USERNAME'


class InvalidCaptcha(APIException):
    """An exception for when an incorrect captcha error is returned."""

    ERROR_TYPE = 'BAD_CAPTCHA'


class InvalidEmails(APIException):
    """An exception for when invalid emails are provided."""

    ERROR_TYPE = 'BAD_EMAILS'


class InvalidFlairTarget(APIException):
    """An exception raised when an invalid user is passed as a flair target."""

    ERROR_TYPE = 'BAD_FLAIR_TARGET'


class InvalidInvite(APIException):
    """Raised when attempting to accept a nonexistent moderator invite."""

    ERROR_TYPE = 'NO_INVITE_FOUND'


class InvalidUser(APIException):
    """An exception for when a user doesn't exist."""

    ERROR_TYPE = 'USER_DOESNT_EXIST'


class InvalidUserPass(APIException):
    """An exception for failed logins."""

    ERROR_TYPE = 'WRONG_PASSWORD'


class InsufficientCreddits(APIException):
    """Raised when there are not enough creddits to complete the action."""

    ERROR_TYPE = 'INSUFFICIENT_CREDDITS'


class NotLoggedIn(APIException):
    """An exception for when a Reddit user isn't logged in."""

    ERROR_TYPE = 'USER_REQUIRED'


class NotModified(APIException):
    """An exception raised when reddit returns {'error': 304}.

    This error indicates that the requested content was not modified and is
    being requested too frequently. Such an error usually occurs when multiple
    instances of PRAW are running concurrently or in rapid succession.

    """

    def __init__(self, response):
        """Construct an instance of the NotModified exception.

        This error does not have an error_type, message, nor field.

        """
        super(NotModified, self).__init__(None, None, response=response)

    def __str__(self):
        """Return: That page has not been modified."""
        return 'That page has not been modified.'


class RateLimitExceeded(APIException):
    """An exception for when something has happened too frequently.

    Contains a `sleep_time` attribute for the number of seconds that must
    transpire prior to the next request.

    """

    ERROR_TYPE = 'RATELIMIT'

    def __init__(self, error_type, message, field, response):
        """Construct an instance of the RateLimitExceeded exception.

        The parameters match that of :class:`APIException`.

        The `sleep_time` attribute is extracted from the response object.

        """
        super(RateLimitExceeded, self).__init__(error_type, message,
                                                field, response)
        # `response` here is the decoded JSON error body, which carries the
        # required wait time under its 'ratelimit' key.
        self.sleep_time = self.response['ratelimit']


class SubredditExists(APIException):
    """An exception to indicate that a subreddit name is not available."""

    ERROR_TYPE = 'SUBREDDIT_EXISTS'


class UsernameExists(APIException):
    """An exception to indicate that a username is not available."""

    ERROR_TYPE = 'USERNAME_TAKEN'
def _build_error_mapping():
    """Return a dict mapping reddit error_type strings to exception classes.

    Scans this module for every class that defines an ``ERROR_TYPE``
    attribute and keys it by that attribute's value.
    """
    def _is_mapped_error(member):
        return inspect.isclass(member) and hasattr(member, 'ERROR_TYPE')

    module = sys.modules[__name__]
    return dict((cls.ERROR_TYPE, cls)
                for _, cls in inspect.getmembers(module, _is_mapped_error))


# Lookup table used to raise the most specific exception for an API error.
ERROR_MAPPING = _build_error_mapping()
| gpl-3.0 |
yati-sagade/incubator-airflow | tests/contrib/operators/test_file_to_wasb.py | 38 | 2728 | # -*- coding: utf-8 -*-
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import datetime
import unittest
from airflow import DAG, configuration
from airflow.contrib.operators.file_to_wasb import FileToWasbOperator
# Prefer the stdlib mock (Python 3.3+); fall back to the external `mock`
# package, and finally to None so this module still imports without it.
try:
    from unittest import mock
except ImportError:
    try:
        import mock
    except ImportError:
        mock = None
class TestFileToWasbOperator(unittest.TestCase):
    """Unit tests for the FileToWasbOperator (local file -> Azure blob)."""

    # Keyword arguments shared by every operator constructed in these tests.
    _config = {
        'file_path': 'file',
        'container_name': 'container',
        'blob_name': 'blob',
        'wasb_conn_id': 'wasb_default',
        'retries': 3,
    }

    def setUp(self):
        """Build a minimal DAG for the operator under test to attach to."""
        configuration.load_test_config()
        args = {
            'owner': 'airflow',
            'start_date': datetime.datetime(2017, 1, 1)
        }
        self.dag = DAG('test_dag_id', default_args=args)

    def test_init(self):
        """The constructor should store every configured attribute verbatim."""
        operator = FileToWasbOperator(
            task_id='wasb_operator',
            dag=self.dag,
            **self._config
        )
        self.assertEqual(operator.file_path, self._config['file_path'])
        self.assertEqual(operator.container_name,
                         self._config['container_name'])
        self.assertEqual(operator.blob_name, self._config['blob_name'])
        self.assertEqual(operator.wasb_conn_id, self._config['wasb_conn_id'])
        # load_options defaults to an empty dict when not supplied.
        self.assertEqual(operator.load_options, {})
        self.assertEqual(operator.retries, self._config['retries'])

        operator = FileToWasbOperator(
            task_id='wasb_operator',
            dag=self.dag,
            load_options={'timeout': 2},
            **self._config
        )
        self.assertEqual(operator.load_options, {'timeout': 2})

    @mock.patch('airflow.contrib.operators.file_to_wasb.WasbHook',
                autospec=True)
    def test_execute(self, mock_hook):
        """execute() should upload the file through WasbHook.load_file."""
        mock_instance = mock_hook.return_value
        operator = FileToWasbOperator(
            task_id='wasb_sensor',
            dag=self.dag,
            load_options={'timeout': 2},
            **self._config
        )
        operator.execute(None)
        # The hook must receive path, container, blob and the load options.
        mock_instance.load_file.assert_called_once_with(
            'file', 'container', 'blob', timeout=2
        )


if __name__ == '__main__':
    unittest.main()
| apache-2.0 |
AZtheAsian/zulip | zerver/views/pointer.py | 31 | 1319 | from __future__ import absolute_import
from django.http import HttpRequest, HttpResponse
from django.utils.translation import ugettext as _
from typing import Text
from zerver.decorator import to_non_negative_int
from zerver.lib.actions import do_update_pointer
from zerver.lib.request import has_request_variables, JsonableError, REQ
from zerver.lib.response import json_success
from zerver.models import UserProfile, UserMessage
def get_pointer_backend(request, user_profile):
    # type: (HttpRequest, UserProfile) -> HttpResponse
    """Return the requesting user's current pointer (last-read message id)."""
    return json_success({'pointer': user_profile.pointer})
@has_request_variables
def update_pointer_backend(request, user_profile,
                           pointer=REQ(converter=to_non_negative_int)):
    # type: (HttpRequest, UserProfile, int) -> HttpResponse
    """Advance the user's pointer to `pointer`.

    The pointer only moves forward: a value at or behind the current pointer
    is a successful no-op. The target message must be one the user actually
    received, otherwise a JsonableError is raised.
    """
    if pointer <= user_profile.pointer:
        return json_success()
    try:
        # Validate that the user has a UserMessage row for the target id.
        UserMessage.objects.get(
            user_profile=user_profile,
            message__id=pointer
        )
    except UserMessage.DoesNotExist:
        raise JsonableError(_("Invalid message ID"))
    # Record the new pointer in the request log line.
    request._log_data["extra"] = "[%s]" % (pointer,)
    # Legacy Android clients rely on the server updating read flags for them.
    update_flags = (request.client.name.lower() in ['android', "zulipandroid"])
    do_update_pointer(user_profile, pointer, update_flags=update_flags)
    return json_success()
| apache-2.0 |
achawkins/Forsteri | doc/conf.py | 1 | 9317 | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
#
# Forsteri documentation build configuration file, created by
# sphinx-quickstart on Fri Apr 24 16:09:15 2015.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys
import os
import shlex
import sphinx_rtd_theme
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#sys.path.insert(0, os.path.abspath('.'))
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
# Sphinx extension modules to enable; none are needed for this project.
extensions = []

# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']

# The suffix(es) of source filenames.
# You can specify multiple suffix as a list of string:
# source_suffix = ['.rst', '.md']
source_suffix = '.rst'

# The encoding of source files.
#source_encoding = 'utf-8-sig'

# The master toctree document.
master_doc = 'index'

# General information about the project.
project = 'Forsteri'
copyright = '2015, Andrew C. Hawkins'
author = 'Andrew C. Hawkins'

# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = '0.0.1'
# The full version, including alpha/beta/rc tags.
release = '0.0.1'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['_build']
# The reST default role (used for this markup: `text`) to use for all
# documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# If true, keep warnings as "system message" paragraphs in the built documents.
#keep_warnings = False
# If true, `todo` and `todoList` produce output, else they produce nothing.
todo_include_todos = False
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
#html_theme = 'alabaster'
html_theme = "sphinx_rtd_theme"
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []
html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# Add any extra paths that contain custom files (such as robots.txt or
# .htaccess) here, relative to this directory. These files are copied
# directly to the root of the documentation.
#html_extra_path = []
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
html_show_sourcelink = False
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
html_show_sphinx = False
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Language to be used for generating the HTML full-text search index.
# Sphinx supports the following languages:
# 'da', 'de', 'en', 'es', 'fi', 'fr', 'h', 'it', 'ja'
# 'nl', 'no', 'pt', 'ro', 'r', 'sv', 'tr'
#html_search_language = 'en'
# A dictionary with options for the search language support, empty by default.
# Now only 'ja' uses this config value
#html_search_options = {'type': 'default'}
# The name of a javascript file (relative to the configuration directory) that
# implements a search results scorer. If empty, the default will be used.
#html_search_scorer = 'scorer.js'
# Output file base name for HTML help builder.
htmlhelp_basename = 'Forsteridoc'
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#'preamble': '',
# Latex figure (float) alignment
#'figure_align': 'htbp',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
(master_doc, 'Forsteri.tex', 'Forsteri Documentation',
'Andrew Hawkins', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
(master_doc, 'forsteri', 'Forsteri Documentation',
[author], 1)
]
# If true, show URL addresses after external links.
#man_show_urls = False
# -- Options for Texinfo output -------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
# (startdocname, targetname, title, author, dir entry, description, category)
texinfo_documents = [
    (master_doc, 'Forsteri', 'Forsteri Documentation',
     author, 'Forsteri', 'One line description of project.',
     'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
#texinfo_appendices = []
# If false, no module index is generated.
#texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls = 'footnote'
# If true, do not generate a @detailmenu in the "Top" node's menu.
#texinfo_no_detailmenu = False
| mit |
DIRACGrid/COMDIRAC | Interfaces/scripts/dgetenv.py | 1 | 1529 | #! /usr/bin/env python
"""
print DCommands session environment variables
"""
import DIRAC
from COMDIRAC.Interfaces import critical
from COMDIRAC.Interfaces import DSession
if __name__ == "__main__":
from COMDIRAC.Interfaces import ConfigCache
from DIRAC.Core.Base import Script
Script.setUsageMessage( '\n'.join( [ __doc__.split( '\n' )[1],
'Usage:',
' %s [[section.]option]' % Script.scriptName,
'Arguments:',
' section: display all options in section',
'++ OR ++',
' section.option: display section specific option',] )
)
configCache = ConfigCache()
Script.parseCommandLine( ignoreErrors = True )
configCache.cacheConfig()
args = Script.getPositionalArgs()
session = DSession( )
if not args:
retVal = session.listEnv( )
if not retVal[ "OK" ]:
print "Error:", retVal[ "Message" ]
DIRAC.exit( -1 )
for o, v in retVal[ "Value" ]:
print o + "=" + v
DIRAC.exit( 0 )
arg = args[ 0 ]
section = None
option = None
if "." in arg:
section, option = arg.split( "." )
else:
option = arg
ret = None
if section:
ret = session.get( section, option )
else:
ret = session.getEnv( option )
if not ret[ "OK" ]:
print critical( ret[ "Message" ] )
print ret[ "Value" ]
| gpl-3.0 |
nistats/nistats | examples/03_second_level_models/plot_oasis.py | 1 | 6030 | """Voxel-Based Morphometry on Oasis dataset
========================================
This example uses Voxel-Based Morphometry (VBM) to study the relationship
between aging, sex and gray matter density.
The data come from the `OASIS <http://www.oasis-brains.org/>`_ project.
If you use it, you need to agree with the data usage agreement available
on the website.
It has been run through a standard VBM pipeline (using SPM8 and
NewSegment) to create VBM maps, which we study here.
VBM analysis of aging
---------------------
We run a standard GLM analysis to study the association between age
and gray matter density from the VBM data. We use only 100 subjects
from the OASIS dataset to limit the memory usage.
Note that more power would be obtained from using a larger sample of subjects.
"""
# Authors: Bertrand Thirion, <bertrand.thirion@inria.fr>, July 2018
# Elvis Dhomatob, <elvis.dohmatob@inria.fr>, Apr. 2014
# Virgile Fritsch, <virgile.fritsch@inria.fr>, Apr 2014
# Gael Varoquaux, Apr 2014
n_subjects = 100  # more subjects requires more memory

############################################################################
# Load Oasis dataset
# ------------------
from nilearn import datasets
oasis_dataset = datasets.fetch_oasis_vbm(n_subjects=n_subjects)
gray_matter_map_filenames = oasis_dataset.gray_matter_maps
age = oasis_dataset.ext_vars['age'].astype(float)

###############################################################################
# Sex is encoded as 'M' or 'F'. Hence, we make it a binary variable
# (True for female).
sex = oasis_dataset.ext_vars['mf'] == b'F'

###############################################################################
# Print basic information on the dataset.
print('First gray-matter anatomy image (3D) is located at: %s' %
      oasis_dataset.gray_matter_maps[0])  # 3D data
print('First white-matter anatomy image (3D) is located at: %s' %
      oasis_dataset.white_matter_maps[0])  # 3D data

###############################################################################
# Get a mask image: A mask of the cortex of the ICBM template.
gm_mask = datasets.fetch_icbm152_brain_gm_mask()

###############################################################################
# Resample the mask, since it has a different resolution than the VBM maps.
from nilearn.image import resample_to_img
mask_img = resample_to_img(
    gm_mask, gray_matter_map_filenames[0], interpolation='nearest')

#############################################################################
# Analyse data
# ------------
#
# First, we create an adequate design matrix with three columns: 'age',
# 'sex', 'intercept'.
import pandas as pd
import numpy as np
intercept = np.ones(n_subjects)
design_matrix = pd.DataFrame(np.vstack((age, sex, intercept)).T,
                             columns=['age', 'sex', 'intercept'])

#############################################################################
# Let's plot the design matrix.
from nistats.reporting import plot_design_matrix
ax = plot_design_matrix(design_matrix)
ax.set_title('Second level design matrix', fontsize=12)
ax.set_ylabel('maps')

##########################################################################
# Next, we specify and fit the second-level model when loading the data and
# also smooth a little bit to improve statistical behavior.
from nistats.second_level_model import SecondLevelModel
second_level_model = SecondLevelModel(smoothing_fwhm=2.0, mask_img=mask_img)
second_level_model.fit(gray_matter_map_filenames,
                       design_matrix=design_matrix)

##########################################################################
# Estimating the contrast is very simple. We can just provide the column
# name of the design matrix ([1, 0, 0] selects the 'age' column).
z_map = second_level_model.compute_contrast(second_level_contrast=[1, 0, 0],
                                            output_type='z_score')

###########################################################################
# We threshold the second level contrast (FDR-corrected, alpha=.05) and
# plot it.
from nistats.thresholding import map_threshold
from nilearn import plotting
_, threshold = map_threshold(
    z_map, alpha=.05, height_control='fdr')
print('The FDR=.05-corrected threshold is: %.3g' % threshold)

display = plotting.plot_stat_map(
    z_map, threshold=threshold, colorbar=True, display_mode='z',
    cut_coords=[-4, 26],
    title='age effect on grey matter density (FDR = .05)')
plotting.show()

###########################################################################
# We can also study the effect of sex by computing the contrast,
# thresholding it and plotting the resulting map.
z_map = second_level_model.compute_contrast(second_level_contrast='sex',
                                            output_type='z_score')
_, threshold = map_threshold(
    z_map, alpha=.05, height_control='fdr')
plotting.plot_stat_map(
    z_map, threshold=threshold, colorbar=True,
    title='sex effect on grey matter density (FDR = .05)')

###########################################################################
# Note that there does not seem to be any significant effect of sex on
# grey matter density on that dataset.

###########################################################################
# Generating a report
# -------------------
# It can be useful to quickly generate a
# portable, ready-to-view report with most of the pertinent information.
# This is easy to do if you have a fitted model and the list of contrasts,
# which we do here.
from nistats.reporting import make_glm_report

icbm152_2009 = datasets.fetch_icbm152_2009()
report = make_glm_report(model=second_level_model,
                         contrasts=['age', 'sex'],
                         bg_img=icbm152_2009['t1'],
                         )

#########################################################################
# We have several ways to access the report:
# report  # This report can be viewed in a notebook
# report.save_as_html('report.html')
# report.open_in_browser()
| bsd-3-clause |
saltstar/spark | examples/src/main/python/ml/standard_scaler_example.py | 128 | 1594 | #
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from __future__ import print_function
# $example on$
from pyspark.ml.feature import StandardScaler
# $example off$
from pyspark.sql import SparkSession
if __name__ == "__main__":
    # Build (or reuse) a local SparkSession for this example.
    spark = SparkSession\
        .builder\
        .appName("StandardScalerExample")\
        .getOrCreate()

    # $example on$
    dataFrame = spark.read.format("libsvm").load("data/mllib/sample_libsvm_data.txt")
    # withStd=True, withMean=False: divide by the stddev without centering.
    scaler = StandardScaler(inputCol="features", outputCol="scaledFeatures",
                            withStd=True, withMean=False)

    # Compute summary statistics by fitting the StandardScaler
    scalerModel = scaler.fit(dataFrame)

    # Normalize each feature to have unit standard deviation.
    scaledData = scalerModel.transform(dataFrame)
    scaledData.show()
    # $example off$

    spark.stop()
| apache-2.0 |
spring-week-topos/horizon-week | openstack_dashboard/dashboards/project/images/urls.py | 5 | 1396 | # vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2012 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# All Rights Reserved.
#
# Copyright 2012 Nebula, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from django.conf.urls import include # noqa
from django.conf.urls import patterns # noqa
from django.conf.urls import url # noqa
from openstack_dashboard.dashboards.project.images.images \
import urls as image_urls
from openstack_dashboard.dashboards.project.images.snapshots \
import urls as snapshot_urls
from openstack_dashboard.dashboards.project.images import views
# Dashboard index plus the nested image and snapshot URL sets, each mounted
# under its own namespace.
urlpatterns = patterns('',
    url(r'^$', views.IndexView.as_view(), name='index'),
    url(r'', include(image_urls, namespace='images')),
    url(r'', include(snapshot_urls, namespace='snapshots')),
)
| apache-2.0 |
RPGOne/Skynet | pytorch-master/torch/nn/modules/upsampling.py | 1 | 3469 | from numbers import Integral
from .module import Module
from .. import functional as F
from .utils import _pair
class _UpsamplingBase(Module):
    """Shared configuration/validation logic for the 2D upsampling modules."""

    def __init__(self, size=None, scale_factor=None):
        super(_UpsamplingBase, self).__init__()
        # At least one sizing mechanism must be supplied: an explicit output
        # size or an integer multiplier.
        if size is None and scale_factor is None:
            raise ValueError('either size or scale_factor should be defined')
        if scale_factor is not None and not isinstance(scale_factor, Integral):
            raise ValueError('scale_factor must be of integer type')
        # _pair() normalizes size to a 2-tuple (presumably duplicating a
        # scalar into (h, w), mapping None to (None, None)) — see .utils.
        self.size = _pair(size)
        self.scale_factor = scale_factor

    def __repr__(self):
        # Report whichever option is in effect; scale_factor wins when set.
        if self.scale_factor is not None:
            info = 'scale_factor=' + str(self.scale_factor)
        else:
            info = 'size=' + str(self.size)
        return self.__class__.__name__ + '(' + info + ')'
class UpsamplingNearest2d(_UpsamplingBase):
    r"""
    Applies a 2D nearest neighbor upsampling to an input signal composed of
    several input channels.

    To specify the scale, it takes either the :attr:`size` or the
    :attr:`scale_factor` as its constructor argument.

    When `size` is given, it is the output size of the image (h, w).

    Args:
        size (tuple, optional): a tuple of ints (H_out, W_out) output sizes
        scale_factor (int, optional): the multiplier for the image height / width

    Shape:
        - Input: :math:`(N, C, H_{in}, W_{in})`
        - Output: :math:`(N, C, H_{out}, W_{out})` where
          :math:`H_{out} = floor(H_{in} * scale\_factor)`
          :math:`W_{out} = floor(W_{in} * scale\_factor)`

    Examples::

        >>> inp
        Variable containing:
        (0 ,0 ,.,.) =
          1  2
          3  4
        [torch.FloatTensor of size 1x1x2x2]

        >>> m = nn.UpsamplingNearest2d(scale_factor=2)
        >>> m(inp)
        Variable containing:
        (0 ,0 ,.,.) =
          1  1  2  2
          1  1  2  2
          3  3  4  4
          3  3  4  4
        [torch.FloatTensor of size 1x1x4x4]
    """

    def forward(self, input):
        # Delegate to the functional implementation; exactly one of
        # self.size / self.scale_factor is None (validated in the base class).
        return F.upsample_nearest(input, self.size, self.scale_factor)
class UpsamplingBilinear2d(_UpsamplingBase):
    r"""
    Applies a 2D bilinear upsampling to an input signal composed of several
    input channels.

    To specify the scale, it takes either the :attr:`size` or the
    :attr:`scale_factor` as its constructor argument.

    When `size` is given, it is the output size of the image (h, w).

    Args:
        size (tuple, optional): a tuple of ints (H_out, W_out) output sizes
        scale_factor (int, optional): the multiplier for the image height / width

    Shape:
        - Input: :math:`(N, C, H_{in}, W_{in})`
        - Output: :math:`(N, C, H_{out}, W_{out})` where
          :math:`H_{out} = floor(H_{in} * scale\_factor)`
          :math:`W_{out} = floor(W_{in} * scale\_factor)`

    Examples::

        >>> inp
        Variable containing:
        (0 ,0 ,.,.) =
          1  2
          3  4
        [torch.FloatTensor of size 1x1x2x2]

        >>> m = nn.UpsamplingBilinear2d(scale_factor=2)
        >>> m(inp)
        Variable containing:
        (0 ,0 ,.,.) =
          1.0000  1.3333  1.6667  2.0000
          1.6667  2.0000  2.3333  2.6667
          2.3333  2.6667  3.0000  3.3333
          3.0000  3.3333  3.6667  4.0000
        [torch.FloatTensor of size 1x1x4x4]
    """

    def forward(self, input):
        # Delegate to the functional implementation; exactly one of
        # self.size / self.scale_factor is None (validated in the base class).
        return F.upsample_bilinear(input, self.size, self.scale_factor)
| bsd-3-clause |
python-ivi/python-ivi | ivi/agilent/agilentMSOX91304A.py | 2 | 1692 | """
Python Interchangeable Virtual Instrument Library
Copyright (c) 2012-2017 Alex Forencich
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
"""
from .agilent90000 import *
class agilentMSOX91304A(agilent90000):
    "Agilent Infiniium MSOX91304A IVI oscilloscope driver"

    def __init__(self, *args, **kwargs):
        # Record the expected instrument model string before the base class
        # runs, but only if a subclass has not already set it.
        self.__dict__.setdefault('_instrument_id', 'MSOX91304A')

        super(agilentMSOX91304A, self).__init__(*args, **kwargs)

        # Model-specific capabilities: 4 analog + 16 digital channels,
        # 13 GHz bandwidth.
        self._analog_channel_count = 4
        self._digital_channel_count = 16
        self._channel_count = self._analog_channel_count + self._digital_channel_count
        self._bandwidth = 13e9

        self._init_channels()
| mit |
sdague/home-assistant | tests/components/vacuum/test_device_action.py | 15 | 3063 | """The tests for Vacuum device actions."""
import pytest
import homeassistant.components.automation as automation
from homeassistant.components.vacuum import DOMAIN
from homeassistant.helpers import device_registry
from homeassistant.setup import async_setup_component
from tests.common import (
MockConfigEntry,
assert_lists_same,
async_get_device_automations,
async_mock_service,
mock_device_registry,
mock_registry,
)
@pytest.fixture
def device_reg(hass):
    """Return an empty, loaded, device registry bound to *hass*."""
    return mock_device_registry(hass)
@pytest.fixture
def entity_reg(hass):
    """Return an empty, loaded, entity registry bound to *hass*."""
    return mock_registry(hass)
async def test_get_actions(hass, device_reg, entity_reg):
    """Test we get the expected actions from a vacuum."""
    # Register a config entry, a device, and a vacuum entity tied to that
    # device so device automations can be looked up for it.
    config_entry = MockConfigEntry(domain="test", data={})
    config_entry.add_to_hass(hass)
    device_entry = device_reg.async_get_or_create(
        config_entry_id=config_entry.entry_id,
        connections={(device_registry.CONNECTION_NETWORK_MAC, "12:34:56:AB:CD:EF")},
    )
    entity_reg.async_get_or_create(DOMAIN, "test", "5678", device_id=device_entry.id)
    # A vacuum device is expected to expose exactly the "clean" and "dock"
    # device actions.
    expected_actions = [
        {
            "domain": DOMAIN,
            "type": "clean",
            "device_id": device_entry.id,
            "entity_id": "vacuum.test_5678",
        },
        {
            "domain": DOMAIN,
            "type": "dock",
            "device_id": device_entry.id,
            "entity_id": "vacuum.test_5678",
        },
    ]
    actions = await async_get_device_automations(hass, "action", device_entry.id)
    assert_lists_same(actions, expected_actions)
async def test_action(hass):
    """Test that dock/clean device actions call the mapped vacuum services."""
    # Two automations: one firing "dock", one firing "clean", each triggered
    # by its own event.
    assert await async_setup_component(
        hass,
        automation.DOMAIN,
        {
            automation.DOMAIN: [
                {
                    "trigger": {"platform": "event", "event_type": "test_event_dock"},
                    "action": {
                        "domain": DOMAIN,
                        "device_id": "abcdefgh",
                        "entity_id": "vacuum.entity",
                        "type": "dock",
                    },
                },
                {
                    "trigger": {"platform": "event", "event_type": "test_event_clean"},
                    "action": {
                        "domain": DOMAIN,
                        "device_id": "abcdefgh",
                        "entity_id": "vacuum.entity",
                        "type": "clean",
                    },
                },
            ]
        },
    )

    # "dock" maps to vacuum.return_to_base and "clean" maps to vacuum.start.
    dock_calls = async_mock_service(hass, "vacuum", "return_to_base")
    clean_calls = async_mock_service(hass, "vacuum", "start")

    hass.bus.async_fire("test_event_dock")
    await hass.async_block_till_done()
    assert len(dock_calls) == 1
    assert len(clean_calls) == 0

    hass.bus.async_fire("test_event_clean")
    await hass.async_block_till_done()
    assert len(dock_calls) == 1
    assert len(clean_calls) == 1
| apache-2.0 |
mihaip/NewsBlur | vendor/feedvalidator/demo/src/tests/testXmlEncodingDecode.py | 16 | 5356 | #!/usr/bin/python
"""$Id: testXmlEncodingDecode.py 710 2006-10-13 00:57:33Z josephw $"""
__author__ = "Joseph Walton <http://www.kafsemo.org/>"
__version__ = "$Revision: 710 $"
__date__ = "$Date: 2006-10-13 00:57:33 +0000 (Fri, 13 Oct 2006) $"
__copyright__ = "Copyright (c) 2004 Joseph Walton"
import os, sys
curdir = os.path.abspath(os.path.dirname(sys.argv[0]))
srcdir = os.path.split(curdir)[0]
if srcdir not in sys.path:
sys.path.insert(0, srcdir)
basedir = os.path.split(srcdir)[0]
import unittest
from feedvalidator import xmlEncoding
from feedvalidator.logging import *
# Shorthand content type used throughout the tests: generic XML.
ctAX='application/xml'

class TestDecode(unittest.TestCase):
  """Tests for xmlEncoding.decode(): encoding detection, XML-declaration
  handling, BOM stripping and error logging."""

  def _assertEqualUnicode(self, a, b):
    # Decoded output must be a real unicode object, not None or a byte string.
    self.assertNotEqual(a, None, 'Decoded strings should not equal None')
    self.assertEqual(type(a), unicode, 'Decoded strings should be Unicode (was ' + str(type(a)) + ')')
    self.assertEqual(type(b), unicode, 'Test suite error: test strings must be Unicode')
    self.assertEqual(a, b)

  def testProvidedEncoding(self):
    # An externally supplied encoding (e.g. from HTTP headers) is honoured.
    loggedEvents=[]
    (encoding, decoded) = xmlEncoding.decode(ctAX, 'UTF-8', '<x/>', loggedEvents)
    self.assertEquals('UTF-8', encoding)
    self._assertEqualUnicode(decoded, u'<x/>')
    self.assertEqual(loggedEvents, [])

    loggedEvents=[]
    (encoding, decoded) = xmlEncoding.decode(ctAX, 'UTF-8', '<?xml version="1.0" encoding="utf-8"?><x/>', loggedEvents)
    self.assertEquals('UTF-8', encoding)
    self._assertEqualUnicode(decoded, u'<?xml version="1.0" encoding="utf-8"?><x/>')
    self.assertEquals(loggedEvents, [])

  def testNoDeclarationOrBOM(self):
    # With no external encoding, declaration or BOM there is nothing to go
    # on: decode fails and a MissingEncoding warning is logged.
    loggedEvents=[]
    self.assertEquals(xmlEncoding.decode(ctAX, None, '<x/>', loggedEvents)[-1], None)
    self.assertEquals(len(loggedEvents), 1)
    self.assertEquals(loggedEvents[0].__class__, MissingEncoding, "Must warn if there's no clue as to encoding")

# This document is currently detected as UTF-8, rather than None.
#
#  def testMissingEncodingDeclaration(self):
#    loggedEvents=[]
#    self._assertEqualUnicode(xmlEncoding.decode(ctAX, None, '<?xml version="1.0"?><x/>', loggedEvents), u'<?xml version="1.0"?><x/>')
#    self.assertEquals(len(loggedEvents), 1)
#    self.assertEquals(loggedEvents[0].__class__, MissingEncoding, "Must warn if there's no clue as to encoding")

  def testJustDeclaration(self):
    # The encoding pseudo-attribute of the XML declaration alone suffices.
    loggedEvents=[]
    (encoding, decoded) = xmlEncoding.decode(ctAX, None, '<?xml version="1.0" encoding="utf-8"?><x/>', loggedEvents)
    self.assertEquals(encoding, 'utf-8')
    self._assertEqualUnicode(decoded, u'<?xml version="1.0" encoding="utf-8"?><x/>')
    self.assertEquals(loggedEvents, [])

  def testSupplyUnknownEncoding(self):
    loggedEvents=[]
    self.assertEquals(xmlEncoding.decode(ctAX, 'X-FAKE', '<x/>', loggedEvents)[-1], None)
    self.assertEquals(len(loggedEvents), 1)
    self.assertEquals(loggedEvents[0].__class__, UnknownEncoding, 'Must fail if an unknown encoding is used')

  def testDeclareUnknownEncoding(self):
    loggedEvents=[]
    self.assertEquals(xmlEncoding.decode(ctAX, None, '<?xml version="1.0" encoding="X-FAKE"?><x/>', loggedEvents)[-1], None)
    self.assert_(loggedEvents)
    self.assertEquals(loggedEvents[-1].__class__, UnknownEncoding)

  def testWarnMismatch(self):
    # External encoding and declared encoding disagree: decode still
    # succeeds, but an EncodingMismatch is logged.
    loggedEvents=[]
    self.assertEquals(xmlEncoding.decode(ctAX, 'US-ASCII', '<?xml version="1.0" encoding="UTF-8"?><x/>', loggedEvents)[-1], u'<?xml version="1.0" encoding="UTF-8"?><x/>')
    self.assert_(loggedEvents)
    self.assertEquals(loggedEvents[-1].__class__, EncodingMismatch)

  def testDecodeUTF8(self):
    # \xc2\xa3 is the UTF-8 encoding of the pound sign (U+00A3).
    loggedEvents=[]
    self.assertEquals(xmlEncoding.decode(ctAX, 'utf-8', '<x>\xc2\xa3</x>', loggedEvents)[-1], u'<x>\u00a3</x>')
    self.assertEquals(loggedEvents, [])

  def testDecodeBadUTF8(self):
    """Ensure bad UTF-8 is flagged as such, but still decoded."""
    # The invalid byte is replaced with U+FFFD and a UnicodeError logged.
    loggedEvents=[]
    self.assertEquals(xmlEncoding.decode(ctAX, 'utf-8', '<x>\xa3</x>', loggedEvents)[-1], u'<x>\ufffd</x>')
    self.assert_(loggedEvents)
    self.assertEquals(loggedEvents[-1].__class__, UnicodeError)

  def testRemovedBOM(self):
    """Make sure the initial BOM signature is not in the decoded string."""
    # \xff\xfe is the UTF-16 little-endian BOM, followed by "<x/>".
    loggedEvents=[]
    self.assertEquals(xmlEncoding.decode(ctAX, 'UTF-16', '\xff\xfe\x3c\x00\x78\x00\x2f\x00\x3e\x00', loggedEvents)[-1], u'<x/>')
    self.assertEquals(loggedEvents, [])
class TestRemoveDeclaration(unittest.TestCase):
  """Tests for xmlEncoding.removeDeclaration(): strip only the encoding
  pseudo-attribute from a valid, leading XML declaration."""

  def testRemoveSimple(self):
    self.assertEqual(xmlEncoding.removeDeclaration(
        '<?xml version="1.0" encoding="utf-8"?>'),
        '<?xml version="1.0" ?>')
    self.assertEqual(xmlEncoding.removeDeclaration(
        "<?xml version='1.0' encoding='us-ascii' ?>"),
        "<?xml version='1.0' ?>")

  def testNotRemoved(self):
    """Make sure that invalid, or missing, declarations aren't affected."""
    for x in [
      '<?xml encoding="utf-8"?>',  # Missing version
      '<doc />',  # No declaration
      ' <?xml version="1.0" encoding="utf-8"?>'  # Space before declaration
    ]:
      self.assertEqual(xmlEncoding.removeDeclaration(x), x)
def buildTestSuite():
  """Assemble every test case in this module into a single suite."""
  loader = unittest.TestLoader()
  suite = unittest.TestSuite()
  for case in (TestDecode, TestRemoveDeclaration):
    suite.addTests(loader.loadTestsFromTestCase(case))
  return suite
# Allow this test module to be run directly from the command line.
if __name__ == "__main__":
  unittest.main()
| mit |
iliavolyova/evo-clustering | src/stats.py | 1 | 5952 | from __future__ import division
import os
from functools import partial
import log as logger
import core
import gui_graphs
from PyQt4.QtGui import *
defaultParams = {
'Dataset' : 'Iris',
'Number of generations' : 100,
'Population size': 20,
'Max clusters' : 5,
'Fitness method': 'db',
'q' : 2,
't' : 2,
'Distance measure': 'Minkowski_2',
'Feature significance': True
}
class Stats():
    def __init__(self, window):
        """Bind to the stats *window* and wire up its widgets."""
        self.window = window
        self.plots = {}  # row index -> DetailsPlot, kept alive while shown
        self.setup_ui()
    def setup_ui(self):
        # Prepare the results table and fill the run-group combo box.
        self.setup_table()
        self.populate_combo()
    def populate_combo(self):
        """List the experiment run groups (subdirectories of ../res) in the
        dataset combo box."""
        self.resfolder = os.path.join('..', 'res')
        self.run_groups = []
        for dirname, dirnames, filenames in os.walk(self.resfolder):
            for subdirname in dirnames:
                self.run_groups.append(subdirname)
        for r in self.run_groups:
            self.window.datasetComboBox.addItem(r)
        self.window.datasetComboBox.activated.connect(self.run_group_changed)
    def run_group_changed(self, rg_index):
        """Load every run of the newly selected group and refresh the view.

        *rg_index* is the combo-box index; 0 is the blank "no selection"
        entry, real groups start at 1.
        """
        run_paths = []
        self.runs = []
        if rg_index != 0:
            basepath = os.path.join(self.resfolder, self.run_groups[rg_index-1])
            for dirname, dirnames, filenames in os.walk(basepath):
                for f in filenames:
                    run_paths.append(os.path.join(basepath, f))
        else:
            # Nothing selected: clear the table and the summary labels.
            self.table.clearContents()
            self.clearLabels()
            return

        log = logger.Log()
        for path in run_paths:
            run = {}
            log.load(path)
            run['params'] = log.head_as_array
            run['colormaps'] = log.colormaps
            run['measures'] = log.measures
            dirs, filename = os.path.split(path)
            # File names look like <prefix>_<n>_<dataset>_...; the third
            # underscore-separated token is the dataset name.
            run['dataset'] = filename.split('_')[2]
            run['name'] = filename
            self.runs.append(run)

        params = self.get_params(self.runs[0])
        # NOTE(review): `is not 'Minkowski2'` compares string identity, not
        # equality, and the defaults use the spelling 'Minkowski_2' — so this
        # condition is likely always true. Verify the intended behavior.
        params['Feature significance'] = False if params['Distance measure'] is not 'Minkowski2' else params['Feature significance']
        self.window.label_dataset.setText(params['Dataset'])
        opt_config = core.Config(params)
        self.window.label_classes.setText(str(opt_config.dataset.params['Classes']))
        distribution = []
        for k, v in opt_config.dataset.params['Clusters'].iteritems():
            distribution.append(v)
        self.window.label_distribution.setText(str(distribution))
        self.populate_table()
    def populate_table(self):
        """Fill the results table: one row per run plus an 'Average' row 0."""
        self.table.clearContents()
        self.table.setRowCount(len(self.runs)+1)

        cls_sum=0      # total number of clusters over all runs
        dist_sum=[]    # per-position sums of cluster sizes (largest first)
        dist_cnt=[]    # how many runs contributed to each position

        runs_by_name = sorted(self.runs, key=lambda run: run['name'])
        for row, run in enumerate(runs_by_name):
            # Cluster sizes in the final generation, largest first.
            colormap = run['colormaps'][-1]
            l_counts = [colormap.count(x) for x in set(colormap)]
            l_counts.sort(reverse=True)
            # Accumulate sums/counts for the average row.
            for index, val in enumerate(l_counts):
                if index >= len(dist_sum):
                    dist_sum.append(val)
                    dist_cnt.append(1)
                else:
                    dist_sum[index] += val
                    dist_cnt[index] += 1
            cls_sum += len(l_counts)

            params = self.get_params(run)
            conf = core.Config(params)
            # Columns: name, #clusters, size distribution, optimal fitness,
            # achieved fitness, details button.
            for col in range(6):
                item = QTableWidgetItem('')
                if col == 0:
                    item = QTableWidgetItem(run['name'][14:])
                elif col == 1:
                    item = QTableWidgetItem(str(len(l_counts)))
                elif col == 2:
                    item = QTableWidgetItem(str(l_counts))
                elif col == 3:
                    item = QTableWidgetItem('%.4f' % (1 / conf.dataset.getOptimalFitness(conf)))
                elif col == 4:
                    item = QTableWidgetItem('%.4f' % (1 / run['measures'][-1][5]))
                elif col == 5:
                    btn = QPushButton(self.table)
                    btn.setText('Show')
                    # NOTE(review): the button passes row - 1 into
                    # show_details, which indexes self.runs (unsorted) while
                    # rows here follow runs_by_name order — verify this maps
                    # to the correct run.
                    btn.clicked.connect(partial(self.show_details, row - 1))
                    self.table.setCellWidget(row+1, col, btn)
                if col != 5:
                    self.table.setItem(row+1, col, item)

        # Row 0: averages over all runs (true division via __future__ import).
        avg_clsnum = '%.3f' % (cls_sum / len(self.runs))
        avg_dist = []
        for index, val in enumerate(dist_sum):
            avg_dist.append(dist_sum[index] / dist_cnt[index])
        avg_dist_str = ["%.1f" % t for t in avg_dist]
        for index, val in enumerate(['Average', avg_clsnum, '[' + ", ".join(avg_dist_str) + ']']):
            item = QTableWidgetItem(val)
            self.table.setItem(0, index, item)
    def show_details(self, row):
        # Open (and retain a reference to, so it is not garbage collected)
        # the detail plot for this run.
        self.plots[row] = gui_graphs.DetailsPlot(self.runs[row])
def get_params(self, run):
defaultParams['Dataset'] = run['dataset']
defaultParams['Number of generations'] = int(run['params'][2])
defaultParams['Population size'] = int(run['params'][1])
defaultParams['Max clusters'] = int(run['params'][0])
defaultParams['Fitness method'] = run['params'][3]
defaultParams['Distance measure'] = run['params'][4]
defaultParams['q'] = int(run['params'][5])
defaultParams['t'] = int(run['params'][6])
return defaultParams
    def setup_table(self):
        # Fixed column widths: name, #clusters, distribution, optimal
        # fitness, achieved fitness, details button.
        self.table = self.window.table_results
        self.table.setColumnWidth(0, 235)
        self.table.setColumnWidth(1, 50)
        self.table.setColumnWidth(2, 180)
        self.table.setColumnWidth(3, 65)
        self.table.setColumnWidth(4, 65)
        self.table.setColumnWidth(5, 60)
    def clearLabels(self):
        # Blank the dataset summary labels (shown when no group is selected).
        self.window.label_classes.setText('')
        self.window.label_dataset.setText('')
self.window.label_distribution.setText('') | mit |
paritosh-in/tizen-extensions-crosswalk | tools/gyp/pylib/gyp/generator/ninja.py | 13 | 74892 | # Copyright (c) 2013 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import copy
import hashlib
import multiprocessing
import os.path
import re
import signal
import subprocess
import sys
import gyp
import gyp.common
import gyp.msvs_emulation
import gyp.MSVSUtil as MSVSUtil
import gyp.xcode_emulation
from gyp.common import GetEnvironFallback
import gyp.ninja_syntax as ninja_syntax
# Mapping from gyp's generic path/name variables to their ninja spellings.
generator_default_variables = {
  'EXECUTABLE_PREFIX': '',
  'EXECUTABLE_SUFFIX': '',
  'STATIC_LIB_PREFIX': 'lib',
  'STATIC_LIB_SUFFIX': '.a',
  'SHARED_LIB_PREFIX': 'lib',

  # Gyp expects the following variables to be expandable by the build
  # system to the appropriate locations.  Ninja prefers paths to be
  # known at gyp time.  To resolve this, introduce special
  # variables starting with $! and $| (which begin with a $ so gyp knows it
  # should be treated specially, but is otherwise an invalid
  # ninja/shell variable) that are passed to gyp here but expanded
  # before writing out into the target .ninja files; see
  # ExpandSpecial.
  # $! is used for variables that represent a path and that can only appear at
  # the start of a string, while $| is used for variables that can appear
  # anywhere in a string.
  'INTERMEDIATE_DIR': '$!INTERMEDIATE_DIR',
  'SHARED_INTERMEDIATE_DIR': '$!PRODUCT_DIR/gen',
  'PRODUCT_DIR': '$!PRODUCT_DIR',
  'CONFIGURATION_NAME': '$|CONFIGURATION_NAME',

  # Special variables that may be used by gyp 'rule' targets.
  # We generate definitions for these variables on the fly when processing a
  # rule.
  'RULE_INPUT_ROOT': '${root}',
  'RULE_INPUT_DIRNAME': '${dirname}',
  'RULE_INPUT_PATH': '${source}',
  'RULE_INPUT_EXT': '${ext}',
  'RULE_INPUT_NAME': '${name}',
}
# Placates pylint.
generator_additional_non_configuration_keys = []
generator_additional_path_sections = []
generator_extra_sources_for_rules = []

# TODO: figure out how to not build extra host objects in the non-cross-compile
# case when this is enabled, and enable unconditionally.
# Any host-toolset environment override implies a distinct host toolchain,
# so enable multiple-toolset (host + target) support.
generator_supports_multiple_toolsets = (
  os.environ.get('GYP_CROSSCOMPILE') or
  os.environ.get('AR_host') or
  os.environ.get('CC_host') or
  os.environ.get('CXX_host') or
  os.environ.get('AR_target') or
  os.environ.get('CC_target') or
  os.environ.get('CXX_target'))
def StripPrefix(arg, prefix):
  """Return |arg| with a leading |prefix| removed, or |arg| unchanged if it
  does not start with |prefix|."""
  return arg[len(prefix):] if arg.startswith(prefix) else arg
def QuoteShellArgument(arg, flavor):
  """Quote a string so the shell (or, for |flavor| 'win', a response file)
  interprets it as a single argument."""
  # Common benign characters need no quoting at all; whitelist those rather
  # than trying to enumerate everything dangerous.
  if re.match(r'^[a-zA-Z0-9_=.\\/-]+$', arg):
    return arg
  if flavor == 'win':
    return gyp.msvs_emulation.QuoteForRspFile(arg)
  # POSIX shells: wrap in single quotes, closing and reopening the quotes
  # around any embedded single quote ('  ->  '"'"' ).
  return "'" + arg.replace("'", "'\"'\"'") + "'"
def Define(d, flavor):
  """Takes a preprocessor define and returns a -D parameter that's ninja- and
  shell-escaped."""
  if flavor == 'win':
    # cl.exe replaces literal # characters with = in preprocessor definitions
    # for some reason. Octal-encode to work around that.
    d = d.replace('#', '\\%03o' % ord('#'))
  return QuoteShellArgument(ninja_syntax.escape('-D' + d), flavor)
class Target:
  """Target represents the paths used within a single gyp target.
  Conceptually, building a single target A is a series of steps:
  1) actions/rules/copies generates source/resources/etc.
  2) compiles generates .o files
  3) link generates a binary (library/executable)
  4) bundle merges the above in a mac bundle
  (Any of these steps can be optional.)
  From a build ordering perspective, a dependent target B could just
  depend on the last output of this series of steps.
  But some dependent commands sometimes need to reach inside the box.
  For example, when linking B it needs to get the path to the static
  library generated by A.
  This object stores those paths. To keep things simple, member
  variables only store concrete paths to single files, while methods
  compute derived values like "the last output of the target".
  """
  def __init__(self, type):
    # Gyp type ("static_library", etc.) of this target.
    self.type = type
    # Stamp tracking input dependencies needed before dependent actions run.
    self.preaction_stamp = None
    # Stamp tracking input dependencies needed before dependent compiles run.
    self.precompile_stamp = None
    # Completion marker for this target's own actions/rules/copies, if any.
    self.actions_stamp = None
    # Output of the link step, if any.
    self.binary = None
    # Completion marker for building the mac bundle, if any.
    self.bundle = None
    # Windows incremental linking links against the .objs composing a .lib
    # rather than the .lib itself; that list lives here.
    self.component_objs = None
    # Windows only: dependents link against the import .lib (not the dll),
    # so track it separately.
    self.import_lib = None
  def Linkable(self):
    """Return true if this is a target that can be linked against."""
    return self.type in ('static_library', 'shared_library')
  def UsesToc(self, flavor):
    """Return true if the target should produce a restat rule based on a TOC
    file."""
    # A bundle's TOC would naively end up inside the bundle, so bundles are
    # excluded for now; Windows never uses TOC files.
    if flavor == 'win' or self.bundle:
      return False
    return self.type in ('shared_library', 'loadable_module')
  def PreActionInput(self, flavor):
    """Return the path, if any, that should be used as a dependency of
    any dependent action step."""
    if self.UsesToc(flavor):
      return self.FinalOutput() + '.TOC'
    return self.FinalOutput() or self.preaction_stamp
  def PreCompileInput(self):
    """Return the path, if any, that should be used as a dependency of
    any dependent compile step."""
    return self.actions_stamp or self.precompile_stamp
  def FinalOutput(self):
    """Return the last output of the target, which depends on all prior
    steps."""
    return self.bundle or self.binary or self.actions_stamp
# A small discourse on paths as used within the Ninja build:
# All files we produce (both at gyp and at build time) appear in the
# build directory (e.g. out/Debug).
#
# Paths within a given .gyp file are always relative to the directory
# containing the .gyp file. Call these "gyp paths". This includes
# sources as well as the starting directory a given gyp rule/action
# expects to be run from. We call the path from the source root to
# the gyp file the "base directory" within the per-.gyp-file
# NinjaWriter code.
#
# All paths as written into the .ninja files are relative to the build
# directory. Call these paths "ninja paths".
#
# We translate between these two notions of paths with two helper
# functions:
#
# - GypPathToNinja translates a gyp path (i.e. relative to the .gyp file)
# into the equivalent ninja path.
#
# - GypPathToUniqueOutput translates a gyp path into a ninja path to write
# an output file; the result can be namespaced such that it is unique
# to the input file name as well as the output target name.
class NinjaWriter:
  def __init__(self, qualified_target, target_outputs, base_dir, build_dir,
               output_file, flavor, toplevel_dir=None):
    """
    qualified_target: fully-qualified gyp target name (used to uniquify
                      generated rule names)
    target_outputs: map of qualified target name -> Target object for
                    already-written dependencies
    base_dir: path from source root to directory containing this gyp file,
    by gyp semantics, all input paths are relative to this
    build_dir: path from source root to build output
    output_file: file object the .ninja text is written to
    flavor: 'win', 'mac', or a posix flavor
    toplevel_dir: path to the toplevel directory
    """
    self.qualified_target = qualified_target
    self.target_outputs = target_outputs
    self.base_dir = base_dir
    self.build_dir = build_dir
    self.ninja = ninja_syntax.Writer(output_file)
    self.flavor = flavor
    self.abs_build_dir = None
    if toplevel_dir is not None:
      self.abs_build_dir = os.path.abspath(os.path.join(toplevel_dir,
                                                        build_dir))
    self.obj_ext = '.obj' if flavor == 'win' else '.o'
    if flavor == 'win':
      # See docstring of msvs_emulation.GenerateEnvironmentFiles().
      self.win_env = {}
      for arch in ('x86', 'x64'):
        self.win_env[arch] = 'environment.' + arch
    # Relative path from build output dir to base dir.
    build_to_top = gyp.common.InvertRelativePath(build_dir, toplevel_dir)
    self.build_to_base = os.path.join(build_to_top, base_dir)
    # Relative path from base dir to build dir.
    base_to_top = gyp.common.InvertRelativePath(base_dir, toplevel_dir)
    self.base_to_build = os.path.join(base_to_top, build_dir)
def ExpandSpecial(self, path, product_dir=None):
"""Expand specials like $!PRODUCT_DIR in |path|.
If |product_dir| is None, assumes the cwd is already the product
dir. Otherwise, |product_dir| is the relative path to the product
dir.
"""
PRODUCT_DIR = '$!PRODUCT_DIR'
if PRODUCT_DIR in path:
if product_dir:
path = path.replace(PRODUCT_DIR, product_dir)
else:
path = path.replace(PRODUCT_DIR + '/', '')
path = path.replace(PRODUCT_DIR + '\\', '')
path = path.replace(PRODUCT_DIR, '.')
INTERMEDIATE_DIR = '$!INTERMEDIATE_DIR'
if INTERMEDIATE_DIR in path:
int_dir = self.GypPathToUniqueOutput('gen')
# GypPathToUniqueOutput generates a path relative to the product dir,
# so insert product_dir in front if it is provided.
path = path.replace(INTERMEDIATE_DIR,
os.path.join(product_dir or '', int_dir))
CONFIGURATION_NAME = '$|CONFIGURATION_NAME'
path = path.replace(CONFIGURATION_NAME, self.config_name)
return path
def ExpandRuleVariables(self, path, root, dirname, source, ext, name):
if self.flavor == 'win':
path = self.msvs_settings.ConvertVSMacros(
path, config=self.config_name)
path = path.replace(generator_default_variables['RULE_INPUT_ROOT'], root)
path = path.replace(generator_default_variables['RULE_INPUT_DIRNAME'],
dirname)
path = path.replace(generator_default_variables['RULE_INPUT_PATH'], source)
path = path.replace(generator_default_variables['RULE_INPUT_EXT'], ext)
path = path.replace(generator_default_variables['RULE_INPUT_NAME'], name)
return path
def GypPathToNinja(self, path, env=None):
"""Translate a gyp path to a ninja path, optionally expanding environment
variable references in |path| with |env|.
See the above discourse on path conversions."""
if env:
if self.flavor == 'mac':
path = gyp.xcode_emulation.ExpandEnvVars(path, env)
elif self.flavor == 'win':
path = gyp.msvs_emulation.ExpandMacros(path, env)
if path.startswith('$!'):
expanded = self.ExpandSpecial(path)
if self.flavor == 'win':
expanded = os.path.normpath(expanded)
return expanded
if '$|' in path:
path = self.ExpandSpecial(path)
assert '$' not in path, path
return os.path.normpath(os.path.join(self.build_to_base, path))
def GypPathToUniqueOutput(self, path, qualified=True):
"""Translate a gyp path to a ninja path for writing output.
If qualified is True, qualify the resulting filename with the name
of the target. This is necessary when e.g. compiling the same
path twice for two separate output targets.
See the above discourse on path conversions."""
path = self.ExpandSpecial(path)
assert not path.startswith('$'), path
# Translate the path following this scheme:
# Input: foo/bar.gyp, target targ, references baz/out.o
# Output: obj/foo/baz/targ.out.o (if qualified)
# obj/foo/baz/out.o (otherwise)
# (and obj.host instead of obj for cross-compiles)
#
# Why this scheme and not some other one?
# 1) for a given input, you can compute all derived outputs by matching
# its path, even if the input is brought via a gyp file with '..'.
# 2) simple files like libraries and stamps have a simple filename.
obj = 'obj'
if self.toolset != 'target':
obj += '.' + self.toolset
path_dir, path_basename = os.path.split(path)
if qualified:
path_basename = self.name + '.' + path_basename
return os.path.normpath(os.path.join(obj, self.base_dir, path_dir,
path_basename))
def WriteCollapsedDependencies(self, name, targets):
"""Given a list of targets, return a path for a single file
representing the result of building all the targets or None.
Uses a stamp file if necessary."""
assert targets == filter(None, targets), targets
if len(targets) == 0:
return None
if len(targets) > 1:
stamp = self.GypPathToUniqueOutput(name + '.stamp')
targets = self.ninja.build(stamp, 'stamp', targets)
self.ninja.newline()
return targets[0]
  def WriteSpec(self, spec, config_name, generator_flags,
                case_sensitive_filesystem):
    """The main entry point for NinjaWriter: write the build rules for a spec.
    Returns a Target object, which represents the output paths for this spec.
    Returns None if there are no outputs (e.g. a settings-only 'none' type
    target).

    spec: the gyp target dict.
    config_name: name of the configuration to generate rules for.
    generator_flags: generator-specific flags dict (used on win).
    case_sensitive_filesystem: forwarded to WriteSources, which lowercases
        object paths on case-insensitive filesystems.
    """
    self.config_name = config_name
    self.name = spec['target_name']
    self.toolset = spec['toolset']
    config = spec['configurations'][config_name]
    self.target = Target(spec['type'])
    # Standalone static libraries get a full (non-thin) archive; see
    # WriteTarget.
    self.is_standalone_static_library = bool(
        spec.get('standalone_static_library', 0))
    self.is_mac_bundle = gyp.xcode_emulation.IsMacBundle(self.flavor, spec)
    self.xcode_settings = self.msvs_settings = None
    if self.flavor == 'mac':
      self.xcode_settings = gyp.xcode_emulation.XcodeSettings(spec)
    if self.flavor == 'win':
      self.msvs_settings = gyp.msvs_emulation.MsvsSettings(spec,
                                                           generator_flags)
      arch = self.msvs_settings.GetArch(config_name)
      # Point the 'arch' variable at the matching environment file.
      self.ninja.variable('arch', self.win_env[arch])
    # Compute predepends for all rules.
    # actions_depends is the dependencies this target depends on before running
    # any of its action/rule/copy steps.
    # compile_depends is the dependencies this target depends on before running
    # any of its compile steps.
    actions_depends = []
    compile_depends = []
    # TODO(evan): it is rather confusing which things are lists and which
    # are strings. Fix these.
    if 'dependencies' in spec:
      for dep in spec['dependencies']:
        if dep in self.target_outputs:
          target = self.target_outputs[dep]
          actions_depends.append(target.PreActionInput(self.flavor))
          compile_depends.append(target.PreCompileInput())
      actions_depends = filter(None, actions_depends)
      compile_depends = filter(None, compile_depends)
      actions_depends = self.WriteCollapsedDependencies('actions_depends',
                                                        actions_depends)
      compile_depends = self.WriteCollapsedDependencies('compile_depends',
                                                        compile_depends)
      self.target.preaction_stamp = actions_depends
      self.target.precompile_stamp = compile_depends
    # Write out actions, rules, and copies. These must happen before we
    # compile any sources, so compute a list of predependencies for sources
    # while we do it.
    extra_sources = []
    mac_bundle_depends = []
    self.target.actions_stamp = self.WriteActionsRulesCopies(
        spec, extra_sources, actions_depends, mac_bundle_depends)
    # If we have actions/rules/copies, we depend directly on those, but
    # otherwise we depend on dependent target's actions/rules/copies etc.
    # We never need to explicitly depend on previous target's link steps,
    # because no compile ever depends on them.
    compile_depends_stamp = (self.target.actions_stamp or compile_depends)
    # Write out the compilation steps, if any.
    link_deps = []
    sources = spec.get('sources', []) + extra_sources
    if sources:
      pch = None
      if self.flavor == 'win':
        gyp.msvs_emulation.VerifyMissingSources(
            sources, self.abs_build_dir, generator_flags, self.GypPathToNinja)
        pch = gyp.msvs_emulation.PrecompiledHeader(
            self.msvs_settings, config_name, self.GypPathToNinja,
            self.GypPathToUniqueOutput, self.obj_ext)
      else:
        pch = gyp.xcode_emulation.MacPrefixHeader(
            self.xcode_settings, self.GypPathToNinja,
            lambda path, lang: self.GypPathToUniqueOutput(path + '-' + lang))
      link_deps = self.WriteSources(
          config_name, config, sources, compile_depends_stamp, pch,
          case_sensitive_filesystem, spec)
      # Some actions/rules output 'sources' that are already object files.
      link_deps += [self.GypPathToNinja(f)
          for f in sources if f.endswith(self.obj_ext)]
    if self.flavor == 'win' and self.target.type == 'static_library':
      self.target.component_objs = link_deps
    # Write out a link step, if needed.
    output = None
    if link_deps or self.target.actions_stamp or actions_depends:
      output = self.WriteTarget(spec, config_name, config, link_deps,
                                self.target.actions_stamp or actions_depends)
      if self.is_mac_bundle:
        mac_bundle_depends.append(output)
    # Bundle all of the above together, if needed.
    if self.is_mac_bundle:
      output = self.WriteMacBundle(spec, mac_bundle_depends)
    if not output:
      return None
    assert self.target.FinalOutput(), output
    return self.target
  def _WinIdlRule(self, source, prebuild, outputs):
    """Handle the implicit VS .idl rule for one source file. Fills |outputs|
    with files that are generated."""
    outdir, output, vars, flags = self.msvs_settings.GetIdlBuildData(
        source, self.config_name)
    outdir = self.GypPathToNinja(outdir)
    def fix_path(path, rel=None):
      # Anchor |path| under the idl output directory, expand any
      # RULE_INPUT_* placeholders against |source|, and optionally
      # re-relativize against |rel|.
      path = os.path.join(outdir, path)
      dirname, basename = os.path.split(source)
      root, ext = os.path.splitext(basename)
      path = self.ExpandRuleVariables(
          path, root, dirname, source, ext, basename)
      if rel:
        path = os.path.relpath(path, rel)
      return path
    # Variable values are made relative to outdir; output paths keep the
    # full ninja-relative form.
    vars = [(name, fix_path(value, outdir)) for name, value in vars]
    output = [fix_path(p) for p in output]
    vars.append(('outdir', outdir))
    vars.append(('idlflags', flags))
    input = self.GypPathToNinja(source)
    self.ninja.build(output, 'idl', input,
                     variables=vars, order_only=prebuild)
    outputs.extend(output)
def WriteWinIdlFiles(self, spec, prebuild):
"""Writes rules to match MSVS's implicit idl handling."""
assert self.flavor == 'win'
if self.msvs_settings.HasExplicitIdlRules(spec):
return []
outputs = []
for source in filter(lambda x: x.endswith('.idl'), spec['sources']):
self._WinIdlRule(source, prebuild, outputs)
return outputs
  def WriteActionsRulesCopies(self, spec, extra_sources, prebuild,
                              mac_bundle_depends):
    """Write out the Actions, Rules, and Copies steps. Return a path
    representing the outputs of these steps.

    Appends generated sources to |extra_sources| and bundle inputs to
    |mac_bundle_depends|; |prebuild| is the order-only predependency for
    every edge written here."""
    outputs = []
    extra_mac_bundle_resources = []
    if 'actions' in spec:
      outputs += self.WriteActions(spec['actions'], extra_sources, prebuild,
                                   extra_mac_bundle_resources)
    if 'rules' in spec:
      outputs += self.WriteRules(spec['rules'], extra_sources, prebuild,
                                 extra_mac_bundle_resources)
    if 'copies' in spec:
      outputs += self.WriteCopies(spec['copies'], prebuild, mac_bundle_depends)
    if 'sources' in spec and self.flavor == 'win':
      # Mirror MSVS's implicit processing of .idl sources.
      outputs += self.WriteWinIdlFiles(spec, prebuild)
    # Collapse everything above into a single stamp path (or None).
    stamp = self.WriteCollapsedDependencies('actions_rules_copies', outputs)
    if self.is_mac_bundle:
      mac_bundle_resources = spec.get('mac_bundle_resources', []) + \
                             extra_mac_bundle_resources
      self.WriteMacBundleResources(mac_bundle_resources, mac_bundle_depends)
      self.WriteMacInfoPlist(mac_bundle_depends)
    return stamp
def GenerateDescription(self, verb, message, fallback):
"""Generate and return a description of a build step.
|verb| is the short summary, e.g. ACTION or RULE.
|message| is a hand-written description, or None if not available.
|fallback| is the gyp-level name of the step, usable as a fallback.
"""
if self.toolset != 'target':
verb += '(%s)' % self.toolset
if message:
return '%s %s' % (verb, self.ExpandSpecial(message))
else:
return '%s %s: %s' % (verb, self.name, fallback)
  def WriteActions(self, actions, extra_sources, prebuild,
                   extra_mac_bundle_resources):
    """Write a ninja rule plus a build edge for each gyp 'action'.
    Appends action outputs flagged as sources/bundle-resources to the
    |extra_*| lists; returns the list of all output paths written."""
    # Actions cd into the base directory.
    env = self.GetSortedXcodeEnv()
    if self.flavor == 'win':
      env = self.msvs_settings.GetVSMacroEnv(
          '$!PRODUCT_DIR', config=self.config_name)
    all_outputs = []
    for action in actions:
      # First write out a rule for the action.  The md5 of the qualified
      # target name keeps rule names unique across targets that reuse the
      # same action name.
      name = '%s_%s' % (action['action_name'],
                        hashlib.md5(self.qualified_target).hexdigest())
      description = self.GenerateDescription('ACTION',
                                             action.get('message', None),
                                             name)
      is_cygwin = (self.msvs_settings.IsRuleRunUnderCygwin(action)
                   if self.flavor == 'win' else False)
      args = action['action']
      rule_name, _ = self.WriteNewNinjaRule(name, args, description,
                                            is_cygwin, env=env)
      inputs = [self.GypPathToNinja(i, env) for i in action['inputs']]
      if int(action.get('process_outputs_as_sources', False)):
        extra_sources += action['outputs']
      if int(action.get('process_outputs_as_mac_bundle_resources', False)):
        extra_mac_bundle_resources += action['outputs']
      outputs = [self.GypPathToNinja(o, env) for o in action['outputs']]
      # Then write out an edge using the rule.
      self.ninja.build(outputs, rule_name, inputs,
                       order_only=prebuild)
      all_outputs += outputs
      self.ninja.newline()
    return all_outputs
  def WriteRules(self, rules, extra_sources, prebuild,
                 extra_mac_bundle_resources):
    """Write a ninja rule plus per-source build edges for each gyp 'rule'.
    Appends rule outputs flagged as sources/bundle-resources to the
    |extra_*| lists; returns the list of all output paths written."""
    env = self.GetSortedXcodeEnv()
    all_outputs = []
    for rule in rules:
      # First write out a rule for the rule action.  The md5 of the
      # qualified target name keeps rule names unique across targets.
      name = '%s_%s' % (rule['rule_name'],
                        hashlib.md5(self.qualified_target).hexdigest())
      # Skip a rule with no action and no inputs.
      if 'action' not in rule and not rule.get('rule_sources', []):
        continue
      args = rule['action']
      description = self.GenerateDescription(
          'RULE',
          rule.get('message', None),
          ('%s ' + generator_default_variables['RULE_INPUT_PATH']) % name)
      is_cygwin = (self.msvs_settings.IsRuleRunUnderCygwin(rule)
                   if self.flavor == 'win' else False)
      rule_name, args = self.WriteNewNinjaRule(
          name, args, description, is_cygwin, env=env)
      # TODO: if the command references the outputs directly, we should
      # simplify it to just use $out.
      # Rules can potentially make use of some special variables which
      # must vary per source file.
      # Compute the list of variables we'll need to provide.
      special_locals = ('source', 'root', 'dirname', 'ext', 'name')
      needed_variables = set(['source'])
      for argument in args:
        for var in special_locals:
          if ('${%s}' % var) in argument:
            needed_variables.add(var)
      def cygwin_munge(path):
        # Commands run under cygwin get forward slashes.
        if is_cygwin:
          return path.replace('\\', '/')
        return path
      # For each source file, write an edge that generates all the outputs.
      for source in rule.get('rule_sources', []):
        dirname, basename = os.path.split(source)
        root, ext = os.path.splitext(basename)
        # Gather the list of inputs and outputs, expanding $vars if possible.
        outputs = [self.ExpandRuleVariables(o, root, dirname,
                                            source, ext, basename)
                   for o in rule['outputs']]
        inputs = [self.ExpandRuleVariables(i, root, dirname,
                                           source, ext, basename)
                  for i in rule.get('inputs', [])]
        if int(rule.get('process_outputs_as_sources', False)):
          extra_sources += outputs
        if int(rule.get('process_outputs_as_mac_bundle_resources', False)):
          extra_mac_bundle_resources += outputs
        extra_bindings = []
        for var in needed_variables:
          if var == 'root':
            extra_bindings.append(('root', cygwin_munge(root)))
          elif var == 'dirname':
            extra_bindings.append(('dirname', cygwin_munge(dirname)))
          elif var == 'source':
            # '$source' is a parameter to the rule action, which means
            # it shouldn't be converted to a Ninja path. But we don't
            # want $!PRODUCT_DIR in there either.
            source_expanded = self.ExpandSpecial(source, self.base_to_build)
            extra_bindings.append(('source', cygwin_munge(source_expanded)))
          elif var == 'ext':
            extra_bindings.append(('ext', ext))
          elif var == 'name':
            extra_bindings.append(('name', cygwin_munge(basename)))
          else:
            assert var == None, repr(var)
        inputs = [self.GypPathToNinja(i, env) for i in inputs]
        outputs = [self.GypPathToNinja(o, env) for o in outputs]
        # md5 of the first output gives each edge a unique binding value.
        extra_bindings.append(('unique_name',
                               hashlib.md5(outputs[0]).hexdigest()))
        self.ninja.build(outputs, rule_name, self.GypPathToNinja(source),
                         implicit=inputs,
                         order_only=prebuild,
                         variables=extra_bindings)
        all_outputs.extend(outputs)
    return all_outputs
  def WriteCopies(self, copies, prebuild, mac_bundle_depends):
    """Write a 'copy' edge for each file in each gyp 'copies' entry.
    Returns the list of destination paths produced."""
    outputs = []
    env = self.GetSortedXcodeEnv()
    for copy in copies:
      for path in copy['files']:
        # Normalize the path so trailing slashes don't confuse us.
        path = os.path.normpath(path)
        basename = os.path.split(path)[1]
        src = self.GypPathToNinja(path, env)
        dst = self.GypPathToNinja(os.path.join(copy['destination'], basename),
                                  env)
        outputs += self.ninja.build(dst, 'copy', src, order_only=prebuild)
        if self.is_mac_bundle:
          # gyp has mac_bundle_resources to copy things into a bundle's
          # Resources folder, but there's no built-in way to copy files to other
          # places in the bundle. Hence, some targets use copies for this. Check
          # if this file is copied into the current bundle, and if so add it to
          # the bundle depends so that dependent targets get rebuilt if the copy
          # input changes.
          if dst.startswith(self.xcode_settings.GetBundleContentsFolderPath()):
            mac_bundle_depends.append(dst)
    return outputs
  def WriteMacBundleResources(self, resources, bundle_depends):
    """Writes ninja edges for 'mac_bundle_resources'.

    Each (output, resource) pair from xcode_emulation becomes a 'mac_tool'
    copy edge; outputs are appended to |bundle_depends|."""
    for output, res in gyp.xcode_emulation.GetMacBundleResources(
        self.ExpandSpecial(generator_default_variables['PRODUCT_DIR']),
        self.xcode_settings, map(self.GypPathToNinja, resources)):
      self.ninja.build(output, 'mac_tool', res,
                       variables=[('mactool_cmd', 'copy-bundle-resource')])
      bundle_depends.append(output)
  def WriteMacInfoPlist(self, bundle_depends):
    """Write build rules for bundle Info.plist files.

    When the plist needs preprocessor defines, an intermediate 'infoplist'
    edge is written first; the final copy is appended to |bundle_depends|."""
    info_plist, out, defines, extra_env = gyp.xcode_emulation.GetMacInfoPlist(
        self.ExpandSpecial(generator_default_variables['PRODUCT_DIR']),
        self.xcode_settings, self.GypPathToNinja)
    if not info_plist:
      return
    if defines:
      # Create an intermediate file to store preprocessed results.
      intermediate_plist = self.GypPathToUniqueOutput(
          os.path.basename(info_plist))
      defines = ' '.join([Define(d, self.flavor) for d in defines])
      info_plist = self.ninja.build(intermediate_plist, 'infoplist', info_plist,
                                    variables=[('defines',defines)])
    env = self.GetSortedXcodeEnv(additional_settings=extra_env)
    env = self.ComputeExportEnvString(env)
    self.ninja.build(out, 'mac_tool', info_plist,
                     variables=[('mactool_cmd', 'copy-info-plist'),
                                ('env', env)])
    bundle_depends.append(out)
  def WriteSources(self, config_name, config, sources, predepends,
                   precompiled_header, case_sensitive_filesystem, spec):
    """Write build rules to compile all of |sources|.

    Returns the list of object files produced; |predepends| becomes the
    order-only input of every compile edge."""
    if self.toolset == 'host':
      # Host-toolset compiles use the *_host toolchain variables.
      self.ninja.variable('ar', '$ar_host')
      self.ninja.variable('cc', '$cc_host')
      self.ninja.variable('cxx', '$cxx_host')
      self.ninja.variable('ld', '$ld_host')
    extra_defines = []
    if self.flavor == 'mac':
      cflags = self.xcode_settings.GetCflags(config_name)
      cflags_c = self.xcode_settings.GetCflagsC(config_name)
      cflags_cc = self.xcode_settings.GetCflagsCC(config_name)
      cflags_objc = ['$cflags_c'] + \
                    self.xcode_settings.GetCflagsObjC(config_name)
      cflags_objcc = ['$cflags_cc'] + \
                     self.xcode_settings.GetCflagsObjCC(config_name)
    elif self.flavor == 'win':
      cflags = self.msvs_settings.GetCflags(config_name)
      cflags_c = self.msvs_settings.GetCflagsC(config_name)
      cflags_cc = self.msvs_settings.GetCflagsCC(config_name)
      extra_defines = self.msvs_settings.GetComputedDefines(config_name)
      pdbpath = self.msvs_settings.GetCompilerPdbName(
          config_name, self.ExpandSpecial)
      if not pdbpath:
        # Fall back to <obj dir>/<base dir>/<target>.pdb.
        obj = 'obj'
        if self.toolset != 'target':
          obj += '.' + self.toolset
        pdbpath = os.path.normpath(os.path.join(obj, self.base_dir,
                                                self.name + '.pdb'))
      self.WriteVariableList('pdbname', [pdbpath])
      self.WriteVariableList('pchprefix', [self.name])
    else:
      cflags = config.get('cflags', [])
      cflags_c = config.get('cflags_c', [])
      cflags_cc = config.get('cflags_cc', [])
    defines = config.get('defines', []) + extra_defines
    self.WriteVariableList('defines', [Define(d, self.flavor) for d in defines])
    if self.flavor == 'win':
      self.WriteVariableList('rcflags',
          [QuoteShellArgument(self.ExpandSpecial(f), self.flavor)
           for f in self.msvs_settings.GetRcflags(config_name,
                                                  self.GypPathToNinja)])
    include_dirs = config.get('include_dirs', [])
    if self.flavor == 'win':
      include_dirs = self.msvs_settings.AdjustIncludeDirs(include_dirs,
                                                          config_name)
    self.WriteVariableList('includes',
        [QuoteShellArgument('-I' + self.GypPathToNinja(i), self.flavor)
         for i in include_dirs])
    pch_commands = precompiled_header.GetPchBuildCommands()
    if self.flavor == 'mac':
      self.WriteVariableList('cflags_pch_c',
                             [precompiled_header.GetInclude('c')])
      self.WriteVariableList('cflags_pch_cc',
                             [precompiled_header.GetInclude('cc')])
      self.WriteVariableList('cflags_pch_objc',
                             [precompiled_header.GetInclude('m')])
      self.WriteVariableList('cflags_pch_objcc',
                             [precompiled_header.GetInclude('mm')])
    self.WriteVariableList('cflags', map(self.ExpandSpecial, cflags))
    self.WriteVariableList('cflags_c', map(self.ExpandSpecial, cflags_c))
    self.WriteVariableList('cflags_cc', map(self.ExpandSpecial, cflags_cc))
    if self.flavor == 'mac':
      self.WriteVariableList('cflags_objc', map(self.ExpandSpecial,
                                                cflags_objc))
      self.WriteVariableList('cflags_objcc', map(self.ExpandSpecial,
                                                 cflags_objcc))
    self.ninja.newline()
    outputs = []
    for source in sources:
      filename, ext = os.path.splitext(source)
      ext = ext[1:]
      obj_ext = self.obj_ext
      # Choose the compile rule from the file extension (and flavor).
      if ext in ('cc', 'cpp', 'cxx'):
        command = 'cxx'
      elif ext == 'c' or (ext == 'S' and self.flavor != 'win'):
        command = 'cc'
      elif ext == 's' and self.flavor != 'win': # Doesn't generate .o.d files.
        command = 'cc_s'
      elif (self.flavor == 'win' and ext == 'asm' and
            self.msvs_settings.GetArch(config_name) == 'x86' and
            not self.msvs_settings.HasExplicitAsmRules(spec)):
        # Asm files only get auto assembled for x86 (not x64).
        command = 'asm'
        # Add the _asm suffix as msvs is capable of handling .cc and
        # .asm files of the same name without collision.
        obj_ext = '_asm.obj'
      elif self.flavor == 'mac' and ext == 'm':
        command = 'objc'
      elif self.flavor == 'mac' and ext == 'mm':
        command = 'objcxx'
      elif self.flavor == 'win' and ext == 'rc':
        command = 'rc'
        obj_ext = '.res'
      else:
        # Ignore unhandled extensions.
        continue
      input = self.GypPathToNinja(source)
      output = self.GypPathToUniqueOutput(filename + obj_ext)
      # Ninja's depfile handling gets confused when the case of a filename
      # changes on a case-insensitive file system. To work around that, always
      # convert .o filenames to lowercase on such file systems. See
      # https://github.com/martine/ninja/issues/402 for details.
      if not case_sensitive_filesystem:
        output = output.lower()
      implicit = precompiled_header.GetObjDependencies([input], [output])
      variables = []
      if self.flavor == 'win':
        variables, output, implicit = precompiled_header.GetFlagsModifications(
            input, output, implicit, command, cflags_c, cflags_cc,
            self.ExpandSpecial)
      self.ninja.build(output, command, input,
                       implicit=[gch for _, _, gch in implicit],
                       order_only=predepends, variables=variables)
      outputs.append(output)
    self.WritePchTargets(pch_commands)
    self.ninja.newline()
    return outputs
def WritePchTargets(self, pch_commands):
"""Writes ninja rules to compile prefix headers."""
if not pch_commands:
return
for gch, lang_flag, lang, input in pch_commands:
var_name = {
'c': 'cflags_pch_c',
'cc': 'cflags_pch_cc',
'm': 'cflags_pch_objc',
'mm': 'cflags_pch_objcc',
}[lang]
map = { 'c': 'cc', 'cc': 'cxx', 'm': 'objc', 'mm': 'objcxx', }
cmd = map.get(lang)
self.ninja.build(gch, cmd, input, variables=[(var_name, lang_flag)])
  def WriteLink(self, spec, config_name, config, link_deps):
    """Write out a link step. Fills out target.binary.

    Linkable dependency outputs are appended to |link_deps| (mutated in
    place); non-linkable dependency outputs become implicit inputs."""
    command = {
      'executable': 'link',
      'loadable_module': 'solink_module',
      'shared_library': 'solink',
    }[spec['type']]
    implicit_deps = set()
    solibs = set()
    if 'dependencies' in spec:
      # Two kinds of dependencies:
      # - Linkable dependencies (like a .a or a .so): add them to the link line.
      # - Non-linkable dependencies (like a rule that generates a file
      #   and writes a stamp file): add them to implicit_deps
      extra_link_deps = set()
      for dep in spec['dependencies']:
        target = self.target_outputs.get(dep)
        if not target:
          continue
        linkable = target.Linkable()
        if linkable:
          if (self.flavor == 'win' and
              target.component_objs and
              self.msvs_settings.IsUseLibraryDependencyInputs(config_name)):
            # Incremental linking on win links against the component .objs.
            extra_link_deps |= set(target.component_objs)
          elif self.flavor == 'win' and target.import_lib:
            extra_link_deps.add(target.import_lib)
          elif target.UsesToc(self.flavor):
            # TOC-based restat: link against the binary, depend on its TOC.
            solibs.add(target.binary)
            implicit_deps.add(target.binary + '.TOC')
          else:
            extra_link_deps.add(target.binary)
        final_output = target.FinalOutput()
        if not linkable or final_output != target.binary:
          implicit_deps.add(final_output)
      link_deps.extend(list(extra_link_deps))
    extra_bindings = []
    if self.is_mac_bundle:
      output = self.ComputeMacBundleBinaryOutput()
    else:
      output = self.ComputeOutput(spec)
      extra_bindings.append(('postbuilds',
                             self.GetPostbuildCommand(spec, output, output)))
    is_executable = spec['type'] == 'executable'
    if self.flavor == 'mac':
      ldflags = self.xcode_settings.GetLdflags(config_name,
          self.ExpandSpecial(generator_default_variables['PRODUCT_DIR']),
          self.GypPathToNinja)
    elif self.flavor == 'win':
      manifest_name = self.GypPathToUniqueOutput(
          self.ComputeOutputFileName(spec))
      ldflags, manifest_files = self.msvs_settings.GetLdflags(config_name,
          self.GypPathToNinja, self.ExpandSpecial, manifest_name, is_executable)
      self.WriteVariableList('manifests', manifest_files)
    else:
      ldflags = config.get('ldflags', [])
      if is_executable and len(solibs):
        # Add $ORIGIN-relative rpaths so executables locate their solibs
        # at runtime.
        rpath = 'lib/'
        if self.toolset != 'target':
          rpath += self.toolset
        ldflags.append('-Wl,-rpath=\$$ORIGIN/%s' % rpath)
        ldflags.append('-Wl,-rpath-link=%s' % rpath)
    self.WriteVariableList('ldflags',
                           gyp.common.uniquer(map(self.ExpandSpecial,
                                                  ldflags)))
    libraries = gyp.common.uniquer(map(self.ExpandSpecial,
                                       spec.get('libraries', [])))
    if self.flavor == 'mac':
      libraries = self.xcode_settings.AdjustLibraries(libraries)
    elif self.flavor == 'win':
      libraries = self.msvs_settings.AdjustLibraries(libraries)
    self.WriteVariableList('libs', libraries)
    self.target.binary = output
    if command in ('solink', 'solink_module'):
      extra_bindings.append(('soname', os.path.split(output)[1]))
      extra_bindings.append(('lib',
                            gyp.common.EncodePOSIXShellArgument(output)))
      if self.flavor == 'win':
        extra_bindings.append(('dll', output))
        if '/NOENTRY' not in ldflags:
          # Linking with an entry point also produces an import library.
          self.target.import_lib = output + '.lib'
          extra_bindings.append(('implibflag',
                                 '/IMPLIB:%s' % self.target.import_lib))
          output = [output, self.target.import_lib]
      else:
        output = [output, output + '.TOC']
    if len(solibs):
      extra_bindings.append(('solibs', gyp.common.EncodePOSIXShellList(solibs)))
    self.ninja.build(output, command, link_deps,
                     implicit=list(implicit_deps),
                     variables=extra_bindings)
  def WriteTarget(self, spec, config_name, config, link_deps, compile_deps):
    """Write the final step for this target (archive, link, or nothing for
    'none' targets) and return self.target.binary."""
    if spec['type'] == 'none':
      # TODO(evan): don't call this function for 'none' target types, as
      # it doesn't do anything, and we fake out a 'binary' with a stamp file.
      self.target.binary = compile_deps
    elif spec['type'] == 'static_library':
      self.target.binary = self.ComputeOutput(spec)
      variables = []
      postbuild = self.GetPostbuildCommand(
          spec, self.target.binary, self.target.binary)
      if postbuild:
        variables.append(('postbuilds', postbuild))
      if self.xcode_settings:
        variables.append(('libtool_flags',
                          self.xcode_settings.GetLibtoolflags(config_name)))
      # Thin archives everywhere except mac/win and standalone static
      # libraries, which use the full 'alink' rule.
      if (self.flavor not in ('mac', 'win') and not
          self.is_standalone_static_library):
        self.ninja.build(self.target.binary, 'alink_thin', link_deps,
                         order_only=compile_deps, variables=variables)
      else:
        if self.msvs_settings:
          libflags = self.msvs_settings.GetLibFlags(config_name,
                                                    self.GypPathToNinja)
          variables.append(('libflags', libflags))
        self.ninja.build(self.target.binary, 'alink', link_deps,
                         order_only=compile_deps, variables=variables)
    else:
      self.WriteLink(spec, config_name, config, link_deps)
    return self.target.binary
  def WriteMacBundle(self, spec, mac_bundle_depends):
    """Write the final bundling edge for a mac bundle target.  Returns the
    bundle output path (also stored in self.target.bundle)."""
    assert self.is_mac_bundle
    # Frameworks/loadable modules use the 'package_framework' tool; app-style
    # bundles only need a stamp over their collected contents.
    package_framework = spec['type'] in ('shared_library', 'loadable_module')
    output = self.ComputeMacBundleOutput()
    postbuild = self.GetPostbuildCommand(spec, output, self.target.binary,
                                         is_command_start=not package_framework)
    variables = []
    if postbuild:
      variables.append(('postbuilds', postbuild))
    if package_framework:
      variables.append(('version', self.xcode_settings.GetFrameworkVersion()))
      self.ninja.build(output, 'package_framework', mac_bundle_depends,
                       variables=variables)
    else:
      self.ninja.build(output, 'stamp', mac_bundle_depends,
                       variables=variables)
    self.target.bundle = output
    return output
def GetSortedXcodeEnv(self, additional_settings=None):
"""Returns the variables Xcode would set for build steps."""
assert self.abs_build_dir
abs_build_dir = self.abs_build_dir
return gyp.xcode_emulation.GetSortedXcodeEnv(
self.xcode_settings, abs_build_dir,
os.path.join(abs_build_dir, self.build_to_base), self.config_name,
additional_settings)
def GetSortedXcodePostbuildEnv(self):
"""Returns the variables Xcode would set for postbuild steps."""
postbuild_settings = {}
# CHROMIUM_STRIP_SAVE_FILE is a chromium-specific hack.
# TODO(thakis): It would be nice to have some general mechanism instead.
strip_save_file = self.xcode_settings.GetPerTargetSetting(
'CHROMIUM_STRIP_SAVE_FILE')
if strip_save_file:
postbuild_settings['CHROMIUM_STRIP_SAVE_FILE'] = strip_save_file
return self.GetSortedXcodeEnv(additional_settings=postbuild_settings)
  def GetPostbuildCommand(self, spec, output, output_binary,
                          is_command_start=False):
    """Returns a shell command that runs all the postbuilds, and removes
    |output| if any of them fails. If |is_command_start| is False, then the
    returned string will start with ' && '."""
    # Postbuilds only apply when Xcode settings exist and the target
    # actually produces an output.
    if not self.xcode_settings or spec['type'] == 'none' or not output:
      return ''
    output = QuoteShellArgument(output, self.flavor)
    # Target-level postbuilds (from Xcode settings) come before the
    # spec-level ones.
    target_postbuilds = self.xcode_settings.GetTargetPostbuilds(
        self.config_name,
        os.path.normpath(os.path.join(self.base_to_build, output)),
        QuoteShellArgument(
            os.path.normpath(os.path.join(self.base_to_build, output_binary)),
            self.flavor),
        quiet=True)
    postbuilds = gyp.xcode_emulation.GetSpecPostbuildCommands(spec, quiet=True)
    postbuilds = target_postbuilds + postbuilds
    if not postbuilds:
      return ''
    # Postbuilds expect to be run in the gyp file's directory, so insert an
    # implicit postbuild to cd to there.
    postbuilds.insert(0, gyp.common.EncodePOSIXShellList(
        ['cd', self.build_to_base]))
    env = self.ComputeExportEnvString(self.GetSortedXcodePostbuildEnv())
    # G will be non-null if any postbuild fails. Run all postbuilds in a
    # subshell.
    commands = env + ' (' + \
        ' && '.join([ninja_syntax.escape(command) for command in postbuilds])
    # '$$' survives ninja's variable expansion as a literal '$' so the
    # shell sees '$?' / '$G'.
    command_string = (commands + '); G=$$?; '
                      # Remove the final output if any postbuild failed.
                      '((exit $$G) || rm -rf %s) ' % output + '&& exit $$G)')
    if is_command_start:
      return '(' + command_string + ' && '
    else:
      # '$ ' is ninja's escape for a leading space in the rule text.
      return '$ && (' + command_string
def ComputeExportEnvString(self, env):
"""Given an environment, returns a string looking like
'export FOO=foo; export BAR="${FOO} bar;'
that exports |env| to the shell."""
export_str = []
for k, v in env:
export_str.append('export %s=%s;' %
(k, ninja_syntax.escape(gyp.common.EncodePOSIXShellArgument(v))))
return ' '.join(export_str)
def ComputeMacBundleOutput(self):
"""Return the 'output' (full output path) to a bundle output directory."""
assert self.is_mac_bundle
path = self.ExpandSpecial(generator_default_variables['PRODUCT_DIR'])
return os.path.join(path, self.xcode_settings.GetWrapperName())
def ComputeMacBundleBinaryOutput(self):
"""Return the 'output' (full output path) to the binary in a bundle."""
assert self.is_mac_bundle
path = self.ExpandSpecial(generator_default_variables['PRODUCT_DIR'])
return os.path.join(path, self.xcode_settings.GetExecutablePath())
  def ComputeOutputFileName(self, spec, type=None):
    """Compute the filename of the final output for the current target.

    |type| defaults to spec['type'].  The name is assembled from a prefix
    (product_prefix or a per-type platform default), the target/product
    name, and an extension (product_extension or a per-type default).
    """
    if not type:
      type = spec['type']

    default_variables = copy.copy(generator_default_variables)
    # Fill in platform-specific prefix/suffix defaults for this flavor.
    CalculateVariables(default_variables, {'flavor': self.flavor})

    # Compute filename prefix: the product prefix, or a default for
    # the product type.
    DEFAULT_PREFIX = {
      'loadable_module': default_variables['SHARED_LIB_PREFIX'],
      'shared_library': default_variables['SHARED_LIB_PREFIX'],
      'static_library': default_variables['STATIC_LIB_PREFIX'],
      'executable': default_variables['EXECUTABLE_PREFIX'],
      }
    prefix = spec.get('product_prefix', DEFAULT_PREFIX.get(type, ''))

    # Compute filename extension: the product extension, or a default
    # for the product type.
    DEFAULT_EXTENSION = {
        'loadable_module': default_variables['SHARED_LIB_SUFFIX'],
        'shared_library': default_variables['SHARED_LIB_SUFFIX'],
        'static_library': default_variables['STATIC_LIB_SUFFIX'],
        'executable': default_variables['EXECUTABLE_SUFFIX'],
        }
    extension = spec.get('product_extension')
    if extension:
      extension = '.' + extension
    else:
      extension = DEFAULT_EXTENSION.get(type, '')

    if 'product_name' in spec:
      # If we were given an explicit name, use that.
      target = spec['product_name']
    else:
      # Otherwise, derive a name from the target name.
      target = spec['target_name']
      if prefix == 'lib':
        # Snip out an extra 'lib' from libs if appropriate.
        target = StripPrefix(target, 'lib')

    if type in ('static_library', 'loadable_module', 'shared_library',
                'executable'):
      return '%s%s%s' % (prefix, target, extension)
    elif type == 'none':
      # 'none' targets produce only a stamp file.
      return '%s.stamp' % target
    else:
      raise Exception('Unhandled output type %s' % type)
  def ComputeOutput(self, spec, type=None):
    """Compute the path for the final output of the spec.

    Checks, in order: a Windows output-name override, the Mac executable
    path, an explicit product_dir, then the per-type default location.
    """
    assert not self.is_mac_bundle or type
    if not type:
      type = spec['type']

    if self.flavor == 'win':
      # MSVS settings may override the output name entirely.
      override = self.msvs_settings.GetOutputName(self.config_name,
                                                  self.ExpandSpecial)
      if override:
        return override

    if self.flavor == 'mac' and type in (
        'static_library', 'executable', 'shared_library', 'loadable_module'):
      filename = self.xcode_settings.GetExecutablePath()
    else:
      filename = self.ComputeOutputFileName(spec, type)

    if 'product_dir' in spec:
      path = os.path.join(spec['product_dir'], filename)
      return self.ExpandSpecial(path)

    # Some products go into the output root, libraries go into shared library
    # dir, and everything else goes into the normal place.
    type_in_output_root = ['executable', 'loadable_module']
    if self.flavor == 'mac' and self.toolset == 'target':
      type_in_output_root += ['shared_library', 'static_library']
    elif self.flavor == 'win' and self.toolset == 'target':
      type_in_output_root += ['shared_library']

    if type in type_in_output_root or self.is_standalone_static_library:
      return filename
    elif type == 'shared_library':
      libdir = 'lib'
      if self.toolset != 'target':
        # Host-toolset libraries live in a toolset-qualified lib dir.
        libdir = os.path.join('lib', '%s' % self.toolset)
      return os.path.join(libdir, filename)
    else:
      return self.GypPathToUniqueOutput(filename, qualified=False)
def WriteVariableList(self, var, values):
assert not isinstance(values, str)
if values is None:
values = []
self.ninja.variable(var, ' '.join(values))
  def WriteNewNinjaRule(self, name, args, description, is_cygwin, env):
    """Write out a new ninja "rule" statement for a given command.

    Returns the name of the new rule, and a copy of |args| with variables
    expanded."""
    if self.flavor == 'win':
      # Expand Visual Studio macros in both the arguments and description.
      args = [self.msvs_settings.ConvertVSMacros(
                  arg, self.base_to_build, config=self.config_name)
              for arg in args]
      description = self.msvs_settings.ConvertVSMacros(
          description, config=self.config_name)
    elif self.flavor == 'mac':
      # |env| is an empty list on non-mac.
      args = [gyp.xcode_emulation.ExpandEnvVars(arg, env) for arg in args]
      description = gyp.xcode_emulation.ExpandEnvVars(description, env)

    # TODO: we shouldn't need to qualify names; we do it because
    # currently the ninja rule namespace is global, but it really
    # should be scoped to the subninja.
    rule_name = self.name
    if self.toolset == 'target':
      rule_name += '.' + self.toolset
    rule_name += '.' + name
    # Ninja rule names only allow a restricted character set.
    rule_name = re.sub('[^a-zA-Z0-9_]', '_', rule_name)

    # Remove variable references, but not if they refer to the magic rule
    # variables. This is not quite right, as it also protects these for
    # actions, not just for rules where they are valid. Good enough.
    protect = [ '${root}', '${dirname}', '${source}', '${ext}', '${name}' ]
    protect = '(?!' + '|'.join(map(re.escape, protect)) + ')'
    description = re.sub(protect + r'\$', '_', description)

    # gyp dictates that commands are run from the base directory.
    # cd into the directory before running, and adjust paths in
    # the arguments to point to the proper locations.
    rspfile = None
    rspfile_content = None
    args = [self.ExpandSpecial(arg, self.base_to_build) for arg in args]
    if self.flavor == 'win':
      # On Windows the full command line goes into a response file to dodge
      # command-length limits.
      rspfile = rule_name + '.$unique_name.rsp'
      # The cygwin case handles this inside the bash sub-shell.
      run_in = '' if is_cygwin else ' ' + self.build_to_base
      if is_cygwin:
        rspfile_content = self.msvs_settings.BuildCygwinBashCommandLine(
            args, self.build_to_base)
      else:
        rspfile_content = gyp.msvs_emulation.EncodeRspFileList(args)
      command = ('%s gyp-win-tool action-wrapper $arch ' % sys.executable +
                 rspfile + run_in)
    else:
      env = self.ComputeExportEnvString(env)
      command = gyp.common.EncodePOSIXShellList(args)
      command = 'cd %s; ' % self.build_to_base + env + command

    # GYP rules/actions express being no-ops by not touching their outputs.
    # Avoid executing downstream dependencies in this case by specifying
    # restat=1 to ninja.
    self.ninja.rule(rule_name, command, description, restat=True,
                    rspfile=rspfile, rspfile_content=rspfile_content)
    self.ninja.newline()

    return rule_name, args
def CalculateVariables(default_variables, params):
  """Calculate additional variables for use in the build (called by gyp)."""
  # These module-level globals are consumed by gyp core; the mac/win
  # branches replace them with the values of the corresponding generator.
  global generator_additional_non_configuration_keys
  global generator_additional_path_sections
  flavor = gyp.common.GetFlavor(params)
  if flavor == 'mac':
    default_variables.setdefault('OS', 'mac')
    default_variables.setdefault('SHARED_LIB_SUFFIX', '.dylib')
    default_variables.setdefault('SHARED_LIB_DIR',
                                 generator_default_variables['PRODUCT_DIR'])
    default_variables.setdefault('LIB_DIR',
                                 generator_default_variables['PRODUCT_DIR'])

    # Copy additional generator configuration data from Xcode, which is shared
    # by the Mac Ninja generator.
    import gyp.generator.xcode as xcode_generator
    generator_additional_non_configuration_keys = getattr(xcode_generator,
        'generator_additional_non_configuration_keys', [])
    generator_additional_path_sections = getattr(xcode_generator,
        'generator_additional_path_sections', [])
    global generator_extra_sources_for_rules
    generator_extra_sources_for_rules = getattr(xcode_generator,
        'generator_extra_sources_for_rules', [])
  elif flavor == 'win':
    default_variables.setdefault('OS', 'win')
    default_variables['EXECUTABLE_SUFFIX'] = '.exe'
    default_variables['STATIC_LIB_PREFIX'] = ''
    default_variables['STATIC_LIB_SUFFIX'] = '.lib'
    default_variables['SHARED_LIB_PREFIX'] = ''
    default_variables['SHARED_LIB_SUFFIX'] = '.dll'
    generator_flags = params.get('generator_flags', {})

    # Copy additional generator configuration data from VS, which is shared
    # by the Windows Ninja generator.
    import gyp.generator.msvs as msvs_generator
    generator_additional_non_configuration_keys = getattr(msvs_generator,
        'generator_additional_non_configuration_keys', [])
    generator_additional_path_sections = getattr(msvs_generator,
        'generator_additional_path_sections', [])

    # Set a variable so conditions can be based on msvs_version.
    msvs_version = gyp.msvs_emulation.GetVSVersion(generator_flags)
    default_variables['MSVS_VERSION'] = msvs_version.ShortName()

    # To determine processor word size on Windows, in addition to checking
    # PROCESSOR_ARCHITECTURE (which reflects the word size of the current
    # process), it is also necessary to check PROCESSOR_ARCHITEW6432 (which
    # contains the actual word size of the system when running thru WOW64).
    if ('64' in os.environ.get('PROCESSOR_ARCHITECTURE', '') or
        '64' in os.environ.get('PROCESSOR_ARCHITEW6432', '')):
      default_variables['MSVS_OS_BITS'] = 64
    else:
      default_variables['MSVS_OS_BITS'] = 32
  else:
    operating_system = flavor
    if flavor == 'android':
      operating_system = 'linux'  # Keep this legacy behavior for now.
    default_variables.setdefault('OS', operating_system)
    default_variables.setdefault('SHARED_LIB_SUFFIX', '.so')
    default_variables.setdefault('SHARED_LIB_DIR',
                                 os.path.join('$!PRODUCT_DIR', 'lib'))
    default_variables.setdefault('LIB_DIR',
                                 os.path.join('$!PRODUCT_DIR', 'obj'))
def OpenOutput(path, mode='w'):
  """Open |path| for writing, creating directories if necessary."""
  directory = os.path.dirname(path)
  try:
    os.makedirs(directory)
  except OSError:
    # The directory already exists (or cannot be created); let open()
    # surface any real problem.
    pass
  return open(path, mode)
def CommandWithWrapper(cmd, wrappers, prog):
  """Prefix |prog| with the wrapper registered for |cmd|, if any."""
  wrapper = wrappers.get(cmd)
  if not wrapper:
    return prog
  return wrapper + ' ' + prog
def GenerateOutputForConfig(target_list, target_dicts, data, params,
                            config_name):
  """Write the master build.ninja for one configuration.

  Emits toolchain variables, all compile/link/copy/stamp rules for the
  detected flavor, one subninja per target, phony short names, and the
  'all' default target.
  """
  options = params['options']
  flavor = gyp.common.GetFlavor(params)
  generator_flags = params.get('generator_flags', {})

  # generator_dir: relative path from pwd to where make puts build files.
  # Makes migrating from make to ninja easier, ninja doesn't put anything here.
  generator_dir = os.path.relpath(params['options'].generator_output or '.')

  # output_dir: relative path from generator_dir to the build directory.
  output_dir = generator_flags.get('output_dir', 'out')

  # build_dir: relative path from source root to our output files.
  # e.g. "out/Debug"
  build_dir = os.path.normpath(os.path.join(generator_dir,
                                            output_dir,
                                            config_name))

  toplevel_build = os.path.join(options.toplevel_dir, build_dir)

  master_ninja = ninja_syntax.Writer(
      OpenOutput(os.path.join(toplevel_build, 'build.ninja')),
      width=120)
  # If an upper-cased probe name is visible, the filesystem is
  # case-insensitive and writers must de-duplicate paths accordingly.
  case_sensitive_filesystem = not os.path.exists(
      os.path.join(toplevel_build, 'BUILD.NINJA'))

  # Put build-time support tools in out/{config_name}.
  gyp.common.CopyTool(flavor, toplevel_build)

  # Grab make settings for CC/CXX.
  # The rules are
  # - The priority from low to high is gcc/g++, the 'make_global_settings' in
  #   gyp, the environment variable.
  # - If there is no 'make_global_settings' for CC.host/CXX.host or
  #   'CC_host'/'CXX_host' enviroment variable, cc_host/cxx_host should be set
  #   to cc/cxx.
  if flavor == 'win':
    cc = 'cl.exe'
    cxx = 'cl.exe'
    ld = 'link.exe'
    gyp.msvs_emulation.GenerateEnvironmentFiles(
        toplevel_build, generator_flags, OpenOutput)
    ld_host = '$ld'
  else:
    cc = 'gcc'
    cxx = 'g++'
    ld = '$cxx'
    ld_host = '$cxx_host'

  cc_host = None
  cxx_host = None
  cc_host_global_setting = None
  cxx_host_global_setting = None

  build_file, _, _ = gyp.common.ParseQualifiedTarget(target_list[0])
  make_global_settings = data[build_file].get('make_global_settings', [])
  build_to_root = gyp.common.InvertRelativePath(build_dir,
                                                options.toplevel_dir)
  # Serialize links through a lock file to limit peak memory use.
  flock = 'flock'
  if flavor == 'mac':
    flock = './gyp-mac-tool flock'
  wrappers = {}
  if flavor != 'win':
    wrappers['LINK'] = flock + ' linker.lock'
  for key, value in make_global_settings:
    if key == 'CC':
      cc = os.path.join(build_to_root, value)
    if key == 'CXX':
      cxx = os.path.join(build_to_root, value)
    if key == 'LD':
      ld = os.path.join(build_to_root, value)
    if key == 'CC.host':
      cc_host = os.path.join(build_to_root, value)
      cc_host_global_setting = value
    if key == 'CXX.host':
      cxx_host = os.path.join(build_to_root, value)
      cxx_host_global_setting = value
    if key == 'LD.host':
      ld_host = os.path.join(build_to_root, value)
    if key.endswith('_wrapper'):
      wrappers[key[:-len('_wrapper')]] = os.path.join(build_to_root, value)

  # Environment variables have the highest priority.
  cc = GetEnvironFallback(['CC_target', 'CC'], cc)
  master_ninja.variable('cc', CommandWithWrapper('CC', wrappers, cc))
  cxx = GetEnvironFallback(['CXX_target', 'CXX'], cxx)
  master_ninja.variable('cxx', CommandWithWrapper('CXX', wrappers, cxx))
  ld = GetEnvironFallback(['LD_target', 'LD'], ld)

  if not cc_host:
    cc_host = cc
  if not cxx_host:
    cxx_host = cxx

  if flavor == 'win':
    master_ninja.variable('ld', ld)
    master_ninja.variable('idl', 'midl.exe')
    master_ninja.variable('ar', 'lib.exe')
    master_ninja.variable('rc', 'rc.exe')
    master_ninja.variable('asm', 'ml.exe')
    master_ninja.variable('mt', 'mt.exe')
    master_ninja.variable('use_dep_database', '1')
  else:
    master_ninja.variable('ld', CommandWithWrapper('LINK', wrappers, ld))
    master_ninja.variable('ar', GetEnvironFallback(['AR_target', 'AR'], 'ar'))

  master_ninja.variable('ar_host', GetEnvironFallback(['AR_host'], 'ar'))
  cc_host = GetEnvironFallback(['CC_host'], cc_host)
  cxx_host = GetEnvironFallback(['CXX_host'], cxx_host)
  ld_host = GetEnvironFallback(['LD_host'], ld_host)

  # The environment variable could be used in 'make_global_settings', like
  # ['CC.host', '$(CC)'] or ['CXX.host', '$(CXX)'], transform them here.
  if '$(CC)' in cc_host and cc_host_global_setting:
    cc_host = cc_host_global_setting.replace('$(CC)', cc)
  if '$(CXX)' in cxx_host and cxx_host_global_setting:
    cxx_host = cxx_host_global_setting.replace('$(CXX)', cxx)
  master_ninja.variable('cc_host',
                        CommandWithWrapper('CC.host', wrappers, cc_host))
  master_ninja.variable('cxx_host',
                        CommandWithWrapper('CXX.host', wrappers, cxx_host))
  if flavor == 'win':
    master_ninja.variable('ld_host', ld_host)
  else:
    master_ninja.variable('ld_host', CommandWithWrapper(
        'LINK', wrappers, ld_host))

  master_ninja.newline()

  deps = None
  if int(generator_flags.get('use_deps', '0')) and flavor != 'win':
    deps = 'gcc'

  # Compile rules.
  if flavor != 'win':
    master_ninja.rule(
      'cc',
      description='CC $out',
      command=('$cc -MMD -MF $out.d $defines $includes $cflags $cflags_c '
               '$cflags_pch_c -c $in -o $out'),
      depfile='$out.d',
      deps=deps)
    master_ninja.rule(
      'cc_s',
      description='CC $out',
      command=('$cc $defines $includes $cflags $cflags_c '
               '$cflags_pch_c -c $in -o $out'))
    master_ninja.rule(
      'cxx',
      description='CXX $out',
      command=('$cxx -MMD -MF $out.d $defines $includes $cflags $cflags_cc '
               '$cflags_pch_cc -c $in -o $out'),
      depfile='$out.d',
      deps=deps)
  else:
    cc_command = ('ninja -t msvc -o $out -e $arch '
                  '-- '
                  '$cc /nologo /showIncludes /FC '
                  '@$out.rsp /c $in /Fo$out /Fd$pdbname ')
    cxx_command = ('ninja -t msvc -o $out -e $arch '
                   '-- '
                   '$cxx /nologo /showIncludes /FC '
                   '@$out.rsp /c $in /Fo$out /Fd$pdbname ')
    master_ninja.rule(
      'cc',
      description='CC $out',
      command=cc_command,
      depfile='$out.d',
      rspfile='$out.rsp',
      rspfile_content='$defines $includes $cflags $cflags_c')
    master_ninja.rule(
      'cxx',
      description='CXX $out',
      command=cxx_command,
      depfile='$out.d',
      rspfile='$out.rsp',
      rspfile_content='$defines $includes $cflags $cflags_cc')
    master_ninja.rule(
      'idl',
      description='IDL $in',
      command=('%s gyp-win-tool midl-wrapper $arch $outdir '
               '$tlb $h $dlldata $iid $proxy $in '
               '$idlflags' % sys.executable))
    master_ninja.rule(
      'rc',
      description='RC $in',
      # Note: $in must be last otherwise rc.exe complains.
      command=('%s gyp-win-tool rc-wrapper '
               '$arch $rc $defines $includes $rcflags /fo$out $in' %
               sys.executable))
    master_ninja.rule(
      'asm',
      description='ASM $in',
      command=('%s gyp-win-tool asm-wrapper '
               '$arch $asm $defines $includes /c /Fo $out $in' %
               sys.executable))

  # Link rules, per flavor: posix, then win, then mac.
  if flavor != 'mac' and flavor != 'win':
    master_ninja.rule(
      'alink',
      description='AR $out',
      command='rm -f $out && $ar rcs $out $in')
    master_ninja.rule(
      'alink_thin',
      description='AR $out',
      command='rm -f $out && $ar rcsT $out $in')

    # This allows targets that only need to depend on $lib's API to declare an
    # order-only dependency on $lib.TOC and avoid relinking such downstream
    # dependencies when $lib changes only in non-public ways.
    # The resulting string leaves an uninterpolated %{suffix} which
    # is used in the final substitution below.
    mtime_preserving_solink_base = (
        'if [ ! -e $lib -o ! -e ${lib}.TOC ]; then '
        '%(solink)s && %(extract_toc)s > ${lib}.TOC; else '
        '%(solink)s && %(extract_toc)s > ${lib}.tmp && '
        'if ! cmp -s ${lib}.tmp ${lib}.TOC; then mv ${lib}.tmp ${lib}.TOC ; '
        'fi; fi'
        % { 'solink':
              '$ld -shared $ldflags -o $lib -Wl,-soname=$soname %(suffix)s',
            'extract_toc':
              ('{ readelf -d ${lib} | grep SONAME ; '
               'nm -gD -f p ${lib} | cut -f1-2 -d\' \'; }')})

    master_ninja.rule(
      'solink',
      description='SOLINK $lib',
      restat=True,
      command=(mtime_preserving_solink_base % {
          'suffix': '-Wl,--whole-archive $in $solibs -Wl,--no-whole-archive '
          '$libs'}))
    master_ninja.rule(
      'solink_module',
      description='SOLINK(module) $lib',
      restat=True,
      command=(mtime_preserving_solink_base % {
          'suffix': '-Wl,--start-group $in $solibs -Wl,--end-group $libs'}))
    master_ninja.rule(
      'link',
      description='LINK $out',
      command=('$ld $ldflags -o $out '
               '-Wl,--start-group $in $solibs -Wl,--end-group $libs'))
  elif flavor == 'win':
    master_ninja.rule(
        'alink',
        description='LIB $out',
        command=('%s gyp-win-tool link-wrapper $arch '
                 '$ar /nologo /ignore:4221 /OUT:$out @$out.rsp' %
                 sys.executable),
        rspfile='$out.rsp',
        rspfile_content='$in_newline $libflags')
    dlldesc = 'LINK(DLL) $dll'
    dllcmd = ('%s gyp-win-tool link-wrapper $arch '
              '$ld /nologo $implibflag /DLL /OUT:$dll '
              '/PDB:$dll.pdb @$dll.rsp' % sys.executable)
    dllcmd += (' && %s gyp-win-tool manifest-wrapper $arch '
               'cmd /c if exist $dll.manifest del $dll.manifest' %
               sys.executable)
    dllcmd += (' && %s gyp-win-tool manifest-wrapper $arch '
               '$mt -nologo -manifest $manifests -out:$dll.manifest' %
               sys.executable)
    master_ninja.rule('solink', description=dlldesc, command=dllcmd,
                      rspfile='$dll.rsp',
                      rspfile_content='$libs $in_newline $ldflags',
                      restat=True)
    master_ninja.rule('solink_module', description=dlldesc, command=dllcmd,
                      rspfile='$dll.rsp',
                      rspfile_content='$libs $in_newline $ldflags',
                      restat=True)
    # Note that ldflags goes at the end so that it has the option of
    # overriding default settings earlier in the command line.
    master_ninja.rule(
        'link',
        description='LINK $out',
        command=('%s gyp-win-tool link-wrapper $arch '
                 '$ld /nologo /OUT:$out /PDB:$out.pdb @$out.rsp && '
                 '%s gyp-win-tool manifest-wrapper $arch '
                 'cmd /c if exist $out.manifest del $out.manifest && '
                 '%s gyp-win-tool manifest-wrapper $arch '
                 '$mt -nologo -manifest $manifests -out:$out.manifest' %
                 (sys.executable, sys.executable, sys.executable)),
        rspfile='$out.rsp',
        rspfile_content='$in_newline $libs $ldflags')
  else:
    master_ninja.rule(
      'objc',
      description='OBJC $out',
      command=('$cc -MMD -MF $out.d $defines $includes $cflags $cflags_objc '
               '$cflags_pch_objc -c $in -o $out'),
      depfile='$out.d',
      deps=deps)
    master_ninja.rule(
      'objcxx',
      description='OBJCXX $out',
      command=('$cxx -MMD -MF $out.d $defines $includes $cflags $cflags_objcc '
               '$cflags_pch_objcc -c $in -o $out'),
      depfile='$out.d',
      deps=deps)
    master_ninja.rule(
      'alink',
      description='LIBTOOL-STATIC $out, POSTBUILDS',
      command='rm -f $out && '
              './gyp-mac-tool filter-libtool libtool $libtool_flags '
              '-static -o $out $in'
              '$postbuilds')

    # Record the public interface of $lib in $lib.TOC. See the corresponding
    # comment in the posix section above for details.
    mtime_preserving_solink_base = (
        'if [ ! -e $lib -o ! -e ${lib}.TOC ] || '
        # Always force dependent targets to relink if this library
        # reexports something. Handling this correctly would require
        # recursive TOC dumping but this is rare in practice, so punt.
        'otool -l $lib | grep -q LC_REEXPORT_DYLIB ; then '
        '%(solink)s && %(extract_toc)s > ${lib}.TOC; '
        'else '
        '%(solink)s && %(extract_toc)s > ${lib}.tmp && '
        'if ! cmp -s ${lib}.tmp ${lib}.TOC; then '
        'mv ${lib}.tmp ${lib}.TOC ; '
        'fi; '
        'fi'
        % { 'solink': '$ld -shared $ldflags -o $lib %(suffix)s',
            'extract_toc':
              '{ otool -l $lib | grep LC_ID_DYLIB -A 5; '
              'nm -gP $lib | cut -f1-2 -d\' \' | grep -v U$$; true; }'})

    # TODO(thakis): The solink_module rule is likely wrong. Xcode seems to pass
    # -bundle -single_module here (for osmesa.so).
    master_ninja.rule(
      'solink',
      description='SOLINK $lib, POSTBUILDS',
      restat=True,
      command=(mtime_preserving_solink_base % {
          'suffix': '$in $solibs $libs$postbuilds'}))
    master_ninja.rule(
      'solink_module',
      description='SOLINK(module) $lib, POSTBUILDS',
      restat=True,
      command=(mtime_preserving_solink_base % {
          'suffix': '$in $solibs $libs$postbuilds'}))
    master_ninja.rule(
      'link',
      description='LINK $out, POSTBUILDS',
      command=('$ld $ldflags -o $out '
               '$in $solibs $libs$postbuilds'))
    master_ninja.rule(
      'infoplist',
      description='INFOPLIST $out',
      command=('$cc -E -P -Wno-trigraphs -x c $defines $in -o $out && '
               'plutil -convert xml1 $out $out'))
    master_ninja.rule(
      'mac_tool',
      description='MACTOOL $mactool_cmd $in',
      command='$env ./gyp-mac-tool $mactool_cmd $in $out')
    master_ninja.rule(
      'package_framework',
      description='PACKAGE FRAMEWORK $out, POSTBUILDS',
      command='./gyp-mac-tool package-framework $out $version$postbuilds '
              '&& touch $out')
  # Flavor-independent stamp and copy rules.
  if flavor == 'win':
    master_ninja.rule(
      'stamp',
      description='STAMP $out',
      command='%s gyp-win-tool stamp $out' % sys.executable)
    master_ninja.rule(
      'copy',
      description='COPY $in $out',
      command='%s gyp-win-tool recursive-mirror $in $out' % sys.executable)
  else:
    master_ninja.rule(
      'stamp',
      description='STAMP $out',
      command='${postbuilds}touch $out')
    master_ninja.rule(
      'copy',
      description='COPY $in $out',
      command='ln -f $in $out 2>/dev/null || (rm -rf $out && cp -af $in $out)')
  master_ninja.newline()

  all_targets = set()
  for build_file in params['build_files']:
    for target in gyp.common.AllTargets(target_list,
                                        target_dicts,
                                        os.path.normpath(build_file)):
      all_targets.add(target)
  all_outputs = set()

  # target_outputs is a map from qualified target name to a Target object.
  target_outputs = {}
  # target_short_names is a map from target short name to a list of Target
  # objects.
  target_short_names = {}
  for qualified_target in target_list:
    # qualified_target is like: third_party/icu/icu.gyp:icui18n#target
    build_file, name, toolset = \
        gyp.common.ParseQualifiedTarget(qualified_target)

    this_make_global_settings = data[build_file].get('make_global_settings', [])
    assert make_global_settings == this_make_global_settings, (
        "make_global_settings needs to be the same for all targets.")

    spec = target_dicts[qualified_target]
    if flavor == 'mac':
      gyp.xcode_emulation.MergeGlobalXcodeSettingsToSpec(data[build_file], spec)

    build_file = gyp.common.RelativePath(build_file, options.toplevel_dir)

    base_path = os.path.dirname(build_file)
    obj = 'obj'
    if toolset != 'target':
      obj += '.' + toolset
    output_file = os.path.join(obj, base_path, name + '.ninja')

    # NOTE(review): abs_build_dir appears unused below -- presumably a
    # leftover; verify before removing.
    abs_build_dir = os.path.abspath(toplevel_build)
    writer = NinjaWriter(qualified_target, target_outputs, base_path, build_dir,
                         OpenOutput(os.path.join(toplevel_build, output_file)),
                         flavor, toplevel_dir=options.toplevel_dir)
    master_ninja.subninja(output_file)

    target = writer.WriteSpec(
        spec, config_name, generator_flags, case_sensitive_filesystem)
    if target:
      if name != target.FinalOutput() and spec['toolset'] == 'target':
        target_short_names.setdefault(name, []).append(target)
      target_outputs[qualified_target] = target
      if qualified_target in all_targets:
        all_outputs.add(target.FinalOutput())

  if target_short_names:
    # Write a short name to build this target. This benefits both the
    # "build chrome" case as well as the gyp tests, which expect to be
    # able to run actions and build libraries by their short name.
    master_ninja.newline()
    master_ninja.comment('Short names for targets.')
    for short_name in target_short_names:
      master_ninja.build(short_name, 'phony', [x.FinalOutput() for x in
                                               target_short_names[short_name]])

  if all_outputs:
    master_ninja.newline()
    master_ninja.build('all', 'phony', list(all_outputs))
    master_ninja.default(generator_flags.get('default_target', 'all'))
def PerformBuild(data, configurations, params):
  """Run ninja for each configuration (supports `gyp --build`)."""
  options = params['options']
  for config in configurations:
    # Build directory layout matches GenerateOutputForConfig: out/<config>.
    builddir = os.path.join(options.toplevel_dir, 'out', config)
    arguments = ['ninja', '-C', builddir]
    print 'Building [%s]: %s' % (config, arguments)
    subprocess.check_call(arguments)
def CallGenerateOutputForConfig(arglist):
  """Multiprocessing worker: unpack |arglist| and generate one config."""
  # Ignore the interrupt signal so that the parent process catches it and
  # kills all multiprocessing children.
  signal.signal(signal.SIGINT, signal.SIG_IGN)

  (target_list, target_dicts, data, params, config_name) = arglist
  GenerateOutputForConfig(target_list, target_dicts, data, params, config_name)
def GenerateOutput(target_list, target_dicts, data, params):
  """gyp entry point: write ninja files for the requested configuration(s).

  With the 'config' generator flag only that configuration is generated;
  otherwise every configuration is generated, optionally in parallel.
  """
  user_config = params.get('generator_flags', {}).get('config', None)
  if gyp.common.GetFlavor(params) == 'win':
    target_list, target_dicts = MSVSUtil.ShardTargets(target_list, target_dicts)
    target_list, target_dicts = MSVSUtil.InsertLargePdbShims(
      target_list, target_dicts, generator_default_variables)

  if user_config:
    GenerateOutputForConfig(target_list, target_dicts, data, params,
                            user_config)
  else:
    config_names = target_dicts[target_list[0]]['configurations'].keys()
    if params['parallel']:
      try:
        # One worker per configuration.
        pool = multiprocessing.Pool(len(config_names))
        arglists = []
        for config_name in config_names:
          arglists.append(
              (target_list, target_dicts, data, params, config_name))
        pool.map(CallGenerateOutputForConfig, arglists)
      except KeyboardInterrupt, e:
        pool.terminate()
        raise e
    else:
      for config_name in config_names:
        GenerateOutputForConfig(target_list, target_dicts, data, params,
                                config_name)
| bsd-3-clause |
astaninger/speakout | venv/lib/python3.6/site-packages/pkg_resources/_vendor/packaging/markers.py | 228 | 8248 | # This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
# for complete details.
from __future__ import absolute_import, division, print_function
import operator
import os
import platform
import sys
from pkg_resources.extern.pyparsing import ParseException, ParseResults, stringStart, stringEnd
from pkg_resources.extern.pyparsing import ZeroOrMore, Group, Forward, QuotedString
from pkg_resources.extern.pyparsing import Literal as L # noqa
from ._compat import string_types
from .specifiers import Specifier, InvalidSpecifier
# Names exported by `from <this module> import *`.
__all__ = [
    "InvalidMarker", "UndefinedComparison", "UndefinedEnvironmentName",
    "Marker", "default_environment",
]
class InvalidMarker(ValueError):
    """
    An invalid marker was found, users should refer to PEP 508.

    Raised when a marker string cannot be parsed.
    """
class UndefinedComparison(ValueError):
    """
    An invalid operation was attempted on a value that doesn't support it.
    """
class UndefinedEnvironmentName(ValueError):
    """
    A name was attempted to be used that does not exist inside of the
    environment.
    """
class Node(object):
    """Common base for the nodes produced by the marker parser.

    Each node wraps a single parsed token; subclasses decide how the
    token is serialized back into marker syntax.
    """

    def __init__(self, value):
        # The raw parsed token this node represents.
        self.value = value

    def __str__(self):
        return str(self.value)

    def __repr__(self):
        return "<%s(%r)>" % (self.__class__.__name__, str(self))

    def serialize(self):
        # Subclasses must provide their own serialization.
        raise NotImplementedError
class Variable(Node):
    # A reference to an environment name, e.g. ``python_version``.
    def serialize(self):
        # Variables serialize as their bare name.
        return str(self)
class Value(Node):
    # A literal string operand from the marker expression.
    def serialize(self):
        # Literals serialize re-quoted.
        return '"{0}"'.format(self)
class Op(Node):
    # A comparison or containment operator token.
    def serialize(self):
        return str(self)
# pyparsing grammar for PEP 508 environment markers.

# All environment names a marker may reference, longest-first so prefixes
# don't shadow longer names.
VARIABLE = (
    L("implementation_version") |
    L("platform_python_implementation") |
    L("implementation_name") |
    L("python_full_version") |
    L("platform_release") |
    L("platform_version") |
    L("platform_machine") |
    L("platform_system") |
    L("python_version") |
    L("sys_platform") |
    L("os_name") |
    L("os.name") |  # PEP-345
    L("sys.platform") |  # PEP-345
    L("platform.version") |  # PEP-345
    L("platform.machine") |  # PEP-345
    L("platform.python_implementation") |  # PEP-345
    L("python_implementation") |  # undocumented setuptools legacy
    L("extra")
)

# Map legacy (PEP 345 / setuptools) spellings onto the PEP 508 names.
ALIASES = {
    'os.name': 'os_name',
    'sys.platform': 'sys_platform',
    'platform.version': 'platform_version',
    'platform.machine': 'platform_machine',
    'platform.python_implementation': 'platform_python_implementation',
    'python_implementation': 'platform_python_implementation'
}
VARIABLE.setParseAction(lambda s, l, t: Variable(ALIASES.get(t[0], t[0])))

# PEP 440 version comparison operators, longest-first.
VERSION_CMP = (
    L("===") |
    L("==") |
    L(">=") |
    L("<=") |
    L("!=") |
    L("~=") |
    L(">") |
    L("<")
)

MARKER_OP = VERSION_CMP | L("not in") | L("in")
MARKER_OP.setParseAction(lambda s, l, t: Op(t[0]))

# String literals may use either quote style.
MARKER_VALUE = QuotedString("'") | QuotedString('"')
MARKER_VALUE.setParseAction(lambda s, l, t: Value(t[0]))

BOOLOP = L("and") | L("or")

MARKER_VAR = VARIABLE | MARKER_VALUE

# A single comparison: <var-or-value> <op> <var-or-value>, parsed to a tuple.
MARKER_ITEM = Group(MARKER_VAR + MARKER_OP + MARKER_VAR)
MARKER_ITEM.setParseAction(lambda s, l, t: tuple(t[0]))

LPAREN = L("(").suppress()
RPAREN = L(")").suppress()

# Expressions are comparisons or parenthesized sub-expressions joined by
# and/or; Forward() allows the recursive reference.
MARKER_EXPR = Forward()
MARKER_ATOM = MARKER_ITEM | Group(LPAREN + MARKER_EXPR + RPAREN)
MARKER_EXPR << MARKER_ATOM + ZeroOrMore(BOOLOP + MARKER_EXPR)

# A complete marker must consume the entire input string.
MARKER = stringStart + MARKER_EXPR + stringEnd
def _coerce_parse_result(results):
    """Recursively convert pyparsing ParseResults into plain lists."""
    if not isinstance(results, ParseResults):
        return results
    return [_coerce_parse_result(item) for item in results]
def _format_marker(marker, first=True):
    """Render a parsed marker (nested lists/tuples of nodes) back to text.

    Only the outermost list is rendered without surrounding parentheses,
    which is what |first| controls.
    """
    assert isinstance(marker, (list, tuple, string_types))

    # A one-element list wrapping another list ([[...]]) is an artifact of
    # parsing; unwrap it so we don't emit extraneous parens on the outside.
    single_nested = (isinstance(marker, list) and len(marker) == 1 and
                     isinstance(marker[0], (list, tuple)))
    if single_nested:
        return _format_marker(marker[0])

    if isinstance(marker, tuple):
        return " ".join(node.serialize() for node in marker)
    if isinstance(marker, list):
        parts = [_format_marker(m, first=False) for m in marker]
        joined = " ".join(parts)
        return joined if first else "(" + joined + ")"
    return marker
# Plain comparison/containment operators available to markers; used by
# _eval_op as the fallback when PEP 440 specifier semantics don't apply.
_operators = {
    "in": lambda lhs, rhs: lhs in rhs,
    "not in": lambda lhs, rhs: lhs not in rhs,
    "<": operator.lt,
    "<=": operator.le,
    "==": operator.eq,
    "!=": operator.ne,
    ">=": operator.ge,
    ">": operator.gt,
}
def _eval_op(lhs, op, rhs):
    """Evaluate a single ``lhs <op> rhs`` marker comparison.

    PEP 440 version comparisons are delegated to Specifier whenever the
    operator plus right-hand side forms a valid specifier; everything
    else falls back to the plain comparisons in ``_operators``.

    Raises UndefinedComparison when no fallback exists for the operator.
    """
    op_name = op.serialize()
    try:
        spec = Specifier(op_name + rhs)
    except InvalidSpecifier:
        pass
    else:
        return spec.contains(lhs)
    comparator = _operators.get(op_name)
    if comparator is None:
        raise UndefinedComparison(
            "Undefined {0!r} on {1!r} and {2!r}.".format(op, lhs, rhs)
        )
    return comparator(lhs, rhs)
_undefined = object()
def _get_env(environment, name):
value = environment.get(name, _undefined)
if value is _undefined:
raise UndefinedEnvironmentName(
"{0!r} does not exist in evaluation environment.".format(name)
)
return value
def _evaluate_markers(markers, environment):
    """Evaluate a coerced marker structure against *environment*.

    'and' binds tighter than 'or': consecutive results are ANDed within
    the current group and each 'or' starts a new group; the groups are
    then ORed together.
    """
    groups = [[]]
    for marker in markers:
        assert isinstance(marker, (list, tuple, string_types))
        if isinstance(marker, list):
            # Parenthesised sub-expression: evaluate recursively.
            groups[-1].append(_evaluate_markers(marker, environment))
        elif isinstance(marker, tuple):
            lhs, op, rhs = marker
            # Exactly one side is a Variable; resolve it from the environment
            # and take the other side's literal value.
            if isinstance(lhs, Variable):
                lhs_value = _get_env(environment, lhs.value)
                rhs_value = rhs.value
            else:
                lhs_value = lhs.value
                rhs_value = _get_env(environment, rhs.value)
            groups[-1].append(_eval_op(lhs_value, op, rhs_value))
        else:
            assert marker in ["and", "or"]
            if marker == "or":
                groups.append([])
    return any(all(item) for item in groups)
def format_full_version(info):
    """Format a sys.version_info-like struct as e.g. '3.8.1' or '3.9.0b2'.

    Pre-releases append the first letter of the release level plus the
    serial number, matching CPython's own version formatting.
    """
    version = '{}.{}.{}'.format(info.major, info.minor, info.micro)
    level = info.releaselevel
    if level != 'final':
        version += level[0] + str(info.serial)
    return version
def default_environment():
    """Return the PEP 508 marker environment for the running interpreter."""
    if hasattr(sys, 'implementation'):
        iver = format_full_version(sys.implementation.version)
        implementation_name = sys.implementation.name
    else:
        # Python 2 has no sys.implementation; leave neutral defaults.
        iver = '0'
        implementation_name = ''
    return {
        "implementation_name": implementation_name,
        "implementation_version": iver,
        "os_name": os.name,
        "platform_machine": platform.machine(),
        "platform_release": platform.release(),
        "platform_system": platform.system(),
        "platform_version": platform.version(),
        "python_full_version": platform.python_version(),
        "platform_python_implementation": platform.python_implementation(),
        # Join major.minor explicitly instead of slicing the first three
        # characters: "3.10.1"[:3] would wrongly yield "3.1".
        "python_version": '.'.join(platform.python_version_tuple()[:2]),
        "sys_platform": sys.platform,
    }
class Marker(object):
    """Parsed representation of a PEP 508 environment marker string."""
    def __init__(self, marker):
        # Parse eagerly so invalid markers fail at construction time.
        try:
            self._markers = _coerce_parse_result(MARKER.parseString(marker))
        except ParseException as e:
            # Include a short window of the input around the error position.
            err_str = "Invalid marker: {0!r}, parse error at {1!r}".format(
                marker, marker[e.loc:e.loc + 8])
            raise InvalidMarker(err_str)
    def __str__(self):
        # Re-serialise the parsed structure (may normalise whitespace).
        return _format_marker(self._markers)
    def __repr__(self):
        return "<Marker({0!r})>".format(str(self))
    def evaluate(self, environment=None):
        """Evaluate a marker.
        Return the boolean from evaluating the given marker against the
        environment. environment is an optional argument to override all or
        part of the determined environment.
        The environment is determined from the current Python process.
        """
        current_environment = default_environment()
        if environment is not None:
            current_environment.update(environment)
        return _evaluate_markers(self._markers, current_environment)
| mit |
franciscod/python-telegram-bot | telegram/document.py | 2 | 2064 | #!/usr/bin/env python
#
# A library that provides a Python interface to the Telegram Bot API
# Copyright (C) 2015-2016
# Leandro Toledo de Souza <devs@python-telegram-bot.org>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser Public License for more details.
#
# You should have received a copy of the GNU Lesser Public License
# along with this program. If not, see [http://www.gnu.org/licenses/].
"""This module contains a object that represents a Telegram Document."""
from telegram import PhotoSize, TelegramObject
class Document(TelegramObject):
    """This object represents a Telegram Document.
    Attributes:
        file_id (str): Unique file identifier.
        thumb (:class:`telegram.PhotoSize`): Document thumbnail, if any.
        file_name (str): Original filename; '' when unknown.
        mime_type (str): MIME type; '' when unknown.
        file_size (int): File size in bytes; 0 when unknown.
    Args:
        file_id (str): Unique file identifier.
        **kwargs: Arbitrary keyword arguments.
    Keyword Args:
        thumb (Optional[:class:`telegram.PhotoSize`]):
        file_name (Optional[str]):
        mime_type (Optional[str]):
        file_size (Optional[int]):
    """
    def __init__(self, file_id, **kwargs):
        # Required field.
        self.file_id = str(file_id)
        # Optional fields, coerced to stable types with empty/zero defaults.
        self.thumb = kwargs.get('thumb')
        self.file_name = kwargs.get('file_name', '')
        self.mime_type = str(kwargs.get('mime_type', ''))
        self.file_size = int(kwargs.get('file_size', 0))
    @staticmethod
    def de_json(data):
        """Build a Document from a decoded JSON dict.
        Args:
            data (dict): Decoded JSON payload (may be empty/None).
        Returns:
            telegram.Document: The parsed document, or None for empty input.
        """
        if data:
            # Promote the raw thumbnail dict to a PhotoSize instance in place.
            data['thumb'] = PhotoSize.de_json(data.get('thumb'))
            return Document(**data)
        return None
| gpl-2.0 |
rosmo/ansible | lib/ansible/modules/network/eos/eos_interface.py | 23 | 15169 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# Copyright: (c) 2017, Ansible by Red Hat, inc
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'network'}
DOCUMENTATION = """
---
module: eos_interface
version_added: "2.5"
author: "Ganesh Nalawade (@ganeshrn)"
short_description: Manage Interface on Arista EOS network devices
description:
- This module provides declarative management of Interfaces
on Arista EOS network devices.
notes:
- Tested against EOS 4.15
options:
name:
description:
- Name of the Interface to be configured on remote device. The name of interface
should be in expanded format and not abbreviated.
required: true
description:
description:
- Description of Interface upto 240 characters.
enabled:
description:
- Interface link status. If the value is I(True) the interface state will be
enabled, else if value is I(False) interface will be in disable (shutdown) state.
default: True
type: bool
speed:
description:
- This option configures autoneg and speed/duplex/flowcontrol for the interface
given in C(name) option.
mtu:
description:
- Set maximum transmission unit size in bytes of transmit packet for the interface given
in C(name) option.
tx_rate:
description:
- Transmit rate in bits per second (bps) for the interface given in C(name) option.
- This is state check parameter only.
- Supports conditionals, see L(Conditionals in Networking Modules,../network/user_guide/network_working_with_command_output.html)
rx_rate:
description:
- Receiver rate in bits per second (bps) for the interface given in C(name) option.
- This is state check parameter only.
- Supports conditionals, see L(Conditionals in Networking Modules,../network/user_guide/network_working_with_command_output.html)
neighbors:
description:
- Check the operational state of given interface C(name) for LLDP neighbor.
- The following suboptions are available.
suboptions:
host:
description:
- "LLDP neighbor host for given interface C(name)."
port:
description:
- "LLDP neighbor port to which given interface C(name) is connected."
aggregate:
description:
- List of Interfaces definitions. Each of the entry in aggregate list should
define name of interface C(name) and other options as required.
delay:
description:
- Time in seconds to wait before checking for the operational state on remote
device. This wait is applicable for operational state argument which are
I(state) with values C(up)/C(down), I(tx_rate) and I(rx_rate).
default: 10
state:
description:
- State of the Interface configuration, C(up) means present and
operationally up and C(down) means present and operationally C(down)
default: present
choices: ['present', 'absent', 'up', 'down']
extends_documentation_fragment: eos
"""
EXAMPLES = """
- name: configure interface
eos_interface:
name: ethernet1
description: test-interface
speed: 100full
mtu: 512
- name: remove interface
eos_interface:
name: ethernet1
state: absent
- name: make interface up
eos_interface:
name: ethernet1
enabled: True
- name: make interface down
eos_interface:
name: ethernet1
enabled: False
- name: Check intent arguments
eos_interface:
name: ethernet1
state: up
tx_rate: ge(0)
rx_rate: le(0)
- name: Check neighbors intent arguments
eos_interface:
name: ethernet1
neighbors:
- port: eth0
host: netdev
- name: Configure interface in disabled state and check if the operational state is disabled or not
eos_interface:
name: ethernet1
enabled: False
state: down
- name: Add interface using aggregate
eos_interface:
aggregate:
- { name: ethernet1, mtu: 256, description: test-interface-1 }
- { name: ethernet2, mtu: 516, description: test-interface-2 }
speed: 100full
state: present
- name: Delete interface using aggregate
eos_interface:
aggregate:
- name: loopback9
- name: loopback10
state: absent
"""
RETURN = """
commands:
description: The list of configuration mode commands to send to the device.
returned: always, except for the platforms that use Netconf transport to manage the device.
type: list
sample:
- interface ethernet1
- description test-interface
- speed 100full
- mtu 512
"""
import re
from copy import deepcopy
from time import sleep
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.network.common.config import NetworkConfig
from ansible.module_utils.network.common.utils import conditional, remove_default_spec
from ansible.module_utils.network.eos.eos import get_config, load_config, run_commands
from ansible.module_utils.network.eos.eos import eos_argument_spec
def validate_mtu(value, module):
    """Fail the module run unless *value* is a valid interface MTU.

    A falsy value (option not set) is accepted; anything else must
    convert to an int in the inclusive range 68..65535.
    """
    if not value:
        return
    if int(value) < 68 or int(value) > 65535:
        module.fail_json(msg='mtu must be between 68 and 65535')
def validate_param_values(module, obj, param=None):
    """Run any module-level ``validate_<key>`` helper for each key in *obj*.

    *param* supplies the values to validate and defaults to
    ``module.params``; validators report failures via ``module.fail_json``
    themselves.
    """
    if param is None:
        param = module.params
    for key in obj:
        # Dispatch by naming convention to validators defined in this module.
        validator = globals().get('validate_' + key)
        if callable(validator):
            validator(param.get(key), module)
def parse_shutdown(configobj, name):
    """Return True when interface *name*'s config block contains 'shutdown'."""
    block = configobj['interface %s' % name]
    text = '\n'.join(block.children)
    return re.search(r'shutdown', text, re.M) is not None
def parse_config_argument(configobj, name, arg=None):
    """Extract the value of a '<arg> <value>' line under interface *name*.

    Returns the value string, or None when the argument is not configured.
    """
    block = configobj['interface %s' % name]
    text = '\n'.join(block.children)
    match = re.search(r'%s (.+)$' % arg, text, re.M)
    return match.group(1) if match else None
def search_obj_in_list(name, lst):
    """Return the first dict in *lst* whose 'name' equals *name*, else None."""
    return next((entry for entry in lst if entry['name'] == name), None)
def add_command_to_interface(interface, cmd, commands):
    """Append *cmd* to *commands*, emitting the 'interface ...' header once."""
    if interface in commands:
        commands.append(cmd)
    else:
        commands.extend([interface, cmd])
def map_config_to_obj(module):
    """Parse the device's running config into a list of interface dicts.

    Each entry carries name/description/speed/mtu/disable/state keys and
    represents the current ('have') state used for diffing against the
    desired ('want') state.
    """
    config = get_config(module)
    configobj = NetworkConfig(indent=3, contents=config)
    match = re.findall(r'^interface (\S+)', config, re.M)
    if not match:
        return list()
    instances = list()
    # set() de-duplicates interface names repeated in the config.
    for item in set(match):
        obj = {
            'name': item.lower(),
            'description': parse_config_argument(configobj, item, 'description'),
            'speed': parse_config_argument(configobj, item, 'speed'),
            'mtu': parse_config_argument(configobj, item, 'mtu'),
            'disable': parse_shutdown(configobj, item),
            'state': 'present'
        }
        instances.append(obj)
    return instances
def map_params_to_obj(module):
    """Normalise task parameters into a list of desired ('want') interface dicts.

    With the `aggregate` option, unset per-item keys are filled from the
    top-level parameters; otherwise a single entry is built from the module
    params. The boolean `enabled` option is translated into the internal
    `disable` flag used by command generation.
    """
    obj = []
    aggregate = module.params.get('aggregate')
    if aggregate:
        for item in aggregate:
            # Fill unset per-item options from the top-level parameters.
            for key in item:
                if item.get(key) is None:
                    item[key] = module.params[key]
            item['name'] = item['name'].lower()
            validate_param_values(module, item, item)
            d = item.copy()
            if d['enabled']:
                d['disable'] = False
            else:
                d['disable'] = True
            obj.append(d)
    else:
        params = {
            'name': module.params['name'].lower(),
            'description': module.params['description'],
            'speed': module.params['speed'],
            'mtu': module.params['mtu'],
            'state': module.params['state'],
            'delay': module.params['delay'],
            'tx_rate': module.params['tx_rate'],
            'rx_rate': module.params['rx_rate'],
            'neighbors': module.params['neighbors']
        }
        validate_param_values(module, params)
        if module.params['enabled']:
            params.update({'disable': False})
        else:
            params.update({'disable': True})
        obj.append(params)
    return obj
def map_obj_to_commands(updates, modules):
    """Translate (want, have) interface state into EOS config commands.

    *updates* is a ``(want, have)`` tuple of desired and current interface
    dict lists; *modules* is unused but kept for signature compatibility
    with the caller. Returns the ordered list of CLI commands to apply.
    """
    commands = list()
    want, have = updates
    args = ('speed', 'description', 'mtu')
    for w in want:
        name = w['name']
        disable = w['disable']
        state = w['state']
        obj_in_have = search_obj_in_list(name, have)
        interface = 'interface ' + name
        if state == 'absent' and obj_in_have:
            # Interface exists on the device but should not: remove it.
            commands.append('no ' + interface)
        elif state in ('present', 'up', 'down'):
            if obj_in_have:
                # Update only the attributes that differ from running config.
                for item in args:
                    candidate = w.get(item)
                    running = obj_in_have.get(item)
                    if candidate != running:
                        if candidate:
                            cmd = "{0} {1}".format(item, candidate)
                            add_command_to_interface(interface, cmd, commands)
                # Toggle admin state only when it differs from the device.
                if disable and not obj_in_have.get('disable', False):
                    add_command_to_interface(interface, 'shutdown', commands)
                elif not disable and obj_in_have.get('disable', False):
                    add_command_to_interface(interface, 'no shutdown', commands)
            else:
                # Interface absent from device: create and configure it.
                commands.append(interface)
                for item in args:
                    value = w.get(item)
                    if value:
                        commands.append("{0} {1}".format(item, value))
                if disable:
                    # Fix: a newly created interface requested as disabled must
                    # be shut down; this previously emitted 'no shutdown',
                    # which enabled the interface instead.
                    commands.append('shutdown')
    return commands
def check_declarative_intent_params(module, want, result):
    """Verify operational-state (intent) arguments after configuration.

    For each wanted interface, checks link state, tx/rx rates and LLDP
    neighbors against ``show`` command output. Returns a list of
    human-readable unsatisfied conditions (empty when all checks pass).
    """
    failed_conditions = []
    have_neighbors = None
    for w in want:
        want_state = w.get('state')
        want_tx_rate = w.get('tx_rate')
        want_rx_rate = w.get('rx_rate')
        want_neighbors = w.get('neighbors')
        # Skip interfaces with no operational checks requested.
        if want_state not in ('up', 'down') and not want_tx_rate and not want_rx_rate and not want_neighbors:
            continue
        # Give the device time to converge after a configuration change.
        if result['changed']:
            sleep(w['delay'])
        command = {'command': 'show interfaces %s' % w['name'], 'output': 'text'}
        output = run_commands(module, [command])
        if want_state in ('up', 'down'):
            match = re.search(r'%s (\w+)' % 'line protocol is', output[0], re.M)
            have_state = None
            if match:
                have_state = match.group(1)
            if have_state is None or not conditional(want_state, have_state.strip()):
                failed_conditions.append('state ' + 'eq(%s)' % want_state)
        if want_tx_rate:
            match = re.search(r'%s (\d+)' % 'output rate', output[0], re.M)
            have_tx_rate = None
            if match:
                have_tx_rate = match.group(1)
            # tx_rate supports conditionals such as ge(0); compare as int.
            if have_tx_rate is None or not conditional(want_tx_rate, have_tx_rate.strip(), cast=int):
                failed_conditions.append('tx_rate ' + want_tx_rate)
        if want_rx_rate:
            match = re.search(r'%s (\d+)' % 'input rate', output[0], re.M)
            have_rx_rate = None
            if match:
                have_rx_rate = match.group(1)
            if have_rx_rate is None or not conditional(want_rx_rate, have_rx_rate.strip(), cast=int):
                failed_conditions.append('rx_rate ' + want_rx_rate)
        if want_neighbors:
            have_host = []
            have_port = []
            # LLDP output is fetched once and reused for subsequent interfaces.
            if have_neighbors is None:
                command = {'command': 'show lldp neighbors {0}'.format(w['name']), 'output': 'text'}
                have_neighbors = run_commands(module, [command])
            if have_neighbors[0]:
                lines = have_neighbors[0].strip().split('\n')
                col = None
                # Locate the table header line to find where rows start.
                for index, line in enumerate(lines):
                    if re.search(r"^Port\s+Neighbor Device ID\s+Neighbor Port ID\s+TTL", line):
                        col = index
                        break
                if col and col < len(lines) - 1:
                    for items in lines[col + 1:]:
                        value = re.split(r'\s+', items)
                        try:
                            have_port.append(value[2])
                            have_host.append(value[1])
                        except IndexError:
                            # Malformed/short row: ignore it.
                            pass
            for item in want_neighbors:
                host = item.get('host')
                port = item.get('port')
                if host and host not in have_host:
                    failed_conditions.append('host ' + host)
                if port and port not in have_port:
                    failed_conditions.append('port ' + port)
    return failed_conditions
def main():
    """ main entry point for module execution
    """
    # Sub-spec for the `neighbors` intent option (LLDP host/port pairs).
    neighbors_spec = dict(
        host=dict(),
        port=dict()
    )
    # Options shared by the top level and each `aggregate` item.
    element_spec = dict(
        name=dict(),
        description=dict(),
        speed=dict(),
        mtu=dict(),
        enabled=dict(default=True, type='bool'),
        tx_rate=dict(),
        rx_rate=dict(),
        neighbors=dict(type='list', elements='dict', options=neighbors_spec),
        delay=dict(default=10, type='int'),
        state=dict(default='present',
                   choices=['present', 'absent', 'up', 'down'])
    )
    aggregate_spec = deepcopy(element_spec)
    aggregate_spec['name'] = dict(required=True)
    # remove default in aggregate spec, to handle common arguments
    remove_default_spec(aggregate_spec)
    argument_spec = dict(
        aggregate=dict(type='list', elements='dict', options=aggregate_spec),
    )
    argument_spec.update(element_spec)
    argument_spec.update(eos_argument_spec)
    required_one_of = [['name', 'aggregate']]
    mutually_exclusive = [['name', 'aggregate']]
    module = AnsibleModule(argument_spec=argument_spec,
                           required_one_of=required_one_of,
                           mutually_exclusive=mutually_exclusive,
                           supports_check_mode=True)
    warnings = list()
    result = {'changed': False}
    if warnings:
        result['warnings'] = warnings
    # Desired state from task params vs. current state parsed from device.
    want = map_params_to_obj(module)
    have = map_config_to_obj(module)
    commands = map_obj_to_commands((want, have), module)
    result['commands'] = commands
    if commands:
        # Only push the configuration when not running in check mode.
        commit = not module.check_mode
        response = load_config(module, commands, commit=commit)
        if response.get('diff') and module._diff:
            result['diff'] = {'prepared': response.get('diff')}
        result['session_name'] = response.get('session')
        result['changed'] = True
    # Validate operational-state (intent) arguments after any change.
    failed_conditions = check_declarative_intent_params(module, want, result)
    if failed_conditions:
        msg = 'One or more conditional statements have not been satisfied'
        module.fail_json(msg=msg, failed_conditions=failed_conditions)
    module.exit_json(**result)
if __name__ == '__main__':
    main()
| gpl-3.0 |
ool2016-seclab/quarantineSystem | api.py | 1 | 3438 | import json
import logging
import ryutest
from webob import Response
from ryu.controller import ofp_event
from ryu.controller.handler import CONFIG_DISPATCHER
from ryu.controller.handler import set_ev_cls
from ryu.app.wsgi import ControllerBase, WSGIApplication, route
from ryu.lib import dpid as dpid_lib
# Key under which the switch app instance is handed to the WSGI controller.
simple_switch_instance_name = 'simple_switch_api_app'
# REST endpoint template; {dpid} is the datapath id.
url = '/simpleswitch/mactable/{dpid}'
class SimpleSwitchRest13(ryutest.SimpleSwitch13):
    """SimpleSwitch13 variant that also exposes its MAC table over REST."""
    # Ask ryu's app framework to inject a WSGI application instance.
    _CONTEXTS = { 'wsgi': WSGIApplication }
    def __init__(self, *args, **kwargs):
        super(SimpleSwitchRest13, self).__init__(*args, **kwargs)
        # dpid -> datapath, populated as switches connect.
        self.switches = {}
        wsgi = kwargs['wsgi']
        wsgi.register(SimpleSwitchController, {simple_switch_instance_name : self})
    @set_ev_cls(ofp_event.EventOFPSwitchFeatures, CONFIG_DISPATCHER)
    def switch_features_handler(self, ev):
        """Record the connecting datapath and seed its MAC table."""
        super(SimpleSwitchRest13, self).switch_features_handler(ev)
        datapath = ev.msg.datapath
        self.switches[datapath.id] = datapath
        self.mac_to_port.setdefault(datapath.id, {})
    def set_mac_to_port(self, dpid, entry):
        """Register a {mac, port} entry and install matching flows both ways.

        Returns the (possibly updated) MAC table for *dpid*.
        """
        mac_table = self.mac_to_port.setdefault(dpid, {})
        datapath = self.switches.get(dpid)
        entry_port = entry['port']
        entry_mac = entry['mac']
        if datapath is not None:
            parser = datapath.ofproto_parser
            # Only wire up flows the first time this port appears.
            if entry_port not in mac_table.values():
                for mac, port in mac_table.items():
                    # from known device to new device
                    actions = [parser.OFPActionOutput(entry_port)]
                    match = parser.OFPMatch(in_port=port, eth_dst=entry_mac)
                    self.add_flow(datapath, 1, match, actions)
                    # from new device to known device
                    actions = [parser.OFPActionOutput(port)]
                    match = parser.OFPMatch(in_port=entry_port, eth_dst=mac)
                    self.add_flow(datapath, 1, match, actions)
                mac_table.update({entry_mac : entry_port})
        return mac_table
class SimpleSwitchController(ControllerBase):
    """WSGI REST controller exposing the learned MAC table per datapath."""
    def __init__(self, req, link, data, **config):
        super(SimpleSwitchController, self).__init__(req, link, data, **config)
        # The SimpleSwitchRest13 app instance registered by its __init__.
        self.simpl_switch_spp = data[simple_switch_instance_name]
    @route('simpleswitch', url, methods=['GET'], requirements={'dpid': dpid_lib.DPID_PATTERN})
    def list_mac_table(self, req, **kwargs):
        # GET /simpleswitch/mactable/<dpid>: dump the learned MAC table.
        simple_switch = self.simpl_switch_spp
        dpid = dpid_lib.str_to_dpid(kwargs['dpid'])
        if dpid not in simple_switch.mac_to_port:
            return Response(status=404)
        mac_table = simple_switch.mac_to_port.get(dpid, {})
        body = json.dumps(mac_table)
        return Response(content_type='application/json', body=body)
    @route('simpleswitch', url, methods=['PUT'], requirements={'dpid': dpid_lib.DPID_PATTERN})
    def put_mac_table(self, req, **kwargs):
        # PUT /simpleswitch/mactable/<dpid>: install a {mac, port} entry.
        simple_switch = self.simpl_switch_spp
        dpid = dpid_lib.str_to_dpid(kwargs['dpid'])
        # SECURITY: eval() on the raw request body executes arbitrary Python
        # supplied by any client able to reach this API; this should be
        # json.loads(req.body) instead -- flagged for follow-up.
        new_entry = eval(req.body)
        if dpid not in simple_switch.mac_to_port:
            return Response(status=404)
        try:
            mac_table = simple_switch.set_mac_to_port(dpid, new_entry)
            body = json.dumps(mac_table)
            return Response(content_type='application/json', body=body)
        except Exception as e:
            return Response(status=500) | mit |
j00bar/ansible | contrib/inventory/brook.py | 140 | 9670 | #!/usr/bin/env python
# Copyright 2016 Doalitic.
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
"""
Brook.io external inventory script
==================================
Generates inventory that Ansible can understand by making API requests to Brook.io via the libbrook
library. Hence, such dependency must be installed in the system to run this script.
The default configuration file is named 'brook.ini' and is located alongside this script. You can
choose any other file by setting the BROOK_INI_PATH environment variable.
If param 'project_id' is left blank in 'brook.ini', the inventory includes all the instances in
projects where the requesting user belongs. Otherwise, only instances from the given project are
included, provided the requesting user belongs to it.
The following variables are established for every host. They can be retrieved from the hostvars
dictionary.
- brook_pid: str
- brook_name: str
- brook_description: str
- brook_project: str
- brook_template: str
- brook_region: str
- brook_zone: str
- brook_status: str
- brook_tags: list(str)
- brook_internal_ips: list(str)
- brook_external_ips: list(str)
- brook_created_at
- brook_updated_at
- ansible_ssh_host
Instances are grouped by the following categories:
- tag:
A group is created for each tag. E.g. groups 'tag_foo' and 'tag_bar' are created if there exist
instances with tags 'foo' and/or 'bar'.
- project:
A group is created for each project. E.g. group 'project_test' is created if a project named
'test' exist.
- status:
A group is created for each instance state. E.g. groups 'status_RUNNING' and 'status_PENDING'
are created if there are instances in running and pending state.
Examples:
Execute uname on all instances in project 'test'
$ ansible -i brook.py project_test -m shell -a "/bin/uname -a"
Install nginx on all debian web servers tagged with 'www'
$ ansible -i brook.py tag_www -m apt -a "name=nginx state=present"
Run site.yml playbook on web servers
$ ansible-playbook -i brook.py site.yml -l tag_www
Support:
This script is tested on Python 2.7 and 3.4. It may work on other versions though.
Author: Francisco Ros <fjros@doalitic.com>
Version: 0.2
"""
import sys
import os
try:
from ConfigParser import SafeConfigParser as ConfigParser
except ImportError:
from configparser import ConfigParser
try:
import json
except ImportError:
import simplejson as json
try:
import libbrook
except:
sys.exit('Brook.io inventory script requires libbrook. See https://github.com/doalitic/libbrook')
class BrookInventory:
    """Builds an Ansible dynamic inventory from the Brook.io API."""
    # Base URL for all Brook.io API calls.
    _API_ENDPOINT = 'https://api.brook.io'
    def __init__(self):
        # Read credentials, authenticate, then build the inventory eagerly.
        self._configure_from_file()
        self.client = self.get_api_client()
        self.inventory = self.get_inventory()
    def _configure_from_file(self):
        """Initialize from .ini file.
        Configuration file is assumed to be named 'brook.ini' and to be located on the same
        directory than this file, unless the environment variable BROOK_INI_PATH says otherwise.
        """
        brook_ini_default_path = \
            os.path.join(os.path.dirname(os.path.realpath(__file__)), 'brook.ini')
        brook_ini_path = os.environ.get('BROOK_INI_PATH', brook_ini_default_path)
        config = ConfigParser(defaults={
            'api_token': '',
            'project_id': ''
        })
        config.read(brook_ini_path)
        self.api_token = config.get('brook', 'api_token')
        self.project_id = config.get('brook', 'project_id')
        # The API token is mandatory; project_id may stay empty ("all projects").
        if not self.api_token:
            sys.exit('You must provide (at least) your Brook.io API token to generate the dynamic '
                     'inventory.')
    def get_api_client(self):
        """Authenticate user via the provided credentials and return the corresponding API client.
        """
        # Get JWT token from API token
        #
        unauthenticated_client = libbrook.ApiClient(host=self._API_ENDPOINT)
        auth_api = libbrook.AuthApi(unauthenticated_client)
        api_token = libbrook.AuthTokenRequest()
        api_token.token = self.api_token
        jwt = auth_api.auth_token(token=api_token)
        # Create authenticated API client
        #
        return libbrook.ApiClient(host=self._API_ENDPOINT,
                                  header_name='Authorization',
                                  header_value='Bearer %s' % jwt.token)
    def get_inventory(self):
        """Generate Ansible inventory.
        """
        groups = dict()
        meta = dict()
        meta['hostvars'] = dict()
        instances_api = libbrook.InstancesApi(self.client)
        projects_api = libbrook.ProjectsApi(self.client)
        templates_api = libbrook.TemplatesApi(self.client)
        # If no project is given, get all projects the requesting user has access to
        #
        if not self.project_id:
            projects = [project.id for project in projects_api.index_projects()]
        else:
            projects = [self.project_id]
        # Build inventory from instances in all projects
        #
        for project_id in projects:
            project = projects_api.show_project(project_id=project_id)
            for instance in instances_api.index_instances(project_id=project_id):
                # Get template used for this instance if known
                template = templates_api.show_template(template_id=instance.template) if instance.template else None
                # Update hostvars
                try:
                    meta['hostvars'][instance.name] = \
                        self.hostvars(project, instance, template, instances_api)
                except libbrook.rest.ApiException:
                    # Skip instances for which hostvars cannot be retrieved
                    # (e.g. no public IP -- see hostvars()).
                    continue
                # Group by project
                project_group = 'project_%s' % project.name
                if project_group in groups:
                    groups[project_group].append(instance.name)
                else:
                    groups[project_group] = [instance.name]
                # Group by status
                status_group = 'status_%s' % meta['hostvars'][instance.name]['brook_status']
                if status_group in groups:
                    groups[status_group].append(instance.name)
                else:
                    groups[status_group] = [instance.name]
                # Group by tags
                tags = meta['hostvars'][instance.name]['brook_tags']
                for tag in tags:
                    tag_group = 'tag_%s' % tag
                    if tag_group in groups:
                        groups[tag_group].append(instance.name)
                    else:
                        groups[tag_group] = [instance.name]
        groups['_meta'] = meta
        return groups
    def hostvars(self, project, instance, template, api):
        """Return the hostvars dictionary for the given instance.
        Raise libbrook.rest.ApiException if it cannot retrieve all required information from the
        Brook.io API.
        """
        # Rename API fields to the brook_* names documented in the header.
        hostvars = instance.to_dict()
        hostvars['brook_pid'] = hostvars.pop('pid')
        hostvars['brook_name'] = hostvars.pop('name')
        hostvars['brook_description'] = hostvars.pop('description')
        hostvars['brook_project'] = hostvars.pop('project')
        hostvars['brook_template'] = hostvars.pop('template')
        hostvars['brook_region'] = hostvars.pop('region')
        hostvars['brook_zone'] = hostvars.pop('zone')
        hostvars['brook_created_at'] = hostvars.pop('created_at')
        hostvars['brook_updated_at'] = hostvars.pop('updated_at')
        del hostvars['id']
        del hostvars['key']
        del hostvars['provider']
        del hostvars['image']
        # Substitute identifiers for names
        #
        hostvars['brook_project'] = project.name
        hostvars['brook_template'] = template.name if template else None
        # Retrieve instance state
        #
        status = api.status_instance(project_id=project.id, instance_id=instance.id)
        hostvars.update({'brook_status': status.state})
        # Retrieve instance tags
        #
        tags = api.instance_tags(project_id=project.id, instance_id=instance.id)
        hostvars.update({'brook_tags': tags})
        # Retrieve instance addresses
        #
        addresses = api.instance_addresses(project_id=project.id, instance_id=instance.id)
        internal_ips = [address.address for address in addresses if address.scope == 'internal']
        external_ips = [address.address for address in addresses
                        if address.address and address.scope == 'external']
        hostvars.update({'brook_internal_ips': internal_ips})
        hostvars.update({'brook_external_ips': external_ips})
        try:
            hostvars.update({'ansible_ssh_host': external_ips[0]})
        except IndexError:
            # No public IP: signal the caller (get_inventory) to skip this host.
            raise libbrook.rest.ApiException(status='502', reason='Instance without public IP')
        return hostvars
# Run the script
#
# Executed at import/run time: print the full inventory as JSON on stdout,
# the format Ansible expects from a dynamic inventory script.
brook = BrookInventory()
print(json.dumps(brook.inventory))
| gpl-3.0 |
anti-social/elasticmagic | tests_integ/conftest.py | 1 | 1312 | import uuid
import pytest
from elasticmagic import Document, Field
from elasticmagic.types import Text
def pytest_addoption(parser):
    """Register the --es-url CLI option (Elasticsearch endpoint) with pytest."""
    parser.addoption("--es-url", action="store", default="localhost:9200")
def pytest_generate_tests(metafunc):
    """Parametrize tests that declare an `es_url` fixture with the CLI value."""
    # This is called for every test. Only get/set command line arguments
    # if the argument is specified in the list of test "fixturenames".
    option_value = metafunc.config.option.es_url
    if 'es_url' in metafunc.fixturenames:
        metafunc.parametrize("es_url", [option_value])
class Car(Document):
    """Elasticmagic document mapping used as a fixture type by these tests."""
    __doc_type__ = 'car'
    name = Field(Text())
@pytest.fixture
def index_name():
    """Return a unique, disposable index name for a single test."""
    # First chunk of a UUID4 is enough to avoid collisions between runs.
    return 'test-{}'.format(str(uuid.uuid4()).split('-')[0])
@pytest.fixture
def car_docs():
    """Yield a small pair of Car documents for basic indexing tests."""
    yield [
        Car(_id=1, name='Lightning McQueen'),
        Car(_id=2, name='Sally Carerra'),
    ]
@pytest.fixture
def all_car_docs():
    """Yield the full set of Car documents used by search/pagination tests."""
    yield [
        Car(_id=1, name='Lightning McQueen'),
        Car(_id=2, name='Sally Carerra'),
        Car(_id=3, name='Doc Hudson'),
        Car(_id=4, name='Ramone'),
        Car(_id=5, name='Luigi'),
        Car(_id=6, name='Guido'),
        Car(_id=7, name='Flo'),
        Car(_id=8, name='Sarge'),
        Car(_id=9, name='Sheriff'),
        Car(_id=10, name='Fillmore'),
        Car(_id=11, name='Mack'),
    ]
| apache-2.0 |
rjschof/gem5 | src/arch/x86/isa/insts/x87/arithmetic/division.py | 41 | 2926 | # Copyright (c) 2007 The Hewlett-Packard Development Company
# All rights reserved.
#
# The license below extends only to copyright in the software and shall
# not be construed as granting a license to any other intellectual
# property including but not limited to intellectual property relating
# to a hardware implementation of the functionality of the software
# licensed hereunder. You may use the software subject to the license
# terms below provided that you ensure that this notice is replicated
# unmodified and in its entirety in all distributions of the software,
# modified or unmodified, in source code or in binary form.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met: redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer;
# redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution;
# neither the name of the copyright holders nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# Authors: Gabe Black
microcode = '''
def macroop FDIV1_R
{
divfp st(0), st(0), sti
};
def macroop FDIV1_M
{
ldfp87 ufp1, seg, sib, disp
divfp st(0), st(0), ufp1
};
def macroop FDIV1_P
{
rdip t7
ldfp87 ufp1, seg, riprel, disp
divfp st(0), st(0), ufp1
};
def macroop FDIV2_R
{
divfp sti, sti, st(0)
};
def macroop FDIV2_M
{
ldfp87 ufp1, seg, sib, disp
divfp st(0), st(0), ufp1
};
def macroop FDIV2_P
{
rdip t7
ldfp87 ufp1, seg, riprel, disp
divfp st(0), st(0), ufp1
};
def macroop FDIVP
{
divfp st(1), st(1), st(0), spm=1
};
def macroop FDIVP_R
{
divfp sti, sti, st(0), spm=1
};
def macroop FDIVP_M
{
fault "std::make_shared<UnimpInstFault>()"
};
def macroop FDIVP_P
{
fault "std::make_shared<UnimpInstFault>()"
};
# FIDIV
# FDIVR
# FDIVRP
# FIDIVR
'''
| bsd-3-clause |
dookyoonhan/mean1 | node_modules/meanio/node_modules/npm/node_modules/node-gyp/gyp/PRESUBMIT.py | 496 | 3373 | # Copyright (c) 2012 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Top-level presubmit script for GYP.
See http://dev.chromium.org/developers/how-tos/depottools/presubmit-scripts
for more details about the presubmit API built into gcl.
"""
# Files excluded from pylint checking entirely (see RunPylint below).
PYLINT_BLACKLIST = [
    # TODO: fix me.
    # From SCons, not done in google style.
    'test/lib/TestCmd.py',
    'test/lib/TestCommon.py',
    'test/lib/TestGyp.py',
    # Needs style fix.
    'pylib/gyp/generator/xcode.py',
]
# Pylint warning/error codes suppressed project-wide (passed to RunPylint).
PYLINT_DISABLED_WARNINGS = [
    # TODO: fix me.
    # Many tests include modules they don't use.
    'W0611',
    # Include order doesn't properly include local files?
    'F0401',
    # Some use of built-in names.
    'W0622',
    # Some unused variables.
    'W0612',
    # Operator not preceded/followed by space.
    'C0323',
    'C0322',
    # Unnecessary semicolon.
    'W0301',
    # Unused argument.
    'W0613',
    # String has no effect (docstring in wrong place).
    'W0105',
    # Comma not followed by space.
    'C0324',
    # Access to a protected member.
    'W0212',
    # Bad indent.
    'W0311',
    # Line too long.
    'C0301',
    # Undefined variable.
    'E0602',
    # Not exception type specified.
    'W0702',
    # No member of that name.
    'E1101',
    # Dangerous default {}.
    'W0102',
    # Others, too many to sort.
    'W0201', 'W0232', 'E1103', 'W0621', 'W0108', 'W0223', 'W0231',
    'R0201', 'E0101', 'C0321',
    # ************* Module copy
    # W0104:427,12:_test.odict.__setitem__: Statement seems to have no effect
    'W0104',
]
def CheckChangeOnUpload(input_api, output_api):
  """Presubmit hook run when a change is uploaded for review.

  Upload-time checking is limited to the standard project-wide checks.
  """
  results = list(input_api.canned_checks.PanProjectChecks(
      input_api, output_api))
  return results
def CheckChangeOnCommit(input_api, output_api):
  """Presubmit hook run at commit time.

  Runs the project-wide checks with a license-header validation, verifies
  the tree is open, and runs pylint over the repository.
  Returns a list of presubmit result objects.
  """
  report = []

  # Accept any year number from 2009 to the current year.
  current_year = int(input_api.time.strftime('%Y'))
  allowed_years = (str(s) for s in reversed(range(2009, current_year + 1)))
  years_re = '(' + '|'.join(allowed_years) + ')'

  # The (c) is deprecated, but tolerate it until it's removed from all files.
  # NOTE(review): renamed local from `license` to `license_header` to stop
  # shadowing the `license` builtin; `range` replaces `xrange` (identical
  # semantics here, and Python 3 compatible).
  license_header = (
      r'.*? Copyright (\(c\) )?%(year)s Google Inc\. All rights reserved\.\n'
      r'.*? Use of this source code is governed by a BSD-style license that '
      r'can be\n'
      r'.*? found in the LICENSE file\.\n'
  ) % {
      'year': years_re,
  }

  report.extend(input_api.canned_checks.PanProjectChecks(
      input_api, output_api, license_header=license_header))
  report.extend(input_api.canned_checks.CheckTreeIsOpen(
      input_api, output_api,
      'http://gyp-status.appspot.com/status',
      'http://gyp-status.appspot.com/current'))

  import os
  import sys
  old_sys_path = sys.path
  try:
    # Make pylib and the test helpers importable while pylint runs.
    sys.path = ['pylib', 'test/lib'] + sys.path
    blacklist = PYLINT_BLACKLIST
    if sys.platform == 'win32':
      blacklist = [os.path.normpath(x).replace('\\', '\\\\')
                   for x in PYLINT_BLACKLIST]
    report.extend(input_api.canned_checks.RunPylint(
        input_api,
        output_api,
        black_list=blacklist,
        disabled_warnings=PYLINT_DISABLED_WARNINGS))
  finally:
    sys.path = old_sys_path
  return report
def GetPreferredTrySlaves():
  """Return the default try bots for GYP changes, one per platform."""
  platforms = ('win32', 'win64', 'linux', 'mac', 'android')
  return ['gyp-%s' % platform for platform in platforms]
| mit |
melvinw/pktgen | controller/job_pb2.py | 1 | 9157 | # Generated by the protocol buffer compiler. DO NOT EDIT!
# source: job.proto
import sys
_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
from google.protobuf import descriptor_pb2
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
DESCRIPTOR = _descriptor.FileDescriptor(
name='job.proto',
package='',
serialized_pb=_b('\n\tjob.proto\"\xc5\x02\n\x03Job\x12\x0f\n\x07tx_rate\x18\x01 \x01(\x05\x12\x10\n\x08\x64uration\x18\x02 \x01(\x05\x12\x0e\n\x06warmup\x18\x03 \x01(\x05\x12\x11\n\tnum_flows\x18\x04 \x01(\x05\x12\x10\n\x08port_min\x18\x05 \x01(\x05\x12\x10\n\x08port_max\x18\x06 \x01(\x05\x12\x10\n\x08size_min\x18\x07 \x01(\x05\x12\x10\n\x08size_max\x18\x08 \x01(\x05\x12\x10\n\x08life_min\x18\t \x01(\x02\x12\x10\n\x08life_max\x18\n \x01(\x02\x12\x11\n\trandomize\x18\x0b \x01(\x08\x12\x0f\n\x07latency\x18\x0c \x01(\x08\x12\x0e\n\x06online\x18\r \x01(\x08\x12\x0c\n\x04stop\x18\x0e \x01(\x08\x12\r\n\x05print\x18\x0f \x01(\x08\x12\x0b\n\x03tcp\x18\x10 \x01(\x08\x12\x0f\n\x07src_mac\x18\x11 \x01(\t\x12\x0f\n\x07\x64st_mac\x18\x12 \x01(\t\x12\x0c\n\x04port\x18\x13 \x01(\t\"\x1d\n\x07Request\x12\x12\n\x04jobs\x18\x01 \x03(\x0b\x32\x04.Job')
)
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
_JOB = _descriptor.Descriptor(
name='Job',
full_name='Job',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='tx_rate', full_name='Job.tx_rate', index=0,
number=1, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='duration', full_name='Job.duration', index=1,
number=2, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='warmup', full_name='Job.warmup', index=2,
number=3, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='num_flows', full_name='Job.num_flows', index=3,
number=4, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='port_min', full_name='Job.port_min', index=4,
number=5, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='port_max', full_name='Job.port_max', index=5,
number=6, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='size_min', full_name='Job.size_min', index=6,
number=7, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='size_max', full_name='Job.size_max', index=7,
number=8, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='life_min', full_name='Job.life_min', index=8,
number=9, type=2, cpp_type=6, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='life_max', full_name='Job.life_max', index=9,
number=10, type=2, cpp_type=6, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='randomize', full_name='Job.randomize', index=10,
number=11, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='latency', full_name='Job.latency', index=11,
number=12, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='online', full_name='Job.online', index=12,
number=13, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='stop', full_name='Job.stop', index=13,
number=14, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='print', full_name='Job.print', index=14,
number=15, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='tcp', full_name='Job.tcp', index=15,
number=16, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='src_mac', full_name='Job.src_mac', index=16,
number=17, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='dst_mac', full_name='Job.dst_mac', index=17,
number=18, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='port', full_name='Job.port', index=18,
number=19, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
extension_ranges=[],
oneofs=[
],
serialized_start=14,
serialized_end=339,
)
_REQUEST = _descriptor.Descriptor(
name='Request',
full_name='Request',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='jobs', full_name='Request.jobs', index=0,
number=1, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
extension_ranges=[],
oneofs=[
],
serialized_start=341,
serialized_end=370,
)
_REQUEST.fields_by_name['jobs'].message_type = _JOB
DESCRIPTOR.message_types_by_name['Job'] = _JOB
DESCRIPTOR.message_types_by_name['Request'] = _REQUEST
Job = _reflection.GeneratedProtocolMessageType('Job', (_message.Message,), dict(
DESCRIPTOR = _JOB,
__module__ = 'job_pb2'
# @@protoc_insertion_point(class_scope:Job)
))
_sym_db.RegisterMessage(Job)
Request = _reflection.GeneratedProtocolMessageType('Request', (_message.Message,), dict(
DESCRIPTOR = _REQUEST,
__module__ = 'job_pb2'
# @@protoc_insertion_point(class_scope:Request)
))
_sym_db.RegisterMessage(Request)
# @@protoc_insertion_point(module_scope)
| bsd-3-clause |
syphar/django | django/contrib/gis/db/backends/base/models.py | 67 | 4111 | from django.contrib.gis import gdal
from django.utils import six
from django.utils.encoding import python_2_unicode_compatible
@python_2_unicode_compatible
class SpatialRefSysMixin(object):
    """
    Mixin used by the database-dependent SpatialRefSys models to share the
    convenience accessors built on top of a GDAL SpatialReference.
    """

    @property
    def srs(self):
        """
        Return a GDAL SpatialReference object for this row.

        The object is built lazily (from WKT, falling back to PROJ.4) and
        cached; callers always receive a clone of the cached instance.
        """
        # TODO: Is caching really necessary here? Is complexity worth it?
        if not hasattr(self, '_srs'):
            last_error = None
            for attr in ('wkt', 'proj4text'):
                try:
                    self._srs = gdal.SpatialReference(getattr(self, attr))
                    break
                except Exception as error:
                    last_error = error
            else:
                raise Exception('Could not get OSR SpatialReference from WKT: %s\nError:\n%s' % (self.wkt, last_error))
        return self._srs.clone()

    @property
    def ellipsoid(self):
        """
        Return the ellipsoid parameters as a tuple:
        (semimajor axis, semiminor axis, inverse flattening).
        """
        return self.srs.ellipsoid

    @property
    def name(self):
        "Return the projection name."
        return self.srs.name

    @property
    def spheroid(self):
        "Return the spheroid name for this spatial reference."
        return self.srs['spheroid']

    @property
    def datum(self):
        "Return the datum for this spatial reference."
        return self.srs['datum']

    @property
    def projected(self):
        "True if this spatial reference is projected."
        return self.srs.projected

    @property
    def local(self):
        "True if this spatial reference is local."
        return self.srs.local

    @property
    def geographic(self):
        "True if this spatial reference is geographic."
        return self.srs.geographic

    @property
    def linear_name(self):
        "Return the linear units name."
        return self.srs.linear_name

    @property
    def linear_units(self):
        "Return the linear units."
        return self.srs.linear_units

    @property
    def angular_name(self):
        "Return the name of the angular units."
        return self.srs.angular_name

    @property
    def angular_units(self):
        "Return the angular units."
        return self.srs.angular_units

    @property
    def units(self):
        "Return a (units, units name) tuple; (None, None) when unknown."
        if self.projected or self.local:
            return (self.linear_units, self.linear_name)
        if self.geographic:
            return (self.angular_units, self.angular_name)
        return (None, None)

    @classmethod
    def get_units(cls, wkt):
        """
        Return a tuple of (unit_value, unit_name) for the given WKT without
        using any of the database fields.
        """
        return gdal.SpatialReference(wkt).units

    @classmethod
    def get_spheroid(cls, wkt, string=True):
        """
        Retrieve the `SPHEROID[..]` parameters from the given WKT; used by
        GeometryField on initialization.  When ``string`` is true, the result
        is formatted in a form acceptable by PostGIS.
        """
        srs = gdal.SpatialReference(wkt)
        sphere_name = srs['spheroid']
        sphere_params = srs.ellipsoid
        if not string:
            return sphere_name, sphere_params
        if len(sphere_params) == 3:
            radius, flattening = sphere_params[0], sphere_params[2]
        else:
            radius, flattening = sphere_params
        return 'SPHEROID["%s",%s,%s]' % (sphere_name, radius, flattening)

    def __str__(self):
        """
        Return the string representation, a 'pretty' OGC WKT.
        """
        return six.text_type(self.srs)
| bsd-3-clause |
mlhenderson/data_api | lib/doekbase/data_api/tests/test_assembly_api.py | 4 | 12967 | """
Unit tests for assembly
"""
import logging
from unittest import skipUnless
from . import shared
from doekbase.data_api import exceptions
from doekbase.data_api.sequence.assembly.api import AssemblyAPI
from doekbase.data_api.sequence.assembly.api import AssemblyClientAPI
from doekbase.data_api.sequence.assembly.api import _KBaseGenomes_ContigSet
from doekbase.data_api.sequence.assembly.api import _Assembly
_log = logging.getLogger(__name__)
# Workspace references for the two representations of the same assembly:
# "new" = typed Assembly object, "old" = legacy KBaseGenomes.ContigSet.
assembly_new = "ReferenceGenomeAnnotations/kb|g.166819_assembly"
assembly_old = "OriginalReferenceGenomes/kb|g.166819.contigset"
# API handles populated by setup(); None until module setup has run.
t_new = None
t_new_e = None
t_old = None
t_old_e = None
t_client_old = None
t_client_new = None
# True when the configured shock service URL is not an HTTP endpoint.
g_skip_shock = False
def setup():
    """Module-level nose setup: build API handles for both assembly types.

    Creates the generic AssemblyAPI facade, the type-specific implementation
    objects, and the thrift service clients for both the new Assembly object
    and the legacy ContigSet object.
    """
    shared.setup()
    global t_new, t_old, t_new_e, t_old_e, t_client_new, t_client_old, g_skip_shock
    # Shock-backed tests only make sense against a real HTTP endpoint.
    g_skip_shock = not shared.services["shock_service_url"].startswith("http")
    t_new = AssemblyAPI(shared.services, shared.token, assembly_new)
    t_new_e = _Assembly(shared.services, shared.token, assembly_new)
    t_old = AssemblyAPI(shared.services, shared.token, assembly_old)
    t_old_e = _KBaseGenomes_ContigSet(shared.services, shared.token, assembly_old)
    t_client_new = AssemblyClientAPI(shared.services["assembly_service_url"], shared.token, assembly_new)
    t_client_old = AssemblyClientAPI(shared.services["assembly_service_url"], shared.token, assembly_old)
###### New Assembly Type tests
# Each test exercises the same call three ways: the AssemblyAPI facade
# (t_new), the type-specific implementation (t_new_e), and the thrift
# client (t_client_new), asserting agreement in type (and, where the
# assertions say so, in value).

@skipUnless(shared.can_connect, 'Cannot connect to workspace')
def test_get_assembly_id_new():
    """Assembly id is a non-empty string from all three interfaces."""
    _log.info("Input {}".format(assembly_new))
    assembly_id = t_new.get_assembly_id()
    _log.info("Output {}".format(assembly_id))
    assert isinstance(assembly_id, basestring) and len(assembly_id) > 0
    assembly_id_e = t_new_e.get_assembly_id()
    assert isinstance(assembly_id_e, basestring) and len(assembly_id_e) > 0
    assembly_id_c = t_client_new.get_assembly_id()
    assert isinstance(assembly_id_c, basestring) and len(assembly_id_c) > 0

@skipUnless(shared.can_connect, 'Cannot connect to workspace')
def test_get_genome_annotations_new():
    """Annotation references come back as a list."""
    _log.info("Input {}".format(assembly_new))
    annotations = t_new.get_genome_annotations()
    _log.info("Output {}".format(annotations))
    assert isinstance(annotations, list)
    annotations_e = t_new_e.get_genome_annotations()
    assert isinstance(annotations_e, list)
    assert annotations == annotations_e
    annotations_c = t_client_new.get_genome_annotations()
    assert isinstance(annotations_c, list)

@skipUnless(shared.can_connect, 'Cannot connect to workspace')
def test_get_external_source_info_new():
    """External source info is a dict, identical across interfaces."""
    _log.info("Input {}".format(assembly_new))
    info = t_new.get_external_source_info()
    _log.info("Output {}".format(info))
    assert isinstance(info, dict)
    info_e = t_new_e.get_external_source_info()
    assert isinstance(info_e, dict) and info == info_e
    info_c = t_client_new.get_external_source_info()
    assert isinstance(info_c, dict) and info == info_c

@skipUnless(shared.can_connect, 'Cannot connect to workspace')
def test_get_stats_new():
    """Summary statistics dict matches across interfaces."""
    _log.info("Input {}".format(assembly_new))
    stats = t_new.get_stats()
    _log.info("Output {}".format(stats))
    assert isinstance(stats, dict)
    stats_e = t_new_e.get_stats()
    assert isinstance(stats_e, dict) and stats == stats_e
    stats_c = t_client_new.get_stats()
    assert isinstance(stats_c, dict) and stats == stats_c

@skipUnless(shared.can_connect, 'Cannot connect to workspace')
def test_get_number_contigs_new():
    """Contig count is a positive int, equal across interfaces."""
    _log.info("Input {}".format(assembly_new))
    count = t_new.get_number_contigs()
    _log.info("Output {}".format(count))
    assert isinstance(count, int) and count > 0
    count_e = t_new_e.get_number_contigs()
    assert isinstance(count_e, int) and count_e > 0 and count == count_e
    count_c = t_client_new.get_number_contigs()
    assert isinstance(count_c, int) and count_c > 0 and count == count_c

@skipUnless(shared.can_connect, 'Cannot connect to workspace')
def test_get_gc_content_new():
    """Overall GC content is a float above a 0.1 sanity floor."""
    _log.info("Input {}".format(assembly_new))
    gc_content = t_new.get_gc_content()
    _log.info("Output {}".format(gc_content))
    assert isinstance(gc_content, float) and gc_content > 0.1
    gc_content_e = t_new_e.get_gc_content()
    assert isinstance(gc_content_e, float) and gc_content_e > 0.1
    gc_content_c = t_client_new.get_gc_content()
    assert isinstance(gc_content_c, float) and gc_content_c > 0.1

@skipUnless(shared.can_connect, 'Cannot connect to workspace')
def test_get_dna_size_new():
    """Total DNA size is a positive int, equal across interfaces."""
    _log.info("Input {}".format(assembly_new))
    dna_size = t_new.get_dna_size()
    _log.info("Output {}".format(dna_size))
    assert isinstance(dna_size, int) and dna_size > 0
    dna_size_e = t_new_e.get_dna_size()
    assert isinstance(dna_size_e, int) and dna_size > 0 and dna_size == dna_size_e
    dna_size_c = t_client_new.get_dna_size()
    assert isinstance(dna_size_c, int) and dna_size > 0 and dna_size == dna_size_c

@skipUnless(shared.can_connect, 'Cannot connect to workspace')
def test_get_contig_lengths_new():
    """Per-contig lengths come back as a non-empty dict."""
    _log.info("Input {}".format(assembly_new))
    contig_lengths = t_new.get_contig_lengths()
    _log.info("Output {}".format(len(contig_lengths)))
    assert isinstance(contig_lengths, dict) and len(contig_lengths) > 0
    contig_lengths_e = t_new_e.get_contig_lengths()
    assert isinstance(contig_lengths_e, dict) and len(contig_lengths_e) > 0
    contig_lengths_c = t_client_new.get_contig_lengths()
    assert isinstance(contig_lengths_c, dict) and len(contig_lengths_c) > 0

@skipUnless(shared.can_connect, 'Cannot connect to workspace')
def test_get_contig_gc_content_new():
    """Per-contig GC content comes back as a non-empty dict."""
    _log.info("Input {}".format(assembly_new))
    contig_gc_content = t_new.get_contig_gc_content()
    _log.info("Output {}".format(len(contig_gc_content)))
    assert isinstance(contig_gc_content, dict) and len(contig_gc_content) > 0
    contig_gc_content_e = t_new_e.get_contig_gc_content()
    assert isinstance(contig_gc_content_e, dict) and len(contig_gc_content_e) > 0
    contig_gc_content_c = t_client_new.get_contig_gc_content()
    assert isinstance(contig_gc_content_c, dict) and len(contig_gc_content_c) > 0

@skipUnless(shared.can_connect, 'Cannot connect to workspace')
def test_get_contig_ids_new():
    """Contig ids come back as a non-empty list."""
    _log.info("Input {}".format(assembly_new))
    contig_ids = t_new.get_contig_ids()
    _log.info("Output {}".format(len(contig_ids)))
    assert isinstance(contig_ids, list) and len(contig_ids) > 0
    contig_ids_e = t_new_e.get_contig_ids()
    assert isinstance(contig_ids_e, list) and len(contig_ids_e) > 0
    contig_ids_c = t_client_new.get_contig_ids()
    assert isinstance(contig_ids_c, list) and len(contig_ids_c) > 0
@skipUnless(shared.can_connect, 'Cannot connect to workspace')
def test_get_contigs_new():
    """Fetch full contig data for the new Assembly type via all interfaces."""
    # BUG FIX(review): g_skip_shock is assigned in setup(), which nose runs
    # *after* decorators are evaluated at import time, so the original
    # "@skipUnless(... and not g_skip_shock ...)" always saw the initial
    # False and never skipped.  Check the flag at run time instead.
    if g_skip_shock:
        from unittest import SkipTest
        raise SkipTest('Shock service is not available')
    _log.info("Input {}".format(assembly_new))
    contigs = t_new.get_contigs()
    _log.info("Output {}".format(len(contigs)))
    assert isinstance(contigs, dict) and len(contigs) > 0
    contigs_e = t_new_e.get_contigs()
    assert isinstance(contigs_e, dict) and len(contigs_e) > 0
    contigs_c = t_client_new.get_contigs()
    assert isinstance(contigs_c, dict) and len(contigs_c) > 0
###### Old Assembly Type tests
# Same battery as above, run against the legacy KBaseGenomes.ContigSet
# typed object.  Note: fewer cross-interface value-equality assertions
# are made here than for the new type.

@skipUnless(shared.can_connect, 'Cannot connect to workspace')
def test_get_assembly_id_old():
    """Assembly id is a non-empty string, equal across interfaces."""
    _log.info("Input {}".format(assembly_old))
    assembly_id = t_old.get_assembly_id()
    _log.info("Output {}".format(assembly_id))
    assert isinstance(assembly_id, basestring) and len(assembly_id) > 0
    assembly_id_e = t_old_e.get_assembly_id()
    assert isinstance(assembly_id_e, basestring) and len(assembly_id_e) > 0 and assembly_id == assembly_id_e
    assembly_id_c = t_client_old.get_assembly_id()
    assert isinstance(assembly_id_c, basestring) and len(assembly_id_c) > 0 and assembly_id == assembly_id_c

@skipUnless(shared.can_connect, 'Cannot connect to workspace')
def test_get_genome_annotations_old():
    """Annotation references come back as a list."""
    _log.info("Input {}".format(assembly_old))
    annotations = t_old.get_genome_annotations()
    _log.info("Output {}".format(annotations))
    assert isinstance(annotations, list)
    annotations_e = t_old_e.get_genome_annotations()
    assert isinstance(annotations_e, list)
    annotations_c = t_client_old.get_genome_annotations()
    assert isinstance(annotations_c, list)

@skipUnless(shared.can_connect, 'Cannot connect to workspace')
def test_get_external_source_info_old():
    """External source info is a dict from all three interfaces."""
    _log.info("Input {}".format(assembly_old))
    info = t_old.get_external_source_info()
    _log.info("Output {}".format(info))
    assert isinstance(info, dict)
    info_e = t_old_e.get_external_source_info()
    assert isinstance(info_e, dict)
    info_c = t_client_old.get_external_source_info()
    assert isinstance(info_c, dict)

@skipUnless(shared.can_connect, 'Cannot connect to workspace')
def test_get_stats_old():
    """Summary statistics dict from all three interfaces."""
    _log.info("Input {}".format(assembly_old))
    stats = t_old.get_stats()
    _log.info("Output {}".format(stats))
    assert isinstance(stats, dict)
    stats_e = t_old_e.get_stats()
    assert isinstance(stats_e, dict)
    stats_c = t_client_old.get_stats()
    assert isinstance(stats_c, dict)

@skipUnless(shared.can_connect, 'Cannot connect to workspace')
def test_get_number_contigs_old():
    """Contig count is a positive int, equal across interfaces."""
    _log.info("Input {}".format(assembly_old))
    count = t_old.get_number_contigs()
    _log.info("Output {}".format(count))
    assert isinstance(count, int) and count > 0
    count_e = t_old_e.get_number_contigs()
    assert isinstance(count_e, int) and count_e > 0 and count == count_e
    count_c = t_client_old.get_number_contigs()
    assert isinstance(count_c, int) and count_c > 0 and count == count_c

@skipUnless(shared.can_connect, 'Cannot connect to workspace')
def test_get_gc_content_old():
    """Overall GC content is a float above a 0.1 sanity floor."""
    _log.info("Input {}".format(assembly_old))
    gc_content = t_old.get_gc_content()
    _log.info("Output {}".format(gc_content))
    assert isinstance(gc_content, float) and gc_content > 0.1
    gc_content_e = t_old_e.get_gc_content()
    assert isinstance(gc_content_e, float) and gc_content_e > 0.1
    gc_content_c = t_client_old.get_gc_content()
    assert isinstance(gc_content_c, float) and gc_content_c > 0.1

@skipUnless(shared.can_connect, 'Cannot connect to workspace')
def test_get_dna_size_old():
    """Total DNA size is a positive int, equal across interfaces."""
    _log.info("Input {}".format(assembly_old))
    dna_size = t_old.get_dna_size()
    _log.info("Output {}".format(dna_size))
    assert isinstance(dna_size, int) and dna_size > 0
    dna_size_e = t_old_e.get_dna_size()
    assert isinstance(dna_size_e, int) and dna_size_e > 0 and dna_size == dna_size_e
    dna_size_c = t_client_old.get_dna_size()
    assert isinstance(dna_size_c, int) and dna_size_c > 0 and dna_size == dna_size_c

@skipUnless(shared.can_connect, 'Cannot connect to workspace')
def test_get_contig_lengths_old():
    """Per-contig lengths come back as a non-empty dict."""
    _log.info("Input {}".format(assembly_old))
    contig_lengths = t_old.get_contig_lengths()
    _log.info("Output {}".format(len(contig_lengths)))
    assert isinstance(contig_lengths, dict) and len(contig_lengths) > 0
    contig_lengths_e = t_old_e.get_contig_lengths()
    assert isinstance(contig_lengths_e, dict) and len(contig_lengths_e) > 0
    contig_lengths_c = t_client_old.get_contig_lengths()
    assert isinstance(contig_lengths_c, dict) and len(contig_lengths_c) > 0

@skipUnless(shared.can_connect, 'Cannot connect to workspace')
def test_get_contig_gc_content_old():
    """Per-contig GC content comes back as a non-empty dict."""
    _log.info("Input {}".format(assembly_old))
    contig_gc_content = t_old.get_contig_gc_content()
    _log.info("Output {}".format(len(contig_gc_content)))
    assert isinstance(contig_gc_content, dict) and len(contig_gc_content) > 0
    contig_gc_content_e = t_old_e.get_contig_gc_content()
    assert isinstance(contig_gc_content_e, dict) and len(contig_gc_content_e) > 0
    contig_gc_content_c = t_client_old.get_contig_gc_content()
    assert isinstance(contig_gc_content_c, dict) and len(contig_gc_content_c) > 0

@skipUnless(shared.can_connect, 'Cannot connect to workspace')
def test_get_contig_ids_old():
    """Contig ids come back as a non-empty list."""
    _log.info("Input {}".format(assembly_old))
    contig_ids = t_old.get_contig_ids()
    _log.info("Output {}".format(len(contig_ids)))
    assert isinstance(contig_ids, list) and len(contig_ids) > 0
    contig_ids_e = t_old_e.get_contig_ids()
    assert isinstance(contig_ids_e, list) and len(contig_ids_e) > 0
    contig_ids_c = t_client_old.get_contig_ids()
    assert isinstance(contig_ids_c, list) and len(contig_ids_c) > 0
@skipUnless(shared.can_connect, 'Cannot connect to workspace')
def test_get_contigs_old():
    """Fetch full contig data for the legacy ContigSet via all interfaces."""
    _log.info("Input {}".format(assembly_old))
    contigs = t_old.get_contigs()
    _log.info("Output {}".format(len(contigs)))
    assert isinstance(contigs, dict) and len(contigs) > 0
    contigs_e = t_old_e.get_contigs()
    assert isinstance(contigs_e, dict) and len(contigs_e) > 0
    contigs_c = t_client_old.get_contigs()
    # BUG FIX(review): the final assert had stray "| mit |" text fused onto
    # it, which would raise a NameError at run time; junk removed.
    assert isinstance(contigs_c, dict) and len(contigs_c) > 0
likaiwalkman/phantomjs | src/qt/qtwebkit/Tools/Scripts/webkitpy/tool/bot/flakytestreporter.py | 116 | 10213 | # Copyright (c) 2010 Google Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import codecs
import logging
import os.path
from webkitpy.common.net.layouttestresults import path_for_layout_test, LayoutTestResults
from webkitpy.common.config import urls
from webkitpy.tool.bot.botinfo import BotInfo
from webkitpy.tool.grammar import plural, pluralize, join_with_separators
_log = logging.getLogger(__name__)
class FlakyTestReporter(object):
    def __init__(self, tool, bot_name):
        """Create a reporter that files/updates flaky-test bugs as *bot_name*."""
        self._tool = tool
        self._bot_name = bot_name
        # FIXME: Use the real port object
        self._bot_info = BotInfo(tool, tool.deprecated_port().name())
    def _author_emails_for_test(self, flaky_test):
        """Return the bugzilla emails of recent committer-authors of *flaky_test*."""
        test_path = path_for_layout_test(flaky_test)
        commit_infos = self._tool.checkout().recent_commit_infos_for_files([test_path])
        # This ignores authors which are not committers because we don't have their bugzilla_email.
        return set([commit_info.author().bugzilla_email() for commit_info in commit_infos if commit_info.author()])
    def _bugzilla_email(self):
        """Return the bugzilla username this bot is authenticated as.

        Side effect: authenticates against bugzilla if not already done.
        """
        # FIXME: This is kinda a funny way to get the bugzilla email,
        # we could also just create a Credentials object directly
        # but some of the Credentials logic is in bugzilla.py too...
        self._tool.bugs.authenticate()
        return self._tool.bugs.username
# FIXME: This should move into common.config
_bot_emails = set([
"commit-queue@webkit.org", # commit-queue
"eseidel@chromium.org", # old commit-queue
"webkit.review.bot@gmail.com", # style-queue, sheriff-bot, CrLx/Gtk EWS
"buildbot@hotmail.com", # Win EWS
# Mac EWS currently uses eric@webkit.org, but that's not normally a bot
])
def _lookup_bug_for_flaky_test(self, flaky_test):
bugs = self._tool.bugs.queries.fetch_bugs_matching_search(search_string=flaky_test)
if not bugs:
return None
# Match any bugs which are from known bots or the email this bot is using.
allowed_emails = self._bot_emails | set([self._bugzilla_email])
bugs = filter(lambda bug: bug.reporter_email() in allowed_emails, bugs)
if not bugs:
return None
if len(bugs) > 1:
# FIXME: There are probably heuristics we could use for finding
# the right bug instead of the first, like open vs. closed.
_log.warn("Found %s %s matching '%s' filed by a bot, using the first." % (pluralize('bug', len(bugs)), [bug.id() for bug in bugs], flaky_test))
return bugs[0]
def _view_source_url_for_test(self, test_path):
return urls.view_source_url("LayoutTests/%s" % test_path)
def _create_bug_for_flaky_test(self, flaky_test, author_emails, latest_flake_message):
format_values = {
'test': flaky_test,
'authors': join_with_separators(sorted(author_emails)),
'flake_message': latest_flake_message,
'test_url': self._view_source_url_for_test(flaky_test),
'bot_name': self._bot_name,
}
title = "Flaky Test: %(test)s" % format_values
description = """This is an automatically generated bug from the %(bot_name)s.
%(test)s has been flaky on the %(bot_name)s.
%(test)s was authored by %(authors)s.
%(test_url)s
%(flake_message)s
The bots will update this with information from each new failure.
If you believe this bug to be fixed or invalid, feel free to close. The bots will re-open if the flake re-occurs.
If you would like to track this test fix with another bug, please close this bug as a duplicate. The bots will follow the duplicate chain when making future comments.
""" % format_values
master_flake_bug = 50856 # MASTER: Flaky tests found by the commit-queue
return self._tool.bugs.create_bug(title, description,
component="Tools / Tests",
cc=",".join(author_emails),
blocked="50856")
# This is over-engineered, but it makes for pretty bug messages.
def _optional_author_string(self, author_emails):
    """Format author_emails as " (author: a)" / " (authors: a and b)", or "" if empty."""
    if not author_emails:
        return ""
    if len(author_emails) > 1:
        heading_string = plural('author')
    else:
        heading_string = 'author'
    authors_string = join_with_separators(sorted(author_emails))
    return " (%s: %s)" % (heading_string, authors_string)
def _latest_flake_message(self, flaky_result, patch):
    """Build the comment text describing the most recent flake of this test."""
    joined_failures = ", ".join(failure.message() for failure in flaky_result.failures)
    flake_message = "The %s just saw %s flake (%s) while processing attachment %s on bug %s." % (
        self._bot_name, flaky_result.test_name, joined_failures, patch.id(), patch.bug_id())
    return "%s\n%s" % (flake_message, self._bot_info.summary_text())
def _results_diff_path_for_test(self, test_path):
# FIXME: This is a big hack. We should get this path from results.json
# except that old-run-webkit-tests doesn't produce a results.json
# so we just guess at the file path.
(test_path_root, _) = os.path.splitext(test_path)
return "%s-diffs.txt" % test_path_root
def _follow_duplicate_chain(self, bug):
    """Walk duplicate-of links from a closed bug to the bug it was finally duped to."""
    current = bug
    while current.is_closed() and current.duplicate_of():
        current = self._tool.bugs.fetch_bug(current.duplicate_of())
    return current
def _update_bug_for_flaky_test(self, bug, latest_flake_message):
    """Record the latest flake as a new comment on the existing tracking bug."""
    self._tool.bugs.post_comment_to_bug(bug.id(), latest_flake_message)
# This method is needed because our archive paths include a leading tmp/layout-test-results
def _find_in_archive(self, path, archive):
for archived_path in archive.namelist():
# Archives are currently created with full paths.
if archived_path.endswith(path):
return archived_path
return None
def _attach_failure_diff(self, flake_bug_id, flaky_test, results_archive_zip):
    """Attach the test's failure diff (or, failing that, the whole results
    archive) to the flake bug identified by flake_bug_id."""
    results_diff_path = self._results_diff_path_for_test(flaky_test)
    # Check to make sure that the path makes sense.
    # Since we're not actually getting this path from the results.html
    # there is a chance it's wrong.
    bot_id = self._tool.status_server.bot_id or "bot"
    archive_path = self._find_in_archive(results_diff_path, results_archive_zip)
    if archive_path:
        results_diff = results_archive_zip.read(archive_path)
        description = "Failure diff from %s" % bot_id
        self._tool.bugs.add_attachment_to_bug(flake_bug_id, results_diff, description, filename="failure.diff")
    else:
        # Fall back to uploading everything so the failure is still debuggable.
        _log.warn("%s does not exist in results archive, uploading entire archive." % results_diff_path)
        description = "Archive of layout-test-results from %s" % bot_id
        # results_archive is a ZipFile object, grab the File object (.fp) to pass to Mechanize for uploading.
        results_archive_file = results_archive_zip.fp
        # Rewind the file object to start (since Mechanize won't do that automatically)
        # See https://bugs.webkit.org/show_bug.cgi?id=54593
        results_archive_file.seek(0)
        self._tool.bugs.add_attachment_to_bug(flake_bug_id, results_archive_file, description, filename="layout-test-results.zip")
def report_flaky_tests(self, patch, flaky_test_results, results_archive):
    """File or update one Bugzilla bug per flaky test, attach its failure
    diff, and post a summary comment on the bug the patch belongs to."""
    message = "The %s encountered the following flaky tests while processing attachment %s:\n\n" % (self._bot_name, patch.id())
    for flaky_result in flaky_test_results:
        flaky_test = flaky_result.test_name
        bug = self._lookup_bug_for_flaky_test(flaky_test)
        latest_flake_message = self._latest_flake_message(flaky_result, patch)
        author_emails = self._author_emails_for_test(flaky_test)
        if not bug:
            _log.info("Bug does not already exist for %s, creating." % flaky_test)
            flake_bug_id = self._create_bug_for_flaky_test(flaky_test, author_emails, latest_flake_message)
        else:
            bug = self._follow_duplicate_chain(bug)
            # FIXME: Ideally we'd only make one comment per flake, not two. But that's not possible
            # in all cases (e.g. when reopening), so for now file attachment and comment are separate.
            self._update_bug_for_flaky_test(bug, latest_flake_message)
            flake_bug_id = bug.id()
        self._attach_failure_diff(flake_bug_id, flaky_test, results_archive)
        message += "%s bug %s%s\n" % (flaky_test, flake_bug_id, self._optional_author_string(author_emails))
    message += "The %s is continuing to process your patch." % self._bot_name
    self._tool.bugs.post_comment_to_bug(patch.bug_id(), message)
| bsd-3-clause |
rembo10/headphones | lib/unidecode/x0a0.py | 253 | 4428 | data = (
'it', # 0x00
'ix', # 0x01
'i', # 0x02
'ip', # 0x03
'iet', # 0x04
'iex', # 0x05
'ie', # 0x06
'iep', # 0x07
'at', # 0x08
'ax', # 0x09
'a', # 0x0a
'ap', # 0x0b
'uox', # 0x0c
'uo', # 0x0d
'uop', # 0x0e
'ot', # 0x0f
'ox', # 0x10
'o', # 0x11
'op', # 0x12
'ex', # 0x13
'e', # 0x14
'wu', # 0x15
'bit', # 0x16
'bix', # 0x17
'bi', # 0x18
'bip', # 0x19
'biet', # 0x1a
'biex', # 0x1b
'bie', # 0x1c
'biep', # 0x1d
'bat', # 0x1e
'bax', # 0x1f
'ba', # 0x20
'bap', # 0x21
'buox', # 0x22
'buo', # 0x23
'buop', # 0x24
'bot', # 0x25
'box', # 0x26
'bo', # 0x27
'bop', # 0x28
'bex', # 0x29
'be', # 0x2a
'bep', # 0x2b
'but', # 0x2c
'bux', # 0x2d
'bu', # 0x2e
'bup', # 0x2f
'burx', # 0x30
'bur', # 0x31
'byt', # 0x32
'byx', # 0x33
'by', # 0x34
'byp', # 0x35
'byrx', # 0x36
'byr', # 0x37
'pit', # 0x38
'pix', # 0x39
'pi', # 0x3a
'pip', # 0x3b
'piex', # 0x3c
'pie', # 0x3d
'piep', # 0x3e
'pat', # 0x3f
'pax', # 0x40
'pa', # 0x41
'pap', # 0x42
'puox', # 0x43
'puo', # 0x44
'puop', # 0x45
'pot', # 0x46
'pox', # 0x47
'po', # 0x48
'pop', # 0x49
'put', # 0x4a
'pux', # 0x4b
'pu', # 0x4c
'pup', # 0x4d
'purx', # 0x4e
'pur', # 0x4f
'pyt', # 0x50
'pyx', # 0x51
'py', # 0x52
'pyp', # 0x53
'pyrx', # 0x54
'pyr', # 0x55
'bbit', # 0x56
'bbix', # 0x57
'bbi', # 0x58
'bbip', # 0x59
'bbiet', # 0x5a
'bbiex', # 0x5b
'bbie', # 0x5c
'bbiep', # 0x5d
'bbat', # 0x5e
'bbax', # 0x5f
'bba', # 0x60
'bbap', # 0x61
'bbuox', # 0x62
'bbuo', # 0x63
'bbuop', # 0x64
'bbot', # 0x65
'bbox', # 0x66
'bbo', # 0x67
'bbop', # 0x68
'bbex', # 0x69
'bbe', # 0x6a
'bbep', # 0x6b
'bbut', # 0x6c
'bbux', # 0x6d
'bbu', # 0x6e
'bbup', # 0x6f
'bburx', # 0x70
'bbur', # 0x71
'bbyt', # 0x72
'bbyx', # 0x73
'bby', # 0x74
'bbyp', # 0x75
'nbit', # 0x76
'nbix', # 0x77
'nbi', # 0x78
'nbip', # 0x79
'nbiex', # 0x7a
'nbie', # 0x7b
'nbiep', # 0x7c
'nbat', # 0x7d
'nbax', # 0x7e
'nba', # 0x7f
'nbap', # 0x80
'nbot', # 0x81
'nbox', # 0x82
'nbo', # 0x83
'nbop', # 0x84
'nbut', # 0x85
'nbux', # 0x86
'nbu', # 0x87
'nbup', # 0x88
'nburx', # 0x89
'nbur', # 0x8a
'nbyt', # 0x8b
'nbyx', # 0x8c
'nby', # 0x8d
'nbyp', # 0x8e
'nbyrx', # 0x8f
'nbyr', # 0x90
'hmit', # 0x91
'hmix', # 0x92
'hmi', # 0x93
'hmip', # 0x94
'hmiex', # 0x95
'hmie', # 0x96
'hmiep', # 0x97
'hmat', # 0x98
'hmax', # 0x99
'hma', # 0x9a
'hmap', # 0x9b
'hmuox', # 0x9c
'hmuo', # 0x9d
'hmuop', # 0x9e
'hmot', # 0x9f
'hmox', # 0xa0
'hmo', # 0xa1
'hmop', # 0xa2
'hmut', # 0xa3
'hmux', # 0xa4
'hmu', # 0xa5
'hmup', # 0xa6
'hmurx', # 0xa7
'hmur', # 0xa8
'hmyx', # 0xa9
'hmy', # 0xaa
'hmyp', # 0xab
'hmyrx', # 0xac
'hmyr', # 0xad
'mit', # 0xae
'mix', # 0xaf
'mi', # 0xb0
'mip', # 0xb1
'miex', # 0xb2
'mie', # 0xb3
'miep', # 0xb4
'mat', # 0xb5
'max', # 0xb6
'ma', # 0xb7
'map', # 0xb8
'muot', # 0xb9
'muox', # 0xba
'muo', # 0xbb
'muop', # 0xbc
'mot', # 0xbd
'mox', # 0xbe
'mo', # 0xbf
'mop', # 0xc0
'mex', # 0xc1
'me', # 0xc2
'mut', # 0xc3
'mux', # 0xc4
'mu', # 0xc5
'mup', # 0xc6
'murx', # 0xc7
'mur', # 0xc8
'myt', # 0xc9
'myx', # 0xca
'my', # 0xcb
'myp', # 0xcc
'fit', # 0xcd
'fix', # 0xce
'fi', # 0xcf
'fip', # 0xd0
'fat', # 0xd1
'fax', # 0xd2
'fa', # 0xd3
'fap', # 0xd4
'fox', # 0xd5
'fo', # 0xd6
'fop', # 0xd7
'fut', # 0xd8
'fux', # 0xd9
'fu', # 0xda
'fup', # 0xdb
'furx', # 0xdc
'fur', # 0xdd
'fyt', # 0xde
'fyx', # 0xdf
'fy', # 0xe0
'fyp', # 0xe1
'vit', # 0xe2
'vix', # 0xe3
'vi', # 0xe4
'vip', # 0xe5
'viet', # 0xe6
'viex', # 0xe7
'vie', # 0xe8
'viep', # 0xe9
'vat', # 0xea
'vax', # 0xeb
'va', # 0xec
'vap', # 0xed
'vot', # 0xee
'vox', # 0xef
'vo', # 0xf0
'vop', # 0xf1
'vex', # 0xf2
'vep', # 0xf3
'vut', # 0xf4
'vux', # 0xf5
'vu', # 0xf6
'vup', # 0xf7
'vurx', # 0xf8
'vur', # 0xf9
'vyt', # 0xfa
'vyx', # 0xfb
'vy', # 0xfc
'vyp', # 0xfd
'vyrx', # 0xfe
'vyr', # 0xff
)
| gpl-3.0 |
lerker/cupydle | cupydle/dnn/viejo/Neurons.py | 1 | 3537 | import numpy as np
__author__ = "Nelson Ponzoni"
__copyright__ = "Copyright 2015-2016, Proyecto Final de Carrera"
__credits__ = ["Nelson Ponzoni"]
__license__ = "GPL"
__version__ = "20160101"
__maintainer__ = "Nelson Ponzoni"
__email__ = "npcuadra@gmail.com"
__status__ = "Production"
"""
Neurons class, this is abstraction of various neurons, a pack of neurons, that compose a neural layer
"""
class Neurons(object):
    """A pack of neurons composing one neural-network layer.

    Thin wrapper around a 2-D numpy matrix providing elementwise
    arithmetic, matrix products and activation helpers.  A one-element
    ``shape`` is promoted to a column vector (``cols == 1``).
    """

    def __init__(self, mat, shape):
        if len(shape) == 1:
            shape += (1,)  # promote to a column vector
        if isinstance(mat, list):
            mat = np.array(mat)
        # The matrix must end up with the requested shape.
        self.matrix = mat.reshape(shape)
        self.rows = shape[0]
        self.cols = shape[1]

    # Intrinsic properties of the neuron pack.
    @property
    def shape(self):
        """(rows, cols) of the underlying matrix."""
        return self.rows, self.cols

    @property
    def count(self):
        """Total number of scalar weights stored."""
        rows, cols = self.shape
        return rows * cols

    def __str__(self):
        return str(self.matrix)

    def __mul__(self, other):
        if isinstance(other, Neurons):
            other = other.matrix
        return Neurons(self.matrix * other, self.shape)

    def __div__(self, other):
        if isinstance(other, Neurons):
            other = other.matrix
        return Neurons(self.matrix / other, self.shape)

    # Bug fix: under Python 3 the "/" operator dispatches to __truediv__,
    # so __div__ (the Python 2 name) was never invoked there.  The rest of
    # this file targets Python 3 (see the map() note in activation), so
    # alias it explicitly; Python 2 callers keep using __div__ unchanged.
    __truediv__ = __div__

    def __sub__(self, other):
        if isinstance(other, Neurons):
            other = other.matrix
        return Neurons(self.matrix - other, self.shape)

    def __add__(self, other):
        if isinstance(other, Neurons):
            other = other.matrix
        return Neurons(self.matrix + other, self.shape)

    def __pow__(self, power):
        return Neurons(self.matrix ** power, self.shape)

    # Basic operations.
    def mul_elemwise(self, array):
        """Elementwise product with another Neurons or array-like."""
        if isinstance(array, Neurons):
            array = array.matrix
        return Neurons(np.multiply(self.matrix, array), self.shape)

    def mul_array(self, array):
        """Matrix product self.matrix . array, wrapped as Neurons."""
        if isinstance(array, Neurons):
            array = array.matrix
        arrshape = array.shape
        if len(arrshape) == 1:
            arrshape += (1,)  # add the missing second dimension
        shape = self.rows, arrshape[1]
        return Neurons(self.matrix.dot(array), shape)

    def sum_array(self, array):
        """Elementwise sum with another Neurons or array-like."""
        if isinstance(array, Neurons):
            array = array.matrix
        return Neurons(self.matrix + array, self.shape)

    def dot(self, vec):
        """Raw numpy dot product (returns an ndarray, not Neurons)."""
        return self.matrix.dot(vec)

    def outer(self, array):
        """Outer product with another Neurons or array-like."""
        if isinstance(array, Neurons):
            array = array.matrix
        res = np.outer(self.matrix, array)
        shape = res.shape
        return Neurons(res, shape)

    def transpose(self):
        return Neurons(self.matrix.transpose(), self.shape[::-1])

    def loss(self, fun, y):
        """Apply loss function fun(matrix, y); returns fun's raw result."""
        return fun(self.matrix, y)

    def loss_d(self, fun, y):
        """Apply loss derivative fun(matrix, y), wrapped as Neurons."""
        return Neurons(fun(self.matrix, y), self.shape)

    def activation(self, fun):
        # map() is a lazy iterator in Python 3, so materialize it first; see
        # http://stackoverflow.com/questions/28524378/convert-map-object-to-numpy-array-in-python-3
        return Neurons(list(map(lambda x: fun(x), self.matrix)), self.shape)

    def softmax(self):
        # Implementation tip from (http://ufldl.stanford.edu/wiki/index.php/Exercise:Softmax_Regression)
        x = self.matrix
        # instead: first shift the values of f so that the highest number is 0:
        # NOTE(review): builtin max()/sum() iterate over rows, so this assumes
        # the matrix is a column vector (shape (n, 1)) -- verify callers.
        x = x - max(x)
        softmat = np.exp(x) / (sum(np.exp(x)))
        return Neurons(softmat, self.shape)
| apache-2.0 |
pipsiscool/audacity | lib-src/lv2/lilv/waflib/Tools/compiler_c.py | 343 | 1759 | #! /usr/bin/env python
# encoding: utf-8
# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file
import os,sys,imp,types
from waflib.Tools import ccroot
from waflib import Utils,Configure
from waflib.Logs import debug
c_compiler={'win32':['msvc','gcc'],'cygwin':['gcc'],'darwin':['gcc'],'aix':['xlc','gcc'],'linux':['gcc','icc'],'sunos':['suncc','gcc'],'irix':['gcc','irixcc'],'hpux':['gcc'],'gnu':['gcc'],'java':['gcc','msvc','icc'],'default':['gcc'],}
def configure(conf):
    """Probe each candidate C compiler in turn and keep the first that loads.

    Candidates come from the --check-c-compiler option (set up in options()
    below); conf.fatal aborts configuration if none of them work.
    """
    try:test_for_compiler=conf.options.check_c_compiler
    except AttributeError:conf.fatal("Add options(opt): opt.load('compiler_c')")
    for compiler in test_for_compiler.split():
        # Stash the environment so a failed probe can be rolled back cleanly.
        conf.env.stash()
        conf.start_msg('Checking for %r (c compiler)'%compiler)
        try:
            conf.load(compiler)
        except conf.errors.ConfigurationError ,e:
            conf.env.revert()
            conf.end_msg(False)
            debug('compiler_c: %r'%e)
        else:
            # The tool loaded; accept it only if it actually set CC.
            if conf.env['CC']:
                conf.end_msg(conf.env.get_flat('CC'))
                conf.env['COMPILER_CC']=compiler
                break
            conf.end_msg(False)
    else:
        conf.fatal('could not configure a c compiler!')
def options(opt):
    """Register the --check-c-compiler option.

    The default candidate list depends on the build platform (see the
    module-level c_compiler table); each candidate tool's own options are
    loaded as well.
    """
    opt.load_special_tools('c_*.py',ban=['c_dumbpreproc.py'])
    global c_compiler
    build_platform=Utils.unversioned_sys_platform()
    # Fall back to the 'default' entry for platforms not in the table.
    possible_compiler_list=c_compiler[build_platform in c_compiler and build_platform or'default']
    test_for_compiler=' '.join(possible_compiler_list)
    cc_compiler_opts=opt.add_option_group("C Compiler Options")
    cc_compiler_opts.add_option('--check-c-compiler',default="%s"%test_for_compiler,help='On this platform (%s) the following C-Compiler will be checked by default: "%s"'%(build_platform,test_for_compiler),dest="check_c_compiler")
    for x in test_for_compiler.split():
        opt.load('%s'%x)
| mit |
3quarterstack/simple_blog | djangoappengine/mapreduce/input_readers.py | 28 | 2569 | from djangoappengine.db.utils import get_cursor, set_cursor, set_config
from google.appengine.api.datastore import Key
from mapreduce.datastore_range_iterators import AbstractKeyRangeIterator, _KEY_RANGE_ITERATORS
from mapreduce.input_readers import AbstractDatastoreInputReader, _get_params, BadReaderParamsError
from mapreduce import util
class DjangoModelIterator(AbstractKeyRangeIterator):
    """Iterates the Django model instances that fall inside one datastore
    key range, resuming from a saved cursor when one is present."""

    def __iter__(self):
        k_range = self._key_range
        # Namespaces are not supported by djangoappengine
        if k_range.namespace:
            return
        model_class = util.for_name(self._query_spec.model_class_path)
        q = model_class.objects.all()
        # Constrain the queryset to the [key_start, key_end] window,
        # honoring the inclusive/exclusive flag on each end of the range.
        if k_range.key_start:
            if k_range.include_start:
                q = q.filter(pk__gte=k_range.key_start.id_or_name())
            else:
                q = q.filter(pk__gt=k_range.key_start.id_or_name())
        if k_range.key_end:
            if k_range.include_end:
                q = q.filter(pk__lte=k_range.key_end.id_or_name())
            else:
                q = q.filter(pk__lt=k_range.key_end.id_or_name())
        q = q.order_by('pk')
        q = set_config(q, batch_size=self._query_spec.batch_size)
        if self._cursor:
            q = set_cursor(q, self._cursor)
        # Keep the queryset around so _get_cursor can checkpoint it.
        self._query = q
        for entity in self._query.iterator():
            yield entity

    def _get_cursor(self):
        # Only meaningful after __iter__ has started; returns the datastore
        # cursor so iteration can be resumed later.
        if self._query is not None:
            return get_cursor(self._query)
# Register the iterator so mapreduce can look it up by class name when
# deserializing a saved query spec.
_KEY_RANGE_ITERATORS[DjangoModelIterator.__name__] = DjangoModelIterator
class DjangoModelInputReader(AbstractDatastoreInputReader):
    """
    An input reader that takes a Django model ('app.models.Model')
    and yields Django model instances
    Note: This ignores all entities not in the default namespace.
    """

    # Use the Django-aware iterator registered above.
    _KEY_RANGE_ITER_CLS = DjangoModelIterator

    @classmethod
    def _get_raw_entity_kind(cls, entity_kind):
        """Returns an datastore entity kind from a Django model."""
        model_class = util.for_name(entity_kind)
        # djangoappengine stores entities under the model's db_table name.
        return model_class._meta.db_table

    @classmethod
    def validate(cls, mapper_spec):
        """Reject unsupported namespace params and unresolvable model paths.

        Raises BadReaderParamsError on invalid mapper parameters.
        """
        super(DjangoModelInputReader, cls).validate(mapper_spec)
        params = _get_params(mapper_spec)
        if cls.NAMESPACE_PARAM in params:
            raise BadReaderParamsError("Namespaces are not supported.")
        entity_kind_name = params[cls.ENTITY_KIND_PARAM]
        try:
            util.for_name(entity_kind_name)
        except ImportError, e:
            raise BadReaderParamsError("Bad entity kind: %s" % e)
| mit |
denilsonsa/django-guardian | guardian/testapp/tests/mixins_test.py | 42 | 5655 | from __future__ import unicode_literals
from django.contrib.contenttypes.models import ContentType
from django.contrib.auth.models import AnonymousUser
from django.core.exceptions import ImproperlyConfigured
from django.core.exceptions import PermissionDenied
from django.http import HttpResponse
from django.test import TestCase
from django.test.client import RequestFactory
from django.views.generic import View
from guardian.compat import get_user_model
from guardian.compat import mock
from guardian.mixins import LoginRequiredMixin
from guardian.mixins import PermissionRequiredMixin
class DatabaseRemovedError(Exception):
    """Raised by RemoveDatabaseView to prove that the view body executed."""
    pass
class RemoveDatabaseView(View):
    """A view whose GET handler always raises: the tests below use it to
    show that permission checks run *before* the view body is executed."""

    def get(self, request, *args, **kwargs):
        raise DatabaseRemovedError("You've just allowed db to be removed!")
class TestView(PermissionRequiredMixin, RemoveDatabaseView):
    """Permission-protected destructive view checked against an object."""
    permission_required = 'contenttypes.change_contenttype'
    object = None # should be set at each tests explicitly
class NoObjectView(PermissionRequiredMixin, RemoveDatabaseView):
    """Same as TestView but with no object: exercises model-level checks."""
    permission_required = 'contenttypes.change_contenttype'
class TestViewMixins(TestCase):
    """Tests for guardian's LoginRequiredMixin and PermissionRequiredMixin."""

    def setUp(self):
        # A throwaway ContentType is used as the object permissions target.
        self.ctype = ContentType.objects.create(name='foo', model='bar',
            app_label='fake-for-guardian-tests')
        self.factory = RequestFactory()
        self.user = get_user_model().objects.create_user(
            'joe', 'joe@doe.com', 'doe')
        self.client.login(username='joe', password='doe')

    def test_permission_is_checked_before_view_is_computed(self):
        """
        This test would fail if permission is checked **after** view is
        actually resolved.
        """
        request = self.factory.get('/')
        request.user = self.user
        # View.object is set
        view = TestView.as_view(object=self.ctype)
        response = view(request)
        self.assertEqual(response.status_code, 302)
        # View.get_object returns object
        TestView.get_object = lambda instance: self.ctype
        view = TestView.as_view()
        response = view(request)
        self.assertEqual(response.status_code, 302)
        del TestView.get_object

    def test_permission_is_checked_before_view_is_computed_perm_denied_raised(self):
        """
        This test would fail if permission is checked **after** view is
        actually resolved.
        """
        request = self.factory.get('/')
        request.user = self.user
        view = TestView.as_view(raise_exception=True, object=self.ctype)
        with self.assertRaises(PermissionDenied):
            view(request)

    def test_permission_required_view_configured_wrongly(self):
        """
        This test would fail if permission is checked **after** view is
        actually resolved.
        """
        request = self.factory.get('/')
        request.user = self.user
        request.user.add_obj_perm('change_contenttype', self.ctype)
        # permission_required=None is a misconfiguration, not a denial.
        view = TestView.as_view(permission_required=None, object=self.ctype)
        with self.assertRaises(ImproperlyConfigured):
            view(request)

    def test_permission_required(self):
        """
        This test would fail if permission is checked **after** view is
        actually resolved.
        """
        request = self.factory.get('/')
        request.user = self.user
        request.user.add_obj_perm('change_contenttype', self.ctype)
        view = TestView.as_view(object=self.ctype)
        # DatabaseRemovedError proves the view body actually ran.
        with self.assertRaises(DatabaseRemovedError):
            view(request)

    def test_permission_required_no_object(self):
        """
        This test would fail if permission is checked on a view's
        object when it has none
        """
        request = self.factory.get('/')
        request.user = self.user
        request.user.add_obj_perm('change_contenttype', self.ctype)
        view = NoObjectView.as_view()
        response = view(request)
        self.assertEqual(response.status_code, 302)

    def test_permission_required_as_list(self):
        """
        This test would fail if permission is checked **after** view is
        actually resolved.
        """
        global TestView

        class SecretView(TestView):
            on_permission_check_fail = mock.Mock()

        request = self.factory.get('/')
        request.user = self.user
        request.user.add_obj_perm('change_contenttype', self.ctype)
        SecretView.permission_required = ['contenttypes.change_contenttype',
            'contenttypes.add_contenttype']
        view = SecretView.as_view(object=self.ctype)
        # Only one of the two required perms is held -> redirect + callback.
        response = view(request)
        self.assertEqual(response.status_code, 302)
        SecretView.on_permission_check_fail.assert_called_once_with(request,
            response, obj=self.ctype)
        request.user.add_obj_perm('add_contenttype', self.ctype)
        with self.assertRaises(DatabaseRemovedError):
            view(request)

    def test_login_required_mixin(self):
        """Anonymous users are redirected to login_url; logged-in users pass."""

        class SecretView(LoginRequiredMixin, View):
            redirect_field_name = 'foobar'
            login_url = '/let-me-in/'

            def get(self, request):
                return HttpResponse('secret-view')

        request = self.factory.get('/some-secret-page/')
        request.user = AnonymousUser()
        view = SecretView.as_view()
        response = view(request)
        self.assertEqual(response.status_code, 302)
        self.assertEqual(response['Location'],
            '/let-me-in/?foobar=/some-secret-page/')
        request.user = self.user
        response = view(request)
        self.assertEqual(response.status_code, 200)
        self.assertEqual(response.content, b'secret-view')
| bsd-2-clause |
IllusionRom-deprecated/android_platform_external_chromium_org | chrome/tools/build/win/create_installer_archive.py | 23 | 25503 | #!/usr/bin/env python
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Script to create Chrome Installer archive.
This script is used to create an archive of all the files required for a
Chrome install in appropriate directory structure. It reads chrome.release
file as input, creates chrome.7z archive, compresses setup.exe and
generates packed_files.txt for mini_installer project.
"""
import ConfigParser
import glob
import optparse
import os
import shutil
import subprocess
import sys
# Names of the intermediate directories, executables and file suffixes used
# while building the installer archive; see the functions below for how each
# one is consumed.
ARCHIVE_DIR = "installer_archive"
# suffix to uncompresed full archive file, appended to options.output_name
ARCHIVE_SUFFIX = ".7z"
BSDIFF_EXEC = "bsdiff.exe"
CHROME_DIR = "Chrome-bin"
CHROME_PATCH_FILE_SUFFIX = "_patch" # prefixed by options.output_name
# compressed full archive suffix, will be prefixed by options.output_name
COMPRESSED_ARCHIVE_SUFFIX = ".packed.7z"
COMPRESSED_FILE_EXT = ".packed.7z" # extension of patch archive file
COURGETTE_EXEC = "courgette.exe"
MINI_INSTALLER_INPUT_FILE = "packed_files.txt"
PATCH_FILE_EXT = '.diff'
SETUP_EXEC = "setup.exe"
SETUP_PATCH_FILE_PREFIX = "setup_patch"
TEMP_ARCHIVE_DIR = "temp_installer_archive"
VERSION_FILE = "VERSION"
def BuildVersion(build_dir):
  """Returns the full build version string constructed from information in
  VERSION_FILE. Any segment not found in that file will default to '0'.
  """
  # Each VERSION line is "KEY=value"; all four keys are 5 characters long.
  segments = {'MAJOR': 0, 'MINOR': 0, 'BUILD': 0, 'PATCH': 0}
  version_path = os.path.join(build_dir, '../../chrome', VERSION_FILE)
  for line in open(version_path, 'r'):
    line = line.rstrip()
    for key in segments:
      prefix = key + '='
      if line.startswith(prefix):
        segments[key] = line[len(prefix):]
        break
  return '%s.%s.%s.%s' % (segments['MAJOR'], segments['MINOR'],
                          segments['BUILD'], segments['PATCH'])
def CompressUsingLZMA(build_dir, compressed_file, input_file):
  """Compresses input_file into compressed_file with 7za's LZMA/BCJ2 settings,
  replacing any pre-existing output file."""
  lzma_exec = GetLZMAExec(build_dir)
  cmd = [lzma_exec,
         'a', '-t7z',
          # Flags equivalent to -mx9 (ultra) but with the bcj2 turned on (exe
          # pre-filter). This results in a ~2.3MB decrease in installer size on
          # a 24MB installer.
          # Additionally, these settings reflect a 7zip 4.42 and up change in
          # the definition of -mx9, increasting the dicionary size moving to
          # 26bit = 64MB. This results in an additional ~3.5MB decrease.
          # Older 7zip versions can support these settings, as these changes
          # rely on existing functionality in the lzma format.
          '-m0=BCJ2',
          '-m1=LZMA:d27:fb128',
          '-m2=LZMA:d22:fb128:mf=bt2',
          '-m3=LZMA:d22:fb128:mf=bt2',
          '-mb0:1',
          '-mb0s1:2',
          '-mb0s2:3',
          compressed_file,
          input_file,]
  # 7za appends to an existing archive, so remove the old output first.
  if os.path.exists(compressed_file):
    os.remove(compressed_file)
  RunSystemCommand(cmd)
def CopyAllFilesToStagingDir(config, distribution, staging_dir, build_dir,
                             enable_hidpi, enable_touch_ui):
  """Copies the files required for installer archive.
  Copies all common files required for various distributions of Chromium and
  also files for the specific Chromium build specified by distribution.
  """
  CopySectionFilesToStagingDir(config, 'GENERAL', staging_dir, build_dir)
  if distribution:
    # Distribution names may carry a leading underscore; the config section
    # name is the bare, upper-cased distribution.
    if len(distribution) > 1 and distribution[0] == '_':
      distribution = distribution[1:]
    CopySectionFilesToStagingDir(config, distribution.upper(),
                                 staging_dir, build_dir)
  if enable_hidpi == '1':
    CopySectionFilesToStagingDir(config, 'HIDPI', staging_dir, build_dir)
  if enable_touch_ui == '1':
    CopySectionFilesToStagingDir(config, 'TOUCH', staging_dir, build_dir)
def CopySectionFilesToStagingDir(config, section, staging_dir, src_dir):
  """Copies installer archive files specified in section from src_dir to
  staging_dir. This method reads section from config and copies all the
  files specified from src_dir to staging dir.
  """
  for option in config.options(section):
    # Options ending in 'dir' are interpolation variables, not file globs.
    if option.endswith('dir'):
      continue
    dst_dir = os.path.join(staging_dir, config.get(section, option))
    src_paths = glob.glob(os.path.join(src_dir, option))
    if src_paths and not os.path.exists(dst_dir):
      os.makedirs(dst_dir)
    for src_path in src_paths:
      dst_path = os.path.join(dst_dir, os.path.basename(src_path))
      # First writer wins; later sections never overwrite staged files.
      if not os.path.exists(dst_path):
        shutil.copy(src_path, dst_dir)
def GenerateDiffPatch(options, orig_file, new_file, patch_file):
  """Writes a binary diff from orig_file to new_file into patch_file, using
  courgette or bsdiff depending on options.diff_algorithm."""
  if (options.diff_algorithm == "COURGETTE"):
    exe_file = os.path.join(options.last_chrome_installer, COURGETTE_EXEC)
    # NOTE(review): this branch builds the command as a single shell-style
    # string while the bsdiff branch uses an argument list; subprocess.call
    # accepts both on Windows but quoting semantics differ -- confirm.
    cmd = '%s -gen "%s" "%s" "%s"' % (exe_file, orig_file, new_file, patch_file)
  else:
    exe_file = os.path.join(options.build_dir, BSDIFF_EXEC)
    cmd = [exe_file, orig_file, new_file, patch_file,]
  RunSystemCommand(cmd)
def GetLZMAExec(build_dir):
  """Returns the path to the checked-in 7za.exe, relative to build_dir."""
  return os.path.join(build_dir, "..", "..", "third_party",
                      "lzma_sdk", "Executable", "7za.exe")
def GetPrevVersion(build_dir, temp_dir, last_chrome_installer, output_name):
  """Returns the version string of the previous build, or '' if there is no
  previous installer.

  Extracts Chrome-bin/<version>/chrome.dll from the previous uncompressed
  archive into temp_dir and reads the version from the directory name.
  """
  if not last_chrome_installer:
    return ''
  lzma_exec = GetLZMAExec(build_dir)
  prev_archive_file = os.path.join(last_chrome_installer,
                                   output_name + ARCHIVE_SUFFIX)
  cmd = [lzma_exec,
         'x',
         '-o"%s"' % temp_dir,
         prev_archive_file,
         'Chrome-bin/*/chrome.dll',]
  RunSystemCommand(cmd)
  dll_path = glob.glob(os.path.join(temp_dir, 'Chrome-bin', '*', 'chrome.dll'))
  # The version is the name of the directory that contains chrome.dll.
  return os.path.split(os.path.split(dll_path[0])[0])[1]
def MakeStagingDirectories(staging_dir):
  """Creates clean staging paths for the installer archive.

  Returns (file_path, temp_file_path): the archive staging directory and a
  scratch directory for temporary extraction.  Either directory is deleted
  first if it already exists.

  Bug fix: the original joined TEMP_ARCHIVE_DIR for *both* paths, deleting
  and re-creating the same directory twice and returning two identical
  paths (so temporary extraction would collide with the staged tree).  The
  first path should use ARCHIVE_DIR, which was otherwise defined but unused.
  """
  file_path = os.path.join(staging_dir, ARCHIVE_DIR)
  if os.path.exists(file_path):
    shutil.rmtree(file_path)
  os.makedirs(file_path)

  temp_file_path = os.path.join(staging_dir, TEMP_ARCHIVE_DIR)
  if os.path.exists(temp_file_path):
    shutil.rmtree(temp_file_path)
  os.makedirs(temp_file_path)
  return (file_path, temp_file_path)
def Readconfig(input_file, current_version):
  """Reads config information from input file after setting default values
  of global variables.

  Returns a SafeConfigParser whose interpolation variables ChromeDir and
  VersionDir are available to every section of the .release file.
  """
  variables = {}
  variables['ChromeDir'] = CHROME_DIR
  variables['VersionDir'] = os.path.join(variables['ChromeDir'],
                                         current_version)
  config = ConfigParser.SafeConfigParser(variables)
  config.read(input_file)
  return config
def RunSystemCommand(cmd, **kw):
  """Runs |cmd| via subprocess.call and raises on a non-zero exit code.

  |cmd| may be an argument list or a single command string; extra keyword
  arguments are forwarded to subprocess.call.
  """
  print 'Running', cmd
  exit_code = subprocess.call(cmd, **kw)
  if (exit_code != 0):
    raise Exception("Error while running cmd: %s, exit_code: %s" %
                    (cmd, exit_code))
def CreateArchiveFile(options, staging_dir, current_version, prev_version):
  """Creates a new installer archive file after deleting any existing old file.

  Builds the uncompressed chrome.7z from the staging dir, then either
  compresses it in full or (when last_chrome_installer is set) produces a
  compressed binary patch against the previous archive.  Returns the base
  name of the file to embed in the mini_installer.
  """
  # First create an uncompressed archive file for the current build (chrome.7z)
  lzma_exec = GetLZMAExec(options.build_dir)
  archive_file = os.path.join(options.output_dir,
                              options.output_name + ARCHIVE_SUFFIX)
  cmd = [lzma_exec,
         'a',
         '-t7z',
         archive_file,
         os.path.join(staging_dir, CHROME_DIR),
         '-mx0',]
  # There doesnt seem to be any way in 7za.exe to override existing file so
  # we always delete before creating a new one.
  if not os.path.exists(archive_file):
    RunSystemCommand(cmd)
  elif options.skip_rebuild_archive != "true":
    os.remove(archive_file)
    RunSystemCommand(cmd)

  # Do not compress the archive in developer (component) builds.
  if options.component_build == '1':
    compressed_file = os.path.join(
        options.output_dir, options.output_name + COMPRESSED_ARCHIVE_SUFFIX)
    if os.path.exists(compressed_file):
      os.remove(compressed_file)
    return os.path.basename(archive_file)

  # If we are generating a patch, run bsdiff against previous build and
  # compress the resulting patch file. If this is not a patch just compress the
  # uncompressed archive file.
  patch_name_prefix = options.output_name + CHROME_PATCH_FILE_SUFFIX
  if options.last_chrome_installer:
    prev_archive_file = os.path.join(options.last_chrome_installer,
                                     options.output_name + ARCHIVE_SUFFIX)
    patch_file = os.path.join(options.build_dir, patch_name_prefix +
                              PATCH_FILE_EXT)
    GenerateDiffPatch(options, prev_archive_file, archive_file, patch_file)
    # Patch file names encode both versions, e.g. chrome_patch_B_from_A.
    compressed_archive_file = patch_name_prefix + '_' + \
                              current_version + '_from_' + prev_version + \
                              COMPRESSED_FILE_EXT
    orig_file = patch_file
  else:
    compressed_archive_file = options.output_name + COMPRESSED_ARCHIVE_SUFFIX
    orig_file = archive_file

  compressed_archive_file_path = os.path.join(options.output_dir,
                                              compressed_archive_file)
  CompressUsingLZMA(options.build_dir, compressed_archive_file_path, orig_file)

  return compressed_archive_file
def PrepareSetupExec(options, current_version, prev_version):
  """Prepares setup.exe for bundling in mini_installer based on options.

  Depending on options.setup_exe_format this returns the name of: the raw
  setup.exe (FULL), a compressed binary patch against the previous setup.exe
  (DIFF), or a makecab-compressed setup.ex_ (default).
  """
  if options.setup_exe_format == "FULL":
    setup_file = SETUP_EXEC
  elif options.setup_exe_format == "DIFF":
    if not options.last_chrome_installer:
      raise Exception(
          "To use DIFF for setup.exe, --last_chrome_installer is needed.")
    prev_setup_file = os.path.join(options.last_chrome_installer, SETUP_EXEC)
    new_setup_file = os.path.join(options.build_dir, SETUP_EXEC)
    patch_file = os.path.join(options.build_dir, SETUP_PATCH_FILE_PREFIX +
                              PATCH_FILE_EXT)
    GenerateDiffPatch(options, prev_setup_file, new_setup_file, patch_file)
    setup_file = SETUP_PATCH_FILE_PREFIX + '_' + current_version + \
                 '_from_' + prev_version + COMPRESSED_FILE_EXT
    setup_file_path = os.path.join(options.build_dir, setup_file)
    CompressUsingLZMA(options.build_dir, setup_file_path, patch_file)
  else:
    cmd = ['makecab.exe',
           '/D', 'CompressionType=LZX',
           '/V1',
           '/L', options.output_dir,
           os.path.join(options.build_dir, SETUP_EXEC),]
    # Send useless makecab progress on stdout to the bitbucket.
    RunSystemCommand(cmd, stdout=open(os.devnull, "w"))
    # makecab names the output by replacing the last character with '_'.
    setup_file = SETUP_EXEC[:-1] + "_"
  return setup_file
# Template expanded by CreateResourceInputFile below; the %(...)s keys are
# filled in from the args dict built there.
_RESOURCE_FILE_TEMPLATE = """\
// This file is automatically generated by create_installer_archive.py.
// It contains the resource entries that are going to be linked inside
// mini_installer.exe. For each file to be linked there should be two
// lines:
// - The first line contains the output filename (without path) and the
// type of the resource ('BN' - not compressed , 'BL' - LZ compressed,
// 'B7' - LZMA compressed)
// - The second line contains the path to the input file. Uses '/' to
// separate path components.
%(setup_file)s %(setup_file_resource_type)s
"%(setup_file_path)s"
%(archive_file)s B7
"%(archive_file_path)s"
"""
def CreateResourceInputFile(
    output_dir, setup_format, archive_file, setup_file, resource_file_path):
  """Creates resource input file (packed_files.txt) for mini_installer project.

  This method checks the format of setup.exe being used and according sets
  its resource type.
  """
  # 'BL' (LZ-compressed) is the default; FULL ships setup.exe uncompressed
  # ('BN') and DIFF ships an LZMA-compressed patch ('B7').
  setup_resource_type = "BL"
  if (setup_format == "FULL"):
    setup_resource_type = "BN"
  elif (setup_format == "DIFF"):
    setup_resource_type = "B7"

  # Expand the resource file template.
  args = {
      'setup_file': setup_file,
      'setup_file_resource_type': setup_resource_type,
      'setup_file_path':
          os.path.join(output_dir, setup_file).replace("\\","/"),
      'archive_file': archive_file,
      'archive_file_path':
          os.path.join(output_dir, archive_file).replace("\\","/"),
      }
  resource_file = _RESOURCE_FILE_TEMPLATE % args
  with open(resource_file_path, 'w') as f:
    f.write(resource_file)
# Reads |manifest_name| from |build_dir| and writes |manifest_name| to
# |output_dir| with the same content plus |inserted_string| added just before
# |insert_before|.
def CopyAndAugmentManifest(build_dir, output_dir, manifest_name,
                           inserted_string, insert_before):
  """Copies a manifest, inserting |inserted_string| (newline-delimited)
  immediately before the first occurrence of |insert_before|.

  Raises ValueError when |insert_before| is not found in the manifest.

  Fix: the original used xrange, which does not exist under Python 3;
  enumerate works identically on both Python 2 and 3.
  """
  with open(os.path.join(build_dir, manifest_name), 'r') as f:
    manifest_lines = f.readlines()

  # Locate the first line containing the anchor and the offset within it.
  insert_line = -1
  insert_pos = -1
  for i, line in enumerate(manifest_lines):
    insert_pos = line.find(insert_before)
    if insert_pos != -1:
      insert_line = i
      break
  if insert_line == -1:
    raise ValueError('Could not find {0} in the manifest:\n{1}'.format(
        insert_before, ''.join(manifest_lines)))
  old = manifest_lines[insert_line]
  manifest_lines[insert_line] = (old[:insert_pos] + '\n' + inserted_string +
                                 '\n' + old[insert_pos:])
  with open(os.path.join(output_dir, manifest_name), 'w') as f:
    f.write(''.join(manifest_lines))
def CopyIfChanged(src, target_dir):
  """Copies |src| into |target_dir| unless an identical copy already exists.

  Skipping the write when the bytes already match avoids clobbering a
  runtime DLL that an incomplete part of the build still holds locked.
  See http://crbug.com/305877 for details."""
  assert os.path.isdir(target_dir)
  dest = os.path.join(target_dir, os.path.basename(src))
  if not os.path.exists(dest):
    shutil.copyfile(src, dest)
    return
  # Compare contents fully in memory; these files are known to be small
  # (roughly 1-2M), so buffering them completely is acceptable.
  with open(src, 'rb') as source_handle:
    source_bytes = source_handle.read()
  with open(dest, 'rb') as dest_handle:
    dest_bytes = dest_handle.read()
  if source_bytes != dest_bytes:
    # Reaching this point should almost never happen (it would mean the
    # contents of e.g. msvcr100d.dll changed); the copy may still raise if
    # the destination is locked.
    shutil.copyfile(src, dest)
# Copy the relevant CRT DLLs to |build_dir|. We copy DLLs from all versions
# of VS installed to make sure we have the correct CRT version, unused DLLs
# should not conflict with the others anyways.
def CopyVisualStudioRuntimeDLLs(build_dir, target_arch):
  """Copies the MSVC CRT DLLs matching |target_arch| into |build_dir|.

  DLLs from every installed Visual Studio version are considered so the
  correct CRT version is found; the unused extras are expected not to
  conflict. Falls back to the DLLs installed in the Windows system
  directories when no Visual Studio redist directory exists (winsdk-only
  setups). Emits warnings (but does not abort) when nothing is found.
  """
  # Debug vs. Release is inferred from the build directory's name; anything
  # not starting with 'Debug' is treated as a Release build.
  is_debug = os.path.basename(build_dir).startswith('Debug')
  if not is_debug and not os.path.basename(build_dir).startswith('Release'):
    print ("Warning: could not determine build configuration from "
           "output directory, assuming Release build.")
  crt_dlls = []
  sys_dll_dir = None
  if is_debug:
    crt_dlls = glob.glob(
        "C:/Program Files (x86)/Microsoft Visual Studio */VC/redist/"
        "Debug_NonRedist/" + target_arch + "/Microsoft.*.DebugCRT/*.dll")
  else:
    crt_dlls = glob.glob(
        "C:/Program Files (x86)/Microsoft Visual Studio */VC/redist/" +
        target_arch + "/Microsoft.*.CRT/*.dll")
  # Also handle the case where someone is building using only winsdk and
  # doesn't have Visual Studio installed.
  if not crt_dlls:
    if target_arch == 'x64':
      # check we are on a 64bit system by existence of WOW64 dir
      if os.access("C:/Windows/SysWOW64", os.F_OK):
        sys_dll_dir = "C:/Windows/System32"
      else:
        # only support packaging of 64bit installer on 64bit system
        # but this just as bad as not finding DLLs at all so we
        # don't abort here to mirror behavior below
        print ("Warning: could not find x64 CRT DLLs on x86 system.")
    else:
      # On a 64-bit system, 32-bit dlls are in SysWOW64 (don't ask).
      if os.access("C:/Windows/SysWOW64", os.F_OK):
        sys_dll_dir = "C:/Windows/SysWOW64"
      else:
        sys_dll_dir = "C:/Windows/System32"
    if sys_dll_dir is not None:
      if is_debug:
        crt_dlls = glob.glob(os.path.join(sys_dll_dir, "msvc*0d.dll"))
      else:
        crt_dlls = glob.glob(os.path.join(sys_dll_dir, "msvc*0.dll"))
  if not crt_dlls:
    print ("Warning: could not find CRT DLLs to copy to build dir - target "
           "may not run on a system that doesn't have those DLLs.")
  for dll in crt_dlls:
    CopyIfChanged(dll, build_dir)
# Copies component build DLLs and generates required config files and manifests
# in order for chrome.exe and setup.exe to be able to find those DLLs at
# run-time.
# This is meant for developer builds only and should never be used to package
# an official build.
def DoComponentBuildTasks(staging_dir, build_dir, target_arch, current_version):
  """Stages component-build DLLs plus the config files and manifests that
  chrome.exe and setup.exe need to locate those DLLs at run-time.

  Developer builds only; must never be used to package an official build.
  """
  # Get the required directories for the upcoming operations.
  chrome_dir = os.path.join(staging_dir, CHROME_DIR)
  version_dir = os.path.join(chrome_dir, current_version)
  installer_dir = os.path.join(version_dir, 'Installer')
  # |installer_dir| is technically only created post-install, but we need it
  # now to add setup.exe's config and manifest to the archive.
  if not os.path.exists(installer_dir):
    os.mkdir(installer_dir)
  # Copy the VS CRT DLLs to |build_dir|. This must be done before the general
  # copy step below to ensure the CRT DLLs are added to the archive and marked
  # as a dependency in the exe manifests generated below.
  CopyVisualStudioRuntimeDLLs(build_dir, target_arch)
  # Stage all the component DLLs found in |build_dir|. These are all the DLLs
  # which have not already been added to the staged |version_dir| by virtue of
  # chrome.release.
  build_dlls = glob.glob(os.path.join(build_dir, '*.dll'))
  staged_dll_basenames = [os.path.basename(staged_dll) for staged_dll in \
      glob.glob(os.path.join(version_dir, '*.dll'))]
  component_dll_filenames = []
  for component_dll in [dll for dll in build_dlls if \
      os.path.basename(dll) not in staged_dll_basenames]:
    # remoting_*.dll's don't belong in the archive (it doesn't depend on them
    # in gyp). Trying to copy them causes a build race when creating the
    # installer archive in component mode. See: crbug.com/180996
    if os.path.basename(component_dll).startswith('remoting_'):
      continue
    # Copy them to the version_dir (for the version assembly to be able to refer
    # to them below and make sure chrome.exe can find them at runtime).
    shutil.copy(component_dll, version_dir)
    # Also copy them directly to the Installer directory for the installed
    # setup.exe to be able to run (as it doesn't statically link in component
    # DLLs).
    # This makes the archive ~1.5X bigger (Release ~185MB => ~278MB;
    # Debug ~520MB => ~875MB) this is however simpler than any other installer
    # change and doesn't make archive generation itself slower so it only
    # matters when copying the archive to other test machines. This approach
    # can be revised if this is a problem.
    shutil.copy(component_dll, installer_dir)
    component_dll_filenames.append(os.path.basename(component_dll))
  # Copy chrome.exe.manifest as-is. It is required, among other things, to
  # declare a dependency on the version dir assembly.
  shutil.copy(os.path.join(build_dir, 'chrome.exe.manifest'), chrome_dir)
  # Also copy setup.exe.manifest as-is. It is required, among other things, to
  # let setup.exe UAC when it wants to, by specifying that it handles elevation
  # "asInvoker", rather than have Windows auto-elevate it when launched.
  shutil.copy(os.path.join(build_dir, 'setup.exe.manifest'), installer_dir)
  # Augment {version}.manifest to include all component DLLs as part of the
  # assembly it constitutes, which will allow dependents of this assembly to
  # find these DLLs.
  version_assembly_dll_additions = []
  for dll_filename in component_dll_filenames:
    version_assembly_dll_additions.append(" <file name='%s'/>" % dll_filename)
  CopyAndAugmentManifest(build_dir, version_dir,
                         '%s.manifest' % current_version,
                         '\n'.join(version_assembly_dll_additions),
                         '</assembly>')
def main(options):
  """Reads the input file, builds the archive and setup files, and writes
  the mini_installer resource input file.
  """
  current_version = BuildVersion(options.build_dir)
  config = Readconfig(options.input_file, current_version)
  (staging_dir, temp_dir) = MakeStagingDirectories(options.staging_dir)
  prev_version = GetPrevVersion(options.build_dir, temp_dir,
                                options.last_chrome_installer,
                                options.output_name)
  # Preferentially copy the files we can find from the output_dir, as
  # this is where we'll find the Syzygy-optimized executables when
  # building the optimized mini_installer.
  if options.build_dir != options.output_dir:
    CopyAllFilesToStagingDir(config, options.distribution,
                             staging_dir, options.output_dir,
                             options.enable_hidpi, options.enable_touch_ui)
  # Now copy the remainder of the files from the build dir.
  CopyAllFilesToStagingDir(config, options.distribution,
                           staging_dir, options.build_dir,
                           options.enable_hidpi, options.enable_touch_ui)
  if options.component_build == '1':
    DoComponentBuildTasks(staging_dir, options.build_dir,
                          options.target_arch, current_version)
  # Archives are named after the trailing <build>.<patch> pair of the
  # four-component version.
  version_numbers = current_version.split('.')
  current_build_number = version_numbers[2] + '.' + version_numbers[3]
  prev_build_number = ''
  if prev_version:
    version_numbers = prev_version.split('.')
    prev_build_number = version_numbers[2] + '.' + version_numbers[3]
  # Name of the archive file built (for example - chrome.7z or
  # patch-<old_version>-<new_version>.7z or patch-<new_version>.7z
  archive_file = CreateArchiveFile(options, staging_dir,
                                   current_build_number, prev_build_number)
  setup_file = PrepareSetupExec(options,
                                current_build_number, prev_build_number)
  CreateResourceInputFile(options.output_dir, options.setup_exe_format,
                          archive_file, setup_file, options.resource_file_path)
def _ParseOptions():
  """Parses and validates command-line options.

  Returns:
    The populated optparse options object. build_dir, staging_dir and
    input_file are required; output_dir and resource_file_path default to
    values derived from build_dir.
  """
  parser = optparse.OptionParser()
  parser.add_option('-i', '--input_file',
      help='Input file describing which files to archive.')
  parser.add_option('-b', '--build_dir',
      help='Build directory. The paths in input_file are relative to this.')
  parser.add_option('--staging_dir',
      help='Staging directory where intermediate files and directories '
           'will be created')
  parser.add_option('-o', '--output_dir',
      help='The output directory where the archives will be written. '
            'Defaults to the build_dir.')
  parser.add_option('--resource_file_path',
      help='The path where the resource file will be output. '
           'Defaults to %s in the build directory.' %
               MINI_INSTALLER_INPUT_FILE)
  parser.add_option('-d', '--distribution',
      help='Name of Chromium Distribution. Optional.')
  parser.add_option('-s', '--skip_rebuild_archive',
      default="False", help='Skip re-building Chrome.7z archive if it exists.')
  parser.add_option('-l', '--last_chrome_installer',
      help='Generate differential installer. The value of this parameter '
           'specifies the directory that contains base versions of '
           'setup.exe, courgette.exe (if --diff_algorithm is COURGETTE) '
           '& chrome.7z.')
  parser.add_option('-f', '--setup_exe_format', default='COMPRESSED',
      help='How setup.exe should be included {COMPRESSED|DIFF|FULL}.')
  parser.add_option('-a', '--diff_algorithm', default='BSDIFF',
      help='Diff algorithm to use when generating differential patches '
           '{BSDIFF|COURGETTE}.')
  parser.add_option('-n', '--output_name', default='chrome',
      help='Name used to prefix names of generated archives.')
  parser.add_option('--enable_hidpi', default='0',
      help='Whether to include HiDPI resource files.')
  parser.add_option('--enable_touch_ui', default='0',
      help='Whether to include resource files from the "TOUCH" section of the '
           'input file.')
  parser.add_option('--component_build', default='0',
      help='Whether this archive is packaging a component build. This will '
           'also turn off compression of chrome.7z into chrome.packed.7z and '
           'helpfully delete any old chrome.packed.7z in |output_dir|.')
  parser.add_option('--target_arch', default='x86',
      help='Specify the target architecture for installer - this is used '
           'to determine which CRT runtime files to pull and package '
           'with the installer archive {x86|x64}.')
  options, _ = parser.parse_args()
  # Required options: fail fast with a usage error if any is missing.
  if not options.build_dir:
    parser.error('You must provide a build dir.')
  options.build_dir = os.path.normpath(options.build_dir)
  if not options.staging_dir:
    parser.error('You must provide a staging dir.')
  if not options.input_file:
    parser.error('You must provide an input file')
  # Derived defaults.
  if not options.output_dir:
    options.output_dir = options.build_dir
  if not options.resource_file_path:
    options.resource_file_path = os.path.join(options.build_dir,
                                              MINI_INSTALLER_INPUT_FILE)
  return options
if '__main__' == __name__:
  # print() with a single argument behaves identically under Python 2 and
  # Python 3; the previous 'print sys.argv' statement was Python-2-only.
  print(sys.argv)
  sys.exit(main(_ParseOptions()))
| bsd-3-clause |
shepdelacreme/ansible | lib/ansible/modules/network/cli/cli_config.py | 24 | 12544 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# (c) 2018, Ansible by Red Hat, inc
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
# Metadata consumed by Ansible tooling: this module is in 'preview' status
# and is maintained by the network team.
ANSIBLE_METADATA = {'metadata_version': '1.1',
                    'status': ['preview'],
                    'supported_by': 'network'}
DOCUMENTATION = """
---
module: cli_config
version_added: "2.7"
author: "Trishna Guha (@trishnaguha)"
short_description: Push text based configuration to network devices over network_cli
description:
- This module provides platform agnostic way of pushing text based
configuration to network devices over network_cli connection plugin.
options:
config:
description:
- The config to be pushed to the network device. This argument
is mutually exclusive with C(rollback) and either one of the
option should be given as input. The config should have
indentation that the device uses.
type: 'str'
commit:
description:
- The C(commit) argument instructs the module to push the
configuration to the device. This is mapped to module check mode.
type: 'bool'
replace:
description:
- If the C(replace) argument is set to C(yes), it will replace
the entire running-config of the device with the C(config)
argument value. For NXOS devices, C(replace) argument takes
path to the file on the device that will be used for replacing
the entire running-config. Nexus 9K devices only support replace.
Use I(net_put) or I(nxos_file_copy) module to copy the flat file
to remote device and then use set the fullpath to this argument.
type: 'str'
rollback:
description:
- The C(rollback) argument instructs the module to rollback the
current configuration to the identifier specified in the
argument. If the specified rollback identifier does not
exist on the remote device, the module will fail. To rollback
to the most recent commit, set the C(rollback) argument to 0.
This option is mutually exclusive with C(config).
commit_comment:
description:
- The C(commit_comment) argument specifies a text string to be used
when committing the configuration. If the C(commit) argument
is set to False, this argument is silently ignored. This argument
is only valid for the platforms that support commit operation
with comment.
type: 'str'
defaults:
description:
- The I(defaults) argument will influence how the running-config
is collected from the device. When the value is set to true,
the command used to collect the running-config is append with
the all keyword. When the value is set to false, the command
is issued without the all keyword.
default: 'no'
type: 'bool'
multiline_delimiter:
description:
- This argument is used when pushing a multiline configuration
element to the device. It specifies the character to use as
the delimiting character. This only applies to the configuration
action.
type: 'str'
diff_replace:
description:
- Instructs the module on the way to perform the configuration
on the device. If the C(diff_replace) argument is set to I(line)
then the modified lines are pushed to the device in configuration
mode. If the argument is set to I(block) then the entire command
block is pushed to the device in configuration mode if any
line is not correct. Note that this parameter will be ignored if
the platform has onbox diff support.
choices: ['line', 'block', 'config']
diff_match:
description:
- Instructs the module on the way to perform the matching of
the set of commands against the current device config. If C(diff_match)
is set to I(line), commands are matched line by line. If C(diff_match)
is set to I(strict), command lines are matched with respect to position.
If C(diff_match) is set to I(exact), command lines must be an equal match.
Finally, if C(diff_match) is set to I(none), the module will not attempt
to compare the source configuration with the running configuration on the
remote device. Note that this parameter will be ignored if the platform
has onbox diff support.
choices: ['line', 'strict', 'exact', 'none']
diff_ignore_lines:
description:
- Use this argument to specify one or more lines that should be
ignored during the diff. This is used for lines in the configuration
that are automatically updated by the system. This argument takes
a list of regular expressions or exact line matches.
Note that this parameter will be ignored if the platform has onbox
diff support.
"""
EXAMPLES = """
- name: configure device with config
cli_config:
config: "{{ lookup('template', 'basic/config.j2') }}"
- name: configure device with config with defaults enabled
cli_config:
config: "{{ lookup('template', 'basic/config.j2') }}"
defaults: yes
- name: Use diff_match
cli_config:
config: "{{ lookup('file', 'interface_config') }}"
diff_match: none
- name: nxos replace config
cli_config:
replace: 'bootflash:nxoscfg'
- name: commit with comment
cli_config:
config: set system host-name foo
commit_comment: this is a test
"""
RETURN = """
commands:
description: The set of commands that will be pushed to the remote device
returned: always
type: list
sample: ['interface Loopback999', 'no shutdown']
"""
import json
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.connection import Connection
from ansible.module_utils._text import to_text
def validate_args(module, capabilities):
    """Fail the module run if a supplied option is unsupported here.

    Each module option is checked, in a fixed order, against the matching
    capability flag the platform advertises under
    ``capabilities['device_operations']``.
    """
    # (option name, capability flag) pairs, in the same order the original
    # checks ran so the first unsupported option reported stays the same.
    option_capability_pairs = (
        ('replace', 'supports_replace'),
        ('rollback', 'supports_rollback'),
        ('commit_comment', 'supports_commit_comment'),
        ('defaults', 'supports_defaults'),
        ('multiline_delimiter', 'supports_multiline_delimiter'),
        ('diff_replace', 'supports_diff_replace'),
        ('diff_match', 'supports_diff_match'),
        ('diff_ignore_lines', 'supports_diff_ignore_lines'),
    )
    for option_name, capability_flag in option_capability_pairs:
        # 'rollback' is special-cased: 0 is a valid rollback id, so only
        # None means "not requested"; every other option uses truthiness.
        if option_name == 'rollback':
            requested = module.params['rollback'] is not None
        else:
            requested = bool(module.params[option_name])
        if requested and not capabilities['device_operations'][capability_flag]:
            module.fail_json(
                msg='%s is not supported on this platform' % option_name)
def run(module, capabilities, connection, candidate, running):
    """Applies the requested configuration change and builds the result dict.

    Handles, in priority order: rollback to a saved configuration id,
    platforms with on-box diff support, and platforms where Ansible
    generates the diff locally. Returns a dict with 'changed' and, when
    the task runs in diff mode, a 'diff' entry.
    """
    result = {}
    resp = {}
    config_diff = []
    banner_diff = {}

    replace = module.params['replace']
    rollback_id = module.params['rollback']
    commit_comment = module.params['commit_comment']
    multiline_delimiter = module.params['multiline_delimiter']
    diff_replace = module.params['diff_replace']
    diff_match = module.params['diff_match']
    diff_ignore_lines = module.params['diff_ignore_lines']

    # In check mode the change is computed but never committed.
    commit = not module.check_mode

    # 'replace' is typed as str (it can be a file path on NXOS); normalize
    # the boolean-ish spellings to real booleans.
    if replace in ('yes', 'true', 'True'):
        replace = True
    elif replace in ('no', 'false', 'False'):
        replace = False

    if rollback_id is not None:
        resp = connection.rollback(rollback_id, commit)
        if 'diff' in resp:
            result['changed'] = True

    elif capabilities['device_operations']['supports_onbox_diff']:
        # The device computes the diff itself; the local diff options are
        # meaningless here, so warn that they are ignored.
        if diff_replace:
            module.warn('diff_replace is ignored as the device supports onbox diff')
        if diff_match:
            # Fixed typo in the warning text (was 'diff_mattch').
            module.warn('diff_match is ignored as the device supports onbox diff')
        if diff_ignore_lines:
            module.warn('diff_ignore_lines is ignored as the device supports onbox diff')

        if not isinstance(candidate, list):
            candidate = candidate.strip('\n').splitlines()

        kwargs = {'candidate': candidate, 'commit': commit, 'replace': replace,
                  'comment': commit_comment}
        resp = connection.edit_config(**kwargs)

        if 'diff' in resp:
            result['changed'] = True

    elif capabilities['device_operations']['supports_generate_diff']:
        # Ask the plugin to compute the diff locally, honoring only the
        # diff options the user actually supplied.
        kwargs = {'candidate': candidate, 'running': running}
        if diff_match:
            kwargs.update({'diff_match': diff_match})
        if diff_replace:
            kwargs.update({'diff_replace': diff_replace})
        if diff_ignore_lines:
            kwargs.update({'diff_ignore_lines': diff_ignore_lines})

        diff_response = connection.get_diff(**kwargs)

        config_diff = diff_response.get('config_diff')
        banner_diff = diff_response.get('banner_diff')

        if config_diff:
            if isinstance(config_diff, list):
                candidate = config_diff
            else:
                candidate = config_diff.splitlines()

            kwargs = {'candidate': candidate, 'commit': commit, 'replace': replace,
                      'comment': commit_comment}
            connection.edit_config(**kwargs)
            result['changed'] = True

        if banner_diff:
            candidate = json.dumps(banner_diff)

            kwargs = {'candidate': candidate, 'commit': commit}
            if multiline_delimiter:
                kwargs.update({'multiline_delimiter': multiline_delimiter})

            connection.edit_banner(**kwargs)
            result['changed'] = True

    # Build the 'diff' payload only when the task runs with --diff.
    if module._diff:
        if 'diff' in resp:
            result['diff'] = {'prepared': resp['diff']}
        else:
            diff = ''
            if config_diff:
                if isinstance(config_diff, list):
                    diff += '\n'.join(config_diff)
                else:
                    diff += config_diff
            if banner_diff:
                diff += json.dumps(banner_diff)
            result['diff'] = {'prepared': diff}

    return result
def main():
    """main entry point for execution
    """
    argument_spec = dict(
        config=dict(type='str'),
        commit=dict(type='bool'),
        replace=dict(type='str'),
        rollback=dict(type='int'),
        commit_comment=dict(type='str'),
        defaults=dict(default=False, type='bool'),
        multiline_delimiter=dict(type='str'),
        diff_replace=dict(choices=['line', 'block', 'config']),
        diff_match=dict(choices=['line', 'strict', 'exact', 'none']),
        diff_ignore_lines=dict(type='list')
    )
    # 'config' and 'rollback' cannot be combined, but one of them must be
    # supplied.
    mutually_exclusive = [('config', 'rollback')]
    required_one_of = [['config', 'rollback']]
    module = AnsibleModule(argument_spec=argument_spec,
                           mutually_exclusive=mutually_exclusive,
                           required_one_of=required_one_of,
                           supports_check_mode=True)
    result = {'changed': False}
    # Talk to the device over the persistent network_cli connection and
    # reject any option the platform does not support.
    connection = Connection(module._socket_path)
    capabilities = module.from_json(connection.get_capabilities())
    if capabilities:
        validate_args(module, capabilities)
    # With defaults=True the running-config is collected with the platform's
    # 'all/default' flag so default values appear in the diff.
    if module.params['defaults']:
        if 'get_default_flag' in capabilities.get('rpc'):
            flags = connection.get_default_flag()
        else:
            flags = 'all'
    else:
        flags = []
    candidate = to_text(module.params['config'])
    running = connection.get_config(flags=flags)
    try:
        result.update(run(module, capabilities, connection, candidate, running))
    except Exception as exc:
        # Surface any failure as a module failure instead of a traceback.
        module.fail_json(msg=to_text(exc))
    module.exit_json(**result)
if __name__ == '__main__':
main()
| gpl-3.0 |
guaix-ucm/numina | numina/util/tests/test_context.py | 3 | 1483 |
import os
from ..context import environ
def test_environ_context1():
    """The env var is removed on exit if it was not set beforehand."""
    cache_dir = '/cache/dir'
    with environ(OOO_CACHE_HOME=cache_dir):
        assert os.environ['OOO_CACHE_HOME'] == cache_dir
    # Idiomatic membership assertion instead of comparing a bool with '=='
    # (the old 'assert is_in == False' pattern).
    assert 'OOO_CACHE_HOME' not in os.environ
def test_environ_context2():
    """The previous value of the env var is restored on exit."""
    original_value = '/cache/dir/1'
    override_value = '/cache/dir/2'
    os.environ['OOO_CACHE_HOME'] = original_value
    with environ(OOO_CACHE_HOME=override_value):
        # Inside the context the override is visible.
        assert os.environ['OOO_CACHE_HOME'] == override_value
    # After exit the pre-existing value is back.
    assert os.environ['OOO_CACHE_HOME'] == original_value
def test_environ_context3():
    """Reset multiple variables on exit."""
    cache_dir1a = '/cache/dir/1a'
    cache_dir2a = '/cache/dir/2a'
    cache_dir3a = '/cache/dir/3a'
    cache_dir1b = '/cache/dir/1b'
    cache_dir2b = '/cache/dir/2b'
    # Pre-set three variables; only the first two are overridden below.
    os.environ['OOO_CACHE_HOME1'] = cache_dir1a
    os.environ['OOO_CACHE_HOME2'] = cache_dir2a
    os.environ['OOO_CACHE_HOME3'] = cache_dir3a
    with environ(OOO_CACHE_HOME1=cache_dir1b,
                 OOO_CACHE_HOME2=cache_dir2b):
        # Overridden vars change inside the context; the third is untouched.
        assert os.environ['OOO_CACHE_HOME1'] == cache_dir1b
        assert os.environ['OOO_CACHE_HOME2'] == cache_dir2b
        assert os.environ['OOO_CACHE_HOME3'] == cache_dir3a
    # On exit the original values are restored.
    assert os.environ['OOO_CACHE_HOME1'] == cache_dir1a
    assert os.environ['OOO_CACHE_HOME2'] == cache_dir2a
    assert os.environ['OOO_CACHE_HOME3'] == cache_dir3a
yonglehou/WinObjC | deps/scripts/libpng.py | 159 | 8895 |
# python 2.7.9
import subprocess, os.path, os, shutil
# Set to True for verbose command echoing and interactive pauses.
DEBUG = False
# msbuild configuration to compile; also controls whether PDBs are copied.
BUILD_CONFIGURATION = "RelWithDebInfo"
# Patch applied to the libpng submodule before building.
PATCH_FILE = os.path.realpath(os.path.join(".", "libpng.patch"))
# Scratch directory for cmake/msbuild intermediates; deleted after the run.
BUILD_DIR = "temp_build"
# Cache file holding the msbuild and cmake tool locations
# (see import_paths()/query_paths()).
PATH_FILE = "paths.txt"
# Destination tree for the built binaries and the shared headers.
OUTPUT_DIR = os.path.realpath(os.path.join("..", "prebuilt"))
HEADER_DIR = os.path.realpath(os.path.join(OUTPUT_DIR, "include"))
# Filled in by import_paths() from PATH_FILE.
MSBUILD_PATH = None
CMAKE_PATH = None
# Map cmake architecture / platform names to output directory names.
ARCH_TO_DIR_NAME = {"Win32": "x86",
                    "Win64": "x64",
                    "ARM" : "ARM"}
PLATFORM_TO_DIR_NAME = {"Windows8.1": "Windows 8.1",
                        "WindowsPhone8.1": "Windows Phone 8.1",
                        "Windows10.0": "Windows Universal"}
class Configuration:
    """A single (generator, platform, architecture) build variant.

    Knows how to produce the cmake command-line arguments for the variant
    and where its build products belong in the prebuilt output tree.
    """

    def __init__(self, generator, platform, arch, system_name, system_version):
        self.generator = generator
        self.platform = platform
        self.arch = arch
        self.system_name = system_name
        self.system_version = system_version

    def generate_cmake_args(self):
        """Return the list of configuration-specific cmake arguments."""
        # Win32 uses the bare generator name; other arches append the arch.
        generator_name = self.generator
        if self.arch != "Win32":
            generator_name = "%s %s" % (self.generator, self.arch)
        zlib_library = os.path.join(self.output_directory(OUTPUT_DIR),
                                    'libz.lib')
        return [
            "-G", generator_name,
            '-DCMAKE_SYSTEM_NAME=%s' % self.system_name,
            '-DCMAKE_SYSTEM_VERSION=%s' % self.system_version,
            '-DZLIB_INCLUDE_DIR=%s' % HEADER_DIR,
            '-DZLIB_LIBRARY=%s' % zlib_library,
        ]

    def output_directory(self, prefix):
        """Return prefix/<platform dir>/<arch dir> for this configuration."""
        return os.path.join(prefix,
                            PLATFORM_TO_DIR_NAME[self.platform],
                            ARCH_TO_DIR_NAME[self.arch])

    def __repr__(self):
        return "Configuration(%s, %s, %s, %s, %s)" % (
            self.generator, self.platform, self.arch,
            self.system_name, self.system_version)
def create_configurations():
    """Return the predefined list of Configuration objects to build."""
    # (generator, platform, arch, system name, system version) per variant.
    variants = [
        ("Visual Studio 12 2013", "WindowsPhone8.1", "Win32", "WindowsPhone", "8.1"),
        ("Visual Studio 12 2013", "WindowsPhone8.1", "ARM", "WindowsPhone", "8.1"),
        ("Visual Studio 12 2013", "Windows8.1", "Win32", "WindowsStore", "8.1"),
        ("Visual Studio 12 2013", "Windows8.1", "Win64", "WindowsStore", "8.1"),
        ("Visual Studio 12 2013", "Windows8.1", "ARM", "WindowsStore", "8.1"),
        ("Visual Studio 14 2015", "Windows10.0", "Win32", "WindowsStore", "10.0"),
        ("Visual Studio 14 2015", "Windows10.0", "Win64", "WindowsStore", "10.0"),
        ("Visual Studio 14 2015", "Windows10.0", "ARM", "WindowsStore", "10.0"),
    ]
    return [Configuration(*variant) for variant in variants]
def check_path_file():
    """Return True if PATH_FILE exists and holds at least two entries.

    The file is expected to contain the msbuild path on the first line and
    the cmake path on the second (written by query_paths()).
    """
    if not os.path.exists(PATH_FILE):
        return False
    # 'with' guarantees the handle is closed even if readlines() raises;
    # the original open/close pair could leak the handle on error.
    with open(PATH_FILE, 'r') as fp:
        lines = fp.readlines()
    return len(lines) >= 2
def import_paths():
    '''reads PATH_FILE if it exists, otherwise queries user for paths'''
    # Keep prompting until PATH_FILE exists and is well-formed.
    while not check_path_file():
        query_paths()
    fp = open(PATH_FILE, 'r')
    lines = fp.readlines()
    fp.close()
    if len(lines) >= 2:
        global MSBUILD_PATH
        global CMAKE_PATH
        # Each line looks like "NAME = <path>"; everything after the second
        # space is the path (paths may themselves contain spaces).
        MSBUILD_PATH = ' '.join(lines[0].split(' ')[2:]).rstrip()
        CMAKE_PATH = ' '.join(lines[1].split(' ')[2:]).rstrip()
    if DEBUG:
        # NOTE: Python 2 print statements; this script targets Python 2.7.
        print "MSBUILD_PATH: %s" % MSBUILD_PATH
        print "CMAKE_PATH: %s" % CMAKE_PATH
def query_paths():
    '''Prompts the user for the msbuild and cmake paths and persists them
    to PATH_FILE.

    Keeps re-prompting until each entered path exists on disk.
    '''
    msbuild_path = raw_input("msbuild path: ")
    while not os.path.exists(msbuild_path):
        msbuild_path = raw_input("msbuild path: ")
    cmake_path = raw_input("cmake path: ")
    # BUG FIX: this loop previously re-checked msbuild_path, so an invalid
    # cmake path was silently accepted. Validate the cmake path itself.
    while not os.path.exists(cmake_path):
        cmake_path = raw_input("cmake path: ")
    # 'with' ensures the file is closed even if a write fails.
    with open(PATH_FILE, 'w') as fp:
        fp.write("MSBUILD_PATH = %s\n" % msbuild_path)
        fp.write("CMAKE_PATH = %s\n" % cmake_path)
def make_prebuilt_dirs(configs):
    '''creates output directory for each config'''
    # Shared header output directory.
    if not os.path.exists(HEADER_DIR):
        os.makedirs(HEADER_DIR)
    # One binary output directory per (platform, arch) configuration.
    for config in configs:
        if not os.path.exists(config.output_directory(OUTPUT_DIR)):
            os.makedirs(config.output_directory(OUTPUT_DIR))
def cmake_generate_project_files(config, source_code_dir):
    '''calls CMAKE_PATH to generate vcxproj files for the given config. returns
    True if cmake exited with a result of 0, False otherwise'''
    cmake_command = [CMAKE_PATH] + config.generate_cmake_args() + [source_code_dir]
    if DEBUG:
        # Echo the command and pause so it can be inspected before running.
        print cmake_command
        wait = raw_input()
    result = subprocess.call(cmake_command)
    return result == 0
def compile_project(config, project_filename):
    '''calls MSBUILD_PATH to compile the existing vcxproj. returns True if the
    process exited with a result of 0, False otherwise'''
    msbuild_command = [MSBUILD_PATH, project_filename, "/p:configuration=%s" % BUILD_CONFIGURATION]
    if DEBUG:
        # Echo the command and pause so it can be inspected before running.
        print msbuild_command
        wait = raw_input()
    result = subprocess.call(msbuild_command)
    return result == 0
def add_to_prebuilt(output_dir, filename, output_filename=None):
    '''Copies BUILD_CONFIGURATION/<filename> into |output_dir|.

    The copy is renamed to |output_filename| when given; otherwise the
    original name is kept.
    '''
    source_file = os.path.join(BUILD_CONFIGURATION, filename)
    # 'is None' is the idiomatic identity test (the old '== None' invokes
    # __eq__ and can misbehave for exotic types).
    if output_filename is None:
        output_filename = filename
    shutil.copyfile(source_file, os.path.join(output_dir, output_filename))
def clean_build(build_dir):
    '''Recursively deletes the intermediate build tree; missing paths and
    deletion errors are ignored.'''
    shutil.rmtree(build_dir, ignore_errors=True)
def add_header(input_directory, filename):
    '''Copies |filename| from |input_directory| into the shared HEADER_DIR.'''
    source_file = os.path.join(input_directory, filename)
    shutil.copyfile(source_file, os.path.join(HEADER_DIR, filename))
def apply_patch(patch_file, git_dir):
    '''Applies |patch_file| to the git checkout at |git_dir|.

    Returns True when 'git apply' exits successfully.
    '''
    cwd = os.getcwd()
    os.chdir(git_dir)
    try:
        # --ignore-whitespace tolerates line-ending differences on Windows.
        result = subprocess.call(
            ["git", "apply", patch_file, "--ignore-whitespace"])
    finally:
        # Always restore the working directory, even if the call raises
        # (e.g. git missing from PATH); previously the cwd change leaked.
        os.chdir(cwd)
    return result == 0
def need_to_apply_patch(git_dir):
    '''Returns True if the patch still needs to be applied.

    'git diff --exit-code' exits 0 only when the working tree at |git_dir|
    has no modifications, i.e. the patch has not been applied yet.
    '''
    cwd = os.getcwd()
    os.chdir(git_dir)
    try:
        result = subprocess.call(["git", "diff", "--exit-code"])
    finally:
        # Restore the original working directory even if the call raises;
        # previously an exception left the process chdir'd into |git_dir|.
        os.chdir(cwd)
    return result == 0
def run():
    '''Builds every configuration and places the binaries and headers in the
    prebuilt directory tree; exits non-zero if any build fails.'''
    original_dir = os.path.realpath(".")
    import_paths()
    configs = create_configurations()
    # make sure prebuilt dir tree exists
    make_prebuilt_dirs(configs)
    libpng_dir = os.path.realpath(os.path.join("..", "3rdParty", "libpng"))
    # apply patch to libpng (only when the submodule is still pristine)
    if need_to_apply_patch(libpng_dir):
        result = apply_patch(PATCH_FILE, libpng_dir)
        if not result:
            print "Patch failed to apply, won't be able to generate correct builds"
            exit(1)
    # remove old build dir if it exists
    if os.path.exists(BUILD_DIR):
        shutil.rmtree(BUILD_DIR)
    os.mkdir(BUILD_DIR)
    os.chdir(BUILD_DIR)
    build_dir = os.path.realpath(".")
    failed_builds = []
    for config in configs:
        # Each configuration builds in its own subdirectory of build_dir.
        os.chdir(build_dir)
        os.makedirs(config.output_directory("."))
        os.chdir(config.output_directory("."))
        result = cmake_generate_project_files(config, libpng_dir)
        if result:
            result = compile_project(config, "png.vcxproj")
            if result:
                # Copy the built artifacts into the prebuilt tree; PDBs only
                # exist for configurations built with debug info.
                output_dir = config.output_directory(OUTPUT_DIR)
                add_to_prebuilt(output_dir, "libpng.dll")
                add_to_prebuilt(output_dir, "libpng.lib")
                if BUILD_CONFIGURATION in ["Debug", "RelWithDebInfo"]:
                    add_to_prebuilt(output_dir, "libpng.pdb")
                continue
        # cmake or build failed
        failed_builds.append(config)
        if DEBUG:
            print "FAILED: %s" % config
            wait = raw_input()
    os.chdir(original_dir)
    # copy relevant header files to right directory; pnglibconf.h is
    # generated by the build, the others come from the source tree.
    add_header(configs[0].output_directory(build_dir), "pnglibconf.h")
    add_header(libpng_dir, "png.h")
    add_header(libpng_dir, "pngconf.h")
    clean_build(build_dir)
    if len(failed_builds) == 0:
        print "Successful"
        exit()
    else:
        print "%d failed builds!" % len(failed_builds)
        for config in failed_builds:
            print config
        exit(1)
| mit |
jhseu/tensorflow | tensorflow/python/data/experimental/kernel_tests/serialization/dataset_constructor_serialization_test.py | 15 | 3785 | # Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for the dataset constructors serialization."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from absl.testing import parameterized
import numpy as np
from tensorflow.python.data.experimental.kernel_tests.serialization import dataset_serialization_test_base
from tensorflow.python.data.kernel_tests import test_base
from tensorflow.python.data.ops import dataset_ops
from tensorflow.python.framework import combinations
from tensorflow.python.framework import sparse_tensor
from tensorflow.python.platform import test
class FromTensorsSerializationTest(
    dataset_serialization_test_base.DatasetSerializationTestBase,
    parameterized.TestCase):
  """Serialization tests for `tf.data.Dataset.from_tensors`."""

  def _build_tensor_dataset(self, variable_array):
    # One caller-supplied component plus two fixed ones (a vector, a scalar).
    components = (variable_array, np.array([1, 2, 3]), np.array(37.0))
    return dataset_ops.Dataset.from_tensors(components)

  @combinations.generate(test_base.default_test_combinations())
  def testFromTensorsCore(self):
    # from_tensors yields the entire structure as a single element.
    scalar = np.array(1)
    self.run_core_tests(lambda: self._build_tensor_dataset(scalar), 1)
class FromTensorSlicesSerializationTest(
    dataset_serialization_test_base.DatasetSerializationTestBase,
    parameterized.TestCase):
  """Serialization tests for `tf.data.Dataset.from_tensor_slices`."""

  def _build_tensor_slices_dataset(self, components):
    return dataset_ops.Dataset.from_tensor_slices(components)

  @combinations.generate(test_base.default_test_combinations())
  def testFromTensorSlicesCore(self):
    # Tuple input: three components with an equal first dimension (4),
    # so the dataset yields 4 elements.
    tuple_components = (np.tile(np.array([[1], [2], [3], [4]]), 20),
                        np.tile(np.array([[12], [13], [14], [15]]), 22),
                        np.array([37.0, 38.0, 39.0, 40.0]))
    # Dict input: every value has 3 slices, so the dataset yields 3 elements.
    dict_components = {"foo": [1, 2, 3], "bar": [[4.0], [5.0], [6.0]]}
    self.run_core_tests(
        lambda: self._build_tensor_slices_dataset(tuple_components), 4)
    self.run_core_tests(
        lambda: self._build_tensor_slices_dataset(dict_components), 3)
class FromSparseTensorSlicesSerializationTest(
    dataset_serialization_test_base.DatasetSerializationTestBase,
    parameterized.TestCase):
  """Serialization tests for `Dataset.from_sparse_tensor_slices` (v1 API)."""

  def _build_sparse_tensor_slice_dataset(self, slices):
    # Encode the ragged python lists as one 2-D SparseTensor whose rows
    # correspond to the entries of `slices`.
    coords = []
    flat_values = []
    for row_idx, row in enumerate(slices):
      for col_idx, val in enumerate(row):
        coords.append([row_idx, col_idx])
        flat_values.append(val)
    indices = np.array(coords, dtype=np.int64)
    values = np.array(flat_values, dtype=np.float64)
    # One column wider than the longest row, matching the original test data.
    dense_shape = np.array(
        [len(slices), max(len(s) for s in slices) + 1], dtype=np.int64)
    sparse_components = sparse_tensor.SparseTensor(indices, values, dense_shape)
    return dataset_ops.Dataset.from_sparse_tensor_slices(sparse_components)

  @combinations.generate(
      combinations.combine(
          tf_api_version=1,
          mode=["graph", "eager"]))
  def testFromSparseTensorSlicesCore(self):
    slices = [[1., 2., 3.], [1.], [1.], [1., 2.], [], [1., 2.], [], [], []]
    self.run_core_tests(
        lambda: self._build_sparse_tensor_slice_dataset(slices),
        9,
        sparse_tensors=True)
if __name__ == "__main__":
test.main()
| apache-2.0 |
Imaginashion/cloud-vision | .fr-d0BNfn/django-jquery-file-upload/venv/lib/python3.5/site-packages/pip/compat/dictconfig.py | 921 | 23096 | # This is a copy of the Python logging.config.dictconfig module,
# reproduced with permission. It is provided here for backwards
# compatibility for Python versions prior to 2.7.
#
# Copyright 2009-2010 by Vinay Sajip. All Rights Reserved.
#
# Permission to use, copy, modify, and distribute this software and its
# documentation for any purpose and without fee is hereby granted,
# provided that the above copyright notice appear in all copies and that
# both that copyright notice and this permission notice appear in
# supporting documentation, and that the name of Vinay Sajip
# not be used in advertising or publicity pertaining to distribution
# of the software without specific, written prior permission.
# VINAY SAJIP DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS SOFTWARE, INCLUDING
# ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL
# VINAY SAJIP BE LIABLE FOR ANY SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR
# ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER
# IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
from __future__ import absolute_import
import logging.handlers
import re
import sys
import types
from pip._vendor import six
# flake8: noqa
IDENTIFIER = re.compile('^[a-z_][a-z0-9_]*$', re.I)
def valid_ident(s):
    """Return True if *s* looks like a Python identifier, else raise ValueError.

    Used to vet keys pulled from user-supplied config dicts before they are
    passed as **kwargs.
    """
    if re.match(r'^[a-z_][a-z0-9_]*$', s, re.IGNORECASE) is None:
        raise ValueError('Not a valid Python identifier: %r' % s)
    return True
#
# This function is defined in logging only in recent versions of Python
#
try:
    # Modern Pythons provide this private helper in the stdlib.
    from logging import _checkLevel
except ImportError:
    def _checkLevel(level):
        """Validate *level*: accept an int as-is, or map a level name
        (e.g. 'DEBUG') to its numeric value; raise otherwise."""
        if isinstance(level, int):
            rv = level
        elif str(level) == level:
            # NOTE(review): logging._levelNames exists on Python 2 only; on
            # Python 3 this fallback would fail with AttributeError, but there
            # the import above presumably succeeds so this branch is never
            # reached -- verify before relying on it under Python 3.
            if level not in logging._levelNames:
                raise ValueError('Unknown level: %r' % level)
            rv = logging._levelNames[level]
        else:
            raise TypeError('Level not an integer or a '
                            'valid string: %r' % level)
        return rv
# The ConvertingXXX classes are wrappers around standard Python containers,
# and they serve to convert any suitable values in the container. The
# conversion converts base dicts, lists and tuples to their wrapped
# equivalents, whereas strings which match a conversion format are converted
# appropriately.
#
# Each wrapper should have a configurator attribute holding the actual
# configurator to use for conversion.
class ConvertingDict(dict):
    """A converting dictionary wrapper.

    Every value read out of the dict is passed through the attached
    configurator's convert(), which wraps nested dicts/lists/tuples and
    expands 'ext://' / 'cfg://' style strings.  Converted results are
    written back so the conversion only happens once per key.
    """

    def __getitem__(self, key):
        value = dict.__getitem__(self, key)
        result = self.configurator.convert(value)
        # If the converted value is different, save for next time
        if value is not result:
            self[key] = result
            if type(result) in (ConvertingDict, ConvertingList,
                                ConvertingTuple):
                # Record where the wrapper lives so cfg:// resolution can
                # navigate back through the configuration tree.
                result.parent = self
                result.key = key
        return result

    def get(self, key, default=None):
        # Same conversion/caching behaviour as __getitem__, with a default.
        value = dict.get(self, key, default)
        result = self.configurator.convert(value)
        # If the converted value is different, save for next time
        if value is not result:
            self[key] = result
            if type(result) in (ConvertingDict, ConvertingList,
                                ConvertingTuple):
                result.parent = self
                result.key = key
        return result

    def pop(self, key, default=None):
        value = dict.pop(self, key, default)
        result = self.configurator.convert(value)
        if value is not result:
            # Unlike __getitem__/get, nothing is written back here: the key
            # is already gone.  Only the origin links are recorded.
            if type(result) in (ConvertingDict, ConvertingList,
                                ConvertingTuple):
                result.parent = self
                result.key = key
        return result
class ConvertingList(list):
    """A converting list wrapper.

    Indexing and pop() pass values through the configurator's convert();
    __getitem__ caches converted results back into the list.
    """

    def __getitem__(self, key):
        value = list.__getitem__(self, key)
        result = self.configurator.convert(value)
        # If the converted value is different, save for next time
        if value is not result:
            self[key] = result
            if type(result) in (ConvertingDict, ConvertingList,
                                ConvertingTuple):
                # Record the container/index so cfg:// lookups can walk back.
                result.parent = self
                result.key = key
        return result

    def pop(self, idx=-1):
        value = list.pop(self, idx)
        result = self.configurator.convert(value)
        if value is not result:
            if type(result) in (ConvertingDict, ConvertingList,
                                ConvertingTuple):
                # No key recorded: the element no longer lives in this list.
                result.parent = self
        return result
class ConvertingTuple(tuple):
    """A converting tuple wrapper.

    Tuples are immutable, so converted values cannot be cached back into
    the container; they are re-converted on every access.
    """

    def __getitem__(self, key):
        value = tuple.__getitem__(self, key)
        result = self.configurator.convert(value)
        if value is not result:
            if type(result) in (ConvertingDict, ConvertingList,
                                ConvertingTuple):
                result.parent = self
                result.key = key
        return result
class BaseConfigurator(object):
    """
    The configurator base class which defines some useful defaults.
    """

    # Matches 'prefix://suffix' conversion strings, e.g. 'ext://sys.stderr'.
    CONVERT_PATTERN = re.compile(r'^(?P<prefix>[a-z]+)://(?P<suffix>.*)$')

    # Pieces of the cfg:// path syntax: a leading word, then '.attr' and
    # '[index]' segments.
    WORD_PATTERN = re.compile(r'^\s*(\w+)\s*')
    DOT_PATTERN = re.compile(r'^\.\s*(\w+)\s*')
    INDEX_PATTERN = re.compile(r'^\[\s*(\w+)\s*\]\s*')
    DIGIT_PATTERN = re.compile(r'^\d+$')

    # Maps conversion prefixes to the names of the converter methods below.
    value_converters = {
        'ext' : 'ext_convert',
        'cfg' : 'cfg_convert',
    }

    # We might want to use a different one, e.g. importlib
    importer = __import__

    def __init__(self, config):
        # Wrap the raw dict so nested values get converted lazily on access.
        self.config = ConvertingDict(config)
        self.config.configurator = self

    def resolve(self, s):
        """
        Resolve strings to objects using standard import and attribute
        syntax.
        """
        name = s.split('.')
        used = name.pop(0)
        try:
            found = self.importer(used)
            for frag in name:
                used += '.' + frag
                try:
                    found = getattr(found, frag)
                except AttributeError:
                    # Not an attribute yet -- try importing the submodule,
                    # then retry the attribute lookup.
                    self.importer(used)
                    found = getattr(found, frag)
            return found
        except ImportError:
            # Re-raise as ValueError, preserving cause and traceback.
            e, tb = sys.exc_info()[1:]
            v = ValueError('Cannot resolve %r: %s' % (s, e))
            v.__cause__, v.__traceback__ = e, tb
            raise v

    def ext_convert(self, value):
        """Default converter for the ext:// protocol."""
        return self.resolve(value)

    def cfg_convert(self, value):
        """Default converter for the cfg:// protocol.

        Walks a path like 'handlers[console].level' through self.config.
        """
        rest = value
        m = self.WORD_PATTERN.match(rest)
        if m is None:
            raise ValueError("Unable to convert %r" % value)
        else:
            rest = rest[m.end():]
            d = self.config[m.groups()[0]]
            # print d, rest
            while rest:
                m = self.DOT_PATTERN.match(rest)
                if m:
                    d = d[m.groups()[0]]
                else:
                    m = self.INDEX_PATTERN.match(rest)
                    if m:
                        idx = m.groups()[0]
                        if not self.DIGIT_PATTERN.match(idx):
                            d = d[idx]
                        else:
                            try:
                                n = int(idx)  # try as number first (most likely)
                                d = d[n]
                            except TypeError:
                                # Container is not int-indexable; treat the
                                # digits as a string key instead.
                                d = d[idx]
                if m:
                    rest = rest[m.end():]
                else:
                    raise ValueError('Unable to convert '
                                     '%r at %r' % (value, rest))
        # rest should be empty
        return d

    def convert(self, value):
        """
        Convert values to an appropriate type. dicts, lists and tuples are
        replaced by their converting alternatives. Strings are checked to
        see if they have a conversion format and are converted if they do.
        """
        if not isinstance(value, ConvertingDict) and isinstance(value, dict):
            value = ConvertingDict(value)
            value.configurator = self
        elif not isinstance(value, ConvertingList) and isinstance(value, list):
            value = ConvertingList(value)
            value.configurator = self
        elif not isinstance(value, ConvertingTuple) and\
                isinstance(value, tuple):
            value = ConvertingTuple(value)
            value.configurator = self
        elif isinstance(value, six.string_types):  # str for py3k
            m = self.CONVERT_PATTERN.match(value)
            if m:
                d = m.groupdict()
                prefix = d['prefix']
                converter = self.value_converters.get(prefix, None)
                if converter:
                    suffix = d['suffix']
                    converter = getattr(self, converter)
                    value = converter(suffix)
        return value

    def configure_custom(self, config):
        """Configure an object with a user-supplied factory."""
        c = config.pop('()')
        # If '()' is not callable it is presumably a dotted name to resolve;
        # the types.ClassType test keeps Python 2 old-style classes (which
        # lack __call__) from being mistaken for such a name.
        if not hasattr(c, '__call__') and hasattr(types, 'ClassType') and type(c) != types.ClassType:
            c = self.resolve(c)
        props = config.pop('.', None)
        # Check for valid identifiers
        kwargs = dict((k, config[k]) for k in config if valid_ident(k))
        result = c(**kwargs)
        if props:
            # Trailing '.' entry holds attributes to set on the new object.
            for name, value in props.items():
                setattr(result, name, value)
        return result

    def as_tuple(self, value):
        """Utility function which converts lists to tuples."""
        if isinstance(value, list):
            value = tuple(value)
        return value
class DictConfigurator(BaseConfigurator):
    """
    Configure logging using a dictionary-like object to describe the
    configuration.

    NOTE: the original backport caught ``StandardError``, a Python-2-only
    builtin.  On Python 3 (where this copy is installed) that name does not
    exist, so any failure inside the wrapped calls raised ``NameError``
    instead of the intended ``ValueError``.  All such handlers now catch
    ``Exception``, matching the modern ``logging.config`` implementation.
    """

    def configure(self):
        """Do the configuration."""

        config = self.config
        if 'version' not in config:
            raise ValueError("dictionary doesn't specify a version")
        if config['version'] != 1:
            raise ValueError("Unsupported version: %s" % config['version'])
        incremental = config.pop('incremental', False)
        EMPTY_DICT = {}
        # Hold the module-level logging lock for the whole (re)configuration.
        logging._acquireLock()
        try:
            if incremental:
                handlers = config.get('handlers', EMPTY_DICT)
                # incremental handler config only if handler name
                # ties in to logging._handlers (Python 2.7)
                if sys.version_info[:2] == (2, 7):
                    for name in handlers:
                        if name not in logging._handlers:
                            raise ValueError('No handler found with '
                                             'name %r' % name)
                        else:
                            try:
                                handler = logging._handlers[name]
                                handler_config = handlers[name]
                                level = handler_config.get('level', None)
                                if level:
                                    handler.setLevel(_checkLevel(level))
                            except Exception as e:
                                raise ValueError('Unable to configure handler '
                                                 '%r: %s' % (name, e))
                loggers = config.get('loggers', EMPTY_DICT)
                for name in loggers:
                    try:
                        self.configure_logger(name, loggers[name], True)
                    except Exception as e:
                        raise ValueError('Unable to configure logger '
                                         '%r: %s' % (name, e))
                root = config.get('root', None)
                if root:
                    try:
                        self.configure_root(root, True)
                    except Exception as e:
                        raise ValueError('Unable to configure root '
                                         'logger: %s' % e)
            else:
                disable_existing = config.pop('disable_existing_loggers', True)

                logging._handlers.clear()
                del logging._handlerList[:]

                # Do formatters first - they don't refer to anything else
                formatters = config.get('formatters', EMPTY_DICT)
                for name in formatters:
                    try:
                        formatters[name] = self.configure_formatter(
                            formatters[name])
                    except Exception as e:
                        raise ValueError('Unable to configure '
                                         'formatter %r: %s' % (name, e))
                # Next, do filters - they don't refer to anything else, either
                filters = config.get('filters', EMPTY_DICT)
                for name in filters:
                    try:
                        filters[name] = self.configure_filter(filters[name])
                    except Exception as e:
                        raise ValueError('Unable to configure '
                                         'filter %r: %s' % (name, e))

                # Next, do handlers - they refer to formatters and filters
                # As handlers can refer to other handlers, sort the keys
                # to allow a deterministic order of configuration
                handlers = config.get('handlers', EMPTY_DICT)
                for name in sorted(handlers):
                    try:
                        handler = self.configure_handler(handlers[name])
                        handler.name = name
                        handlers[name] = handler
                    except Exception as e:
                        raise ValueError('Unable to configure handler '
                                         '%r: %s' % (name, e))

                # Next, do loggers - they refer to handlers and filters

                # we don't want to lose the existing loggers,
                # since other threads may have pointers to them.
                # existing is set to contain all existing loggers,
                # and as we go through the new configuration we
                # remove any which are configured. At the end,
                # what's left in existing is the set of loggers
                # which were in the previous configuration but
                # which are not in the new configuration.
                root = logging.root
                existing = list(root.manager.loggerDict)
                # The list needs to be sorted so that we can
                # avoid disabling child loggers of explicitly
                # named loggers. With a sorted list it is easier
                # to find the child loggers.
                existing.sort()
                # We'll keep the list of existing loggers
                # which are children of named loggers here...
                child_loggers = []
                # now set up the new ones...
                loggers = config.get('loggers', EMPTY_DICT)
                for name in loggers:
                    if name in existing:
                        i = existing.index(name)
                        prefixed = name + "."
                        pflen = len(prefixed)
                        num_existing = len(existing)
                        i = i + 1  # look at the entry after name
                        while (i < num_existing) and\
                                (existing[i][:pflen] == prefixed):
                            child_loggers.append(existing[i])
                            i = i + 1
                        existing.remove(name)
                    try:
                        self.configure_logger(name, loggers[name])
                    except Exception as e:
                        raise ValueError('Unable to configure logger '
                                         '%r: %s' % (name, e))

                # Disable any old loggers. There's no point deleting
                # them as other threads may continue to hold references
                # and by disabling them, you stop them doing any logging.
                # However, don't disable children of named loggers, as that's
                # probably not what was intended by the user.
                for log in existing:
                    logger = root.manager.loggerDict[log]
                    if log in child_loggers:
                        logger.level = logging.NOTSET
                        logger.handlers = []
                        logger.propagate = True
                    elif disable_existing:
                        logger.disabled = True

                # And finally, do the root logger
                root = config.get('root', None)
                if root:
                    try:
                        self.configure_root(root)
                    except Exception as e:
                        raise ValueError('Unable to configure root '
                                         'logger: %s' % e)
        finally:
            logging._releaseLock()

    def configure_formatter(self, config):
        """Configure a formatter from a dictionary."""
        if '()' in config:
            factory = config['()']  # for use in exception handler
            try:
                result = self.configure_custom(config)
            except TypeError as te:
                if "'format'" not in str(te):
                    raise
                # Name of parameter changed from fmt to format.
                # Retry with old name.
                # This is so that code can be used with older Python versions
                # (e.g. by Django)
                config['fmt'] = config.pop('format')
                config['()'] = factory
                result = self.configure_custom(config)
        else:
            fmt = config.get('format', None)
            dfmt = config.get('datefmt', None)
            result = logging.Formatter(fmt, dfmt)
        return result

    def configure_filter(self, config):
        """Configure a filter from a dictionary."""
        if '()' in config:
            result = self.configure_custom(config)
        else:
            name = config.get('name', '')
            result = logging.Filter(name)
        return result

    def add_filters(self, filterer, filters):
        """Add filters to a filterer from a list of names."""
        for f in filters:
            try:
                filterer.addFilter(self.config['filters'][f])
            except Exception as e:
                raise ValueError('Unable to add filter %r: %s' % (f, e))

    def configure_handler(self, config):
        """Configure a handler from a dictionary."""
        formatter = config.pop('formatter', None)
        if formatter:
            try:
                formatter = self.config['formatters'][formatter]
            except Exception as e:
                raise ValueError('Unable to set formatter '
                                 '%r: %s' % (formatter, e))
        level = config.pop('level', None)
        filters = config.pop('filters', None)
        if '()' in config:
            c = config.pop('()')
            # Resolve dotted-name factories; the ClassType test keeps Python 2
            # old-style classes (which lack __call__) from being mis-treated.
            if not hasattr(c, '__call__') and hasattr(types, 'ClassType') and type(c) != types.ClassType:
                c = self.resolve(c)
            factory = c
        else:
            klass = self.resolve(config.pop('class'))
            # Special case for handler which refers to another handler
            if issubclass(klass, logging.handlers.MemoryHandler) and\
                    'target' in config:
                try:
                    config['target'] = self.config['handlers'][config['target']]
                except Exception as e:
                    raise ValueError('Unable to set target handler '
                                     '%r: %s' % (config['target'], e))
            elif issubclass(klass, logging.handlers.SMTPHandler) and\
                    'mailhost' in config:
                config['mailhost'] = self.as_tuple(config['mailhost'])
            elif issubclass(klass, logging.handlers.SysLogHandler) and\
                    'address' in config:
                config['address'] = self.as_tuple(config['address'])
            factory = klass
        kwargs = dict((k, config[k]) for k in config if valid_ident(k))
        try:
            result = factory(**kwargs)
        except TypeError as te:
            if "'stream'" not in str(te):
                raise
            # The argument name changed from strm to stream
            # Retry with old name.
            # This is so that code can be used with older Python versions
            # (e.g. by Django)
            kwargs['strm'] = kwargs.pop('stream')
            result = factory(**kwargs)
        if formatter:
            result.setFormatter(formatter)
        if level is not None:
            result.setLevel(_checkLevel(level))
        if filters:
            self.add_filters(result, filters)
        return result

    def add_handlers(self, logger, handlers):
        """Add handlers to a logger from a list of names."""
        for h in handlers:
            try:
                logger.addHandler(self.config['handlers'][h])
            except Exception as e:
                raise ValueError('Unable to add handler %r: %s' % (h, e))

    def common_logger_config(self, logger, config, incremental=False):
        """
        Perform configuration which is common to root and non-root loggers.
        """
        level = config.get('level', None)
        if level is not None:
            logger.setLevel(_checkLevel(level))
        if not incremental:
            # Remove any existing handlers
            for h in logger.handlers[:]:
                logger.removeHandler(h)
            handlers = config.get('handlers', None)
            if handlers:
                self.add_handlers(logger, handlers)
            filters = config.get('filters', None)
            if filters:
                self.add_filters(logger, filters)

    def configure_logger(self, name, config, incremental=False):
        """Configure a non-root logger from a dictionary."""
        logger = logging.getLogger(name)
        self.common_logger_config(logger, config, incremental)
        propagate = config.get('propagate', None)
        if propagate is not None:
            logger.propagate = propagate

    def configure_root(self, config, incremental=False):
        """Configure a root logger from a dictionary."""
        root = logging.getLogger()
        self.common_logger_config(root, config, incremental)
# Module-level hook so users can substitute their own configurator class.
dictConfigClass = DictConfigurator

def dictConfig(config):
    """Configure logging using a dictionary."""
    dictConfigClass(config).configure()
| mit |
byohay/Remarkable | remarkable/AboutRemarkableDialog.py | 1 | 1798 | # -*- Mode: Python; coding: utf-8; indent-tabs-mode: nil; tab-width: 4 -*-
### BEGIN LICENSE
# Copyright (C) 2016 <Jamie McGowan> <jamiemcgowan.dev@gmail.com>
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
### END LICENSE
from locale import gettext as _
import logging
logger = logging.getLogger('remarkable')
from remarkable_lib.AboutDialog import AboutDialog
# See remarkable_lib.AboutDialog.py for more details about how this class works.
class AboutRemarkableDialog(AboutDialog):
    """About dialog for Remarkable.

    All of the heavy lifting lives in the remarkable_lib.AboutDialog base
    class; this subclass only provides the GType name and a hook for extra
    initialization.
    """
    __gtype_name__ = "AboutRemarkableDialog"

    def finish_initializing(self, builder):  # pylint: disable=E1002
        """Set up the about dialog by delegating to the base class."""
        super(AboutRemarkableDialog, self).finish_initializing(builder)

        # Code for other initialization actions should be added here.
| mit |
guillaume-philippon/aquilon | lib/aquilon/worker/commands/show_desk_desk.py | 4 | 1248 | # -*- cpy-indent-level: 4; indent-tabs-mode: nil -*-
# ex: set expandtab softtabstop=4 shiftwidth=4:
#
# Copyright (C) 2008,2009,2010,2011,2013 Contributor
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Contains the logic for `aq show desk --desk`."""
from aquilon.worker.broker import BrokerCommand # pylint: disable=W0611
from aquilon.worker.commands.show_location_type import CommandShowLocationType
class CommandShowDeskDesk(CommandShowLocationType):
    """Handle `aq show desk --desk` via the generic location-type command."""

    required_parameters = ["desk"]

    def render(self, session, desk, **arguments):
        # Delegate everything, pinning the location type to 'desk'.
        delegated = dict(arguments, session=session, type='desk', name=desk)
        return CommandShowLocationType.render(self, **delegated)
| apache-2.0 |
hogarthj/ansible | lib/ansible/modules/cloud/amazon/ec2_snapshot.py | 27 | 9687 | #!/usr/bin/python
# Copyright: Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'core'}
DOCUMENTATION = '''
---
module: ec2_snapshot
short_description: creates a snapshot from an existing volume
description:
- creates an EC2 snapshot from an existing EBS volume
version_added: "1.5"
options:
volume_id:
description:
- volume from which to take the snapshot
required: false
description:
description:
- description to be applied to the snapshot
required: false
instance_id:
description:
- instance that has the required volume to snapshot mounted
required: false
device_name:
description:
- device name of a mounted volume to be snapshotted
required: false
snapshot_tags:
description:
- a hash/dictionary of tags to add to the snapshot
required: false
version_added: "1.6"
wait:
description:
- wait for the snapshot to be ready
choices: ['yes', 'no']
required: false
default: yes
version_added: "1.5.1"
wait_timeout:
description:
- how long before wait gives up, in seconds
- specify 0 to wait forever
required: false
default: 0
version_added: "1.5.1"
state:
description:
- whether to add or create a snapshot
required: false
default: present
choices: ['absent', 'present']
version_added: "1.9"
snapshot_id:
description:
- snapshot id to remove
required: false
version_added: "1.9"
last_snapshot_min_age:
description:
- If the volume's most recent snapshot has started less than `last_snapshot_min_age' minutes ago, a new snapshot will not be created.
required: false
default: 0
version_added: "2.0"
author: "Will Thames (@willthames)"
extends_documentation_fragment:
- aws
- ec2
'''
EXAMPLES = '''
# Simple snapshot of volume using volume_id
- ec2_snapshot:
volume_id: vol-abcdef12
description: snapshot of /data from DB123 taken 2013/11/28 12:18:32
# Snapshot of volume mounted on device_name attached to instance_id
- ec2_snapshot:
instance_id: i-12345678
device_name: /dev/sdb1
description: snapshot of /data from DB123 taken 2013/11/28 12:18:32
# Snapshot of volume with tagging
- ec2_snapshot:
instance_id: i-12345678
device_name: /dev/sdb1
snapshot_tags:
frequency: hourly
source: /data
# Remove a snapshot
- local_action:
module: ec2_snapshot
snapshot_id: snap-abcd1234
state: absent
# Create a snapshot only if the most recent one is older than 1 hour
- local_action:
module: ec2_snapshot
volume_id: vol-abcdef12
last_snapshot_min_age: 60
'''
import time
import datetime
try:
import boto.exception
except ImportError:
pass # Taken care of by ec2.HAS_BOTO
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.ec2 import HAS_BOTO, ec2_argument_spec, ec2_connect
# Find the most recent snapshot
def _get_snapshot_starttime(snap):
return datetime.datetime.strptime(snap.start_time, '%Y-%m-%dT%H:%M:%S.000Z')
def _get_most_recent_snapshot(snapshots, max_snapshot_age_secs=None, now=None):
    """
    Gets the most recently created snapshot and optionally filters the result
    if the snapshot is too old

    :param snapshots: list of snapshots to search
    :param max_snapshot_age_secs: filter the result if its older than this
    :param now: simulate time -- used for unit testing
    :return: the youngest snapshot, or None if the list is empty or the
        youngest snapshot is older than the given maximum age
    """
    if not snapshots:
        return None

    if now is None:
        now = datetime.datetime.utcnow()

    youngest_snapshot = max(snapshots, key=_get_snapshot_starttime)

    # See if the snapshot is younger than the given max age.  Reuse the same
    # parser as the sort key so the two can never disagree (the original
    # duplicated the strptime format string here), and only parse when a
    # maximum age was actually requested.
    if max_snapshot_age_secs is not None:
        snapshot_age = now - _get_snapshot_starttime(youngest_snapshot)
        if snapshot_age.total_seconds() > max_snapshot_age_secs:
            return None

    return youngest_snapshot
def _create_with_wait(snapshot, wait_timeout_secs, sleep_func=time.sleep):
"""
Wait for the snapshot to be created
:param snapshot:
:param wait_timeout_secs: fail this step after this many seconds
:param sleep_func:
:return:
"""
time_waited = 0
snapshot.update()
while snapshot.status != 'completed':
sleep_func(3)
snapshot.update()
time_waited += 3
if wait_timeout_secs and time_waited > wait_timeout_secs:
return False
return True
def create_snapshot(module, ec2, state=None, description=None, wait=None,
                    wait_timeout=None, volume_id=None, instance_id=None,
                    snapshot_id=None, device_name=None, snapshot_tags=None,
                    last_snapshot_min_age=None):
    """Create or delete an EBS snapshot and exit via module.exit_json/fail_json.

    Exactly one of volume_id, instance_id or snapshot_id must be given;
    instance_id additionally requires device_name.  When state == 'absent'
    the snapshot is deleted; otherwise one is created (or, with
    last_snapshot_min_age, a recent existing snapshot is reused).

    This function never returns normally: every path ends in exit_json or
    fail_json, which terminate the Ansible module.
    """
    snapshot = None
    changed = False

    required = [volume_id, snapshot_id, instance_id]
    if required.count(None) != len(required) - 1:  # only 1 must be set
        module.fail_json(msg='One and only one of volume_id or instance_id or snapshot_id must be specified')
    # 'and' binds tighter than 'or': this fires when exactly one of
    # instance_id / device_name is supplied without the other.
    if instance_id and not device_name or device_name and not instance_id:
        module.fail_json(msg='Instance ID and device name must both be specified')

    if instance_id:
        # Translate (instance, device) into the attached volume's id.
        try:
            volumes = ec2.get_all_volumes(filters={'attachment.instance-id': instance_id, 'attachment.device': device_name})
        except boto.exception.BotoServerError as e:
            module.fail_json(msg="%s: %s" % (e.error_code, e.error_message))

        if not volumes:
            module.fail_json(msg="Could not find volume with name %s attached to instance %s" % (device_name, instance_id))

        volume_id = volumes[0].id

    if state == 'absent':
        if not snapshot_id:
            module.fail_json(msg='snapshot_id must be set when state is absent')
        try:
            ec2.delete_snapshot(snapshot_id)
        except boto.exception.BotoServerError as e:
            # exception is raised if snapshot does not exist
            if e.error_code == 'InvalidSnapshot.NotFound':
                # Deleting something already gone is not a change.
                module.exit_json(changed=False)
            else:
                module.fail_json(msg="%s: %s" % (e.error_code, e.error_message))

        # successful delete
        module.exit_json(changed=True)

    if last_snapshot_min_age > 0:
        # Look for an existing snapshot young enough to reuse.
        try:
            current_snapshots = ec2.get_all_snapshots(filters={'volume_id': volume_id})
        except boto.exception.BotoServerError as e:
            module.fail_json(msg="%s: %s" % (e.error_code, e.error_message))

        last_snapshot_min_age = last_snapshot_min_age * 60  # Convert to seconds
        snapshot = _get_most_recent_snapshot(current_snapshots,
                                             max_snapshot_age_secs=last_snapshot_min_age)
    try:
        # Create a new snapshot if we didn't find an existing one to use
        if snapshot is None:
            snapshot = ec2.create_snapshot(volume_id, description=description)
            changed = True
        if wait:
            if not _create_with_wait(snapshot, wait_timeout):
                module.fail_json(msg='Timed out while creating snapshot.')
        if snapshot_tags:
            # Tags are applied to both freshly-created and reused snapshots.
            for k, v in snapshot_tags.items():
                snapshot.add_tag(k, v)
    except boto.exception.BotoServerError as e:
        module.fail_json(msg="%s: %s" % (e.error_code, e.error_message))

    module.exit_json(changed=changed,
                     snapshot_id=snapshot.id,
                     volume_id=snapshot.volume_id,
                     volume_size=snapshot.volume_size,
                     tags=snapshot.tags.copy())
def create_snapshot_ansible_module():
    """Build the AnsibleModule describing ec2_snapshot's accepted arguments."""
    spec = ec2_argument_spec()
    spec.update(dict(
        volume_id=dict(),
        description=dict(),
        instance_id=dict(),
        snapshot_id=dict(),
        device_name=dict(),
        wait=dict(type='bool', default=True),
        wait_timeout=dict(type='int', default=0),
        last_snapshot_min_age=dict(type='int', default=0),
        snapshot_tags=dict(type='dict', default=dict()),
        state=dict(choices=['absent', 'present'], default='present'),
    ))
    return AnsibleModule(argument_spec=spec)
def main():
    """Module entry point: build the AnsibleModule and dispatch to create_snapshot()."""
    module = create_snapshot_ansible_module()

    if not HAS_BOTO:
        module.fail_json(msg='boto required for this module')

    params = module.params
    ec2 = ec2_connect(module)

    create_snapshot(
        module=module,
        state=params.get('state'),
        description=params.get('description'),
        wait=params.get('wait'),
        wait_timeout=params.get('wait_timeout'),
        ec2=ec2,
        volume_id=params.get('volume_id'),
        instance_id=params.get('instance_id'),
        snapshot_id=params.get('snapshot_id'),
        device_name=params.get('device_name'),
        snapshot_tags=params.get('snapshot_tags'),
        last_snapshot_min_age=params.get('last_snapshot_min_age')
    )
main()
| gpl-3.0 |
rulz/django-allauth | allauth/socialaccount/providers/facebook/south_migrations/0003_tosocialaccount.py | 82 | 8978 | # encoding: utf-8
from south.v2 import DataMigration
class Migration(DataMigration):
depends_on = (('socialaccount', '0002_genericmodels'),)
def forwards(self, orm):
# Migrate FB apps
app_id_to_sapp = {}
for app in orm.FacebookApp.objects.all():
sapp = orm['socialaccount.SocialApp'].objects \
.create(site=app.site,
provider='facebook',
name=app.name,
key=app.application_id,
secret=app.application_secret)
app_id_to_sapp[app.id] = sapp
# Migrate FB accounts
acc_id_to_sacc = {}
for acc in orm.FacebookAccount.objects.all():
sacc = acc.socialaccount_ptr
sacc.uid = acc.social_id
sacc.extra_data = { 'link': acc.link,
'name': acc.name }
sacc.provider = 'facebook'
sacc.save()
acc_id_to_sacc[acc.id] = sacc
# Migrate tokens
for token in orm.FacebookAccessToken.objects.all():
sapp = app_id_to_sapp[token.app.id]
sacc = acc_id_to_sacc[token.account.id]
orm['socialaccount.SocialToken'].objects \
.create(app=sapp,
account=sacc,
token=token.access_token,
token_secret='')
def backwards(self, orm):
"Write your backwards methods here."
models = {
'auth.group': {
'Meta': {'object_name': 'Group'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
'auth.permission': {
'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'facebook.facebookaccesstoken': {
'Meta': {'unique_together': "(('app', 'account'),)", 'object_name': 'FacebookAccessToken'},
'access_token': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'account': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['facebook.FacebookAccount']"}),
'app': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['facebook.FacebookApp']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'})
},
'facebook.facebookaccount': {
'Meta': {'object_name': 'FacebookAccount', '_ormbases': ['socialaccount.SocialAccount']},
'link': ('django.db.models.fields.URLField', [], {'max_length': '200'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'social_id': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'}),
'socialaccount_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['socialaccount.SocialAccount']", 'unique': 'True', 'primary_key': 'True'})
},
'facebook.facebookapp': {
'Meta': {'object_name': 'FacebookApp'},
'api_key': ('django.db.models.fields.CharField', [], {'max_length': '80'}),
'application_id': ('django.db.models.fields.CharField', [], {'max_length': '80'}),
'application_secret': ('django.db.models.fields.CharField', [], {'max_length': '80'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '40'}),
'site': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['sites.Site']"})
},
'sites.site': {
'Meta': {'ordering': "('domain',)", 'object_name': 'Site', 'db_table': "'django_site'"},
'domain': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'socialaccount.socialaccount': {
'Meta': {'object_name': 'SocialAccount'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'extra_data': ('allauth.socialaccount.fields.JSONField', [], {'default': "'{}'"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'provider': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'uid': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"})
},
'socialaccount.socialapp': {
'Meta': {'object_name': 'SocialApp'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'key': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '40'}),
'provider': ('django.db.models.fields.CharField', [], {'max_length': '30'}),
'secret': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'site': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['sites.Site']"})
},
'socialaccount.socialtoken': {
'Meta': {'unique_together': "(('app', 'account'),)", 'object_name': 'SocialToken'},
'account': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['socialaccount.SocialAccount']"}),
'app': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['socialaccount.SocialApp']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'token': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'token_secret': ('django.db.models.fields.CharField', [], {'max_length': '200', 'blank': 'True'})
}
}
complete_apps = ['socialaccount', 'facebook']
| mit |
jacshfr/mozilla-bedrock | bedrock/events/tests/test_models.py | 6 | 1679 | # This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
import os.path
from datetime import datetime
from django.test.utils import override_settings
from mock import patch
from nose.tools import eq_
from bedrock.events.models import Event
from bedrock.mozorg.tests import TestCase
TEST_DATA = os.path.join(os.path.abspath(os.path.dirname(__file__)),
'test_data')
class TestICalSync(TestCase):
def test_sync_data(self):
"""Data should sync successfully."""
with open(os.path.join(TEST_DATA, 'reps.ical')) as fh:
Event.objects.sync_with_ical(fh.read())
self.assertEqual(Event.objects.count(), 117)
with open(os.path.join(TEST_DATA, 'reps_fewer.ical')) as fh:
Event.objects.sync_with_ical(fh.read())
self.assertEqual(Event.objects.count(), 14)
class TestFutureQuerySet(TestCase):
@override_settings(USE_TZ=True)
@patch('bedrock.events.models.datetime')
def test_future_dst_use_tz(self, mock_datetime):
"""
Should not raise error during DST change
"""
mock_datetime.utcnow.return_value = datetime(2014, 11, 02, 01, 01)
eq_(Event.objects.future().count(), 0)
@override_settings(USE_TZ=False)
@patch('bedrock.events.models.datetime')
def test_future_dst(self, mock_datetime):
"""
Should not raise error during DST change
"""
mock_datetime.utcnow.return_value = datetime(2014, 11, 02, 01, 01)
eq_(Event.objects.future().count(), 0)
| mpl-2.0 |
kenshay/ImageScript | Script_Runner/PYTHON/Lib/site-packages/asn1crypto/_ordereddict.py | 29 | 4533 | # Copyright (c) 2009 Raymond Hettinger
#
# Permission is hereby granted, free of charge, to any person
# obtaining a copy of this software and associated documentation files
# (the "Software"), to deal in the Software without restriction,
# including without limitation the rights to use, copy, modify, merge,
# publish, distribute, sublicense, and/or sell copies of the Software,
# and to permit persons to whom the Software is furnished to do so,
# subject to the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
# OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
# HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
# OTHER DEALINGS IN THE SOFTWARE.
import sys
if not sys.version_info < (2, 7):
from collections import OrderedDict
else:
from UserDict import DictMixin
class OrderedDict(dict, DictMixin):
def __init__(self, *args, **kwds):
if len(args) > 1:
raise TypeError('expected at most 1 arguments, got %d' % len(args))
try:
self.__end
except AttributeError:
self.clear()
self.update(*args, **kwds)
def clear(self):
self.__end = end = []
end += [None, end, end] # sentinel node for doubly linked list
self.__map = {} # key --> [key, prev, next]
dict.clear(self)
def __setitem__(self, key, value):
if key not in self:
end = self.__end
curr = end[1]
curr[2] = end[1] = self.__map[key] = [key, curr, end]
dict.__setitem__(self, key, value)
def __delitem__(self, key):
dict.__delitem__(self, key)
key, prev, next_ = self.__map.pop(key)
prev[2] = next_
next_[1] = prev
def __iter__(self):
end = self.__end
curr = end[2]
while curr is not end:
yield curr[0]
curr = curr[2]
def __reversed__(self):
end = self.__end
curr = end[1]
while curr is not end:
yield curr[0]
curr = curr[1]
def popitem(self, last=True):
if not self:
raise KeyError('dictionary is empty')
if last:
key = reversed(self).next()
else:
key = iter(self).next()
value = self.pop(key)
return key, value
def __reduce__(self):
items = [[k, self[k]] for k in self]
tmp = self.__map, self.__end
del self.__map, self.__end
inst_dict = vars(self).copy()
self.__map, self.__end = tmp
if inst_dict:
return (self.__class__, (items,), inst_dict)
return self.__class__, (items,)
def keys(self):
return list(self)
setdefault = DictMixin.setdefault
update = DictMixin.update
pop = DictMixin.pop
values = DictMixin.values
items = DictMixin.items
iterkeys = DictMixin.iterkeys
itervalues = DictMixin.itervalues
iteritems = DictMixin.iteritems
def __repr__(self):
if not self:
return '%s()' % (self.__class__.__name__,)
return '%s(%r)' % (self.__class__.__name__, self.items())
def copy(self):
return self.__class__(self)
@classmethod
def fromkeys(cls, iterable, value=None):
d = cls()
for key in iterable:
d[key] = value
return d
def __eq__(self, other):
if isinstance(other, OrderedDict):
if len(self) != len(other):
return False
for p, q in zip(self.items(), other.items()):
if p != q:
return False
return True
return dict.__eq__(self, other)
def __ne__(self, other):
return not self == other
| gpl-3.0 |
azumimuo/family-xbmc-addon | plugin.video.playlistLoader/resources/lib/chardet/utf8prober.py | 2919 | 2652 | ######################## BEGIN LICENSE BLOCK ########################
# The Original Code is mozilla.org code.
#
# The Initial Developer of the Original Code is
# Netscape Communications Corporation.
# Portions created by the Initial Developer are Copyright (C) 1998
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
# Mark Pilgrim - port to Python
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301 USA
######################### END LICENSE BLOCK #########################
from . import constants
from .charsetprober import CharSetProber
from .codingstatemachine import CodingStateMachine
from .mbcssm import UTF8SMModel
ONE_CHAR_PROB = 0.5
class UTF8Prober(CharSetProber):
def __init__(self):
CharSetProber.__init__(self)
self._mCodingSM = CodingStateMachine(UTF8SMModel)
self.reset()
def reset(self):
CharSetProber.reset(self)
self._mCodingSM.reset()
self._mNumOfMBChar = 0
def get_charset_name(self):
return "utf-8"
def feed(self, aBuf):
for c in aBuf:
codingState = self._mCodingSM.next_state(c)
if codingState == constants.eError:
self._mState = constants.eNotMe
break
elif codingState == constants.eItsMe:
self._mState = constants.eFoundIt
break
elif codingState == constants.eStart:
if self._mCodingSM.get_current_charlen() >= 2:
self._mNumOfMBChar += 1
if self.get_state() == constants.eDetecting:
if self.get_confidence() > constants.SHORTCUT_THRESHOLD:
self._mState = constants.eFoundIt
return self.get_state()
def get_confidence(self):
unlike = 0.99
if self._mNumOfMBChar < 6:
for i in range(0, self._mNumOfMBChar):
unlike = unlike * ONE_CHAR_PROB
return 1.0 - unlike
else:
return unlike
| gpl-2.0 |
idaholab/raven | scripts/conversionScripts/toDistributions_MVN_Node.py | 2 | 2699 | # Copyright 2017 Battelle Energy Alliance, LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import xml.etree.ElementTree as ET
import xml.dom.minidom as pxml
import os
def convert(tree,fileName=None):
"""
Converts input files to be compatible with merge request #269 (wangc/nd_dist_dev). Removes the <data_filename> and <workingDir> node
from the <MultivariateNormal> block, add <covariance> child node to the parent node <MultivariateNormal>.
@ In, tree, xml.etree.ElementTree.ElementTree object, the contents of a RAVEN input file
@Out, tree, xml.etree.ElementTree.ElementTree object, the modified RAVEN input file
"""
simulation = tree.getroot()
if simulation.tag!='Simulation': return tree #this isn't an input file
for child in simulation:
if child.tag == 'Distributions':
MVNNode = child.find('MultivariateNormal')
if MVNNode is not None:
dataFileNameNode = MVNNode.find('data_filename')
covFileName = dataFileNameNode.text
dataWorkingDirNode = MVNNode.find('workingDir')
covFileDir = dataWorkingDirNode.text
if '~' in covFileDir: covFileDir = os.path.expanduser(covFileDir)
if os.path.isabs(covFileDir): covFileDir = covFileDir
elif "runRelative" in dataWorkingDirNode.attrib:
covFileDir = os.path.abspath(covFileName)
else:
if covFileDir == None: raise IOError('Relative working directory is requested but the given name is None' )
covFileDir = os.path.join(os.getcwd(),covFileDir.strip())
covFileName = os.path.join(covFileDir,covFileName.strip())
MVNNode.remove(dataFileNameNode)
MVNNode.remove(dataWorkingDirNode)
covData = ''
if os.path.isfile(covFileName):
for line in file(covFileName,'r'):
covData += line.rstrip() + ' '
else:
print 'Error! The following file is not exist: ', covFileName
covNode = ET.Element('covariance')
covNode.text = covData
MVNNode.append(covNode)
else:
print 'No conversion needed'
return tree
if __name__=='__main__':
import convert_utils
import sys
convert_utils.standardMain(sys.argv,convert)
| apache-2.0 |
mokieyue/mopidy | mopidy/httpclient.py | 4 | 1669 | from __future__ import unicode_literals
import platform
import mopidy
"Helpers for configuring HTTP clients used in Mopidy extensions."
def format_proxy(proxy_config, auth=True):
"""Convert a Mopidy proxy config to the commonly used proxy string format.
Outputs ``scheme://host:port``, ``scheme://user:pass@host:port`` or
:class:`None` depending on the proxy config provided.
You can also opt out of getting the basic auth by setting ``auth`` to
:class:`False`.
.. versionadded:: 1.1
"""
if not proxy_config.get('hostname'):
return None
port = proxy_config.get('port')
if not port or port < 0:
port = 80
if proxy_config.get('username') and proxy_config.get('password') and auth:
template = '{scheme}://{username}:{password}@{hostname}:{port}'
else:
template = '{scheme}://{hostname}:{port}'
return template.format(scheme=proxy_config.get('scheme') or 'http',
username=proxy_config.get('username'),
password=proxy_config.get('password'),
hostname=proxy_config['hostname'], port=port)
def format_user_agent(name=None):
"""Construct a User-Agent suitable for use in client code.
This will identify use by the provided ``name`` (which should be on the
format ``dist_name/version``), Mopidy version and Python version.
.. versionadded:: 1.1
"""
parts = ['Mopidy/%s' % (mopidy.__version__),
'%s/%s' % (platform.python_implementation(),
platform.python_version())]
if name:
parts.insert(0, name)
return ' '.join(parts)
| apache-2.0 |
tboyce1/home-assistant | homeassistant/components/mqtt/server.py | 7 | 3068 | """
Support for a local MQTT broker.
For more details about this component, please refer to the documentation at
https://home-assistant.io/components/mqtt/#use-the-embedded-broker
"""
import asyncio
import logging
import tempfile
import voluptuous as vol
from homeassistant.const import EVENT_HOMEASSISTANT_STOP
import homeassistant.helpers.config_validation as cv
REQUIREMENTS = ['hbmqtt==0.9.1']
DEPENDENCIES = ['http']
# None allows custom config to be created through generate_config
HBMQTT_CONFIG_SCHEMA = vol.Any(None, vol.Schema({
vol.Optional('auth'): vol.Schema({
vol.Optional('password-file'): cv.isfile,
}, extra=vol.ALLOW_EXTRA),
vol.Optional('listeners'): vol.Schema({
vol.Required('default'): vol.Schema(dict),
str: vol.Schema(dict)
})
}, extra=vol.ALLOW_EXTRA))
@asyncio.coroutine
def async_start(hass, server_config):
"""Initialize MQTT Server.
This method is a coroutine.
"""
from hbmqtt.broker import Broker, BrokerException
try:
passwd = tempfile.NamedTemporaryFile()
if server_config is None:
server_config, client_config = generate_config(hass, passwd)
else:
client_config = None
broker = Broker(server_config, hass.loop)
yield from broker.start()
except BrokerException:
logging.getLogger(__name__).exception("Error initializing MQTT server")
return False, None
finally:
passwd.close()
@asyncio.coroutine
def async_shutdown_mqtt_server(event):
"""Shut down the MQTT server."""
yield from broker.shutdown()
hass.bus.async_listen_once(
EVENT_HOMEASSISTANT_STOP, async_shutdown_mqtt_server)
return True, client_config
def generate_config(hass, passwd):
"""Generate a configuration based on current Home Assistant instance."""
from homeassistant.components.mqtt import PROTOCOL_311
config = {
'listeners': {
'default': {
'max-connections': 50000,
'bind': '0.0.0.0:1883',
'type': 'tcp',
},
'ws-1': {
'bind': '0.0.0.0:8080',
'type': 'ws',
},
},
'auth': {
'allow-anonymous': hass.config.api.api_password is None
},
'plugins': ['auth_anonymous'],
}
if hass.config.api.api_password:
username = 'homeassistant'
password = hass.config.api.api_password
# Encrypt with what hbmqtt uses to verify
from passlib.apps import custom_app_context
passwd.write(
'homeassistant:{}\n'.format(
custom_app_context.encrypt(
hass.config.api.api_password)).encode('utf-8'))
passwd.flush()
config['auth']['password-file'] = passwd.name
config['plugins'].append('auth_file')
else:
username = None
password = None
client_config = ('localhost', 1883, username, password, None, PROTOCOL_311)
return config, client_config
| apache-2.0 |
ryuunosukeyoshi/PartnerPoi-Bot | lib/youtube_dl/extractor/camdemy.py | 64 | 5772 | # coding: utf-8
from __future__ import unicode_literals
import re
from .common import InfoExtractor
from ..compat import (
compat_urllib_parse_urlencode,
compat_urlparse,
)
from ..utils import (
clean_html,
parse_duration,
str_to_int,
unified_strdate,
)
class CamdemyIE(InfoExtractor):
_VALID_URL = r'https?://(?:www\.)?camdemy\.com/media/(?P<id>\d+)'
_TESTS = [{
# single file
'url': 'http://www.camdemy.com/media/5181/',
'md5': '5a5562b6a98b37873119102e052e311b',
'info_dict': {
'id': '5181',
'ext': 'mp4',
'title': 'Ch1-1 Introduction, Signals (02-23-2012)',
'thumbnail': r're:^https?://.*\.jpg$',
'creator': 'ss11spring',
'duration': 1591,
'upload_date': '20130114',
'view_count': int,
}
}, {
# With non-empty description
# webpage returns "No permission or not login"
'url': 'http://www.camdemy.com/media/13885',
'md5': '4576a3bb2581f86c61044822adbd1249',
'info_dict': {
'id': '13885',
'ext': 'mp4',
'title': 'EverCam + Camdemy QuickStart',
'thumbnail': r're:^https?://.*\.jpg$',
'description': 'md5:2a9f989c2b153a2342acee579c6e7db6',
'creator': 'evercam',
'duration': 318,
}
}, {
# External source (YouTube)
'url': 'http://www.camdemy.com/media/14842',
'info_dict': {
'id': '2vsYQzNIsJo',
'ext': 'mp4',
'title': 'Excel 2013 Tutorial - How to add Password Protection',
'description': 'Excel 2013 Tutorial for Beginners - How to add Password Protection',
'upload_date': '20130211',
'uploader': 'Hun Kim',
'uploader_id': 'hunkimtutorials',
},
'params': {
'skip_download': True,
},
}]
def _real_extract(self, url):
video_id = self._match_id(url)
webpage = self._download_webpage(url, video_id)
src_from = self._html_search_regex(
r"class=['\"]srcFrom['\"][^>]*>Sources?(?:\s+from)?\s*:\s*<a[^>]+(?:href|title)=(['\"])(?P<url>(?:(?!\1).)+)\1",
webpage, 'external source', default=None, group='url')
if src_from:
return self.url_result(src_from)
oembed_obj = self._download_json(
'http://www.camdemy.com/oembed/?format=json&url=' + url, video_id)
title = oembed_obj['title']
thumb_url = oembed_obj['thumbnail_url']
video_folder = compat_urlparse.urljoin(thumb_url, 'video/')
file_list_doc = self._download_xml(
compat_urlparse.urljoin(video_folder, 'fileList.xml'),
video_id, 'Downloading filelist XML')
file_name = file_list_doc.find('./video/item/fileName').text
video_url = compat_urlparse.urljoin(video_folder, file_name)
# Some URLs return "No permission or not login" in a webpage despite being
# freely available via oembed JSON URL (e.g. http://www.camdemy.com/media/13885)
upload_date = unified_strdate(self._search_regex(
r'>published on ([^<]+)<', webpage,
'upload date', default=None))
view_count = str_to_int(self._search_regex(
r'role=["\']viewCnt["\'][^>]*>([\d,.]+) views',
webpage, 'view count', default=None))
description = self._html_search_meta(
'description', webpage, default=None) or clean_html(
oembed_obj.get('description'))
return {
'id': video_id,
'url': video_url,
'title': title,
'thumbnail': thumb_url,
'description': description,
'creator': oembed_obj.get('author_name'),
'duration': parse_duration(oembed_obj.get('duration')),
'upload_date': upload_date,
'view_count': view_count,
}
class CamdemyFolderIE(InfoExtractor):
_VALID_URL = r'https?://(?:www\.)?camdemy\.com/folder/(?P<id>\d+)'
_TESTS = [{
# links with trailing slash
'url': 'http://www.camdemy.com/folder/450',
'info_dict': {
'id': '450',
'title': '信號與系統 2012 & 2011 (Signals and Systems)',
},
'playlist_mincount': 145
}, {
# links without trailing slash
# and multi-page
'url': 'http://www.camdemy.com/folder/853',
'info_dict': {
'id': '853',
'title': '科學計算 - 使用 Matlab'
},
'playlist_mincount': 20
}, {
# with displayMode parameter. For testing the codes to add parameters
'url': 'http://www.camdemy.com/folder/853/?displayMode=defaultOrderByOrg',
'info_dict': {
'id': '853',
'title': '科學計算 - 使用 Matlab'
},
'playlist_mincount': 20
}]
def _real_extract(self, url):
folder_id = self._match_id(url)
# Add displayMode=list so that all links are displayed in a single page
parsed_url = list(compat_urlparse.urlparse(url))
query = dict(compat_urlparse.parse_qsl(parsed_url[4]))
query.update({'displayMode': 'list'})
parsed_url[4] = compat_urllib_parse_urlencode(query)
final_url = compat_urlparse.urlunparse(parsed_url)
page = self._download_webpage(final_url, folder_id)
matches = re.findall(r"href='(/media/\d+/?)'", page)
entries = [self.url_result('http://www.camdemy.com' + media_path)
for media_path in matches]
folder_title = self._html_search_meta('keywords', page)
return self.playlist_result(entries, folder_id, folder_title)
| gpl-3.0 |
zploskey/servo | tests/wpt/harness/wptrunner/vcs.py | 156 | 1368 | # This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
import subprocess
from functools import partial
from mozlog import get_default_logger
logger = None
def vcs(bin_name):
def inner(command, *args, **kwargs):
global logger
if logger is None:
logger = get_default_logger("vcs")
repo = kwargs.pop("repo", None)
log_error = kwargs.pop("log_error", True)
if kwargs:
raise TypeError, kwargs
args = list(args)
proc_kwargs = {}
if repo is not None:
proc_kwargs["cwd"] = repo
command_line = [bin_name, command] + args
logger.debug(" ".join(command_line))
try:
return subprocess.check_output(command_line, stderr=subprocess.STDOUT, **proc_kwargs)
except subprocess.CalledProcessError as e:
if log_error:
logger.error(e.output)
raise
return inner
git = vcs("git")
hg = vcs("hg")
def bind_to_repo(vcs_func, repo):
return partial(vcs_func, repo=repo)
def is_git_root(path):
try:
rv = git("rev-parse", "--show-cdup", repo=path)
except subprocess.CalledProcessError:
return False
return rv == "\n"
| mpl-2.0 |
hesseltuinhof/mxnet | python/mxnet/gluon/model_zoo/vision/__init__.py | 1 | 3746 | # coding: utf-8
# pylint: disable=wildcard-import, arguments-differ
r"""Module for pre-defined neural network models.
This module contains definitions for the following model architectures:
- `AlexNet`_
- `DenseNet`_
- `Inception V3`_
- `ResNet V1`_
- `ResNet V2`_
- `SqueezeNet`_
- `VGG`_
You can construct a model with random weights by calling its constructor:
.. code:: python
import mxnet.gluon.models as models
resnet18 = models.resnet18_v1()
alexnet = models.alexnet()
squeezenet = models.squeezenet1_0()
densenet = models.densenet_161()
We provide pre-trained models for all the models except ResNet V2.
These can constructed by passing
``pretrained=True``:
.. code:: python
import mxnet.gluon.models as models
resnet18 = models.resnet18_v1(pretrained=True)
alexnet = models.alexnet(pretrained=True)
Pretrained model is converted from torchvision.
All pre-trained models expect input images normalized in the same way,
i.e. mini-batches of 3-channel RGB images of shape (N x 3 x H x W),
where N is the batch size, and H and W are expected to be at least 224.
The images have to be loaded in to a range of [0, 1] and then normalized
using ``mean = [0.485, 0.456, 0.406]`` and ``std = [0.229, 0.224, 0.225]``.
The transformation should preferrably happen at preprocessing. You can use
``mx.image.color_normalize`` for such transformation::
image = image/255
normalized = mx.image.color_normalize(image,
mean=mx.nd.array([0.485, 0.456, 0.406]),
std=mx.nd.array([0.229, 0.224, 0.225]))
.. _AlexNet: https://arxiv.org/abs/1404.5997
.. _DenseNet: https://arxiv.org/abs/1608.06993
.. _Inception V3: http://arxiv.org/abs/1512.00567
.. _ResNet V1: https://arxiv.org/abs/1512.03385
.. _ResNet V2: https://arxiv.org/abs/1512.03385
.. _SqueezeNet: https://arxiv.org/abs/1602.07360
.. _VGG: https://arxiv.org/abs/1409.1556
"""
from .alexnet import *
from .densenet import *
from .inception import *
from .resnet import *
from .squeezenet import *
from .vgg import *
def get_model(name, **kwargs):
"""Returns a pre-defined model by name
Parameters
----------
name : str
Name of the model.
pretrained : bool
Whether to load the pretrained weights for model.
classes : int
Number of classes for the output layer.
Returns
-------
HybridBlock
The model.
"""
models = {'resnet18_v1': resnet18_v1,
'resnet34_v1': resnet34_v1,
'resnet50_v1': resnet50_v1,
'resnet101_v1': resnet101_v1,
'resnet152_v1': resnet152_v1,
'resnet18_v2': resnet18_v2,
'resnet34_v2': resnet34_v2,
'resnet50_v2': resnet50_v2,
'resnet101_v2': resnet101_v2,
'resnet152_v2': resnet152_v2,
'vgg11': vgg11,
'vgg13': vgg13,
'vgg16': vgg16,
'vgg19': vgg19,
'vgg11_bn': vgg11_bn,
'vgg13_bn': vgg13_bn,
'vgg16_bn': vgg16_bn,
'vgg19_bn': vgg19_bn,
'alexnet': alexnet,
'densenet121': densenet121,
'densenet161': densenet161,
'densenet169': densenet169,
'densenet201': densenet201,
'squeezenet1.0': squeezenet1_0,
'squeezenet1.1': squeezenet1_1,
'inceptionv3': inception_v3,
}
name = name.lower()
if name not in models:
raise ValueError(
'Model %s is not supported. Available options are\n\t%s'%(
name, '\n\t'.join(sorted(models.keys()))))
return models[name](**kwargs)
| apache-2.0 |
gmacchi93/serverInfoParaguay | apps/venv/lib/python2.7/site-packages/django/db/backends/oracle/compiler.py | 407 | 2180 | from django.db.models.sql import compiler
class SQLCompiler(compiler.SQLCompiler):
def as_sql(self, with_limits=True, with_col_aliases=False, subquery=False):
"""
Creates the SQL for this query. Returns the SQL string and list
of parameters. This is overridden from the original Query class
to handle the additional SQL Oracle requires to emulate LIMIT
and OFFSET.
If 'with_limits' is False, any limit/offset information is not
included in the query.
"""
if with_limits and self.query.low_mark == self.query.high_mark:
return '', ()
# The `do_offset` flag indicates whether we need to construct
# the SQL needed to use limit/offset with Oracle.
do_offset = with_limits and (self.query.high_mark is not None
or self.query.low_mark)
if not do_offset:
sql, params = super(SQLCompiler, self).as_sql(
with_limits=False,
with_col_aliases=with_col_aliases,
subquery=subquery,
)
else:
sql, params = super(SQLCompiler, self).as_sql(
with_limits=False,
with_col_aliases=True,
subquery=subquery,
)
# Wrap the base query in an outer SELECT * with boundaries on
# the "_RN" column. This is the canonical way to emulate LIMIT
# and OFFSET on Oracle.
high_where = ''
if self.query.high_mark is not None:
high_where = 'WHERE ROWNUM <= %d' % (self.query.high_mark,)
sql = (
'SELECT * FROM (SELECT "_SUB".*, ROWNUM AS "_RN" FROM (%s) '
'"_SUB" %s) WHERE "_RN" > %d' % (sql, high_where, self.query.low_mark)
)
return sql, params
class SQLInsertCompiler(compiler.SQLInsertCompiler, SQLCompiler):
pass
class SQLDeleteCompiler(compiler.SQLDeleteCompiler, SQLCompiler):
pass
class SQLUpdateCompiler(compiler.SQLUpdateCompiler, SQLCompiler):
pass
class SQLAggregateCompiler(compiler.SQLAggregateCompiler, SQLCompiler):
pass
| apache-2.0 |
AtomLaw/clamwin-0.1 | py/throb/throbImages.py | 1 | 261647 | #----------------------------------------------------------------------
# This file was generated by L:\Projects\ClamWin\py\throb\ENCODE~1.PY
#
from wxPython.wx import wxImageFromStream, wxBitmapFromImage
import cStringIO
catalog = {}
index = []
class ImageClass: pass
def getscanprogress01Data():
    """Return the 'scanprogress01' PNG image data as a raw byte string."""
    return \
'\x89PNG\r\n\x1a\n\x00\x00\x00\rIHDR\x00\x00\x008\x00\x00\x01,\x08\x06\x00\
\x00\x00\x96\xb6p\'\x00\x00\x00\x04sBIT\x08\x08\x08\x08|\x08d\x88\x00\x00\
\x19\xf9IDATx\x9c\xed\x9dm\x8c$\xc7y\xdf\x7f{G\xf2j\x8f\xab\xbb"\xc5\xdc\xf5\
\xd14Y\xa2E\xb3)\x13\xd2H\x86\xa5\xb6N\x08G\x86a\x8f\x10 \x19!\x86\xb5\xf2\
\x8b<\x0e\x82xd\xc3\xc0\x18q\xe0\xb5c\xc0\xf3!\x08\xd6\x08\x02l\x10,d\xf4I#\
\xc0\xb1G\x92\x15\x0f\xe1X\x1a\xdb\x028\x90d]S\xa2\xcc>\x91\xbc\xeb#H_\x1dE\
\xde\xd6\xca$7\x1fz^zfzf\xbagqncW\xf9\x16\xa6\x1d\xfaT\xae\xa5\xcc\xb92\x82\
\xcb\x82U\x82\xc6\x98\x0c\xd7\xda\xacy<\xee\x98~Iz\xec\xd5Z8\x8eD\n9\xf1\
\xba\x03\x0c\x7f\xf0GM\x9bU\x8f\x85U\x82\xc6@u\xbba\xb3\xc8\xb9\xf1\x96\x1f\
\x83V[\xd0U\x97\xd8\xdb\xfd%\x1cgr\x17\xd5ZS\xf9\x9d:\xcd/\xfbS\xcb\xcc\xb9\
\x0e\xdb\xd5\x8f!\xe4::\xd4\x94\xb7j\x84\xa1N}OV[\xb0Pxt*9\x00)%\xe5Ox\xa9\
\xca\xacT\n\xb8\xee%\x94#\xc9\xe5\x14\x9b\xc5\\\xa6{Jl\xc1j\xb5\x9a\xa9\x90.\
rn\xfak=\xcf\xa5X\xc8\x11\x84\x9ar\xa9\x94x\x8d\x94\xe0\xe5\xc4\xc0g\xf9|\
\x1e\xe9\xe4\x13\xafO\xbao\xab]4+v\xb67\x17^\xc7\xf1\x132\x19\xe7\xcf\x95\
\xb6`\xb3\xe9\xa3\xb5!\x97\xb2o\x87\x1a\xfc [\x1d+#\xe8\xfb\xd7)U\xf6\x80\
\xd9\xc7|\x1a\x9c(\xdb\xc7\x1d\x96\x95\xed\xc5\\;\x0f\xac\x8eA?\x00)\x0c\xa2\
3u\xb5\xdb\xc9\x9a\xca\xb1U\xb6\x01Z1N\xd5j\xcdv\xf1\x99a\x8d`W\x12n\x16s=u\
\xadR\xce\'^\xab\xb5\xa1Vo\xa5\xee\xa6\xaac22&\x9a*\xe2\xf5M\x83\xd5\x16\xac\
\xedUGT\xabqp\x95\xc3\xd6\xf6t;P\xce\x85\x9c\xdb/\xb3\xd9> \x08"C`\x9cd\xb5Z\
\xc5u]|\x7fpXX\x152"\x1d7\x00\xf2\xf9t\x93\xbb\xab\x06\x0b\x95b\xd4\xca\xd9%\
:L\x0eV8\xd1;\x8e\xa4Y\xaf\x10\x86\x1a\xa5\x92\xaf\x91RL}h\xd3\xba\xaaU\x82\
\xcdf\x93\x9c\x9b\xcel\x08\xa0\x94\x83R\xb3\xdbd\xd3\x8cC\xab\xcb\xa5ECk\xc3\
\xf6\xce\x1e\xadV\x90\xfa7+\xeb\xa2\xc6\x18\x8a\xa5]\xb4N?\xe3gY\xc9w\xb12\
\x82\xedv\x80\xef\x87\x0b\xaf\xe7D\x17=\xee\xb0J0\xcb\xe0of\xb8v\x1eX\x1d\
\x83\xadv@._E\xa4\x98\xf1g\x11\x18\xb3\xc0\xba\x90\x89\xa4\xe2\x92\xd6B)\xb0\
\x10)\xea8\x92J)O>\xef\xa6\xb2\x93NB\x10\x84\xec\xd5Z\xec\xd5Z3\xfd\xde:A\
\xd7u\xa8\xed\x96\x90r>b](\xe5P\xdd*\x92\xcf\xbb\x94\xca{\x99\x7fo\x9d\xe0\
\xee\xf6\xe6\x00\xb9`\x8e\xb1\xe6H\xd1\x1b\xcfy\xcf\xa5R\xce\xb3\xb3\xdb\xcc\
T\x86U\x82\x85\xbc\xdb\xd3-\xbb\x9a\xca\xbc\x93\xf9\xefW\x8b|\xac\x18\x99\
\xf9K\x9b\x9e\x1d\x82\xb3\xea\xa2^l\x05\x14\x84\xa4&7\xa9\xbe\xfd\xd8k)%;\
\xdbU\xc6iwI\xe5,l=\x18Z\xd4\xc2\xc2\xb0\xcf(\xcb\x9a\x13\x96\xa4\x8b:\x8e\
\x1c\x91\xa6\xedv\xb0\x8c\xaa\x97C\xb0Q\xab \xe5\xe0\xa3\xdf\xd9md\x1eO\xb3`\
)\xba\xe809\x00\xb5\xa4\xcdG\'\xca\xf6q\xc7R\x08&\xed\x9f\xd1\xe6\x98*\xdbI\
\xd8,\xed\x8e\x18\x97\x9a\xad\xe9\x1b\x10l`q\x04\xc5Z\xefe\xdb\x0fi/\xc1<\
\x91\x04\xab\xa6\xfb\xeaV\x81\x92\xca\x03p#x\xcaV\xd18N_\nw;\xfbJL\xf7J\xf5\
\xb7\x86|\xac\xe8q\x06\xc1\xd6vmfW\x99\x10\xe0\xe5N\x0f|\xa6cC7\x8d\xe9\xde*\
\xc1 \x8cn\xa0\xbb\x98(\x16s\x143\xeek\x99\x04?H~R\x93L\xf7\xd6\xa5h\xf1\x97\
\xb6\xd1\xda\xbe\x84\xd4\x1aZ\xed\xd1\xcf\xa7uU\xab\x04\xab\xd5*\xc1UMas\x97\
\xbd\xda_\xe3\x07\xf3\x0b\x16c\xa2\x96\xab7G[oe\xa6\xfb0\xd4T\xb7?;W\x19\xb6\
\xf0\x96\xd7dN\x8cNYqbt\xca\x80\x13\xa3S\nX\x152\xb9\x9c\xea\xbd\xae\xd5[V\
\xdcc\xbf\xb7]\xef\xadF\xa4\x94\xb8\xeaR\xa6\xdf[%\xe8\xc4\xbaf\xbbmG\xb96f\
\xd0~#\xe4\x99L\xbf?1:\xd9\xc0\x89\xd1i\x81\xf8\xffS\x93\x99y\xd7}leT,\x16\
\xa97\x12\xd4\xff\x19\xea\x8bo\x14*\x97J\x8c\x9bZ\x17n\xba\x1f\x87\x13\xa3\
\xd3\x02\xf1\x967:\xd9\xdd\xd2\x1c{\xad\xa4\xbd\xd8\xe7$\xa3SZX%\x18w\x99)%\
\xa8\x94\xf3\x99\xdd]q\x08\x01yo\xbc\xd1)\r\x16jt\xaa\x94\x0b\xa9\x83\x96\
\xd3`\x9c\xd1i\x12\xacK\xd1F\xcb,dG\xfd8\xa3\xd34X\x172\xc6@\xbdi:\x91h\x1aw\
\x8e\xfd\xa0\xdd\xf2\x82\xd0\xccD\x0e\x16$E\x8d\x89\x9ev\xb5\xba\x93\xea\xfa\
E\xeeO\xb5\xbe\xeb~YX\x89\xe9\xbeZ\xad\xa2\xd4\x1aR\x1c\xd2l6Q\x8eC\x90\xb0\
\x1b\xc1FXA\xb7\xbe\xf8\xeb\x85\x9b\xee]\x05^.Z\x90N\xdb\xbbm#\xac\xa0\x8b\
\xa5\x99\xee\x57\x11Vpdw\xdd\xdb\x08+8\xf2\xbb\xeeg\t+h6\x9b\x99,\x01\xc7j\xc1\
\xab\xb5\xa1\x95\xd1\x96s\x12V\xb0(\x9c\x84\x15X\xc2\t\xc1,8\t+\x88\xe1$\xac\
\xc0\x12N<\xbcYq\xe2\xe1\xcd\x80\x13\x0fo\n\x9c\x84\x15d\xc1IXA\x0c\xc7\xde\
\xc3\xdb}\xd2\xae\xeb\xf2\xd9O\xfd&o\xbf\xa0x\xe3\x8d[\x9c>}\x1e\x80\xa7\xfc\
W\xf9J\xfb;\xf8\xdf\xfc\x06\xae\xebb\x8c!\x0c_\xc4\x98\xdbV\xef\xc3.Aq\x1a!.\
\x80x\x1f\xca5\xd4\x1b\x97\x81\xb7\xf1\xe4\xdf\xefc\xfe\x0e\xf4\xc1\xdd`^\
\xc7p\x00\xe6\x10x\x18\xf5\xae\x87i\x87\xa7p\xce_\xc4Q.R\n\x1cy\n\xf3\xfd\
\xdb\x04\x7f\xef\x13\x86/\x02\xb3\x93\xb6B\xd0u]\x94z\x17\xfa\xb5\xdb\xd4\
\xdb\xfbh\x1d\xa2\xf5>B\xacc\xccu\x84X\x07\x88\x88\x01\x987\xa3?ot<P\xaf\x1d\
\x12\xdc\xba\x11q6@\xa7\xf5\x1d\xe9\x90\xf3r\xc8\x8bpv\xfd%n\xeb\xaff\xbe\
\xb7\x99\t\x9e\x93\x92\x87\x95\x8bt\xdeA\x10\xde\xa4\xd1\xba\x16\xddXG\x1etI\
U+\x1fI]\xe6\xd6\x7fn\xc0\x9d\x9d7\xaf\x1bB\xad\xa97\xa2yT\xbeM\xe0\xfdH\x9e\
\x9c\x07_n\xb5x%\xa5\x17&3A\xc7qPJap\x08\xb5\xc1o?\x1b=t\x01\x95R>kq\x03\xd8\
\xfe\xb7\x83\x06+!\x04\xe5NN\x1a\xf3\x8f\x86F\xcb\x07\x04^\xce\xe3\x83\xdeY\
\xbe\xdc\xfa?S\x89\xa6&(\xc4Y\\\xf7\x87A8\x04\x1a\xcc\xfeM\xccA\xd4\xe5\xb6\
\xe6$6\t\xbb\xd5M\x8c1T\xaauD\xa7\xef\xb6\xda\x01\xad6x\xb9\xf7\x90\xcf\xdf\
\xc3\x17\x1a_\x18+\x9cR\x11t\x1c\x07\xf5H\x8e\xf0{`4\x98\xfd}\xca\x1f\xff\
\xf1\x81kZ\x7f\xdd\xe2\xf3\x8d\x06\xd7\xaf\x87\x04A\x80\xd6\x9a0\x0c\'no\x96\
R\xe28\x0eRJ\x94Rx\x9e\x87\xfb\x98\x8b\xf7\x81\xc1\xbc\x87\xc3\xcb\xaf\xd2V\
\xadC\xf4\x06\xed\xf6\r\xf2O\xfc\x14r-y\xe2\x9dJ\xd0\xfbP\x1e\xb1q\x89 \xb8\
\x891\xfb\x946\xfb\xc4\x8a\xc5\x8f\xd2l\xfeE\xe2&\x03)%o"\x90\xf7\x8e7\x0b\
\xde\xbem\xf0}\xbf\xf7\xfb\xbd\xbd\xbdi\xb7\xc3Vu\x87o}7\xe0\xdc\x85\x8eJ\
\x084\x9e\xf4q\xe4\x19\xf6\xf6j\x94J\x83\xe9\x04\xc7\x12<\'%O\xe4\x0b|\xfb\
\xba!\xbcr\r\xb1\x0e\x95\xf2G\xa8\x94\xa3\xef\x8d1\xb8\x8f\xbd\x97\xe2\xcf~\
\x1c\xe7\xfew"\xc4\xdd\xb8\xaeC\xceU\xbd9\xd0\xf7C\xc2P\xf7V\x871\x194\x16:|\
\x99\x9b7\x9f\xe3\xeas!\xfe\x95\xa7\xf1\xaf\xf4\xe3/Z_i\xd2x\xf2i\xde\xfd\
\xe0\x1b\xbc\xa8\xaf\x11\xber\x8as\x17\x14`\x08u\xe4Cl|\xa9E\xf5\xdfW&\x13<\
\'%\xef{\x9f\xc7\xd7\x03\x8d1P\xf9\xe5\x0f\xf7\xbe\xdb\xfc\xc4/\x93{\xd7\x07\
z\xa2\\t\xff\x17\xd1b\xd7\xec\x9b^\x97r]\x07\xd7\xed\xb7`\x97`\x1a\xa2\x00\
\x98C\x0c\xd1Tb\xf6\r\xe1\x8dNw\x17\x1b\\p68\'\xf7y\xe6\xaa\xcf\xc6E\x05\xc0\
k\xc6P\xfbb@\xb9\xb2C\xbd\xb6\x9dLpss\x93\x9f(\x14\xf1\x9f\xd5\x80A\xf0\x1a\
\xbb{5^\xd6\x86\xd3\xc6\xc4\x88\x89\xde\xeb>Q\xf0\x9f\x0e\xf1r\x0f!b\xbb,\
\xe2\x10C\x7fG8\x01\x1cv+1\xbd\xeb\xcc~\xb4\xf4\x12\x1d\xdf\x841k\x08\xb1\
\xce\xe3\x0fE\xady\xf3\xf5\xb3\x9c\xdfp0\x0c\x1a\x87\x07\x08\xba\xae\xcbf\
\xa9\x12m\x85\xec\x94\xac\xf5M\x8c\xd6\x9c\x16t\x1aJ\x0c\xb5\xde\xf0\r\x1aZ\
\xed\xab(%\x91Rr\xfe\x8c`m}\x0c\x1b\xe0p\x1f\xd6\xd6b\xca4\xc0\x9a\xc1\x1c\n\
8\x14\x98\x03C\x18j\x82N\x88\x82@`0}\xa2tZ\xd3\xec\x13\xfeC\x80\xb8W\r\xb8\
\xe5z\x04\xf3\xf9<\xdb\xdb\xff\x95P\xbf\x898\x0f\xe6\xd6\xcb\x84\xe1\xb71\
\xe65\x84\xb8\xbbW\xf8Xr\xb1\x0f\x0c\x86 \xd0\x80\xee\xbc\x1f\xbd~\xf83\x93\
\xf0Ju\xe6\xdcn9\xddz\x04\x02\x8c\xe9\xcc\xbf\x87\x9d\xd6|\x03u\xff\x9d\x04\
\xdf\xfd;\xc4\xb9\x1f\x1a%X.W\xf0\x83\xdb=\x01\xa1\xf5sQy\xe2\xee\xe8FDo\xb4\
%\x90\x12\xa3\x1f%s\x9f\xfa\x99\x89}\x13\x84!\xdc!PJq\xc1q\xf8\x9a\xefctD\
\x0c!\x10\x03$7\xa2\x87r\x01\x82\x17\xbf3JPH\x85 \xdaC\xf6B\x10p\xbaW\x95 \
\xd6p\x89\xe4\xe2\xdf9\x8e\x9c\xbaf3f\xd4l8<6M\xe7]\xf0|\x88\xdc\x10H)\xf1r9\
\xc2\x1b!\xfe\x95 \xbaF\x08\x84\xe9\\\xddiI\xc4\x06\xeaAh_\x1b"h\x0e\xa3\xdd\
W:|\xa9OnJ\xab\r\xf3p\x1c9 5\'A\x9e\x17\xf8OO^\x15\x0b@)\x89\xe3\x08\xcc\xbe\
\x815\x81s\xd1\xe1\xdcy\xc9\xd7\xda~\xd4ME\xe7^LD\x12\x13\x8d\xcb.\xfa+\xfa\
\x03C\x10\xb41\xe6\xa5\x1e9\x91\x81\\\xe7\'\xa9!\xef\x99|q\xf7\xdb\xeeBY\xc4\
\x04\xd5Y!\xf0\xbc\x1c\xd2\x91\xb1[\xea\xdc\x93\x00A\xdf\x0b\xdc\x97\xa2\x87\
\xafb\x8c\x19 7Zc2\xb1Y \x84\xc0\xf3\xd4\xc4\xfc\xf8B\x88\x015M\x08\x831\xfd\
\xf79\xd7%\xd8\x08\t\x9e\tz\xe3\x92\xd8T\x061\x82\xe1\x8d\xe7{R2K\xab\xcd\
\x83a\x02i\xa0o\x86\xc8{\xfa\xc3@=\x10\xbd\xee\x92\x14B\x0c<\xb4~\x17\xed0_\
\x06\xb9\x99\xcb2\x9163\xac\xc0\xab\x07\x1c\x1c\xc7\xe9\x0b\xaa\xd8C\xebw\
\xd1\xe1\'i\x81\xdc8\x95\xac\xfbY\xbb\x1d\xcct\x84\x836\x01^|\xff4\xe0\xba*\
\xdaE\xdc\x9dF:\x180\x1bv\x07i\xfc\xc5,\xe4\x06E}\xf2\xf7Z\x9bHw5\xa4\xfe\
\xd7g\n~\xf0\xdcH\xb9\xefv\xdd\x81\xfa\x07\x08\x8a\x81o\x12$h\x06$i.\xf1y\
\xce\x86\xefI\x87/s{\xa8\xf5\xcf\x8a\xd1\t{\xc4\xf0+,\x8d\xb7$\x12\xf1\xd5\
\x84\r|k(\xab\xb8\xd6\xdf\x1f\xa9xh51_\xd5\xc3;&\xe2\xad\x15_\x13F\xd7\xce\
\xef\x005\xc6\xd0n\xfb\x08y7\x98\xd7\xa2\xfa\x87(\xc4\x84\xcc\xfcOVkC\xab\
\xb7\x9b~\xb83\x0e\xae\x06mm\x9a5\xc6`:\xa6\xfd\xaez\x17/\xbc\xaf\x8bZ\xea8\
\xfd\xb2\x87\x19\x0c\xb7\xe5\xa20f\xa2\xb7\x01)\x059\xd7\x19\xab\xb3u?\xdd\
\xdf\x87o]\r\xb8\x1eN\'+\xa5\xc0U\x17A\xacE!\xe7~0\xb6\xf5\xfb\xab\x91\xa4\
\x89\xde\x02\xa4\x14\x89:l|\xda0\xc0\xfa:\\L\x19\x9c\xa5\x94D\xc8u\x84\x10\
\x91Y\x7f\x8aK|\xf8\xd1Zw\xbe\xa4\x9d\x06\xba7;M\x17\xcd\xea\n\xef\x8d\xc3\
\x0eV\xb6\x95K@\xea\xa5U\xd6r\xe3\x8fl!\x04\x934\x99I\xab\xfaE\x8a\x1d\xfba\
\x05S\xbe\x8bO\x16\xa1\xd6\x98\xfd\t?\x18\xfe\xfdA\xd7\xd63\x19\xf1\x87\xb98\
\x07(\xe3\'\xf9\xc84b\xac\x052OB_\x8aZ^\xecMZE\xc0\xf2\xce\x9b\x88\xad\x07\
\x17_\x99\xed\x05s\x1aX\xeb\xa2\x86\xe9\xadb&\xbc[\x14\xac\x11\x14D\xa6@!@\
\x9c\x99\xdeV\xfe\xd5\x103A\xce\xdajm\xebB&\x8d\x94\x83\xe1\t9\xb6Cq\xf0O\
\x1f3\x1a\xbdV:\xd1\x0f\xbf\xeb\xba\xc2G\xd7 \xd1\xc4\x92Dn\xda\xfa\xd2*A!\
\x04\xb9\x9c3\xd5Rf\x8c\xc1\xf7\xc3\xc4\xf5c\xf7ooj\x91\x02O=\x14Y\xa5\xcd!m\
\xff\xc6\x80z\x97\xac,,H\xd9v\x9ctf@!\x04J\x8d\xea\x98\xf1y\xb3\xab\xb0\xbb\
\x1de[\n\x81\x94\xeb\x03\xf1\xbc\x03e\x92\xdc\x92+\xeb\xa2\x91\x7f\xdedR\xb6\
\xd3hI\xc3X\xe9\xe1nY\x94\xedi\x93\xca8\xa3\xd6\x91\x0f+\xc8*5\x8f\xccr\t@\
\x8771\x87\xc9\xae\xee$\xa4Q\xb6\x97\xb2\\J\x03c\x0cm\xff\xc6\xfc\x05MYa\xaf\
\xac\x8bf1\xd9\x8fC\x1a\xbf\xcd\x91\x1f\x83\xe3\xd0S\x08\x86\x9e\xd3\xf0c[\
\x1dA3{\xae\x0b\x91<\xbb\xc7\x0b\xef\xbd\xb2\x9c\xeaA\xa7R\xb4\x01\x82\xab\
\xb3\x8d\xbf.\xb9\xc9=|QR\xd40\xd5\xef>\x0f\xa6\x91K\xfax\xa1{\xb6\xa5\x9c\
\xbe\xe3b\x1az;2R\xb4\\_\xa0.\xa8\x8bv\xe1>\xea\xe0\\\x92\x03\x8a\xb0\x18\
\xfa;|cq\x0c\x7f\xaf\x94$\x084aj\x87M\xbfD\xebB\xc6}\xd4A\xc5\xc8\rV7\xa8H\'\
!\xa9\x81\xa4<C\xceu\x90i\x14\xf9\xa1\xf7\xa3-8\x87\x87R\x08\x81\xba\xd4W\
\x8eC\xad\x07\x82:\xb2\x97\xc7@\x18\xbaRNr\xbc\x85\x88\xdd\xf6\x90\x07m\x94`\
\x1a\xadvL_\x8b/e\xb4\xde\xb7b\x16<0\x86\xf7\xe4\x1e\x02\xfa\x89\x03&9_\x18Z\
0\x8fvQ1\xf4w\xf8u\x17\t\xeb\x93\xf8\x14\x11Z:H\xf8\xba6h}\xd0\xbb\x8d\xc9\
\xeb\xcd\xd1}\x05=\x82Ng\xbf\xc9\xc0\x8dw\x07\xcb\xf0\xc0\x19S\x87X\xebO\xde\
\x994\xb1I\xf7\x1c\xdb\x80\x90b^\x1f)\xb0\xd7Es\x8f\xe7h\xbe\xda\xc4h3\xda:\
\xe3\x9c\xeb\xf1\xcf&\x10r\x1fu\x10\xeb\x83?\x0e\x82Q\x93\xc5<0\x06\x84\x18\
\x1d;\xa7\xfa\x17\x1c\xf0\x84\xe7\xf5My\xd3dy\xbcu{\xef\x93\x97>\xea\x92\xc4\
\x91b\xf0\xdfE\x99\\\xee8\x02i\xae1`\x86\xae\xec\x11l\xb6\xaf\xb0\x86\xe0\
\xfd\xf1\r6\x93\x06s\xe2\x9b\xf4\x10k\x87\x8bq/\x99\xc1\xe1\xd1#X)\xff\x1c\
\xcd\xb6\xcf\xfd\x8e"\x9f\xffp\xbf\xd6\x84\xca3\xddO\x9a\x070\xf5\x9a\xf45F\
\x16\xf6\x84.\xaa\xc3kT\x7f\xe3\x934\x9f\n\x90\xf2<\xf9\xfcG\xc6V>\xf1~\x86\
\xcf\xc9M\xd5\xb7R\\\x93\xb2\x18c\xcc\x80\xa1j`\x9a\xf0\xaf<\xc5o\xfez\x89F\
\xb3\x8d\xe3\x9c\xe7\xa7\x0b\xf9\xbe\xd1\xd9\x0c\x164\xbe\x96\xd1q\x18j\x1dm\
\xf7\xe8\xfc\xd3\xc6\x10vD\xbf-t\xb7\x82\xba\x8f\xb8x^?xed\xa2\x7f\xfe\x996~\
\xfb\xaf\xd8\xbd\xb9O\xe9\xa3\xef\xa5P,\xd0l4\xa3f\x1fv\xf4%!\xde\x82\x9d\
\xeb\x12\'\xfcLcw\xb2\xc9\xde\x00\xee\xe3\x91\xecP\x0f\\"\x0c\xfbK\xb11\xba\
\xe8\xf7\xd0W\x9fd\xf7\x8f\xbf\x84\xd6\x86b\xb1\xc0{>\xf0\x9e\xd1R\xc7\xd6\
\xb86yQ\x9aQ0M\x9a\xdb\xd5\xc3\x8a\xf7\xe7\\\xd4\x03\x97p\xeesh\xf9\xcfR\xdd\
\xfe\xfd\xde\xf7\x13\x95m\xf3\xd2\xd7\xa9\xff\xc9_\xd1j_\xe5\xb1w\xbc\x83B\
\xa1\x10i\x12c\xc8imz]Tu\xb7\x1b\xcf)d.9\xa2\xa7d\xc7]t\xc6D\x116\xe7\xff\
\xc9y\xeeu\x14\xda\xecS\xab}\x9a\x7f\xb3Y\xa4Q\xfft\xef\xf7\x13\x97KB\x9c\
\xc5}\xfc\x87\x08\x82\x9b\xb4\xaf\\c\xf3#\xef\xa5P(\x10\x86\x01\xadV{\xb0\
\xcb\x9a(ZZ\xa9\xbeJ\xe5y.\xe1\xf5N\xec\xc4\x81\x815\x81\xb8k\xb4\x1es\x90\
\xfc\xc4\xc4\x19\x81\x13S\xde\xa3\xf1\xabq\x1fv\x11w\x9dE\xa9\x071\xe6\x800\
\xf8.\xbb\xff\xe3\xbf\xd3\xfez\x9bW\xf4\xe0p\x98HP\xca\x0b\x88S\x82B^\x01\
\xf0\xef\xb6\xeb\xfc\xb3\x9f\xce\xe1\xbdWQ,\x16\x08_\xd4\xe8\x7fx\x05\xdf\
\xfff\xef\x06\x82 \xec\xe53\x140\xb0\xba\xe8\xdfyl\x9c&\x08\xa5$\x18\xe0\xb6\
\x11\xe4?\x98G\x9e\x8b\xca|!\x0c\xf9\xf6\xb3\xcf\xb3\xfd\x1f\x7f\x97\xf6\xdf\
\xb6\x12\x7f7\xb1\x8bJ\xe5bn\x1b\x8c9\xc0\x0fn\xf0\xdf\xaaEX3\xfc\xf6\x7f\
\xd8\xa3\xf5\xb5\x10q\xd7:\xca}\x90|\xfe\xc3=\xd1\x1c\x04\x1a\xff\xea\x94\
\x85\xa9Y\xeb\xff\x9bB\xca\x00\x87\x87\x02\x90\xdc\xef(\xc4]\xeb\x84\xa1\xa6\
V\xffs\xfe\xb2\xd9d\xf3\xa3?9\x96\x1cLh\xc1\x9cw\x19\xa7\xb3\xdd\xaa\xb2\xfd\
\x19r?\xfc\x03\xb8\xeeE\\\xa5\xd0\x1f\x82/4[4\xef:O\xfe\x83\x8f\xe1\\\x90x\
\x9e\xc7!\x86k\xa1\xe6\xb9\xc0\xa7y]\x8f\xcf\x8b\xb6\xd6\xf9\xf0p\xb0k:\x17\
\x07}\x15b]rF\x08\xd6\x10h\xad\t\xfc\x00\x1d\xfah\xad\xd9\xda\xaa\xa4\xb2\
\xad&\x12\xfc\xd3/6)\x14KH!\xfa\xcb\x9eSw\xb3Y\xda\xa1P\xf0(\x15=L\xde\xa5\
\xfegOQ\xf9\x95\x8fs\xdf}\x17\xf9\x99\x9f\xff\xd7\xb8\xefz\x1c\xf7\xc1{\xf8\
\x01/\x0f\x02\x0eMD\xf8{\xafh\xcc\xf7\r\xe6\xd5N,aG@\x18\xa3\x11\xa2\x13\x17\
\xb1!\xd0\xb7\x0c\xf2^\x87\xd7n\x1b\xee\x96\x12m \xf0\x03\xfcg^\xe0O>\xf3?\
\xf9\xeaW[\x98\xfdl\xfbL\x13\t\xfe\xab_,\x01\xd1\x026\xd4\xb7\xa8V~\x96\xbd\
\xcf4A\x98\xc8t \x05m?\xa4\xf1\xb9?\xec\x05\x30\xfe\xdf\xbfl \x84 _\xf8\x97<\
\xf2\xc8\x8fq\xf9\xb2\x87\xbcW\xe2\x9c\x138\x0f\xaaH\x12v\x1aN\xc4\xf7\x8c\n\
0\xfa\x00c\x0e\t\xf5-\xda\xdf\x0c\xd1:\xe0\xb6\x0ey\xe9\xc63|\xf3\xca\xf34\
\x1a\xf5\x99\xb7\x9d\x8c\x10,\x16\x8bx\xf9< h\xb5\x03Bmp\xc5\x19\nO\xb8\xb8\
\x8f*\x94\x8a\xa2\xaf\x83\xeb\x9a\xf0\x85\xef\x0c\xfc\xd6\x18C\xa3\xfeir\x95\
\x97\xf1TD`\x7f\xff>B}\'W\xcdy\xf6\x8d\x00q\x1ax\x1b\xe2\x0e\x83y\xfd\x1fY\
\xe75.\x9c\x7f\tq\x1a.\x9e\x02\xa5^\x8a\x95\xf8\x00\xf0\x00a\xd9\xa5R\xade\
\xca\x195\x96`\xa9\xfck\x91\xc8\x7fE\xf3\xe9z\x8b\xb7\xdf#\xf9\xfc\x9f\xb78\
\xbf!\xd9,\xfe8ax\x0b\xb8E\xeb\xc9\xff\x9d8\xb8\xb7*\x05\xca\x9d\xa8\xec\xa8\
\xd1^\xe2\x9eu\x80\xeb\x99o\xae\x0b\xc7\x91\xec\xed\x94fJ\xdf2@\xb0\\.\x93s\
\xdf\x8d1\x87\xf8O=\x0b\x80\xd97\xdc|\x15\x9e\xf8\xd0Cx\xb9\x87h\xfeM@x\xed\
\x05\xf6v\xff\x53\xe2\x8d\x94c!\xe7\xcd\xa6\x9f:\x1d|\x12\x94\x92\x946\xbd\x8e}\
UP\xad\x14\xd8\xcc\x98\x15\xa8GP)\xc5\xe6/\x94\xd1\xe6\x10! \xf8\xee\r~\xc4U\
\xb4\xda\xdf\xe2\xec)C>\xf7\x18\xda\x18\xc4]k\xec\xed\xed$n*/\x16\xdc\xde\
\xebv;\xa0T\xc9v3Ih\xb5\x02j{e\x00</\xcau\x91\xc5\x12\xd0\x9b\x07\xcb\xe5_E\
\xde\'{\x1a\xff\xe6\xbf\xf0\x08B\x8d1o\xf2\xcf\x7f\xea\xc7p\x1cI\x18\x1a\xda\
\xad\xaf\xd0j~1\xb1\xb0w\xc4L|\xf3\xb4\\\x1c\xadv\xd0\x0bo\x052g\xb3\xec\xb5\
\xa0\xa3\xfe)\x10-mv\xff\xb0\x81\xfb\x88\xa2\x90w\xc1\xb8\xe4\xdcK\x04a\x881\
P\xab\xed\x8e\x9d\x7f\xce\xc4\x94J\x9b\xf6\x96(\x07i6b]\xf4\x08\xea\xfd}\xf6\
>\xd3\xc2y\xfb\x9d\xa8w*\xdaW\xda\xb4\xfe\xb6\xc5\xf6\xef~2\x12\xe1/\x1a\xc2\
kmZ\xad\xf1Z\xc38T\xb7\n#\xe9\xa8wv\x9b\x99Si\xce\x82\x1e\xc1\xcf~\xee\xf3\
\x1c\xbcn\x90\x1b\xb2\x93\xd7\xe5~~\xf0\x07\xdf\xc4\xcb=D\xa8\r\x84\x86\xea\
o\x57g\xaa\xa4\xb4\x99\x1f\xf9\xec\xa3\xa1^.\xc1\x03\x13\xad\xba\xc3Wu\xa4a\
l8|-\xd4\xb4\xfd\x10!\xe0O?W#x\xd6\xb7V\xf1\x99Y\xadU\x19\xd1\x9f&^\xef\xdb%\
\xb4\x0eA\x87\x88\r\x87\xdf\xaa\xee\xf2\xfe\x1fU|\xea\x7f}j)7d\x1b\xfd \xe5\
\xd7\x07\xbf\x10w\x08\xc4F\x14\xaa\xf5\xc5/5\xd1\xe1\xb5e\xdf\x9b\x15\xc4&\
\xfaA\xa9wf\xc3A\x10)\xc4\xedfm\xaeJ\xfe\xec/\xda<\xf6\x98\x8aUeh4\xedL#\xd3\
\x90\xa8l\x0b!Y\xef\xacs\x9e\xf9Fs\xeeJ>\xf9\x1b\xf3=\xa0y\x90H\xf0Lg\t\xb3\
\xb1\x0e\xbf\xf03E\x84(\xa6*,n\x14/m\xda;\xb5\xc7\x8deJ\x9f\xfb\xd4\x9en\xeb\
m\xac\xc3;/\xa6\xdbl\xd3E\xbcb\xd7\x15\x89\x07Je\x85\xe3\xac\x11\x8f\xee\xc9\
\xaa?\x8c\x04H\x8aNiY\xc9A\x94\xcf\xd0\xeb\x96$\xa2#\xc1\xea\x8d\xf6\xd4\x95\
w\xceM\xfe\\\x08pU\xff07\xad3\xba\xe5\x18"(\xa5D\x10\xb5\xde,0\x06\xda\xbe\
\xe9%\xdf\x1f^]\xcc\x8b\x96\x9f\xdd\x1a\xde\x0fR\x16\xb2g>x\xe0\xe2\x9d3o\
\xffh\xfb\x11I\xdbh\xb5\ra\x98}wT?\x02\xb4\xc3(\xf0[x\xae7\xf6\x07i\xd0\xf6\
\xa3\x03h\x9a\x8d\x1aJ9S\x1fV>\x9f\x1f\xfb\x9d6k\x84\xe1\xe1\xfc&\x0b!$Z\x87\
\xf8\xed\x06\x14\xe7#\x08Qwm4}`\xbaz\'\x9d\xfc\x84o\xe7;\xbfi@\x8a\xea0\x98\
\xab\xb0\xa3\x88^\x0b\xee\x1bC\xf0t\xf6\xa5P\x1cR@!\xdf\xb7\x85\x96\x8a\xdb\
\x89\xd7i\xad\xa9\xfcN\x9d\xe6\x97S\xb4\xae\x84|.*S\x1bhf<\xf6\xa8\xd7\x82\
\xdf\xf9F#\xb3\xcdq\x18J\xa5\x9bZ\xa4\x94\x94?\x91n\x18xnDR\x08pdt\xecJ\x16\
\xf4Z\xf0\xf9g\xe6\xd3\r\xab\xd5*\x95r>\xf5\x89!\x9e\xe7R,\xe4\x08B\xcd\xb8\
\xb8c)\x19\x1b\'\xb1\x92S{&I\xc3$\xeclo\xceU_\x9aS{\x8e\xdf\x96\xe6\x84\xf17\
\xe9\xd4\x9e\x95\x86\x154\x9b>Z\x1br\xe3t\xb5!\x84\x1a\x86\xf2s\x1c\xddS{|\
\xffz\xcfn:k^\xef#}j\x8f\x8em\xd6[d}\xc7o\x0cf\x84\xdd#2g\xc8\xcf\xb4hX\x1d\
\x83{\xb5\x16\x8e#\x91br\xfe\x89\x03\x0c\x7f\xf0GM\x9bU\x8f\x85U\x82\xc6@u\
\xbba\xb3\xc8\xb9qrbHV\x9c\x9c\x18\x92\x01\'\'\x86\xa4\x80\xd5i"\x97S\xbd\
\xd7\xb5z\xcb\xca\xd9f\xbf\xb7]\xefM?RJ\\u)\xd3\xef\xed\x86\x99\xc7\xba\xa6\
\xadT*\xc6\x0c\x1e\xbe!\xe4\x99\xf1\x17\'\xe0\xe4\xc4\x10\x1bh\xd4*#V\xee\
\x9d\xddF\xe6\xf14\x0b\x96\xa2\x8b&\x99\xf0U\xca\xb4c\xf3\xe2-\xafl[].\xc5\
\xbdK\xc5\xa2=\xefR\xfc\x94\xd7\xb9\xbdK\x8b@\xd2*C\x9bcz<_\x126K\xbb#\xfb\\\
\x9a\xad\xe96Q\x1bX\x1c\xc1X\x1cS\xdb\x0fi/\xe1@\xd3$\xd8]\xf0\xc6^+9\x9fO!\
\x8e\xb8m4\xeb:\xd9*\xc1\xf8ygJ\t*\xe5\xfc\\Q\xd8B@\xde;=\xf0Y\xd6\xc4\xb2\
\x96\x13\x05D7\xd0\xd5\xd8*\xe5\x02\x95r:+[\x1a\xf8Av;\x87u)\xda\xc8\xe8\x1c\
I\x0b\xad\xa15\x83w\xc1~\xf2F\x03\xf5\xa6\xe9\xf8\xdd\xf5\xc8&\xbcY\xca\x0bB\
3\x139X\x90\x145&z\xda\xd5\xeaN\xaa\xeb\x17i\x17\xb5FpYG\xbcg\xad\xcfj\x0b\
\xa6\xf1\xf6,\xbb>\xbbY\x9ac\x1e\xdeq\xde]\xb0\xef\xe1\x9d\xe4]\xb2*EW\xe1\
\xe1=\xb2\xde\xa5y=\xbcp\xc4\xbdK0\x9b\x87\xb7\xd9lf\xb2\x04\x1c\xbb\x05\xef\
\\{\xd5\x96\x8d\xae\x877-\xfc d\xaf\xd6\xccT\xc7\xca\x08\xc6=\xbc\x8b\xc4\
\xca\xba\xa8\xb6\x94\x8ee\x1a\x8e\xdd\x18\xcc\x8a\x13\x0fo\x16\x9cxxW\x00\
\xebRt\xb3\x98\x9b\xea\xd5\xd5\xdaP\xab\xb7Rw\xd3B\xdeE\x08A\xa8u\xe60W\xab\
\x04K\x9b\x1e\xd5\xadt!\x08\xaer\xd8\xda\xaeO\xbd.\x1e2\x0b\xb0U\xadQ\xab\
\xa7_\xfd\xda?\x12%%\xf2\xf9t\xdb\xb7\n\x85\xc1#\xc0\xb2\xfa\xfcW6\xd1;\x8e\
\xa4Y\xaf\x8c\x9c$\x19\x87R\xce\xdc\x9b\x18V\xaa\xaa)\xe5\xcc\x1c\xd9\x99\
\x16\xc7j\xa2\xd7\xdad\x0e\xaa\\i\x12\xe3bi7\x93\xb2=\xa9;\x8f\x83]\xf7Y:\
\xb9\x01D.\xec\xee&\x05[\xeb\xcf\x95\xb9\xcfV\x89\x13\x82Y\x10f\xf0\x905gH\
\xbc1\x0b\xac\n\x99PC\xad\xbe\xdf\xf3\r\xee\xec\x8c\xb7l\xcf"0f\xc1\x02\x0e\
\x96Z\xeb9\xf1\x96Eb\x12\xac\x13Tj\r\xd9IJU)\xe7\x13\xaf\xc9\xaalwu\x01c\x18\
\xbb\x01a\x1c\xac\x12t\x15x\xb9\xfeV\xabI6\xd2\xb4\xcav\xce\xa5\x17p\t\xd0l\
\x1f\x10\x04\xe9\xbd\xc7V\x85L\x16onZe\xdbU\x83\x85\xca\xe1\xe4\x90Sp$\x94\
\xed\xf8>\x988\xc6f\xf5\xca\x00\xab\xee\xb3,\xc1Y0\x9f\xb2},\x82\xb3\xb2"J\
\x0b\xdf\x7f\xbft\xf7Y\x16\xc4\x95\xedJ\xa5\x92\xf27\xc9\x9f\x1f\xc9\xe0\xac\
\xb8\xb2=\x8f\tqZW\xb5*E\x9b\xcd\xa6\xcd\xe2\xa6\xe2\xc8\xbb\xcf\x96Q\x9f\
\xd5\x16\xccb\xd2;\x96\xcav\xab\x1d\x90\xcbWS\x9d\x7fvl\x95\xed\xc8\x04ap\
\x1cI\xdeS\x16B{\xf4\\9\xda\x16"E\x87\x8d\xb56\xca\x9b5y\xa3\xf5\x15\xbdmr\
\xd0O\xde8\xcb\xc9\xcbV[\xf0H\'o\xb4\x81#\x9d\xbc\xd1\x06\x8eb\xf2F\xab\x04\
\x17\x99\xbcqV,E\x17=\x12\xc9\x1b\x17\x89U&o\\\x99\xe1wY\xc9\x1bO,\xdb\xc7\
\x1dK\t\xce\x9a\x96\xbcq\x91\xc1YK\x112\xabL\xde\xb8\xb0.j!\xada\x0fi\x96_\
\xe3`\xd7\xbbt\xf4\x937\xce\x87#\x9f\xbcq^\x1c\xe9\xe4\x8d\xb6pd\x937\xda\
\xc4\x91L\xdeh\x1bG2y\xe3[\x11\x0b\t\xce\xea&\xcci5\xb6\xe6\xb6\xaai\x1du\
\xf7\xac\xf9\xd4\xba\xb0\x1e\x9c%%\x14\xbc\xf9\xfdz]H\t^N\xa0\x1cCc()\xd0J\
\xbcK\xdd@\xaa.\xd2$\xccq\xc6d\x0f\x8a\x97\xe38\x82\x9ckh\'\x0c\xe9\xa5y\x97\
\x94\xd3\x8f\xdf\xcd\x920gRw\xf3r}7\xb6\xab\xc4\xc8\x14\xb4T\xef\x92\xef\xf7\
\xfb\x90\xad\x849\xf1\xd0V!\x06u\xdc\xa5{\x97fM\x9835S\xbaS\xc2\xf3"}n\xaf\
\xb6\x97)\x17\xcdI\xc2\x1c\x1b8I\x98\xb3@\xbc\xe55\x99\x13\x826p\x920g\x81X\
\x18\xc1\xb8\x9a\xb5\xca\x849k\x97/_>t\x96$\xd1\x96\x8d0\x0c\xa3\x16\x0c\xb3\
l\xb6>f\xf8\x7f\r_\n\xf3VN{\xbf\x00\x00\x00\x00IEND\xaeB`\x82'
def getscanprogress01Bitmap():
    """Return the 'scanprogress01' frame as a wxBitmap."""
    image = getscanprogress01Image()
    return wxBitmapFromImage(image)
def getscanprogress01Image():
    """Return the 'scanprogress01' frame as a wxImage decoded from its PNG data."""
    return wxImageFromStream(cStringIO.StringIO(getscanprogress01Data()))
# Register the 'scanprogress01' frame: record its name in the index and
# attach the three accessor functions to a fresh catalog entry.
index.append('scanprogress01')
catalog['scanprogress01'] = ImageClass()
catalog['scanprogress01'].getData = getscanprogress01Data
catalog['scanprogress01'].getImage = getscanprogress01Image
catalog['scanprogress01'].getBitmap = getscanprogress01Bitmap
#----------------------------------------------------------------------
def getscanprogress02Data():
return \
'\x89PNG\r\n\x1a\n\x00\x00\x00\rIHDR\x00\x00\x008\x00\x00\x01,\x08\x06\x00\
\x00\x00\x96\xb6p\'\x00\x00\x00\x04sBIT\x08\x08\x08\x08|\x08d\x88\x00\x00\
\x1a>IDATx\x9c\xed]_\x90#Gy\xff\xad\x8d}\xbd\xe7\xe3\xae\xcf\x80\xddkl_\xdb\
\xc4\xf6\xd8q`\xec$\xce\xc0Qa\xc8C\x10E\x01"\x0f\xb1\xa0R\xa0\xa7\x94H*U\xa2\
\xf2\xc7\x0bIU\xf4\x90\xaa\x08\x12\n%y@\xa9J\x15"\x15WD\x1e\x82HB\x10\xff\
\xe5\x07\x9c!\xa4\xf0`\x08\x1e\x13\x03s\x8e\xeb\xae\xed\x02\xae\xcf\x9co\xbf\
5\xb67\x0f\xa3\x19\x8d\xa4\x914#\x8d\xa4\xdd\xcb\xfe\xae\xaevw4\xd3\xdd?u\
\xcf\xd7_\x7f\xdf\xd7_o\x9c>}z\x0f\x971^\x06\x00B\x88u\xb7c)PJ\x05\x04\x01\
\xa0\xd3\xe9\xe4Z8\xe7\x0c\x96)!\xa5\x00c\xf3\x97\xa3\x94F\xb7\xe7Ak\xca\xf4\
\\\xb1X\x04\x80\x01\xc1\xbc\xa15\xa1\xdb\xf3\x00x\xcb\xaa"\x15\xaeXk\xed+@\
\xae=h\xc8-\xb4\x9a\xef\x83\x10|\xea}ZkT?\xd4A\xef\xe1\xd9\xbdk\x1a\x02\xf5\
\xda\xfd`|\x13ZiT\xb6\xdbPJ\xa7nS\xae=X(\xdc1\x93\x1c\x00p\xceQy\xaf\x95\xaa\
\xccj\xb5\x00\xc3\xd8\x82\x14\x1c\xa6)Q*\x9a\x99\xda\xb4\xb4wp\x16,\xcb@\xb1\
`\xc2\x9f\xd2\x1b\x86\xe4\xb0-c\xa1z\xd6F\x10\x00\x1a\xf5\xd2\xd2\xeb8pB\x86\
\xb2\xcd\x16\xeb\xed\xc1^\xc6\xf9\xcd\xf3\x15Z\xed^\xa6:\xd6F\xd0\xf3\xce\
\xa1\\m-\xbd\x9e\xb5\rQ\xad/\xac\xa4\x9e\xc4\x1e\xac\xd5js\x15f\xce)\xf0\xe6\
\xad/M9\xb9\xf6`\x16\x01\x90UX\xcc\x8b\\\xdfA\xcf\x078\xa3H\xb9v\xdddMe\x17\
\x84\x8f\x7f\xa2\x97g\xd5\x13\x91\xbb\x90qb\x9cj\xb5v\xde\xc5g\xc6\x81\x9b\
\x07\xb3"\xd7\x1e\xe4\x0c(\xd8,\x1a\xa2\xe5b=\xf1\xbe,\xca6\xe7\x80m\x06ej\
\x02z\x0eez\x7fs\xedA)\x91jq\x9bE\xd9\xb6\x8c\x80$c\x80\xe0\x80!\xb3\xb5i_(\
\xdb\x93\x16 \x9c\x03B,`\x0e\xc0\xa1\xb2\xbd\x0f\xb1.e\xbbV\xab\xa1Z\xb1a\
\x1a\x85\xd4\xcf\x84\xca\xb6\x99R\x05R:\x98k\xc3\xfa\xd2 \xd7!j\xdbv\xea{\
\xe3\xca\xf6\xbc\xaaZ\xfc\xb9Z\xad\x06\xc30\xe0y\xc3\x92\xf9\xb2P\xb6C\xa2\
\xa3\xe4\x80\x83\xf8\x0e\x8e`V\xef\xe7J\xb0\xdb\xed\xa6\xbe7\x0fe;\xcd\xd0\
\xceu\xb9\xd4j;\x10\x82\x83\xb3\xe9\x96\xb5Qe;\xaf\xe5R\x12r\x152D@\xad\x9e\
\xbe\x17W\x81\xdc\'\xfaR\xd1La\xf8%\xb4;N\xeaaZ\xb0\r0\xc6\xa0\xb4\x86\xe3\
\xf8\x99\xda\x93+\xc1r\xc9Bm\xbb\x98\xea^C\nl\xd7g;|\xb6\xab\x05T\xca\xf6\
\xe0\xefZ\x1b\xed\x8e\x9b\xbaM\xb9\n\x19\xce\xd3\xeb\x8d\xb6\x9dnr/\x14\x86-\
\xd9i,\xe7q\xacM\x17\x15\x82\xa3\xd7\xa9N\xf53H)2\x13\x1a\xc5Z\x95m)\x05\xa4\
\\\xae\xf3\xf5@M\xf4Z\x13\x1c\xd7\xcf\xf4\xcc\xdaz\x90\x88P,73Y\xb6\xb3\xb8\
\xcdB\xac\x8d\xa0\xeb\xfa\xf0<\xb5\xf4z\x0e\xd4\x10\x9d\x07\x97=\xc1\\u\xd1,\
\x12\xbd\x17\xd3H\x96i\xba\xcf\xf5\x1dT\x1ahwv\x00\xb6\x01\x00h4\x1a\x93\xef\
\x9dC`\xcc\x83\xdc\x85\x0ca#\xb2\x9b\xac\x8a\xc44,E\x8a2\x16x\x9a\x9c\xee\
\xf6\xc2\x9a\x88\xd6\x80\xe7Sd\x8b\xc9\x8a\xdc\tr\x0e\x14\xac\xd0\xba\xbd\
\x98M3,\xcf2\x19\xa4 t\x9d\xec\xcf\xe7N04\xb3\x87\x98\x16E\x11B\xf0\xe4^\x8e\
\x97#\x04\x83i\x10&8\xac&"W\x82R\x04\xdfx\x88B\xa9\x91j2\x9f&E-\x130d\xc0\
\xd4\x90\x0c\xae\x97\xcd\xd6\x91\xeb<\x18\x7f\xdd<\x9fr\xd1T\x9c\xd8\xd2\x8f\
\xb1\xc0\xc1\x93\x05\xb9\x12\x8c\x0f)\x95\xa3\x16\xa6\xd4\xa0\xd7\xb2F.\xaeD\
\x17\x15\x82\x8fIS7\xe3\xaa`^\xe4j\xbao\xd4J\x902X\x81\xc7\xe3O\xbb\xed\xea\
\xd8j\xbf\xd1\xec\xa2\xd1\xec-T_\x1a\xe4\xda\x83\xa3>\x86\xd0\x94\x9ed\xca\
\x909D\x19\xaf\xd7t\xcf6\x12M\xe9\xcb\xc0zL\xf7\xb4\x9aX\xf7\x95\x9a\xee\'\
\x85\x8dP\x82\x01T\xd3\xe2z\xea\xcaM\xf7\x93P*7\xc7\x8cK\xbdX\xbc\xc9\xac\
\xfa\x8cf\x19B\x04\xefw\xb3\xd5\xca$\x81\x976M\xc4\xe7+\xd7SpW`\x9eHB\xaeC4>\
\xec\xee1e.e2\xc6`\xc6\xcb\xca\xe8\x96\xca\x95`\xdcop\x7f\xd1B\xb5b/\xb4gB\
\x08\x8e\x8f\xfdy\x11\xac_\x08\x11\xc1\xf3\xb3\x8d\x84\\\x87h\xb7\xe7\xc1\
\xf3\xce\xc10\xb6\x00\x00\xd5J\x01\xd5Jz\x9f\xfd,t:nf\xbfb\xee\xd3D\xe5\x81O\
.e%\xef\xfb\n\xb5Fv\xd7\\\xee\x04\xfd3\x1a\xc5r\x13\xad\xf6\xd73\x0f\xa7$(\
\xa5\xd1j\xf7P(5\x13\xa7\x9bYX\x8a\x14UJ\xa3V\xff\xf42\x8a\xce\x8c\xff\x9fv\
\xd1E!\x04G\xb5l\xc3\xb6\x8d\x85\x8dN\xbe\xaf\xd0j;h\xb5\xe70\xc8`\t\x04\rC\
\xa0\xdd,\x83O\xb0\xb3d\x85\x94\x02\xb5\xed"l\xdb@\xb9\xd2\xca\xfc|\xee\x04\
\x9b\xf5\xd2\x10\xb94F\xa7I\x10\x9cEs\xa0m\x19\xa8V\xec\xcck\xc8\\uQ)\x00)\
\x073{>F\'\x8a\x8cN\x95r\x01\\\xd8\x99\xca94:e\xc1\xa1\xd1)\x86\x03gt\x9a\
\x86e\x18\x9d\xd2b%\x13\xfd\xb2\x8cNip\xd9k2\x87\x04\xf3\xc0\xb2\x8cNi\xb0\
\x12!3\xcb\xe8\xb4L,\xcdt\xdf\xed\x0eL\xf7\xcb0:\xad\xc5to\xc4L\xf7\xf7\x98\
\x12\xaeG\xb9X\xb7\xe3\xbb_\xe2\x83}\xe5\xa6\xfb\xb8\xf6r\x7f\xd1B\xc1^,Q\
\x07c\x80m]9tM\'\xbc\xba\xd3L\xf7\xb9\xf6\xa0\xaf\xb0T\xa3\x93\xe7\x8f\x0b\
\xab\x95\x9a\xeek\xb5\xda\xd2\x8cNZ\x0f+\xdea}\xb3\x90\xbb\xe9>4:U\xcao\x86e\
\x9d\x82\xb1`<h\x90O\xc6E\xbd\xd1;4:%\xe1\xb2\xd7d\x0e\xb7\x15d\xc1\xe1\xb6\
\x82\x18\x0e\xb7\x15\xe0p[A*\x1c()z\xb8\xad \x01\x87\xdb\n\x0e:.{\x82\x87\
\xdb\n\xb2 \xbe\xad\x801\xa0\xf1\xb1\x06\xe4\xab\x05\xf8q\x86Sw\xd8\xd8\x00A\
l\xdd\n\xa2K\xb8\xdd,\xa2V\x7f;\xbc\xef~\x07\x86a\x80\x88\xa0\xd43 \xba\x94g\
\x93\xf2%\xc8\xd8Qp~\x1c\xc6\x9d\x06\xe8\x05\x86\xc6\xdf\x9e\x06\xf0r(\xb5\
\x03\xda\x03\xf4\xee.\x88\x00b\xbb z\t\x00 \xef\xba\x15\xae\xba\x02\xe2\xc4\
\xf5\x10\xd2\x00\xe7\x0c\x82_\x01z\xfe\x12\xfc\x1fx\x0b\x93\xce\x85\xa0a\x18\
\x90\xf2.(}\tJ\xef\xc0y\x8c\xa0\xf5y0\xb6\t\xa2s`l\x13\x00@\xd8\r\x1e\xe8\
\x93\xa3\x17\x83\r$xn\x0f\xfe\x85\xa7\x83\xf8=B\x14\xac/\xb8\x80i\x99\xb8\
\x99\x03O\xfa>\\7\xbd\x0e\x1abn\x82\xc79\xc7\xad\xd2\x00\x17\xb7\xc0W\xe7\
\xd1u\xce\x06\r\xebOk!\xa9Z\xf5\xad\xa9\xcb\xdc\xfeh\x17\xb8\xaa\xff\xc7\x0b\
\x04\xa55:\xdd`\xee\xe3/g\xb0\xcc\x02\xc4\xb5\xc07\xbf\xe9\xe0\xd9$\xe3L\x02\
2\x13\x14B@J\t\x82\x80\xd2\x04\xcf\xfda\xf0\xa53\xa0\x1a\xd3\xfa\xe7A\xfd\
\xf7\x87\xed7\x8c1T\xfay\xa1\xe8g\x84\xae\xe3\x01`\xb0L\x0bo\xb0\x8e\xe2a\
\xe7+3\x89\xa6&\xc8\xd8Q\x18\xc6\xed\x00\x13\xf05@;\xe7A\xbb\xc1\x90\xdb^\
\x90\xd844k%\x10\x11\xaa\xb5\x0eX\x7f\xec:\xae\x0f\xc7\x05,\xf3u\xb0\xed\x93\
\xf8B\xf7\x0b\x13\xdf\xd3T\x04\x85\x10\x90\xb7\x99P?\x05H\x03\xb4\xb3\x83\
\xca\xbb_\x9f\x1f\x8b\x14h\xd4\x06\xeb\xccJ-XG:\xee\xd3p\xdd\xa7a\xbf\xe9\
\xd7\xc17\x92\xb5\xa2\x99\x04\xad7\xda`\xc7\xb6\xe0\xfb\xe7A\xb4\x83ri\xb5\
\xc4\x92\xd0\xec\x93\xad\xd4\xda \x00\xdd\x87<\x08~\x04\xadV\x1b\xe5\xf2p\
\x96\xa1\x89\x9a\xccq\xce\xf1\xf6b\t\xfag\x1c\xeecgAX=\xb9z\xfd#S?o\xd6J\x08\
%\x9b\xd2\xbbp<\xa0\xfbe\x07\x965H\x88\x95H\xf08\xe7\xb8\xf7^\x0b\xdf\xf65\
\xf4\x05B\xe5\xdd\xafG\xb9\xb8\xfa\x9e\x93\xf2f\x00\xc0\x03\x0flO\xbc\xa7U/\
\xa1U/\x03\x00\x9e#B\xfb\x8b>*\xd5\xc1\xbe\xc51\x82\xa5R\t\xbfV(B=\x17\xbc\
\xd0\xe5\x8c9u\xf3\x00\x11\x81\x88 \xa5\x04\x00|\xeb[\xc9\xf3\x1f\x8b\xf9\
\x05\x9e}\xc6\xc75,\xe8\xcd\xf8\x9aq\x88\xa0a\x18(\x95\xabp]\x05\xda%\x94\n\
\xab\'\x07\x04\r\xdf!\x02\xe7\x1c\x9c\xf3D\x9fC\x9c\\\xad\xfeQ\xdc~3\x03]\
\xf2\xc1\x18\x1br\xd0D\x04m\xdbF\xab\xf5O\x00\x18\xd8\t\xb66r!Nr\x8e\x13\x9c\
\xc3q\x9c(\xeb9\x11\xa1\xd3\xe9\xc04\x07m\xab7\x9a\x00\x8e\x81\xb1\x17!o\xb8\
\n\xf4\xcc\x7f#N1\x92\xa2\x95J\x15\x9e\x7f\t\x8c\x01\xc55\x93\x0bq\x92s\xfc\
\xf5_5P\xadV\xe1\xba.\xba\xdd\xee\xd0\x8a\xa1\xd1l\x81\x88\xc0\xd8\x1e\x88\
\x8e\x01\x00\xe4u\x80\xff\xcc\xf7\xa3{"\x82\x8cK0\x04F\xd6\xb4 "h\xad\xf1\
\x13\xad\xf1\xec\x05\r\xda\xa1\xe8z\xf8\x7f\x1a\xc2a\xc6X\x10\x93\xc66\x19\
\xd8\x11\x86\xab\x19\xc3\xb5\xfd\xe1\xf9\x89O\xb4p\xe2\xc4\xb5(\x16\xdf\x81R\
)\x98\x02:\x9d\x0e\xca\xe5J\x7f\xda\xef\xeb\x87l\x0fD\x1b\x00;\x06y3\xe0\x9e\
\x1d!H{@\xf9]\x16\xf4\x88\xea\xe3|\xdd\xc17\x1fq\xf1=\xcf\x83\xef\x9f\x81R\
\xe7\xa0\xb5\x86\xca3\x94i\x02\xc2/h{\xfb\x8f \xe5\xcd0M\x13\xa5R\t\xef(\x16\
\xf1\xa8\xeb\x05\x9f\xf7\xb7\xd22\nH\x82\x00\xc2\xb1\xa8\x8c\x88`\xe9\xad\
\xc3\xc3\xb2T*EI\xa8\xa6\xf5D\xf4\xed\xf7\xff\xc7#\r\xa3\xe766\xc7\x1f\xdc\
\xdb\x19\xbb/\xde\xeb\xa3u\x96\xcbe<\xec88\xca\x18\x8e2\x06\xcb2\xe1z\x1e\
\xb4\xd2\xc1\x1b\xc7\x02\x92\xc4\x00\x16\xdbV4\xa6\xc9\x84\r%"\x88\x1b$n\xbe\
E\xe2\x86\xeb\x058\xe78~\x82\xe3\xda\x13\xaf\xc2q~%\xae`\x1c\x8c!\xd2\x0f\
\x01`K0\xdc\x11K\x1f\xe9\xfb\n\xbe\xbf\x88\xaf\x90\xb0\x15\x9a\t^\xbc\x84\
\xe7/)\x1ce\x83\xf2M\xc3\x80\x7fL\xc1\x7f\xc2\x8fH"\xea\xd5\t\x04\x01\xa0\
\xf4\x9e2\xac\xfb\xfa\xda@t3\x9b\xb9\xa7\xfa\x9c"\x9c\x12;`<\xe8\xb1\xbc\r\
\xbb\xea\x9c\x86\xe7y0\x8c\x81\x9c\x907\x06\xe5\x87$\xc3\xce\t\x11\x11l4\x9a\
A7\xc7K\xcc@.\x84\xeb\x9d\x81e\xca\xa9a\x81\xb1ecj\x10\x11\xbc\xc7\x83\xf7^k\
=\xf4*\xc8\x1b\x05\xe8"A)\x15\x91\x0c1\x98\xe8G\x1b4\x07\xb9\xa0!\xc1rF\x9d\
\xd3\x13\xdf\xdd\xf0j\x9ar\x89\x08\xea\x9c\x1e\xd2N<\xdf\x1f\xbb\xcf0$\x18\
\x1fo\xeb\xd0\x10\x1d\xde\xdb\xcf0\xf4g\x06\x10!\xfa\xb6\x97\x02\x02<\xff\
\x7fa\xc8\x9b\x86.\xbf\xd60\xf0\x9f\x8e;\xd4\xe6\xa8\x07G\xc91\xccGnU\xd0\
\xea\xc7\xb842B\x8e\x06Ro\x08\xe3RtAZB\xf0T\xb11\xbe\xd2\xa9_\xc4-\xc1\xb0\
\x07\xd6W,\x06\x0f}\xcf\xf3\x87\xf6\rk\xfd\xfcX\x99#\x04\x17\'g\x18\x03\xa9\
\x99$L\xc2\x1a\xd8\x11\x96j\x18K\xc9\x87$\xb1\xe7\xa9\xc8\tCDp]\x0f\x8c_\x03\
\xd0s\x01\xf9\x89=8\xde\xbb\x99\x11\xef\xb9Qr\xa3e\xf3\x93\xe9j\x1bu\x80\x8e\
\x8e\x0e"\x02\xf5c\xba)\xac)i\x9aXth\x8e".)i\xe4:\x03\xc0\x19\x83e\xc9~[b\
\xc6\xd0\xd8\x93\x8c\r\x8b\xfc\xd45\xcf\x9a\xe8\x17A\x12\xa1\xf0\'\x8b}\x1e\
\x92\xa4\xb1\xb4,\xf3\x7f\xd1A\xd9\xc3-\xc8\xdd\xbb\x14\'\x12\xaf8\xfc\x8c\
\x90L!\r\xad\xbd\x1d\xcct\x98N\x9d\x07\xf3@Z-\x85\x108A\xb3\x84g\xa5\xb95z\
\x0f\xfbX\x9b\x87Wk\xca|\x9eY\x1a\x8c~\xc1\xcb\xc9\xe9\xd4\xff9M\x8a\xae\nK\
\xc8\xca5\xfd\xb3y\x14\xed\xacHT\xd5\xf2@|\xc8\xc5+\x19\x15.\x0c\x18\xb3\x1c\
,\x0bC\x13\xfd\xa2_\xad\xd6\x04\'e4\xfd\xea\xcf|\xc9\xa9\xc2U5<-r{\x07\t\xc1\
\xde>\xd3\x94\xb3\xb5\x0f"\xb8\x9eJ%E9g\x91~K\x04x\x9e\xbf\x9e\x13C\x18\xc2\
\x95D\x9f\x1c\xd1\xf8\xff\xf0:c\x90\xe2\x04h\xe8\x1f\x86\xfe\x87\x90\x92\xc7\
\x0cZl\x1fE\x1bN\xea\xc5\xfeu.N\xe2\x16}\x01\xe7G\x19\xc5\xfe\x08\x89\x85_\
\xca<Xk\xb4\xe1\x1dY\x0fpI\xc0$\xd5/\xc4Z\tf\xc54\x1dv\xf8\xb5LX.\xad\x03Zk\
\xd0\xce\xec\xfbB\xd0.M\xb4\xb3&\x13]g8\xa5\xde\x81\xeb\xe6g\x98\x9a4T\xd7\
\x16\x8c\x17\x05\x05E\x7f\x0f\xff\xcc\n6\xf23\xc4\xbe\x896\x9c\xd4\xc0\xac\
\x18].\xed\x1b\x82@>\xca\xd4R\x17\xbc\xbe\xaf\xc1\x8e\xa4\xeb\x03\xff\xcc\
\xf8\xfb7W\xef\xcd\xd0\xa1s\x172K\xb5h\x8f\x80\xb1\xd9\xbd\xbe\xaf\x86h\x16D\
\xe4F\x18.u\x9a`\x9c\xc14\xc4\x92\x95m\x85\x88\xc6\xc4\xc7\x97dU\x8b\xe7\x7f\
\x99\n\xc6R\xfb\rG\x95my*P\xb6\xa7\xaf(\x96htJ\xbbn\x0eW\x06\xb3\xdc\xe3q? c\
\x81kz\xd2#I\x97\x97ft\x1a\xf5\x01&U>\xea\xc7\x08\x91t/c{\xd8\xdb;\x06\xda}:\
E\xbdK6\xfc\x86\x95\xc5\xc9M\x9b\xc8\xd3N\x0f\x1b\x1b\x17S\xde\xb9\xe4!:\xed\
Z\x9c\xac\xea+\xdblc\x10\x15A{\x1b\xe3\xcfo\x06\xa5h\xadg\xe6\x88\x9am\x17]\
\xa2]o\xc8\x84O\x94Z\xd9\x9e4%\xc4\x0b\x8e\x9a=\xe2A\x1b\x1f\xa2\xb3\xc8\xe5\
\xa44\xa66\xd9\xcf"7|k\xb0\xf0\x8f\xb5m\x9c`\x12\x81$2\xb3\x96\xd23\x1a\x92\
\xe9\xe1\xd4#*`\x97\xd8\x83\xa2\x1fo2&\x11\xe2\xfe\xae\xbcT\xfe\xa8\xa2\x14\
\xe5\xa5\xf5\xe4\x0c!A\xc8\x98w\x9b\xe8]\xec\x814\x8d\xf7N\x9c\xe4h\xc1\xb1\
\x17K\xa9@\xd9\x8e\x0b\x8d\xc4\xf6\xecm\xc0\x7f:_\xcbv`\x97\x1aw\x0e\x0c\x82\
\xf1h\x17o\xb2,|\xa6\xdb\r:9\x89$\x12\xfe\x8e\xf5.a\x0ee;G\xaf\x0c\x11\x00FC\
EF\x04{\xeec(X&\xee3M<\x1a\xa6\x93\x9e\xf0\x9e\r}G\x13\x883\x04\'&3\x06`\xa3\
\x7fqox,\x85K+z~\xfc\xb3x\xa33\xed\xfc\xa4\t\xd3D\xb5\xf2\x1e4[\xff\x0c\xdb4\
p\xad\xbd\x85^\xefs@ROb\xf6ka\x9c\x12\x90[)\x0c\xb4}\xd5\x0b4>\xf7\xc5\xa1%\
\x87\x97R9\x0f$n\x82&\xa3\xd5Y\xd4>\xf0~\xf4\x1e\xf1\xc1\xf9\t\xd8v\x7f\xcf\
\xd1\x84\x1e\x9c\x84\xd4\xe4R\x82\x80(S\xf3\xac}\xfa\xe1\xfc\x1a\xd7_\x87&z\
\xef\xb1G\xf0\x07\xbfW\x06\xfe\xb2\x89b\xc1\xc4[\n6>\xdf\xed\rn`\x83\x82\x92\
\xc0\x18\x1b"\xa74-\x94z\x851@\n\x1eY\xb5\xa5\xe4\x13\x95\x83 \xb4\x99A\xdef\
\xc0x\xcd-h\xb7[\x00\x124\x99\xa7\x9ep\xe1\xb9_C\xf3\xfc\x0e\xca\xef\xba\x07\
\x85b\x01\xbdn/\x98\x98G5\xe8\x11\x08\xce\xa2a\xa7\xf5.\xdc\xf0\xa8\x9dD\xed\
9\xde\xba\xc9$\x83\x93\x96e\xe0\xfb\xe0\x1c\x8c\xa9\xb1\xd5\x04\x010\xee\x0e\
\x02z\xe5\x8d[Pj\xa0\x90OP\xb6\x7f\n}\xe6!4\xff\xe1\xcb\xd0\x9aP,\x16\xf0\
\xba_y\xddx\xa9#\x88\x86\x10m\x0czn\x16\xb9\x19\xd0z8\xf6{t\xbd)o\x95\xb8\
\xcf4 o\xdc\x82x\xa5\x80\xe3\xfd\x10\xb5\xfa\x87\xa3\xcf\xa7\xae&\xe8G\xdfF\
\xe7\xdf\xbe\x06\xc7=\x83;o\xb9\x05\x85B\xa1\xef\x0c\x99\xf6P 0h\xd4\xad;\r\
\xb3\r\x00\xc1\xcf\xa1k\x04\xc3\xb8\x0b\xe2f\x89k\x85\x04\xd1\x1e\xda\xed\
\x07\xf1\xdb\xa5"\xba\x9d\x07\xa3\xfb\xa6\xae&\x18;\n\xe3\xee\xd7\xc0\xf7\
\xcf\xc3}\xec,Jo\xbd\x07\x85B\x01J\xf9p\x1cwx\xc8\x12\x90x\x04\x03\x01\xc6\
\x1d\x02l3T\xd0\x82{\xce\xa8\xa7qN\'\xb4|\x06\x88\x82^\x13B\x80\x1f\xe7 \xda\
\x85\xf2\x9f\x84\xe7}\x17\xf5\xfa\x1f\x8f\xe9\xb8S\tr~\x1d\xd8\x15\x0c\x05[\
\x02\x00\xfe\xb0\xde\xc1\xdb\xdeb\xc2\xbaG\xa2X,@=\xa3\xa1\x7f\xf2,<\xef\xbb\
S\x1b5&U\xd9\x1e\x08\x1c\xe7tZ\xa5\x80\x10N\xdf\xe6\xbd&\xf8\xf1\xa0<\xef\t\
\x1f\xdd/\xf5\xd0\xfdl\x1b\xee7\x9c\xc4\'\xa7\x0eQ.\r\xd0%\x02\xd1.<\xffi\
\xfcM\xad\x08l\x10>\xf8g-8\x8f*\xb0\xab7!\x8d\x9ba\xdbo\x06\xe7<q-\x97\xdc\
\xde\xd8}Sz\x8f\x10\x0cC\xc6\x04\x8e\x1c\xb9&\xd8\xdbq\xf5&\x94\xd2hw\xbe\
\x04\xf7\xbf\x1c0h\x14\xdfV\x00\x80\xa1X\xee\x10\x13{\xd0\xb4NC\xf4SdV\xeb\
\x9f\x82y\xfb\xaba\x18\xd7\xc3\x90\x12\xfa\x8d\xc0\x17z\x0ezW\x9f\x80\xfd\
\x86;!\xae\xe3\xc1\x966\x06\xec\x11\x01;}\x013mmI{c\x9f\x19\x91\xbf\xf0(\x00\
\x80_{\x1c\'\x8e3l \xc8\x06\xa4\x1e\xf7\xa1\x95\x0f\xa5\x14\xb6\xb7\xab\xd8\
\xde\xde\x060G^5!\x04\x84\xb8\r\x9c1\xa8\xf0X\xf5+\xaeA\xa9\xdc@\xa1`\xa1\\\
\xb4@\xb6\x81n\xef\x11\xb8\x0f\xff\x0b\x80\xe3`\xfc\x950\xcd_\x80\x14\x1clS\
\xc0\xb2\x04v\x88pVi\xec\x1d\xe9\'\x7f\xa3\x17\x01\x00\x1b\x1b\x04\xb0\xcd~4\
!\xc7MR\xe2\x8a#{\xd8\xbcr\x13\xec\xeaMh\xda\x89\x84\xd9\xe3\xfe\x05\xd0E\
\x0f\xd7_\xf3\x04\x0c\xfe#t]\x82\xd6\x83\xf5\xe4\xac$\x03\x89\x04\x8d\xbb\
\xcc>\xd1\x80`\xad\xfa\x9bh}\xaa\x070\x82i\x08p\xce\xe0z\n\xa4\xce\xf4\xf7\
\xce^\xc2v\xa5\x82\x8f\x7f\xac\x04v\xefo@\xed\x9c\x04\xe1\xe5\xe0G9\xc4q\x86\
\x0b\xbb\x0c\x9c1\xe0dP>\x03\x83\xe4\x14\x9c\x02\xc2\x00\xd2\xbb \xda\x83\
\xfa\xc9\x05(\xa5\xa0i\x07\xa4\x7f\x8c\x97\xe8)<O\xcf\xc36/\xe1\xe4\xe6\xb8\
\xa8\x9d+\xafZ\xb1X\x04\xbfN\x00`A\xf4\xbc&\x18\xec\x08\no2`\xdc!!e\xb0\xfb\
\xda?\xa7\xd1z\xb09\xf4r\xef^\x00\xc4\x89\x1f@\x9c\x00\xaa\xdbm|\xc5Q\xb8ak\
\x0b\xe2\xd4\x8d\x90\xb7\x9ax\xd5\xd6\xab\x01\x00\xaf\xb9\x89C_\xbc\nt\xf1G\
\xf0\x9f|\n\xfe\x0f\\\xf8\x8f\xff\x0f\x00\xc0\xff\xe1\xf80k\xe7y$J\xb9\xf2\
\xbb\xc1\xda\xeeY\x8d\x07;\x0e^q\x92\xe3\xf3_rp\xe2\x18G\xa9\xf8z(u\x01\xc0\
\x058\x0f}v\xa2\xe4\x02\x82\x88\x8bg\xb5\x8bg\xb5\x82\xf7\xd8#\x00\xfe5u\xa3\
\x92\xca\x9a\x17CR\xb4R\xa9\xc04^\x0b\xc6\x18\xbc\xef\x9f\x03\x00\xd0\x0e\
\xe1\xfcE\xc04O\xc12O\x81\x9e\xdf\x83\xef?\x85V\xf3/\xc6\n\x8b\xefg(\x95\xac\
LI\xac&\xc1~\x831d\x05\xd7\xe7\xb3\xe9\xb6Q\x0fJ)Q\xfa\xad\n4\xed\x811\xc0\
\x7f\xf2i\xfc\xbc!\xe1\xb8\xdf\xc3\xd1+\x08\xb6y\'4\x11\xd8\xd5\x1bh\xb5\x1a\
\x89\xb1f\x9d\xae\x87j\xa5\xaf\xf4\n\x8en\xbb\x8aN\xd7\x9d+e\x1f\x00l\t\x8ew\
\xc6\xf62z\xbe\x82\x7ffN\x82\x95\xca\xef\x80\xbfr`J/\xbd\xd3B\xab\xe3\x80\
\xe8%\x94\n\xbf\x0c!8|\xa5\xe1:\xff\x01\xa7\xf7\xc5\xc4\xc2\x94\xd2h4{\xd8\
\xae\x06\xf3\x92\x10|(\'\xda\xa2\xa8\x7f\xa4\x9b\xf9\x99\x88\xa0\x90\xbf\n 0\
\xc66\xff\xae\x0b\xe36\x19l\x96$\x03\xa6\xb1\x05_\x05Z|\xbb=\xfd\xe4\x8ef\
\xab\x17\xa4}\xc81\xed&\x11\xa1\xde\xe8\xa2\xf7\xf0\xb8\x00\x9a\x85\x88\xa0\
\xde\xd9A\xebS\x0e\xc4+\xae\x82\xfc9\t\xf71\x17\xce7\x1c\xd4\xff\xe4\xfd\x81\
\x08\x7f\x86\xa0\xce\xbap\x9c\xc9\x82%D\xa3\xd9C\xab\xed\xc02\x03\xa9\xbbH\
\x12\xd5 ;\xa57wtpD\xf0\xd3\x9f\xf9<v_ \xf0c\xbc\x9f\xd7\xe5\x06\xdct\xd3K\
\xb0\xccSP\x9a\x00E\xa8}\xb0\x96\xba`\xad\t\xdd\x9e\x07 \xfb\xb7\x9e\'"\x82\
\xbb\x14\xec\x16S\x175\x884\xd81\x81G\x95\x86\xeb)0\x06\xfc\xfbg\xda\x89s\
\xd4$\x08\xc1a[2\x87\x13C4:\xdd\xecydB\x0c\xe6\xc1\x17\xc2\x05\x1c\xa0\xb5\
\x02\xb4\x02;&\xf0@\xad\x89\xfb~Q\xe2\xef\xff\xf1\xefS\x17:\x9apqQlW\x0b\xa8\
\xd6\xda\x993S\x02\xb1y\x90^\x18\xfe\x80\xbd\x8c\x81\x1dc\xd8d\x0c_\xfcr\x0f\
Z\x9dM\xdd\x98<\xc9\x01\xc1hh5\xcaC\xfe\xc4\xb4\x88i2\xc3/\xf1\x91c\x02\x0c\
\x0cD\x1an\xaf\x9d\xba!qr\xbd\x9e\xb7\xd0\xf0\x92\x92\xa3\\\xb2\xfa\xf6U\x86\
Z\xb5\x80R\xc6s_\x12\x95m\xc686\xfb\xa2\xef\x89\xef\xf4R\x17V,\x0c\xd6c\xae\
\xeb\xa3\\\xcd\xd6\x98$8\x8e\x8fv\xab\x02\x00\xb0\xac \xa9U\x16\x89\x9a\xb8\
\xe0=\xc2\x02\xc1\xa0\xb5\xc2SO\xa4\xef\x81[b*\xd5"=\x17\x87\xe3\xfa\xf0c\'p\
e\xdd\xf4<F0\xec=}Q\xc3}(\xdd\xd0\x0cq$f=\xcasW\xcb"\xb6\xd5\xb1\r\x92\xaco\
\x15\xb6\xef\xe6(\xfcR5Sa\xb1\x1c\x1a(\x16\x8bQ/\xd6\xb6\x0bc\xe9\xa8\x1b\
\xcd^\xb4\xf1x\xd6\xba\xae\x9fu\x05\x00P)\x971\x89\xef\xcc\xccx\x9cs00\x1cKH\
\\\xb0\x08\xca%{\xec\xda\xbb\x94\xce\x9c+t\x1e\x0c6)3\x0e\xd6\x7f\xf7n\xbc\
\xfe\xaa\x85\xd4\xab48\x92\xc5\xfa\xbb\x00b\x04\x83\n}\xcf\x01g?[I\xe5\xab\
\xc0P\x0fj\xad\xe0\xb9\xd9\x97$\xfb\x19C\xef\xa0V\xfeR*\xf9\xdcW]\xdcy\xa7\
\x1c\\ B\xb7\x97\xcf42\x0b\x11\xc1\x1d"\xf8\x8f\xcf^\n\xcd\x83\xf7\x7f \xdbt\
\x93\'\xa2!\xfa\xfd\xeftA;\xf3\xcf7\xa3\xc8\xc1\x1c\x13!\xfbN\xec\x01"\x82Y4\
\x96I\x88\xcfO\x86\xc1r1:\t\xb1\x81\xf8Q\x86Y\xf5\x87\xdcO\x0c\xb1\xfa\xbf3\
\x86\xd4F\xa7\x91S\xde#0\x06\x18\xf2H\xf4w`\xd1\xce\xd6\xa6\\O\xed\x01\x00\
\xdf\xb3\x97ftr\xbc\xc1^\x8b\xb5\x9c\xda\x13V\xeaz\x04\xd3\xc8w"w\\\x1a;\xf2\
/\xcd\xa9=K\t\x88-\x96j\x10\x82\xc34D*\xa3\x93m\xdb\x13?\xd3\xb4\x01\xa5\xf6\
\xa6\x0e\xcd\x95\x9d\xda\x13\xafL)\x8d\xae\xd2Hct\x9av\xec%0=,l\xe5\xa7\xf6\
\xac\x12+?\xb5\xc7\x90[h5\xdf\x97\xe2H\x14\x8d\xea\x87:\x91!wZ}\xa6!P\xaf\
\xdd\x0f\xc67\xa1\x95Fe\xbb\x9di}\x98k\x0f\x16\nw\xa42\x13r\xceQy\xaf5\xf3>\
\x00\xa8V\x0b0\x8c-H\xc1a\x9a\x12\xa5\x8c\xe9@\xd7\xb6\x7f\xd0\xb2\x0c\x14\
\x0b\xe6\xd4\x18lCr\xd8\xd6\x84I2%\xd6\xba\x03\xb4Q/-\xbd\x8e\x03\xb7wim\x9a\
\xcc<\xe8et\xaax\xbeB\xab\xdd\xcbT\xc7\xda\x08z\xde\xb9\\\xec\xa6\xb3\xb0\
\xb6!\xaa\xc3\xf0\x94%\xe3\xc0\xbd\x83Y\x91+\xc1\xbc\xf33\xe5\x81\\\xdf\xc1V\
\xdb\x81\x10\x1c\x9cM\x9f\xecwA\xf8\xf8\'zyV=\x11\xb9\x12$\x02j\xf5\xfde\x95\
[\x8a\x14\x15\x82\xa3Z\xb6a\xdbF\x0e\x1e^\x85V\xdbA\xab=\x9fA,w\x82\x86!\xd0\
n\x96\x87"\xdf\x17\x81\x94\x02\xb5\xed"l\xdb@9\xa3o\x10X\x02\xc1f\xbd4Dn\xd6\
~\xbfi\x88\xef\t\xb6-\x03\xd5\x8a\x9d\xf9\x88\xf7\\\x97KR\x00R\x0e\x96\xef\
\x85R#\xd5\xf1_\xd3\xea\xb3L\x82\xd1/\xb3R.L]\x1c\'\x95\x93\xf3.\xec\xc1\xef\
\x9eO\xb9\x9cm\x16?\x9a\x96\xb1\xec\xf6\xd6\\\t\xc6m/y&RW*\xbe\xad \xdb\xb3+\
\xd1E\x85\xe0c\xd24K\xcc\xe7"X\t\xc1n\xbb:f\xe5n4\xbb\x99\x05\xc6<X\x89.\x9a\
d\xc2\x97"{\xcc\xcb<8T\xb6\x0f:VB0i\x95\xa1i\xd5\xd9)\x97\x88R\xb99\x16\xc0\
\xd3K\x99\xc5rQ\xe4\xea]j\xd4J\x902\xb0[v\xbb\x83c\xa0]O\xc1\xcd\xf9@\xd3\
\xb5x\x97\x8c\x98\xa3\xef\x1eS\xc2\xf5(\xd1!\x92\x15B\x0c\xa4p|\xb0\xa7\xf1.\
\xe5\xfa\x0e\xc6\xb5\x97\xfb\x8b\x16\n\xf6b\xe1\xcc\x8c\x01\xb6u\xe5\xd0\xb5\
\xa4\xc4\xb2+\xf3.\xf9*\xb0\x96\x19\xc6\x16\x80 0=\xcf\xe0t\xcf\x1f\x17V+\
\xf7.U\x1e\xf8\xe4B\xc1s\x93\xa0\xf5\xb0\xe2\x1d\xd67\x0b\xb9.\x97\x00\xc0?\
\xa3Q,7Q)\xbf\x19\x96uj,\x08/+\x82\xa8{\x17\xf5Fo\xae\r&K\x99&\x94\xd2\xa8\
\xd5?\xbd\x8c\xa23\xe3P\x939\xe8\xc8w\x1e\x9c\xd3\xc3;\r\x87\x1e\xde\x198\
\xf4\xf0.\x13\x87\x1e\xde\x04\x1czxGp\xe8\xe1]\x16\x0e=\xbc9\xe1\xd0\xc3\x9b\
\x05\x87\x1e\xde5 \xf7\xf5\xa0\x94\x1b\xe0\xfd\xc4U\xbd^/\xf1\x1e\xad\t\xed\
\x8e\x13\r\xd3\x99\x9b\xb3\xfaKJ"L\xdc\x985\xa9\x9c\x9c\x95m\xc02\x07A\xe4\
\xa61\xd9\\aH\x81\xedzg\xe2\xe7\x8320\x14\x1e\xddsw\xe1\xfb\xd3\x83d\xe3X\
\x9a\xfbl\x16\xec\x94\'6\x1br\xb8\xd0pt\xa4\xc5\xda&z!8z\x9d*\x94\xd2C\xfb\
\x03\xe3\xe0\x9c-\xbc\x0bn\xad\xaaZ\xde\xe7\xf4&\xe1@M\xf4A\xa6\xcal\xcf\xac\
\xef\xc4\x10"\x14\xcbMhM\xa8V\xd3m\xa5\x9dG9X\x1bA\xd7\xf5\xa3 \x85ej5\x07j\
\x88\xce\x83\xcb\x9e`\xae\xee3\xcb\x94Q\xd6\x82Y\xe8\xcd\x91xc\xb4\xbe4\xc8\
\xf5\x1d,\x14\xcbhwv\x00\xb6\x81F\xa3\x01)\r\xf8~\xb2i0\x0f\xff\xc5Z6g\x116P\
\xdb\x0e*Vj9[fG\xb1\xd2\xcdY\x9e\xdbA\xb5bO\xbdgT\xd9\x9e\x85i\xca\xf6\\\xa9\
\xff\xe6\x85\xefuQ\xaf\xa53\x05\xe6\xa1l\xaf\xdc}\x96e\xcf\xaem\x1b@}v}\xbd\
\xee6\xe2)d]\xe7\xab\x99"\xa4\xf6\x85\xb2=\tR\x8a\x85#\x86\x0f\x95\xed\xfd\
\x04\xad)s\x06\x93}\xa1l\xa7\xc5<s\xe7\xbeP\xb6\x97\x89\x035D\xe7\xc1!\xc1,\
\xc8\x92\xb9nQe;-r}\x07\x1d\xd7\x87i\xd7Re\x0fYF\xb0P\x12r\x172\x81T\xa4}\
\x98\xbc1G\xec\xcb\xe4\x8dy6f\x9f&o\xcc\xa7!\x07"y\xe3\xbc\xd8\x8f\xc9\x1bs]\
.Y\xb1\x18\x1dB\xfa\xe14\xab>\xad\x11\xa5=\xaa\xd7\xb63\xa5\xfe\xcb\xd7\xf9\
\x12\xfb=O[\xe7\xbcy\xba\x81\x15\xe9\xa2\xb3\x927.\x13+!\xb8/\x927\xae\x1a+O\
\xdex\xb9\xe2\xb2\'\xb8\x92wp_$o\\&\xf6E\xf2\xc6\xbc\xb1_\x927\xe6\xea]\xaa\
\x94-lW\x8b\x00\x00.(\xb3Z\x95\x84I\xc9\x1b\xd7\xe2]\x92\xb1\xb8\x18)8z\x9d\
\xed\xbe\x0fb9\xc9\x1bW\xee]"\x1a\xcek\xc89[Z\xf2\xc68\xa6y\x97r\x7f\x07\x8b\
\xa5\x1a\x1a\xcd\xfc\xe3\xd5\x82\xe4\x8d\xe3A@+\xf5.\x85\x95e=1d\xde\xe4\x8d\
i\xde\xc3\x8d\xd3\xa7O\xef\t!\xd0\xe9\xccve\x1d$\x14\x8bE(\xa5\x0e5\x99\xb9p\
\x980\'\x03\x0e\x13\xe6\xcc@\xbea$\xf6\xe0\x9c\xb2\xd0=\xb6\xa8\x07\xe9\xc3\
\xb5"\xee/\x06\x1b\xb9\xca%+3\xc1\\\x85\x8ci\xca\xe8\xf7v\xc7\xc9\xc5=\xf6\
\xa7\xf5N\xa4\tq\xcea\xc8\xadL\xcfG\xd3\xc4\xe5\x08\xa5T0DU\x9e\xe9{\xf6\x19\
\xfe\x0fMT\xde\xdbF\xa3F\xc4\x00\x00\x00\x00IEND\xaeB`\x82'
def getscanprogress02Bitmap():
    """Build a wxBitmap for the 'scanprogress02' icon from its wxImage form."""
    image = getscanprogress02Image()
    return wxBitmapFromImage(image)
def getscanprogress02Image():
    """Decode the embedded 'scanprogress02' PNG data into a wxImage."""
    data_stream = cStringIO.StringIO(getscanprogress02Data())
    return wxImageFromStream(data_stream)
# Register the 'scanprogress02' image with the module-wide index and catalog.
index.append('scanprogress02')
_entry = ImageClass()
_entry.getData = getscanprogress02Data
_entry.getImage = getscanprogress02Image
_entry.getBitmap = getscanprogress02Bitmap
catalog['scanprogress02'] = _entry
del _entry  # keep the module namespace identical to the original
#----------------------------------------------------------------------
def getscanprogress03Data():
    """Return the embedded 'scanprogress03' image as a raw PNG byte string.

    The literal below is a complete PNG file (it starts with the PNG
    signature and ends with the IEND chunk).  It appears to have been
    generated by a tool such as wxPython's img2py -- do not edit the
    data by hand; regenerate it from the source image instead.
    """
    return \
'\x89PNG\r\n\x1a\n\x00\x00\x00\rIHDR\x00\x00\x008\x00\x00\x01,\x08\x06\x00\
\x00\x00\x96\xb6p\'\x00\x00\x00\x04sBIT\x08\x08\x08\x08|\x08d\x88\x00\x00\
\x19\xa9IDATx\x9c\xed\x9dm\xac+\xc7y\xdf\x7fG\x96u\xe6\xde+\xdf;\xd7V\xa4=~\
\xd3Z\xa9\xe2\x95l\xc4\x94\x8a:\xb4nP\xb3\xf9\xd0\xd0\x08R3_b&\x08"~\n\xe8\
\x16\x05(\xf4\xc5\')\x8a0E\x81\x9c\x06)|\x80~\x08\x0b\x140\x13\xd4(\xdd\x0f\
\x15\xed\x165m\xd7\xcd\nh\x14\xba\x08\xe0EbX\xab\xc4\x8e\xf7:\xae\xee(q|\xe7\
\xca\xd6=s\xac\x97\xd3\x0f\xcb\x97%\xb9$w\xc9]\x92G\xbe\x7f\xe0\x80<\xe4\xee\
\xcc\xfe93\xcf<\xf3<\xcf\xdb\xee\xdc\xb9s\x85w0\xee\x06\xb0,k\xdb\xcf\xb1\x16(\xa5b\x82\x00\x9d\
Ngt\xa2\xd1h\xe4.\xac\xd1h\x8c\xdd\xd7l\x1e\xf2x\xf91v\xb9\xb1\xf0\xdeb\xb19\
\xf3\x9c\xd6\x10\xa9\xc5\xf55\x1a\r<\xcf#\x0cC\x00\xca\xe52\xc0)A\x9b0\xbaO\
\xbf\xbb?\xf8\xeb\xda\xea\xe5\x19\xe8\xf5\rJ\xa7\x9f\x1f\x12\x1d\x92K\xe2\
\x8e\x95k\x9f|\x18\xdd\xa7V-Z-S\x08(\x15\x05RN\x9f[\xd4\xdb\xac\xb6`\xb3y\
\x98h9\xe8\xf5B:\xdd`\xe1}\xc3\xee4\t)\xc1s\x05B\xc4\x7f\x17<\xe8\xf6O\xcfg\
\xf9\x94R\t.\xf3\r\x02\x94K\xde\xe8w\x10DT\xeb\xadL\xf7\xcd{\t\x05\xdf\xa5\
\xdd\xaa\x01\xe08\x82\xc3\xc3\x06Z\x9b\xcc\xcfd\xb5\x8b>\xe0\x9e\x8e\xc6YZ.\
\x0b\xfaAD\x94\x18e\\7\xdf\x88o\x95\xe0.b\xf4;\xcf[^\x045kt\xc9\x80\xb5\x8c\
\xa2\x93h\xec\x97\xf0&\xde\xfca\xb3G?\x88\xd6^\xf7F\x08V+\xc5\xa9c\x8f+\xbd\
\x11\x82\xd6\xa7\x89\xacHv\xe7ubk\x047\x85w<\xc1\x8d|\x83\xbf\xf5{\x01\xd7\
\xae\xb9\xa7\x07\x8c\xa1\xdb\xb33\x8d,\xc2F\x08~\xe1\x8b\xedMT\x93\x8a\xb5uQ\
\xc7I\x11\x1c\xb7P\x96U\x82a\x14\x8d~W*\x05\xa4\\}\xa4,~\xca\x1b\x93^\xf4\
\xad|\x93\xbeUYT$\xf8\xb8\x8e\xa4\xdb\xae\xd3\xe9\x06\x183_\xaa\x99\xb5\x1e\
\x14"\x16\xb6\x87\xd0\x1a\xaa\x7f\xab>\xb3\x9c\xb4\xe7\xb6\xfa\r\x1a\x03Ah\
\xf0\xbd\xf8\xa1\x1cGZ]:\xf5\xc3\xe3\xdc\xf7X\xff\x06\x830&i\x1b\xfd\xc0\xa0\
T~\xf5\xd1ZF\xd1 \x8402\xf4\xbam\\\xd7\x19\xeb\xbai\x98\xab\xb20;(u\xc2\x82^\
>\x13k\x9b&\x8c\x81n/\x04\xa6\xd5\x08\x93\x90Nq\xce\xd9\xd5\x94~\x17\x92L\
\x1e\xc8\x81\xeed\xd8%\xab\xe5\x83\xd4\xeb\
'
def getscanprogress03Bitmap():
    """Build a wxBitmap for the 'scanprogress03' icon from its wxImage form."""
    image = getscanprogress03Image()
    return wxBitmapFromImage(image)
def getscanprogress03Image():
    """Decode the embedded 'scanprogress03' PNG data into a wxImage."""
    data_stream = cStringIO.StringIO(getscanprogress03Data())
    return wxImageFromStream(data_stream)
# Register the 'scanprogress03' image with the module-wide index and catalog.
index.append('scanprogress03')
_entry = ImageClass()
_entry.getData = getscanprogress03Data
_entry.getImage = getscanprogress03Image
_entry.getBitmap = getscanprogress03Bitmap
catalog['scanprogress03'] = _entry
del _entry  # keep the module namespace identical to the original
#----------------------------------------------------------------------
def getscanprogress04Data():
return \
'\x89PNG\r\n\x1a\n\x00\x00\x00\rIHDR\x00\x00\x008\x00\x00\x01,\x08\x06\x00\
\x00\x00\x96\xb6p\'\x00\x00\x00\x04sBIT\x08\x08\x08\x08|\x08d\x88\x00\x00\
\x1aaIDATx\x9c\xed\x9dm\x8c+\xd7y\xdf\x7f+\xc7\xde\xb3z\xb9\xf7\\\xc7\xb6\
\xe6\xaaN4r*k\xae\xec\xdas\x9d&\xa0\xad\xa0\xa6\xf3\xa1\xa6\xd06\xa2\xda\xc2\
\xa2\x8b"b?\xb4\xa0[\xb4\xa0\xd1\x17m\xd2\x02\xe5\x87\xa2]\x17(\xbai>\x94\
\x01\n\x98\tj\x94\xee\x97\xd0F\xd10N\xd3\xd0@#Ph\x01\x8f\x13\xc3\x1a5J5\xd7P\
u\x8fT\xc5\xf7\\Y\xd2\x9e\x8dem?\x0c\xc9\x1d\x92Cr\x86<$w\x85\xfd\x03\xd2\
\xe5\xce\xcb9\xf3\x9fs\xe6\x9c\xe7<\xcfy\x9eg\xe7\x91G\x1e9\xe1\x1d\x8c\x1f\
\x03p\x1cg\xdb\xcf\xb1\x16(\xa5b\x82\x00\x9dNgt\xa2\xd1h\xe4.\xac\xd1h\x8c\
\xdd\xd7l\x1e\xf2x\xf91v\xb9\xb1\xf0\xdeb\xb18\xf3\x9c\xd6\x10\xa9\xc5\xf55\
\x1a\r<\xcf#\x0cC\x00\xca\xe52\xc0)A\x9b0\xbaO\xbf\xbb?\xf8\xeb\xda\xea\xe5\
\x19\xe8\xf5\rJ\xa7\x9f\x1f\x12\x1d\x92K\xe2\x8e\x95k\x9f|\x18\xdd\xa7V-Z-S\
\x08(\x15\x05RN\x9f[\xd4\xdb\xac\xb6`\xb3y\x98h9\xe8\xf5B:\xdd`\xe1}\xc3\xee\
4\t)\xc1s\x05B\xc4\x7f\x17<\xe8\xf6O\xcfg\xf9\x94R\t.\xf3\r\x02\x94K\xde\xe8\
w\x10DT\xeb\xadL\xf7\xcd{\t\x05\xdf\xa5\xdd\xaa\x01\xe08\x82\xc3\xc3\x06Z\
\x9b\xcc\xcfd\xb5\x8b>\xe0\x9e\x8e\xc6YZ.\x0b\xfaAD\x94\x18e\\7\xdf\x88o\x95\
\xe0.b\xf4;\xcf[^\x045kt\xc9\x80\xb5\x8c\xa2\x93h\xec\x97\xf0&\xde\xfca\xb3G\
?\x88\xd6^\xf7F\x08V+\xc5\xa9c\x8f+\xbd\x11\x82\xd6\xa7\x89\xacHv\xe7ubk\x04\
7\x85w<\xc1\x8d|\x83\xbf\xf5{\x01\xd7\xae\xb9\xa7\x07\x8c\xa1\xdb\xb33\x8d,\
\xc2F\x08~\xe1\x8b\xedMT\x93\x8a\xb5uQ\xc7I\x11\x1c\xb7P\x96U\x82a\x14\x8d~W\
*\x05\xa4\\}\xa4,~\xca\x1b\x93^\xf4\xad|\x93\xbeUYT$\xf8\xb8\x8e\xa4\xdb\xae\
\xd3\xe9\x06\x183_\xaa\x99\xb5\x1e\x14"\x16\xb6\x87\xd0\x1a\xaa\x7f\xab>\xb3\
\x9c\xb4\xe7\xb6\xfa\r\x1a\x03Ah\xf0\xbd\xf8\xa1\x1cGZ]:\xf5\xc3\xe3\xdc\xf7\
X\xff\x06\x830&i\x1b\xfd\xc0\xa0T~\xf5\xd1ZF\xd1 \x8402\xf4\xbam\\\xd7\x19\
\xeb\xbai\x98\xab\xb20;(u\xc2\x82^>\x13k\x9b&\x8c\x81n/\x04\xa6\xd5\x08\x93\
\x90Nq\xce\xd9\xd5\x94~\x17\x92L\x1e\xc8\x81\xeed\xd8%\xab\xe5\x83\xd4\xeb\
\xb4\xd6\xd4\x7f\xb9C\xef\xe9\x0c\xad+\xa1\xe8\xc7e\xea\x81\xf2)Ow\xb5\xda\
\x82\xae\xcb\xc2\xef\r@JI\xed\x17\x0b\x99\xca,x1I!\xc0\x91\xe0\xb9\xf9\x9ei#\
\xa2Z\x1a\n\x05\x8fr\xc9\'R\x9aY\x82\x8a\x94\xb1\x1ef\x15l\x8d \xc0\xe1Ae\
\xeduX#\xd8h4\xa8\xd7\x8a\xf8^\xc9V\x91\xe90\xa7\xf5e\x81\xd5\x16\x9c7\x9f\
\xa5\xa1\xd7\x0b\xd1\xda\xe0\xfb\xde\xe2\x8b\x01\xa5!\x8cN\xff\x9e\xa7\xba\
\x1fbk]4\x0co\x8e\xf4\xa6\xcb\xca\xbeClTu\x9f\x15Z\xdf\xb6R\xce\xa2\x97c\x95\
`\xaf\xd7\xb3Y\xdcBl\\u\xbfhY4~\xed\xea\xf5e\x81\xd5o\xb0\xd5\xee\xe38\x12)\
\xe6\xaf\xc0\x8f1\xfc\xfb/\xf7lV=\x13\xd6\xd7\x83\x8d\x83\xae\xcd"W\xc6\x85\
\xb0\x9d\x07\x9e{\x95V\xf3\xc9\x85J\xa2<\xc2\xb6\xef9\x1c4\x9e@\xc8=\xb4\xd2\
\xd4\xf6\xdb\xb9\x8c1V[\xb0Tz(\x93\x06,\x8f\xb0]\xaf\x97\xf0\xbc\xab\xb8\x8e\
\xc4\xf7]*e?\xd73\x9d\ta{\x16<WR,d\x93rf\xe1\x1d/l\x9f\xbbA&\xafnf\xab-8\x14\
\xb6\xb3"\x8c\x14\xadv/W\x1dgB\xd8^\'\xce\xbd\xb0\xbd\x08Ve\xd1\x8c\xcb:k\
\xf5e)\xc7j\x0b\xe6\x19\x00\x96U\xe4\xe6\x85\xd5o0\x8c@\n3\xd2\xac\x05A\xba\
\xa4rn\x85m\x80~\x82S\xa3\xb1=\xc3\xe7\x10\xd6\t\xba\xee\x0eR\xc4\xea\xf6z\
\xad\x98z\x8d\xd6\x86v\xa7\x9f\xb9\x9b\x0e\xcd\x83\xc60s\xc7\xe1,X\x16\xb6\
\xa1\xe0\xef\x8e\xfe\x9e\xa7a\xf3\\\x87\xfd\x83\xce\xcc\xf3\xa7e02\xc7\x01\
\xf4\x82c\xa2(\xbb\xbd\xc2\xea \x93E\xab=D\xb1\x98m\xc8M\x1a@\x81Q\xef\xc8\
\x8a\xadM\xf4\x8e#\xe9u\xea(\xa5q\xdd\xf4k\xa4\x14\xb9^Z\x1a\xb6*\xaa\xb9\
\xae\x93{\xf7`^\x9c+a\xdb\x18P)\xfb\xb7\xe7ak\xaa{c\x0c\xe5j\x13\xad\r\xf5\
\xfa\xec\x8d\x05\xe3\xf7\x8c\xd7\x97\x05[S\xdd\x07AD\x18\xc6\xcd\xb1\xacT\
\x93Eu\x7f\xae\xba\xe8,\x9cI\xd5\xbd-lTu\x7fp\xd0\xca|m\xaf\x1f\xad\\_\x96\
\xefp\xe7\x91G\x1e9q\x1cg\xcc\xf3e\x15\xc4s\xd7\xe2\xc9k\x95}\xd8YP.\x97\xc7\
]{l!VAlh-\x94\x01k\x99\xe8\x1dGR\xaf\x16)\x16\xbd\x95w\x1dF\x91\xa2\xd5\xee\
\xd3j\xf7\x17_\x9c\x02\xeb\x04=\xcf\xa1\xdd\xac"\xd3\xfcp\x96\x80\xeb:4\xf6\
\xcb\x14\x8b\x1e\xd5Z+\xf7\xfd\xd6\t6\x0f*c\xe4\xe6)v\x17\xc1I|\xcf\xc5\x82G\
\xbdV\xe4\xb0\xd9\xcbU\x86U\x82\xa5\xe2\xe9\xde\xce\xa1\xa42\x9c\xcc\x97\xc5\
\x97\x1ae\x9e(\xc7j\xfej\xa5\x90\x9b\xa0\xd5i\xc2\xf7\xdd\xd1\xefv\xa7\xbf29\
\x80\x7f~\xd0\x19\x19V\xa5\x94x\xee\xd5\\\xf7[%\xe8$\xbaf\x10\xacN\x0e\x06{P\
\x13\x0e$B\xee\xce\xbe8\x05\x1bY.9\x8e\x9c\x1aM\x83\rx\xbd\xc0\x86\x08v\xdb\
\xf5\xa9\xfd\xdb\x87\xcdn\xee\xefi\x19lD\x16M\xdb\x9c\xeen\xc81\xfa\xdc\x0b\
\xdb\x8b`Wu\x9f0\xbe\x96\xcb\xe5\xccN\x92\x8b\xeaK\xealj\xd5\xeaBg\xe5$6\xd2\
\x82i\xfbg\xb4Y\xaf\xb0=\xc4F\x06\x99J\xb59\xa5\\\xea\xf5\x17o@\xb0\x81\xf5\
\x11\x14;\xa3\x9fA\xa8\x08,L\xfa\xcb\xc0\xaeu)\xf1\xdb\x95\xf6Bd$w\xfd\xe6]\
\x88Y%\x98T\xe9\xb9\xae\xa0^+\xae\xa4\xb8\x15\x02\x8a\x85w\x8d\x1d\xd3\xdb\
\xb4MD*~\x80\xa1\xc4V\xaf\x95\xa8\xd7\xec\xed\x00\x0e\xa3\xfc\x0bi\xeb\xa3h7\
\xe7\xb6\xff\xac\xd0\x1a\xfaK\xf8TZ\x1fd\x8c\x81N\xcf\x0c\xcc\xd9z\xca\xbd|\
\x99\xf2"e\x96"\x07\xeb\x8aFb\xe2\xb7\xddh\x1cf\xba\xfe\\\xec\x17]\xe7C\xaeR\
\x9f\xd5\x16\x1cV*\x04\xa0\x03J%\x7fe/P\xad\xe3\xc1%\xb9\xdb~\xb2\xbe\xe1\
\xef\x8d\xec\xba\x8f}\x8d\x0cRf\xdbM\x98\xa5\xbc\x82/p\x1d3\x16j%\x89y\xaa{\
\xeb\x04}W#\xe5\xe9\xc0\x92E\xe9\xe4\xcc\xd0\xc0%\xe7P\xc7\x11\xf8\x9ear\xe3\
\xc6F\x03\xe6\xf4{m\xaa\xe5*\x90O\xe94\xef!\x0b\xfe\xa9\x19\xdbs\xc5\x98w\
\xe9\xc6w\xdd/\xabt\x9aW\x9f\x10\x10\xf4\x1a\x08\x11\x9b\xb3\xdb\xad_#\x8cnf\
~\xa6\x0b\xa5\x93\r\\(\x9d\xd6\x88\x0b\xa5\xd3y\xc7\x05A\x1b\xb8P:\xad\x11k#\
8\x14\xb3<\xcfCH\x89t\xe2\xcdw\xae\xebr|l\xf8\x0b\xa5*\xaf\x99\x1f\x11~\xf7;\
\xf0\xa7G(\xa5\x08\xc3\x10\xadu.7\xbdE\xb0NP\x88\x0f\x80\xf8\x04\xb5/\x16\
\xa8\xd4~\x08\xdc\x83RG\x98\x13\xd0\xc7\xc7`\xe0\x84c^1o\x03\xe0>\xfc!\x10wP\
\xb8|\t\xb9\xb7\x87\x10 \xd0\xfc\x9f(\xe4\xeb\x9d\x0eQ\x14\x8dIHy\xd5\x05V\
\x08V\xabU\x8a\xc5\x12\xe2\xf2\xfb\xe8\x04\xefBk\x85\xd6G\x08\xb1\x8717\x11b\
/~6\x06\xe1R\x06\xe4\xcc\x8f\x06\xaa\xc57N\x88n\xbf\x8c9!V\x9b\t\x00\x87by\
\x1f\xdfw\x10\x0e\xdc\xc9\xab\xbc\xa9\xff\x17aZ\xb0Q\xdb\x04\x85\x108\x8e\
\xc3c\x8f\x95y\xf4\xf1*J\x1b\xa2H\x83y+\xfe\x0fF\xa4\x1a\xf5G3\x97\xbb\xffo\
\xba\xf0\xee\xc1\x1fo\x19\x94\xd6t\xba\x9a\x0e \xef\x11\x14>R\xe4_\x1e\x1c\
\xf2\xef\x0e\x0f\x89\x12\xd1\x87\xe6!\xb7\xb0\xed8\x0e\xae\xebbpP\xda\x10\
\x84*~\xe9\x02\xea+\x06\xc79\xf8\x87\xd3\x1a\xb8\xfa`W\xb0\xf9\xa1\xa1\xdb\
\x0f\x01\x87\xfd\x836\xbe{\'O\xf7\xff;\xaf%\xf4\x88+E\x04\x12\xe2N<\xef\xc3 \
\x1c"\r\xe6\xe8\x16\xe68\xeer\xfb\x96\x03\xa6&q\xb8\x1f\xc7\x1e\xad7:\x88A4\
\xbd~\x10\xd1\x0f\xa0\xe0\x7f\x9cb\xf1\n\xdf\xe8~\x03c\xdeL\xbd?\x13A\xc7qp\
\x1f\xf4Q?\x00\xa3\xc1\x1c\x1dQ\xfb\xfc\'-Q\xc8\x86\xc3\xc6i\x90\xd5Z#n\xd5~\
\xf02A\xf02\xc5O\xffE\xe4N\xfa\xb7\xb9\x90`\xe1\xe7\x8a\x88\xbb\xaf\x12E\xb7\
0\xe6\x88je\xb3\xc4\xd2\xd0\x1c\x90\xad5\xda\x18\xa0\xfb\xcd\x10G\xee\xd2j\
\xb5\xa9V\xc7]\xf6fJ2\x97\xa4\xe4\xaf\x94+\xe8\x1fJ\x82g_\xc2p6\xc8%\xd1lT\
\x88\x87\\\x83\xd2\xc7\xf4C\xe8\xfen\x9fB\xe1T\x1f\x94J\xf0\x92\x94|\xe2\x13\
\x05\xfe0\xd2\xe8\xdb\x86\xda\xe7?I\xb5|\xb6\xc8\r\xd1l\x94\x07D\xe1\rch\xff\
ND\xad~\xaa\x8f\x9d"X\xa9T\xf8\xf9R\x19\xf5F\xfcAWs\xfa\xcc\xda\x801&\xb74\
\xd3l\x94\xb9K\xc4\xad\x99\x8c[:F\xd0\xf3<*\xd5:A\xa00\xc7\x86Ji\xf3\xe4 \
\x9eg\x8f\x8cA\xe5\xdc\x81~\xd8(#\x84\x183\xb1\x8d\x08\x16\x8bEZ\xad\xff\x0c\
\x08\xc4e\xb15rC\\\x91\x92]!\x88\xa2\x88[9lf\x87\xfbe\x92\x14G\x04k\xb5:a\
\xf4f\xac\xee\xfb\xccj\x9e\xcf\xb6pEJ.K\xc9m\xad\x89\xa2(s\xb7m\x1eTG\xbfG\
\x04\x85t\x91RP\xder\xcbM\xe2\x8a\x94\xa3\x1d\x87J\xa9\\\xad\t\t\x82\xe6\x04\
*e;\xeav\xdbp\x1cgDr\xd8\x9aY1"Xy\xf4l\xb5\xdc$&SFd\xed\xb2\xe7J\'\xe3Nxqe\
\xe9\xb2\xe7\x8a L\xb7\xe4m\xad\xe7\x92\xdc\xaa\xf7Y\x16<\xf5\xd4>\xcf<\xd3G\
\xca+\x08\xb1\x8b\xe38T>_\xc1\xb9\xf7\x94\xe8m\xad\xd9\x13\xe9\xee\x0cg\x9e\
\xe0\x8d\x1b\x11\xfd\xfe\xb8a\xb0\xd5j\x11\x04\xe3F{\xa5\xd4T\x17\x86s@p\xd8\
*\xa5\xd2\xa3\x14\x8b\x9f\xe1\xf6\xed\xefs\xeb\x96\xa6\xd3\xf9:\xe5\xf2/\x8c\
]\xdb\xeb\xf5(\x95\xc6\x17\xcdc\x04\x9f\xee\xcf0\xa1n\x11C\x82\xd7\xaf\x7f\
\x9cz\xfd\x1f\x8c\x9dSJ\x8d\x8d\xa4\xae\xeb\xc6\xa2\x9aI\x91d\x00\x1e\xf6\
\xce\x86\x04\x93\x867\xdf\x9c\x9e\x12\xd2|3&\xa7\x8eQ\x0bf\xf17\xda\x06\x86\
\xcfu|<MP\x08\x11\xeb\x87\x06\xa4\xbe\xf4\xa5\xe9x\xa6g~\x9a\xd8\xdd]\xfc\
\xe2\x85\x10t:\x1d\xbe\xfc\xe5\x16B\x08\x9a\xcd\xd6\xe8\xdc\x88`\xda\x08t\
\x960Oj\xe9?\xd3\xa7\xd1h\xa0\x94\xe2\xcf\xffL\x91d\xa6\x87\x04\xc1\x07\xd6\
\xf9|kC\x14ET\x9f\xac\x12E\x11\xde\xb5\xeb\xfc\xf5\xbf6\x9e\x01hDP\xca\xcb\
\x1b\x7f\xb8,\xb8\xf3\xce\xb89\xd2ZP)E\xb9\\&\x8a"\x1c\xc7\xe1\xf1\xf2g\x11\
\x13\xc6\xd6\xd1 s\xef\xbd\xceT\x01g\x19Cra\x18"\xf6$O<Q\xe1\xfe\x87<`\x06\
\xc1+W\xece\x17X\x07\x92-\xa8\x94\xa2Z\xad\x12\x04\x01B\x08\xaaOV\xb8~\xfdt\
\xa9\x97\xaa\xb2\xb8d\xc9\xdfo\xddPJQ\xaf\xd7G\xa1>\xcb\x9f\xfb<\x85Bq\xe6\
\xf5\xa3\x16|\xef\x19%8\x9c&\x82 \xe0\xa9\xa7\xf6\xb9}[\x8f\xfc\x8d\xcb\x7f\
\xb5B\xe9\xe7?;uO\xb2\x93\x9eyYt\x88(\x8a\xf8\x95_\x89\xf5\x9dB\x08\x8a\xc5\
\xcfP\xfe\x85\xf4\x9ciI\x8c\x08*\xf5\nA\x10`\x8e\r\xe6(\xee\xc5\x93#\xd7\xbc\
\xb9\xc8\xa6Uv(\xbd\x18c\xa6V\rB\x08>\xf6\xd3\x05*\x95\'3\x955"\xb8\xbf\xffO\
\xce\x9c\xb86\xf9\xd2\x84\x10\xf8\xbeO\xedo\xd72\x971W\x16]\x86\xf0\xf8=\xab\
mP\x1fn\xc0\x1b\xda\x1f\\\xd7\xa3V\xaf\xe7r\x9e\x18\x11L\xcao\xab\xc0\xf7c\
\xf5c*318.R\xceg\t.pS\x13>\xb7\x01\x13\xf6<\x84a\xbci`f\xeb\x0f\x8f\'\xcf\
\x1b\xb3ps\x811\'\xb9\xc9\xc1\x9a\xdc\n\x82 \xc2\xbd\xdfA^\xc9\x16\xf6a^\xeb\
\x19cPJ\xc7{\x00\xd2\xce3)\xbb\x8ccmn\x05\xcb\xbc\xed\xb12\x12\xbf\xe7\x11\
\x18\x9e\x9b\x18\xefG\xbf\xce\xd4<8l\x8dE\xad\x92\x86t\xa2k \xe88r\xe1x\x11\
\xc7f\x9a\xeerb\xe2\xdf!\xae:\x82\x13b]\xcb\xacx\xa4\xb3^\x8aU\x82\x8e#\xf1\
\xbcl\xab\x12yY\x8cu\xe3Y\xad\xe7=\xe4\xe0\\=\x15#\xc3P-|9\xa9\xc2\xb6\r\xe4\
\x996\xe5\x95\xf1\x8bg\xb5\x9e3y\xdd\xa2\xde1Q\xca\xd6\xbeA!\x04\x85\x82;G\
\xc4\x13HA\xbe\xb7\xc6t\x0bnu\x90\x113\xd4\xed0{\xd0H\xbdp\xceEgR\xab6\x8b\
\xdc\xc9\xd1x\xfa\xdb,\x8d\xbb\xd5\x16\xec\x07\xd1\x0c\t&\xbdY\x92\x97\n1\
\xb8b\xe2\xb2\xb5O\x13Y\xa1\xb5\xc6\xcc\x0cA=\xbfc\x8a\x85\xfdw\x86\xea\xfe<\
`Hn\xbe\xe8z\xdaw\xad\x13\\\xe7\x8ar\x11\xb9\xb4\xc3V\tjm\xa6&jA:\xe9\xdc\
\xb9\xb23\xb4\\\x9a\xb7\xbd\xd5oPkC\xbf\x1f2M))\xa7\x0c\x8e,\xc1/\xfb-k\x9c\
\xe8M\xda\xd0\xc6\xacc\x19\x91yR\xcc2\xd1\xe7{Uc\x90R\xe0{\xce\xc25\xe0\xd1\
\x11|\xfb\xb9(S7\x1d\x96\t\xf1\xa27\x08_\x9e\x96~D\xe2\xb1\xcdx\xff\x99\xfe\
\x06\x17\xd59Kh${\\\xc3\x9d=2\x87\xbe\xf5\xdc{\x11r7\x96z\xe4\xde\xdclv#\xc9\
.q\xc9\xec\x16L\xb6dZ\xabN\x7fV\x99!\x88\x83\xc29\x8e\x9c\xabn\x94R \xe5\xde\
\xa8\x8e\x0c\x96\xc2\xc1\xffS\x06\x19\xe7\x83\x0e\xeaE5\xbe^I\xf6\xfd\xc9\
\xa11\xed\xb9v\xb2O\x12\x06F]/\xed\\\xb2\xfa\xc9\xe3\xa97\x8c]\x9c2\x0f\xfa\
\x1f\xf5OMO\x93%\xcd"4,xX\xde\xc92\xe3\\\xfa\xb9\xbc\xf3\xe9\xf8\xe0\x96\xd2\
\x82\xc6\x1c\xf3\xe9B\x81\xafu\xbb\xf1\xf6\xfd\xb4VK{\xc2\x04I\x91\xc1\xdc\
\x9c\x84J\x86\xea4\xa7\xf1g\xcc\xc9\x0eb\xe7\x04s\xb23v\xbd963\x95O1\x07@\
\x98t\xdbD/x\x96R\xc1\xe7g}\x9f?\x18:\xab\xcf\xd0\x03\x8c5\xe8\x92\xa2\x8b6\
\x86`\x18\xde\xc0\x9e\xd6\x1f\xcc\x8c\x15}\xbd\xf67\xe8\x05!\xf79.\x1f\xf3\
\xbd\xd3ZS*\xb7\xf1<cc\x8bE\xf9\xce0\xae\xf2\x1f\x11\xd4\xea%\x1a_\xfc\x02\
\xbdoE|\xc8\xf5\xf8s~af\xe5s\x9f\'g,\xfa\xd1SY\xc0\x90\\r\xff\xcc\xd8<\x18>\
\xfb-\xfe\xd1\xdf\xaf\xd2\xed\x05|\xc4s\xf9l\xa98\xb8k\xfc!f.r\x8e\r\x98\x9d\
\x19g\xd7\x0bc\xe2o\xcf{\xd0\xa3P8u\x81\x98\x9a\x07_|> \x0c~\x9f\xe6\xad#\
\xaa\x8f_\xa7T.\xd1\xeb\xf6\xe2f\x9f5~\x0f\xa0\x94F\xec\n\x06\x8eg\x89\xdaO&\
\xfe\xdcA\xbdl\xcf\xc5\xd5\x00\xdeG\xe3\r\xbd\xee\x07\xaf\xa2\xd4\xcb\xa3s3d\
\xd1\x1f\xa0o|\x93\xe6\x7f\xfc>\xe5\xbf\xfc\x08\xe5r\x89g_x\x81o?\xf3\xed\
\xf1R\'\x88\x1a \xbc\xa1\xa6\x0fN\xc2\xe27\xe7~\xc8\xe5\xd2%\xc1{\xe5U\x8c9\
\xa1\x1f\xbe@\xab\xf9\xab\xa3*\xe6.\x97\xcc\xab\x7fH\xe7\xbf\xfc>\xfd\xe0\
\x06\xd7\x1ex\x80R\xa9\x14\x8bb\x8b\xbe\x99\xe4t\x94\x85\xcc\x12\x84\x8d1x\
\xde\xc38?\xe9\xf2^\xc7\xc5\x98\x13\xda\xed\xaf\xf0w*e\xba\x9d\xaf\x8c\xae\
\x9b\xbb\x9a\x10\xe2N\xbc\x8f\xfe\x14Qt\x8b\xe0\xd9\x97\xa8<z\x9dR\xa9\x84R\
\x11\xfd~0%\xcaI)\xf8\xf8C.{\x93]t\xf2\xe1\x18(p\x87;u\xe7\xbc0)\xc5H\x99l\
\x0c\x04a\x84{\x9f\x8b\xe38\xc8K\x12c\x8eQ\xd1\xf7\x08\xc3\xefrp\xf0Ogo\xc6K\
/\xfc\x03\x88;\x04\xa5\xa2\x0b\xc0?>\xe8\xf0\x97>\xebS\xb8\xeeR.\x97P\xafh\
\xf4\xf7_#\x0c\xbf;z\x98E\xe4\x00\x848\xc1u%*X\xfc\x1d\xba\xae\x83\x1cx\xb3\
\xc4q\xd6\n\x88A\n\xce\xff\xab\x14\x7f\xf4\xc2\x8b\xa8\x1b\xcf\xa3\xb5J\x95k\
\xe7\x13t=\xcc\x9b\x06c\x8e\x89\x94\xe6W\x1be\xba\xfd\x90_\xfa\x17-\x9e\xfc\
\\\t\xef\'\xaf\xe0z\x12\xc7\xb9\x97\xef\xeb\x9b\xbc\xa63Z\x94\xcc\x0eR\x08\
\x1c9\x10\xb6gt\xd1=\x11\xbf\xb4\x93\x13\xc1\xaex\x17;{\xef\xc7\xe8\xe3\xd8\
\x9c\xa6\x14Q\x18\xa0\x17\xd49\x93\xa0_xd\xb4\x01\xbc~\xf0U\xfc\x0f\xff\x19<\
\xef^<\xd7E\xff\x1c|\xa3\xd7\xa7\xf7\x9e\xcb\x14?u\r\xe7\x03\x92\xfb\x1c\x97\
\xfb\\\x97\x13c\xe0H\xb3\xb3\xb3xr\x9b\x14\xb6\'?\xd9\x93\xdd\xbb`\xe7\x1ev\
\x10(\xadQ\xcfE\x18\x1d\x11Ej!\xb1\xb9\x04\x1d\xc7\xc1q\x1eD\n\x81\x1a\xe6(\
\xbb\xe3.*\xd5CJ\xa5\x02\xd5r\x01S\xf4\xe8\xf6\xbeE\xf0\xf4\xd7\x81K8\xee\
\x83\xb8\xee\x07q\x1d\x89\xd8sF\n\xa2#c\xe0\xe4\x07\x03\x06?\x02`g\xc7p\xb2{\
\x17;\xc7o\xc4$\x06\xd8\x05Nv\xee\xe1\xf6k\x06\xde#\xe06\xbca4\xff\xefU\xc5\
\xbdw=\x8f\'_%P&W\xf8\xbfT\x82\xde\xc3\xfe\x80hL\xb0Q\xff\x1c\xad\xaf\xf6@\
\x18|\xcfAJ\x11;\'\xab\x1b\x03\xdf\xd97\x89B\x85\x04\xd4\xf1\xc3\xa8\xa3+\
\xc0\xfb\x11w\n\x9cK\x02!\xefA\n\x01W\xe2\xf2\xc5`T2{\xefg\x070\xfa\x18cN\
\xd0\xfa\x15"\xf5\'p\xf2:\x1c\xbf\xca\x95{\xde\xe6\xe4\x87\xff\x1b\x01\xdc~\
\x1d\x9e\x9b\xc8\x85\xbd\x14\xc1r\xb9\x8c\xfc\x80\x03\x08\xfaA\x84\xd2\x06O\
\xecR\xfa\xb4\x87\xf7\x90\x8b\xeb\xc6\xde\xd7\xd1M\xcd\xdb\x8c{@\xd7kE\xea5\
\x813\xd8\xb8h\x0c\x1c\x1d\xbd\x0f\xa5\xdf\xcd\rs\x99##@\xbc\x0b\xb8\x07\x8c\
&\x08o\x10}\xefE\xfc\x0f\xff\x14\xc6\xbc2\xb8\'\xdd\xd9x\x12K\xc7U\xab\xd6\
\xfe\x1e\x18P\xafi\xbe\xd2\xe9\xf3\xe3W$\xbf\xfd\xdf\xfa\\\xbe[R)\x7f\x12\
\xa5n\x03\xb71\xb7\xffd\xcc\xc5\x1b\xa6S\xa2\xc4Q\r^\xe5\xca\x1e\xc0x\x1c\
\xa6~?\xa4\xb1\xdf\xca\xf5\xb0\x93\xc8\x9d\x12\xa5V\xab\xe1{\x1fC\x08A\xf8\
\xc7\xf1\x03\x99#\xc3\xad\xd7\xc1\xf7\xef\xa7\xe0\xdf\x8f\xf9\xd3\x13\x8c9"\
\n\x97\x0c6\xb8\x06dJ\x89\xe2\xba.\x95\xbfYC\x9b\x13\x10\x10}\xefe>\xe2\xb9(\
\xfd\x1aw\xdea(\xfa\xd7\xd0\xc6 \xde\xb33 \xf7\x83\xa9\xc2\xb6\x91\x9e/s\\\
\xb5Z\xed\xef"\xdfw\xaa\x04\xaa<V\xa0\xd5\xe9c\xcc\xdbTJ?\x83\xe3H"\xa5\xd1\
\xfa{(\x15\xa5\x16V\xad5\xd8\xaf\x976\x96\x0b;WJ\x94r%\x0e\x8c\x11)M\xf3?t\
\xf1\x1et)~\xda\x03\x03\xbew\x15m\x8e0\x06\xea\xb5\xca\x94\xab\xcdY\xc4TJ\
\x14}tD\xeb\xab}\x9c\x1f\x7f7\xee\x9fu\t\x9e\r\xe8\xff\xcf>\x07\xff\xec\x0b\
\x18s\x82z\xc5\xa0^\n2\x93s\x1cI\xb1\xe0Z\xc8\x18\xa23\xc7\xebN\xc3\x88\xe0o\
~\xed\xb79~\xcb \xef\x968\xae\x87\x94\xf7\xf1\x13?\xf16\x05\xff\xfeX9\xa4\
\x0c\x8d_jd*t\xbf^\xa2f1\xbe\xc5~\xbdD\xbd\xd1\xa6\xbfD\xa6\x9f\xd1 sl4\xe6u\
\x8dR\x11Q\xd8\xc7\xbc\xfe&\x7f\xf0Gq\xf8h\xad5\xff\xf5km\xa2\x17\xa6G\xa9\
\xb4\x87\xb1I\x0e\xe2\xde\xd0:\xacf\xde\xa2\x92\xc4\xe9<\xf8\xd6\xa9^Bk\x05Z\
!\xeevx\xaa\xd1\xe4g\x7f\xda\xe57\xfe\xd3odz\x90$\xb9^/\\\xa9{\xb9\xae\xa4Z)\
\x07\x8e\xca\x8dz\x89J\xce\xbc/\xa7z\xd1\xb7\xc6O\x88\x1f\x13\x88\xbb\x05\
\x02\xc1\xef\xfcn\x0f\xad^ZXX\xb9\xe4\x8d~\x07Ad%\x91p\xbf\x1f\xd1n\xd5\x808\
A\xb8\x94"\x97m11\xd1\x8f\xdf\xb4{\xb7\x83@`\x8c&\xe8\xb53\x15\xf6@\xc2\xa0\
\xb2J\xcb%\xd1\x0f\xe2\xd5\xc3\x10y\xf3\x15\xa6\xaa,\x84\x90\xec\r\x96\x03\
\xcf\x7f\xa7\x97\xb9\xb0\xdd\xc4b\'\xb7\x05w\x0eV\xc9\xb2\x95\xba\x9a\xd8\
\x1dL\xd4Z+^|~\xf5\x96h\xec\x97\xa6\xc2Q\x1f6{cA5\xd6\x85)\x82\xc3\xd6\xd3\
\xafk\x82of\xeb\x9a\x8bP\xad\x14\xa7\x8e=\xae\xf4F\x08NtQ\x81\x18h\x85\xbf\
\xfb?\xda\x98\xa3\xf5\x85\xe7\xdb\xb5\xa9;\x9c\x831\x82RJ\x04\x02\xadUv\xfd\
\xca\x19\xc7i\xb0\x0e!G\xda\xaa0\xf8\xfa\xd6\x1e\xc86\xa6\xfc&\xa2\xb0\x9fi\
\xce\xcb\x83\xdf\xfa\xbd\x80k\xd7\xdc\xd3\x03\xc6\xd0\xedmf=\x99 (\xd1Z\x11\
\x06]\xeb\x95|\xe1\x8bv\x06\xabe06\x8a\xea\xc1:\xcfF\xd6\x9ej\xc5^\xd6\x1e/\
\x91\x0b{\xe9\xac=G\xc6\x10=\xb7\xda:/Y\xb1\xe7\x89\x95\xe3\xdc\x038\xce\x0e\
I\xcf\xbf\xbc\xf2\xc3\xa8\x05\xff\xf8;\xdd\x95\xa7\x85H\xc1\xc0l\x8a\x10qt\
\xe6N7X\xe8\x99\xe6{\xe9\xc7\x85\x00\xcf=\x8d\xab\xadu~U\xc7\x88\xa0\r\x89%6\
\x8e\x98Qz\xf5\xc9\xd5\xc5\xaa\xe8\x87\xc7\xb9\xef\xb1\xbe\x9d2\x08\x19\x8bG\
o\x0b\xfd\xc0\xa0T~\xf3\xf8Zv\xfc\x06a\x9c#\xa2\xd7m\xe3\xba\xce\xc2\xbd\xd5\
\xf3RLk\xb3\x83R\'Kk\xe1\xd6\xb6\xa5\xd9\x18\xe8\xf6B`\xb1\x16@:\xc59gW\xcb\
\xdf\xb4\xd6 \xc6\x95\xb2\x9fI\xe94k\x90\x81x`\xc9\x19\xf5v\x0ck!\xe8{\xe0{\
\x82J\xa9b\xa5<c\xa0\xd773\xe7\xbfy\xb0>\xc8\x14\x06\xe4lB\x08(\x15\x05\xcbx\
\xc2[\xcd\xda\xe38\x92jw\x7ft,\xab\xd2\xa9\\Nw\x17\x972\xce\x122\x1c\xa4\n\
\x1e\xa3\xbc/[\xc9\xdasxP\x1f\xfd\x0e\x82\x88\x83f?\xd5 2\t\xbf0\xc3\x1f^\
\x81R\x86Rq8\xaf\n\x04G\x18\xe2\xcdF\xb9\xadK\xab"\xd9\x85:\xdd \x13\xb9EPz<\
\xa1\x9b\x94\xd3;\xa92Y\x97l \xf9\xe5\xd9T:\xcd\x13\xf56\x9a\xb5\'\x08B\\w:F\
\xe2\xba\x94N\x1b\xcf\xda3\x0b\x8b\x94N\x0b\x97K\xcd*\xce \x19@\xb3\xd5\xca\
\x95/fk\xbeK[Q:\xbd\x13\xf1\x8e\'\xb8\x11\xff\xc13\xa1tZ\'\xb6\xa9tZ[\x17]\
\xd5tm\xab,\xab\x04\xc3Dp\xe1J\xa5`E\xe9T\xfc\x947f2\xd3\xb7\xf2-)\xacv\xd1N\
7\xa4^3qPEGfV:\xcd\xc2UG\xf2X",v\x18)\xa2\x1b[$\xa8\x94\xe6\xb0\xd9c\xbf^\
\x02\xec+\x9d\x0e\xfeu~\xa5\xb4\xf5A\xa6\xd9\xea\xc5\xe9Q,&\x186\xc6pp\xd8\
\xa5\xf7t~\xe1}-\xa3\xe8a\xb3G\xab\xdd\xa7\xe0\xbb\x99\x94N\xf3\xa0\x94\xa6\
\xdb\x0b\x97\x16\xde\xd76Mhm2+\x9d\xd6\x89\x0bI&\x0f<\xf7*\xad\xe6\x93\x0b\
\xe7-\xad5\xf5_\xeed\xfa\xa6|\xcf\xe1\xa0\xf1\x04B\xee\xa1\x95\xa6\xb6\xdf\
\xce\xb5)\xc1n\x92S\x8f\xb9>\xb6CH)\xa9\xfdbaDp^}\xa5B\xa2LG\xd2<\xacO\xa5\
\xaa\x1db\xa5\xbcK\xb6Q(x\x94K>\x91\xd2\xccjp)\xb3\xbd\xb0y\xd8j4\x92\xc3\
\x03;z\xd3y8\x7f\x83\xcc\xb2\xe6\xb3m\xa07\x98\xdf\xfcy\xba\xfb\x04\x94\r\
\xb7\x82M!\x0co\x8e6\xeb\xad3\xd5\xfb\xd6\xba\xa8\x1ez\xd4\xac\x19\xe7\xef\
\x1b\xcc\t\xab\x04\xb7\xe1V\xb0\x08V\xbf\xc10\x02)\xccH\xb8\x0ef\xcc\xc8\xb6\
\xdc\n\xb2\xc0\xaaui\x93\xd8\x8auiX\xa9\x10\x80\x0e(\x95\xfc\x95\xd5\x16Z\
\xc7\xf6\xfe\xb4\xe9!\x8bu\xc9\xfa4!%\x14}\x83\x94\x85\xc5\x17g,\xaf\xe0\x0b\
\\\xc7\x8cl\x83\x93\x98g]\xb2N\xd0w5R\x9e*\x89\xa2\x0c\x92\xbf3\xc3t\x9b\\(;\
\x8e\xc0\xf7\xcc\x94\xa0\xbdQ\xebR\xbf\xd7\xa6Z\xae\x02\xb1\x9a\xa1\\m\x12\
\x86\x8bw\x10\xcc{\xc8\x82\x1f[y!\xfe7\xb9\x07g\xe3\xd6\xa5d\xc6\xe3v\xa7\
\x9f\x89\xdc\xa2\xfa\x84\x80\xa0\xd7\x18\x85\xa4n\xb7~\x8d0\xba9\xf3\xfaIX\
\x9d\x07\x93]-\x08\xec\xec\x18\x1e\x06E\x1eB\xc8\xdd\xd9\x17\xa7`#\xb2\xa8\
\xe3\xc8\xa9U~\x1e\x1b\xdf*\xd8\x08\xc1n\xbb>5]\x1c6\xbb\x1c6{k\xaf{#\xb2h\
\xda\\\xe8N$i[\x17.\x84\xed\xf3\x8e\x8d\x10L3\xbeh\xb3>\xa7\x93$62\xc8T\xaa\
\xcd)\xaf\xb1^\x7f3\x1a\xef\xb5n\xa7\x1c"\x08cO\xd2m\xc0n\x00\xd5D\xb7\xbb\
\x9e\x90jVA\x9c%$QV\xce\x95\xb2U\x82I\'\xe2\'\xca\x05\xea\xb5\xe2J\x96%\xc7\
\x91\xfc\xdb\x7fU\x1e\xcb\x01\x13\xe6\xdc\x1dk\xb5\x8bv{!ax\x13\xcf\xbb\n\
\xc46B\x9bv\xc2N\'\xc8\xad\xea\xb0>\x8a\xd6\x9e\xfa\xf5\x95<6g!\x8a\x14\x8d\
\xc3\xfc\x16^\xeb\x04\xa3\x1b\x9ar\xb5I\xab\xfdL\xee\xee\x94\x06\xa54\xadv\
\x8fR\xa5\xb9\x94\xad\x7f-\xa3\xa8R\x9a\xc6\xc1o\xae\xa3\xe8\xdcX\xebn\xc3\
\xac\xe5\xac\xb3\xbew\xbc\xa8f\xb5\x8b\xca\xc1\xee\xf8\xe1\xd4P-\x1f\xa4^\
\x97\xc7\xc2\x1b+\xb1\xe22\xf5\xc0\xbd \xcf\xa7h\xb5\x05]7[\x8a\x84\xa1\x857\
\x0b\n^LR\x08p$xn\xbeg\xba\xb0\xf0\xae\x13\x17\x16\xde4\\Xx\xc7qa\xe1]\x176e\
\xe1\xb5j>\xab\xd7\x8a\xf8\x9e\xbd\xd5\xc3\xa2\xfa\xb2\xc0j\x17\x9d\x97.v\
\x126,\xbc\x1b7\x9f\r-\xbca\x18\xb2{\x19\x8eg\xf4B\xdb\x16\xde\x8d\x9a\xcfJ\
\x95\x86\xed"\xe7b\xa3\xe6\xb3F\xa3\x91\xc9oWkC\xbb\xd3\xcf\xdcM\x87\n9c\xc6\
\xa3-l\xdc|V\xad\x14h\xec/N\xfe\x0b\xe0\xb9\x0e\xfb\x07\x9d\x85\xf5\xed\xd7K\
\x14\xcb\xc5\xd3\xbf\x1bm\xda\x9d\xecN%\x96\x1d$\xb3\xcb\x8d\xc5b\xb6\xc9\
\xbd\x94\xd8u\x0f\xf9}(\xb66\xd1;\x8e\xa4\xd7\xa9\xcf\xd5\xdf\xb8\xae\xb3\
\xb2\x83\xc9VE5\xd7ur\xc7I\xcb\x8bs%lkmr{\x8dn\xad\x05\x87\x9b\x14\xf2\xb8\
\x0b,\xa3\x8e\xdc\x1a\xc1 \x882oRX\x05\xe7\xaa\x8b.\x83\x0b\x82y\x90\'\x82ko\
\x89h\xaf\xcb\xc0\xeeN\xa7 \xc2/62\xe5?[\x87\xfd"\r\xd6\x07\x19\x9dL\x16u\
\x06`\x9d\xe0:\x84\xedR\xd1C\x888%C\xde@\xc6V\t.+l\xcf\xc3dP\xe4\x0ba{\x02\
\xd6\x9d\xb3\xb2")l\xbb\xae\x9bzM\x9c\xb0q\xfcX\xb1X\x9c\x19\xe4\xeaL9g\xc1\
\x85\xb0=\x05c@\xe5\x94\xee\xce\x84\xb0]\xaf\xd7\x17\xdf\xc0r\x9a\xb83!l\xaf\
\xd3I\xe4\\u\xd1epA0\x0f\xf2\x0c\x00\xe7R\xd8V\x1a\xda\x9d#\x10qh\xb0\xc3\
\xc3\xc3\xd9\xd7\x9eWa\xdb\xb0\x13\xa7k\x16d\xce\x18r.\x827&\xa5\x88\xfdz\
\x89j\xb5\xb8\xd6\xe0\x8d[s\xce*x\xe3Q\x95m`\x18\xbc\xb1\xd3\x1b\xcf{\xb6q\
\xeb\x92\x10\xe3\xe4\xd6\x19\xbc1\x89\x8dY\x97\x92be\x9e\x8c!\xcb\x06o\x84\
\xc5]\xd5\xea4q#:m-[\x19C\xe6\x05o\xdc\xb8ui\xd9\x8c!YB\xff\x15\n\xcb\x85\
\xfe\xdb\x88,\xba\xcd\x8c!\xa3\xd4`\xefD\x8cR\x83\xa9\xbc\x8b\xacs\x84\xff\
\x0f\xe1\x8bw\xd3\xb8\xf4uJ\x00\x00\x00\x00IEND\xaeB`\x82'
def getscanprogress04Bitmap():
return wxBitmapFromImage(getscanprogress04Image())
def getscanprogress04Image():
stream = cStringIO.StringIO(getscanprogress04Data())
return wxImageFromStream(stream)
index.append('scanprogress04')
catalog['scanprogress04'] = ImageClass()
catalog['scanprogress04'].getData = getscanprogress04Data
catalog['scanprogress04'].getImage = getscanprogress04Image
catalog['scanprogress04'].getBitmap = getscanprogress04Bitmap
#----------------------------------------------------------------------
def getscanprogress05Data():
return \
'\x89PNG\r\n\x1a\n\x00\x00\x00\rIHDR\x00\x00\x008\x00\x00\x01,\x08\x06\x00\
\x00\x00\x96\xb6p\'\x00\x00\x00\x04sBIT\x08\x08\x08\x08|\x08d\x88\x00\x00\
\x18\xfbIDATx\x9c\xed]m\xac#Wy~\xee\xee\x92{6\xdc\xec\x9eMBr6\x84d\x02\t\x9d\
|(\x1d\x02\xa5\x136"C\xab\nG\x15\xc4A\x151-\x82\xa1R%\x87\xaa\x95#\xb5\xc5\
\xd0J\xb5\xaa\xfe\xb8\xa8*\x18\xfa\x03\xff\xa8\xc4\x85\n\xe1\xf0\xa3\xb8Q%L)\
e\x90(\x9d\xb6\xa8\x8cJ\x94L\xda\x00\x93h\xc9\x9eD%{\xee\xb2\xec=\x0e\xc9\
\xde\xfe\x18{<c\x8f\xed\x19\xfb\xf8\xeb\xea>\xd2\xd5\xf5\xc7\xf8\x9cy\xe6|\
\xbd\xe7\xfd8\xef\xc6\x993g\xf6q\x80q\x0c\x00\x18c\xcb\xbe\x8f\xb9\x80s\x1e\
\x12\x04\x80V\xab\x15}Q\xab\xd5\xa6*\xd0\xd0\x01C\'\x99\xaf\xafT\x9b\x08\xb8\
@\xd9\xb6S\xbf\xa7\x140\x8ddy\x9e/\xe1\xf9\xe9\xe5\xc5\xef\xbbX,\x02@\x9f\
\xe02P\xdf.\xcd\xbd\x0ee\x04k\xb5\x1a*e\x0b\x86^PUd:d\xbf\xbe,P\xda\x82\x96e\
\xe5\xba\xdeq|\x08!a\x18z\xa6\xeb\xb9\x00\xfc\xa0\xff>N\xb2V\xabA\xd7u\xf8~\
\xb2\xff.\xad\x8b\xfa\xfe9\xd8\x95\x1d\x00\xd3\x8f\xf9\x1ez\xbf\x1f$\x07\x00\
Gf*y\x06\x08\xb1\xab\xa4\x9cI\x0fG)A\xc7qT\x167\x11YZ>\xb5\x8bN\xdbe\xa4\x94\
9\xae\x9d\xbd\xbe,P:\x06w\x9a.\x18\xa3\xa0\x84\x8e\xbd\xae\x03\x89\xcf}\xdeQ\
Y\xf5H(%(%P\xdbn\xab,rf\xccm\x16e\x8c\xc22506\xbe5\'!\x08\x04Zmo\xea\xdf\xcf\
\x85`\xb5R@\xd9\xb6\x94\x96W\xa95\xe1\xbaA\xee\xdf*_&T\x93\x03\xc2\xde\xb0S\
\xb7\xa1\xeb\xf97\x05J[\x901\x9a \xe78\xfeL\xddK\xd3(\xec\x92\tJ)\x08!\xa8U\
\n(\x95wr\x95\xa1\x94`\xb1\xd0\x17\xb9</\x88$\x95Y\xe0\xba\x01\x9a;e\x00\x80\
i\xea\xa0\x94@\x88\xec\xcb\x91\xd2.z\x8b\xd6\xefB\xb3\xb4\\\x1c\xae\x17 \x08\
x\xf4^\xd3\xf2uS\xa5\x047\xd1\xdf\xbb\xe5y\xca\x93\xc0\xb9\x98\xfa\xb7\x0b\
\x11\xb6k\xd5\x02\xf4\x81\'_o8p\xbd`\xeeu/\x84\xa0]\xb2\x86>{\x88\x8b\xe5\
\x11\x9cZea\xf4_\x17\x8b\xc5\xb1\xe30\xde\x9d\'\xd5\xa7i\xfd\xd7e\xdb\xc6\
\xa8\x1e\x9bV\xce\xd2\xb6K\x8b\xc2\x81\'\xb8\x901\xf8\xb5oy\xb8\xfdv\xad\xff\
\x81\x94h;j\x96\x91IX\x08\xc1G\x1em.\xa2\x9aT\xcc\xad\x8b\xd2\xec\xea\xd1\
\x89 d\xfa\xc2\x94\x12\x8c\xcfn\xbaN@\x15\xb0dl\x034\xb6\xe3\xca+?(\xed\xa2\
\x01\x07\xcc\xeekB\x80v\xb3\x82V\xdb\x9b\xa8\xca\x18\xa55$\x04\xd0\xb5\xcd\
\xe8\xbd\x10IUG\x16(\xdf\xd1{\xbe\x8c\xd4\xf7\x83\xbb\x8bY\xe1\xfa\x9d\xdc\
\xbfQ>\x06=?$\xa9\x1a\xae\'\xc1y~C\x98R\xd5}\x1c\x94\x12\x98\x86\x06Mc\x984G\
\x8c\xd3\x88\x0b\xb9\x01\xce\xf7\x87\xba\xe6RT\xf7\x83\xaa\xf4\x80\x03\xed\
\x0c\xbaR\xca\xac1\xdf\x8en\xb5,\xaa\xfb\xb9,\x13\xe3T\xe9\x8b\xaeOi\x0bR\
\x02\x98\x86\x84\xedm\x8f\xbdN\x08\x81\xca\'Zp\xbe;\xf9\x01P\nX\x06\x01!\xe1\
\x12\xe1\xb82\x97\xd2Xi\x0b\x06\x81\x93IMH)E\xf9C\xe6\xc4\xeb\x00\xc0\xd4C\
\x92\x84\x00\x8c\x02\xba\xd6\xffn\xe1\xaa\xfb<0M\x1d\xc5\x82\x81\x80\x0b\xb4\
Z;\xa9\xd7\xe8\x1a\x85]L\x1aI\x1d\xc7A\xbd\xe1d\xae\xe7\xc0[x\xd7n\xbb\xb4TI\
&/z\x16\xde\xac\xf0\x03\x8e\x9d\xa6\x93\xab\x8e\x95\xb0\xf0\xce\x13ko\xe1\
\x9d\x84\xb5\x1b\x83y\xa1\x94\xe0\xb4\x16\xdey\xe2\xd0\xc2\x9b\x07\xabh\xe1=\
\xf0cPi\x0b\xea\xdai\xec4><Q\x1e\xcd#l\x1b:\xc3v\xeda\x10z\x1c\x82\x0b\x94\
\xab\xcd\\\xc6\x18\xb5\xaa{\x1d`l\xb2\xa2\xa9\'l\xf7\x08\x8e\xab\xaf`\xc6\
\xcad\x14\x8dz%\x93\xb7a\x0fK[\xe8\xe3\xc2\xf6\xa8\x06\xa74\xdb\x03\x1b\x87C\
a{\xe5\xb0\x8e\xc2\xf6\xb4\xee\x94Y\xb0\x12\xc2\xf6<7\xd8\x87\xc2\xf6\xbaC\
\xb1\xb0=\x9fkg\x81\xd21\xe8\x07\x00%2\xd2d{#V\xe4\xb5\x14\xb6\x17\xa1\x89\
\x9b\xa6>\xe5\xaa{M\xdb\x00%\xfbp\x1c\x07\x1ac\x088\x1f\xbaN\x08\x89f\xcb\
\xcd\xdcM{.6R&m\x90\x0b\xf7\xba\xd75\xc04B{\xde\xa4\xf8\t]c\xa8n\xb7\xc6^\
\x13\x96\x93\x8c\xa6q\xbc\x0e\x82 i\xafX\x98\xd7}\x1eK\xb3ee[\xdcu-Y(%\xe9\
\xe4FAi\x0b:\x8e\x939\xf2\x851\n\xa7U\x01\xe7"\xe1\xe8\x13\x07\xa5d\xecC[i\
\xd5=\x10z\x0e\xe6\xf5\x1e\xcc\xab\xba_\xab\x85^\x08\x99\xdb\xbfmi\xb2\xa8\
\x94\x12E\xbb\x91K\xb3=\x8d[\xe5\xd2\x08z^\x00\xdf\x1f^BTc\xad\xba\xe848$\
\x98\x07y\xe2\x1a\x9c)b \xa6\x81\xd21\xe8z\x01\x0c\xab\x96\xc9\xb7l\x16?\xec\
<P>\xc9\x84\xb3\xe2\x82\xf6B\x190\x97Y\x941\x8a\x8am\xc1\xb2t\x05\xb1K\x1c;M\
\x17;Mw\xaa\xdf+\'\xa8\xeb\x0c\xcd\x86\rJg#\xd6\x83\xa61\xd4\xaaEX\x96\x0e;g\
\xd4\x0b0\x07\x82\x8d\xedR\x82\\0\xc3Xc\x94D\xe3\xd92uT\xcaV.1\rP,\x8bj\x0c\
\xd0b\xd2\x7f\xa1T\xcf\xb4\x98\x8f\xab\xcf4d\xb4\xa3(\xdb\x85\xb1n_s\xf7\xba\
\x8f\x0f7?\x90J$\x157\xe6\xdaMH~O\xe2\xb9\xed\x07S6\xf2S\x83\xf3\xfe\xac\x9c\
\xd7\xbby!\xb2(cth6\xf5\x16\x10\xf5\x02,\x88`\xbbY\x19\xf2\xdf\xae7\xda\xb9\
\'\x8ci\xb0\x10Y4\xcd9][\xd0\x11/\x87\xc2\xf6\xbac!\x04\xd3\xfcg\x84\\Sa;\r%\
\xbb1\xa4\\r\xdc\xc5\xb8;+U\xdd\xd7k%hZ\x18D\xd8n\xf7\x95\xba\x9e\xcf\xe1)VO\
,Eu\xaf\xc7,\xb5o14x\xbeT\xe2\x98\x1ewD\x88w\xf6\x85{\xdd\xc7\xa5\x97\x87\
\x8b&\n\xd6\xe4\x98\x89q \x04\xb0\xcc\xa3\x89\xcfD\xca\xd0]\x98\xd7}\xc0C\
\xd3\xb4\xae\x9f\x06\x00T\xca\x05T\xca\xea\xcex\xf2\x83\xe1\xc9j\xa1^\xf7\
\xb5Z\r\xe5\x8f}a.\xea\x08!\x92\x82w\xaf\xbeIP\xae\xba\x0f\x9e\x15(\xda\r\
\x94\xedw\xc14o\x1e\n/\xcf\x0b\xce\x05\xda\x8e\x87\xed\xba\x93\xcb]\xb3\x87\
\xb9,\x13\x9c\x0b\xd4\xb6\xbf:\x8f\xa2s\xe3\xc0K2\x87J\xa7\xbc8T:\xe5\xc0\
\xdc\x94N\xd3\xa2`\xe9\x91\xcc)%P\xb2\xeb\x90\xa0\xa0\x94B\xd7C)G\xd34t:\x12\
\xaf\xbb\x9e\xe1\x82|\x15\xfe\x93O\x00/\xef\x81s\x0e\xdf\xf7!\x84H\xcc\x96\
\x9f\xac\x15\xf1p1\x0c\xe4\xb2K\xe6\xb2\t\x1a \xe4:\x80\xdc\x83 \xe0\xa8\xd5\
\xff\x0e\xc0U\xe0|\x0fr\x1f\x10\x9d\x0e \x81}t\xf0\xa2\xbc\x0c\x00\xd0\xeex#\
@\x8e\xc0<y\x02\xf4\xf8q\x10\x02\x10\x08\xfc(\xf0\xf1x\xab\x85O7\\<X0@\x08\t\
\x1f\x94v\x1a~pn\xb1\x04m\xdb\x86e\x15@\xd95hy\xc7 \x04\x87\x10{ \xe48\xa4<\
\x07B\x8e\x03\x00$\xbaA\xc6]r\xf2\xd5\x8d\xf0\xfd\xcf\xf7\x11\xec\xbe\x00\
\xb9\x8fP\xd8$\x00\xc0`\x15\xab\xa8\x18\x0c\x92H\\M/\xe2\x92\xf8\x1e\x08\xdd\
\x1c\xac^=AB\x08\x18cx\xf0\xc1"\x1ex\xc8\x06\x17\x12A \x00\xf9j\xf8\x07D\xa4\
\x9c\xd6\'3\x97k>P\x01\xae\xea\n\xaf\xafHp!\xd0j\x87c\x98^E`\xdei\xa1Raa\xf8\
l\x10d*37A\xc30P(\x14a\xfc\xea}\x10\x9d-x>\x0f\x1f:\x01*)!\xe5\xd5\xb2\x93\
\xb7\x8a\x04*]_\x1a\xf9\x0b\x89\xb6\xeb\x03 \xa8n7A\xc1Q\xabU\'\xeeV2\x13\
\xa4\x94\xa2\xf8\xbe\x12\n\xef\xf9-\x88\xce\x16\xb8\xd8\x83\xec\x84O\xb7\xaa\
\xf8\x98\xb18\xea\xd5\xf0(\xdbJ\xad\x05\xd2=\x83\xa6\xe7\x88P\xa9}\x0e\x14\
\xbb(\x97m\x88\xb4m\x062\x124M\x13v\xb9\x02B5\x08\t\xc8\xbd=\x94?p\xaf\x82\
\xdb\xcf\x8ez\xad\x18\xbd.\xd7\xc2Vu\xbd\x17@\x004\xbe\xec\xe0\xdf\xda;\xf8\
\xccg\xeaC\xbf\x9b(l\x9b\xf7Y [\xa7\x11\x04\xe7!\xc5\x1e\xec\xd2b\x89\xa5\
\xa1\xd1%[\xae5!\x01\xb4\xbf\xed\x83\x9d\xbe\x1f;;&l;\xe9\xe8>R\x16=A)\xdeS,\
A\xfc\x82\xc2{\xeayH\xac\x06\xb98\x1a\xb5\x12\xc2)W\x82\x8b\x0e\\\x1fh\x7f\
\xd3\x85i\xf6\x03\xa0S\t\x9e\xa0\x14\xf7\xdcc\xe2\x07\x81\x80\xd8\x95(\x7f\
\xe0^\xd8\xc5\xd5"\xd7C\xa3V\xec\x12\x05~.%\x9a\xdf\x08P\xae\xf4\xbb\xea\x10\
\xc1R\xa9\x84_+\x14\xc1\x7f\x1e\x0eh\xbbh\x0c^2wH)s\xef\xfd\x1a\xb5"^K\xc2\
\xd6\x8c{C%\x08\xea\xba\x8e\x92]\x81\xe7q\xc8\x8eD\xa9\xb0xr@\xb8\xce\xeeI\t\
\x9e\xd3DU\xaf\x15A\x08I(\xa6"\x82\x96eag\xe7+\x00\x08\xc8I\xb24r=\x9c\xa2\
\x14\x9b\x84 \x08\x02\x9c\x1f\xb1\x04\xa4\xa1^-"N1"X.W\xe0\x07\x97B\x1f\xb2w\
e\xf3\xe5\x9c7NQ\x8a\x93\x94bW\x08\x04A\x90\xb9\xdb6\xb6\xed\xe8uD\x90P\r\
\x94\x12\x14\x97\xdcr\x838\xd5=z\x13\x08\x0f\xe7\xcf\xd3\x9a@\x8c\xa0\xdc\
\x07J\xc5l\xe7K,\x1a\x8c\xb1\x88d\xaf5\xb3""Xz`\xb5Zn\x10\x83)#\xb2v\xd9\xb5\
R:i\x03\xbe\xcfY\xba\xecZ\x11\x04\x86[rW\x88\xb1$\xd7\x8e !d\xc8\xd9ow@\xcd\
\x11\xc7\xda\x11\x04\xd2S\xb8\x8c\x12\n\xd6\x92 \x90N2mvM\x10\xfc\xae;\x9dru\
\x19H\xeb\xaaiH\x10\xbcC_\r\t&+\x06\x95\xcbi\x84\x8f\x8d\xfbr\xd5A\x08\x81\
\xa6i\xd1\x043\x96\xe0:c\\\xe3D]tp\x11=(\x88\x11\xbce\x99\xf717D\x04)=\xb9\
\xcc\xfbP\x8a\x9d\x9d\x9d\xe8u4\x06\xaf\xbf~5\xf3\x9fmoo\xa3\xd1h\x00\xe8\
\x8f\xb5\xde\x12\xd1\xfb\xa344\xf0\x9c<Iq\xf3-\x1a\xae\x8e\xcd\xae\x11\xc1S\
\xa7\xd4\xd8\xf3T\xa3\xd3\xc9\xaf\xba\x00Rr\x9f\x9dPd\xb0\x9c\x17\x0c\xc3@\
\xb5ZM(\xa4.I\x89\x97\xe5\xcb\x90\xf2\x12:\x1d\x89\xf3\xe7\x05\x84\x10h6\xfb\
\xa7BG\x04\xaf^Q\x82\x9b\x9b\xbd\xe3tOG\xad2\x0eA\x10\xc0\x8dIdk+\x8b\xa6\
\xe1\xbc\x10\xa8\xd7\xeb\t\x9946\x8b\xaef\x0b\xf6@\xc8d\xbb\xe0\xd7\xdbm4\
\x1a\x8d\x04\x97\x03\xd3\x82\xee\xbf\xbb\xb0m\x1b\x84\x10|\xe8#\xe5\xe8\xf3\
\x03A\x90s\x0e\xeb~\x0b@h\t\xbb\xe7\x97\xfb\xfa\xa5\x03A\xb0\'f2\xc6P*\xd9\
\x89\xef\xd6\x9e`\x8f\x1c!\x04\xd5jm\xc8}sm\tJ)a\x9af$\x04\xd4j\x7f\xd1\x9d\
\\\x92\x0c\xd7\x86\xa0\x94\xfdc\xe0\xcf\x0b\x81R\xe9\x03\xf0\xbc\xd0\xbf\xf2\
\x0f\x1f\xad\x82\xb2\x9b\xd0\x0b\xcd\x941\xdbD\xb4\xd0\x8f\xb2q\xaf\x1a\xce\
\x0b\x81\x8fW\xabh\xb7\xbf\x06\x00\xb0\x7f\xb7\x9c\x98T\x06ukk\xb3\xe1\r\x82\
\x1f\xa3^\xff,\xce\x9d{>\xda-\x14\xdfW\x82\xf5N\x0b@\xcc\xbdf\x00\x11\xc1\
\x97V\xb4\x05;\x9d\xb0M|\xdfG\xb5\xfa\'\x00\xc2\t\xc5\xb4~\x03\xc5\xf7\xf6E7\
\x12\xfb\x1fo\xc5\x88\xe0W\x9aM\\\x10\x02\'(\xc5\xd5]\x8bN\\sE\x8e\x13\x90\
\xcd\xfe3\xba"6]]\x99\xa22\xc8\xa3\xe3IS\xda^\xea~v\xee\\r\'A\x08\xc1\xddo5a\
\x97~\'\xbd\xac\xf0\xaa\xe8}D\xd0u]\xb8\xae\xbbR\xca\xa7A\xe2\x84\x10\x18\
\x86\x81\xf2\xef\x95G\xfcbL\x0b\x16\xdfW\x82x\x89C\x88\xd0\xf1\x00\xfb2\xda\
\x9a\xf4*\x9a\xc6gZ%\x0c\xc3@\xb9\\I~8\xc8h\x00}\x82\xef\x1d\xbf\x15\t\xc9\
\x85\x07\xe9_\xee\x96\xf8\xb2|9\xfc\x0eG\xa3J\xee\xbe\xf3\x9a\xdc7\xde\xc3k\
\xaf\xbc\xa2\xff\xe6\xe8\x95\x00\x80\x93\'\xbaC\xe4\xc8&\x84\xec\x84>q]\x102\
9b?\xf3,\x1av\xdd\xf1\'\xf4\x00\xc0\xcb\xaf\x02\xa6\xa1\xe5\x8fE\x8d\xd7\x85\
\xe1\x1b\x97R"\xf0^\x88\xddO\xf7\x9a\x81\x0b\xe7\xbeLH\x19\xfa\x92i73\xd0S\
\xd9\xd4\xebq\x0c^-\xa5\x848/\xe1?\xcb\xa3\xbb\x8f\x8a\x1c\xd9|)\x0b\xbdJH\t\
\xf8O\xcf\xe7\xac\x98\x1e\xb9\xf1\xd3A\xff1\xad\x8d\xa8\x06L&\x97\xf6\xb1\
\xf2\x16d\x8cN\x1c~\x12\x00\x0f\xb2\x0b\x16\x94\x86I\xaa\xa4\x1c\x7f\x8aI\
\x7f\xec\xce\xa9\x8b2\x8dB\xd7\xd8\xf08\x8aU\xde\xfbO7I\xa6n\xaci\xb4\x9b\
\xf9\'<O\xcd\xf3\xb2\x1c\xd52\xa7.J\x90.\x0f\xc6?\xeb\x91d\xa7\xb2M>a\x8f\
\xe8\x1e\x16\'7&\xf6\x8e\xc1\xaf\x87[p\xc2\xc29\x0b\x12\x93\x1f!0\xcd\x9e\
\xe9+\xad\xcd\xc3\xa5\x89\x12\x02`\x1f\x90\x1bc\x0b\x8en[&K\x1b&8\x89\xdc`_\
\x9b\x01}\x02\x83\xd5\xc7>\x8b\x1d\xf5\x97\xe5\xd60\xb0\xf8\x0fwQ2\xf0\x7f\
\xf0u\xbc\xb6)\xd7r\x82\xf1\xddy\xe8\x16b\xad7\xfe\x1c\xb6\xae0\x12\xfb$jAv#\
\x03?\xcb\x937\x1e\xefS\x83\x84\xd3\xea\xd9\xc8\xb1\x83@x\x9e\x85L\xf4\x86\
\xc1\xae1\xd0\xba\xe3\xd6\x87\xc4\xa5)\xbb\t\xe3.\x03\xceE\x07R\xc8\xd1\x84F\
M\x8f\xbd\xd7\xfb\xd9\xfb\xac\x102\xd9\x1ar\xe8\x05\xf2\x8c\x01)\x01B\x86\
\xc7N\xdf\x19Ovp\xbfi\xf6\xf5\x19\x83e\x0f\x92\xeb\xf5\xa5\xd8\xc3\x8e\xef\
\x173C\xe1\xeeL\xca\xa4>\x06\x88\x11t\xbc\xa7\xb0\x01\x82\xb7\xc7\x93\xe9\
\x8ex\x80#zC~L\x94)\xa7\x80LJ:\x11\xc1J\xf9\xb7\xe1x>n`\x1a\xee6\xf4~\xad)\
\x95\x8f\xba\x9f\xdd\xdd\xec\xd2IB\xc9\xa5\xb2\x15\x91\x1c\xab}\xad\x1a\x7f\
\x1e\xb5G\x1fA\xedS\x9fG\xe1^\x1d\xfb \xf8\x81\xe7\xa5V>j\x8e9\'$\xce\xbb~Lh\
\x1c\xf8Al2\x91\x83\xe3W\x01z\xe4\xd8(\xe3\x8b\xff\xd4\xf7\xf1G\x7f`\xa3\xed\
x\xb8S\xd7`\xbe\xc3\xec)\x1a\x1371\xee~z\x95\x0c\xfdAB\x8a\xee\xff\x19\x96\
\x98\x91\xf5J\t\x02@\xbfM\x87i\xf6C \x86\x16\xfa\xb3\xcfx\xa8|\xb4\x84\xe0\
\x13\x7f\x8d\xf2\x07\x7f\x1d\xef.X\xf8\xb6\xe3\x86\xcd\x9e>{G\xa0\x84\xc00\
\xb4\xbeh\x05\xa4J \x12\xc0\xd3~\x80s\x19\xce\x16\xa5\x94@\xd7\xbb\xfe\x03r\
\x1f\x9e\xff\xec\xd0nB\xa2\xeb\x14t\xab\x0e\xed\xc6\xd3\xe0\xbc\xbf1>z\xd3M7\
\xd5\xb6\xb6\xb6\x12Q\\\x17\x7f&\xf0\x9d\x7fy\x1c\x97\xae\xb8\x15w\xddy+\xcc\
\xb7\xdd\x85\xff\x13\x02\x17/\\\x0c/8\xd6-u\xe0\xf1h\xaf\xbf\x16\xd7\xben3$u\
,\x9d\\\xef\xe7d\x8b\xe0lOh~e4A]\xbf\x11\xd7n\x11\x90c\xc7@\xc8k _y%\xb1\xbc\
HH0z-\x8c{\x0cl\x1d\xa7\xf8\xde3\x01\xea\xf5O\xe1\x18~\x81\x8b\x17/N\x88]\
\xba\x1c\xa0\xf5\x8f\xff\n\xf3mw\xc1\xba\xcf\x84\x10\x12\x8e\xe3\x8cW>\xf5H\
\x8d\x95\x1d\xf7AA\xa01\x1a\xde\xec\xa8\x1eA\tX\xec\xb8\xa4\xc4\n)%\x0c\xe3\
\x1ePv]\x18+!$\x9a\xcd/\xe1S\xf5\xbf\xc2\x05\xc1\x87\x9d\x10R\xef\x83\\\t\
\xfd\xae7!\x08\xce\xc3{\xeay\x94\x1ex\x0b\n\x85\x028\x0f\xe0\xba\xde\xd0$";\
\x19g\x0b\xb9\x01\x90}\xe8\xfa\xf5\xfd\xf79\xa0\xbdQ\x03c\x0c\xf4\x04\x85\
\x94\x1d\xf0\xe09\xf8\xfe\x93\xd8\xde\xfe\xd3\xa1\x87?\x96 \xa5\xd7\x81\x1c!\
(X\x1a\x00\xe0\x8f\xb7[\xf8\xcdw\x1b0\xdf\xa2\xa1X,\x80\xbf( ^\xba\x00\xdf\
\x7f2\xd7\rb\xc2\x18MC\xef\xb65M\x07\xe9&\xae\xfa\t\xe7\xf8\xdf\x1f\x9f\x05\
\x7f\xf6\x19\x08\xc1S{\xd6x\x82\x9a\x0eyIB\xca\x0e\x02.\xf07\xb5"\xda\xae\
\x8f\x8f\xff\xe5\x0e>\xfc\xfe\x02\xf4\x9bNA\xd3)\x18\xbb\x1e/\x89s\xb8 2\xea\
a\xba\xa4x\x18\x8c\x18~\xb4\xbf\x01\xb2\xb1\x0f\xb9\x9f$L)\x056zZ\xf6n\x8bq\
\x81\x80s\x04\xbe\x071\xa1\xce\x91\x04\r\xf3L\xe4U[\xd9~\x0c\xc6\x9b_\x0f]\
\xbf\x1e\xba\xa6A\xdc\x07\xfc\x93\xe3\xc2\xb9\xe2$\xacw\xdc\x0ev\x1d\xc5\rL\
\xc3\r\x9a\x86})\x81=\x81\x8d\x8d\xf1\xddUH9tx\x9c\xaek\xc0\x06\x00\x84:Qz\
\xf5\t\x9c<A\xb0\x01\x02.\x04\x02?\x80\xe0a(\xfa$bc\t2\xc6\xc0\xd8m\xa0\x84\
\x80\xf72{\x1cy-Jv\x1d\x85\x82\t\xbbhBZ:\xda\xce\xf7\xe1}\xf7q\x00\'\xc0\xb4\
\xdb\xa0i7Bc\x14\xe4x\xff\xa0\x9c=)\x81\xfd\x9f\x85o\xba\x01\xcc\x1b\x1b\x12\
\xd8\x0fu\xac\x84P\xbcA\xd3pds\x1f\xc7\x8f\x1e\x07\xb9\xe28\x84\xdc\x0b\xd5\
\x8d\x12x:\xd8\x05?\xf7D\xd4\r\xf3"\x95\xa0~\x87\xd1%\x1a\x12\xacU\xde\x8f\
\x9d\xc7\x1c\x80H\x18:\x03\xa5$\x0cN\xe6\xcfB\xcaK\x00.!\xf09(\x00\xde\xb9\
\x03|\xef\x14\x80\xd7\x81\\I\xc0N\x10\x10zU\xb8\xb1=\x15\x96O@\xc0\xa8\x84uJ\
\x0b7\xa8\xa2\x03)\xf7\xc1_\xda\x05\xe7\x1cB\xeeA\x8a\x9f\xe2\xb2<\x1by2M\
\x8b!\x82\xc5b\x11\xf4:\x06\x80\xc0\xf5\x02p!\xa1\x93M\x14\xee\xd7\xa1\xffR\
\x98\xfa\x99\x0b\x89\xe0\x9c\xc0e$eO)\x01v\xf2G`\'\xfb\xef\xf7\xf6\xae\x05\
\x17\xaf\xc1\xb3\xf2$\xf6$\x01\xc8Q\x00WA\xee]\x84\xff\xf4\x0f\x11<w\x16\xc6\
\x9b\xdf\x04)_\xec\xfefz2\x99\x08\xda\xe5\xdf\x07$\xc0/\x08|\xa9\xe5\xe2\x9a\
S\x14_\xffg\x17\'\xb7(J\xc5{\xc1\xf9.\x80]\xc8\xdd\x9f\xe2\xc2\x80M1W\xc6\
\x90\x1d/\xf2H\x9a\xe7\xd9\xfa\t\x82\xe5r\x19\x86~7\xa4\xdc\x87\xff\xfd\x1f\
\x03\x00\xe4\x9e\xc4\xf9\x8b\xc0\xfd\xf7\xdd\x0c\xd3\xb8\x19\xce\x7f\x04\xc0\
e\x89\xc0\xf7\xd2\xcaC\xfc\xb8\xb4Z\xad\x99z\xcd"\x11\x11\xd44\r\xa5\x0f\x96\
!\xe4>\x08\x01\x82\xe7^\xc0\x9d\xba\x06\xd7\xfb\x1f\\yD\xc22n\x87\x90\x12\
\xe4\x8a\r\xf8\x9e\x07\xe0g\x89\x82V>%J\xb9\xfcQ\xd0ki\xb4X\x96\x1e4\xb1\xd3\
r!\xe5e\x94\n\xbf\x02\xc6(\x02. \xc4s\xe0<H-\xac\xbe]C\xc1\x9al\x81R\x95\x0b\
;WJ\x14\xa6\xbd\x13\x00\xc0\x85@\xe3o\xdb\xd0o\xd3P\xb0t@\xea0\xf4\xd3\x088\
\x87\x94@\x10\xfcd\xe4\xcdhZ6\xab\xd9`z\xbeq\xe8\xe5\xc2\x06\x00F\xc2\xb4+\
\x83C;\xd3\xb9jbo\x0f;\x8f\xb9`\xd7\xbc\x06\xda\xad\x1a\xbc\xa7<\xb8\xff\xe9\
b\xfb\xcf\x1e\t\xa7\xf0\x17%\xc8\x11\t1\xa2\xf5\xf2BUz\xbe\xcc)Q\xbe\xfa\x0f\
_G\xe7\x15\t\xbaE\xc14\x1d\x94\xde\x807\xbc\xe12L\xe3fp!A\xc4\x06\xfe\xcbu\
\xc6\x16\x96\'%\n0{z\xbe\\\xe7\xaaud\x18\xa2\xc6/\nH)@\xb6\x18\xfe\x9b\x0bx>\
\x07!\xc0\xdf?\xf6y|\xf6\xd3\xdb3\xdd\x90\n\xb4\xdb\x0e\x1a;N\xe6\xeb\xfb\
\xcb\xc4+}\xbd\x84\x10\x1c\x10\x1cd\x8b\xe1c\xb5\x06\xde\xfeV\r_\xfc\xf2\x17\
\x15\xdf\xea\x82sa\xcb\x81]59F@\xb6\x08\x08\x08\xbe\xf1M\x07\x82?\x9f\xab\
\xe0IXB.\xec\xe4\x93\xdc\xdcb \x90R\xc0s\xd4/\xd8KM\xcfG\x08\xc5\xf1\xee|\
\xff\xcc\x13\xceBnd^H%\xb8\xd9\xdd1\x0b\xc1q\xf6\x99t\x91,\rk\x91\x0b\xbb\
\xd7z\xe2\xa2\x80\xf7\xed|]s\rra\x13\x90\xae\xd8\xf0\xe4w\x9a\x90{\xf9<\x10W\
1\x17v\x82 \xa5\x14\x04\x04B\xf0\xec\xfa\x95\x11`\x8c\xc225\x05\x877\n\xb4\
\xda\xd9\x87\xc9 b!\xae4\xd2V\xf9\xde\xe33\xddT\xb5R@Y\xe1I]\xd5J\x01\x95Z3W\
V\xa0\x1e\xfa\xa7\x91tg\xcd\xc0wgZ\xf3T\x93\x03\xc2\xde\xb0S\xb7\xfb*\xfc\
\x1cH\xb4\xa0\x10\x1c\xbe7\xfd\x18b\x8c&\xc89\x8e?S\xf7\xd24\n\xbbd\x86C\x87\
\x10\xd4*\x05\x94r\x1e\xc1\x99\x18\x83\xb3\xee\x14\x8a\x05=z\xedy\x81\x12I\
\xc5u\x034wB\x07X\xd3\xd4A)\xc9%\xdeE]tOJ\x04O\xbb3\xdd\xcc-\xb1\x03\x8bgi\
\xb98\\/@\x10\xf4\'\xbc\xbc\xf9\n#\x82?|\xa2\x9d{Y\x18\xc4f\xcc\x8a\x92\xe7)\
O\xc2,\xc7ZG]4.\xb1L\xab_\x89\x9b\xf7\x8b\xc5b\xd4\x8a\xb5ja\xe88\xeaz\xc3\
\x89\x8e\x07\x9bT_<\x02\xbel\xdb\x18\xc57\xad\x9c\x85\xc4M\xd8%k\xe8\xb3\x87\
\xb8\xc8\x9d\rr\x1a,\xcd_tS\xb5\r{\x04\xd6\xca!v\x1a\x1cx\x82\x0b\x19\x83_\
\xfb\x96\x87\xdbo\xd7\xfa\x1fH\x89\xb6\xa3f\x19\x99\x84\x85\x10|\xe4\xd1\xe5\
\xa9\xf0\xe7\xd6E\xf3\xe6(\x1b\x87Y\xc2\x8d\xd4f\x0c\x89\xadO\xbaNR\x13J\xe5\
\x05c\x1b\x88G\xc0\xe7\x95\x1f\x94g\x0c1\xbb\xaf\t\tS\x82\xb5\xda\xdeDUF,UL\
\x02\x84\x00\xba\xd6\x8f\x9f\x17"\xbf\xaaCi\xd6\x1e\x00\x08|\x0b\xd5J\xa8\
\xdd\x1e\xdc]\xcc\n\xd7\xef\x87\xb9.%kO\xafR\xcf\x97\x89\x1c\xf2*\xe0zr(\xe5\
_.\xeb\x92J\x14K50Fa\xe8\xac\x1b\xf30\xfez\xcb\xb2F~\'\xe4\x068\xdf\x1f\xdb5\
\x17\x96\xb5\'^\x19\xe7\x02m.\x00d\xb0\x01\x8eI{\t\xec\x8f\xf9.\x87uI\x05\
\xe2\x95\xe5Q:\x8d\x9ad\x80pb\tF\xe8\xbf\x96\x92\xb5\x07P\xaf\x97\xe1\\\xcc\
\xaetR\x85\x95U:\xa9\xba\x91\x95V:\xcd\x8a\x95V:\xa9\xc0J+\x9dT`\xa5\x95N\
\xf3\xc4$\xa5\xd3<q\xa8t\x9a\x17\x0e\x95N\x8ap\xe0\t\x1e*\x9dT\xe0@*\x9df5]\
\xab*K)A?vhb\xa9d*Q:Y\xef\xd0\x13\xd2\x8b8\x9fo\xd1W\xba]\x8a\xef\xdc5F3+\
\x9dF\xed\xe8C\xa5S\\:\x02\xec\x8fTR\xaf\x05\x16`]\x922\xa9\x8f\x99\xa7\xd2)\
+\x94\x8fA\xcf\x0fI\xaaF\xa8t\x1a\xaf\xbeH\xc3\\fQ\xcf\x0f\xb3\x1e;\xed\xe6B\
\x94N\xe30\xb7eBJ\xa0\xed\xf8\x98\xb7\xd2i\x12\x0e%\x99<\xa0\x04\x89\xb0\x02\
\xbb\x98\xee\x02\xad*\xac \x0b\x94\xb6`\xde\xb0\x82,\xe8\x85\x15\x10\x020\
\x1a\x86\x15\xe4\xc1\xd2\x0eoT\x15V0\tK=\x9dr\xd6\xb0\x82,Pj]\xaa\x94\xad\\q\
\x13S!%\xacg\x1c\x94\xb6\xe0\xb8\xf5,\r\xbd\xb0\x02c\x9c\xee>\x06.\xc2\x10\
\xbe\x1e\x96f]\xca\x82xX\xc1\xac\xa6\x82q\xd6\xa5\xa5\xad\x83\xaa\xc2\n&=\
\x1c\xa5\x04\x1d\xc7QY\xdcD,\xdc\xba4mX\xc1\xc2B\\g\xc5\x1a\x84\x15\xcc\x86\
\x95\x0f+P\x05\xc6(*\xb6\x05\xcb\xd2\x15\x84\x15p\xec4]\xec4\xa7\xf3FVNP\xd7\
\x19\x9a\r[Y\xfe\nMc\xa8U\x8b\xb0,\x1dvN\xdb 0\x07\x82\x8d\xedR\x82\\0\x83e\
\x88\xd1\xfe\t\xb3\x96\xa9\xa3R\xb6Po8\xb9\xcaPJ\xb0`\xf55`RJ\x14\xed\x06|\
\x7f\xb6\x08\x9aO\xd6\x8ax\xb8\x9b|\xd5.\x99\xb9\t*]\x07\rC\x8b^7[\xee\xcc\
\xe4\x00\xe0\xcf\xb7[\xd1\xf2C)\x85\xae\x9d\xce\xf5{\xa5\x04\xe3\xc7}y\xde\
\xec\xe4\x80\xae\xa6.ff#tr\xee\x978\x16"\x8b2F\x87f\xd3\xc1\xb3d\xe6\x85\x85\
\x10l7+CZ\xeez\xa3\x9d{<M\x83\x85\x08\xdbi*|-%\x87\xe7<p\xe0\xb5j\x87\x04U m\
\x97!\xe4b\x12x,d\x92)\xd9\x8d!\x07\x1e\xc7\x9d\xac\x13U\x01\xa5\xfb\xc1DpV\
\xe9\xa1\xc8\xdb\xc9\xf39\xbc1\x8b\xfe<\x83\xb3\x94v\xd1xG\xd4\xe8l6\x858\
\xe2\xba\xd1\xbc6\x18\xb5a\x05\xb1F\xd24\x82J\xd9\x9a%\xed\x04\x08\x01,\xf3h\
\xe2\xb3\xbc\xb9w\x94\x87\x15\x08\xd1?\xc1\xa7R.\xa0RV\xa7\'\xf5\x83\xfc64\
\xe5\xb3h;\xa7q$+\x84\x00\xdc)<O\xe6\x921\xa4\xe5\xc8\xae\x1f\xb6\x18r\xc2\
\x9b\xa6\xbc\x80\xcb\xa9\xc8\x01s\xcc\x18\xe2z@\xadV\xcft\xfd<\xb5j\x87\x92\
\xcc\xba\xe3\xc0[x\x95\x9b\xcf\x08\x99\xbc,\xa888n\xe5\xcdg*,\xbc+m>\x03\xd4\
YxW\xd2|65\x06\xc6\xdfB\x83\xb3\xf2\x1e\xfd\xa7\xd2\xc2;\ns\t\xce\xca\x82%\
\x1c\x1c\xb7X,\xf5\xe0\xb8\x83\x04\xb5\x1b\xdeu88n\x16\x1cZx\x97\x00\xe5\x0b\
}\xa9hL\xb4\xea\n!\xd1l\xb9\x99\xbbi\xc1\xd2AHx\xde}\xde0W\xa5\x04\xed\x92\
\x89Zu|\xb2\xd4\x1et\x8d\xa1\xba\xdd\x9ax\xdd`\xc8l\xb5\xd6D\xb3\x95}\xf7\
\xabt\x92\xc9\x13F`Y\xd9\x16\xf7B\xc1H\xbc\xcfk\xf3_\x9a,\xca\x18\x85\xd3\
\xaa\x8c\r~\xd446\xb3\x13\xc3R\x85mMc\xb9CV\xf3b\xad\x16z!d\xee\xa0\xca\xa5\
\xb5`\xcfI!O\xac\xef4\xb1\xbcK#\xe8y\x81\x12\'\x85IX\xab.:\r\xd4Z\x97\xb2\
\xcd\xfc\xca\xea\xcbR\xce\xdc\x8c/\x93\xe0Lq\xf0\xc64P:\x06\xb9\x00\x9a\xad=\
\x80\x84\xb9\x93\xea\xf5\xd1\x9a\xedY\x82\xff\xf3@\xbdm\x02\x1b\x91\xdedQ$\
\xc6A9AM\xdb\x00\xed\xa6\xfe\xaa\x94\xad\xd4k\xf2\n\xdb=Y@J\x8c\xb4\xee\x8e\
\x82R\x82\xba\x06\x98F\xdf\xd5j\x9c\x02*\xab\xb0m\xe8H\x1c\x80\xe5x\x1d\x04A\
v\xeb\xb1\xda\\\xd89\xac\xb9Y\x85\xedx\x88+\x80\xa8wd\xc5J\x08\xdbq\'\x838(\
\x9d\x9c g\x12\x0e\x85\xedU\x82\x94\xf9\xd6Z`\x89\xc1Yqa\xbbR\x19\x1d:\x9e\
\xfcM\xb2\xbe,X\x9au).lO\xabB\xccb]Z\xab.:\n\x07\xcb\xba4\x80I]u\xe3\xcc\x99\
3\xfblA\xce\xa9\x8b\x06\xe7<\x1c\x83<\xef\xd4\xb4F\xf8\x7f\x9e\x95[o\xbapu\
\xac\x00\x00\x00\x00IEND\xaeB`\x82'
def getscanprogress05Bitmap():
return wxBitmapFromImage(getscanprogress05Image())
def getscanprogress05Image():
stream = cStringIO.StringIO(getscanprogress05Data())
return wxImageFromStream(stream)
index.append('scanprogress05')
catalog['scanprogress05'] = ImageClass()
catalog['scanprogress05'].getData = getscanprogress05Data
catalog['scanprogress05'].getImage = getscanprogress05Image
catalog['scanprogress05'].getBitmap = getscanprogress05Bitmap
#----------------------------------------------------------------------
def getscanprogress06Data():
return \
'\x89PNG\r\n\x1a\n\x00\x00\x00\rIHDR\x00\x00\x008\x00\x00\x01,\x08\x06\x00\
\x00\x00\x96\xb6p\'\x00\x00\x00\x04sBIT\x08\x08\x08\x08|\x08d\x88\x00\x00\
\x1a\xc0IDATx\x9c\xed\x9dm\x8cc\xd7y\xdf\x7f\xb3+\xed\x9c\xd9\xdd\xec\x9e\
\x95\x1d\xed]\xf9E\xd7Id]9Mt-\xa0\t\xeb5j&\x05\x12:m#*H"\xc6\r\x1a\xa2\x9fh\
\x07\xa9i E\xa6I\xd10A\x81L\xd2\xd6\x1d;\x1f2\x1f\x02\x84_\xdc\xd2(\x90\xb0\
\xfe\xd00}\x81)\xa0\xb0\x996\xad\xae\rE\xbaN$\xeb\xae\xb1\x96\xce*\x96\xf6\
\xcczw\xe7\xccj\xb5\x93\x0f\x97\xbcs\xc9\xe1\xcb\xbd\xe4%9\xb3\x9d?\xb0;\x1c\
\xf2\xf0\x9e\xf3\x9f\xf3\xf6\x9c\xe7\xe5<+\x97/_\xde\xe3>\xc6\x03\x00\x96e-\
\xbb\x1ds\x81R*$\x08\xd0l6\xa3\x0fj\xb5\xdaT\x0ftl\xc8\xb9\x02\x00c W\xa8\
\xa1\xb5\x99\xf8\xbdq\xf5Y\xd6\n\x85\xdcj\xf4{\xa3e0#\x1e\x19\x7fN\xb1X\x04\
\xd8\'\x98\x05\x02\x05\xb9\xeek!\xa0\xd5\xa8\xd2ly\x98Q-\xea\xc2u\x86\xbf/\
\x048\xf6>9\xad\x19In\x142%h\x0cx\xbe\xc1u\xc2^\xb4,I\xa5\x9c\xcf\xec\xf9\
\x1d\x7f7\xf5wNdV{\x17\x9e\x1f\x92\xcc\x1a\x1d\xcf\xa0T\xfa\xf50\xb3\x1e\x1c\
\x9cGR\nr\xae\x8dm[\x081\xfe\xbb\xf9|~\xe4g\xda\xac\xa0\xd4\xde\x81\xa1\x99t\
\x9d\xc8t\x88\xc6+\xad\xd5j\x04\nZ\xed\xf6\xc4\xefI+?\xe6\xd3\xd1\xbd6X\x9f\
\xe38\xf8\xbe\xdfW&\xf3!\x1a\xafx\xb0\xb2ya\\}\x99\xf6\xa0\x14\x90s\reocl9\
\xad5\xd5_o\xd2\xfe\xca\xe4?\x80\x94\x90w\x05B\x806\xd0\xee\xf4o\x13\x93\x86\
j\xa6=\x18\x04m,KN,\'\xa5\xa4\xf2Os\x13\xcb\x01\xe4\x9c\x90\xa4\x10`\xc9p\
\xaf\xed!\xc9<\x1c\xda\x83\xd3n\xf4i\x90\xcb9\x14\x0b.\x81\xd24\x9b\xf5\xa1e\
\x1c[R.\x96\xfa\xdek\xb7\xdbln\xb5\x13\xd7\x93\xe9\x10M\x8b\xcd\x8d\xd2\xe4B\
3b.\x8b\xcc<\xb1TI&-\xdam?\x91\xac\xda\x83\x1f(\xea\x8dv\xaa:\x96F\xd0\xf7_\
\xa7\\\xad\xcf\xbd\x9e\xa5\rQ\xad\xb7\x17R\xcf\x91\x9b\x83i\x91)\xc1I\xc7\
\xa2\xfe\xb2Y\xd6<\x1a\x99\xce\xc1z\xa3\x83eI\xa4\x18\xbf\xd9\xefb\xf8\x83?j\
gY\xf5Hd~\x1e\xacm\xb4\xb2|\xe4\xcc\x98\xdb*jY\x92|\xceN$\xba\x8dC\x10h\x9a-\
o\xea\xef\xcf\x85\xe0z\xb5\x90\xe9I~\xbdZ\xa0Zk\xd0\xe9\x04\xa9\xbf\x9b\xb9,\
\x9as\xc0q&\x9cpS\xc2\xb2$\x8d\xad\n\xcd\xb6A\xeb\xd1\xe5\x86\xb5;\xd3\x1e\
\x14\xa2\x9f\\\xbb\xed\'\x1a^=\r\xd8 \xa4\x04\xc7\x16\x91F \xe7@\xab\x93\xae\
M\x99\x12\xb4c\xeaU\xa5I,\xa9\xb8\xb9\xe1\x04Q\xa0\x94\xa1\x90\xef)\xb1\x04\
\x82\x1d\x0c+\x89\xdb\x94\xe9>(c\xebI\x10d\xb3\xd1)M\xdf\xb0\x94299\xc8\x98`\
|\xe6e\xb9\x91\xa7\x11 \x06\xb1\x10a\xbb\xb6^\xc0\xb1\xfb\xcd\x03\x9b[m:^0\
\xf7\xba\x17B\xb0\\\xca\x1fx\xef\x19\xa5\x17Bpi\xc2\xf6*\xd9n%\xa3p|\x9a8\
\xea\xc8Tu\xbfY+a\xdb.\xd0o\x8e\xfb\xd3/{<\xf1\x84\xbd_\xd8\x18Z\xed\xe9\xe5\
\xcb^}I\x90\xe9"\xe3\x0e\xd8\xc1z\xaa\xf4O~\xa6\x91e5\x11\x96\xa6\xba\x87P~\
\xccJu/&Xo\xc6\xa9\xee3%\xa8b\x12G\xa9\x94C\xca\xd9WJ\xcbZ\xe9\x93\x90\x06\
\x95p\x93\x86j\xa6C\xb4\xba\xbeI\xbbYE\x08\x81m\xc9\xb9[x\x93\xcc\xc3\x95\
\xcb\x97/\xefY\x96\xd5\xb7(\xcc\x82J9\xcfz\xb5\x90\xc9\xb3\x06Q\xfeT=\x91\
\xc1\x06\xc2\x13J\x9f\x13BV\xd8\xaa\xb7\x11\x02\xaa\x95\xecH\x1ac\xd8\xd8l%&\
\x17\xc7\\D\xb5\xcd\xad6\xf5F\'\xb1\x85w\x1c\x94\xd2\xb4Rj\xc0\xe3\x98\x9b,\
\xaa\xb5\xa1\xd5\xf6\x81\xc5\x18AG\xe1X\x92I\x03\xc7\xbeD}\xeb\x97&j\xd2\xd2\
Xx]\xc7b\xa3\xf6,B\xae\xa1\x95\xa6\xb2\xde@\xa91\x8a\x99\x01d\xda\x83\x85\
\xc2\xe3\x99[x\xab\xd5\x02\x8es\t\xdb\x92\xb8\xaeM\xa9\xe8\xa6j\xd3\xd2\xacK\
q\x0b\xef(8\xb6$\x9f\x1b\xb1I&\xc4\xb1\x85\xf7\xb0\xe1\xd8\xc2;\x80\xfb\xde\
\xc2\x9b\xa9\xea~\x94\xd0<\x0cq\x0boVn+\xc3\x9es\xe4\xe6`Zdl\xe1\x9dO\xd9Y\
\x90\xe9\x1c\xf4\x03\x90\xc2D\xc2\xb5\xe7\r\x97T\x8e\xac\x85\x17\xa0\x13\xe3\
T\xab\xcdG\x17\x93\x06\xf7\xfd\x1c\xcc\xdc\x9d\xb2\x90\xdf\xb7\xe7\x95\x8b\
\xc3\xdd*\xb3t\xa7\x9c\x84L{\xd0\xb6It\xb8\xcd\xca\x9d2\t\x0e\x85\xb0=\xea\
\x00"eh\xf4\x9c\x05\xf7\xbd\xb0\x9d\xa9\xea\xbeZ\xc9\xe3:\xf3\xd1\xa8E0\xfb\
\xf5%A\xa6=8.<`\x18z\xc2\xf6\xa0\xca\x7f\x14\x94\x0e\xf7\xda\x1e\x92\xa8\xee\
\x0f\x85\xb0=\xab,\xba0\xd5}\x1ad\xe5N\xb9P\xaf\xfbv\x82 \x90,\xb1p\xaf\xfbi\
\xdd)\xe7\xe9\xe5\x7f\xecN\x99\x06\xff_\xb8S\x96\x8an\x02\xc5\xaf\xa1\xd1\
\xec$\x96)\x0by\x07!\x04J\xeb\xd4\x1e\x87\x99\x12,\x97r\xd4\xd6G\xf8\x9d\r\
\xc0\xb1-\xd67&\x9b\xec\x06]3\xd7k\r\x1a\xcd\xe4\xf6\xfd\x8c}\xd5\x92\xcb\
\x8d\xf9|\xb2\xcd\xbdP\xe8\xd7d\xa7u\xb0]\xdaFoY\x92v\xb3:\xd6\xce`\xdb\xd6\
\xcc\x1e\xc3K\x15\xb6m\xdb\xc2\x1e\xf0a\xcb\x1aG\xeaD\xaf\xb5I\xed\xdf\xb6\
\xb4\x1e4\xc6P,o\xa5\xd2l\xa71\x9b\xf5\xb04\x82\x9e\x17\xe0\xfbj\xee\xf5\x1c\
\xa9!:\r\x96\xa6\xba\xcf\xa2\xbe$\xcf\xc9\xd6\xd3)\xc5\x88kO\x11\x031\r2\x9d\
\x83JC\xa3\xb9\x03"t\x1c\xdf\xdc\xdc\x1c]v\x8a\x05c\x1ad\xbe\xc8\x18V"\xbd\
\xc9\xa2H\x8c\xc3\\VQ!\xc2\xf9\xd8i\xad\xcf,\x89h\r~`\xfat1i\x909A)\xa1\x90\
\xebi\xb7g\xf76\x942\xbc\xa3\xc6\xb6L\xea\xa8\x17\x98\x03\xc1\x9e\x9a\xbd\
\x87q^\x14=Xrx/\xc7\x9fcY\x02\xd71\x8c0X\x8dD\xe6\xa1=\xf1\xb6\x16J\x9b\x896\
\xf3q\xdbD\xce\r\xe3\x97 \xfc\x99\xf6*\x97L\xb7\x89\xf8t\xf3\x03\x93\x89\xa4\
\xd2\x89\x1d\xfd\x84\x08\r<i\x90mhO\xac\xf24{\xe2$(\xb5\xdfki=\x17\x17\xe2uo\
Y\xf2\xc0j\xea\xcd\x18\xf5r\xa8\xbc\xee[\x8d\xea\x81\xd3\xfe\xe6V+\xd5\xa5\
\x1b\xc3\xb0T\xaf{\xc4JT\xd90U\x86\x9d\xe1]n\xcbQ\xdd\x9b\xc5\\\xb8\xb7P\xd5\
\xfd(\xaf\x8ay\xe1\xd0\\\x98c\x8c9\x10\xdc\xa1\xcd\xbe\x000\xa9>g\xab\x8ce\
\x85\xf3{\xab^O\xb5@-\xe4D_*o\x1dP.\xb5;\x8b\xe9\xed\xb9\x11\x8cw\x98\xe7+\
\xbc\x05\xa8\'\x86!\xd39\x18\x1fv\x1fv\xedL\x9e)\x84\xc0\x8d?+\xa5\x0fX\xa6=\
\xd8\xe9\x04\x94\xbb~\x05\xcf\x16s\xbc\xae4\xed\x8eF\x08\x81\xe3\x84s\xc8\
\xb6mvw\r\xdf{\xd1\xe2\x86y\x07\xff\xc5\x17\xe0\xce\x0eJ)|\xdfGk\x1d\x99\xe1\
,K\xf2\x9b\xbfV\x88\xe6\xaf1\x06?H7\x122%\xe8\xf9\x1a\xa5\x0c\xb6SD\x1bC\xa9\
\xfcS\x14J\xf7\x08\xd4u\xcc\x0e\xe8\xdd]0\xb0\xc7.o\x98{!\xe1\x0f}\x1f\x88\
\x13\xe4\xce\x9fC\xae\xadaY\x02\xdb\x12H\xb9\xc3\xde\xado\xb2\xfb\xdd\xbf\
\x8e\x9e\xdflz\xcb\xf1\xf8-\x97\xcb\xe4\xf3\x05\x0c\x17\xf14\xb4[\n\xadw\x10\
b\rc\xc2\x9f\x00\x86\xee\xed\x92]r\xe6\x9dnL\xfc\xad=\x82\xedk\xe1\xd6\xe9\
\x11\x1d#-)q\xdd\x02\xae\r;o\xfc\x05\x9b\xf5v\xea\xb6MEP\x08\x81eY<\xfdt\x91\
\x8f?SFiC\x10h\x10\xbb\xa0{e\xd6\xa2\xf2\x86]\xc4\xbd\xbbp\xe2\x81\xf0\xe7\
\xa9\xee\x07wN\xc4\xca\x88\x90\xd7\x83\xdd7\xee\x1a\x94\xd64[\x9a& \xbf\xe7\
\xddll\xb5x+\xe8\xf0\xf9\xcdM\x82 \x98\x0fA\xd7u)\x14\x8a\xb8?\xfaQ\xf4\xeeY\
<_a\x08W\xcd\xde\xf01f\x07q\xea\x1e\xb7n\xfdM\xf8\xfb\x9b\x06\xce\xc0\xad[\
\xc0\x99\xfdg\xdd\xba\x05\'O\xde\xe2\x9dw\xce\xf0\xce\xdb\xb7\xb9}\xef\x047n\
\xc2\xeai\xc1\x03\x0f\x08\xce\x08\x19\xad\xc6\xe6m\xd3\xddZ,\xd67\x1aH\x14\
\xb5\xda\xfa\xc4 \xcc\xc4\x04\xa5\x94\x14\x7f\xa6D\xe1\x1f\xff,z\xf7,J\xef`v\
\xc3\xee\x12\x00wo\x81\xf9N4$\xcd\x8d\x1d\xcet{\xf1\xcc\xbb\xc2\x9f\x83G\x9d\
\xf0\xf7^O\xc7z\xdc\xec`\xcc\x9b\xe8\x1b\xdf\xe6\xe6\xb5\x93\xac\x9d\xb78)d\
\xaf\x8f#\xfbD\xb5\xf6\x07H\xb6\xa9T\xca\xe8\x11\xd7\x94$"\x98\xcb\xe5(W\xaa\
\x08i\xa3\r\x98\x9d\x9d\xb0\x81\xab\xab\xe8k\x7f\x15+y\x13!\xce\xf2\xa0X\xc5\
u."/H\xe4\xf9$5\x80\xd9\x19\xf8\xfdN\xef\xfdp\xe5\xf4\xbe\xfe\x97\xbca\xcep\
\xe6\x9c\xc5I\xd1#z\r\x01l\xfd\xa76_m\xd5\xf9\xdc\xe7\x0e\xaa)\'\x12\xdc\xd8\
\xf8=r\xf9\xa7\t\x82\xeb\x18\xbd\x13\xc9\xd0\xb7\xb6\xaf\xf0]\xad\xa3\xb9&\
\xd8\x03q\x06)Vq\x9f\xb4\xc3B{\xfd\xf7\xbf\x88S\xfb\r\xef\xfd\xce\x8a@\xac\
\x81X\x13D\xfaF\xe2e\x04\xee\x136\xb6%\xf1\xbcW\x08\xdex\x85\x9b\xdb\'9{1\
\xac\xc3\x00\xad\xe7|\xacK\x1f\xa3^\xcfQ\xee\xedS\x93\x08\xda\xb6\xcd\xd6\
\x1f\xd6\xd1oK\xbc\x97^C\xac\x11\x8eE\xfd&J]\x8d\x88\x05\xdf\xba\n\xe6o0w\
\xc3\x15R\x9e^\xe5k\xde\x97\x01\xb8\xaew\xc7\xba\x96\x08!\x10B\xb0&\xf6\xb8p\
\xe1{\xd8\xe3,\xe7\xcf\x0bN\t\xc9CRr\xe9{Oq\xf1=\x8f\x00`]\x94\xd8\xce{\xc26\
\x18xC\xbf\x86\xbaq\x82s\x0f\xdb\x80\x89n-i\xfd\xcf\x0e\xb5\xdf\xa8\x8e\'\
\xf8\xd9\xcdM\x9ez*G\xf0V\xb8\xb9\xca\xb55\xb4\xbe\x8a\xd6o\xe2\xbf\xd8\xe1\
\x9b\xaf\xbc\xca\x8d7\x15Zk\xf4\xb6\xc1\xec\xe8\xbe\x06\x0f\x92\x18\x858\xf9\
\xdek!\x04Zk\xa4\x94\xc8\x0b\x16\xf6\xa3\x16\x8e\xe3P\xa9\x94p\x1e\xb5P\x81\
\xc6\x88\x15\x1e\xb6\xcerN\xee\xf0\xf2\x15?\xea\xcd[\xc6\xd0\xf8\xef\x01\x95\
\xea&\xcd\xc6\xc6p\x82\xa5R\x89\x1f/\x14\xf1_\xd5\x80\xa1\xd9\xf8\xfdn\x0b\
\xde\xc2\xf3\x9e\xc7\x18\x83eY}\xd2\x89\x94\x12)%\xe7\xcf\x87j\x89\x0b\x17d\
\xb7\xb1\xa79%N\rV\x01\xc0\x9d\xeeX5\xe66\xbb\xbb\x86\xdb\xb7\r\xdb\xdb\xa1\
\x14\xe38\x0eA\x10\xe0\xfb>J)\x82 \xa0\xd1h`Y\x02\xe7Cy\xcc\xedm\x0c\x0fp\
\xf7\xd6\xdb\x00\xdc|\xa1\xc5w\xae\x1b\xfe\xd1\xcf\xaec\xe87\x92\xf6\x11t\
\x1c\x87R\xb9\x8a\xe7)\x10P.\xee{\xe5\x16\x8bE\x9e}\xb6\xc4\x8f\xfdX\x1e\xcb\
\xb2\x90Rb\xdb\xf6\xc8\xde\x99\x15\xd7\xb5f\xd7\x18\x8c1h\xad\xf1}\x9f\xe7\
\x9f\xf7x\xee\xb96\x9e7\xdc\xcbbk\xa3D\xa5\xd6\xe8\x93v\xa2(l\xad5\x1b\x1b\
\x9fG\xe9{\x98=\xf0\xff\xbc\xc9\xf5\xeb\xe1\xd0{\xe6g\x8a\xe4~4\x99\x0br\x96\
\xb8\xae5\xdbZs^J.\xc4\x14\xae\x9d?\xefP\xff\xa3:B\x08VW\x05\xa7O\x87?\xcf\
\xc9p\xeev|P~\x13\xa5\xd4>\xc1R\xa9\x8c!\x0c(.\x0e\xb8n,\x13=\x92@45&A\x08\
\x11\x85\x99G\xb2\x92\x906R\x8aCE\x0e\xe0\x82\x94\x11)\xa5\x14\xd7\xc7\xdd;6\
\x04\x11A\xb3\x07\xa5\xe2\xe2\x87a\x12\xc4{n[\xeb\xc4r(\xc4\x08\x96>~\xb8zn\
\x10\x83)#\x82 H\xe4\xbey\xa4\x9c\x10\x06W\xed$C\xf6H\x11\x84\x83=\xb9\xad\
\xf5X\x92G\x8e\xe00ii;\xa6\xe6\x18\xc4\x91#\x08\xc3S\xb8\xa8\x11\xe6\xac#I\
\x10\x86\x93\x1c\xb6\xba\xf6\x11\xfcJg\n#\xf8\x920l\xa8\x0eC\x1f\xc1\x0f9\
\xb3\xdd:\xb0h\xc8\x01\xdb\xfe0\xc2\x0f\x8c\xfb\xf0\xb0C\x08\x81m\xdb}G\xadA\
,\xd5!6+\x8c\xeb\x9ch\x88\xce\xf3\xe8\xb3L\xc4\x08~`\x99\xed\x98\x1b"\x822\
\xa9\xfa\xeb\x08\xa0^\xafG\xaf#\x82\x17/\xde?\xf9\xcf\xe2q\x8c\x11\xc1\x9e\
\x1e\xe5~@\\\x08\x88\x08\x9e\x1b\xe1/v\xd4\x11\x11|\xe8~\'x\xbf"\xb6\x8a\x1e\
\xf7\xe0\x91\xc41\xc1\xa3\x8e\xfb\x9e\xe0\xa1?M4\x1a\r\xb6\xb6\xb6\xfa\xde\
\x1b\xd4\xbf\xf4~\x8f\xff\xcc\xe5B\x1d\xef\xa1\'\x18\x04\x01\x9d\x194\r\x11\
\xc1Q6\xee\xc3\x02\xc7q(\x97\xffY\x9f9\xeet\xf7\x1c\xd8S_\xf4\xfe\xc5\x9d\
\xfb\x0e}\x0f\xf6`\xdb\x1f\xa0Z\xfd\xe7\x13\xcb\xb5Z-<\xcf\xa3P\x08oE\x89\
\x16\x99\xb7\x0ei\x0f\xee\xee\xf6\xd4\x11\xab\x13J\xc2\x8b\xbe\xcf\xfa\xfaz\
\xdf\x1c\x8d\x08\xde8\xa4\x04{\x98\xa43\xba\xae5\xbf]\xab\xe1\xfb>\x9f\xfe\
\xf4\xbe\x8d>"\xd83v\x1eE\x18c\xf8\xfc\xe76i6\x9b\xfc\xc8\xe5<O|x\xdf\x90\
\x14\x11\xbcvm9\xfe\x9cY\xa0\xd9l\xf2;\xbf\xb3\x81eY\x94~\xaeH<f*"\xb8\xa8ty\
iq\xfb\xf6h\x95 \x84W\xbc\x94\xcb\xe5\xc8\xaa\xfb\x90\xec\xd7LD\x04\x83\xe0\
\xd596s>\x88\xaf\x96\x85\x9f*\xe2\xe6r\x07\\\x89\xa2m"\x8d\xd5t\x19\x18\xecA\
\xcf\xf3"\xddK.\x97\xa3\xf8\xd3\xe1\x1d\x1a&\xf6?\x1c\x01Y\xb4\xb7M\xc4\xe1y\
\x1e\xa5R\tc\x0c\xae\xebR\xa9T\x07J\x0c\x99\x83\xb3\xe46\x9a\'z\xedZ]\r\x1b\
\xedy\x1e\x95J\x85 \x08\xba\x1eP\x95\xfd\xb2C\xbe\xdf\'\xc9\xbc\xe8\xfb\x87\
\xd6\x00\xf3\xb5\xafy\xfc\xd6o\xd5\xf8\xeaW;x\x9e\x17\x89n"v\xfb\xd0\xbe;\
\xdf>\xd5>\x82O\xb9.\x9dN\x07\xd7=<\x0e\t\xbd\x1el\xb7\xdb\xd1\xa5X\xb6mS*\
\x95\xb0\xac\xf7\x8f\xf8\xd6\xfe\x10= \x8b\xf6&\xae\xe38\xd8\xb6\x8deYX\x96\
\x15y\x11\xc9\xae\xdf\x8aX\x13\x88U\xc1)!8\x1d\xb3\xd5\xad\n\xc1Z\x02\x87\
\xbc\xf8\x94\xd8\xe9\xbe\xde\x8d\x1dwn\x1b\xc3\x1dc\x0eXnm\xdb\xe6\x17~\xa1\
\x84m\x0f\x1fi\x83N\x99\x11\xc1z=\xbc\xec\xb4\xd5jF\x02k\xdc\',\x8dy-^6\xad\
\xb7\xe1\xa8\xcf!\xbcK\xff\'\x9e)\xf1\xf8\xe3\x03#L\xc4\x88\x99\xfe\xd0\xe8\
\x03=X(\x14)\x14\xc2\xb0\x80o~\xc3\xe7\xc6u\x85R\xd7\xd0\xfa:z{\x17\xb3s\x1d\
\xd3u\x92\xeb\xfd\x9b\xd4\xb0i\x11\x8d\x16!\x90RR*}b\xcc\xb0\xec\x12\x13#z\
\xb0\xaf\x94\tc\xfe\x9ez\xd2\xed{o\xf8\xeb\xf0BSc\x0c\xf7\xba\x1f\xfc\xe0c\
\x0f\xf7=\xf2\xd6\xed;\x0c\xe2\xcc\xe9\x98\x9b\xe5\xc9\xd3\x9c_\xbb\x03\x0f\
\x9eC\x9c\x8e\x9d\xed\x00\xc4\x1e\x98\x15\xb41x^0!n\xa2{>\x1c\xb6\xc8X\xef\
\xb5PWU\xd8x\xd1W\xbe\xff\xbdA\x92\x84a\xe5\xf1\xa1x\xfd\xbb`?j!/\x08\xe4\
\x88!:\xf8\xc8\xc1\xcf\xa2\xd7z\x17u\xdd\x10\\Q\xa3\xc9\x1dx\xd8\x90E\xc6\
\xfd;.\xed\x9bm\x8c6\xa3\t\r\xb6(\xfe^\xacrc\xc0\xffF\n\xe1}\xb8\xbbv*\x18\
\x03B\xf4\x1a4D\x921f\x97\x8f\xe5r\x98\xde\x87\x83\x15\x0e\x92\x13\x1c\x1c\
\xae\xd3\x98\xf93t\r0\x86\xfd\xf6w\x11\xf5`\xdb{\x89B\xce\xe5G\\\x97\xaf\xf7\
"9G\x8c\xa3\x81\x11\xda\xf7\xbeeK\xc4\xaa\xc0\xc4E\xac\x95n\xa1\xbd\xfd\xf7\
\x94\xd2\xfb\xcf\x98\xd0{\x97,\xc1\x1e\xa2\xeb\xfd;\xa1\xb0\x19\xb1\xc8T+\
\x9f`\xab\xfe\xc7\xe4]\x07\\\xf8\xba\xd7\r"\x1aBrT\x15\x96%\x0fd\x8a\x1c\x05\
\xb1*\xf0\xaft\x87\xf1\x98!\xea<na]\xda\x97V|_\x8d\xbd\xe5$\xdc*\x86\x0cQ\
\xad^\xa3\xf6\x99O\xd2~>\xe0\xfbl\x87\x1frs\xfb\x95\x0f6nL\xa3\x11\xc9\x82\
\x93\xad\x0bb\xff9c:E^\x18\x14\x1aF\x975t\xa3\x04b\x86\xa4\xbem\xc2\x7f\xe9y\
~\xf5W\xca\xf0\xef\xb6(\x16\\\xce\x9c\x16t\xbe\x12\xd3I\x8a\xfd\x07\x8d\xae%\
Y\x96U!W\xc9\xb96\xc6\xec\x8dL;+\x05\x89\xaf>\xe8\xc5\t\xdb\x8f98\xdf\xff\
\x01\x1a\x8d:0d\x1f\xbc\xfa\xb2G\xf5S%\x82_\xff\xf7T~\xf1\x1f\xf0\x93\x85<\
\xcf\xb5;\xe1\xe6\x1d_PF\xb6|\xa0\x07G\x116+\x08\xd9M\x949\xa2L|M\x1cWe\x18\
\x1c&\xb0\x7f\xc0\xc1~\xef%\x94\xba\x16}6\xf4<\xa8^\x0b\xa8\xfd\xea?\xe1\xb7\
?\xdfd\xdb@\xb1X\xc0z\xef\xc0\xdc\x1a\xd7\x8d\xbd\x06\x8f\xeb\xcd\xde\x1fbL\
\x99a\xdb\xf1\xe0"c0XR\x92\xfbH\x0e\xeb\xdd\x16\x1d\xffUj\x1b\xbf\x1b}>R\xf1\
k\x8c\xe1\xb3\xff\xba\xcc[\xdf\xd9\xa0\xfc\x89g\xc8\x7f4\x87R\xdbt:_\x1d/\
\x8a%%gV\xa2\x00\x0e\x13\xfd7\x0e\xfdW\xc0\xf7\xe6\xda\x93O>\xc9\x85\x0b\x97\
\xd0Z\xd3l|\x81\xcfn\xfe[nh\x15\xa5\x9f\x1e\xab\xd9\xb6\x1e\xb1)\xfcx\x9e \
\xb8\x8e\xf7\xd2k\x94>\xfea\n\x85\x02\xde\xd7:\x04\xaf\xaa\x03\xe2\x9b\x19r\
\xfa\x1e\xde\xd6\x15\xb4\xdeAG\x01\x87\xc9\xbe\xd6\xa5\t\x80\xeb>\x85m\xbf\
\x1fcv\xf1\xff\xd2\xa7\xf1\x85:\x8d\xffX?\xf0\xc7\x1f{Q\x80e\xd9\xc8\x87$\
\xee\x07\xc3\xe1\xf9/6\x9a\xfc\xc3\x9ft\xc9}\xd8\xc5}\x12\xd4\x1b\x1a\xfd\
\xd6\r|\xff\xc5\xe4-\x84\xc4+m\x1c\x8ec\x03\xa7\x91\x0f\x9dC\x9e\x0bW\xc9o+\
\xc5_\xbfz\x15u\xe5e\xec\xf7[CG\xd6\xd8\x1e\x94\xb6\x83\xb9m0f\x97@i~\xbfV\
\xa4\xd5\xf1\xf9\x97\xff\xa6\xce/\xfd|\x01\xe7\xfd\x17\xb0\x1d\x89e]\xe4-\
\xfd:7tB\xf1,>|\xc7\x8dv\x0c\xae\xf3\x14\x0f\x8aS\x9c9-\x90\xe7$\xc6\xec\xa2\
\x94&P\x8a\xc0\xf7\xd0\x13\xea\x1cI\xd0\xcd]\x8e\x1cj\xaa\x1b_\xc4\xfd\xe0{p\
\x9c\x8b8\xb6\x8d\xfe(\xfc\xb7v\x87\xf6\xa9\xf3\xe4?\xf2\x04\xd6\xc3\x92G,\
\x9bG,\x9b=\x0c\xechVV\xc6\x8f;\xa5\xb7\x0f\x90s\xa2T\x04\xa7\x01\x90\x0f\
\x9d\xe3\xfc9\xc1\n\xdd\xebo\xbd\x00\xad\xc2P\xf4I\xc4\xc6\x12\x0cO\xf1\x8f!\
\x85\x08\x1b\x02p\xe2\x0c\xa5\xf2&\x85B\x8er1\x87\xc9;\xb4\xda\xcf\xe3}\xe5K\
\xc09,\xfb1l\xfb\xbd\xd8\x96D\xac\x85!B;\xc6\xf0\x9a\xd2\xbcm4f\xd7`v\x0c\
\x86\x93`n\xb1\x7f\n\x91\xbc\xcf\xb69\xb1\xba\xc7\xda\xc95\xc4\xa95\xb4\xd9\
\tO\'\x06\xbe\x11l\xa3^\x7f\x01u\xe5\xe5\xc4\xa4&\x12t>\xe4v\x89\x86\x04k\
\xd5\x9f\xa7\xfe\xc56\x08\x83\xebXH)\xc2\xe0du\x05cn\x03\xb7\t|E\xe0\x83\x94\
6B\n,\xcbA\x9c\x16X\xe7\x04g,;<6u\xd7\xfb\xf8\xe9\x1b\x11\x1e\x89\x8c\xd9C\
\xbd\xb5\x8dR\nmv0\xfaM\xee\x99\xab\xdc1w\xbauL\x87\x03\x04\x8b\xc5"\xf2a\
\x0b\x10t\xbc\x00\xa5\r\x8eX\xa5\xf01\x07\xe7\xf10#\xa4\xd2\x86\xe0u\xcd=\
\xfaeB)\xa0\x90S]\xe1#<\x9c\xee\xec\xbc\x1b\xa5\x1f\xe4\x8a9\xcf\x8e\x11 N\
\x02\xa0\xd4\x9b(\xb5M\xf0\xad\xab\xb8\x1f\xfc~\x8cy\x03\xe0\x00\x99\xf8=m\
\xd3d\x0c9@\xb0\\\xf9e0\xa0nh\xbe\xd0\xec\xf0\xae\x0b\x92?\xfb\x1f\x1d\xce\
\x9f\x95\x94\x8a\x7f\x0f\xa5\xb6\x81m\xcc\xf6\x9b\x07Ln\x83\x19C\xc2N\xfb\
\x0e\x17\xd6\x00^\xef+\xeb\xa9\x80\xcazh{\x1f{\xed\x98\xb3\x7f\x95\x99%\xc2\
\x8c!i\xae\xad\xe9#X\xa9Tp\x9d\x1f\xc6\x98=\xfc\xe7C[\x85\xd91\\\xbf\t\x1f\
\xfb\xe8\xa3\xe4\xdcGi\xff\xef\x00\xee\x19\x02\x7f\xb6T\xcf\xaek/6c\x88m\xdb\
\x94~\xb1\x826{\x08\x01\xc1\xb7\xae\xf1\x83\x8eM\xc7\xfb+N\x9f0\xe4\xdd\'\
\xd0\xc6 N\xad\xe0{\x1e\xf0\xdd\x99*\x86\x05\xa7\xe7\xabT>\x85|\xb7\x8c4e\
\xa5\xa7s\x04Jc\xcc=~\xfa\'\xfe.\x96%Q\xca\xa0\xf5\xb7P*\x98{\xc3F"\xa5jc_\
\xe9d\xff}\x00\x94\xd6l\xfda\x0b\xe71\x9bB\xde\x01\xe3\xe0:\x97\x08T\xa8\xf4\
\t\x82o\x0f}\xd04)Qf\xc9\x18\x92\xfa^5\xbd\xb3C\xfd\x8b\x1d\xacw=\x88\xfd\
\x036\xdeK\x1e\x9d\xff\xd3a\xe3_}2\\\xc2\xdf0\x88\x13\x06=\xa6\xf7\xd2\xa4D\
\xc9"cH\xaa\x94(\x7f\xf2_\xfe\x8c\xdd\xbb\x06yVb\xd9\x0eR>\xc2\xfb\xdew\x8f\
\x9c\xfb(J\x1b\x84^\xe1\xffu\xdaS5d\x18\xb2\xb4(\x8f\xbbW-"\xb8k\xc2\x105uSc\
\x8cF\x9c\xb5\xf8\xba\xd2x~\xb8\xaf\xa9\xe0\xa5C\xe9\x89\x91\xfc^\xb5\xbb\
\x86\xaeV\x03\xad\x15\xea\xaa\xc7\xee]\xc3\xaf\xd5\xb6h\xfe\xd7\xff\x8b\x1f\
\xbc2\xb1\xb2V\xab\x95\xb8aYh\xf7S\xdd\xabf\xee\xf6\x7f \x1e\x10\x88\xb3\x02\
\x81\xa0\xf9\xa5\xff\x8c\xd7nL|\xd8\xb4\x19C\x16\x94\x12\xa5\xffO\xbaz\xd6B \
0F\'"\x07\x1c\xca\x8c!Cu2B\xc8\xc8\xc6\xf7\xf2\x0b\xedE\xb6\'s\x0c=M\xacv\
\x87\x98\xd6\x8a\xab/\'\x17\xc9\x8eD.\xec^\xef\xe9\x9b\x1a\xef\xb9dC\xb3\x87\
#\x90\x0b[ \xba\xa2\xfb\x8b\xff\xab\x11\xdd\x133\x0f,%\x17\xb6\x94\xe1\xc5PZ\
\xab\xe4\xfa\x95\x19\xb0Pa[\x08\x19\xb9d\xf8\xde\x97\xe6^\xf1\xb4\x98\xfaf\
\xbc\x9e\x856\xf0;h\xf5Z\xa6\x8d\x1a\x85\x85\xe6\xc2\x16B\xa2\xb5\xc2\xf7\
\x16\xb3\x8f-*\x17v\xdf*:\xee\xa4\x905\x16\xe5\xbe\x19\x11\xdc1\x86\xe0\x1bG\
\xe7\xa2\x80\xa4\x88\x08\xbe\xf2Bk\xe6ma\xda\xf4|\xf3D4\x07\xd3H,\xa3p\x9c\
\x9eo\t\x98\xdb5\xd4B\x80\xd7i&\x12\xdd\xc6\xa9:\xb4\x86\xa4\xb7\xde\x0ek\
\xf7\\"_\\\x07\\GP*d#\xa9\x98\xaeF{\x9a\x142\x99\xbb4\xe7\xba\xe4\xb2\x84\
\x10a\x12\xf1i\xa2p3\xedA!\xc0\x89\x91k\xb7}\x9a\xad\xc9\x8bW\xcf\xdc<\x08)\
\xc3,!=s@\xce!u\xde\x97\xccS\xa2\xf4\xa04\x89%\x1577"\xeb\xa4\n\x93i\x14\xf2\
!C\xcb\x12\x08vF\xba\x9d\x0cC\xc6\x19$\xf7_\x07A6\x1b]\xef\xde\xd0\xfd:\x92\
\x93\x83\xacS\xa2\xc4^g\xb9\x91\xcf\xe2`\xbb\x90\xf8\xc1\xdaz\xe1\x80\x0f\
\xdb\xe6V;u\xb2\xc4i\xb0\x10\x82\xe5R\xfe\xc0{\xcf(\xbd\x10\x82K\x8b|Y\xcd\
\xd2Qt\x0c\x16\x92\xb5g\x1e8TY{\xe6\x85T\xd6\xa5\xcc\x11\xcb\xda\xf3\xa7_\
\xf6x\xe2\t{\xff3ch\xb5g?\xbd\xf4\x90\xc8\xba\x949bY{>\xf9\x99t\xfa\xd54XZ\
\xd6\x9eY\xf3\x0e\xc61*zf\xe1Y{\xfc \x00\xc2E\xa6T\xca\xd1hv\x12i\xcd\xc6\
\xd5\x97\xff\x88C\xb9X\x8e~\xdf\xf8\x0f\x9b\x04W\x92\x1f+2\x1d\xa2\xcd\x96O\
\xb5\xd2u-\xb6$\xadF\x95f\xcb\x9bZ\x12\xb9dI\x9e\x8e]\xaa\xec\x07*\x159\xc8:\
\xc9\xa9\xd2ln\xb5Y\xaf\x86\x8e\x08\x96%\xfbR\xad\xcf\x8a\x8d\xdfK\xaf-\xc8|\
\x91\xd9\xaa\xb7\x11\x02\xaa\x95\xe4\xde\x16\x93`\x8cac\xb3\x95\xc8\x1a5\x88\
\xb9\xac\xa2\x9b[m\xea\x8d\x0e97\xf4m\x9b\xe5\xe2K\xa54\xad\x94\x1a\xf08\xe6\
\xb6Mhmh\xb5}`~\x1b}\x12\xcc\x8d\xa0eI\xf29{\xe6\xed"\x08t"\xad\xc0(\xcc\x85\
\xe0z\xb5\x90\xe9\xe2\xb2^-P\xad5\xe8L\x91^:\xf3\xd3D\xd6\xe4 \x1c\r\xf5\xcd\
2\x8e\x93,.*\x8eL{pp[H\xaat\x1a\x05\xdb\x96\x94K\xb9(\xd4\xb5V-P\xaa\xd4S=#S\
\x82\xc5\xc2\xfei\xc2\xf3\x82L\xccc\x9dN@\xa3^\x01B\xb3\xb7\x94"\xd5\x8a\x9a\
\xe9\x10\xfd@L-1K\xcf\xc5\xd1\xf1\x02\x82\x98j{0\x8f\xe1$dJ0~J\x9fv\xdf\x1a\
\x86Y\xb2\xa2\x1f+\x9d\xb2\xc02\x95N\x99\x1e\x97\xe2W`\x14\x8b\xc5\xb1\xf30>\
\x9c\'\xd5\x17\xbf!\xbbR.\x8f4\xc2\x0c{\xce\xa1\xbfOfV\xdc\xf7\x04\x172\x07\
\xe7\xadt\x1a\x87\x85\x10\x9c\xa7\xd2i\x12\xe66D\x87\xe4\xf7\x9e\x1a\xb3dR\
\xc8\x94`|us\x1c14\x8byZX\xd6J\x9fY.\xad\xfc\x90\xe9\x10\r\x14\xf4\xbc@\x85 \
\xb1\xd2iT\\\x88\x10\xe0\xd8\xfb\x976j=\x833^\x160\x06<\xdfD6\xfa\xac\x95N\
\x1d\x7f7\xf5w2\x9f\x83\x9e\x1f\x92\xcc\x1a\x1d\xcf\xa0T\xfa\xe0\xe6L\xadKqH\
)\x12+\x9d\xc6\xfa\xc9\x98\x15\x94\xda;04\x97b]\x1a\xb4\xf6\x04\nZ\xdd\xab\
\xc2\xc6AZ\xf91\x9f\x8e\xee\xb5$\xd6\xa5\xb9l\x13\xe3\xac=\x8b\xae/\xd3\x1e\
\x94\x02r\xae\xa1\xecm\x8c-\x97&\xac@J\xc8\xbb\xa3cx\x17j]\n\x82v\xe6a\x05\
\xbd\x18^!\xc0\x92a\x0co\x0f\x0b\xb7.\xa5A<\xac\xa0\xd9\xac\x0f-\xe3\xd8\x92\
r\xb1\xdf\xdf\xad\xddn\xb3\xb9\xd5N\\\xcfR\xaf\xa1^hX\xc1Q\xc1R%\x99\xb4XhX\
\xc1\xa2\xb1\x94\xb0\x82Eb\xe1a\x05\xf7+2%x\xa8\xc3\n\xb2\xc0qX\xc1\x120\x97\
U\xd4\xb2$\xd5r\x9e|\xde\xc9\xc0\xc2\xab\xa87:\xd4\x1b\xd3\x85\x1deN\xd0q,\
\x1a[\xe5\xcc\x12U\xd9\xb6Em\xbdH>\xefPNi\x1b\x84y\xb8\x91l\x94\xfa\xc8\x8d\
\x0ba\x9d\x04K\xee\xdf\xfe\x9c\xcf9T+\xf9Tr(dL\xb0\x90w"\xfb\x9d1\x86by\x0b\
\xdf\x9f-T\xf6wkE\x9e\xedfY/\x97r\xa9\tf\xbaM\xb8\xae\x1d\xbdn4;3\x93\x03\
\xf8\xcd\x8df\xb4\xfdH)q\xecK\xa9\xbe\x9f)A+64=/\x9b gcBsx\x0fBN\xce\xfd\x12\
\xc7B\xccg\x96%\x0f\xac\xa6\xf1F\xcf\xd3|\xb6\x10a\xbb\xd5\xa8\x1e\xd0ron\
\xb5R\xcf\xa7i\xb0\x10Yt\x98\n\xdf\x1e\x92\xac{\x1e8\x16\xb6\x8f:\x16Bp\xd8)\
C\x9b\xc5\\\x9f\xb4\x90E\xa6T\xde:\xe0\xc0\xd3\xee,F)<\xd7\xb8\x89\x1e<_\xe1\
e\xb0\xe9O\x83l\x0f\xbc\xb1\xd7\xb6Lo\t\x1a\x85\xf8\xfd\x86i\xcf\xc9\xd9Zxc\
\x9dd\xdb\x82j%?\x93;\xb3\x10\x90\xcf\x9d\xec{/\xed\xcdg\x99\x9b\xcfZ\x8dO\
\xe38\xa1\xbcX\xad\x142uN\xf7cQ\xa5K3\x9fy\xc1\xc1{F\xb3\x80\xd6\xd0\x19\xf0\
<Y\x8a\xf9\xcc\x18\xc8\x177h4:\xf8I#\xfc\'<\xcf\x0f\x0c\xcd\xf6\xe8\xd9\xb7\
\xd0\xe0\xac^e\xeb\x1b\xc9\xe3\x07g1\xf6,\xd4|\xb6\x08\xabT\xda\xfa\x96f>[T}\
\x99\x0e\xd1#qq\xdc,8\x02\x17\xc7-\x0eK\xb98n\xd18\xb6\xf0\x0e\xc1\xb1\x85w\
\x00\xc7\x16\xdey\xe1\xd8\xc2\x9b\x11\x8e-\xbcipl\xe1]\x022_EKE7\x81,jh4;\
\x89\x87i!\xef D7\xb1M\xca0\xd7L\t\x96K9j\xeb#n\xd8\x1a\x80c[\x89\xce\x8c\
\x83!\xb3\xeb\xb5\x06\x8df\xf2\xa0\x92l\xadK)\xc4\xc6|\xde\x81\x8d\xc9\xf5\
\x95\n\xfd\xba\x8fR\xa9\x88\xe3\x0e\xff#.\xcd\xba4\x0c\x96%i7\xab(\xa5\xfb\
\xcccqH)f\xd6\xed,UT\xb3m+u\xc8jZ\x1c\xa9\x8d\xde\x98~\xddk\x12,\xad\x07{N\n\
Z\x1b\xaa\xd5\xc1|\xf2\xa3\xbe\x93\xbe\x9e\xa5\x11\xf4\xbc rR\x98\xa7Ts\xa4\
\x86\xe848&\x98\x06i\x16\x80\xf6\x14\x17oL\x83l\xaf<\xd2\xd0h\xeeD\xb6\xc1\
\xcd\xcd\xcd\xd1egp\xf1J\x83\xb9\x05g\xcd\x1bK\xb3.\xd9\xf6\nR\xec\xd1n\xb7\
\xb1-\x8b`\xc8\xb8M+l\xf7d\x01c\xfa\xa3L\x93X\x97V._\xbe\xbcg-\xc8ge\xd1PJ\
\x85=\xa8\xd2\x8a\x07G\x08\x7f\x0b=b^\x89\x1b\x0f\xb9\x8d\x00\x00\x00\x00IEN\
D\xaeB`\x82'
def getscanprogress06Bitmap():
return wxBitmapFromImage(getscanprogress06Image())
def getscanprogress06Image():
stream = cStringIO.StringIO(getscanprogress06Data())
return wxImageFromStream(stream)
index.append('scanprogress06')
catalog['scanprogress06'] = ImageClass()
catalog['scanprogress06'].getData = getscanprogress06Data
catalog['scanprogress06'].getImage = getscanprogress06Image
catalog['scanprogress06'].getBitmap = getscanprogress06Bitmap
#----------------------------------------------------------------------
def getscanprogress07Data():
return \
'\x89PNG\r\n\x1a\n\x00\x00\x00\rIHDR\x00\x00\x008\x00\x00\x01,\x08\x06\x00\
\x00\x00\x96\xb6p\'\x00\x00\x00\x04sBIT\x08\x08\x08\x08|\x08d\x88\x00\x00\
\x1a\x9aIDATx\x9c\xed\x9d}\x8c#\xe7}\xdf?{:\xdd>{\xb7\xba}\xee\xa4\xe8\xe6d[\
\x1a%\xb15R\nkl \r\xed3\x12:A\x1a\xa6E \xba(l"\r"\xfeU\xac\x15\xb4\xa0\x83\
\xb4\xda\xa6@A\x14\x05\xb2u\xddt\xd5\xa0\x0e\x0b\xa4(\xf3\x87[\xaa@\x11\xc6@\
`\xa6u\x11\xbaH%&\x8dk6ut#E\xb6\xe6\x14\xe9\xee9U\xd2=w\xba\xbb}\xf6\xa4\xbb\
\xed\x1f\xc3\x19\x0e\xb9|\x19\xee\x0e\xc9\xdd\xf3~\x81\xc5\x0e\xc9\x99\xe7y\
\xbe\xf3\xbc\xfd\x9e\xdf\xcb\xf3,\x9c;wn\x9b\xbb\x18G\x01,\xcb\x9aw9\xa6\x02\
\xa5T@\x10\xa0^\xafG?\x94\xcb\xe5]%\x98q\xc1\xb1\x05\x00\xad\xb6\xa1PL\x96\
\xce\xb8\xfc\xf2Y\x81\x94\xc1u\xa3iPz|:\xf9|\x1e\x80#\x89J\x90\x10"vmLz\xe9\
\x9a=$vt\xfc-{Gy-\x87c\xf7v\x83\x8dJ\x93V\xdb\x9fz\xde3!X,dw|\xf79\xa5gB0\
\xd5&:\t\x16{\x1a\xf4\xf407\x82\xb3\xc2]O0\xb5>X.\x97\xd9(\x17\xb0m\x17\xe8\
\x9dv\xbe\xf1Gm\x1e\x7f\xdc\xee\xdel\x0c\x8df{\xcf\xf9%A\xaa\x83\x8c\xeb:=\
\x9f\x1d\xc7\xc1\xf3<\xbe\xf8\xa5Z\x9a\xd9D\x88\x93,\x97\xcbQ~qL\xad\x89Z\
\x96\xdc\x91\xd9n!\xc4\xe8\x01)$:(\xbfT\t\xc6%\x8cB!\x83\x94{\x1f)-k!\x92b\
\x00t\xdf\x9c?\xae\xa9\xa6\xdaDKk\x1b4\xeb%\x84\x10\xd8\x96\xa4Q+Qo\xb4\xc7J\
"}-;\x82\x10\xe0\xd8\x8b\xd1g\xad{%\xa4$\xfdp\xe1\xdc\xb9s\xdb\x96e\xf5\x0c\
\n{\xc1j1\xcbZ)\x97JZ\xfd(>S\xa5\xf9B\xb2f\x9f\xcf\xe7{\x85\xed\xb4P\xa96\
\x11\x02J\xab\xe9\x914\xc6\xb0\xbe\xd1HL.\x8e\xa9\x88j\x1b\x95&\xd5Z\x8b\x8c\
kc\xdb\x16c\xc6\x88\x91PJ\xd3hz\xe8\xfe\xce\x97\x10S\x93E\xb564\x9a\x1e\x90\
\xceH\xba[\x1cJ2\x93\xc0\xb1\xcfR\xad<\x8de\xc9\x91\xf7i\xad)\xfdz=Q\x9fr\
\x1d\x8b\xf5\xf2\x17\x10r\t\xad4\xabk5\xd4\xb0\x15\xef\x00\xa4Z\x83\xb9\xdcc\
c\xc9\x01H)Y\xfd\xe5L\xa24K\xa5\x1c\x8es\x16\xdb\x92\xb8\xaeM!\xefNT\xa6\x99\
\xac\x07\x07!\x93q\xc8\xe7\\\xfc\x11\xb5\xe1\xd8\x92lf\xc8$\x99\x10s#\x08\
\xb0\xb1^\x98z\x1e\x07n\x90\x99T=3\xd7\x1alN8\xbfy\xbe\xa2ZkN\x94\xc7\xdc\
\x08z\xde%\x8a\xa5\xea\xd4\xf3\x19Hp\xb7z\xd1aB\xf3 h}u\xcf\xf9\xf5cP:\x07\
\xae\x0fN\x8aT\tN2\x00\xa4\xa9\x18\x1e\x85T\xfb\xa0\xe7\x83\x14&\x12\xae\xdb\
\xed\xc1\x92\xca\x16\x86\xdf\xfe\x0f\xcd4\xb3\x1e\x8a\xd4\x07\x99V\x8cS\xb9<\
\x1d]\xcc$\x98\xda(*\x04\x14\xf2n"\xd1m\xd4\xe0\xa45\xf8j\xf7\xe5\x98\nA\xd7\
\x01\xd7\x11\x14r\xe9H*\xc6@\xb35\xdc\xaa4\n\xa9\x8f\xa2\x99\x0e\xb94!\x04\
\xe4b&\xb4I\x90j\r\n\x01N\x8c\\\xb3\xe9Qo\x8cW\xf0\x86\xb6\xbc~H\x19\xd8\x1b\
\xc3A+\xe3@\xa35Y\x99R%\x18\xb7\x90)MbI\xc5\xcd\x0c&\x88\x02\xa5\x0c\xb9l\
\xc0\xd0\xb2\x04\x82M\x0c\x0b\x89\xcb\x94\xaa\xea\xfe_\x94\xf3d\xdc`\x9dW\
\xa9\xa4\xa3\xa5S:\x18h\xc2\xe6)\xe5\x02J\xcfIu\xff\x13nw1\xaa\xb5\x19\xa8J\
\xdf\r\x02\xbd\xea\xce~\x9dDu?=a[,D\x99M\xdb\xc2;Ju?=\x82\xa6\xeb\x9d2M\x0b\
\xef\xb8\xa6\x9a\xea41L4\x1b\x844,\xbcI\xfaa\xaa\xcb\xa5\xddb\\~N\xa5\x88e\
\x05\xe2N\xa5Z\xa5=A\xcd\xdf\xf5\xcb\xa5\x99\xac\xe8\xa7a\xe1M\x8a\x99\x10\
\x9c\x96\x857\t\xa6j\xe1\xdd\x0fi\xa5J\xd0\xf3\xfd\xe8:-\x0bo\xf6\xd3\x0evl\
\x0e\xd5W&[R\xa4\xdaD\xeb\r\x8f\xd2\xaa\x99\xd8\xc2;\x0cg-\xc9S\xb9\xaet\xe4\
\xf9\n\xff\xc2\x1c\t*\xa5\xd9\xa84#\x0b\xafeIV\x8b\xd9\xd4\xd2_\xffrc\xe2g\
\x0e-\xbc\xbb\xc1\xa1\x85w\x86\xf8\xc1\x94dv+\x8b\xca\x8e\xeed\\\x93\xec\xb7\
\xf0\x8e\xcaOJ\xc8\xbaA\x9a\xba\xa3|\x1a6(O]uo\xdb$\xeao\x93Xx3N@R\x08\xb0$8\
\xf6de\xda\x17\x16\xdea\x82\x8a\x94\x81\x1ef/8\xb4\xf0\xee;\x1cD\x0bo\xbf\
\x9f\xe90(\x1d\x18x&\xc1\xbe\xb0\xf0NS\x830\xb7&\x1a\xb7\xf0N\x13\x07\xaf\
\x0fN\x88C\x0boR\x94\xcbe\x84\x80\xb5R\x0e)F\xaf\xc0\xd3\xb0\xf0\xceEu\xbf\
\xb6\x16d\xaa\x19\xaeJO\x13s\xf3\xba\x1f\xa5J\x9fu~\xa9\xd6\xa0\x14\x90q\r\
\xc5\xf6\xfa\xc8\xfb&q\xa7\x1c\'l\xcfTu\xef\xfb\xcd\xd4\xdd)G\t\xdb\xfbZu\
\x1f\x17\xb6\xeb\xf5\xea\xc0{\x1c[R\xcc\xf7\xca\xab\xcdf\x93\x8dJ3q>\x87\xc2\
\xf6~\xc3\xa1;e\x1f\xf6\x85\xb0=M\x1c\n\xdb\x07\x1d)\x0b\xdb\xc9\xfb\xd3\x81\
\x13\xb6\x01\xaa\xb5\x16\x96%g"l\'E\xaa\x04\x8d\x81\xf2\xfa\xe4\x06\x92i"\
\xf5Q4\x89\x0b\xa5\xd6\x86Z\xbd\x95\xb8\x99\xe6\xb2\x0eB\x08\x94\xd6\xb4Z\
\xfeD\xe5I\x95`\xb1\x90\xa1\xbc6\xc4\xef\xac\x0f\x8em\xb1\xb6>\xde\xddk\xad\
\x94\xeb1\xc1\xad\x95k\xd4\xea\xc9\xed\xfbs\xf3\xba\xcff\x1dX\x1f\x9f_!\xd7\
\xab\xf8-\x14\xf28\xee\xe0\x978(\x9d\xb9M\xf4\x96%i\xd6K(\xa5\xb1\xed\xc1\
\xf7H9\xde\xce1\x0es\x15\xd5l\xdb\xea\xb1\xbfO\x03\x07j\xa27\x06\xd4\x84\xfe\
\xdbs\xabAc\x0c\xf9b\x05\xad\r\xa5R)\xe13\x93\xe737\x82\xed\xb6\x8f\xe7\x05\
\xd51M\xa9\xe6@5\xd1\xdd\xe0\x90\xe0$\x98d\x00hN(\x91\xec\x16\xe9:\x02i\xa8\
\xd57A\x04^\xf1\x1b\x1b\x1b\xc3\xef\xddM\x94\xc7.\x90\xaa\xea~\x96\x98\x8b\
\xea>\xccT\x08@\xb7\xc9\xe5\xdc=;\xe4i\r\x9eo\x06\x1a>\xe7\xe2u\x1fh\xa2\rR&\
S\xec&I/\xe3\nl\xcb\x0c\x8dz\x99\xa9\xd7\xbdkk\xa4\xec\x8a_\xa3\xe2\xe4CXC\
\x82\x92\xe2r\xa8e\t\\\xc7\xd0\xef\xf7>\xd3\rsZ\xcd\x1a\xc5|\x11\xe8J*\xe1d>\
\n\xa3\n\x19\xdf/\xd1\xb1\x05m\xaf+\x15\xcc\\u\xef\xbavt]\xab\xb7\x12\x91\
\x1b\x97\x9f\x10\xd0n\x96\x11"XY\xd4\xaa\xff\x0e\xcf\xbf\x94\xb8L\xa9\xd6`\
\xbc\xa9\x85\xe4\x1c\xc7AJ\x89\xe3\x04\x8bE\xdb\xb6\xd9\xda2\xfc\xd0\x19\x8b\
k\xe66\xdeK\xdf\x85[\x9b(\xa5\xf0<\x0f\xadu\x8f\xf2\xca\x98@\xac\xcbt\xb6|\
\x10r\x91I\x90nx\x9dt\x10\xe2A\x10\x9f\xa4Pt(\xac\x9e\x00\xeeC\xe9M\xcc&\xe8\
\xad-0\xb0\xcd\x16o\x99;\x00\xd8O\xfc0\x88#dVN"\x97\x96\x10\x02\x04\x9a\xef\
\xfb\x1e_\xaf\xd7i\xb7\xf7\xe6\x9d\x9f\n\xc1b\xb1H6\x9bCZ\xf7So\x1fEk\x85\
\xd6\xdb\x08q\x1bc.!\xc4\x12\x00\x86\xad\xe0\x81\x0e9s\xbb\x13&wc\x1b\xff\
\xea\xe5 \x1a(\x8a\xc3\xb2\xc8\xe6\xd7(\x95-li8.\xafsS\xff\xd9\xc4e\xdb\x15A\
!\x04\x96e\xf1\xd4Sy~\xfesE\x946\xf8\xbe\x06s;\xf8\x83\x88THL\xdc\xf9\x00\
\x8e\x1c\r\xfe\x1f\x03s\xeb\x08\xe6=\x8dX\x12\x98\xed@\xf8\x11\x0b\xc0}\x9d\
\xa1\xf3\x03\x83\xd2\x9az#\x18\x85\xe5}\x82\xcc\x8fe)\x95,\xca\xe52~\xcc\x01\
>U\x82\xae\xeb\x92\xcb\xe5q\x7f\xe23\xe8\xade\xda\x9e\n^\xba \xfa/\xb8\x8e\
\xf9`\x81w\xae\xfc?\x00~\xc6\xb5A\x04}G\x10\xfc\xf7\x95\xe6\xdb\xdf\xd6\xb0y\
\x83{o\x9f\xe0\xd6\xfb7\xb9y\xe7\x08\xd7\xae\xc3\xe2q\xc1\xd1\xa3\x82\x13BFS\
\x85y\xdf\xd0hy\x80`m\xbd\x86DQ.\xaf\x8d5\x93\'&(\xa5$\xff\xb7\x0b\xe4~\xe1\
\xef\xa0\xb7\x96\x83~\xb5\xa5;\x85\x06>\xb8\x81\xfd\xc0\xfbX\x96D,<\x80w\xde\
\xc7\xdc\x1b4A\xb9\xb2S\x9a\x91Bp\xff\xfdK@X\xd3\xb1\x1a7\x9b\x18\xf3\x0e\
\xfa\xda\x9b\\\xbf|\x0f\x0f?js\xf3\xa6@\x10n\xaf\xeb\x03P*\xff6\x92\xab\xac\
\xae\x16\xd1z\xf0|\x9b\x88`&\x93\xa1\xb8ZBH\x1bm\xc0ln\x06\xc4\x16\x17\xd1\
\x97_\xe9\x16\xfa\xc3g\x90\x8bA!\x82a\xbd;\x1a\xc6Zl\xf7;\xd1\xbbA\xb41\x0b\
\x9d\xef\x97\x10b\t)\x03\xb2\xee\xc7\x04\x06xS\x19\xfeR\x01\x11\xd1\xcb\x08\
\xa0\xf2\x9f\x9a\xbc\xd8\xa8\xf2\xdcs;\x85\xfb\xb1\x04\xd7\xd7\xbfL&\xfb\x14\
\xbe\x7f\x05\xa37\xa3\xb0\xc0\x1bW/\xf0\x9e\xd6Q_\x13l\xc7\x88\x88NS\r\xae\
\xbd\x97Ul\x7f\xc2\x80\xb4\xd6A|\x05\x9d\xe8N\x83\xe9%l\xc0\xb0\xd0!+\xb0\
\xa4\xc0\xb6@\xfd\x81\xc7\xa5\xb75\xcbg\xec(\xb5\xc6\xb7<\xac\xb3?E\xb5\x9a\
\xa1X\xec\xb5\x1a\x0f%h\xdb6\x95\xdf\xa9\xa2\xdf\x97\xb4\xcf_\x0c\n.\x00\xfd\
\x0eJ\xbd\xd1y\xc3\x02\xe8\x16* \xd1Q\xf5\t\x11\xc9Z\xda\x180\x06\x11\xd1\
\x8bA\x84\xaf!\xd6\x8c\x8d\xe9|o\xa2tE\xa7e\xfc\xc8\x87$\xf7\x9f\xb8\xcd[\
\xfa"\xea\xda\x11N>h\x03&\x8a\xf5m\xfc\xf7\x16\xe5\x7f\xd2\xd5\xf1\x0c\\\xf0\
\xda\xb6M\xb9\xbc\x8e\xba.\xd0W\rri\t\xa3\xdfA\xf9\xaf\xa0\xf5;H!\xa2\xb7\
\xdd\xa1\x14\xd5PXQ\x96\x14\x9d\xdf\xba\x7f\xf4}\x1e\xf8\x17\xbe\x18\x116s\
\x11D_/\xd1}\xc9b\x99\x07\xad\xfb\xf9\xd8\xc3\x82\xeb\x97\xbb\x83\xcc\rc\xa8\
\xfd7\x9f\xd5R\xb7\xa9\xee X(\x14\xf8/\xf5\x06\xe2LGr\xe0\x06\xc6\xf8\xdc0\
\xd7\x11l\x07\xc4D\x8cTX\x98\xb0\xa6\xc2\x97d\xc9n\xccD|\x8c\x19\xf49\x0ec\
\xb0-\xab\xa3\xf4\x15\xb8\x8e\x15\xb5\x8c\xf0\xf7\xe8\xe5\x8a%\xfe\xda#\xf7q\
\xdc\\d\xeb\xba\xe2D\xa7\xd6\xe3\xa1\xb3=M\xd4q\x1c\n\xc5\x12\xff\xd7\xd3QzZ\
_\xc1h\xcd=\x9d<D\xc8f@\xd9\x021K"E(\x1cK\x1cg\xb4bw{\x13\x16\x16v\xaa\xd5\
\xccv\xe7\xb9\x85\xb0\x99\x82\xef\x87R@\x97\xa4a\x99\x07\xadeN\x9aM\xd4\xbb>\
\xe2\xb4\xdd\xa3\xa5\x8bj0\x9b\xcdR\xad\xfeg@ V\x04\x98\x1b(\xbf\x8d\xd6\x17\
b\x84\x86\x93\x0b\xbf\xf0:\xf3b\xbc\x0f\x8e\xc2\xc2R\xec\xde\xd83""-a[`\x8c\
\xc1\xf7U\xb7\xe9\x86\xd9F\xb5y\x1b\xfb\xa1{1o\xfd\x05"\xd6\xd3#\x82\xab\xab\
%<\xfff \xe8n\x19\xb4\xfe\xab\xce\x83\'b}\xa3\xaf\x85u;N\xf4!\x14\x8e\xd5%\
\x8d\xd1\x9bc\t\x0eD\x87\xa8X0\xb0m`Ap\x07\x89\x90\xb1\xa1h\x07\xc9e`\x19\
\xfbA\x89\xb9\xf6\xbd(\xa9\xa8\x89\niw\xfa\x91\xe6M\xdf\xe7\x1eL4\xcc\xc7*.V\
\x88\xee\xc5\x80n\x84\xf7\xf2\x84:\xf6\xfe4bW\xce\xa36\xd6\x19\x8b\x8c\xeb\
\xa2.+\xbc\xf3~\xf0\x8b\x10\x04S\xad\x01\xb1\x1d\xcc\xa3b\x19\xfbah_\x0c\x9e\
\x8ej0\x9c\xdf\xb4z\xbbKN\x0c \x17U\xa1@\x0c \x97&\xc2\x11\xda\x7fME\x92\x8a\
u\xc6\xe2\xafg\xdcn3\xed\xb4\xa0\xe0\xdfvg>^\x8e\xd2\xe8\x0e2[\x06_y\x18\xd3\
%7\xae\xd6L0\x1bw`\x82\xd7b\x82-\x8f\x1e\xb5-VN\x8e\xa6/\x8et\xd6v\xa2\xbb\
\xf9\x86`\x11\xc4BT\x86\xa0\xefi<\xcf\xe7\xe3\xae\xc3q!8.\x04\x99\x8cK\xdb\
\xf3\xd0JG}^\x18\x83\x11 b\x9b\x14t\tn_\xdfAN\xeb\xb7\xd0\xfa&7\xb7\x0c\x0bw\
\x02&[[\x06s\x0b\xe8|6w:\x85\xbb\xd5\xedo\xd6\x19\x89w~$\xb7\x08\xc7\x8f\xef\
\x945\xb6Yfe\xa5;\xaf\xdeAr\xcbh^~\xbe\xcd\xd3O\x17\xa3\xfb\\\xc7\xc1_V\xf8\
\xaf\xfa\xdd\x81\xcd\x98\x9e&\x17\xa5\xae.\xbf\x11\x8d\x92\xad\x17\xbe\x85z\
\xfd5\xf4\r\x03\xef\x1b\xf4\xd5\xa0\xb3\x1b\xd3\xfd\x0b1\xceud\xd4\x16\xd2I\
\x9e\xed\n\x10\xc1\xf5\xf9\xf3\x1e\xeb\xeb\xeb\xd1=\xf6\x87-\xf4\xdb\n\xadMg\
5#z\xd2\x8d\x08\x96V\x8b8\x8e\x83\xbefP\x17}\x84\x10H)\x83\xbf\x95P\x04\x1b\
\\\xd8\xf8\xf7\xc6\x98\xb1\xfbb\xef\x86l\xf8\xbb\xef\xfb\xb4\xdbm\xdc\xd8\
\xce\'\xae\xebR\xad\xd6h6\x1b\x18c\xd0ZG\xbf\xf7\xb4\x0f\xcf\xf3(\x16\x8b\
\x9c=k\xb1\xb2r\x9ac\xe2\x18\xa7\xa5\x8c\xde^H\x1a\xe0X\xa7/\x84d\x16c\xa4\
\x96b\xd7Ikp\xb3s\xbd\xd5\xf9\x1f\xfev\xd3\x18n\x19\x83\xd92\x98M3tYT,\x16\
\xa8\xd5\xaa\xd1Bx \xc1b\xb1\xc8o\xac\xafsj7\x9b\'\xed\x02"\xe1\x8b\x18\x04c\
\x0c\xaa\xcf\xdaS\xab\xd5\xf0<\x8fZ\xed\xf9\xe8\xbb\x1eYtmmmf\xe4\xf6\x8ax\
\xff\x0c!\xa5\xa4P(\xd0h|#\xfa\xae\x87\xa0=\xcc\xdda\x9fB\xf6U\xc6\xa0Vpt\
\xd4\x8f\xfb\x1dB\x08l\xdb\x8e\xfa\xebH\x82\x07\x19\xa3*\'j\xa2\x07\xady&E\
\x8c\xe0\xa3\xf3,\xc7\xd4\x10\x11\x94re\x9e\xe5H\x15\xd5j5\xba\x8e\x08\x9e93\
]\x97\xaaY"\x9b\xcdF\xd7\x11\xc1S\xa7\x0e\xc6\xfc\x97\x04\xf1\xc3\xea"\x82\'\
\x0f\xc8\x04?)"\x82\xa7\xefv\x82w+b\xa3\xe8a\r\x1eH\x1c\x12<\xe88$x\xd0\xf1\
\x83Cp\x982\xe7\xa0\xe3\x07\xa7\x06\xdf\xbd\xdbk\xf0\xda\xddN\xf0\xca\x84\
\xdbZ\xeeg\xc4\xf5\xa5\x11\xc1\xcb\x97\xf7f\xcf\xdbOh6\x9b\xd1ul\x14\x9dMT\
\xf4,P,\x16\xa3\xebHm\xe8\xfb\xaf\xcd\xa3,{F\xa8\xc2W\x97\x15\xea\x92\xe2\
\xc5\x17[=\xbf\xc7\x08\xfa\xb3.["T\xabU\xea\xf5\xdf\xef\xf8\xaf\xf5\x9a\xf0\
\x94Rc\xadR\xfb^\xf1\xdbn\xb7{l\r\x83\x10j\xb8]\xd7\xa5V\xab\xf5\xfc\x16\x11\
\xdc\xcb\x99\xef\xd3DX\xaeL&\xc3\xe7\x0b\x05\x8ew|U\x85\x10<hY\x9c\x96\xb2G\
\xc94\x94 \xc0K\x9e\xc7\x13\x1d\xdf\xea\xfd\x86\xcf~6\xcb3\xab\xab#\xef\t\
\xd5\x85\x95j\x95F\xe7T\xda\x1eQ\xed\xd3\x99t\x829\xd2D\xd2\x96\xd5l6i\xb5ZT\
*U\xe2F\xfa}\xdf\x07C\x82++\xa7\x87\xdesEkr\xb9\x1c\xd5j\xd0<\xe3\xaf\xe4\
\xc0\x08\xdb\xc7\xc4\xb1\xa1\xbf\x9d\xb5,*\xd5\xea@7\xb3\x03P\x83\x81\xa7\
\xfe\xf1!&2\xc7\tvI\x08\x1d\x00\x11\xbd5\xb8\xef\tj}\x05\x18l\x03\xcc\xe5r\
\xf8\xbeO5\x1a9C\xe7\xb3\x01\xcex\xfb\x15a\x1f\xec\xd7\xdb>\xfb\xec\x1a\xcdf\
\x93\x7f\xf0\xa5\xb5An\xc4\xd1UDpV\x11\x99\x93""\x183\x0e=\xfb\xec\x1a\xcf=\
\xb7A>\x9f\xe7\x93O\xba\x9d\xfb\xa0\xcbt@\r\x1a\xb3\xc5k\xfbP\\\x0bU)\'W\x02\
\x82\x1b\x1b\xff\x86\xe7\x9e\xdb\xc0u]\xf2}{\xae\x19\xd3\xf1\x9f\x8b!"\xd8l\
\x9fg\x01\xc1K3\xda\x8f0)B\x82\xafx\x1e_\xadTX[\xfbGX\x96E\xa9\xb46\xf8\x01\
\xd3\x1b\x97\x1f\x11,\xad\xfe"\xcd\xb6\xc7C\x96\xcdK\x9e\xd7u\xe3\x92r\x87\
\xf83+\xc4\xfd\xe2\n\x85\x02\xbfZ*a\xdb6\xeb\xeb\xc3\x83\x9f\r\xbd\xc2Aw=\
\xa8.R\xfe\xd2\x17i~\xc7\xe7\x87m\x87\xff\xd9jG\x19\x14\x8bE\xa4\x94d\xb3Y\
\xbeZ\xa9\xd0h4x\xc9\xf3\xa6.\xbf\xf6{2\xb9\xae;2\xb2;$\x17\x1f\x90z}\xd5\
\xce\x7f\x87_\xfb\xfbE\xf8J\x85|\xce\xc5{\xc5\xa7\xf5Bw}\xb5\xf1o7\xf8\xd5\
\xce\xbe\x13\xf1a\xdb\xb2,\xa4\x94X\xd6Y\x84XDv\xfc\xdb\x16\x17\x05\xc7\x8f\
\x0f\x9e\xbfn\xde\xec\xbag\x861\x83\xe1\xf2Gk\xbdc)\xe4\xba.\xf5\xfa\xf0#\
\x89B\'@\xfb\xa3\x0e\xce\x8f<J\xadV\xddI\x10\xe0\x8dW\xdb\x94\x9e)\xe0\xff\
\xfa\xbfb\xf5\x97~\x86\x9f\xcbe\xf9V\xb3\x851\x86\xd2\xaf\x04\xe4^~\xb9\xcd\
\xf9\xf3\x1e\xbe\xef\x07\x8bM\xa5:\xeb\xc9tN\xe2\t\x9d\xfe\x1c\xc7\xe1S\x9f\
\xca\xf0\xd4/d\xb1\x1f\xb20\xf4\x86.D\xe4:\xcf\xd8?\xea`\x7f\xf8,J]\x8e~\x1b\
8\xd1\xab\x8b>\xe5_\xfb\xbb\xbc\xf5n\x85\xbf\xf7\xcb9\xf2\xf9\x1c\xcd?n\xa1\
\xde\x08\x9a\xcccO\xba<\xf6\x98; \xe6\x01\xceZ\x02n\xdf\xe4\xc6\xcd[\\\xbbqa\
\xe4F\x1cB<\xc2\xc9\x13p\xe2\xf81\xce|\xe8\xa1\xa0\xe6G\x10W\x97\xf4\x8e9\
\xcf`\xb0\xa4\x85\xf3\x84\x03w\x04-\xef5\xaa\x95\xdf\x8a\xd2\x19*\xc9\x18c\
\xf8\xcd\x7fZ\xe4\xdd\xb7\xd7)\xfe\xe2\xe7\xc8~&\x83RWi\xb5^\x1c\xde\xf7\x04\
l\x1ap\xdd\':\x19\x0c9Z\xbd/(\x0b3\xfe<Am\x0c\xde\x85n\x9f\x0c\xfb\xda\x93O>\
\xc9\xa9Sg\xd1ZS\xaf}\x8d\xdf\xdc\xf8\x97\\\xd3*:\xb4q\xa4\xa8f=d\x93\xfb\
\xe9,\xbe\x7f\x85\xf6\xf9\x8b\x14~\xfe\x13\xe4r9\xda\xff\xa7\x85\xff\x9a\xea\
\xbe\xcd\xc0\x81;*H\xab\xe5a?b!O\xc5d\xc4~B\xfd$\x87\xc0\x18\x83\xbab\xf0/\
\xa8N\xc0V\x90\x91\xeb~\x12\xdb~\x18c\xb6\xf0\xfe\xc2\xa3\xf6\xb5*\xb5\xffX\
\xdd\xf1\xf2G\x12\xcc\xfe\x8d<\xf2\xb4\xc4\xfdX\xb0b\xfe\x87\xebu\xfe\xd6\
\xcf\xb9d>\xe1\xe2<\x06\xde+\x1e\xfe\xf7\xfd\x01\xcd\x86\xe0m\xbf\xbc\xf3\
\x05\x0cl\x83I\x07c\x01\xae\xfdI,\xfba\x04\xe0\xbd\xea\xd3\xf8f\x93\xc6\x1f\
\xd4h\xff\xaf\xd6\xc0G\x86\x12\x14B\x90\xcf\x1707\r\xc6l\xe1+\xcdo\x95\xf34Z\
\x1e\xff\xf8\x9fWy\xfa\xf39\x9c\x87\x1f\xc5\xf9\xb8\xc3\x9b\xbe\xe2\xaf|\xbf\
k\xc0\xe9/\xb0\xe9\xbb\x16C~\xdb\xf1\x98\xe9\xac\x14$\x96\xf5PTc\xca\x7f\x9d\
V\xfb\x15\xaa\xff\xfe+C\x89\x8d%X.\xff\xb3hC\xa9\xd2\xfa\xf3\xb8\x1f\xfb\x10\
\x8es\x06\xc7\xb6\xd1\x9f\x81\xff\xdal\xd1<\xb6B\xf6\xd3\x8fc=(\xc9d2lc\xb8\
\xaau0\xeco\x19\xc410[\x83\x19(\xa5\xc7\x0c@\x02\xc7~\x02y\xfa$\xf2\xa4DiM\
\xf3\xc5?\xe3\x85\xff\xf1MZ\x7f\xda\xe2\xcf\xbf\xddJ4\x0f\x0f$X\xa9Tp39\xa4\
\x10\xa8P!|\xe4\x04\x85\xe2\x06\xb9\\\x86b>\x83\xc9:4\x9a\xdf\xa1\xfd\xc2\
\xd7\x81\x93X\xf6G\xb1\xed\x0fc[\xa7zN\xea\xd94\x06\xb6\xdf\x0b>t\x02\x98\
\x17\x16\x82\x01\xc2\xf3|\x84\x90|\xc4\xb69\xb2\xb8\xcd\xd2=K\x88cKh\xb3\xd9\
\t\x9e\x04\xfd\xee[\xf8\xaf\xfb\x9c9\xf1*\x99G\xdf\x86\xab\x82\xd3+Y\xfe\xe6\
\xcff\x07TJ9\x19A\xe7\x89`\xf4\xb3\xac\x80`\xb9\xf4y\xaa\xcf7A\x18\\\'\x08}k\
{\n\xa3.`\xccM\xe0&\xbe\xa7\x90\x80\xdaz\x02\xb5y\n\xf8!\xc4q\x81uR \xe4}\
\xc1`s\xaaS;\x08li\x823\x05\x05\x18\xbd\x851\xdb\xa8w\xaf\xa2\x94B\x9bM\x8c~\
\x87;\xe6\r~\xda58\x0ft\xcb6\xa9\xdf\xee\x0e\x82\xf9|\x1e\xf9\xa0\x05\x08Zm\
\x1f\xa5\r\x8eX$\xf7S\x0e\xcec\xc19JJ\x1b\xfcK\x9a;\xec\\bY+\xdf\xc7\xea8.\
\x1a\x03\x9b\x9b\x0f\xa0\xf4\xbd\\0+l\x1a\x01\xe2\x1e\xe0>\xc4Q\x83~\xef\x03\
\xcc\xd6\xdb\x9c\xbd\xef\x0e\x8b\xf7\x04\x01U\xb6\xb8\x1d\xf4Q9\xa5\x8d\xe3\
\x8a\xab\xbf\x02\x06\xd45\xcd\xd7\xea-\xee?%\xf9\xc3o\xb6XY\x96\x14\xf2\x9fB\
\xa9\xab\xc0U\xcc\xd5w\xc6\x9a\xdc\x82\xd0\xa7\xb79\xb5\x040`\xff\x89\x07\
\xe2\x1f\xee\xed\xfb\x9f\x0ezV\xf4\xab\xab\xab\xb8\xce\xc7\x11B\xe0}/(\x90\
\xd94\\\xb9\x0e\xae\xfb\x08\x19\xf7\x11\xcc\xadm\x8c\xd9\xc4\xf7fs@b\x1c{\
\xda8\xce\xb6m\n\xbf\xb4\x8a6\xdbA\xb4\xe5\xeb\x97\xf91\xc7\xa6\xd5~\x85\xe3\
G\x0cY\xf7q\xb41\x88c\x0bx\xed6\xf0\xde\x1e\x0b;\xe3\x8d\xe3VW\x9fA> \xbb\
\xeb\xaf\xa72T\xeb-\x8c\xb9C!\xf7\xe3X\x96\xc4W\x1a\xad_G)\x7f\xf2\x9c\xfa0\
\xab\x8d\xe3"\x82\x96\xfd\x93\x00(\xad\xa9\xfcN\x03\xe7\xa36\xb9\xac\x03\xc6\
\xc1u\xce\xe2+\x851\xe0\xfboN\xaf4S@w\xc1\xbb\xb9I\xf5\xf9\x16\xdew\xbf\x87\
\xfd\xa36\xed\xf3m\xd6\xbfRE\xca\xa5`\x08\x7f\xcb`\xaekt\n\xb57KD5\xf8{\xbf\
\xff\x87l}`\x90\xcb\x12\xcbv\x90\xf2!>\xf2\x91;d\xdcGP\xda \xf4\x02\xff\xbb\
\xd5\x1c\x99\x98R@B\xdb\xcd\xcc7\x8e\xdb2\x9dU\xf5u\x8d1\x1a\xb1l\xf1\xe7J\
\xd3\xf6\x14B\x80\xf2\xcf\x8f\x9c\x16B)"\x8c\x7f\x1f\x87\xbd\xaa)\'\xdfW\xed\
\x03Cw\x9f\t\x05Z!\x96-\x9e-W\xf8\xecO\xfe8\xdc\xf2\'\xcat\xbf\x1c\x89\xd25\
\x80~\xd0\xfb\xb08*\x10\xcbA\x18\xb2\xef\xff%\xf6\xe9\xf1C]\xfcx\xbeb~}\xe8}\
i\x9e\x18\x12\x92\x83\xc1\xfb\xaa\xc5&\xfa\xde\xa7\x16\x97-\x04\x81\xd2\xc8:\
\x9el\x1c\x9f\xc7\xf1|\xbb\xdaWM\x08\x19EqZ\'\xef$\x96\x07\x9b\xcd&\xae\x93K\
to\x1a\xc7\xf3\xedz_\xb5\xc5\xceV\xeeZ+\xbeZ\xab\x8c/\xed.\xb1\x9b\x13C&=\
\x12e\x87u)\xac=}]\xd3\xfe\xd6|4\xda\xa3\xb0\xc7\x13C\x04\xa2\xa3\x15~\xe9\
\x8fk\x98\xcd\xe9Z\x9cf~bH\xa0\x97\x14h\xad\xb8\xa6\xa7\xeb\xbb6\xf3\x13C\
\x84\x90\x88N\xdf\xf3\xda_\x9fz\xc63?1$\x94>|\xaf\x85V\x17g\x92\xf9,\xd0S\
\x83Z+\xbcvc\xd7\x89\xed\xfb\x13C\xf6\xbaR\xd8\xd7\'\x86l\x1a\x83\xff\xf2h%\
\xea8\xec\xc7\x13C\xa2&\xfa\xbd\xef6\xa6>-\xcc\x03Q\r\xbe\xf1\xea\xde\x95H\
\x8e}\x96j\xe5\xe9\x04G\xa2$\x17\xb6]\xc7b\xbd\xfc\x05\x84\\B+\xcd\xeaZm\xa2\
\xa5V\xaa~2\xb9\xdcc\xa9\x1f\xcfW*\xe5p\x9c\xb3\xd8\x96\xc4um\n\xf9!&\xb9!\
\xd8\x17ga\x0f\x83cK\xb2\x99\xbd\xb9w\x1e\x1e\xcf\xb7\xdf\x90\xca\xf1|\xb38\
\xb5\x07vw\x16\xb6eg)\x97\xb3\x03\x7f\xdfW\xa7\xf6\x1c\x9e\x85\x9d\x12\x0e\\\
\x1f\x9c\x14\x87gaO\x02\xcf\x07)L\xa4\xa4j\xf7\x1f-\xd0\xc1\x81=\x9e\x0f\xa0\
\x15\xe3T.\xcf_\xa73\xb5QT\x88dG\xf5\xc1\xe8\xe9Ek\xf0\xf7\xa0=\x99\nA\xd7\
\x01\xd7\x11\x14r\xe9H*\xa6\xa3\xd1\xde\x8d9#\xf5Q4\xd3!\x97&D\xc7$\xb0\x9b\
\xfdDR=\xb5\xc7\xb2$\xc5F\xd7\xd5\xb8\xd9\xf4\xa87\xc6/\xc3B\xc7\xb9~H\x19\
\x9c\x12\x12\x0eZ\x19\x87\xe8\xdc\x97\xb9\x9c\xda\xb3\xb1\xde\xb5\xb5\xb7\
\xdb>\xeb\x95V"\xeb\x92\x9b\x19r\xea\xa4\x02\xa5\x0c\xb9l\xc7\x8c`\t\x04\x9b\
\x18\x02\xef\xc4$\xd6\xa5T\x9bh\xbc\t\xd5\x1b\xedTLg\xe1\x0e\xe8\xdd<v\xba^&\
\xb4.\xed\x1d\xf1\x9e7\x89\xc6z\x1cFi\xebfzjO\xbb\xeda\xdb;W\xdc\xe5\xb5\x1c\
N\xdfI\x91\x1b\x95f\xcf\x8e\xe6\xbb\xc1\xccO\xed\x19\x86b!\xbb\xe3\xbb\xcf)\
\x1d\x11\x1c\x97\x9fS)\x06~m\x04\xc1\x8f\xc3\x1c\xd3\x07an\xc2\xf6\xe2T7\xb0\
\xee\xe2p5q\xd01\x93\x15\xfd7\xfe\xa8\xcd\xe3\x8f\xdb\xdd/\x8c\xa1\xd1\x9c\
\x8d3\xdfL\x08~\xf1K\xf3[UL\xad\x89&YE\xcc"\xadT\tz\xb1\xf8\xc3B!\xb3\xe7\
\xc3\x15\x01\xb2\x9fvzN[\xd6\x13n\x0b\x93j\x13\xad7<J\xab\x9d )K\xd2\xa8\x95\
\xa87\xda\xbb\x8eR;kI\x9e\xcau\x05\x07\xcfW\xf8\x17\xe6HP)\xcdF\xa5\xc9Z)\
\xf0\x95\xb1,\xd9s\xd4\xfa^\xb1\xfe\xe5\xc9Ms\xa9\x0f2\x95j\x13!\xa0\xb4\x9a\
\xcc!(\t\x8c1\xaco4\x12Y\xa3\xfa1\x95Qt\xa3\xd2\xa4Zk\x91q\xed\x9e\x18\x8a\
\xdd@)McBw\x938\xa66Mhmh4=`\xbe1\xc1S#hY\x92l\xc6\xde\xf3t\xe1\xfb:\x91V`\
\x18\xa6Bp\xad\x94KupY+\xe5(\x95k\xb4v\xe1%\x9c\xfar)\xe3\x80\x93\xb2\xd2\
\xc9\xb2$\xb5\xca*\xf5\xa6aT,\xca\xd4\xadKB\xf4\x92\x9b\xa6\xd2))R%\x18_\xb4\
+Mb_\xb4\xdd*\x9d\x92`jJ\xa7\xe0\x9c\xb2\xbd#\x89\xd2i\x14\xa6\xa6tJ\xd3z\
\xb4\x97\r\tf\xb2\\\x9a\x96\xd2)\tfBp\x9c\xd2i\x9a8T:\x1dt\xdc\xf5\x04S\xb5.\
m\x94\x0b\x91f\xbb\xde\xd9\x99\x0e\xa6\xa3t\x9a\x8bu\xa9\xdf\xa1\'\xb4\xf6LK\
\xe94s\xebR\x1c\x96%S\x0b\xcc\x1a\x17\xcd63\xebR\xdc\xc4\x9c\x96\xd2\xc9\xb2\
\x16z$\xa4\xfeu\xefL\xadK\xa5\xb5\r\x9a\xf5\xd2\xc4J\xa7aN\x08B\x80c/F\x9f\
\xb5\xee\x95\x90\x92\xf4\xc3\x85s\xe7\xcem[\x96\xd53(\xec\x05\xab\xc5l\xa4tJ\
\x1b\xc5g\xaa\x89\xf52\xf9|\x1e\xa5\xd4\xa1\xd2iW8T:\xcd\x10\x87\x92\xcc$8\
\x0c+\xe8\xe00\xac\xa0\x83\xc3\xb0\x82\x048p\x83L*a\x05\xb3\xc2\xcccxg\x89\
\x99\xc7\xf0\xce\x1a\x87a\x05)!\xe5\xb0\x82}\x1e\xc3\xbbW\xec\xeb\x18\xde8\
\xf6\xeam\x18N\xddI\xd3I\xcb\xbbqf\xc1YB\x04\xab\xf4Vcm\xcf\x16^\xad\xc1\xf3\
\r\x9e\xbf\xbb\xe7S\'(%\xe42\xa1Mo\xef:\x19)!\xe3\nl\xcbLl\x1b\x84)\x10\x0cw\
\xef\t1J\xd6\x0ca\r\x89\x17\x88\xa7cY\x02\xd71\x0c\x89\x16\x1a\x8a\xd4\r\xa0\
\xf1\xb2\xe6\n\x1b\xd1\xe6p\xa30\xaa\x0ff\xdc\xc0\xca\x0b\xc1\xff\xb67\xd9\
\xf0\x9b\xea4\x11\xefn\x9eo\x12\x91\x1b\x87VL\x01.D\xb0o\xd4$H\xd7\x00\x1a\
\xcb|\xd2M\x16GA\xa9n\xad\xedy\x7f\xd1i\xc0\xb2\xe4\x8e\xd1t\x12\xc7\xf2\xbd\
`&\x04\x1b\xb5\xd2\x0e-\xf7F\xa51\xd1\xdeL\xbb\xc5Ld\xd1A*|\xdb\xb2\x06\xdc\
\x99>fb>\x9b\x06\xf6\x95\xf9lZ\x98\xab\xf9\x0c\xb1\x10e6h\x95\xa1Mz[\xbb\x8c\
2\x9fMo\x901\xdd\xbd\xec\x0b\xc5J\x8f\xdf5@\xb3\x95N\xcd\xce-8+>_\xb5=E;\x85\
I\xbf\x1f3\x0f\xce\x8a7\xbbO\xb86\xb5z2;\xfc\xa8\xfc\x84\x10\x14\x9a\xdd\xa8\
\xd2J\xa52\xd1\xcbJ\xb5\x0f\xc6\xfd9\xbf\x90\xcfPZ\xcd\xee\xc9\xb2dY\x92\x7f\
\xfd\x1b\xf9\xc8\x84m\x8c\xc1\x9b0$;\xd5&\xdahzx\xde%\x1c\xe7,\x10\xd8\x08\
\xd3\xb4\x13\xd6\xeb\xed\x89U\x1d\xa9\x8f\xa2\xab\xcf\xfe\xeeTN\xe1\xf2}Eyc\
\xf2\xb0\x82\xd4\t\xfa\x174\xf9b\x85j\xedO&nN\x83\xa0\x94\xa6Zk\x92+Tv\xe5u8\
\x95iB)My\xfd\xf7\xa6\x91\xf4\xc48\xd4\x8b\x1et\x1cZx\'\xc1\xa1\x857\x86C\
\x0boJ8p\x83\xcc\xa1\x85\xb7\x0fw\xbd\x85wn[\xff\xc5-\xbc\xd3\xb4.\x1d\xb8>8\
)\x0e7\x8e\x9b\x04\x87\x1b\xc7\xcd\x01\xa9\x13\xb4\xed\x05d\xe7t\xc8\xd2jv\
\xe0=Z\x1bj\xf5V\xe2f\x1a*\xe4\x8ca\xe2\xbd\xd5R\x16\xb6!\xe3v\x9d\xc8G\x1d\
\xae\xe1\xd8\x16k\xeb\xe3\xb5\xdf\xe1&t!\x9a\xed-|?\xd9\xf1\x9a0E\xf3\xd98d\
\xb3\xc9\xe6\x94\xd0\xf8\x19B&<;4\xc4\xdc&z\xcb\x924\xeb%\x94\xd2\xd8\xf6\
\xe0{\xd28\x9e/\n+\xb8\x1b\x11\x85\x15\xf4\x9f9}7\xe1\xff\x03=Q\x82\x9c\xbc:\
\xc2/\x00\x00\x00\x00IEND\xaeB`\x82'
def getscanprogress07Bitmap():
return wxBitmapFromImage(getscanprogress07Image())
def getscanprogress07Image():
stream = cStringIO.StringIO(getscanprogress07Data())
return wxImageFromStream(stream)
index.append('scanprogress07')
catalog['scanprogress07'] = ImageClass()
catalog['scanprogress07'].getData = getscanprogress07Data
catalog['scanprogress07'].getImage = getscanprogress07Image
catalog['scanprogress07'].getBitmap = getscanprogress07Bitmap
#----------------------------------------------------------------------
def getscanprogress08Data():
return \
'\x89PNG\r\n\x1a\n\x00\x00\x00\rIHDR\x00\x00\x008\x00\x00\x01,\x08\x06\x00\
\x00\x00\x96\xb6p\'\x00\x00\x00\x04sBIT\x08\x08\x08\x08|\x08d\x88\x00\x00\
\x19MIDATx\x9c\xed\x9dm\xac#Wy\xc7\x7fw\xf3r\xcf\xdd\xbdd\'\t$\xb3\x01\xb2\
\x13^\'P\x88A\x82\x1d\x12D\xdd~\xa0FU\x15S!0\xa5-nU\xb57T\x95\x8cD\xcbmUU\
\x16\xaa\xd4\x1bT\xd1\x0b\xfd\x80?P\xd5\xfd@k\xaaV\x18*\x8ay\x13\xae\n\xc5\
\x14\x95L(a\x87@\xc2l\x92f\xcfFI\xf6l\xb2\xbb\xf7\xdc%\xd9\xdb\x0fc\x8f\xc7\
\xbe~\x99\xb1\xc7\xf6\xbd\x9b\xfb\x97\xae\xae\xed\x999\xe7\xfc\xe7\xbc=\xe7y\
\xce\xf3\x9c\xa5\xbb\xee\xbak\x87+\x18W\x03\x98\xa6\xb9\xe8r\xcc\x04R\xca\
\x80 @\xbd^\x0f/\x94\xcb\xe5\x89\x13\xcd\xd8\x90\xb1\xc5T\x05\x1b\x86Fk\x1b)\
\x877\xb8h\xb9\xf3\xf9<\x00\x87\xd2.\x84\xeb\x81\xeb\xe9\xb4\x93\xa5\xe5\xea\
\x91\xe4\x86\xe1\xea\xf1\xb7$\x87\xeb\x81\xe7k\x9a\x8d\x1a\x96e"\xc6Th6\x9b\
\x1dzM\xe9%\xa4\xdcAO\xf8\xcefB\x10@kh4=\xc0\x1b{\xafafG\\\x9dn\x0c\x9c\x19A\
!\xa0\x90\xcf`\x9a\xc6\xd8{3\xf6\xf0kJ\x81/\'/\xc7L\x08v\x06\x9aB\xae\x90JzZ\
C\xb3\xa5\x91*\xf9\xb3\xa9\x0f2\xce\x0cFQ! \x97\x15\x18\xe3\x1b\xc3.\xa4Z\
\x83B\x80\x1d!\xd7lz\xd4\x1b\xee\xd8\xe7:Cz?\x0c\x03lK\x84\x83\x94cC\xa3\x95\
\xacL\xa9\x12\xb4"\xf2\x82TP,Uc=\x97q\x06\x13D\x82\x94\x9a\\6`h\x9a\x02\xc1\
\x16\x9a\xa5\xd8eJ\x8d`\xb9\\\xe6\xder\x1e\'\xe3\x00P\xa9\xd4\xc7<\x11\x0fR\
\x05\x03M\xa7y\x1a\xc6\x12R\xc5\x17FR\xad\xc1\x13\x99L\xf8Y)\x8dm\xdbx\xde\
\xf8ib\x1c\xb4\xd6\xc0\xee~\x1d%Y.\x97\x07\xe67\xb3i\x02\xb1\x14fV^\xcfa[\
\xbd\xf2\xeef\xa5I\xcb\xf5S\xc9\xaaCt\xd0\xcb\x9c\x1dA\xdd\x9d\xa0\x8b\x85\
\xec\xae\xcb\xef\x96*\x15\x82\xe3\x9aj\xaa\xd3\x84\xeb\xc6o\x8e\xcb\x03\x9a\
\\R\xc4\xe9\x87\x03kp\x9a\xd5\xc4$\x18\x97\x9f])b\x9a\x81\xb8S\xa9Vq\x13\xd4\
|\xea\x13\xfd^\xc3\xec\xfa`\x04_\xfe\xa6\xcb\xed\xb7[\xdd\x1f\xb4\xa6\xd1\
\x1c/\x00\xa4\x81\xb9\x10\xbc\xe7\xc3\xb5yd3\x103k\xa2qV\x11\xf3H+U\x82\x9e\
\xef\x87\x9f\x0b\x05\x07\xc3\x98~\xa4\xcc\xdeicE\xe6Pu6\xd9\x92"\xd5&Zox\x94\
\xd64B\x08,\xd3\xa0Q+Qo\xb8mI$9\x8e\x99\x06w\xe7\xba\xd2\x91\xe7K\xfcS\x0b$(\
\xa5b\xb3\xd2d\xbd\x94\x03\x82\xa6\xb5V\xcc\xa6\x96\xfe\xc6\xc7\x1b\x89\x9fI\
}\x90\xa9T\x9b\x08\x01\xa5\xb5\\jij\xad\xd9\xd8l\xd0\xfc\xaf\xe4r\xedLF\xd1\
\xcdJ\x93j\xad\x85\x93\xb1b)\x9dFAJE\xa3\xe9\xa1\xd4d\xcd|f\xd3\x84R:\xb6\
\xd2i\x96xaJ2\x93\xca\xa2F[w2\xaeI*\xa5(\xfdY=\xecS\xa3\xf23\x0c\xc8f\x824U[\
\xf94lP\x1e\x94N\xaa5hY\xc4\xeao\x86a\xb0\xf6\xdbN\xac4\x1d; )\x04\x98\x06\
\xd8V\xb22\xcdET\x1b\x04\xc7\xb1\xc9\xe72\xf8R1LP1\x8c@\x0f3\r\x16F\x10`s#\
\x1d\xbd\xe9(\xec\xbfA&\xe1l\xb1\xd0\x1al\xb6\xe7\xb7\xcc(\xdd}\x04R\x81\xe7\
\'\xcbca\x04=\xeft\xa87\x9d\xa5\x06aaMT\xa9ss\xc9g\xff\xf5\xc1\x84H\x95`\x92\
U\xd1\xa4\x06\xcd\xa4HUu/\x04\xac\x97r\x18b\xf4\n|\x1b\xcd\xa7\xff\xbe9u~q\
\x90\xea \xb3\xbe\x1ed\xaa\x18\xaeJO\x13\x0bS\xdd\x97\xcbeL\xd3 c\x0br#\xec\
\xef\x1dLk\xe1\x9d\xab\xea\xbe\\.\xb3^\xca\xa5\xba\x92\x1fe\xe1\x9d\xab\xea~\
\x16\xe4`\xb8\x85w\xee\xaa\xfb~\x1dL\\\x0b\xef0X\x96A\xb1\xe0`\xb4\x99\x99\
\xc2\xa7T\xae&J#\xd5&\x9a\xcfu;\x93\xeb\xfa\xb1-\xbc\xa3\xd0j\xf9\xd4\xaak@\
\xb0\x021\x0c\x91H}\x91j\x13\xbd-\xa2\xbf\x9c\xa6\xe6\xa2h\xb9>~d\x94\xb1\
\xfa\xec\x8c\xe3\x90*\xc1\xa8IlR%\xd1 \xc8I\xf6\x8f\xb41\x17a{\xd6\x16\xdeQ\
\x98\x0b\xc1YZx\xc7aa\xc2v\x1a\x16\xde88XM\xecw\x1cXx\xd3\xc0\x81\x85w\x86i\
\xa5+\x8bZ\xdd\xcf\xa5\xb5\x1c\xb5z+\xd6\x84?*?\xd3\\\xc2\xb2\x96\xc3\xef\
\x85B\x91!\x9b\x13\x07\xa6\x93j\x13\xf5%t\x14\xf2B\x10\xdb\xc2;l=(\x04\xd8\
\x11rJ%Wu\xa4JP\xeb`\xc7}gCl\xda\x16\xde\x96\xb7\x9d\xf8\x99\x03\xb7\x82Ip\
\xe0V\xd0\x87Y\xba\x15\x1cH2I\xd0o\xe1-\xe67\x06\xde\xd7o\xe1\x1d\x99f\x02\
\x0b\xef \x1cXxg\x85}g\xe1-\x97\xcb\x94\xd6\xb2d\xec\xf8\x1b\x80\xa6\xb1\xf0\
.Du?j\xb8O\r\x91\xfe\xb7\xd8]\xf71\x90\x96\x85w1\xbb\xee\xc7 -\x0b\xef\\U\
\xf7\xcdf3\xf6\xbdiXx\xaf\x98]\xf7\xd3 e\x0bo\xfc\x19x\xdfYx\x01\xaa\xb5\x16\
\xa6i\xcc\xc5\xc2\x1b\x17\xa9\xaf\x07\xcb\x1b\xc9w\xe5\xce\x12\x07\xc2v\x12\
\xd8\xd61\xaa\x95\x0f\x8eU\x12%\x11\xb63\xb6\xc9F\xf9}\x08c\x05%\x15k\xeb\
\xb5D\xc6\x98Tk0\x97{m,\rX\x12a\xbbT\xcaa\xdb\xc7\xb0L\x83L\xc6\xa2\x90\xcf\
\x8c\x7f(\x82=!l\x0f\x83m\x19d\x9dxR\xce0\x1cl\xa7\xdckX\xa8\xda0)\x9a\t\xdd\
\x05<_R\xad5\x13\xe5\xb1\'\x84\xedY"UY4\xe6\xaa\x07\xe8\x15\xb6\xd3\x92Eg\
\xbe\xeb~/\xe2`;e\x12x>\x18B\x87\x9a\xb5a^\xd9\xfbV\xd8\x06hE8\x95\xcb\x8b3|\
v\x90:A\xcbZ\xc2\x10\x81\xba\xbd\xb4\x96\x1dx\x8fR\x9aZ\xbd\x15\xbb\x99v\xb6\
\xd8hM\xe2\x982)\x0b\xdb\xe0d\xba\xf6\xbcQ*D\xdb2Y\xdf\x18\x1f\xd0\xa3?\xcaW\
\xd3\xdd\xc6\xf7\xe3\xdb+R\x1dd\x92\xf8\tf\xb3\xf1\xe6\x14\xdb\xeaM\xb4\xd3:\
\xe2ba\x13\xbdi\x1a4\xeb%\xa4TX\xd6\xe0{\x0cc\xbc\'\xdb8,TT\xb3,3\xf1\xee\
\xc1\xa4X\xa8\xea>)\xb4\x06)\xbb\xf9\xc5\xc1\xc2T\xf7Zk\xf2\xc5\nJiJ\xa5R\
\xccgz\xbf\xefi\xd5\xbd\xeb\xfax^P\x1d\xd3J5\xa3T\xf7\xfb^\x16\x1d\xd7TS\xad\
\xc1f\xb3\xb9\xab\x0f\nq\x13\x00\x87\xc4\xad<\xff\xfc9\xae\xba\xea(\x00\xb6\
\xe3P\xdex\x03\xde\x8f~\x88m\xdbh\xad\x91\xf2\t\xb4\xbe\x18;\xbf\xb9\xab\xee\
=O\x06\x84\xc4\x9bQZ\xa3\xd4a\x94\xba\x8c/\xcf\xa2\xb7@m\x1f\x01\xfd\x1c\x9a\
m\xd0\x97\x81W`\xbd\xee\x15\xb8\xf2\x10\xe6\xd1\x9b1-\x1b!@\xa0x\xd8\xf7\xf8\
b\xbd\x8e\xeb\xba(\xb5\xe0-\xcd\xc5b\x91l6\x87\xe6f\xea.(%Qj\x0b!V\xd0:\xf8\
\x0f\x04\xc4\xa0M\x0e\xf4\xf3\xed\xf8h\x17v\xf0\xcf\x9d\t\xc2@\x85\x01\xb8L\
\xb2\xf9uJe\x13%=Z\xcd&\xd5j5q\\\x8c\x89\x08\n!0M\x93\xbb\xef\xce\xf3\xaew\
\x17\x91J\xe3\xfb\n\xc4v\xe0\xb8\x04!)\x00\xad\xb7\xb8t\xe9\x1c\x97w\x04\x87\
\x964\x17#\xadP\xac\x08\xf4\x0e\x88\xa5\xe0\x8f\x17\xb5g\xf6\xe74R)\xea\x8d \
A\xe3\xe59*\xb5"O\xfb->\xb5\xb9\x89\x1f\x89|\x92*\xc1L&C.\x97\'s\xe2\xed\xa8\
\xedU\\O\x06/]tGC\xad\xb70\x8d\xe71\x8c\xe70\x8c\x1b\x11<\x0fm\xc2\x1d\xc1$\
\xac\x07\r\x9a\xab\xd0\xfa\x02Zk\x94z\x0euq\t\xf5l\xe7E\xb5o\xfb\xb9\xa6\xd9\
\xf2\x00\x93\xf5\x8d\x1a\x06\x92ry}\xac\xf3Wl\x82\x86a\x90\xff\xf5\x02\xb9_{\
\x0fj{\x15\xa9\xb6\xd0\xdb\xaa[\xe8\xe7.\x80~\x12\x80\x9cs\x1c\x96\x04\xe2\
\xda\xce\xd3\xd7\xecJ/\x94\xc0\xda\xb2\xb9^1\x82\xfb\x8f\x83\xde\xd6\xed\x95\
\x83B>y\r\xf2\xa9\x9f\xa3\xb7@\xb4\x9f\xealf/\x95?\x8d\xc19\xd6\xd6\x8aC\xfb\
i,\x82\x8e\xe3P\\+!\x0c\x0b\xa5Aom\x05\x85\\^F\x9dy\xb0\xe7\xde\xacs\x1c\xb1\
\xdc\x15 \xf5\xa56\xa1k\x19\x88\xceu\x08\x88\x85\x10`\x1d3\xb1\xcc.\xd9\x96\
\xab\xb9\xa0\xe1*\xd1!z\x06\x01T\xfe\xa9\xc9w\x1aU>\xf9\xc9\xcd\xe4\x0476>\
\x8e\x93\xbd\x1b\xdf?\x8bV[a<\xb8\x0b\xe7N\xf1\xacRa_\x13\xec\x04\xa63C\x04\
\xa5C\xd3r%Z\xf5\x16\xba\xc7M\\\xd0n\xd7\x02\xdd\xef?\xde\x1el,\xcb\xc4\xbc\
\xde\xc0\x12\x06f\x0e\xbeX\xff>O\x9c\xb9\xc4\xea\xcdVx[\xe3?<\xccc\xbfH\xb5\
\xeaP,\xf6*\x93\x87N\xf4\x96e\xd1\xf8z\x13\xeb\x8ew\xe2\x9e|\x1c\xcdVP\xa0\
\xed\xa7\x90\xa7\xee\xe7\xe7z\xbb-\xed\xef \xc4\x0e\x08BrB\x04Q%\xb5\xd6\xc1\
3\xed?\x01=\xdf\x03\x92\xc1\x05!D\xf7\x8f\xe0;\x80\xf4%\xe2Z\x10\xcb\x02cYp\
\xabu\x03\xbfp\xfcE\x1c\xd6\x8f\xf3\xcc\x132|kRm\xd3\xf2\xa0\xf1\x8d\x16\x8e\
\xd3\xb5{\x0c$hY\x16\xe5\xf2\x06\xf2\xbc@\x9d\xd3\x18++h\xf5\x14\xd2\x7f\x10\
\xa5\x9e\xc2\x10\x01\xb1ny\x83\x82E\x977Z\xeb\x1e.!\x9fq\x7fB\xf4\x90\x06\
\x81\xde\xd6\x88\x95`\x9c\n\xae\xafr\x93y#\xaf\xb9Up\xfeLw\x90\xb9\xa05\xb5\
\xaf\xf9\xac\x95\xbaMu\x17\xc1B\xa1\xc0\xbf\xd6\x1b\x88\x9b\xedv\x81.\xa0\
\xb5\xcf\x05}\x1eA\xb7\xb6:\xa4\xc2\xc2\x0ck\n\x800\x04\x86a\xf4\xfc\x85o\
\xa2\xffA\xad\xb1L3xY\x9d\xebK\xddW\x10d\xd9~\xb9b%\xac\xcd\xed\xf3\x92#\xa2\
\xd35\xfc0\xb9\x9e>h\xdb6\x85b\x89\xff\xf5T\x98\xb1Rg\xd1JqU\xd8\xcc\xbalv\
\x91Z\x12\xbd\xbf\xb5\xbf8\x19+lr\x1d\xf8\xbe\xc4\xf3\x15\xa0{\xe7\x0e\x11D8\
\xb7,\x13\xc32p=?h\xa2a\x9e\x01\x89\x0eI\xcd*7\x99\xab\\\xa7\xb7\x90O\xfb\
\x88\x1b\xac\x1e\xe1=\xac\xc1l6K\xb5\xfa\xcfA\x12G\x05\xe8\x0bH\xdfE\xa9S\
\x11B#\xc8\x89\xf6H\xd9\xae\xd5\x95N-wj\xba\xff\xf6e\xd1m\xdea\'\xed&\xeeK\
\x89R\x8a\xac\x93A\xac\x88\xde|D\xf7Evk\xf3y\xac[\xaeA?\xf1\x00"2`\x855\xb8\
\xb6V\xc2\xf3/\x86y(\xf5h\xfb\xc1#\xed\xa6"v\x13\x13\xbb>L\x85v\xdd\x84\xdf}\
)\xe1j\x81\xf5\xb2\xf6\xaa\x7f\'rU\x08\x84\xd6m!c\x07\xadW\x01\xb0n\x02\xff\
\x89\x87\xc24\xc2\x1a\x14\x86\x85a\x04\xc3\xf5C\xbe\x17\x19$\x827\x1b\x1d(\
\xfa\xc9\xed\xba6!\xb9\xce\x7f\x11\xf9\xd5\x7f\xcc\xe7b\xa7\xcd-\t\xcc[\xac\
\xee="R\xff\x1de\x94X\xc5\xba\xb5ke\x0e\tv\xe67%\x9f\xe4\xaa\xce{\x1c4\x80tS\
\x0f\x13\x1f\x84\x9d1\x94\xa3\x13\xfc0t^\xf0!\x14\xec\x04$\xcd\x9b\x0c\xde\
\xead\xba\xcdT\xd0\xad\x00\xb1\x83`\x07X\r\xd3\xe8\x0e2\xdb\x1a_vk\x0e1\xa0\
\xf0}\xb5\x16\x17\xae\xeb\xf7\xf4\xc3%4\xa7\xc7\xd8\x05\xfb\xd3\x17+]Y\xf7\
\xb0\x108N\x06\xd7\xf3PR\x05=\xae\xd3d\x05\x88Ht\xda.\xc1\x9d\xf31\xc8%#\xd6\
A`\xe4\x9c\xde\xda"\x84F\xebn\t2\xb6\x8d\xbf*\xf1\x7f\xea\x87$\xd1\xba\xe7\
\xed\x84MT\x9ey\xac\xa7\xbfuSe"r\xc6\xd1t\x06\x1e\x04\xa1\x9b9\xb0k\x99d\xbd\
\xcc\xc4z\x95\x15\x19U{\xf3\xedN\xf4m\xe6\xa3\x06\x92q8w\xae+\xd1\x9b\xc7\
\x0c,\xcb\x98j\xf4\x11\x022\xd6\xcd=\xbfI\xa9v\xad\x1c\xac\x97\x99\x98\xa69\
\x90d\xb7\x89\xf6\xcfU\x13\xf4\xb7\xd3Rs\xdc\xd2a\x06\xf6\x18\xc5n0-\xc4\x87\
<\x1d\x10\xf3|\x1f\'\xd3\xbb_\xc6\xb6-\x94V\xa0z\xc5\xf6\x1eQ\xad;*u?$\xad\
\x00\xd7\x95\xec\x04\xab\xa9\xa1\x85\xef\x13\\bAk\x8dwJ\x86\x0fz\xfe\xa3\xbb\
\xeey\xa3\xdd\x11/\xbb\xe8\xce\x83=W&\x9f\xdb\xb4\xd6|\xf7~\x0fyZ\xf5\xe8O\
\xa2\xf3\\\x87X\x9c\x1a\xd4Z#O\xb7=\xb6#7+\xf9Tw~l\xe3\xb0\xe8\xefc\x03\xd6\
\x83\x93\x8d\x93\xfd\x85\x02\xef\xc7r\xeat\xc6\xe1A\xcf\xef\xd9\xef\xad\xd4\
\xa5]o\xac\x8f`J#\xdf\x9c\xa0\xb5\xc6u=\x84q\x04\xf4\x85`:\x1aZ\x83\xbbkwb\
\x08\x01\xd6q\x13\xe3\xfa\xae\xa0\xdd\x9fv\xa7\x99\xc6i\xa2\xbe\xaf\x86\xee0\
\xd4Z\xa3\xa5\x0e\xd3\x04\xd1c\x0b\x08\t\x0ek\x9a\xdd~\xa4\x03\x01n\x80^\xf2\
2\x9aC\x08.\xa39,\x04\xc7L\x13v4\xea\xe9\xe9&wq(\xd0H\x99\xc62B\x18x\x9eOt93\
\xb8\xcc\xbd\xb5\x18\x12l\xb5\x9a\xa8\x8b\xdbpi\xab{\xeb0y\xf1\xb2\x86C"\xf8\
\xdf\x87c\xe6Q\xbc\x93\x81\x9ef{{k\xd7\xf5aX^^\x19\xf8\xfb\xe1\xc3\xddFv\xf6\
\xec\xb3\x9cU\x11/\xd0k\xbb\xcf\x88C;\x08!\xb8v\xc5\xe0uwt\xfbe\xf8t\xbd^Gs\
\x08\xda\xb6\x01\xbdM(\xe0N\x1aeyV\x88N\xe4a\x17\x88\xfc\xff\xe2\xe7\x03\xfd\
-D\x08Z\x965\xf0\x0c4\xadw\xfb\xcdj=\xb8ff\xf1"\x06-\x96w\xdf\xb3\x12\x96)P\
\x1ew\xfbkH\xb0X,\x86\xac\xa3\xb8\xb6/\x83\xc3\x83V\xe7\xd3\x1a\xd2\x07\xa0\
\xffeE\xe7\xbcK\x91\xcfQ]\xaa\xde\xea~\xaeT*@\x84\xa0a\x18\x03\t\xeeWt\x08v\
\x17\xbc{\xac\x9f\xa5\x85}o\xe1\x1d\x87\x90\xa01\xc9\xb1T\xfb\x00/\x9c\x1a\
\xbcRq@p\xbf\xe3\x80\xe0~GHp\x9a\xbd({\x19/\x9c\x1a|\xfaJ\xaf\xc1g\xaet\x82g\
\x13\x9eg\xb4\x97!eW\xa3\x17\x12<sf\xf6j\xbey!\x1a\xb0 2\x8a\xce\'\xf6\xfc<P\
,\x16\xc3\xcf!A\xdf\xff\xd9"\xca2sD\x08\xfa\x0b,\xc6\xec\xf0\xc2\x99\x07_\
\x10*\x8b\x1f\xcd0\xf0\xfe\xa2\xd0C\xf0\xce\xc8&\xb6+\x05/\x9c>x\xa5\xe2\x8a\
\'\xb8P\xef\xb34pV\xa9\xc0\\7\x04{\x9e`\xc7\x98\xf2\xb4R<!%\xbe\xefs\xf2\xa4\
\xc7\xa9S>\x8dFc\xec\xf4\x16\x12\x0c\x1c\x15g\xeb\xcb7\t\x1c\xc7\x19\xea:\
\x90\xcdfi4z#\x10\xf5\x1b\x82B\x82Zo\xf3\xb3=(\xaeuT)\x96e\xe18\x0e\xb7\xddf\
q\xe2\x84C.\x97\xdbu\xef +WH\xb0\xe9\x9e$\xe7d\xf8\x91\xe7\xf1:\xdb\xdeu\xe3\
\xa2\xd0!\xf8\x9dV\x8b\xebG\x98\x17\xa2\xe4\xaa\xd5\x1a\xf5z\r\x88\x8c\xa2\
\xa5\xb5\xdf\xa0\xe9z\xdcbZ{F\xa2\xd1Z\x87},.\xb9J\xa5\xd7\xbf\xa9\xbb\x1e\
\x94\x8fS\xfe\xf0=4\xef\xf3y\x85e\xf3@\xd2c\xe2f\x80N\xed\r\xb2<w\xd0C\xaeZC\
\xd3kH\xea\x99\x07\xbd\x93\xf7\xf1\x91?*\xd2h\xba\xbc\xde\xb6\xf0\x1fY\xec*\
\xbf\xa3z\xb0\x87t\x99(\xb9\xcd\xcd\n\x02\xb0_m\xe38o\x0b\x7f\xdf5\xd1?\xf6S\
\x97\xd2\x87\nl~\xe6K\x987\x19\x9c]\xa02\xaa\xb3F\xbd\xe3\x8e\xdd\x96\xe7\
\xfefi\x18\x06\xd6\xabl\xecW\xde\x86\x94g\xc2k\x03%\x19\xf9\xb8O\xf9#\x1f\
\xe0c\x9f\xaas\xae\xdd\x9c\x9b\xdfj\xa5X\xf4x\xf0\xfdG\x00\xb8\xf7\xde\x8d\
\x9e\xdf{\x9be\x15\xd34q\xeet0_l\xd2\xf2~Fy\xe3\xde\xf0\xfaPQMk\xcd\'\xfe\
\xa2\xc8\xc6\'\xaa\xb4\xdcSd\xdf\xee \xe5|\xf56\xeb\xeb\x7f\xb2\xeb\xb7(\xb9\
Z\xad\xce\x89\x13\'\xc88\x0e\xfa2\xd4j\x9f\xe5\xf7\x0by\x1a\xf5\xcf\x86\xf7\
\x8c\x94d\xcc[,r\xbf\x9c\xc5\xf7\xcf\xe2\x9e|\x9c\xc2\xbb\xde\x14\x84{\xbf\
\xbf\x85sb\xfeK\xab\x0e\xb9J\xb5J\xc6~3\x96u+Zo\xe3=\xe0Q\xfbl\x95\xda?\xee\
\xf6\x10\x1dI0\xfb\xce<\xc6\r\x06\x99\xd7\x04\xa3\xd8\x1fo\xd4\xf9\xd5_\xc9\
\xe0\xbc)\x83R\x1a\xefA\x0f\xffa\x9fBa\xc8\xf1\x01)A\x08\x81\xef\xbb\xf8\xbe\
\xcbSJpc{w\xaf\xf7S\x9f\xc6\xd7\x9b4\xbeT\xc3\xfd\xde\xe0.4\x94\xa0\x10\x82|\
\xbe\x80\xbe\xa8\xd1z\x1b_*\xfe\xb6\x9c\xa7\xd1\xf2\xf8\xd3\xbf\xac\xf2\xc1\
\xf7\xe6\xb0o\xbd\r\xfb\x8d\xc1\x94\xf2\xa8\xef\x93\xcbeS\'W\xad\xd5p\xee\
\xb0Y^>\xc2\xd2\xcaK\xb8\x91m\xa4\xff\x08-\xf7A\xaa\x7f\xf7\xd7C\x89u0\x94`\
\xb9\xfc\xb1P6-m|\x8e\xcck^\x8am\xdf\x8cmY\xa8\xb7\xc3W\x9b-\x9a\xd7\x1e%{\
\xe7\xed\x987\x198\x8e\x83R\x9a\x1d4l)\xee\xff\xb1\x8fT\x8a\xb5\x88\x8er\x1c\
*\x95j\xf8Y\x08A\xab\xf5\x03\x8c\x1b\xae\xc3\xb8\xce\x08\xbcA\x7f\xec\xd3j~\
\x83\x7f\xff\xda\xbf\xf1\x83\xffi\xc5\xd2#\r$\xf8\xd6\xbb\xb2ds\xef\xc5\x10\
\x02\xd9Q\x08\x1f:B\xa1\xb8I.\xe7P\xcc;\xe8\xacM\xfd\xcb\xf7Q\xfa\x83\xf7\
\xf3\xe2\x17\xdf\xcc{>\xf0{d2o\xc02\r\xc4\x8a\x89\xe3\x98li\xcd\xb7[.\xcf>\
\xa3\xd0\x974\xfa|\xdb\xc7A\x07\x04\xb4V\x88v,R\xb1*0V\x05\xc6\r&\x17.j\x8e\
\x18F\xb0\xb1\xf6a\x1f\xf8\t\xc7\x8f\x9e\xc26\x9e\xa4\\\xaf\xf0\xdf\t\xcek\
\x1aH\xf0w\x7f\xab\x08\x04A\xbc\xa5:G\xb9\xf4^\xaa\x9fk\x82\xd0d\xec\xc0\xf5\
\xcd\xf5$\x8d/|\x06\xef\xe4}\x00|\xeb?\x1b\x94\xd7\xdfM\xee]\xefGn]\x0f\xbc\
\x08q\xd8\xc0\xbcN`\xdejaD\xb6\x1b\x87\xfbD\xdb\x9bF\xb5\xdaF\xe9-\x94\xda\
\xa6\xf5}\xc9\x03?qy\xf4!\x0f\xef\x81\x16\xfaYE\xbdZ\x08\x07\x98\xa9\xdd\xcc\
\xf3\xf9<N6\x0b\x08Z\xae\x8fT\x1a[,\x93\xfbE\x1b\xfb\xb5\x16\x96e\x06n\xe5\
\xa7\x15\xef\xcc\xbe\x05\'\xf3\xea\xf0Y\xcb\x04\xf3\xe8\xc3\x98A\xb0\x03*\
\xd5&\x8do+\x8c\x1b_\x82\xf5\x8a\x0c/9\xf6R\x00^\xf9r\x03u\xfe\x1a\xf4\xf9\'\
\xf1\x1fy\x8c\xcck^\x89\xd6O\xb4\t\\\xe4\x08A\xe0\xc6\x8c\x9d\xc3\xc9\\\x85\
\x10]\'\xe7|a\x8daCZ\xac#Q\x8ak\x7f\x08\x1a\xe43\x8a\xcf\xd6[\xdcx\xbd\xc1W\
\xbe\xde\xe2\xe8\xaaA!\xff\xb6\xf6\\x\x0e}\xee\xa9]&7_\x06\xc7\x96tD\xc1\xb5\
b\x96\xb5b\xf4\x0e\xd5\xf7\x1f\xe0\x08\x10\x15\t\x87o\xec\xf3\xfc\xe4\xba\
\xdb\x9e\x89~mm\x8d\x8c\xfdF\x84\x10x\x0f\x9d\x06\x82\x1d|g\xcfC&s\x1c\'s\
\x1c}i\x07\xad\xb7\xf0\xbd\xc1\xe7&5\x12\x9e&\x10\x17JAk\x82\xa3\x9aB\x82\
\x96eQ\xf8\xcd5\x94\x0e\\X\xfdG\xce\xf0z\xdbB\xaag8|H\x93\xcd\xdc\x8e\xd2\
\x1aq\xedR\x9b\xdc\xb3\x03\x13\xd4\x1a\xeaM\x8d\xe7k\xbcq\xa7\x93\xc6\x80\
\xd6A\xcd\xd5\x9b\x93\xbd\xb5\x88\x83\xe4\x870^l\x84\x9d\xb8p\xb7C\xb5\xdeB\
\xeb\xcb\x14ro\xc14\r|\xa9P\xea\x11\xa4\xf4\xc7\x16\xaa\xe5B\xb9\xbc\xdb\xaf\
}\x10f\x19g;$hZ\xef\x00\x02\x87\xfc\xcag\x1a\xd8\xaf\xb6\xc8em\xd06\x19\xfb\
\x18\xbe\x94h\r\xbe\xff\x7f3+\xcc,\x10\x12T[[T?\xd7\xc2\xbc\xf1\x1a\xacWY\
\xb8\']Z\xdfk\xb1\xf1\xe7\xf7\xa0\xf5\x0e\xf2\t\x8d8\xa4Qcjo\xcf\x06\x8e\xfb\
\xfc\x17\xbe\xc2\xf6s\x1ac\xd5\xc0\xb4l\x0c\xe3\x16^\xfe\xf2\xcb8\x99\xe3H\
\xa5\x11j\x89\xef\xb7\x9a#\x13\xdb\x8b\x81\xe3B\x82\xdb:\xf05\x92\xe7U a\xac\
\x9a\xfc@*\\O\x06\x91\r\xfc\x93cwb$\x0e\x1c\xb71\xfe\xbe\xf4\x02\xc7=\xd7\
\xf5\xd2TJ\x82\x92\x88U\x93\x8f\x96+\xfc\xd2;\xde\x02\x97\xfcD\t\x8f\xc3\xdc\
\x03\xc7\xe9\xe7z/\x88\xab\x05b5pC\xf6\xfd\x9f`\xdd0\xba\xc3L\x12Wm\x9a\xc0q\
\x13\xc4U\xeb%\xb0\xbcj"\x10\x1c>,0\x0f\xc7\x1b\rf}$J4p\x1cL\x11WM\x08\x83\
\x95v\xdb0\xaf\xbb<u3\x19\\\xd8\xe9\x03\xc7u\x90\xf8H\x94\xe5\xf6\x12fu\x05\
\xae\x131\x1c\xde\'@Z\x81\xe3\x12\x1f\x89\xd2\xa9\xbd\xd5\x15x\xd5\xcd\xc9F\
\xc6$G\xa2\xa4\x81\t\xe2\xaa\tD{)\xf0\xd5\xcfW\xf8\x175\x1f\xc5\xef\xdc\x8eD\
1\x0c\x03\x81@)\xc93\x13\x90k\xb5\xfc\xd8\xf76\x13\xdc;\r\xba\x0e\x92\xc2\
\x08\xd5\x07\x9e\xfb\xc5\x89\x12k\xb9>\x99l9\x96\xb3\xd64\x07x\'A\x84`P(\xdf\
k\xa1\xe4\xe3\x13\'\x98VX\x87\xb4\xd0S\x83JI<\xb71\xea\xfe\x91\xd8\xf3\'\x86\
\x8c[)\x8c\xc3\x9e>1dKk\xfc\x1f\xcf\xcf\xc02\xf7\x13C\x1e\xfaa\x03\xbd5_S\
\xd9\\O\x0cy\xec\xa7\xf39|{Z\x1c\x9c\x18\xd2\x87+\xfe\xc4\x90\x85\xedU\x9b\
\xd7&\xf8+~3\xde\xc1\xf1|I\xb0o\x8e\xe7\x9bv\xf9\xd2\x99M\xe3\xa6spj\xcf\x14\
88\x0b;\t\x0e\xce\xc2\x8e`\xdf\x9d\x85=\t\x0e\x8e\xe7\x1b\x84\xfd(lO{\x16\
\xf6(\xec\ta{\xdf\x9c\xa4\x9c\x04\xf3\x12\xb6\xf7\xd5\xa9=\xfd\xf9\xc5A\xaaM\
\xd4q\xb2\xb1\xefMC\xd8\x9e\xfb\xa9=\x9d\xe3\xf9<\xcfc\xf9(l\x0fi\x85i\x0b\
\xdb\x89\xadK\xd3 W(\xa7\x9d\xe4H\xcc\xf5\xd4\x9ehf\xa6i\x90u\xacXz\xd2Q\xb3\
\x84R\xc1\x16\xb1q\xf9\r\xc3L\x96K\xeb\xa5\x1ck\xc5\xecTiD!\xa5\xa2T\xae%2\
\xeet\x90\xfa4\x9169\x08ZCu\xb3\x88m\'\xb7\xe7\xa7\xdaDM\xd3\xe8!\xd7lz\xd4\
\x1b\x93\xeb[-\xcb\xa0Xp\x02\xb3\x9e\x10\x94K9\nk\xd5Di\xa4J0\x9f\xebv&\xd7\
\xf5SQ\x0b\xb6Z>\xb5\xea\x1a\x10\xac@\x0cC$\xd2\xa5\xa6\xdaDo\x8bl\t\x99\xa6\
\xe6\xa2h\xb9>~d\x94I\xba\xed$U\x82\xcb\x91\xcd\xacI\xde\xf28Lc,\x9d\x8b\xb0\
]^\xcfa\xf7\xbd\xf9\xcdJ\xb3\xe7\xe8\x92Ya.\x04\x8b\x85\xec\xae\xdf\xde-\xd5\
\\\x08.l5\xb1\x9cZL\xe8\xd1\xd8\x7f+\xfa\x84\xb8\xe2\t\xce\xa5\x0f~\xf9\x9b.\
\xb7\xdfnu\x7f\xd0\x9aFs>\x06\xd7\xb9\x10\xbc\xe7\xc3\xb5yd3\x103k\xa2qV\x11\
\xf3H+U\x82^$\xd0@\xa1\xe0\xb4\x0f{\x9b\x0e\xd9;\xed\x1e\xe9E%\x8c\xff\x96\
\xear)\xaa\xb6\xb7L\x83F\xadD\xbd\xe1\x8e\xb5\x1b\x0e\xdbH+D\xef\x9em\xa5\
\xa0\xf8;\xc3\xf7\x96\xc6\xf2]\x9a\x06Z\x83\xeb\xe9p\x97|\xff\xeabZ\xb4\xbc\
\xddQ\xdb\xc7!\xf5>\xe8z\x01\xc9\xb4\xd1r5R&\xdbq\x0f3\x1aE]/\xf07j6jX\x969\
\xd6\xe24j\xaf\xb7\xd2KH\xb93\xb1\x16nf\xd3\x84\xd6\xd0hz@\x0c\x1b\xa0\x99\
\x1dq5y\xadE13\x82B@!\x9f\x99\xa9\xd2)\x0efB\xb0\xe3\x8eS\xc8\xa5c\x1e\xd3m\
\xcb\xee$\xcb\xc2\xd4\x07\x19\xa7\xcf\xd7(\r\x88\xb6i|\x92h\xf5\xa9\xd6\xa0\
\x10`G\x1d\xa9b*\x9d\xf2\xf9\xc1^\xb9\x86\x11\xcc\x83\x9dA\xca\xb1\xa1\x91p\
\xc7g\xaa\x04\xa3\x8bv\xa9\x88\xadt\xca8C\xdc\x8eep\xdcm.\xdb\x99W\x05\x82-4\
K\xb1\xcb\x94\xaau\xe9\xder\x1e\'\x13l.\xa8T\xc6\xbb\xce\xc5\x81T\xbd\x8e\
\xcf\x86\xb1\x84T\x0b\xb2.\x9d\x88\x9c8\xa2\x94\x1eh\xed\x99\x04Z\x0f>%m\xee\
\xd6\xa5\x1e\x88\xa50\xb3Y+\x9d\xe6j]\n\x119\xa6r\x96J\xa7\xc4\xbeK\xd3\xc0u\
\xe37\xc74\x94N\x0b\xb3.%\xc5\xb8\xfc\xecJ\x11\xd3\x0c\xc4\x9dJ\xb5\x8a\x9b\
\xa0\xe6\x0f\x94Ni\xe0@\xe94C\x1c(\x9d\x92`\xdf(\x9d&E\xbd\xe1QZ\x0b\x8e\xa9\
M\xa2t\x1a\x86c\xa6\xc1\xdd\xb9\xaet\xe4\xf9\x12\xff\xd4\x02\tJ\xa9\xd8\xac4\
Y/\x05\xbb\x9d\xd2V:m|<\xb9\xeb_\xea\x83L\xa5\xdaD\x08(\xad\xa5\xb7\xa5Kk\
\xcd\xc6f#\xd6\x16\xe8~\xccd\x14\xdd\xac4\xa9\xd6Z8\x19+\x96\xd2i\x14\xa4T4\
\x12\xfa8E1\xb3iB)\x1d[\xe94K\x1cH2I\xb0\xe7}x\xa7\xc5\x9e\xf6\xe1\x9d7\xe6\
\xee\xc3\xbb\x08\xcc\xc3\xad \xd5\xf5`\xcc\xcd\xf3S!\xebd\x87\xaa\xfagn>K\
\x8a\x03\xb7\x82\x14p\xc5\xbb\x15\\\xf1\x13}\xca>\xbc\xb3\xb9w\x1a\xcc\xc4\
\xad\xa0#\\\x0fS#.\xdc\x87w\x1a\xb4"\x9c\xca\xe5\xc5\xe9b:\x98\xc9(*D0\'\xb6\
\x1a\xebS\xebf\x94\xa2\x1d\xabt\xb2\xe7S\'h\x18\x90s:6\xbd\xe9u2\x86\x01NF`\
\x99:\xb1m\x10R6\x9f\x014\xeb%\x84\xe8*\x89F\xc9\x9a\x1d\x98CL\xb7\xd1\x85\
\xb2i\n2\xb6\xa6\xd3\xad\x17b>\xabV\xcaXV7\\\xf4\xfa\xc6\x17\xa8\xd7\xef\x1b\
\xfb\xdc\xa8\xc2:\x99\xeen\'\xdb\x12={p\xe2\x98\xcfR\x9d&\xa2\xdd\xadV\xbf?\
\x16\xb9q\x88\x06.\x16"pe\xef\xc7(\xf3Y\xaa\x04\xa3M\xcau\xd3;\x91R\xcan\xad\
\xf5\xebw\xe6\xea\x9c\xe5\xba\x1e\x96\xb5{Aj\x9a\xc6\xae\xd14\x89\x85h\x18\
\xf6\x8c\xf9\xacQ+\xed\xd2roV\x1alV\x9a\xb1\xf2\xdb\xf3\xe6\xb3A*|k\xc4I<i\
\xe2@\xd8\xde\xef\x98\x0b\xc1A\xc6\x17\xa5\xe7\x1c\xbcq\x96(\x14+\xbb\xbc\
\xc6\x9a\xad\xf9h\xbcg\xba\x9d\xb2\x03\xd7\x93\xb8\xdebN\xe1J\xb5\x89F\x9b\
\xdd\x9b2V*i\n!\xc8D\xd3J\xb8RN\x95`\xd4\x89\xf8}y\x87\xd2Zv*\xcb\x92i\x1a\
\xfc\xcd_\xe5{\x8eCIz\xc4C\xaaM\xb4\xd1\xf4\xf0\xbc\xd3\xd8\xf61 \xb0\x11\
\xa6i\'\xac\xd7\xdd\xc4\xaa\x8e\xd4G\xd1\xb5\x8f\xfe\xc3L\xc2\xdb\xfa\xbe\
\xa4\xbc\x99\xdc\xc2\x9b:A\xff\x94"_\xacP\xad}7\x95\x13C\xa4TTkMr\x85\xcaD\
\xb6\xfe\x99\x8c\xa2R*\xca\x1b\x9f\x9fE\xd2\x89q \xc9\xecw\xa4\xba\\\xea\x0f\
\x1c7\x0c\xfd\x16\xdeQ\xf9%\t\x1c7\xf3\xd0\x7f\x07\x81\xe3"8\x08\x1c\x97\x12\
\xf6\xdf s\x108\xae\x17W\xbc\x85w\xe9\xae\xbb\xee\xda\x19u\x14\xf3~\x86\x942\
\xa8A)\x17\xb3\x18\x9d\x07\xfe\x1f\x90\xfc\xba_\xbc\xd8\x85\xe6\x00\x00\x00\
\x00IEND\xaeB`\x82'
def getscanprogress08Bitmap():
return wxBitmapFromImage(getscanprogress08Image())
def getscanprogress08Image():
stream = cStringIO.StringIO(getscanprogress08Data())
return wxImageFromStream(stream)
index.append('scanprogress08')
catalog['scanprogress08'] = ImageClass()
catalog['scanprogress08'].getData = getscanprogress08Data
catalog['scanprogress08'].getImage = getscanprogress08Image
catalog['scanprogress08'].getBitmap = getscanprogress08Bitmap
#----------------------------------------------------------------------
def getscanprogress09Data():
return \
'\x89PNG\r\n\x1a\n\x00\x00\x00\rIHDR\x00\x00\x008\x00\x00\x01,\x08\x06\x00\
\x00\x00\x96\xb6p\'\x00\x00\x00\x04sBIT\x08\x08\x08\x08|\x08d\x88\x00\x00\
\x18\x8cIDATx\x9c\xed\x9d]\x8c$Wu\xc7\x7f\xb3\xf6\xee\xdc\xd9\x1d\xef\xde5\
\xc6[klo\xd9\x18(\x03A\xedH\xc4\x8d\xd7\x82&\x0f\xd0(Jh"\x04MH\xa0\x15E\xa4!\
J\xd4\x96\x882$R\xd2\x8a\xf20!\x08\r\xe1\x81ID\x94~q\xd2<D\x19\x1e\x10M \xd0\
$(i\x91\x04W\x0cf\x0b\x7f\x96\xbfvk7\xb6\xf7\xcezw\xe7\xce\xda\xde\xc9CuW\
\x7fwWuWw\xef\x0c\xfd\x97f\xa6\xa7\xba\xea\xde\xfb\xaf\xfbu\xee9\xe7\xde\xb3\
p\xf2\xe4\xc9\x1d\xf60\xae\x070\x0cc\xd6\xe5\x98\x08<\xcf\xf3\t\x02lll\x8c\
\x9d\xe0Z1K&\x93\x00\xa0\xb0Rf\xa3b\x03P\\Ic\x99\xed/qm\xbdJ\xcdvC\xa5[^\xcf\
\x91LZ\x00dr\xeb\xd8!\x9e\xcbd2\x00M\x82\x93D.\x9b\xea\xba\xf6!O\x85&8\x0ez\
\x12,\x16\x8b#%\x96H4?g2\x99\xa0\x06{a\x11\x11:?\xd3l~\xce\xe7rx\xaa\xf7}\
\xbd\xd2\xd970\xe5=\x80=Op*}\xf0\x9b\xdf\xb3\xb9\xfbn\xb3yAk*\xd5\xfe\xcd7NL\
\x85\xe0\xa7\x1f(O#\x9b\x9e\x98X\x13\x95b\xf8=a!\xc4\xe8\x89\xc5J\xb0ut\xb3,\
\x81\x8c\x81\xa5a, e\xf3\x7f\xa5\xa3=\x1fk\x13u=H\xd6?\x0b\x01\x95r\x81\x8d\
\x8a\x8d\xd6\x83K\x95\xb0z_\x17\x02,s1\xf8_)\x18\x92T\x17b%\xa85\xd8\x8e&a\
\xf95g\x18\x92|.\x15[\xfa5g;\xf23\xb1\xf7A\xdb\xf1I\xc6\x8d\x9a\xad\xf1\xbc\
\xe8\xeb\x82\xd8j\xb0S\x8a\x90R\x90L\x98\x98\xa6\xc1\xb01"\x95J\xf5\xfdN\xe9\
\x05<o\xa7\xabi\x86\x95\xb6bm\xa2\xad\x99\x16\x8bE\\\x0f*\xd5\xea\xd0\xe7\
\xa4\x91\x1a\xf0m\xffZ\xeb\xcc\xcf\xb2,\x1c\xc7i\xbbg"\xf3`\xb1X\xc40$\tK\
\x90\x1eP;\r\xf4\x1bd\xc0\x1fX\\ox~@\x179\x98\x00\xc1b\xb1\xc8J!\x1d\xeb\xe0\
\xa25Tk\xba\xa7\x90=\xac\xa9\xc6:\xc8L\x82\x1c\xf8\xd3E:%\xda\xe6\xc3F~\xc3\
\x10\xebr\xa9sZ\xa8V\x9d\x81K\xa6a0MI.\x9bD\xd6\x99\x19\xc2\xa5P,EJ#\xd6&\
\x9aI7;\x93m\xbb\xe4\n\xd1\n\xd3\x0b\xb5\x9aK\xb9\x94\x07 \x99\xb4\x90R\xa0"\
\x883\xb16\xd1;Z\xd4\x12\xe3\xd4\\+j\xb6\x8b\xdb2\xca\x98f4\xfdQ\xac\x04[W\
\xe9Q\xde\xf20x\xfd\x96\xf0!0\x95\xe5\xd2\xb8J\xa7q\xb0\xe7\x95N3SY\xb46\xe7\
Ib\xcf\xebd\xf6<\xc1\xb9\xd2)\x0e\xecI\xa5\x93a\xc8\xe17M!\xadxeQ\xb3\xf9\
\xb9\x90OS\xde\xa8\x85\x9a\xf0\x07\xe5g\x18\x0b\x98-z\x99l6G\xdd\xae\x12*\
\x9d\xb9\xd2)\n\xe6J\xa710s\xa5S+l\x07\x1cWS\xad\x94\'\xa6t\n\x8b\x89M\x13ZC\
\xa5\xea\x00\xddz\x92N\x8c\xaat\n\x83\xb9$\x13\x05\xb2\xae;i4\xc9\\f\xb5\xe7\
}J)\n\x7f\xbcA\xf5?B\xd4\xae\x84T\xc2OS\xd5\x95OQ\x9ak\xac5h\x9a\x0c\xedo\
\x00RJ\xf2\x9fH\x0e\xbf\x11HZ>I!\xc0\x90`\x99\xd1\xca4\x15Q\xad\x17\x92I\x8b\
L:\x81\xeb)\xfa\t*R\x82a\x8c\xb7\xac\x8aUu_\xc8\xa7HX\xe9\xd0\xcf\xac\xadf\
\xc7\xca/\x0cb\xad\xc1A\xc3}lh\xe9\x7f3S\xdd\x87E\xb5\xea\xa0\x94&1Hw\xdf\
\x02O\x81\xe3v_\x9f\xaa\xea>,\x1c\xe7L\xa07\x1dU\xb8\x0f\xf3l\xac\xa3h5\x84%\
\xa9\x01\xa56\xc7\xceo\xea\xaa\xfbQ1\xc9\xfcb\xad\xc1a\xcb\xa2\xf6{\xe3\xcc\
\xb9?b\xed\x83\xa5r\r\xc3\x90H1x\x05\xbe\x8d\xe6+\x7f_\x8d3\xeb\xbe\x88}=X\\\
\xad\xc4\x99\xe4\xd8\x98\xd8(j\x18\x92T\xd2\x1c[7\xe3\xbaj,C\xceD\x08\xc6m\
\x04])\xa4)\x14\xcb\xd4jn\xe4gc_.M\xc2\xc2k\x18\x92\xd2Z\x0e\xcb\x8a\xeez\
\x1dk\rN\xd2\xc2+\x84\xa0XH\x93\xcd\x97"\xa51\xb7\xf0F\xc1\xdc\xc2;"\xe6\x16\
\xde\x01\x98\x8a\xd7\xfd0\x0b\xef\x9e\xf4\xba\x9f[xc\xc2\x9e\'8\xb7\xf0\xc6\
\x81=i\xe1\x9do+\x08\x89\xf9\xb6\x82!\xd8\xf3\x16\xde\x89x\xdd\x17\xf2)\n\
\xf9\xf0*\xfc0\xf0-\xbc\xbd\xf3\x1b\x84\x89y\xdd\'RE\xb2\x19\xbf\xc1N\xca\
\xc2;S\xaf{\x80\xf5R5\xd4\xfd\xe3Zx\x07\xa9\xeec\x9f&\xae5\xa5q\xac5X.\xfd\r\
\xb5\xca\xcaPMZ\\\x16\xde\xa9\xab\xee\xd3\xe9\xb7\x84R\x136,\xbc\r\x82\x83\
\xf2+\xad\xe7\x90\xd2\x9fG\x0c\x01\x9e[em\xbd\x1a\xbaL\xd7\x84\x85\xb7\x1f,S\
\x92J\x863\xad\xf5\xc3L\xed\x83\xe3Xx\xc3b\xd7-\x97f*\xc9DE\xc3\xc2\x1b\x16\
\x8e\xebQ*W#\xe5qMXx\'\x89\x995\xd18,\xbca\xb0\xeb\xfa`T\xcc-\xbcQ0\xb7\xf0\
\xce\x00{\xbe\x0f\xc6*\x8bv\xbaS\xf6C\xa7\xb0=(\xbf(\xee\x94\x13W\xdd\xcf\
\xdd)[\xb0\xeb\xdc)G\xc1\\\xd8\xee\x85\xdd(l\x8f\xebN9\x08\xd7\x84\xb0\xbdk\
\x9c\xf1\xa2`.l\xc7\x84\x98\x85\xed\xc9\xdc;\x0ebU\xdd\x0b\xe1\xbbrMC\xd8\
\x9e\x89\xea~e\xc5\xcfT\x01\xab\xab\xabX\x96\x85\xd6\x1a)%\x96\xe5\x8f\x94\
\xa6i\xb2\xbd\xad\xf9\x83\x07R\\\xd0\xaf\xe1\xfc\xf4\'\xc1}\x9ew\x0e\xad/\
\x87\xceo\xea\xaa{!\x0er\xcf/\xdc\n\xd4\x8f\xf3\xdc/\xd1\xaf\x08<u\x1e\xbd\
\x05j{\x1b4\xec\xb0\xcd9}\x15\x00\xf3\xadwb{\xfb0\x8e\x1c\xc30}W-C\xeeC_\xb9\
\x8c\xfb\xa4\x83\xe7\x9d\xc3\x90>i\xad\x19x\xa6\xcc\xc4\xbc\xee-\xcb\xa2T*c\
\xdcf\xf2\xd4\xffm\xa1\xd46Jm!\x04h\xbd\x85\x10K~\x01\xa9\x9b\xbf\xea\xe4\
\xf4k\x0b\xfe\xff\x97vp7\xcf\xa2w\xf0\'\xf2\xbatfH\x83D2A\xc2\x84\x83\xbc\
\xc0e\xf5\xdfT\xedm\\\xb7i\xaf\x98\x98\xea\xfe\xb0\x94\xdciZH\xe3\x0e\\\xef<\
\xae\xa7p\xb5\n$\x8d\x06\xa9\x061q\xf5U\xd8w\xbd\xff\xf7\x80\x7f]\x9d{\x19\
\xb1$\xd0; \x16@\xec\x17\xb0\xbf\xfe\xd0\xab\x1aO)6*\x8a\r@\xde H\xbe-E"\x01\
/\xa9Z(r\x00\x0b\'O\x9e\xdc1\x0c#\xf4\t\xb1\x89D\x82t:C\xe2\xde\xfbQ\xdb\xcb\
(\xa5\x82\x97\xde\xf8+\xb8\x88\x90\xafC.\xbe\x86X\xdaA\xf0ZP+\r\xd1Y\x03ZiT}\
8\xd5\xdaWy(\xb5\xcd\x85+\xbeU\xf7\xf2\xd5vA\xdbW\x89\xf8\xa7}I<\x8a\xc5\x95\
\x9e\xcd\x12|O\xab\xb6#p\x87AJI\xe6\xd7\xb3\xa4\x7f\xf5\xc3\xa8\xede<\xb5\
\x85\xdeV\xcdB\xbfz\tc\xf9\x15LS"\x16n\xf2\xaf\x1fh$\xbf\xbf+=\x01pL\xd0\xf0\
`\xd3W@\xd4kVo\xebz\x7fSx/\xec\xc7{\xf1\x15\xf4\x16\x88\xfa\xebi\xb8\x80\x15\
\x8a_A\xb2I>\x9fC\xa9\xde&\x80P\x04\x93\xc9$\xb9|\x01!M\x94\x06\xbd\xb5\xe5\
\x17rq\x11u\xf6\xd1f\xa1o:\x82\\l\x7f\xeb--54\xc4\xa2\x80\x050\x97\x0cL\xa3I\
\xd6u7q\xd5"\x04D\xcf"\x80\xf5\x7f\xac\xf2\x9f\x95\x12_\xfa\xd2Zt\x82\xab\
\xab\x9f\'\x99\xfa \xae{\x1e\xad\xb6\xfc\x81\x00\xb8\xb4\xf94/+\x15\xf45\xc1\
\x0eB\x88:\xa1\xdek8\x7f\xd0i\xfc\xa7\xb1m\xafM\x13\xd75\xf7k\xd0mW/\xc1\x85\
\xe7\xb8\xb8u\x1d\xcb\xc7\xcc\xe0\x99\xca\xf7\x1d\x8c\xe3\xef\xa1TJ\x92\xcb\
\xb5/\xc1\xfa\x124M\x93\xf5\xaf\x96P\xafH\xecS\xa7\xfd\x82\x0b@\xbd\x88\xe7=\
\x87\x10Ku7\x91\xe6\x88&\xea\xbf\x07\xad\xea\x1b\xdf\xa9\xf3\x04\xfd\xaf\x0b\
\xf5\xbe\x16\x90\xd3\xa0Y\x00\x961o\xf7\xff?\xa7N\xe3]\xd8\xc7\xe1\x9bM\xc0?\
\x92L)\xa8\xfck\x8d\xe2\x9f\x14\x82\xa4z\x8aj\xa6iR,\xae\xe2]\x14\xa8M\x8d\\\
ZB\xab\x17\xf1\xdcGQ\xeaE\xa4\x10\x08\xb1\x13\x90\x12\x08\x84\x10,\x89\xc1*\
\x0b\xd1\xe3o\xd7\x8f\xa8\'"\xfc\xcf\x82\xc6\xe7\x9dz\x9e\xcb \x96\xb9\xd9x\
\x1do\xbe]p\xf1ls\x90\xb9\xa45\xe5o\xbb\xe4\x0b\xcd\xa6\xdaE0\x9b\xcd\xf2O\
\x1b\x15\xc41\xab^\x88Kh\xedrI_\xac7\xc3\x1d?\xc3:\xa9\xa00\xc0NK\xd3\xecU\
\xf8Vr]\xac\x1b\xd0\x1a\xd30\x90\xb2>\xa4\xb4\x12\xa5\x95\xa8?\x15\xbd\xfd\
\xc4\r\x1c\xd4\xa7\xd9\xbe\xe8qH\xf8cy\xab\xa3m[\x13\xb5,\x8bl\xae\xc0\x8f\
\x1d\x15d\xac\xd4y\xb4R\\W/e\xe3\x8d\xf6*\x9bR\xba\x07\x81\xdeU\xaa\xb7U\xfd\
;\xdd>w\x08\xff\x84s\xd34\x90\xa6\xc4v\\\xd0\x1a\xdd\xc8\xbb\xde/\x1b$5\xcb\
\xdcl,sXo\xe1\xbd\xe4"n4\xdb\x04\xf9`\x1eTJ\xb1\xba\xfa\xd7x\xea*z\x07\xf4\
\xe6\x8bx\xdech}\t!\x0e\xd5\t\xf5\'\xd7\xbc\xde\xac\xd1AP\xba\xfd\x9e^C\x8di\
\x18\x98\xa6\x89\xeb\xfa\x8e\xe9m=V7\x87\x1f\xad\x17\x80\x8b\x00\xb8\xcf(\
\xc4\xe17\xe29\x1b\xed\xf3`>_\xc0q/7\x07\x01\xf5\xac_`q\xa8\xdeT\x1aCH7\xa9.\
:z\xb8\xea\xa4\xd7\x0b\xf0\xeb\xb3\xf9\x8d\xebyp\xbd\xc04Mn6\x0c\x1ev\x1c\
\xb4\xaa\x13\x13\x02Q\')\xc4\x0eZ/\xfb/\xe5fp\xcf=\x11\xa4\x11\x10\x14\xd2\
\xf4\x07I\xadx\xdeu\xb9.\xc8\xaaO\x8d\xb4\x90\xeb\xfcN\x080O\x18\xc8\xa3b,OA\
\x00\xad\xb6PJ!\xa5$\x99H\xe0\x9d\xf5pN\xb9\xfewB 4\xf8\xcd|\xc7\xafI\xe1\
\x8f\xb4\xf6\xe9\x0e\x82\ryPy/4\xc9\r\xa9\xb5\x9e\xb5 \x05I\xcb\x08\xa7\x01\
\x0e\x01!\x970\x80\x97\x94\xe6\xa0\x10\x18\xc7\x0c\x0e\x1f\x91<l;A\xdf\x04\
\xbf6\x11;\xf5\xd6\xb3\x1c<\xdf\x1cd\xb65\xae\xe7\xa0u\x93\\\x94Zk \xd1A.\
\x8aI\xad+\xbb\x96tn\x94M\x92\x07\x85 \x99L`;\x0e\xcaS\xedMV\x80\xd0\xcd\xb9\
\xb9Ip\xe7b\x08r\xfd\x89\x01\x1c7\xda\x9b\xa4m\xbbco\x10\xb1\xdeb`\x1c\xf75\
\x04\x07E\xcbZ\nHX\x16\xee\xb2\x87\xfb\xb8\x1b\x90D\xb7\x8f^\xc1<\xe8\x9d}\
\xae>\x02\x8a\xee&\x19\x82\x1c\xc0\x91#MU\x85wF\xc5\xb2\xfb\xc5\xf9\x99\xd7,\
\x8a\x10\xb8\xae\xdb\xf6\xbdy\xab\x81y\x97\xd9\x9cg;\xbaFs\xa2\xaf3\x8f\xd2\
\xdf\x06Am\xc6\xa7Uj}Q\xfe\x92\xaa}\xe5`\xdej`\x18FO\x92M\x82\x9d\x83\xc2\
\x18\xe4:!\x84\x7fjs\xebO4\xb4\xbf,\xa7\xa3\x16\x01,\xcbD\xc8\xee\xb2\xb6I2"\
\xf8\xd5\xfc0\n\xb9\xceg\x92\x1d\xeeX\x02\xdf\xe7\xc5uG\xdct\xa5\xc1q\x9f\
\xc52ok\xbb\xfc\x0e\xcb\xe2\x875\xbb-\xff\xa0\x06;\xc9\xb5\xca\x8f#\xe4\xdf\
\x85VYT\x03R,\xf6\xb8+<\x94\xf7"\x97;F\xe8\x83\xa2{\xc2\xee\x12\xb6E\x0cM\
\xb2\x1f\x1a\xe4\x04\xa0w\x16\xc6N\xef\xd1\x0eK\x8cRW\xba\xden\xc7zpR\xd4\
\x9a\xf9\x065\xb90~\xb8\'\xad5\xb6\xed \xe4!\xd0\x97\xfc\xc1\xa8\x83B\x93`\
\x08\x01y\x1ct\xad\x8ab\xa8A\xf0Ij\xaf\xae\xb8j\xe4\xd4\xd2t\x9b\xb2\xe8\x04\
\xe9\xd9\xb6\xdb5?\xf5S\x12\x8d\x8f\xf6Z\x8c\xdd>\xd8\xe8g\xad\xf0\xe7\xb1\
\xc9[[\x9a\xab\x91f^\xb1\x9b\xcf\x1a\x83\x88<\x12S\x8b\x10\x04\x07\x89\x87\
\xbc\xbd\r\xb1\x12T\x9b\xcd\xf7g\x1c\x97\x98\xa6\x1c\xabc\x0b\x01\t\xf3X{\
\x1eC\xc4\xbf\xa0\x1f\xd6\x11k\x13\xf5<\x852e\xd0\xdfL\xd3\xf0\x8f\xfe\xa3\
\xbd\x81\xb6j("\xa5\x7ffx\xbf\xed\xcc+\xfe\xc3\x1bm\x0ft\xb7nF\xb4\xfct^\x0b\
\x03\xad5\xce\xd3\xde\xf0\x1b;\x10\x7f\x1f\xd4\xbeV\xcb=\xa3\xba\xd6\x82\xba\
\xe3o\xe3\xf3 \xa2Zk\xbc3\xf5\x1d\xdb!\xab\xbc5\xad\x89xYh\xdd\xbe\xcc\x99%Z\
V\x133,\xc5\x04\xd1\xac\xc1\x18\xa7\xa9~J\xa7N5a\xaf9\xb3\x13Zk\\W\x8d|\xdcC\
S\xe9\x14\x93\xdb\x83\x10\x02\xcb2\x02b\x83\x8c+as4M\x89<\x0c\xf6\xc3\xd1\
\x9b}@p}}\x8d\x03\x87$\x07\xf6\t\x96\x0f-\xd4\x0b\xbb\x08\xfb\xea\xeb\xc2\
\xba\xed\x8e\x03M{\x98\xd8\xd7!0\xef\x13\x1c?&\xf1\xbc1\xda{]a\xb4\xb9\xbd\
\xcd\x91\x86\xb0\xa0w\xd8\xdc\xdef\xeb\xb2\xc2;\xbf\rW\xb6\x9a\xb7_\x01\xaej\
\xb4\xf6\xcd\xe3Zo\xa1\xb5\x0e\xe2\x9a\x06\x04m\xbb}_\xfb\xa8\xfaL\x11\x93F\
\xad=\xcd\xc1~5\xbd\xf2\xe9\x8a\x01\xfa\xb9\xcf\xad\x00p\xfe\xbc?\xbc7\x7f\
\x9ao\xa63\xb1^\t\x8fK\xaa\xd7\x8b\xed\xbc\xd6\xf8_\x88%\xbf\x95\xd5\xafI)9z\
T\xf2\xfac\x06\xffV?\xa5/ \xf8\xfet\x1a\xf3\x84\x19\x14\xb2\xb1Z\xbe\xd2Jh\
\xbb\xe5\xf3Vo"\x13!\xb8\xd4A\xb0\xc5\x8a|\xa0~\xffA\xe1\x0fh\x8bB\xb0$D7A\
\xef\x8cG\xf2\xdepn\xc6\xbb\t\xc1<\x18W\x7f\xb9\xd6\x10\x10|ib\x0b\xd0\xd9" \
xa\xaf\x13<\x7f~\xef\x10\xf4ZN\x14\x08\x08\x9e={m\x08\xc7q\xa0\xf5 \xd7\x80\
\xe0\xb4\\\x8c\xa7\x81\\.\x17|\x0e\x08\xba\xeeS\xb3(\xcb\xc4\xd1B\xd0\x9da1&\
\x87\x9f\x1f\xa7\xf4=?\xd1\x03\xfc\xb4\x8f\xef\xe5nF\x1b\xc1\xfb\x92{X\x16\
\xdd\xab\x98\x13\xdc\xed\x98\x13\xdc\xedh:\x02\x8dq\xcc\xec\xb5\x8c\x96\x89~\
\x9b\xa7\xf6\xa0\xb8\x16\x10\xac\xda\xa7X@\xec\xb9\xc9> X\xc8\xff\x06U\xdb\
\xe1\x16\xc3\xdc\xf5$\xd7\xd7K\xc1\xe7\xe6z\xd0;M\xf1\x81OS}\xc8\xe5N\xd3\
\xe2\x91\xa8\xbb\x81\xaf\x11\xac\x97\xcam&\xef\xb6Q\xd49\xf5\x10\x9f\xfd\xfd\
\x1c\x95\xaa\xcd\xdb,\x13\xf7\x99\xdd\xb5\xca/\x977\xb0\xded\x91L\xbe+\xb8\
\xd65M<\xf7\xb8M\xe13Y\xd6\xbe\xfa\r\x8c\x9b%\xe7w\x892\xaa\\\xde\xc0\xbc\
\xcb\xc2z\xe3\x1dx\xde\xd9\xe0zO\x03\xa8w\xda\xa5\xf8\xd9\x8fs\xee\xa5u>\xf5\
\x894G\x81\xea\x0fj\xa4\xee\x9f\x8d0>\xc8N\xb2^*aH\x83\xe4}I\xb8*\xa89OQZ\
\xffr`\xaa\xeb;\xd1k\xad\xf9\xe2\x9f\xe6X\xfdb\x89\x9a\xfd4\xa9\xfb\x93x\xde\
t\xf56B\x0cvj/\x977\xb8\xf7\xde{I$\x93\xe8\xabP.?\xc8\xa7\xb2\x19*\x1b\x0f\
\x06\xf7\x0c4a\x1b\xb7\x98\xa4\x7f9\x85\xeb\x9e\xc7>u\x9a\xec\x07\xee\xf1\
\x0fI\xfd\xdf\xdaT\xd5\xfc\x9d\x8b\xf1R\xb9L\xc2\xfaEL\xf3v\xb4\xde\xc6y\xc4\
\xa1\xfc`\x89\xf2?\x94\xba\xee\x1dH0\xf5\xbe\x0c\xf2FI\xe2\xcd\xbe\xad\xed\
\x0fW7\xf8\x95\xf7\'H\xde\x93@)\x8d\xf3\xa8\x83\xfb\xa4K6\xdb\'\xacj\x8c(\
\x977@\x80e\xbe\x95t:\xe3\xfb\x9c>\xeeR\xf9N\x95\xca7\xca\xd8\xffU\xeb\xf9\\\
_\x82B\x082\x99,\xfa\xb2oBs=\xc5\x97\x8b\x19*5\x87\xcf\xfdE\x89O~$\x8du\xfb\
\x1dX\xef\xf0\xa7\x94g]\x97t:\x15;1\xd7\xb59\x7f\xc6%\x95J"\x84D\xebm<\xf7\
\x19j\xf6\xa3\x94\xfe\xee\x0b}\x895\xd0\x97`\xb1\xf8\xe7Al\x87\xc2\xea\xd7H\
\xbc\xf9\rX\xd61,\xd3D\xdd\x0f\xffR\xadQ=p\x84\xd4}wc\xdc,I&\x93\xfe\x88\xbb\
\xf32\xe8\xd7XX\xf0\x9b\x8a4\xac~Y\xf4$\x03\xbe\xed~gGp\xe6\xccc\xb0p\x03\
\x0b\x08\x94R\xb8\x8e\xc3w\xbf\xb5A\xed\x875\x1e\xfe\x9fZ(=RO\x82\xbft2E*\
\xfd\x11\xa4\x10x\r\x85\xf0\xbeCdsk\xa4\xd3Ir\x99$:e\xb1\xf1\xcd\x87(\xfc\
\xee\xc7\xb8\xe9\xa6c|\xf8\xe3\xbf\x83\xf5\xd6\xb7c\xdd~\x14\xb1\xb4\x88\x10\
\xb0\xa55\x8f8./_P\xe8+\x1a}\xb1\xbe\xc7A\xfb-Dk\x85\xa8\x1f* \x96\x05rY o4x\
\xf1\xb2\xe6\x90\x94ln\x82z\xc1Ck\xc5\xb1C\x8fc\x8a\xe7\xf8\xdb\xaf\xacER\
\x90\xf5$\xf8\xdb\xbf\x95\x03\xfc\xd3x<\xb5I\xb1\xf0\x11J_\xab\x82\xd0$,\x7f\
\xeb\x9b\xedxT\xbe\xfeU\x9cS\x0f\x01\xf0\x83\x7f\xafP\\\xf9\x10\xf2\x03\x1f\
\xc3\xdb:\n\xdc\x808(1\x0e\x0b\x8c\xdbMd\x8b\xbbq\xe0]Qw\xb5\xd0j\x1b\xa5\
\xfd\xed\xe9\xb5\x1fy<\xf2\x98\xcd\xb3O88\x8f\xd4\xd0/+6JY\x7f4]\x12\x98\xc6\
Q\x1c\xf7\xcc\xe8\x043\x99\x0c\xc9T\n\x10\xd4l\x17Oi,\xb1H\xfa=\x16\xd6[LL\
\xd3\xc0S\x1a\xf7\x8c\xc2{\xfe\x89\xb6g\xa5X\xc48\xf2$\xc6\x11(\xac\x94\xf9n\
\xcd\xe3\x96\xe3\xc71N\xdc\x8ayg\x82\xd7\x1f\x7f\x03\x00o\xbcM\xa2.\xeeG_|\
\x01\xf7\x99\xe7p\x9f\xb4q\x7f\xf6\x18\x00\xeeS\xddr\xb0m\xbb\x81c\xbb\x90\
\xd1|\xbd\xbb\x08\xe6\xf2\xbf\x07\x1a\xbc\x0b\x8a\x077j\xbc\xee\xa8\xe4[\xdf\
\xa9qdY\x92\xcd\xbc\xab>\x17nR\xfb\xfe7\x86v\xf0\x0b\xca\xe3\x82\xf2P\xe7\
\x9fBy?\x0e\xae\x7f\x1bp\x1co*\xba\xd86\x82\xf9|\x9e\x84\xf5\x0e\xb4\xde\xc1\
y\xc8\xb7U\xe8-\xcd\xf9\x8b\xf0\x9e\xfbO\x90L\x9c\xa0\xfaC\x17\xef\xf4\xf3\
\x94\xd6\xff*t&\x95r\xa1k\xaf\xc4\xdaz%\xd2Q\xb6\xa3" h\x9a&\xd9\xdf\xcc\xa3\
\xf4\x0eB\x80\xfb\xccY\xdef\x99\xd4\xecG9\xb8O\x93J\xdc\x8d\xd2\x1aq`\x01\
\xad\x9e\xa5P(t%\xd6/$J\xaf\x8d \xa6a\x04\x9f\xa7\x12\x12%\x9f\xff\x0c\xf2&\
\x19\xb8\x8fd?\x98\xc4\xf5\x14Z_\xe5\xd7\xde\xf7N\x0cC\xe2y\x1a\xa5\x9e\xc1\
\xf3\xdc\x81\x05\xba\x96\x10\xd4\xa0a\xbe\x1b\xf07\xe4\xaf\x7f\xb5\x82\xf5&\
\x93t\xca\x02m\x91\xb0\x8e\xe3z\x1eZ\x83\xeb>?\xb3\xc2\x8e\x82\xe6\x82wk\x8b\
\xd2\xd7j8?y\x02\xf3.\x13\xfb\x94\xcd\xea\x17JH\xb9\x84\xd6;x\xe7\xfcyL\x8dP\
{\xbd\x06\x13\xa5\xa7\xb3\x0c\x0bj\xf0\x9f\xbf\xfe-\xb6_\xd5\xc8eY?\xd7\xe5\
\x16n\xbb\xed*\xc9\xc4\t<\xa5\x11j\x81\x1f\xd5\xaa#e\x92\xcd\xadwE\xbc\xaa\
\xd6\xa6\xa3\x16\t\x08nk\xdf\x85\xcb\xbb\xa8|\tc\xd9\xe0aOa;\x1eB\x80\xe7\
\x9e\x8a\xe6\x89!\x9a\x1b?l\xc7\xc3vf\xa3\x1dhN\x13\xaf6\xf76(\xe5\x81\xf2\
\x10\xcb\x06\x7fT\\\xe7\xbd\xef~\'\\q\x87&\xd6\xda\x10M9\xfe\xd6\x9d\x06Z\
\xcf7\x8c:s6\xf5\xa2\xaf\xb6\x7f!\xae\x17\x88e\xdf\xef\xcbu\x1f\x03\xfd\xf2\
\xd0\xc4Z\xe3A\x98\xa6\xa0\x90O\x8d\xb5WY\x08H%\xafk\xbb\x16U\x83\xd22\xd1\
\xb7\xbf\x9b\xc5e\x03\x81\xe0\xe0A\x81q0\xdc{s=\xbf\x00\r\xa5V!\x9f\x8e5\xb0\
\x86\xe3F\x97|z\xaa,\x84\x90,\xd5_\xbdq\xf8j\xa4Z\xa8D\x8c\x17\x1f\x16JAm\
\x84PM=W\x13\x8b\xf5%\xcc\xf2\x12\x1c\x16WB%\xd4*E\xac\x19\x92|\xee\xbd$\x93\
\'\xba\xa2\xd6E\x85\xd6\xe0z\xba\x8b\xdc\xc8\xe7\xaa5joy\t\xee:\x16m\xbf\x7f\
\x98s\xce\x86=\x17\x05a\xf2\xebh\xa2\x02Q\xef@Q\xc9\xf5\xca8\x0c\xb98\x10\
\xfa\\5)%\x02\xbf\xf6F\x85coP\xc8\xa7\x06\xde\xa3\x94\xa6\xbcQ\x0b\xddW\x1b\
\xad\xbc\xd7\xc1q\xa1\xcfU\x13B\x06\xea\x83[\x8f\xedG\x88W\xc2\xe5\xde\x02\
\xd7\xa9\xb0Z\xcc\x86\xba\xd72\rVV\x87\x1fu\x96\xb0\x08\xe28\x01m\x07\xc7E\n\
\x89\xd2P\xb0\xbaN\x8dJ\xb9\x12\xaa\x90\x9d\x88\xb2?>\x95\xb2`\xd5\xff<\xa8\
\xa0\xd5\xca\n\xad[J\xec\xda\xf7F\x0b\x89"\x84D)\x0f\xc7\x1e\x8d\\T\x18\x86\
\xa4\xbaQ\x18hY6Mc\xecP\xd3m}p\x94\x95\xc28h\xec/\x9c$\x82QtKk\xdc\x9f\r\xd6\
\xb1\xcc\x1aJ\xe9\xc8\x91_\x83\x1a|\xe2\'\x15\xf4\xd6\xf4LeZk2\xb9\xf5H\'\
\x96\x8c\xe2(\x11\x10|\xee\xf1\xe9\x84\xacl\xc0\xb6]\x9c),\xa1~~\xfcd\xf6*b%\
X\xab\xb9\xa1\xef\xadF\xb8w\x1c\xc4\xba\x87\xb7f\xbb$R\xc5P[\xf3\xa6\xe5Y\
\x15\xfb&\xe5i\x1d\xeb\x10\x16\xb1\x12\xb4\xcc\xe3\x94\xd6?\x19kx\xbe\x84e\
\xb0Z\xfc(B.\xa1<E~\xa5\x1c\xa9\xf6c\xed\x83Q\xc3\xf3\x85A\xa1\x90\xc6\xb2\
\x8ec\x1a\x92D\xc2$\x9bI\x0c\x7f\xa8\x05\xd7D\xc4\x90~\x98\x87\xe7\x0b\x81X\
\xa3\xf6\x84\x8c\x8b1\x16R\xc9T\xdf\xa0\xa8\xbd\xca=\x8f\x182)\xcc#\x86\xc4\
\x84\xb9,\x1a\x05{:b\x08\xf8\x03\x80\x14:\xd0\xa7\xdavoIe\xd7\x86\xe7\x03h\
\xb5k\x16\x8b\xe5\xb8\x93\x8f\x8c=\xdf\x07c\xad\xc1\xce\xf0|\xb9\xccj\xcf\
\xfb\xe2\x8a\x85\x1d\x06\xb1F\xed\xf1\r\x9e\xc3\xed\x81\x9d\xb1\xb0\x07\xa1\
\x11\x9e\x0f\xfcX\xd8\x96\t\xb63\xa3\xa8=\xa9T*\xf4\xbdq\x84\xe7\x9bz\xd4\
\x9e\xa8\x88K\xd8\x1ed]\xda}\x83LG\xff\x9bX\xd4\x9e^\xa8V\xab$\xac\xf06\xf9q\
\x85\xedH\xd6\xa5VLR\xf8m`\x1e\x0b;&\xec\xbe>\x18\x11\xf3X\xd8Q0\x8f\x85=\
\x03Ll\xa27\x0cI*i\x8em\x82v]\x15\xb8F\x8f\x82\x89\x10\\)\xa4\xc9\xe7R\xb1\
\xa6W(\x96#\x19w\x1a\x88}\x14\x8d\x9b\x1c\xf8\xad\xa1\xb4\x96\xc3\xb2\xa2\
\xdb\xf3c\xadA\xc3\x90m\xe4\xaaUg\xac\xe6e\x9a\x92\\6\xe9;(\tA\xb1\x90&\x9b/\
EJ#V\x82\x99tS\xe4\xb2m7\x16I\xa5Vs)\x97\xf2\x80\xbf\x02\x91RD\xb2\xeb\xc7\
\xdaD\xefhq\t\x19\xa7\xe6ZQ\xb3\xfd\xa0R\rDu;\x89\x95\xe0b\x8bGR\x1c\xf1^\
\x1a\x18\xc7X:\x95\xf5`q%\xdd\xe57\xba\xb6^\x8d\xec\xf32\n\xa6B0\x97Mu]\xfb\
\x90\xa7fGpd\xebR\x9f\xbdK\xbd\xd0\xda\x9c\xa7\xb2wi\xafb\xcf\x13\x9cJ\x1f\
\xfc\xe6\xf7l\xee\xbe\xdbl^\xd0\x9aJu:\xaecS!\xf8\xe9\x07f\xa7\xc2\x9fX\x13\
\x8d\x1c\x1ck\x00\xc6\x89a\x18+\xc1\xd6\xd1\xcd\xb2F\t\x01\xd6\r\xc3X\xa05"J\
T\xf9!\xd6&\xeaz\xd0\xf0~\x11\xc2\xdf\xbb\xbbQ\xb1\x87\xaa2\xfai\r\x85\x00\
\xcbl\xee\xbaV*\xba\xaa#\xf6\x15\xbd\xed\xe8\xc0K\xbesu1.j\xcev\xe4g\xe2\x0f\
\x89\xe2\xf8$\xe3F\xcd\xd6x^\xf4-{\xb1Z\x97Z!\xa5\x1fV\xdd4\x8d\xa1;h\x06\
\x19m\x94^\xc0\xf3v\xba\x9a\xe6L\xacK\x9d\xd6\x1e\xd7\x83J\xcb\x89\xc9\xfd\
\xd0\xcf\xb1\xc7G\xffZ\x9b\x99u\xa9X,b\x18\x92\x84%H\x870\xa9\r2M(\xe5\x0f^\
\xc3\xf2\x83\x10{\x97\xe2@\xb1X\x8c]/\xa3\xeb\x96\xdd^B\xf6\xb0\xa6\x1a\xeb \
3\tr\xe0O\x17\xe9\x94\xa03B\xd8\xd4\xadK\x93T:\x01\x18\xc2\xa5P,EJc\xaet\x8a\
\x82\xb9\xd2iD\xcc\x95N\x03\xb0\xe7\x95N3SY,N)\xd8\xda\x9e\xd7\xc9\xecy\x82s\
\xa5S\x1c\xd8\x93J\xa7qM\xd7q\xa5\x15+\xc1\xd68\xf1\xd9l2\x16\xa5S\xea>\xabM\
zQ\x11\xc3\'\xc5\xdaD7*\x0e\x85\xbcF\x08\x81i\xc8\xd0J\xa7~8nH>\x98n\x1a<\
\x1c\xd7\xc3}z\x86\x04=O\xb1\xb6^e\xa5\xe0;\xe4\xc5\xadtZ\xfd|t\x17\x95\xd8\
\x07\x99\xf5R\x15!\x88\xf5$ \xad5\xabk\x95P\x1e\xc2\x9d\x98\x98\xb7a\xb9\xa2\
\xa9V\xca1)\x9d\xc0\xb4\xd2\x14\x8b\x83_\xdaT7gi\r\x95\xaa\x03\x84p<\x1fQ\
\xe9\x14\x06sI&\n\xae\xc5m\x05\xb1\xd6\xa0i\x86;&)\xca\x1e\xde\xc6\xb6\x02!\
\xc0\x90\xfe\xb6\x82(\xb8&\xf6\xf0\x8e\xba\xad \x0c\xf6\xc4\xb6\x82A\xd8}\
\x83\xcc,\xcdgQ1\xdf\xc3\x1b\x03b\xdf\x9c\x15vc\xc8\xb8\xdb\n\xae\xf9\xcdYq`\
\x84\xa3\xff\xc6\xc3\xac\xf6\xf0N\xcd|\xe6\xb8\xe08\xd5\xa9n+\x98\xea\xe6\
\xaci\xecy\x8a\x9a\xdfD\x96K\x86!)\xe4R\xa4RV\x0c\xdb\n<J\xe5\x1a\xa5\xf2hg\
\xbe\xc5>MX\x96Ay=\xd7\x16\xdci\x1c\x98\xa6Aq%C*e\x91\x8b\xe8\x90\x0e\x93X\
\xd1\xaff\xdb\xc8\r:/f\x18\x0c\xd9\x8c\xda\x93JZ\x14\xf2\xa9\xc8\x01\x00b%\
\x98N55`\x8d\x93\xef\xc6=\x1c\xee/\x8b\x19>\x9a\xf1W\x1e\xb9l22\xc1X\xa7\x89\
D\xc2\x0c>\x977j\xb1\x9c|\xf7g\xab\x1b\x81VNJ\x89e\x1e\x8f\xf4|\xac\x04\x8d\
\x96\xa6i\xdb\xf1\x1c\xeb\xa7\xb5o\x0eo`\xec\x88!\x93\x80a\xc8\xae\xd1\xd4\
\x9e\x82m\x10\xa6Dp\x96\x11C\xa6\xb2\x1e\x1c\x161d\x92\xd8}\x0b\xde\x88\x98\
\x13\x8c\x03\xd7D\xc4\x90I\xe2\x9a\x88\x18\x127Z\xf5\xa3\xb3\x8c\x18\x12k\
\x13mmv\xf7\xb4H5\xe3@\x08\xd1&!E])\xc7\xba\\jm\x85\x1f\xcd$9\xe3)\xd6K\xd5\
\xa1e\xea\x97\x9f\x10\x90L\\\x87\x10\xfb\x83k\x99l\x9e~\xe1\x1c\'n]\xda5\x11\
C\xc6\xc1\xae\x88\x182\x0e\xb4\x86\x8d\xaa\xae\xfba\xab\x89E\x0c\t\x8b\x85\
\x93\'O\xee\x18S\x12\x9b\xa6\r\xcf\xf3\xfc\x1a\xf4\xbc\xd9\x0c\xe1\xd3\xc0\
\xff\x03n\xdf\xb3\xebt\x8f~\xda\x00\x00\x00\x00IEND\xaeB`\x82'
def getscanprogress09Bitmap():
return wxBitmapFromImage(getscanprogress09Image())
def getscanprogress09Image():
stream = cStringIO.StringIO(getscanprogress09Data())
return wxImageFromStream(stream)
index.append('scanprogress09')
catalog['scanprogress09'] = ImageClass()
catalog['scanprogress09'].getData = getscanprogress09Data
catalog['scanprogress09'].getImage = getscanprogress09Image
catalog['scanprogress09'].getBitmap = getscanprogress09Bitmap
#----------------------------------------------------------------------
def getscanprogress10Data():
    """Return the raw PNG byte string for the 'scanprogress10' image.

    Generated by wx.tools.img2py; the embedded data must not be edited by
    hand — one changed byte corrupts the image.
    """
    return \
'\x89PNG\r\n\x1a\n\x00\x00\x00\rIHDR\x00\x00\x008\x00\x00\x01,\x08\x06\x00\
\x00\x00\x96\xb6p\'\x00\x00\x00\x04sBIT\x08\x08\x08\x08|\x08d\x88\x00\x00\
\x1arIDATx\x9c\xed\x9dm\x8c$\xc7y\xdf\x7f{Grk\xf7V\xb7u$E\xd61\xd2]S\xa1\xc8\
&\xa4H#\x05VF\xa6`M\x12 \x19\xc3\x084\x02\x0ci\x1c;\xf1 \x08\xe0\x15\x83$\
\xa3\xc0\x867N\x80\x0c\x82|\xd88A\xb0F>d\x83(\xc8\xe4\x83\x9dab\xd8\xeb \x82\
\xc6\x89m\x8d\x10%\x1a\xcb\xb4\xd8V\xc2pH\xf1\xa5O"\x8f}\x8c\x98\xab#\xefnk\
\x8fw\xb7\xf9\xd03==\xef\xdd35\xb3{\xa7\xfd\x13\xc7\x9d\xe9\xe9\xae\xae\x7fW\
u\xd5S\xcfL=KO=\xf5\xd4\x01w1\xee\x01PJ\x1dv=\xe6\x82 \x08B\x82\x00\xbb\xbb\
\xbb3\x17(\x04l\x96\xf3H!\xc7\x9e\xb7\x8f\xe1_\xfd\xbb\x06\xfe\x05=\xf3=G\
\xa1P(\x00t\t\xda\x801P\xd9\xaa\xdb,rfX%\x18\x87R\x92\\\xd6A\xa9\xf1\xad9\t\
\xbe\xaf\xd9\xad{S_?\x17\x82\x9b\xe5<\x1b\xa5\x9c\xd5\xf2\xca\x95\x1a\xcd\
\xa6\x9f\xfa\xda\xa1\x04+\x95\xca\xd4\x95\xc9\xba\xe0\xbab\xea\xeb\x87A)Img\
\x83\xdd\x86A\x8fym\x87\xd5\xdbj\x0b\n\xd1K\xae\xd1h%\xea^\x9d\x01\xa1\x1fR\
\x82\xeb\x08D\xbb\xc8\xac\x0b\xf5f\xba:Y%\xe8\xc4f\x9b@C\xa9\\Mt]&;\x9c \x01\
\x04\x81!\x9f\x0b\x19*%\x10\xecaXJ\\\xa7\x13\x89\xcfL\x00\x19\x1bO|\xdfX)3\
\xd0\xf4tK)\x93\x93\x03\xcb\x04\xe3o\x9e\xb1\xc3\xaf]\xd6\xf4\x85\xcdm\x9a\
\x88\xa3\xb2\x99\xc7uz\xa5\xa5\xed\x9d\x06M\xcf\x9f\xfb\xbd\x17B\xb0T\xcc\r\
\x1c\xfb|\xa0\x17B\xd0j\x17M\x83e\xecN%\xa3ph\x04\x17\x85\xbb\x9e\xa0\xb5w\
\xb0R\xa9\xb0])\xe28\x19\xa0wu\xf2\xb5\xaf{<\xf9\xa4\xd3=\xd9\x18\xea\x8d\
\xe9\xe5\xcb\xce\xfd\x92\xc0\xea \x93\xc9\xb8=\xdf]\xd7\xa5\xd5j\xf1\xa5/\
\xd7l\xde&B\x9cd\xa5R\x89\xee\x17\xc7\xdc\xba\xa8Rr\xe0f\xd3B\x88\xf1\x03R\
\x87\xe8\xb0\xfbY%\x18\xc4$\x8eb1\x8b\x94\xb3\x8f\x94J-\xf5HH\xbao\xce\x9f\
\xd4U\xadv\xd1\xf2\xe66\x8d\xdd2B\x08\x1c%\xa9\xd7\xca\xec\xd6\xbd\x89\x92H_\
\xcf\x8e \x04\xb8\xcer\xf4]\xeb^\t)\xc9{\xb8\xf4\xd4SO\x1d(\xa5\xac\xa8,\x00\
6J96\xcby+e\xf5\xa3\xf4t\x95\xc6\xffL\xd6\xed\x0b\x85B\xafN\xc6\x16v\xaa\r\
\x84\x80\xf2\x86=\x92\xc6\x18\xb6\xb6\xeb\x89\xc9\xc51\x17Qm{\xa7A\xb5\xd6$\
\x9bqp\x1c\xc5\x841b,\x82@So\xb4\xd0\xfd/_B\xccM\x16\xd5\xdaPo\xb4\x00;#\xe9\
\xb48V:M\x83\xa3\xa4t\xb2>\xd1\xdb&\x07ao\xa8n\x97p\xdd\xf4\x1ax\xab-\xa8\
\x94\xec!\x97T\xe94\n\x8e#)\x15\xb3H)\x11BP)\xe7)nTS\x95a\x95`!\xdf\x9d\xb1=\
\xcfO\xact\x1a\x87f\xd3\xa7V\xdd\x00 \x9bu\x91R\xa4\x1aQ\xadv\xd1Gcj\x89YZ.\
\x8e\xa6\xe7\xe3\xfbA\xf4\xddq\xd2uS\xab\x04\xe3\xab\xf4i\xe7\xada\x08\x82\
\xe9\x8d4?\x9aJ\xa7iU\xf7\x99L\xf7s\xa1P\x88\xba\xe9$\xa5\xd3\xa4\xfb9N\xf7\
\xf3F\xa9\xc4\xa8\x06\x1dV\xce\xb1\xd2\xe9N\xc7]Op!\x83\xcc<\x94NI\xb1\x10\
\x82\xf3R:%\xc1\xdc\xba\xa8\x05uL\x84IJ\xa7q\x98\x9b\xd2\xc9u\xc5B\x94N\x93`\
\xb5\x8b\xfa\x01d\xdb\x9f\x85`\xeeJ\xa7$\xb0\xeeF\xe2\xb5\x0c\x19\xb7c\x91\
\x95V\x97N\xcd\xd6~\xeak\xac\xaa\xee;(o\xe4\xac*\x9d\x00\x9a\x9e!\xe8\xca\
\xdc\x87\xa3\xba\x8f\xdf4\x93\xabP,\x84\x1dv\xd2\x18\x91\xcb\xe5F\xfe\xa6\
\xcd\x12Ap0\xb4k&Q\xdd\xcfe\x9a\xe8\xdcx\xa7\xdaHt\xbeT\xb91\xbfN\xf6\x15\\\
\x98\xea>~\xb3Ea\xa1\xaa\xfbZ\xf5_\xd3\xacoN\xd4\xa4i\xad)\xff\xcan"E\xae\
\x94\x90\xcb\x84\xbe2\xda@\xa3i\xa2\xee\x9a\xe4aZ].\xe5\xf3O$R\x13J)\xd9\xf8\
\xeb\xd9\x88\xe0\xb8\xfbUwJH\x19\xce#J@\xe07\xd8\xdei$\xae\xd3BD\xb5a\xc8f]\
\n\xf9\x0c\xfe\x98\xd5\xba\xebHr\xd9\x11\x93dB\x1c\x1aA\x80\xed\xad\xe2\xdc\
\xefq\xc7-\x97\x0eU\x92I\x8bFJ\xa3J\xcb\x0f\xa8\xd6\x1a\xa9\xeeqh\x04[\xad7\
\xad\xe8M\'\xe1\xd0\xba\xa8\xd6W\x16r\x9f;\xee\x1dL\x0b\xab\x04\xd3x\x05\xda\
\xf4F\x1c\x07\xab\xef`\xb5\xd6D)\x998\xac`\x118\x0e+\x98\x16G\xda\xc2;\xeb\
\x8a \xe3\x12\xad\xeam`\xabR\xa4\xd14#U\xf6\x1d,Du\x9f\xb5L\x0e\xc2\x05s>\'z\
\x94OIq\x1cV\x90\x06\xc7a\x05S\xe08\xac`\x02~4-\xbc\xb6q\x1cV0G\xdc\xf5\xab\
\x89\x85x\xdd\xcf\x03G\xca\xeb~^\x16\xdeCS\xdd\x03 \x96\xa2\x9b\xcd\xdb\xc2\
\xbbP\xd5}\x07J\xae[+k\x9c\x85wRW\xb5J\xb0\xb6\xdb]\x0b."\xac`\xe1^\xf7J\xc9\
(\xac\x00B\x1f\xb3$\x16\xdeQ8\xab$\x9f\xcbg\xa2\xf2Z~@\xbe\xb0\x9d\xe8\xda\
\xb9x\xdd\x07\x81f{\xa7\x11\x85\x15\xd8\xb6\xf0n\xfdjzm\xc1qX\xc148\x0e+X \
\x8eE\xb54p\x9d\xb3Tw~\xde\xaa\x857\xe3*\xb6*_D\xc8\x15t\xa0\xd9\xd8\xac\xa5\
\xf2\x00\xb6\xda\x82i-\xbcIP.\xe7q\xdd\xb38J\x92\xc98\x14\x0b\x99\xc9\x17\
\xc5pl\xe1\x9d\'\x8e-\xbcCpl\xe1\xed\x83]\xaf\xfb\x14\xafK\xdc\xc2\x9b\xf4~\
\xcaq\xd9\xdc\xcc\x8d\xfc\xfdHy\xdd\x1f[x-\xc1\xb2\x85w>\xe7\xce\x02\xab\x83\
L\xcb\x07)L$\\{\xdepI\xe5\x8e\xb5\xf0\x024c\x9c*\x95\xc3\xf3\xb6\xef\xe0\xae\
\x7f\x07\xad\xb6\xa0l\x1b*;]\xb4T\xd8\x1az\x9e-w\xca$\xb0\xda\x82\x8e3\xd9}\
\x19\xd2\t\xdbY7$)\x04(\t\xae\x93\xaeNs\x97d\x84x\x08\x80\x13\xe2\x1c\xb7n]\
\xe1\xe4\xc9P\x9d\x98\xcb\xe7\xf9\xad\xff\x94\xe3\xd9\xef<\xcb\xa7\xb3\x19\
\xae\xbd\xaby\xe7m\xbf\xa7u\xa4\x0c\x8d\x9e\xb3\xc0\xaa\xea\xbe\xb2\xf9y\xb2\
\x99\xbf\x02\xe2\x93hc\xd0z\x15\xado\xe3\x07\x971{\xa0\xf7O\x81\xb9\x89a\x1f\
\xcc\xbb\xc0:\xac\xffE\xbe\xdeZB\xad\x9fGJ\x17G\t\x1c%\x90r\x8f\x83k\xaf\xb2\
\xff\xee\xf7F\xdeoa\x04]\xd7\xa5Z\xad\xa1>\xe8\xb0\xeb\xed\xa1u\x80\xd6{\x08\
\xb1\x821\xe1_ $\x06`n\x87\x7fn\xb5\xad\xb5\xd7\x0e\xf0\xaf\\\xc2\x1c\x00\
\x1e\x91%UII&\x93\'\xe3\xc0*?\xe4\xba~\x16F\xe8E\xad\xab\xeeOK\xc9\x87\x1c\
\x17\xa9\x1e\xc5\x0f.\xe3\x07\x1a\xdf\xe8\xa8\x02\x1dR\x1db\xe2\xf6M8qO\xf8\
\xf7\xbe\xf6\x0f7\xc2!\xc0\xec\x87\xc4\xc5\xbd\x02\xeem\xffv\xd3\x10h\xcdn]\
\xb3\x0b\xc8\xf7\t\xb2\x1f\xc9\xa1>\x04\xa7u\xaf\'\xc28\xd5}j\x82J)\x1c\xc7\
\xc1\xa0\x08\xb4\xa1\xe5\xbd\x86!\x1c\x04:\x7f\x05W\x11\xf2\x01\xe4\xf2-\xc4\
\xca\x1a\x82[ \xc2\x10\x1dA\xf8Wk\xc3\xef\xff\x0f\x9f\x93\'\xafq\xeb\xd6)n\
\xbdw\x9d\xeb\xb7O\xf0\xceUX^\x15\xdcs\x8f\xe0\x94\x90\xd1\xa0e\xde3\xd4\x9b\
-@\x90\xcdd\xa9\xd5v\xb9\x18\xf8\xfc\xbdryl}\x13\x13\x14b\x15\xd7}\x1c\x84\
\xc2\xd7`\xf6.w\x9f<\xc0\xcdk\xa8\xb5\xf7p\x1c\x89Xz0<~_\xa7\xf8{\x87\x94\
\x07\x0f<\xb0\x02tZ:\xd6\xe2f\x0fc\xdeF\xbf\xf3\x06W/\x9dde]\x85d\xdb}\xb7c\
\x19\xcef>N\xad\xb6\xcb\xc6F\t=b_\xceD\xcb\xa5l6Ki\xa3\x8cFa4\x98\xbd\xbd\
\xb0\x92\xcb\xcb\xe8K/u+\xfd\xe0:r\xb9w\xd4\x8b\xf5\xd4^\xec\x83\x10\xbdA\
\x1f\xc6,\xb5\xc9\xaf \xc4\nR\x86d\xaf]3\xbc\xfb\xce\xf3\xbccNq\xea\xb4\xe2\
\xa4\xe8\x10\xbd\x84\x00v\xfeC\x83o\xd5\xab\xfc\xda\xaf\r\xaa\xf5\'\xb6\xe0\
\xd6\xd6\xaf\x92\xcd}\x0e\xdf\xbf\x8c\xd1{\xe1@\x00\\\xbbr\x81w\xb5\x8e\xde5\
\xc1\x01""4|h\x17\xa2+d\x0bLhs0\xa6\xfd\xcd\xf4\x126`Xj\x93\x85\x07NI\x0c\'\
\t\xdez\x85\xabWN\xb2\xf6\xb0\xd39\x8d\xfa7Z\xa8\xb3\x9f\xa5Z\xcdR*\xf5\xaaA\
F\x12t\x1c\x87\x9d\xafT\xd1\xefI\xbc\x17.\x86\x15\x17\x80~\x9b x\xbd\xfd\x84\
\x05\xf1\xd0\x9b@\x1bd`\x10b\xb2\xa8\xe1\xfb\xba\xcb\x1a\xa2\xee\x17\xd6\xda\
\xb4\x8f\x9b.Y\xb1\x86\x00\x9cs\x12\x0c\xbc\xa5/\x12\xbcs\x82\xd3\x0f9\x80\
\x89\xfci\xea\xbf\xdf\xa4\xf2\x0f\xba\xef\xe5P\xeb\x92\xe38T*[ \x9d\xf0^\x80\
\xd6\xaf\xa3\xf5\xdb\xed\xca\x1cD\x8d\xd4\xa9^\xbc~3M\xcdB\xf4Z\xa3\x0c\x98\
\xd8\xdc\xd0\xe9\xc6\xe1\xe7=^\xbe\xa0\xdb\xad)\xb8e\x0c\'\x85 \x97q\xd8\xad\
m\r\xb7.\x15\x8bE~i\xb3\xc2\xf7\xde4\x98}\x83\xe0\x1a\xc6\xfc_\xae\x19\xd3Gl\
8)\xa5\xe4\xa0\xc12\xd6\x18\x08X2\x867\x03\x13\x12\x89\xff\x16\xd6\x1aG)\xb4\
\xd1\x18\x1d\xfe\xae\xda\x06Bc h\x0f&\xc6\x84\xdd\xf7\xa3\xe7o\xf1\x96\xbe\
\xc8\xe5\x9b\xab\xac\xaf)\x0c\xa6\xc7=\xa5\x87\xa0\xeb\xba\x14Ke\xfeWKG5\xd6\
\xfa2FkN\n\xda\r%\xfaZ\xaf\x8bG\x1d\xc9\x13\xfd\x9bi\x8c\x10N\xd7\xd75\xde\
\x8bA\xc8\xaesJ\xf8:\x12\x04\x01\x8e\xa3\x90\x8e\xc4\x18\xdd\xa3L\xf6\xbc\
\x00?\x08\xa2\xf7\xd5\xb0\xc6Cj\x8d\xd3f\x8f\xe0\xff\xf9\x88\xfb\x9d\x1eq/\
\x12\xb6s\xb9\x1c\xd5\xea\x7f\x04\x04b]\x80\xb9F\xe0{h}ah\xab\rT[\xc0\xb2\
\x10\xed\x890\xf6o\x04\xe4\x99\xf0\xad\x13\xed\xff\xc2\xa7\xd7-\xdc\x0f\x02\
\xb4\xd6(\xe5\xc0A\xb7\x9c\xb0\xd8\xee\x1b\xdb!*\xc4-\x9cG\xee\xc5\xbc\xf5<"\
\xd6%\xa2\x16\xdc\xd8(\xd3\xf2\xafG\xf7\xd0\xfa\x07\xed\x0bO\x85\x85E\x83A/\
\xa9\x11t\'B\x08A6\xeb\x8c\xb5\xfe\n!\xe0\xc0\xc0R\xfb\x01\x1c\x98n\xf7\x17\
\x02aL[\xb88\xc0\x985\x00\x9c\x87\xc0\x7f\xeb\x95A\x82B:\xe1`b4o\xf8>\'1=\
\x03\xc8\xb0\x16\xeb|\x98vP\x11B$\n!7{m\x92K"\xdc\x1d\x88 <.\x04\xe1\x80m@\
\x1c\x84\x03\x90X\xc39\x07\xde\xc5\xf0\xda\xa8\x8bv\xe67\x1d\xfc\xb0KN\x0c!\
\':\xe4\xba\x9dk\xde\x10+\x84-I\xb8\x96\xfcT6\xd3\xed\xa6"\xacg\xf8\xe7 \x1c\
\x08Y\x8b\xae\xed\x0e2\xfb\x06?haL\x97\xdc<Z-\x8e\xa6\xe7\xa7P\xaf\t\x1ew\
\x1dV\x85`U\x08\xb2\xd9\x0c^\xab\x85\x0et\xf8\xc6u\xba\xac\x00a\xbasswE\x7fp\
5\x019{\xad\xa6\xb5\xc1\xe8P\xfd\x90\xec\x9f\xe1\xbb}Z\xba\x8c\xeb\xe2<\xe6\
\xc4\x9e\xfb\xe0\x08\x18\x11\x0c.\xbd\xde\x1e%\xc5\x98.y\xb80\x0c.\x89\x9c\
\x0f\xa8\x1e\x92\xfd\xeft\xb7\x05\xdb\x93\xee\xa8Q\xd2&\xb9\xa9\x07%\xc2\x96\
\xef_98\x1fP(\xa5\x86\x92\xec\x12\x1c&}\xb4?$\xadP\xbf\xa5h\xd4u\x06F.o\x92\
\xa0\xe5\xfb\x03\xc7\\\xd7A\xc8\xc1\xba\xf6H2\xd1\xa8\x14\xab^\x9a\xa7\xad\
\xb5\xa1\xd9^\x94\xf6\xca_\x83\x98Iuo\xa0\xe5\xff\x00\xd7\xf9`\xcf\xe1\x8f\
\xb9.\xdfnz=u\xee\xce\x83\xd1\xff\xc2\x0f\xd3v#\xd3\x99\x97\xe6\x0c\x1d\xbc\
\xcdu\xf5~Vc=o\xb5=\xad\xc5o? l\xdbx\xdb\x84\x00\xe7\xbcB\x9d\xe9\x8a_}\xe2\
\xe6\xc0\xe7\xce\xf7~\x18c\xf0}=\xd4\xb3\xe2\xa5\x96\xdf\xe3\xa3\xaa\xf5\x8d\
\x81B\xfa\x08\xceNNJ\xc1\xc7\x9fpXZ\xe9}\x98\xb1\xc5D\xcf\xdd\x0c\xe3;\xb4\
\x10\x02\xd7U(%\xf0\xbc\xa0\xe77c\x0c\x9e\xd7B\xc8S`\xae\x85c@\x1f\x85.\xc1a\
\xe2\xd8\x14p]\xc5R\xac\xa7\x0c\x935\xfb\x8f\x8c#\xd7\x81\x94\x12\xc71\xdd\
\x85r\xe7Zc0\x81\x89\x95#z^\xf0\xd8;8;\xbd\xb3\xaa+[\x1a\xc2\r\x1cg\xdd\x02\
\xd0}B\xa1\xce\x86\xcb%G\xc9\x01\x82\x83\xe8mE\xab\xb6\x89\xf5u\x19\x95\x1d\
\xbc\xa9\xad\xeco\xd8z1\xd6-\'\x08\xe7\xc3\xc6}\xbb\xa1=K\x07\xd1;\xa5\xaf\
\xd8\x1bI\xe3\x0fj\xd2\xe2c\xec<8+\xcc\xc1\xd2\xd0\x01#\\\x16\xf5\x1eK\xd7\
\xba\xfd\xe3\xed\xf83\xe3\xe7.\xc4O&\x9buz\xbe\x0bB\x9f\x97\xc9\xefSz\xf4?\
\xe0\xb9\x10\xec\x7f\xd6\xc3\x9e\xbd\x14\xcbC\x8e\xda\x87u\x13\xb6a\xf4\xb0\
\x1f\x9f\x13\xcdA\xba8\xc04\x18*\xaa\xcd\x13\xfd\x93\xbc \x1c\x90\x16\x81\
\xd8jb\xfe7\xebQ\x7f\xce\xb1\x05\xe3\xe8\xb6\xe0\x1c\xe5c\xcf\xf3\x07\xe6\
\xafY\x96Ki`\xad\x8b\x8e{>\xe1\x940\xcb\x13\x9c\xbe{Y#(\x00s\xa3\xfb]\xae\
\x0b\xfc\xa0k<\x99\\G1\xfc\x9b\x80\xf5\xe5#@\x10@_\xd6p>\x94\x1b\xd5Y\x89\
\xbb\xaf\xc2\xb9nH\xfd\xcc\xc07\xd3\xf3C\xa8\xd0\x15\xb8\xe7\x15KK\xe1\x13:\
\xd8K\xbf\xbd\xb5]\x82\xda`\xf4\x1eB\x866C\xc7Q\xa97\xfe\x1e\x87KC\xde\xdbI\
\x9d\xc3\xfa<\xe8\xb5.\xcc\x14\x16>\n\xc6\x18\xfc\x0b\xc1\xc0\xf1\xf8\xe2\
\xb9\xfbo\xc8r\xc9^E\xc2Q\xd39\xaf\x90+\xcbQkN_\x9eA_\x0e\xc9\x8d{nq\xa2q\
\xcce\xa27\xa6o\x99\xb3\x00\x8c\xea\xaaG\xc6\xdb\xb0_\xb5\x91\x16\xa2\xefo\
\x07\xf3\x11\xb6\xdbJ\'y&\x99\xf5h\x1c\xc6)\x9d\x86\x9e\x1f\xd6 \xfan\x9d\
\xa0\x90\x82\xac\xab&\xafL\x93\x94\x050F\xe94\xea\x9a\xa1\xcb\xa5f\xb3\x81\
\x94\n!\xdf\xc7iq\x92\x13t\x16\xa9q\x13\xda\xe4Jg\xfa\xc8M;\xa2\nB\xfb_\x07\
\x1d\xa5S\xab\xd5!\xd9Q4\tn\x1b\xcd\rs\x03\xad\xaf\xa3u\x80\xef_@\xb4\x97c\
\x11\xc1\xdf\xa8\xedr\xff\x19\x89\\\x0f\xbb\xd5=\xf7J\xd6N-!\xc4r\xd8\xcd\
\xee[A\x9c8\x80\x13m\xaam\x7f\xb3\xe5\x98\x94\xb1\xba*\xc0\xb4@,\x819\xe0\
\xc5\x0b\x86k\t\xf8-\xdd\x1e}\xd2\x993\xb2\xed\x10\x04W\xae\x18^{\xe3\x12\
\xdc\xd8\x0b\xa5\xa6\xdb\x06c\xf6\xdb\x9eQ\xa1\xcdBkM\x10\x04\x83\xc9\xbe?\
\xe4(\x8c1\x04m\xdbx\xfc\xc9G\xef\xd1\x92 \xbeN\xed\x7f\xbf:\x16[\xd1v\x05\
\x99\xa6\xf5\xfa\xaf1\xa6c\xb66\x91\x19\xad\xff<!B\xcb\xaf\x94\x92l6\x8b1\
\xdd\xdd\x9c#\x82\x99L\x86b\xb1\x88\xef\xfb\x04A\xc0\xe5\xcb\x9a+Wt\x9bl\xf7\
)\xc5\x0b\x1fV\x99i\x89\xc5+;\xec\xc1\xf5\xff\x16:"\xad#\x84`}]r\xf6\xec#8\
\xce9\x1ew]>\x99\xc9\x0c\xb6`\xbd^ggg\xa7\xa7\xb2{\xc6pEk\xae\x1b\xc3\r\x13\
\xfa\xcd\x98=\xd3Cp\x18\xd9\xeb}\x04o\xc4\xa5\xf06\xee\xeb\xf4\xf16\xe26\x86\
\xe1\x84\x04bE \x96\x05\xf7\xb5\xad\xbc\xebRrf\xc2NV\x11\xc1 \xe8\x1d\xa1:\
\x85O*\xe0\xa8\xa3g\xa2\xff?\x962>\x1e%\xf4\x10\xfc\xf1l\xf6\xb0\xea17\x1c\
\x19Qm^8&x\xa7\xe3\x98\xe0\x9d\x8e\xae#\xd0\x0c\xf9\xc5\x8e2\xba\xcexf\x9f\
\xd7\x86\xf8\x9f\xdc\xe9\x88\x086\xbc\x17XB\xdcu\x93}D\xb0\xbc\xf1Wix-\x1eQ\
\x0eA\xb0\x98\x08\xe9yag\xa7\x1a}\x8e\x08\xea\xe0"\x95/\x7f\x89\xc6s>R\xae\
\xdf\xb1$w\xaa5dL~\xee\x19E[/<\xc7/\xfe\xed\x12\xf5\x86\x87R\xeb\xf8\xdf_\
\xacflV\xd4j\xbb\xb8\x1fv\xc9f?\x1d\x1d\x1b\x98&^\x7f\xd9\xa3\xfct\x91\xed\
\xaf|\x15\xf5\x90\xe4\xf2\x82\xac@\xb3\xa2V\xdb\xc5y\xcc\xc5\xfd\xd3\x8f\x12\
\x04\x97\xa2\xe3\'\xcf\x9d;WY[[\xeb\xf1\xc3\xbc\xfa\xae\xe6\x9b\x7f\xf0\x9f\
\xb9~\xdfc|\xf4#\x8f!\xd7\x04\x8do6q\xce}\xe00\xea>\x16\xd5Z\x8d\xe0\xf5\x1f\
\x92\xf9d\x86\xb5\x15\xc9\xb3/\xfblo\xff\x0b\xee\xe1=\xae^\xbd:\x9c \xc0\xcd\
\x9b7\xf9\xd6\xd7w\xb9z\xfbA\x1e|\xffY>\xf3\xa9\x8f\x12\x04WX[;l\xb7\xd8.vw\
\xeb|\xfc\xcf~\x9cG?\xfc$W\xf7\x0c\xbfY{\x86_\xf9\xc5\xbf\xc3\xf3\xde\xb7p]\
\x97\xabW\xaf\x8eW\x1b\xaaG\x1c\xf2\x7f!\x87\xef_\xc6{\xe1"\xc5\x9f\xfcD\xa8\
\x9a\xff\x93&\xd9?wxK\xabj\xadF\xc6\xfd$\xb9\\\x0ec\xf6i=\xdf\xa2\xf6\xebUj\
\xbfQ\x1dP\x97\x8c%\x98\xfbK\x05\xe4\xfd\x92\xcc\xe3\xa1\x85\xe8\x97\xb6v\
\xf9\xa9\xbf\x9c!\xfb\x89\x0cZ\x1bZ/\xb5\xf0_\xf5)\x16G\xec\xb2l\x11A\x10\
\xf60c\x04\xf9|!tEy\xd9\xa7\xfe{\r\xea_\xad\xe1\xfdQs\xe8u#\t\n!(\x14\x8a\
\x98\xeb\xa1j\xce\x0f4\xff\xb2R\xa0\xdel\xf1\xf7\xffI\x95\x9f\xffB\x1e\xf7\
\xdc\xa3\xb8\x1fsy\xbe\xe5\xf3\x03\xdf\'\x9f\xcf\xcd\x85\xd8\xf2\x81`Y\x9cbi\
\xe5\xfd\xa0\xf7\t\xfc\xef\xd3\xf4^\xa2\xfao\xff\xf9Hb\x1d\x8c$X\xa9\xfc\xe3\
\xc8\xb6W\xdez\x86\xcc\xe3\x7f\n\xd7}\x18\xd7q\xd0\x9f\x81\xff\xdah\xd2\xb8o\
\x9d\xdc\x8f?\x89z(T\xd7im8\xc0\xc0\x9en\x1b-A*w\xd4-\x06\xd0\x8d\x80\x0b\
\x95\xce\xe7\x1c\x87\xd5U\x898-\t\xb4\xc6\xfbC\x8f\xef|\xa3N\xf3\xdbM\xbe\
\xfb\xc7\xcdD\xda\xbb\xa1\x04?\xf5T\x8e\\\xfe\x0bH!\x08:\xfb\xbe\x9c8E\xb1\
\xb4M>\x9f\xa5T\xc8br.\xbb_{\x8e\xf2/\xfc\x0c\x0f>\xf83?\xfd\xb3\x7f\x93L\
\xe6\xcf\xe0(\x89X\xe9\xee\x86wYk8x7\xfcbn\x01\xb0\xb4d\xd0{\xe0\xfdI\x0b\
\xd1N\xdd \xd6\x04\x8e\xe3 \xefW\\\xbbn8%eh\xa5z\xd5\xa7\xf5\xf2\x1f\xf3_\
\x9e\xf97<\xfbl\x13\xb3\x97n\xda\x1aJ\xf0o\xfc\xb5\x12\x10\x06\xe9\x07\xfa\n\
\x95\xf2\x17\xa8>\xd3\x00a\xc8\xb8\n)\x05^+\xa0\xfe;_\xa1\xf5\xc2s\x00|\xf3\
\xbf\xd7)o\xe4(\xfe\xcc\xd3\x04{g\x80\xf7#V\x05\xea\xb4@\xc8\xf7!\x85\x803a\
\xf9\x02\x81\x92\x86\xdc\x19\'t\xef\xd4\xfbh\xb3\x87\xd6\xfb4\xbf\x13\xf0\
\xfc\xf7<~\xf0J\x8b\xd6\xf3M\x827\x02\x82\x8b~*Rc\t\x16\n\x05\xb2\xb9\x1c hz\
>\x816\xb8b\x99\xfcg]\xdc\'\xc2\xbd\n\x03m\xf0\xdf\xd4\x04o\xbc2P\xa0Z\x7f\
\x15\xd5\xde;\xd5\x18\xd8\xdb{\x90@\xdf\xcb\x05\xb3\xce\x9e\x11 N\x02\xef\
\xc3\xec]\xa5\xf5\xe2+\xf8\xdf\x7f\x1d\xffU\x0f\xff\xc5pC\x00\xff5\xbb\xc2\
\xfe\x00\xc1\xd2\xc6\xdf\x02\x03\xc1;\x9a_\xdfm\xf2\xc0\x19\xc9\xef\xfe^\x93\
\xf55I\xb1\xf0\xe9\xb6\x8cz\x85\xe67\xbe:\xf0\x827\x9b>lt\xbf\x87>\xe2?\xe4\
\xcc\n\xc0\x9b=\xe7n\xef\xd4S\xa5\xf8\x9a\x16=\x04766\xc8\xb8\x1f\xc3\x98\
\x03Z\xcf\xbd\x06\x84\x91_\x97\xaf\xc2g?s\x9el\xe6<\x8do\xfb\x04\x17\xdf\xa0\
\xba\xf3\xcf\x06\nkz>\x99\\%\x91MpQ\x0b\xec\x88\xa0\xe38\x14\x7fn\x03m\x0e\
\x10\x02\xfc\xef_\xe2#\xaeC\xd3{\x89\xd5\x13\x86\\\xe6I\xb41\x88\xfb\x96\xa8\
V\xb7Gz*\xcd\xee\xf4c\x17\xb1\x00\xc9\xa7\x91\x0f\xcah\xe8-~.Ku\xb7\x891\xb7\
)\xe6\x7f\x0c\xa5$~\xa0\xf1\x9a\xdf\xa2\xd9\xf8oc\x0bUJR.\xe5\xc8\xe5\\\x0b)\
Q\x02\xaa\xb5&\xd5\xda\xf8\xf9n\x14"\x82\xca\xf9\t \x0c\x02\xde\xf9J\x1d\xf7\
\xc3\x0e\xf9\x9c\x0b\xc6%\xe3\x9e\xc5\x0f\x82N\xc8;\x9b\x9b\x9b#\x0b\x94\x12\
\xf2\xd9A\x0f\xdfi\xe18\x8a\xcaf\x81\x8dR~b2\x8da;\x94D\x04\xf5\xde\x1e\xd5g\
\x9a\xa8\x07\xee\xc5y\xcc\xc1{\xc1\xa3\xf9GM\xb6\xfe\xe1\x970\xe6\x80\xe0-\
\x838a\xd0\x81?\xf6&\x9d\xdd{:\x18\xb79c\x07j\x84\x81\'^\x8eR\x82\x8ck\x18\
\xb1\x0f\xd6HD\x04\x7f\xfbw~\x97\xfd\x9b\x06\xb9&Q\x8e\x8b\x94\x8f\xf0\xc1\
\x0f\xde&\x9b9O\xa0\rB/\xf1\x9dfcla\x8e\xeaM\xaa\x91/n\xc7L\xce\xa31no\x98l&\
L\x8b\x02\xe1_\xaf5\xa5+\xd7\xbe\t\xad\xba\xc1U\x8d1\x1a\xb1\xa6\xf8n\xa0\
\xf1Z\x01B@\xe0\xbf\xc0;\x13\x16\xbf\xf1\xd7\xad\xe5\x9bD\xe4&\xa1\xe9u\xb79\
\x12"\xdc7*\x8d\xbbZw\x9a\xb8\xd9\x1d\xfd\xb4\x0e@\x07\x885\xc5/Wv\xf8\xf3?\
\xf1cp\xc3\x9fXX\xbcK\x05\x16\xb5\x1dA`\xa2\xad\x8f\xe2;@$AD\xd0\xdc\xec\xfd\
A\xdc#\x10ka@\xab\xef\x7f\x0f\xe7\xfe\xe9\x87~\xa5\xe4\xc0h\xea- \x99\x06\
\xf4L\xf4\xbd\x04\x96\xd7\x14\x02\xc1\xea\xaa@\xad\xce6\xaf\xd5k\xe5\x81m\
\xe1\x17%\xc9\x0c\xb5M\x08!Yi\xf77u\xfa\xf6\xccC\xfe\xb0=\xef\x1de\xcf\xcdr\
\x1c\x86\x12\\n/a\xd6V\xe0\xb4\x18t \xb8\x930@\xb0\xd3zk+\xf0\xd8\xc3V<\xb2\
\x0e\x15\x03\x01\x92\xa2=\x91\xa5%7.%J\xd7\x99\xa7\x0bmf\x13\xb6\xa7\xdaW-\
\xdc\'"l\xbdi0*%J\xb1\xb43\xe0\xda\xdch\xce\xbe\xeeK\xb5\xaf\x9a\x102R\x1f|\
\xe0\xe1{\x11\xe2\xbd\x99n.D7\xa8\xdfk\x05x\x16&\xfdQH\x94\x12\xa5\xd3\x85\
\xfcV\x139%\xb9\xf8d\xf2\x89\x8c3U\x19\xc3\x10\xdf\xdf\xb0\x7f\xc2J\x9c\x12E\
\x08\x89\xd6\x01-\xaf>uE\xb6\xb7k\xd1\xe7/\x16\xb2\x947r3\rRB@.{\xb2\xe7X\\Z\
L\x95\x12\xc5\x88\x0c~\xab9\x13A\x80z\xed\xef\xe2\xbagg*c\x14j\xb5&\x9b[\xc9\
R\xb7tR\xa2D-\xb8g\x0c\xfe\x8b\xd3-*\xe3\xd8\xf8\xe5\x7f?\x17u\x84\xef\x07T\
\xb6\xd3?\xfc\x88\xe0+\xff\xbb\x9eZ\xe78\xb4"\x174\x85\xd2\x0e\xd5\xda\x1f\
\xd2\xf2g\x1fX\x82@S\xad5\xc8\x17w\xa6r\xd3\x8cF\xd1\xd7_\x9e=\x93U\xbcR\x95\
\xad\xdf\xb6V\xde,\xb8\xeb\xfdd\xac{\xdd\x17\x0b\x99\x04\x19C\x0c\xb5\xddf\
\xe2\x1dI\xf29\x17!\x04\x81\xd6\xa1\xee5\x05\xac\x12,\x15\xb3T6\x93\x99\xd2\
\\G%\x1a\x117\xcb\xf9\x9e\xdcM\x9b\x95\x1a\xb5\xdd\xe4\xaf\x93\xe5$\xa7\xc9\
\'\xbd\\.\x99\xd5)\x9f\xefM\x81\x92V\ryh\xf9&:i\xc4\xc6M)\x8e\xa3f\xd6\xab\
\x1ejB\r\xdb\xf1\x85\xc3pG\x8d\xa2Z\x9b\xd4\x89Q\x0f\xad\x05\x8d1\x14J;\xa9B\
V\xa7\x91\x90\x0e\x8d\xa0\xe7\xf9V\xf4\xa6\x93pGu\xd1ip\xd7\x13\xb4\x9a\xb5\
\'\xcd\x88\xde\x88I$\xd3\xde\xaf\x1fc\xadK6\x10h\xa8\xed\xee\x85\xe1u\xc0\
\xf6\xf6\xe8t\x96\x0b\xb7\xf0\xda\x82a)\xd2+\x1c\x05?\xf0\xe3\x8c!i\xf0#\x99\
1d\x14\xe2\xe9\xf9F\rNG*c\xc848N\xcf7\x0c)\xd52G"=_\xbf\xca\x7f\x14\x02\x1df\
\xe7J\x83C#8Mz\xbei`5kOy#G\xc6\xcd\':\x7f\xd6\xf4|\x0b\xcd\xda\xd3A.\x97\xb3\
Y\xdcD$\xb1.\xdd\x15\xe9\xf9\xacf\xed\x19\x87\x96\x0f\xadV\x03)\xc6K\xdd6\
\xd3\xf3M\xea\xaaV\t\xces\xb0\x98\xf6~V\x97K\xd3\xe2\x8e\x18E\xe18\x17v\x84\
\xe3\\\xd8m\x1c\xe7\xc2N\x80;N\xd8>\xce\x85\xdd\x87#!l\xcf\x13\xc7\xb9\xb0\
\xeftX\x16\xb6\x93\xbfOwd.\xecj\xad\x89Rr\xa1\xc2\xf6$\xd8\xdd\n\xde@ek6O)\
\xdb\x98\xdb(\xaa\x94$\x97u,\x84\xf6hv\xeb\xd3\xfb\xf0\xcc\x85`\xbfg\x84\x8d\
\xf2\xca\x95Zj\x17\x12\x98\xc3r)\xeb\x82\xeb\xda\xf5\x83VJR\xdb\xd9`\xb7a\
\x18\x17\x9b2w\xeb\x92\x10\xbd\xe4\x1a\x8dV\xa2\xee\xd5\xd9\xa6\xaf\x1fR\x86\
\xe1<\x1ds@\xd6eb\x80V?\xac\x12\x8c;L\x04\x9a\xc4\x92J&;\xc2y(\x08\xa3^\xf2\
\xb9v\x88\x83\x12\x08\xf6B\x0bVBXv\x04\xea~\xf6};\x13]\';k\xf7\x1e\xe9\xf6\
\xc8\xb7\x9b\x12%\xf6\xd9\xe6D>\xcbn\x97\x0b\x11\xb6+\x9by\xdc>\x87\x9f\xed\
\x9dFj\x9f\x97i\xb0\x10\x82\xa5bn\xe0\xd8\xe7\x03\xbd\x10\x82\x87&l//(\x9b\
\xe1\xf1j\xe2N\x87U\xeb\xd2\xa8\xd8\xa5\xaf}\xdd\xe3\xc9\'\x9d\xee\xc9\xc6Po\
\xcc\xe6#~(\xd6\xa5Q\xb1K_\xfarm\xc4\x15\xb3a\xe1\xd6\xa58\x94\x92C\xad=\xd3\
`\xd2\xd6\x11\x89b\x97l \xae\xc3-\x16\xb3\xa9\\\x9cGA\xa9\xa5\x1e\t\xa9_\t\
\xb7P\xebRys\x9b\xc6n\x19!\x04\x8e\x92\xd4kev\xeb\xdeDId\x94\x89^\x08p\x9d\
\xee\xce\xe5Z\xf7JH\xa9b\x97\xe2\x83\xc2,\xd8(\xe5\xd8,\'3c\xa7E\xe9\xe9j"\
\x83\rtc\x97\xacK2;\xd5\x06B@y\xc3\x1eIc\x0c[\xdb\xf5\xc4\xe4\xe2\x98\x8b\
\xa8\xb6\xbd\xd3\xa0Zk\x92\xcd\x84;\x08\xcd\x12b\x17\x04\x9azJ\rx\x1cs\x93E\
\xb56\xd4\x1b-\xe0p\xf7+=V:M\x83\xa3\xa4t\xb2>\xd1\xdb&\x07ao\xa8n\x97p\xdd\
\xf4A$V[P)\xd9C.\xa9\xd2i\x14\x1cGR*f\xc3\xf0w!\xa8\x94\xf3\x147\xaa\xa9\xca\
\xb0J\xb0\x90\xef\xce\xd8\x9e\xe7[1\x8f5\x9b>\xb5j\xb8\x97Y6\xeb"\xa5H5\xa2Z\
\xed\xa2\x8f\xc6\xd4\x12\xb3\xb4\\\x1cM\xcf\xc7\x8f\x85\xca\xa6\x8du\xb2J0\
\xbeJ\xb7\x91d\xb8\x83Y\x9c\xdb\x8f\x95N6p\xact\x9a#\xeez\x9d\xcc]O\xd0\xaa\
\xf9,\x13s#+\x14\n\xd1T1I\xe94\xe9~N\xec\xd2\x8dR\x89Q\x83\xea\xdc\xcdg\xa30\
/\xa5S\x12\xcc\xad\x8bZP\xc7D\x98%\x87\xe1\xdc\x94N\xae+\x16\xa2t\x9a\x04\
\xab]\xd4\x0f\xa0\xe3\x05*\x04sW:%\x81u7\x12\xafe\xc8\xb8\x1d\x8b\xac\xb4\
\xbatj\xb6\xf6\'\x9f\xd4\x879\xe4\xe1%\xf5\x16\x99I\xd0\xf4\x0cA\x90>\xc5\
\xfb\\FQ\xaf\x15n\xbf\xd9\xa8\xd7\x12)\x9d\xc6\x05\x94h\xb3D\x10\x1cLm1\x9e\
\xdb4a\x0c\x89\x95NR\xe5\xc6\xfc\x9a\xbe\xd5\xe2\xf8\xd1\x94d\xa6A\xa5R\x19\
\x08+\x98g\x0c\xef\xa1\x98\xcf\xb6\xb6~!QHj\'\xac \t\xc1N\x0c/\x80\x12a\x0co\
g#\xe3T[\xff-\x1a6cx\x17\x16\x9c\x95\x166\xc2\n\x12o\xfdg\x03\x8dF\xc3fq\xc3\
\x91\xd2|v\xa8\xc1Y\xd3\x86\x15\xa4\x99\x13\x8f\xc3\n\xe6\x85\xe3\xb0\x02K8\
\x0e+H\x83\xe3\xb0\x82C\xc0\\F\xd1#\x991\xc4\x16\\WQ\xdb)\xf5dq\x9c\x05\x9d\
\x8c!\xb9\x9cK)\xa5m\x10\xe6@pg\xab\xd8C.I\xc6\x90QP\xb2\x9b\x8f>\x97u)o\xe4\
R\'\x00\xb0J0\x9fs#\xfb]g\xe7\xbbY7\x87\xfb\xa7\x95\x02_,\x84\xaa\xacR1\x9b\
\x9a\xa0\xd5i"\x13\xdb[\xbb\xb6\xdb\xb4\xb2\xf3\xdd?\xda\xda\x8d\xa6\x1f)%\
\xae\x93n\x07h\xab\x04\xe3\xc9i<\xcf\xce\xb6~\xc6\xf4&\xdf\x10ry\xf4\xc9C\
\xb0\x10Y\xf4\x88d\x0c\x99\x1f\x0e3c\xc8B\xacK\x932\x86\xcc\xd3\xbat\xd7\x0b\
\xdb\x91\xbf\xe8\xdd\x88\xc8_4\xb0\x99$\xe9\x88\xe1\xff\x032\xf0D(\x08\xb8#\
\x97\x00\x00\x00\x00IEND\xaeB`\x82'
def getscanprogress10Bitmap():
    """Return a wxBitmap built from the embedded 'scanprogress10' image."""
    image = getscanprogress10Image()
    return wxBitmapFromImage(image)
def getscanprogress10Image():
    """Return a wxImage decoded from the embedded 'scanprogress10' PNG bytes."""
    # Wrap the raw byte string in a file-like object so wx can stream-decode it.
    return wxImageFromStream(cStringIO.StringIO(getscanprogress10Data()))
# Register the 'scanprogress10' accessors in the module-wide catalog and index.
catalog['scanprogress10'] = ImageClass()
catalog['scanprogress10'].getBitmap = getscanprogress10Bitmap
catalog['scanprogress10'].getImage = getscanprogress10Image
catalog['scanprogress10'].getData = getscanprogress10Data
index.append('scanprogress10')
#----------------------------------------------------------------------
def getscanprogress11Data():
return \
'\x89PNG\r\n\x1a\n\x00\x00\x00\rIHDR\x00\x00\x008\x00\x00\x01,\x08\x06\x00\
\x00\x00\x96\xb6p\'\x00\x00\x00\x04sBIT\x08\x08\x08\x08|\x08d\x88\x00\x00\
\x1b\x13IDATx\x9c\xed\x9d{\x8c$\xc7}\xdf?{w\xbc\xad\xbd[\xdd\xd5\x1d%^\x1d-\
\x91MK\x14\x9b\x8a \x8d\x0cH\x1c\xeb\x04i\x92?\xec1\xfc\x87VH \x8d\x9d\x87\
\x16A\x00\xaf\x12$X\x02q\xbcN\x80`\x10\x04\xc8\xcar\xe2E\xfc\x876\x80\x03O\
\x800\x1e\x05\x01\xbc\xfe\xc3\xd0X\x96\xe1Q\x1c[c96\xc7\xb2\xe9k\x92\xa2\xd8\
\x94\xc8\xbb:\x86\xe4\xd6\xdekkOw\xb7\xf9\xa3\xa7{z\xde\xdd3=\xfb8\xec\x17\
\xb8\x9b\x9d\xee\x9e\xaa\xfav\xbd~\xf5{T\xcd\\\xbati\x97\x07\x18\'\x00\x94R\
\xfb]\x8e\xa9@k\x1d\x10\x04\xd8\xd8\xd8\x988A\xd7\xb9He\xfd\x8b(%\x87>g\x8ca\
\xf9_oP\xff\x13od\x9a9W\xb1Z\xfe\x02B\xcea\xb4ai\xa5\x8a\xd6f\xe4\xef\x16\
\x16\x16\x008\x96\xac\xe8\xc9P,>5\x92\x1c\x80\x94\x92\xa5\x7f\x94O\x94\xe6\
\xf2r\x11\xd7\xbd\x88\xa3$\xb9\x9cCi!\x97\xaaL\'F?2\x1d\xe4\xf3.\x0b\xc5\x1c\
\xfe\x90\xdap\x1dI!\xefN\x94\xcf\xbe\x11\x04X[-M=\x8fL\x9b\xe8^\xc0\xdat\xcf\
\xefk\r\xd6\xeb\x1e\xc6$/\xb1\xe7k*\xd5z\xaa<\xf6\x8d\xa0\xe7]eq\xb92\xf5|\
\xfa\x12,\x97\xcbc%\x96K1\x1e\x18\xb35q~\xdd\xe8\x97\xce\xa1\xeb\x83i\x91)\
\xc14\x03@\xda\xc1b\\d\xda\x07=\x1f\xa4\xb0\x08\x11|o6\xfbK*;X\xbe\xfa\x9b\
\xf5,\xb3\x1e\x88\xcc\x07\x99F\x8cS\xb9\\\xcd:\xf9\xd4\x98\xda(*\x04\x94\x16\
r\x89D\xb7a\x83\x931\xe0\xeb\xf1\xcb1\x15\x829\x17r\xae\xa0T\xccFR\xb1\x16\
\xea\rK\x02\x19\xbb\x07\x99\x8f\xa2\xf9\x16\xb9,!\x04\x14\x0b\x029\xba1\xf4 \
\xd3\x1a\x14\x02\xdc\x18\xb9z\xddc\xa3\xd6\x1c\xf9\xbbpi\xd3\r)\xc1uD4h\xe5]\
\xa85\xd2\x95)S\x82Nl\xdd\xac\r\x89%\x95\\\xbe?A4hm)\x16\x02\x86J\t\x04\xdbX\
f\x12\x97)3\x82\xe5r\x99/\x97\x17\xc8\xe7\x82u\xde\xfa\xfa\xe4\x0bh\x08^\x94\
1D\xcdS\xca\x19\xb4I.\xfddZ\x83\xcf\xe4\xda\x8bQc,\xae\xeb\xe2y\xa3W\xed\xa3\
`\xad\x05z\xfbu\x9cd\xb9\\\xee\x9b\xdf\xf4\x84m1\x13eV^)\xe2:\x9dz\x9f\xb5\
\xf5:\x8d\xa6\x9fIV!\xd1~/sz\x04m[Y\xb7X*\xf4\xdc\xfe\x9c6\x99\x10\x1c\xd5T3\
\x9d&\x06\x89f\xfd0\xdb\xa7\xc9\xa5E\x92~\x98\xe9ri\\\x8c\xca\xcf]_D\xa9@\
\xdcY\xafTh\xa6\xa8\xf9\x07~\xb9\xb4\'+\xfa\xaf\xffa\x93\xa7\x9fv\xda\x17\
\xac\xa5V\x1f-\x00d\x81=!\xf8\xa5g\xf7oU1\xb5&\x9ad\x15\xb1\x17ieJ\xd0\xf3\
\xfd\xe8\xefR)\x8f\x94\x93\x8f\x94\x85O\xba8\xb19\xd4l\xa6[Rd\xdaD7j\x1e\xcb\
K\x16!\x04\x8e\x92\xd4\xaa\xcbl\xd4\x9a-I$=.*\xc9g\x8bm\xe9\xc8\xf35\xfek\
\xfbHPk\xc3\xdaz\x9d\x95\xe5"\x104\xad\xa5\xc5Bf\xe9\xaf\xfeJ-\xf5o2\x1fd\
\xd6+u\x84\x80\xe5\xa5bfiZkY]\xab%\xb2Fuc*\xa3\xe8\xdaz\x9dJ\xb5A>\xe7\xe08*\
Z\xcf\x8d\x03\xad\r\xb5\x94\x1a\xf08\xa66M\x18c\xa9\xd5=`\xf2\xd5\xc4$\x98\
\x1aA\xa5$\x85\xbc3\xf1t\xe1\xfb&\x91V`\x10\xa6"\x8b\xe62\xd6\xcb\xac\x96K\
\x89\x94N{\xa2\xba?R:\xf5\xc1\x91\xd2i\x02\xa5S\xa6M4\xde\x84|?\x1b\xebJ\xa8\
tj\xe7\x91\x9c\x1cdL0\xde\xf3\xb2\xb4\x1e\x8d+\xea\xc1\x1e-\x97\xa6\xadt\x1a\
\x86=!8M\xa5\xd3(\xec\x9b\xca"\x0b\xa5S\x12\x1c\xe9d\x92\xa2\\.\xb3V.\xe18\
\xc1\xfa-\x0b\xdf\xb7Q\xf9%A\xa6}0\xd7e\xc9\x0cU\xe9\xd3R:\x1d\x18\xd5\xfd\
\xb4\x95N\xc3T\xf7\xd3S:\xc9\xb3\x99\xa5%\x86,(\xf7Tu_\xddh\xab\x14\xb2R:)5\
\xd3!!\xc5\xd7\xbdI\xfa\xe1\xcc\xa5K\x97v\x95R\x99\x0c\nJI\xea\x1b\xcb\xd1\
\x1b\xd7\xdad\xa2t\n\xd3\xf3|Mqa-\xd1o\x17\x16\x16:=~\xb3\xc0\x91\xd2iL\x1c)\
\x9d\x86\xe0\x81W:\x1d\x89jip\x90\xc2\nB\x1c\x85\x15L\x0bGa\x05\x19\xe1\xd0\
\r2Ga\x05]\xd87\xaf\xfbxXA\xd2\xfc\x94\xe3\xb2\xb2R\x18x\xff@y\xdd\xc7\xc3\n\
\xa6\x89C\xd7\x07\xd3\xe2(\xac \r\x8e\xc2\n\xf6\x01Ga\x05\xe3\xe0(\xac %\x8e\
,\xbcC\x90\xa9\xea~i1\x0f\xc5\xa0\xb0\xcd\xa6?U\x0b\xef\xbe\xa8\xee\x17\x16\
\xda\x8a\xa6\x8dZ3\x13\xaf\xfbAa\x05\x90Lu?5\x0b\xaf\xb1;\x99\x84\x14\xc0h\
\x0b\xef\xbe{\xddO\xd3\xc2;\xaa\xa9fJ\xb0\xd9\xf4"\xf3Y\x1c\xd3\xb2\xf0\x1eh\
\xaf\xfb\xb8\x85\xf7\xc8\xeb~\x02<\xf0\x04\x8f\xc2\n\xb2\xc0QX\xc1\x14\xd3:\
\n+H\x83\xa3\xb0\x82\tqd\xe1\xed\x83#\x0b\xef\xb88(\x16\xde\xa9\xca\xa2I\xd3\
\x99f~\x99\xd6\xa0l\xe9N\xc2&\xb9\xb8\xb0\xda\xf7\xb94\x16^)\xa1\x90\x0b\xd2\
4-\xe5S\x9aA9\xd3y\xd0qH\xd4\xdf\xd2Xx\xf3n@R\x08P\x12\\\']\x99\x0e\x84\x85w\
\x90\xa0"e\xa0\x87\x99\x04G\x16\xde\x03\x87\x83f\xe1\x15\xe2\x11\x00\x8e\x89\
\xc7\xb8wo\x8b\xe3\xc7\x037\xcb\xfb\xe2a\xfc\x17_\xc6l\xfe\r\xf2\xdc\x87\x98\
=y\x9d\x9d\xad\xd7\x87\x0e \xda\x04\x06\x9e4\xc8\\\xf1+\xc4# ~\x02c-\xc6\x9c\
\xc2\x98\xfb\xf8z\x13\xbb\rf\xe74\xd8\xbbXv\xc0\xde\x00\x1e\x02>\n\xe2\x18\
\xea\xec\xfb\x91s\x1fFJ\x81\x92\xc7\xb0wn\xe3\x7f\xdfC\xeb7\xb1\xf6\xf6\xd8e\
\xca\x84\xa0\xeb\xba8\xce\x870\xb7n\xb3\xd1\xdc\xc6\x18\x8d1\xdb\x081\x87\
\xb5\xc1\'\x10\x10\x03\xb0\xf7\x83\x8f{\xad(\x96[\xbb\xf8[\xd7\x02E\\l\xe3\
\x11%\x15\xb9|\x0e%\x8e\xa1\xf5\xf7i6\xd3/\x92\xc7&xFJ~\xdcq\x91\xea\t|\xbdI\
\xadq%(X\xab\x89\x85\xa4 &\xee\xdf\x85c\'\x82\xcf\x93`\xef\x1c\xc3\xde0\x88\
9\x81\xdd\x051\x03\xe2!\x11T*\xc0]\x8b6\x86\x8dZ\xb0z\x90\xef\x12\xe4sE\xd4y\
\xf8\x8b\xbfhp\xdd$[U\xa4&\xa8\x94\xc2q\x1c,\nm,^\xf3\xd5\xe0\xa5\x0b\xa2O\
\xc1M\x84|\x189{\x0f17\x8f\xe0\x1e\x88\xd9\x808\xc1\xa7\xd6\x96?\xfes\x03\
\xdb\xb7x\xe8\xdei\xee\xfc\xe86\xb7\xef\x1f\xe3\xfaM\x98=%8qBpZ\xc8h^\xb5?\
\xb2\xd4\x1a\x1e \xc8\xe7\xf2<\xaa4\xe5\xf2\xcaH\xe5rb\x82B\x9c\xc2u?\x08B\
\xe1\x1b\xb0\xdb\x9b\xd8\x9d\x9dV\xa1\x81\xbb\xb7x\xe8\xee\r\x94\x14\x08)\
\x80\xad\xa0B\xc3A\xc3v|\xa0\xb5\xe1\xe1\x87\xe7\x80\xb0\xa6c5n\xb7\xb1\xf6m\
\xcc\xf57\xb8y\xed8sgU@\xb6\xd5vC}\xear\xf9\xab|\xc2=\xc77j\xdf`e\xe5_\x8d\
\xefu\xaf\x94\xc2y2\x87\xbe\x01\xd6\x80\xdd\xde\x0e\x88\xcd\xceb\xae\xbd\x14\
{\xf2&\xbe\x9e\x07\xbd\x85`\xb7\xdf&>]/\xad\xf3\xbb\xb53\xad\xebs\x081\x87\
\x94\x01\xd9[\xb7,7\xae\xbf\xc0u{\x9a\xd3g\x14\xc7EH\xf4\x1a\xcd\xe65\n\x9f\
\xf9)\xfe\xc5\xb3\xef\xf0\x9f\x7f\xadW4\x1cI0\xff\xa9\x02b\xfe"\xbe\xbf\x19\
\xbc\xd9\x96F\xfe\xd6\xd6k\xdc0&\xeak\x01\xa1\xd3@[e/b\xffG\x7f\xc6\xa7\x81\
\xa8\x8a\x05\x16\x8b\x10\xb1\x1d\xb1-XfZd\xe1\xe1\xd3\x12\xcbq\xf4\x9b\xafps\
\xeb8\xf3\x17\x9c\xf01j\xdf\xf2P\x17\x9e\xa1R\xa9\xb2\xb8\xd8)<\x0c$xFJ>S(\
\xf2\xf2U\x8b\xbe|\x051\xd7*\x90y\x1b\xad_o\xbda\xd1AHJA\xee)\x07ynx\xd5Y\
\x0b\x9e\xa7\xd1\xc6D\xd5(\xe2/\xc2\xda\xd6u\x8b\x94"\xb2kX\x0bM\xcf\xc7\x9a\
[\xbci\xae\xa0\xaf\x1f\xe3\xcc#\x0e`#+T\xed\x0f\x1a\x94\xff\xcdr\x94T_I\xa6R\
\xa9p^*\xfe\xca7\x98-\x8b\x9c\x9b\xc3\x9a\xb7\xd1\xfeK\x18\xf36R\x88\xe8m\
\x8bV\xe1\x84\x10<\xa1\xe4Pr\xe1\x1d)\xc0y\\\xb6~\xdb\xf5/\x98LA\x04\x7f\xbb\
\x8eBJ\x19\xcc\x8f*\xd0\xf5 \xe6yD=\xcc\x07\x1f\x13\xdc\xbc\xd6\xees\xb7\xac\
\xa5\xfa\xfb>K\xcbm\xcf\xfc\x9e\x1a,\x95J\xfc\xe2J\x99\x97\xafZ\xec\x8eEp\
\x0bk\xff\x1f\xb7\xac\xed\xe8W\x02\xd1\xd3\xfavc\x17\x06\xd1\x0c\xaf\xabs\
\x02\xa5dk\x10\n\'\xbf\xa0\xfdJ\xa5\xa2\xeb\xa1\xca\xd0\xda0c\x81\x10\x06k\
\x83\xe6\xfb\xe1\xc7\xef\xf1\xa6\xb9\xc2\xe6\xddS\x9c\x9dWXl\x87Q\xa7\x83\
\xa0\xeb\xba\x94\x16\x97\xf9+\xcfD%1f\x13k\x0c\xc7[\xaf8N\xac\x1f\x89\x9ekC\
\xd6O\xb9\x9c\xc3\xee6\xcc\xcc\xf4\xcagv7\xfc\x9d\xedH&\xbc\x1a\xb6 \xcb<\
\x8f\xa8y\xce\xd8m\xf4;>\xe2\xbc\xd3!\xeeEM\xb4P(P\xa9\xfcO@ \xce\n\xb0\xb7\
\xd0~\x13c^\xeb[k\xbdDZ\x85\x88\x9a\x98\x18J.\xc4\xcc\\\xff\xdf\x88\x88\xb4\
\x84\xdd\xcet\x84\x10=D\x85\xb8\x87\xf3\xe8C\xd87_@\xc4F\xb2\xa8\x06\x97\x96\
\x96\xf1\xfc\xdbQ\x99\x8c\xf9a\xeb\x87\xa7\x83\xc4\xa2\xc1\xa0\x93\xd4\x00\
\xba\x89`\xf4&vwX\xd0\xb1E\xcc\t\x84\x90\xb0\x0b\xfa\x9an\xfbk\x0b\x81\xb0\
\xb6%\\\xecb\xed<\x00\xce#\xe0\xbf\xf9J/A!\x9d`\x90\xb4\x867|\x9f\xe3\xd8\
\xf60/\xfa\xd7X\xf8\xc78\xf4\xac\xd9\xa6\xe9]\x1bB\xad\xfd\x97\xfb\x84\x83\
\xba\xa0P\xca\x81\x19\x81w\xd9\x0f\xee\x08\x81\x08\xfb\xb0\xd8\r\xe6Q1\x8f\
\xf3\x184\xaf\x04\xbf\x8e\x9ah8\xbf\x19\xfdV\x9b\x9c\xe8C.\x1c\xee\x82\xb1s\
\xec@\xd5H\xf0\x1e\x82p\x84\xf6_\xd5\x98\x96\xec\xa9.(>\x91\xcf\xb5\x9bi\xab\
[\x04\x1f\xbb\xc1@\xc8|\x94F{\x9a\xd8\xb1\xf8~\x13k\xdf\x8a\xc8E\\\xe29\x12\
\x92\x9b.\xba\xd3\xf7<\x9f\xdb\xad\xd1\xe3\x94\x10\xe4\xf39\xa4\x92\xb1"\xb5\
\t\x8b\xd8\xdc\xdc&\xb8{\x13km\x07\xb9\xde\x1c\'\xab\xb5I\xf1\xdd.\xef\xc5\
\x9c\xeb\xe2|\xc0\xe9 \x19|\xb6\x9f\x89\xfa\xa0\xbe\xf6z4J\x8e[k\xbe6\x88\
\xd9d\xf4\xd3\x1aQ \xe8\x97\x9e\xe7\xe1\xbant\xcdyo \xe5\xf8\xdf\xf3[\x03\
\x8e\xe80\xf6\xb4\xe7\xc1\x96x4Q\x93\xb4\xe0\xbd\xa8S\x17<)\x04\x81\xc6\xdc\
\x18\x83\x8c9\xae9\xefU\xd8\x9b\x16\xaduD2D\x9b`\xf7\x9c5f\x7fSJ\x8e\x9c\xfe\
,\xc1r)\xa9\x02IJ\x11\xd5\x8c1\x16\xcf\xf7\xc9\xc7\xf62\x05p]\x07c\r\x18\xdb\
\x91l\x87$\x13\x8dJ\xedo\xa9\xc9\xb9\xae\xeaY4tC\x00\xfe\xacHT\xdb\x8e#;\x0c\
\xa0\x9e\x17\xcc\x85\x9e\xffC\\\xe7}\x1d\xcf~\xc4u\xf9N\xa3\xd9Q\xe6h\x90\
\xe9&\xd73\x82&@\xb4\xfa\xee\xf3[\x11\xfb\xb4\x04\xb2h\x12t\x9b\xaf#AD\xbf\
\x1d\x8d\xaa!N\x89\xde\t\xbbG\xd8\xcej\x8c\xecW\x83!\xb9p\xa4\xce\xe7\x9d.5\
\xa1\xedxR\x0814\x02\xfb%\xcf\xef\x88\xdd7\xe6NO\xc6]\x04\xb3\x9d\x00\xe2M5^\
\xf4\x10R\x08\xac\x88?\x17\xbf;\xba,\xd6Z\x9aM\x0f!O\x83\xbd\x15\xd8\x10\x07\
\xd6`?ql\x02t\xf7\xc3\xee\xb4\xe3\xcd8\xa9\xb2zw\x9b\x1eC\xa8\xb5\x16\xabm,\
\x1d\xd1\x11\xb3\xd0\x96E3\xae\xbd\xce\xc6\xd6y=\xcc\xa9\xd1\xf4c\xeb\xc1\
\x04i&z\xac\xb3\x163W\xdd\xf7#\x14~\x8a\xd8\xfd`>\xcb6:\xa4\xd5s;J\x90\xb9\
\xf1%N$\x9eqx\xaf\xdf\x08\x9b%\xba\xd3\x9ez\r\xee5\xba\x07\xabLk0M\x933\tU\
\xefi1\xd5\x1a4\xc6R\x8f\xc7\xf6td\xdb9n\x1e\xca\xe0,\x184\xd2u\xcf\x86\xd3E\
\xbc\x16\xa7b\x00\x15\x02\x9c\xc7\x15\xf2\\\xa7$\xd2o.\x1c\x05k-\xbeoR\xc5\
\xce\xc7\xd11\xd1g\xf1\x82\xa5\x14|\xf4)\'\xd0\x96ua\x90\x8c:T0\x17\x02\xd7U\
(%h6\xd3/\xc5b\xeb\xc1\xd4\xbf\xed\x0b\xd7U\xcc\xc4Xt{\x1avg3\x8a\\\x08)%\
\x8e\x13\xd4f\x1ad\xd6D-\x04\xa6\xb3V\xa1v\xb7\xe1O\xff\xd2\x9fh\xdb>\x00\
\xf7)\x85\xba\x18\xac(\x1c%\xf7\x8f\xa0\x80\x0e\x07\xd8\xd7\xae\xe9`\x01\n\t\
gw\xd1\xf7\x9b\xf7\xa2\xe6\x82\x94\x91\x82\xb8[%1\n\xd9:!\xc4\xf41;\xd6\xb6\
\xe5\xdb>\xe4z\x1bn\xa7\x854~\xff\x9a1\xa8\xb9\xa0\x16\x85\x184R\xf7\xc7\x9e\
8\x02\x05\x1a\xf9N\x96\x9dBAp/n\n\x8f\xdf\x113\x83O\xd2\x1d\xd58\xf6\x84`>\
\xe7\xf4\xe8||_\xf7\xf4\xa7\xeeU?\xe1\xa7\x98\xe9\xb8\xdf\xef7\x83\x86\xb2\
\xbdq\xe5\xea\xb3*\xef\xa7^\x8c\x1b\xd1\x06\xa9<\x06f\x11K#\x8e\x03\xe5\xca%\
\xba>\xbb\xef\x0c\xebz\x83\xee\x1d\x18\x82\xa3\x84\xb9Q\xe3\xca\xa0\x97\xb3o\
\x04\xed\x9d\xce\xef\x83k\x0f\xd2H!\xdd\xcb\xa5=\xe9\x83\xcd\xa6\x8f\x10\x82\
\x99XA\xaf\xf6YZe\xb1\x18\xee\x16\xfd\xa6Fp7\x96U0%\x8c\xae\x85\xc1\xe4\x86\
\xd0\x1e!\xccN\xad\x89\xca\xb3\x19)&\x04\x03C\x84\x92\xb8OgJpk\xab=\xaf\xa9\
\x8b\x12\xc7\x99,@K\x08\xc89\x17:\xae\x85\x02B$\x10t\xd5^wef\xdaD\xafj\xcb\
\xe3j\x1b!\x83\xb5\x92\xe3\xa8\x0e\xbbB\x12\x0c\xd3\x95\xea\xab\xc1\x0b\x14\
\xc3\x1e\xea\xba\x91y\x13mz\xd7\xd2\t\x8b1\x0c+\xb7\xb5\x16\xefE\xdd\xb6\x7f\
\x0c\xcdbJJ\xa7\xb0 \x8d\xa6\x8f\xbej\xc6^*\xc5\xbb\x96\xb5\x16}5\x88\xd8\
\x1eE\xae\xdf\xe5\xa9\x8c\xa2v\x1a\x86\xd0\x045\xd7\x1eP\xa7\xd8D\xa7\x85t\
\x1a\x95)O\xf4\xa1\xd2Iu)\x9d\xba-L\xfd,N=*\rk\xd1\xda$\xb6\x08\x0f\x9c\xe8\
\x17\x17K\xb8\xaeK\xa1P \x97\xcb\x0f\xb5\xcb\r\x83\x94\x82\\\xceI\xb4\xb4\
\xe9W+\xdd\xd7\xa4\x10H\xf7\x02R\xca\x81\x1b\x04\x18k\xf9_\xcfU9q\xdcbnY\xcc\
[\x86\xdcG\x03\xbbaG\rz\x9e\xd7\xf2b\xa8\xe3\xba.J)N\xceI\xce\x9f\x15\x08q\
\x8a\x93\xe2$\xc7\x84\x8c\xac\xc1\xa2\xa3\xd8\x01B\x13v<\xf38\xb1~6\xc38\xfa\
\x8f\xa0\x80\xdd\xe6\xcci\xcbw_y\x1b\xec\r\xde\xd9\xb2\xdc\xd96h\xad\xf1}\
\xbf\xc7#\xbf/\xc1\xc5\xc5E\n\x85\x02Z\xbfI\xb9\xfco\x11B\xb4|5edm\x15b\x0eq\
\xea,\xe2dk\x95~L N\x06\xbf?wNb\xed\x8b\x84\x8e8/\xbef\xb9\x95\xa0Y\xcd\xdc\
\xef|hg\xc7\x06\xc2\xf8}\x8b<+8u\xea\x04\x9b\x9b7\xb8\xaa\xb7\xd0Zc\xb6v0[\
\x9b\x98w\x82\x81\xec\xd9g\x97YXX\xe0\x8c\x94\x9c\x97\x92j\xf5kQZ}\xfdE\x01\
\x94z\x84z\xbd\x8e\xef\xfb\x18c\xf0\xfd@C\x16\x0e\xfd\xe1g\xd8\x94\xe3\x9f\
\xe1\xdfI\xa7\t\xcb1h\x05\x7ft\xa7\x1f\xa2\xfd\x82\x83\x97\xee<\xa6p>\x9d\
\xa7T*Q,\x16;\x9e]\\\\\x8cv\x19\x8a\xf6\xd9^_\xaf\xe08\xaa\xc7(\xe2\xfb~\xf4\
Ok\x1d\xbcAc0f\x0bk\xb7\xa3\xc2\x0c*\xd8\xa0k\xfd\xfax\xff\x975\x87\x94g\x91\
R\xa2\x94\x8a\xc2\x1a\xc2.\xd4\x0fB\x88\xde}\xb6\xad\xdd\xe1\xd5X\x1c|\x08\
\xc7qp\x1c\xa7oB\x81\xdfJ0\xa1\xdf\xb6\x96;6\xf0\x12\xb6\xdb\x83\xc9\x0e")\
\xe6\x04bVpR\x08N\t\xc1Y)\x99\x1b\xe1\x84\x90\x04\x11\xc1z\xf32\xc5|\x8e\xbf\
\xf1<>\x14s\x95\x1aU\xc0Ao\xf1\xa0 \x9a\xe8\x97\x97~\x9ez\xd3\xe3Q\xe5\xa0\
\xf5\xde\xec\x1c9-\xac\xafW\xa2\xbf#\x82F_\xa1\xfc\xec\x97\xa8?\xef#\xe5\xd9\
CKr\xbdR\xed\xf0c\xeb\x10\xd5\xbc\xcb\xcf\xf3/\xff\xf9"\xb5z\x13\xa5\xce\xb2\
9%+\xec\xb4P\xadn\xe0>\xe9\x92\xcf\xffdt\xadG\x16}\xfd{M\xbc\xe6\x1f\xb3\xfe\
[\xdffN\x08\xcc^\x99b\'D\xb5\xba\x81\xf3\x01\x17\xf7\xfdO\xa0u\xdbUz\x80\xb0\
}\x03\xf3\xda\xb7X\xff\xef\x7f\x10\xad\xa0/\xbf\xfa\xea^\x9435*\xd5*\xb5Z\
\x9d\xfc\'\xf3\xa8w+\x1a\xde\xab\x94W\xbf\x1c\xdd\x1f\xb9Q\x80\xf7\xd2*\x8b?\
\xff9\xf2\xb9\'\xd0z\x0b\xa5\xb2;\tdRll\xd4x\xe6\x99g8w\xee"\xc6\x186\xaa\
\xcf\xf1\x9f\xd6\xbe\xc2u\xa3\xa3\x89~\xe8jB=\xeaP\xfc;\x05|\x7f\x93\xe6\xe5\
+\x94~\xe6cA\xfc\xd0_6\xc8?\x93\xdf\x0b\x0e}Q\xa9V\xc9\xb9?A\xa1P\xc0\xda\
\x1d\xbc\x17<\xaa\xcfU\xa8\xfe\x8fJ\xcf\xdc;\x94`\xe1\xa7\x16\x90\xe7%\xb9\
\x0f\x06s\xdd/\xaen\xf0\xb3?\x9d#\xff\xb1\\\xe0\x98\xfa\x92\x87\xff}\x9fRi\
\xc0\xde\x84\x19B\xeb\xc0{\xe3\x1d\x03\xc5\xe2\x02\x02\xf0\xbe\xe7S\xfbf\x9d\
\xda\xefVi\xfeY\xa3\xef\xef\x06\x12\x0c\xc4\x9d\x12\xf6\xb6\xc5\xda\x1d|m\
\xf8\xf5\xf2\x02\xb5\x86\xc7/\xff\xfb\n_\xfc|\x11\xf7\xb1\'p?\xe2\xf2\x82\
\xe7\xf3C\xdf\xa7X,L\x85\xd8\xec\xae`V\x9cff\xe6=\x9c\x97;h\xff\x074\x9a/Q\
\xf9\xaf\xbf:\x90X\x88\x81\x04\xcb\xe5\x7f\x17i\xc4\x96W\xbfF\xee\x83?\x86\
\xeb^\xc0u\x1c\xcc\xa7\xe0\x1b\xf5\x06\xf5\x93g)|\xf2i\xd4#\x92|>\xcf\xa61\
\xe8\xcdM\xce\xcd\xedDK$\xa9\x92IE!\x99\xb8`\xb6;{\x9aY\xa1\x98A\xa0\x8d\xc1\
\xbb\xfc\x7f\xf9\x93\xff\xfdM\x1a\xdfi\xf0\xdd?o$\x12\xe6\xfb\x12\xfc\xc4\
\xa5\x02\x85\xe2\xe7\x91B\xa0\xc3\xfd\xb0\x8f\x9d\xa6\xb4\xb8F\xb1\x98gq!\
\x8f-\xb8l|\xfdy\x96\x7f\xe1\xe7x\xf7\xbb/\xf0\xf7\xfe\xfe?\xc1\xfd\xd0\x87q\
\x1f;\x8f\x10\xb3\x91\x82h\xd3\x18\xd8\xbd\x11|\xb1\xf7\x80 \x18kw\xf643;\
\xb7\xd8\x9d=\x1d\xe5;;{\x9a\xdd\x99w\xb1u\xdd\xc2I\x01[p\xdbj^z\xf5-*\xff\
\xe5?R\xff\xd6\xb7\xb1\xdb\x19\xd8\xe8\xff\xf1?\\\x04\x82}$\xb4\xd9\xa2\xbc\
\xfcy*_\xab\x83\xb0\xe4\\\x85\x94\x82\xa6\xa7\xa9\xfd\xceo\xe0]~\x1e\x80\xff\
\xf3G5\x84\x10\x14\x8a\x7f\x97\'\x9f\xfc8\x97.\xe5\x91\xe7%\xea\x8c@\xc8w!\
\x85\x80sA\xfa\xd1\xa2w\xe6=\xcc\x08\xb0f\x07c\xb71f\x07\xdf\xf7y\xe1\xe5&?|\
\xc5\xc3{\xa1\x81~C\xa3\xaf\xf8\xa9H\r%\xb8\xb0\xb0@\xbeP\x00D\xa0\xfe3\x16W\
\xccR\xfc\x8c\x8b\xfbT\xb0\xc3\x8f6\x16\xff\xaaA\xbf\xf1J\xc7o\xad\xb5\xd46\
\x9e\xa3\xc6s\xfc\xfaW\xe0\x8cT<z\xf1"\xea\xf1\xf7\xe2\xfcx\x8e\xf7\\\xfc1\
\x00\xde\xff>\x89\xb9\xf9\x10\xf6\xe6[\xf8?x\x1d\xff\xfbM\xfc\x17_\x06\xc0\
\x7f\xb5\x9f+\xd8\xf8\xe8!\xb8\xb8\xf4\xcf\xc0\x82\xbenxn\xa3\xc1\xc3\xe7$\
\xbf\xf7\xcd\x06g\xe7%\xa5\x85\x9fl\xc9\xa8[4\xbe\xf5\xbb};x\xff\x9d\x99o\
\xc0\xfd?\xe2Go\x04\xdf\x1a/X\xaa\x1b\xc9w\xeb*\x16\\\x84\x08\xfaa\xa3\xe1\
\x8fOpii\x89\x9c\xfb\x11\xac\xdd\xc5{>\x90\\\xec\xb6e\xf3&|\xe6S\x8f\x93\xcf\
=N\xfd;>\xfa\xca\x1bT\xd6\xbf\xd2\x93\xd8b)Oy%\xd9\x94\xe1:\x8a\x95\xd5\xd1\
\x87Y\xad,\x17;v\xf6Z)W\xa9n$\xdf\x11!\x12\xd5\x1c\xc7\xa1\xf4\x0f\x9606\x08\
c\xf5\x7fp\x8d\xbf\xe5:hs\x9dS\xc7,\x85\xdc\xd3\x81\x02\xe9\xe4\x0c\x95\xcaZ\
_w\xc84\x1b\xc5\x15\n\xc9F\xd7b\xb13\x00$\xed.y\xb1\x00\xc9\x7f\x8a|\xb7\x8c\
\x9aM\xe9\xb3y*\x1b\r\xac\xbdO\xa9\xf8q\x94\x92\xf8\xda\xd0l|\x9bF\xfd\xf7Se\
\xd2\x0f\xe11b\xc3\x9c\xec\x1cGM\xbc\x85`DP9\x9f\x06@\x1b\xc3\xfao\xd4p\x9ft\
(\x16\\\xb0.9\xf7"\xbe\xd6X\x0b\xd5\xea\xfa\xc4\xeeY!\xc6\xb1>\xa5E{\xc1\xbb\
\xbdM\xe5k\r\xbc\xbf~\x05\xe7\x03\x0e\xcd\xcbMV\x7f\xb5\x82\x94sX\xbb\x8b~\
\xd3\xe2y\r\x1a\x8d\xe1\x92\xc34a\x8cM\xbdmnT\x83\xbf\xfd;\xbf\xc7\xce]\x8b\
\x9c\x97(\xc7E\xcaGy\xdf\xfb\xee\x93\xcf=\x8e6\x16\xb4\xa5\xfc\xcb\xe5\xcc\n\
k\xadeaq=\x95\x1b\xf48>\xa3\x11\xc1\x1d\x1bh\xc7\xf4M\x83\xb5\x061\xaf\xf8\
\xae64\xbd\xc0&g\xb4\xcf\xe2\x17KC\x13Ks$J\xb3\xe9\xe3y\x81\xe2vo\xf6U\xbb\
\xdbv\x140F\x83\xd1\x88y\xc5/\x95\xd7\xf9\xdb\x9f\xfe8\xdc\xf13)\xc4^\xa3\
\x1d\xa4|\xb7\xf3\x868!\x10\xf3\x829!\xf0\xfd\x97\t\xb6(:|\x88\xa9,:\xfb\xc2\
\xec\xbcB 8uJ\xa0N%\xeb\':\x85\xcd\xb3\x9eR"\x19\x17}\x85m!\x02\xad2\x80:s?\
\xf1\xee\x92\xda@uc;\xf2\x0e\\[\x1b|\x9c\xe5\xb8N\xe6i\xd1\x97\xe0\xac\x08&\
\xd7\xf998#\xee\xf4{d ,3Qc\xd8+\x12\xc3\xd0\x1b \xd9\xaa\xbd\xf99\xf8\xc0\
\x85d\xce6\xbdi\x04#j\xa3\xb62\xb1$b\x0cx\xbeM\xbd\x9fZ\x88\x9e\x00I\xd1\xd2\
\n\x8fKNJ(\xe6\xc3\x1d*\'\xf7v\x92\x12\xf29\x81\xa3l\xea\xc34\xa0\x8b\xa0\
\x94\xc1\xc6P\xf3}b\x1e\x92"\xdc*3\xc4\xb0\xe3\xf7B\xa8\x01g\x9d\xc4\xd3QJ\
\x90s-\x03N:\x1a\x88v\x80\xa4\x90\xc1\xae\x1f\xc0{/<\x84\x10?J\x97\x12\xe0\
\xa8\xf6\xc1\x17\x00\xc5\xd2Z4\x99\x0f\xc3\xb0\x89>\x9f\x0b\x8eE\x81\xe0\xb3\
\xe9\xa5\x93\x83c\x04\x83D|\xafA\xdeM\xaf\xf3,\x97\xcb\xac,\x17)\xe4\x0b\x00\
T\xaa\xf5D\xe4F\xa1\xd1l\xef)*D\xb0]\x92\xb1\xc9\xa5\x9fv\x98\xb9\x90\x18\
\xa3\xf1\x9a\xb5\xb1\x0bS,\xb4_L\xb3\xa9;\xb6d\x98\x04Z\xb7k-\xdel\xcb\xe5r\
\xf4\x0f\xe8\x9b_G\x1f4\xda\xcf\xa4@AI\xda\xa7\x99+%{F\xd34gF\x8cB\xa2#Q\xb6\
\xad\xc5\x7f1\xc3\xa5P\xecH\x94Zu\xb9g\xb5\xbf\xb6^cm\xbd>q6\x89O3\x7f\xe5\
\xafk\xa9u\x8e\xdd\x18t\x98[?U\x86\x93\x81\xe9;\xd5\x91(\xaf\x7f\xaf\xad\xc8\
\xd9\x8b#Q\xe28:\x12e\x02\xec\t\xc1~:\x9c(2m\xca\xd8\x93\xb0\x82\xd2\xe2z\
\x8fr\xa9\x7f0s\xf6\x98\xea\xf9\x83!\x9a\x9e\xa6\x99\xc1\xa4?\x0e\xb2\x8d\
\xa3\x8f5\xbb\x8f\xe5\x9cL\xd2\x14"p\xcf\x8c\x90Re\x99)\xc1\xb8\xdd\xe0\x0b\
\x0by\x96\x97\nc\xadHB(%\xf9\xb5\xff\xb0\xd0\xe1\xdc\xe7\xa5<N2\xd3&Z\xab{x\
\xdeU\\\xf7"\x10\x1c\xaa\x91\xe5\xc1\x1a\x1b\x1b\xcd\xd4\x0e\xfd\x99\x8f\xa2\
K\xbf\xf4\xdf\xa6\xb2\x92\xf7}My-\xbd\x9c\x9c9A\xff5\xc3\xc2\xe2:\x95\xea\
\x9f\xa6nN\xfd\xa0\xb5\xa1R\xadS,\x8dg2\x98\xca(\xaa\xb5\xa1\xbc\xfa\xdb\xd3\
H:5\x1exI&\xf3\x1aLr\xf6\xae1\x96\xeaF#\xf1\x80\x91\x99\x85wR\x1ch\x0bo\x168\
\xd0\x16\xde8\xc6].\xa5\xb1.\xc5-\xbc\x83|\xc2\x83=\r;\xaf\x15\n\x05\xa4*\
\xf4}~\xea\xa7\xf6\xa4\xc5\x9eZx\x0f\x03\xacMg\xe0\x81}\xac\xc1\xb8\x85wyyy\
\xf4\x0f\x18/\xeer\xdf\x08\xc6-\xbc\xd3\xf4\x9a>TMt\x1c\x1c\x11L\x83Cc\xe1\
\x1d\x17\x87\xc6\xc2;\t\x0e\xbc\x85w\\\x94\xcbe\\\xe7"\x95\xf5/F\xe2\xd44\
\x8f\xe7K*meZ\x83\xab\xab\xbf\x90\xe8\xb4\xb9\xf0x\xbe$\x04\xc3\xe3\xf9\x00\
\x94\x08Li\xa1\x85 N\xb2\\.\x8f\x7fj\xcf4\x90\xe5\xf1|\x89\xacK\xfb\x81,\x8e\
\xe7Kl]\xca\x02\xf5z=\xcb\xe4\xfa#&\xf5\xa4\xb2.\xc5\xb1W\xd6\xa5z\xca\xe3g=\
_S\xa9\xd6\x0f\xde\xc6q\xfd\xe0yWY\\\xaeL=\x9f}\x13\xd5\x8c\xd9\x9b\x08\xd3#\
Y4\r\xd2(f\x0f\xe5\x1e\xbf\x95j\x03\xa5$R\x0cW\x0c\xed`\xf9\xeao\xd6\xb3\xcc\
z 2%h-\x94W\xc7\xf7\xb3\x99\x06\x1e\xf8>\x98i\rv\x0b\xdb\x83\x90F\xd8\xce\
\xb9\x8a\xd5\xf2\x17\x10r\x0e\xa3\rK+\xd5T\xab\x94Lk\xb0X|*\x91b6\xcdY\xd8\
\xcb\xcbE\\\xf7"\x8e\x92\xe4r\x0e\xa5\x85\xdc\xe8\x1f\xc5p \x84\xedAp\x1dI!?\
\x99\xbf\xdb\xa1\x17\xb6G\xe1\xd0\r2i\xe7\xcf}\xad\xc1q\x85\xed4x\xe0\x85\
\xed}\xb3.\xc5\x85\xedi\xc6.\x1d\xba>\x98\x16\x19\x0b\xdb\xd3yv\x12d\xda\x07\
=\x1f\xa4\xb0\x91\xd1r\x90\x83\xec\xa1\x15\xb6\x01\xe2N\x84\xe5r5\xeb\xe4Sc\
\xaa\xde\x86I<.`\xf8\xe0d\x0cL\xe2O4\x15\x829\x17r\xae\xa0T\xccFR\xb1-\x8d\
\xf68\x96\x80\xccG\xd1|\x8b\\\x96\x10\x02\x8a\x05\xc1\x80\x08\xa0\xa1\xc8\
\xf6\xbc\t\x01n\x8c\\\xbd\xee\xb1Q\x1b\xed\xd3\x12nO\xd4\r)\x83p\x9ep\xd0\
\xca\xbb\xa4\x0e\xd0\xca\x94`\xdcaB\x1b\x12K*\xb9\xfc\x00\xe7!\x1dD\xbd\x14\
\x0b\xad`M%\x10l\x07\x16\xac\x84\xc8\xd4\xba\xf4\xe5\xf2\x02\xf9\\\xb0\xce[_\
\x1f\xed\xc5\x94\x04\xe19\xf3a\xf3\x94r\x06m\xf6\xc9\xba\xf4L\xec\xe0Qcl_k\
\xcf8\x08\xb4u\xbd\xfdz\x7f\xadK\xb1\xd8\xa5\xf2J\x11\xb7\xcb\xe1gm\xbd\x9ez\
W\x83A\xd8\x1f\xebR,vi\xb1T\xe8\xb9\xfd9m2!\xb8\xa7\xd6\xa5A\xa2Y?\xccf\x10\
\xfez\xe0\xadKI\xf3;\x8a]\x1a\x82=Y\xd1\x7f\xfd\x0f\x9b<\xfd\xb4\xd3\xbe`-\
\xb5zr\xa7\xd6I\xb0\'\x04\xbf\xf4\xec\xfe\xad*\xa6\xd6D\'\xdd \xab\xb42%\
\xe8\xc5vy.\x95\xf2\xa9\\\x9c\x07\xa1\xf0I\xb7\xc3i\xd6l\xee\xd3\xe9u\x00\
\x1b5\x8f\xe5\xa5\xe0\x0ckGIj\xd5e6j\xc9\xf7O\xeb\xc6E%\xf9l\xccg\xdb\xf35\
\xfek\xfbHPk\xc3\xdaz\x9d\x95\xe5 ^I)\xd9\xe11?)V\x7f%\xbdi.\xf3Af\xbdRG\x08\
2\r\xca\xb2\xd6\xb2\xbaVKd\x8d\xea\xc6TF\xd1\xb5\xf5:\x95j\x83|.\xd8\x0bq\
\x92\x10;\xad\r\xb5\x94\x1a\xf08\xa66M\x18c\xa9\xd5=`oBY\x07aj\x04\x95\x92\
\x14\xf2\xce\xc4\xd3\x85\xef\x9bDZ\x81A\x98\n\xc1\xeep\x9c,\xd2[.WS\xc7-\xc1\
\x14&\xfa\xac\xc9A\xd0\x1a*k\x8b\xb8n\xfa \x92Lk\xb0{ZH\xaat\x1a\x04\xc7\x91\
,\x96\xf2\xd1\x89%\xe5\xe5"\xa5\xa5J\xaa42%\xb8Plkp\x9bM?\x13\xf3X\xa3\xe1S\
\xad,\x01\x81\xd9[J\x91jD\xcdt=\x98\x8f\xf9\x07X\x927\xa7Q\xf9\xc5\x95N\xab\
\xe5\x95\x81\n\xe0\xa9\x9b\xcf:\xceO\xca\xd0z4\xc9v\x9f{\xb2\\\x9a\xb6\xd2i\
\x18\xf6\x84\xe04\x95N\xa3\xb0o*\x8b,\x94NI\xf0\xc0\xebd\x1ex\x82GJ\xa7,\xf0\
@*\x9d2P\xc7D\x98\xe4\xf4\xacL\xcdgK\x8byV\x96\x03[\x9fT6\xb5X\xd5\x0fJ\xcdt\
Xv\xc3\xe4\xf6\xc5|\xe6\xb8m5\x85\xa3$\xf5\x8d\x95\xd6\x96\x0e\xc3I\x0erB\
\x10\x02\\g6\xfanL\xa7\x84\xb4\xe7\xe63k\xa1\xe9\xd9\xc8F/\xa5\xc8t\xe9\xd4\
\xf0v\xfa^\x1ff>\xcb\xbc\x0f.\x94\xca\xac\xadg\xef\x98\xdehZ\xb4\xde\xed\xb9\
>\xaa\xa9fZ\x83afi\x95N\x85Ba\xe0=cg\xd0z\xb7\xaf\xf0\x9e\xa4\x1fF\x07,nldcS\
?(\x08\x0fX<\x92d\xd2\xe0(\xac\xa0\x85\xa3\xb0\x82\x16\x8e\xc2\n\x12\xe0\xd0\
\r2Ga\x05]x\xe0\xc3\n\x8ebx\x0f;\x8ebx\xd3\xe0(\x86w\x1f\x10\x11\x1c\xe47}\
\xd8q\x02@\xa7\xdd\x8d\xed\x10\xe1\xff\x03\xac\x06\x8fmy\xb1\x05\xb4\x00\x00\
\x00\x00IEND\xaeB`\x82'
def getscanprogress11Bitmap():
return wxBitmapFromImage(getscanprogress11Image())
def getscanprogress11Image():
stream = cStringIO.StringIO(getscanprogress11Data())
return wxImageFromStream(stream)
index.append('scanprogress11')
catalog['scanprogress11'] = ImageClass()
catalog['scanprogress11'].getData = getscanprogress11Data
catalog['scanprogress11'].getImage = getscanprogress11Image
catalog['scanprogress11'].getBitmap = getscanprogress11Bitmap
#----------------------------------------------------------------------
def getscanprogress12Data():
return \
'\x89PNG\r\n\x1a\n\x00\x00\x00\rIHDR\x00\x00\x008\x00\x00\x01,\x08\x06\x00\
\x00\x00\x96\xb6p\'\x00\x00\x00\x04sBIT\x08\x08\x08\x08|\x08d\x88\x00\x00\
\x1c!IDATx\x9c\xed\x9dm\x8c$\xc7y\xdf\x7f{/\xdc\xda\xbd\xd5]\x9dH\xf1\xfa(\
\xe5\xd8\xb4%\xaa\xa9\xd0R\x8bq\x88\x11O\x08\'A`\x8f\x90 \x1a\x19\x864Q\x12{\
a\x04\xc9*\x89\x81Q"C\x17\x07\x08GA\x80\xacm\xc1^\xc1\x1f\xb4\x1f\x1chbD\xc9\
*0\x921,[cI6GH"\x8f\x99Dl\xc9b\xae\x8f"y}\xd2\x1dY\xc7\x17m\xed\xf1\xee\xb6\
\x96\xe4\xdd\xe6C\xcfK\xcf{\xf7L\xcf\xec\xdde\xff\xc0\xe1f\xbb\xab\xab\xea_\
\xafO=O=Us\xa7O\x9f\xde\xe5\x0e\xc6!\x00\xcb\xb2\xf6:\x1fS\x81R*$\x08P\xa9T&\
\x8e\xb0\xb8\x92\xa5\xb8\x92\x9b8\x9eaX[\xaf\xb2\xb6^\x1b\x19.\x9f\xcf\x03p`\
\xaa\xb9I\x19Z\x1b\xea^\x90\xe8\x9bC\xa3\x83L\x07\xc6\x18\xf2\xcb\xebhmb\x7f\
\xa3\x94N\x9c\xce\x9e\x11\xf4\xbc\x00\xdfWSO\xe7\xb6j\xa2\xe3`\x9f`\x12\xd4\
\xebA\xec\xb0\xb5\x04a\'A\xaa}\xb0\xee\x05\xb8\xd9\x12B\x88\x91a\xc7\x190\
\xc6A\xea\x83L8*\xc6\x1f\x19\xa7\x8d\xbe\x04K\xa5\xd2X\x91I\x01\xb9\xac`T\
\x05j\xad)\xfej\x85\xda\xb7\xfd\x91\xe9I\tY7\x8cS\x1b\xa8\xd5\rf@\xf9\xf5\
\x8b\'\xd5>h\xdb\x8c$\x07 \xa5d\xe5\x172\xb1\xe2\xcc8!I!\xc0\x92\xe0\xd8\xc9\
\xf2\xb4g\xf3`&\xe3\x90\xcf\xb9\x04Jc\xc9\xfea\xa4\x04\xcb\x8aQbC\xb0g\x04\
\x01\xd6V\x0bSO\xe3\xf6\x9b\x07\x13\x8e_{Z\x83\xb5\x9a\x8f\xd6\x06\xd7ub\x85\
W\x1a\xfc Y\x1a{F\xd0\xf7_b\xb9X\x06\xc6\x1f\xb5\xe3`\xcf\x9a\xa8\xd6[3I\xe7\
\xf6\xeb\x83\t\x91*\xc1A\x13\xf0\xa4a\'Aj}\xb0T*!\x04\x9c)\xe6\x90b\xc0\xc4\
\xd6\xc0\x0e\x86/~\xa96qzq\x90\xea s\xe6L\x98\xa8nd\xc0q\x1c|\xdfO3\x89\x0eD\
I\x0eJo*\xa3h\xa9T\xc2\xb2$\xae#\xc8e\xb3#\xc3\x0f\x9b%\xb4\x86`\xc4\xc2\xbf\
I\xb4_a\xa6N\xb0T*q\xa6\x98ce9\x9bZ\x9c\xa6!d\xf7[a\x8dj\xaa\xa9\x0e2\xd3 \
\x07\xa1\xa0\x9d\xcb\ndW\xd7\x8e\xd3\x0fS].Y\x96\xec W\xab\xf9T\xaa\xdeXq\
\x01\xd8\xb6d\xb9\x90A6\x98Y"\xa0X*\'\x8a#\xd5&\x9a\xcf\xb5;\x93\xe7\x05-Ie\
\x12\xd4\xeb\x01\x1b\xe5\x15 \\\x81H)\x12\xa9\x1aSm\xa2\x0f\xd8m\x13\xc0$5\
\x17E\xdd\x0b\x08"\xa3\x8cm\'33\xa4Jp\x9e\xf6\xda-I)\x8f\xc2$\xfa\x9b\x99\
\x08\xdb\xa539\x9c\xae\x92_[\xaf%V\xc3\x8f\x83\x99\x10\\.d{\x9e}L\xe9\x99\
\x10\xdc3a;\xda\x9c\xa7\x89\xfd\xd5\xc4\xed\x8e\x99\xf4\xc1\xaf=\xe9\xf1\xd0\
Cv\xfb\x811Tk\xe9L#\xa30\x13\x82\x9f\xfa\xf4\xc6,\x92\xe9\x8b\xa95Qk\x90\xb2\
s\xc6q\xa5+\x8b\xda\xed\xdf\xc5\x95\x1c\x1b\x95z\xac\t\x7fXz\x965\x87m\xcf\
\xb7\xfe.\x14\x96i\x98\xdfc\xc5\x93j\x13\r\x144\x15\xf2B@u\xa3H\xa5\xeaaF\
\xe8\'\x06\xad\x07\x85\x00\'BN\xeb\xe4\xaa\x8eT\t\x1a\x03\x9eop\x9dp\x8e\xeb\
^]L\x8a\xba\xbf\x93\xf8\x9b\xd4\xfb\xa0\xe7\x87$\xd3F\xdd3(\x95|\xcf\xd2TFQ\
\xcf\x07?0\xd4\xaa\x1b\xd8\xb65\xd2\xe2\x94\x1d\xa2\xd6\xd0f\x0e\xa5v\xc7\
\xd6\xc2Mm\x9a0\x06\xaa5\x1f\x18\xadt\x92Vv\xc8\xdb\xc9v\x9aM\x8d\xa0\x10P\
\xc8\xbb\xb1\x86\xf8I\x95N\xc30\x15\x82\xae\x03\xae#(\xe4\xd21\x8f\rS:\x8dB\
\xea\x83L\xa6A.M\x0cR:\xc5A\xaa5(\x048\x11rq\x95N\xf9\x013\xb7\x94\xe0\xd8m\
\x9b\x7f\xc6\x81j=Y\x9eRU\xdd\xaf,g \x17f6\x89\xd2\xc9\xcd\x0c\x10M\x14(e\
\xc8e\x9b\xf3\xaa@\xb0\x8danoT\xf7\xf9|{+e\xa5\xea\xa5\xa2\xbaW:\x1ch\x9a\
\xcdS\xca\xb9V_\x8c\xa3\xbaO\xb5\x0fF{\x9e6;\xa9\xd9%F\x89z3U\xdd\xb7`\xda\
\xf3\xd74\x95N\xa3\x9aj\xaa\x04=\xcf\xc7\xb6\xdd\x9e\xe7\xd3R:\xcd\\u\x9f\
\x04Q\xa5\xd3\xa8\xf4\x9c\xf5e,+\x94\x06\xd6\xcbe\xbc\x04\x05s\xc7\xebd\xeex\
\x82\xfbJ\xa74\xb0\xaft\x9ab\\\xa9\x12\xf4\x83\xa0\xf5\xbbP\xc8 \xe5\xe4Bw\
\xf61\xa7\xc3d\xa67\x93-)Rm\xa2\x95\xaaOq\xc5 \x84\xc0\xb6dl\xa5\xd3 \x9c\
\xb4$\x1f\xcd\xb5\xe7U?P\x04\x17\xf6\x90\xa0R\x9a\xb5\xf5\x1ag\x8a\xa1L\x9a\
\xb6\xd2i\xf5\xd7\xab\x89\xbfI}\x90Y/\xd7\x10\x82T}\x98\x8c1\xac\xaeU[[\xa0\
\x93`*\xa3\xe8\xdaz\x8d\xf2F\x9d\x8ck\xc7R:\r\x83R\x9ajc\xdb\xe58\x98\xda4\
\xa1\xb5\x89\xadt\x9a&\xa6*\x8b\xc6\x8dg\x9a\xe9\xa5Z\x83\xddn\x05\xcb\xf9\
\xd5\xbe\xe1\xba\xdd\n\x86\xc6\x99\xc0\xad\xa0\x1f\xf6\xdd\n\xa6\x85}\xb7\
\x82\x94p\xfb-\x97\xf6\xdd\n:\xb1\xefV0-\xec\xbb\x15\xa4\x84\xd4U\xf7\xae3@\
\r\xdf\x85I\xdd\n\xf6Duo\xd99|\xdf D\xa8#\x95\x96D\xf7\xb1y\xa5\xe1V\x00{\
\xb4\xeb\xbe\xee\xcfF\xaf\x1a\xc5\xccw\xddC\xb8\xd8\xcdf\xec\xa9[xg\xaa\xbao\
&6+\xb7\x82=Q\xddO\xcb\xad \x9b\x81\xfc\xf2z\xe2\xe3!\xd2\x1dd\xa6\xe8V \x84\
\xa0T\xccQX)\'\x8ac\xdf\xad \t\xf6\xdd\n\xc6\xc4\xbe[\xc1\x10\xec\xbb\x15L\
\x0b\xfbn\x05)\xe1\x8e\'\xb8o\xe1M\x03{i\xe1MU\x16u#[d\x96\x0b\xf9\xd8\x93\
\xfd\xc8m$\x91\r~+\xcb\xcb\x03\xb7UN\xfd\xc0\x9ch\xc2\x8e#R\xb1\xf0Z\xd6\\\
\xc76\xca\xa4\xf2\xc3\xbe[A\x12\xdc\x8an\x05\xa9\x0f2\x9e\x0f\xd0&)\xc4\xbd\
\x00\x1c\x10\xa7\xb8qc\x8b\x83\x07\x8f\x01pS\xdc\xcd\x01\xf3\x1a7n\xb4\xd5\
\x87;\xaf\xff``\xbc\xa1[A\xf2\xfc\xa4\xbc\xe3w\x11)\x8f"O:\x04W\x04\xe6M0o\n\
\x94\xde\xc4l\x83\xde9\x02\xe6-\x0c;`^o|u\x00\xc4\x01\xaccG\x91\x0bY,Kp\xaf\
\x84EaP\xea\x1cZ\x9d\xc7\x0f\xc6\xd7\xc2\xa5B\xd0q\x1cl\xfb}(}\x1d\xa5\xb7\
\xa9\x9f5h\xbd\x89\x10\x0b\x18\xb3\x8d\x10\x0b\x00!1\x00s3\xfc\xef\xc6\\\xf8\
\xf7\xb5]\x82\xad\xcb\xe1\x0eL\x8f\xd6\xc6SK\x9e\xc0u?@\xd6>\x80R/\xe0y\xc9\
\xe7\xce\xb1\t\x1e\x95\x92\x9f\xb0\x1d\xa4\xf5\x00\x81\xda\xa4Z\x7f1\xccX\
\xa3\xa4\x9b\xa4\x00\x8c\xd9\xe6\x8d7\xb6\xb8\xb9+80g\xb8~\xbd\x1d\x8fX\x10\
\x98]\x10s \x0e\x0b8\xdcx\xf1\x96AiM\xa5\x1a\x0e\xcd\xf2m\x82\x8c\x9b\xc3z;|\
\xe7;u\xae\xe8xK\xa8\xc4\x04-\xcb\xc2\xb6m\x0c\x16J\x1b|\xef<\x86p\xc4k\xfe/\
\xb8\x8a\x90w#\xe7o \x16\x96\x10\xdc\x80\x06aAs0\n\xb8t\xc9\xc0\xf65\x0e\xdf\
8\xc2\x1bo^\xe7\xfa\xcd\x03\\\xb9\n\xf3\x8b\x82C\x87\x04G\x84l\x19T\xcd\x9b\
\x86j\xdd\x07\x04\x197\xc3c\x99E\xbe]\xff\xd3\x91Dc\x13\x14b\x11\xc7y\x10\
\x84E\xa0\xc1lobvvZ\x99\xe6\xadkXKob\xdb\x121wO\xf8\xfc\xaef\xf4\x87;\xe3\
\x9a\x07\xd7\xb11\xe6\x02\xd0\xac\xe9\xce\x1a7\xe65\xf4\x95K\xbc\xe3\xc8;y\
\xf3\xa6\xc4l\x83h\xb4\xdd\xba\x17P\xf7 \xe3~\x80l\xf68_\xaf~\x1dc\xae\xd3\
\x0f\xb1\x08Z\x96\x85\xfd\x1e\x17\xf5:\x18\rf{\xbb\x91\xd1y\xf4\xe5g\xdb\x19\
\xbf\xe7\x18r\xbesr\x8f\xb4\xd4\xce8\x17\x04\xb6},\x9c#\x1b\xcd\xba=\x8e\xcc\
\x03\xa1\xa2\xc9\xb1%\xc6\x80\xd2\x9aK\xca\xf0\x03\x05\xb4\x88^\xc6\xf3.\x93\
}\xfcg\xf8\xb3jy<\x82\x99\x0fg\x11K\'\t\x82\xcd\xb0d\x1b[\xb1\xafm]\xe0u\xad\
[}M\xb0\x8b\x10\xa2A\xa8\xbf\x04#D\xe7h\xe8\xf6\x18\xdc\xdb/\xcdv\xe4\xf1\
\x1c\xd8Bb[p\xd0\x0b\xf8\xdf\xdf\rX:a\xb7\xbe\xa8~\xcb\xc7:\xf98\x95J\xaec\
\xe7\xffP\x82G\xa5\xe4\xf1l\x8e\x1f\xbcdPg_\x0c3.\x00\xfd\x1aJ]D\x88\x85\x86\
(\xd6\xde|\xde\x98\xf9\x86nD\x18\xbeIA\xb4\n\xa0]\xf3\xa6\xf5\x0e\xc0\x92\
\x92\x87\xef\xbf\xc6\xcb\xfaE\xd4\x95\x03\x1c\xbd\xd7\x06B\xa5p\xb5\xbeC\xf5\
O\xea\x94\xfeUq8\xc1\xa3R\xf2\xc8#\x19\xfe"\xd0\x18\x03ra\x01\xad/\xa2\xf5k\
\x00H!@\xecv\x90B\xc0\x82\x18N\xa09\xc86\xff\xd7\x9b\x06\xb3\xd35\xc1\xcd\t\
\xd85mj\x8d\xd7s\x186u\xd8T\x11K\xdck-qTn\xf3\xdc\x05\xbfU\x9b\xd7\x8ca\xe3\
\x1b\x01+\xc55*\x1b\xab\xfd\t\x16\n\x05\xfeF.\x8f\x7f^\x03\x06\xc15\x8cy\x85\
k\xc6 \xd8m\xb5>\xd1 \x15\xcd\xfcn\xe4\xc1 \x9e\xcd\xe7\xc6\x80\xf7\xdd\xa0c\
ji\xc2\xb2,\xb4\xd1\x18m\xda\xaf\x0c4\xff\x12b\x17c\xe6\x10b\x81\x87\xef\xbf\
\xc1\xcb\xfaE6\xdfZ\xe4\xd8\x92\x85\xa1\xf3\xa8\xea\x0e\x82\x8e\xe3PX.\xe2y\
\xaa\x95\x13\xad71ZsP\xd0\xa8(\x11!9\x98@\xfb\xc1\x00\xaaF7\xde\x99\x08\xeb0\
\x02\xa5\x14\xb6m!m\x89\xd7\x10cL3\xed\x06Q\xd1hA\x86Fm\x9am\xd4\x8f\x03\xc4\
\xdb\xed\x8e~\xdeZ.e\xb3Y\xca\xe5\xff\x02\x08\xc41\x01\xe6\x1a*\xf0\xd0\xfaB\
\xdfZ\xeb%\xd2\xc8\xaf\x10\x9d\xff\x86@4\xe2\x14\x88H\x04\xe1\xcf@)\xb4\xd6d\
3n\xb8\xa1\xaf#\x1d\xd1\xfa\xbbIT\x88\x1b\xd8\xf7\x1d\xc6\xbc\xfc\x0c"\xd2$Z\
5\xb8\xb2R\xc4\x0f\xae\xb7\xd2\xd0\xfaG\x8d\x0f\x8f\x84\x91\x89Vo\xeb\xcca\
\xe4G"y\xd1\xf4zv\x86\xf5\xd9N!P\n\x0e\tl\xdb\xe6^\xcb\xe2{\xbe\xdfn\xb6B \
\x8ci\x08\x17\xbb\x18\xb3\x04\x80}/\x04/?\xdfKPH;\x1c$\x8d\xe6R\x10p\xb0\x95\
\x94 Rq}\xc95\x7f*\xa5\x11\x0b\x02q\xd7h~\xc1\x85\xcb}\xa3\x8b\xb6V\x10\x04\
\x17\x03\xc4a\xb0NXd\\\x17uY\xe1\x9f\r\xc20B \x9a#Q\xa3_"\x96\xb0O\x81\xf7b\
\x17\xc1\xa6<\xa8\xd5\xabmr#j\xad_\x03\x0c\xce\x8f\xb1\xa6\x19\x80f\xea\xc1y\
\x85\x98\x17H)\xb1NX\x1c=&\xf9\x9e\xe7\xb7\xfa&\x84\xb5\x89\xd8m\xf4\xd1\xa5\
V\x1cm\x95\xc5\x8e!\x08<\x8cy\xb5EN$$\x97&\xba\xe3\xf7\xfd\x80\xeb\x8d>\xb0(\
\x04\x99\x8c\x8b\xb4d$K\x8d<\x89P\xe8h\xa2=\x8a\xee^\xc5\x18\xd3A\xae7\xc5\
\xe1\xc4\x84\x14\xb8\x8e5\xfa\x9cmc\xf0|\x15\xcb@#\xa5\xc0\xb1O\x80\x98k\xa8\
>\xdaq\xbb\x8eC\xb0\xa4\x08\x9e\x0bZ\xfd\x12c:J\xa7U\x83\xea\xf2\xc5\xc6()zk\
-\x069\x00K\x8aX\x87\x88#Dl3\x98mK\x84\\@\x88P\x89\xa57;\xbb\x80\xfd.\x0b\
\xfb\xddvdT\xedL\xbf]\x83\xc6\xf4\'\xd7\xf8\x91f\x93\x14\x845cYr\xa8B*$\xd5\
\xb9\x89\xc1\xec\x18\xb4\xd6\x1d\xcf\xedwY\x98\xab\x06\xa5TcTm\xe7\xb6M\xb0\
\xbb\xe4S \xd7GH\t3\xd9\xf8\xdfu\xac\xb1\xceT\xf7\x83\x80LT\t\x0b8\x8e\x8d6\
\x1a\xa2\xd2\x0f]z\xd1f\'\x8d\xfe\x18\x87\\\xe7P?\xfc\xfdX\x85g\xc0\x0f~\xd4\
\xf3\xf8\xfd\x8e\xd3\x11?D\xe7\xc1\x8e7\x935\xc9\xa8@\x1d\x8d\xdft\xfd\xaf\
\xb4\xee\\\x16\x8d\x8aw\xc7\x10\x04\xe1\n^\xab\xd7\xb8n\xbd\x83\xc5H\xcb[\
\x14\xa2\'\xe1\x1ea;\xad\xde\xd6\xaf\xf6\xa2\xe4\xb41\xa1\xcc;\x01\x9e\xf5\
\x83\x8e\xbd\xa6Z\xbf\xd1\x93p\x17\xc1tg\xb7hav\xae\xea\xd29\xe3\xd7\x18\x83\
\xe7\xf9\x08y\x04\xcc\xb5p\xda\xe9\xa2\x10\x19d\xd2\xa5\xd7\xaf\x89N\x03\xc6\
\x18\x8c\x8a\xac\x1f\x11\x1d\xa5\xd7\x1ad\xd2\x96M\xbak\xac\xfb\xf9\xf4\xee\
\xa4\xe8\xac\xc5\x94\x8d/\x1a1\x1f\xaf\xa0\x92\xba\xc9\xc5A{5\xd2g\xb9\x94\n\
\x0c\xf8\xe7\xd2\x13\xb6\xc7Aw\xd7H\xdd\xf8bYr\xd4:\x17\x08k;n+\x95\r\x11\
\xd0\x183R~m\xf5\xc3\x06R%h\xdb\x12\xc7\xee\x95N\xfa\r8b^\xc4\xaam\xdb\x96\
\x1dr\xab\xef\xab\xa1;\x9f\xba\xd3\x9a\xca.\x8b\x9eeV\xe4Y\xf3\xb9u<^_\xed\
\xdeP\x9b\xd4\x171\xf5&:\xac\x01uH2B\x90\xc9\xd8]\xf3a\xe7\xd7B\xc4\\\x9dt\
\xa1\xaf\xa8\x966\x86M\xf2\xd1\xa5Mg\xfe\xd3\x9f-\xdbM4\xe5\xb8\x87\xad"\xa2\
H\x92\xec\xeev\xf2]\x8c\x91\xf5`\xa2\xef\xc6B\x94\x8c!\xdc4\x9b\xc4\x05}\x1c\
\xf1.\xb5&\x1a\'\xedh\x18\xadG\x0f\xf9i 5\x82=M\xad_q\xb7t&\x02\xb3m0C\xa4\
\xd5\xb4z\xcc\xf4\xb6r\r\x1a\xfd\x9a\xaa\xc8\x0e3\x9b\xa1\xdbH\xd8S<1\xf5B\
\xddH\x95`\x92&\xa7uT\x87\x1d\xfe\x8a\x9a\xc2\xa3o\x1a\xfa\xeb\xbe\xe4Fi\x05\
R\'X\xab\xfb\xb1J\xb9o\x0b\x8e\xfc\x1f\x9db\x86\xd5[\x7f\xf5\xc8\xb4\x84\xed\
F\xdc\xe3\x0e\x1d\xdd*\x8d$\x18\xa4\x07\x9aJ\x1f\x14\x02\xec\xfb-\xe4\xf1\
\xf1$\x91(\x8c\t\xf50\xa3v\xde\x0f*\x94\xd4\t\xc6\xd6n\xc7\x8dO\x08\x1c\xc7\
\xc2\xb2\xc4P\x1dN\xbf\xe6\rS \xd8Mn\xdc\xb3d\x80\x96\x1d\xd0\x10\x1e.`\xdbm\
\xad\xda Lu\xb9t\xd2j7\xc9\xddm\xf8\xf3s\x01&\xc1\xc8\xda\xd3\xcc\x048\xf7[X\
\'\xc3\x15\x85m\xc9\x91\x04\xa7\xba\\\x9a\x8f\xd4\xdce\xad\x13\x91\x83~\xc2B\
\x97\x86\xa0\xdf\xeabDOH\xf7\xf0\xc6\x88>fk+=\x9dKt~\x8d\xf2\x8b\xd3\xcd\xa7\
&\xc9\xecF\x1aKh\xae\xef\xccM29\xb4w\x8cl\t\x04\xa67d\x143\xd9u\x9fq\xed\x9e\
\xe2\x0e\x025\xb2?\r\x82\x184\xe9\xb5\xd0G/:U\xf4iKq\xd5\x8b\xddXh|6|pn\xc7\
\x9d:\xc1ii\xb0\x81\xf6\xde\x98\x01\xe4\xfa=\x9e\x9a\xd2i\xd8\xdfc\xc5)v\xd9\
\xdd]b{H\xcdEf\xdf\xd6\xaf\xd4\t\x0eRUt\xebc\xc6\xc1\xdc\xdc\xd5\x98!\xa74\
\xd1\x8b\xb9\xdd\xbe\x04</\xe83\x8a\xc6\x1f`\x04\r\xdf\t3\x17#\xec(Q\xad;D\n\
\x98\xf8\x02p17\x9c\\\xd4\xee\xd9eA\xebm\xa2\xa3\xf21\xa4\xad\x99\xddv&\xe4\
\xb1\xb4\x84\xed\xc6\xf6\xcd\x98Y\x13]Z\xe7^\x82\xfd\x08\xc4\\J\xeb\xadv\x16\
\xec\x932\xdc\x022\x01\xcf\xf0\xe4u\xbb3\x8d\xa1\xf3C\xc8\xae\xaf\xe2\xd7z\
\x97\x85\xba\xa8\xfa\x8f\x08\xddd\x064c\xa55\xda\xc8V\x89;\xb6\xd5\xe3\x9c\
\xdc\xc4\xb0E\xed \xdb\xa2zi\x80\xc1\xa6\x9f\x94\xde@\xab\x06\xdd\x87]D\xd3[\
l\x94\xf5\xa4;\xe2HAx\xbeb;\xc6\xc6\x82a\x15\xdb\xcf\xb6\xa1\x8d\xc1\xbf0x=h\
:\xe4\xb6>\xd3\x841;<\x9e\xc9\xb4Uy\xfdH\xf6\xcb\x85\xe9\xfc\xdbh\xc3\x9f\
\x7f\xd7\'xI\x8fhN\xcd\xef\x86_\x98a\x8c!x\xa9\xe1\xb1="\xbap\x03\x7fg\xa0V\
\x13\xadyg\xc9e\\\x1eu\xddp\'\x1f\x0clG\x1d\x15\xda\x87\xb8!\xa6!t\xa4L9\x06\
\xbatB\xad\x1a,\xae|\x92\x9a\xe7s\x9fe\x93\xcd\xfe\xf5v\xaa}\x12O\x94\x9f8\
\x83L\x8a\xf2\x9d\xa1S\x8b\xd0\xaaA\xad^\xa4\xf4\xe9OQ\xfa\xcd/\x91\xfb\x90C\
6\xfb\x11j\xb5\'G\xd7`\xdf\xfc\n\xec\xf7JNH\xc9\xc2\x00\xc7\x900`\xa3y\x0e\
\x99\xe3tL\xa5\x13\xb4\xc9Y\x91}l\x1d\x13\xbd\x7f\xf6i>\xf3\xcb\xcb\xf0\xf9u\
\xf29\x97\x9f\xcde\xf9\xd4\xcaJ\xf8\xf2\x80 \xf3\xe8#X\xf6\xa9\xa1\xe4\xa4\
\x10\xb8\xae=\xbaRF\xf4\xbdV\xb0\x88\xd2\xa9Z}\n\xcf\xfb\x0e\xe6\x8d\xc6\xcb\
\x9b\x06cB\xf7"c\xb6Y^^\xc1~\x8f\x83\xf3\x93\x0f\xb0\xb1Q\xee%\x08p\xf19\x0f\
\xdf\xfb\x9f\xacon\xb3\xfc\xb1\x0f\xb2^.\xb7J\xa4\xf2_7x\xf4t\x96\x0f\xfd\
\xb4\xcb\xf1\xe3\x7f\x89\xa3\xf2 \x8b"t\xc1i2\n\x95N\xed\xcc\x1b=\xc0k\xb3\
\xa1\xc6\xde\xdd\x86\xb9\xb9^\xb5\xad\xd1\xdb\\~\xed%\xae]\x7f\x83\x1f^\n(\
\x16KC\x0b\xa2\\\xde\xe0\xa7\\\x97\xbb\xe5q\x94jo\x97\x1e \x8b\xbe\x8e\xbe\
\xf0-\xd6\xff\xe3\x8f\xc9\xff\xed\xd3\x18c8{\xfe<\xbf\xf6\xb9\xcf\x01p\xe1B\
\xc0\x17\xbe\xb0\x86u\x9f\x8d\xf3\xa0\xdd:\xd0\xe6\xc4\t\x0b\xa3\xcf\xb5&\
\xf7 \x18~_\xd2\xdc\x9c\xe1Z\xe3\xfd\xdcM\xc3Nc\xab\xa4\xd6[\xa8\xd76\xd1\
\xaf(\x82 \x97\xfbH\xeb\xda\x94\xa6L\xdb\xb4\xfe\x16>\xb9\x8c\xfe\xb1\xc6y\
\x9f\x037\x05u\xff<\xe5\xf5\xdfn\xb5\xa0\xa1\xc2\xb6y\xf5/\xa8|\x152?\xfd0\
\x19\xf7\x01\xd6\xd6\xd6;<\xab?\xf7\xb9\x12O>Y\xc3\xf3<\xb4\xd6\xe1\xae#\xd3\
TS\xc4S\xfav\x7f\xd3,,)\xe5\xc8\x0b9<\xefY\xe6\x8f\x1d\xc6~\xd0AkMe\xe3\xcb\
\xfc\xe6\xdaopE\xabV\x81\x0c%(\xc4"\xce\xc3?I\x10l\xe2\x9d}\x91\xc2G>\x18z\
\x82\xa9\x00\xdb\xb6y\xe2\x89\x12O<\x11\x86\xfd\xbf\xbe\xcfS\xf5:J)T\xc3\xe7\
A\xeb-\xb4\xde\x1c\x12\xbf\xc0\xb2\xacp\xb3\xb9e\xe1\xba.\xa7l\x9b\xf75\xb6E\
\x0eB\xfd)\x0fq\xd7"\xb6}\ncv\xf0\x9f\xf1\xd9\xf8r\x99\x8d\xffT\xee\xd1\xc3\
\x0e=(\xa0\xb0\\\xc4}L\x90\xcb\xda\x00\xfc\xcaj\x85\xbf\xf5\xb3.\x99\x0f\xda\
hm\xf0\x9f\xf5\t^\x08(\x14\xf2\xbc\xcfqFfl\x12(\x15\xd6\xe6kZ`?\xe8 \x00\xff\
\xb9\x80\xea7kT\xffp\x03\xef\x7f\xd5\xfb~7\xb0\x06\x85\x10\xe4\xf3\x05\xcc\
\xf5p\xa4\n\x94\xe6\xb7Ky\xaau\x9f\x7f\xf9o\xcb\xfc\xe2\xc7s8\xa7\x1e\xc0y\
\xbf\xc33~\xc0\x8f\x82\x80\\.\x9b:\xb1\x8dJ\x85\xcc\x07\x1c\x8e\xc9#\xcc-\
\xbc\x83\xbb\xd9A\x05?\xa4\xee=K\xf9\xdf\x7f~ \xb1&\x06\x12,\x95\xfeMk\x03Nq\
\xf5+\xb8\x0f\xbe\x13\xc79\x81c\xdb\xe8\x0f\xc3\xd7kujw\x1d#\xfb\xd8CX\xf7J2\
\x99\x0cZ\x1bv1\xb0\xad\xf9\xee\xb9\x00\xa55\x85A\xb7!\xf6#\xb3Qi\xfd\x16B`\
\xdb?A\xe6\x91\x0c\xf2\xa8Di\x8d:\x17P\xaf\xfd\t\x7f\xf4\x8d?\xe0{\xff\xa7\
\x1e\xcb,\xd0\x97\xe0\xa3\xa7\xb3ds\x1fG\n\x81j\x1e\x93y\xe0\x08\x85\xe55r\
\xb9\x0c\xcb\xf9\x0c&\xebP\xf9\xda\xd3\x14\xff\xf1\xdf\xe5\x9e{N\xf0\xf3\x7f\
\xef\x1f\xe2\xba?\x85mI\xc4\x82E&c\xb1m\x0c\xcf\xf8\x01\xf7\x9d8\x18\xc6an\
\x00\xe1\xe8\xa9\xb4\xc1\xf7\x03\x84\x08\xa7 \xb1$p\x1c\x1b\xf9v\x8bk\xd7\rG\
d\xe8\xf9\xe9\xbf\x10\x80\xf1\xb9\xff\xc4E\x1c\xf9*\xa5\xca:O%8I\xa8/\xc1_\
\xfa\x07\xcb@x\x80\xb0\xd2[\x94\x8a\x1f\xa7\xfc\x95\x1a\x08\x83\xebXH)\xf0|E\
\xf5\xf7\x7f\x07\xff\xec\xd3\x00\xfc\x8f\xff^\xe5\x8b\xbfU@<\xf2s\xa8\xed\
\xe3\x18\xde\x86\\\x94XG\x05[;"\\B\x1d\x0f\xe3\x17\x08li\xc2{\\D8Wj\xb3\x8d\
\xd6;\xd4\xbf\xa3x\xe6\x07\x1e?z\xde\xc7\x7f\xa6\x8e\xba\xa4X\xfb\xd7Y\x8e\
\xdba\xffN*\xb6\xf6\x10\xcc\xe7\xf3d\xb2Y@P\xf7\x02\x9468b\x9e\xdc\xe3\x0e\
\xce{\xc3\xa3\xdd\x95\x0e%|u\xe9\xf9\x8eow\xb6\xc0:\xf6\x02\xd61(\x9e\xd9\
\xe0O\xeb\x8a\xfbN\x9e\xe4\x0b\x9f\xff\x15.\x1c\\d\xdb\x08\x10\x07\x81\xb7\
\xa1\xf55\x82\xe09\x82\x1f^$x\xc1#8\x17\x9e\x82\x10\x9c\x1f>5$E\x0f\xc1\xe5\
\x95\x7f\n\x06\xd4\x15\xcd\x97+u\xee>.\xf9\xe3o\xd69\xb6$)\xe4?\x84R[\xc0\
\x16\xf5o\xfd\xe1\xc8\x0e~E+\xaeh\xc5g\xff\xc5?\xefq\x04\xa9\xd5\xc7?\xbf>\t\
:\x08\xae\xac\xac\xe0:\xef\xc7\x98]\xfc\xa7\xcf\x03`\xb6\r\x9bW\xe1\xf1\x0f\
\xdfO\xc6\xbd\x9f\xdaS\x01\xea\xc5K\x94\xd7\x7fch\xc4\xd19\xde\xf3\x15^\xc23\
\t\xd3Bk\xb9d\xdb6\x85\xbf\xbf\x826\xa1\x1bk\xf0\xc3\xcb\xfce\xc7F\xe9+,\x1e\
0d\xdd\x87\xd0\xc6 \xee\x9a\xa3\\^\xeb\xab\xf6\xd3\xa6\xfd\xec\x83\xae\x9dJ\
\x06ECxo!\xa1A\xb5Epe\xe5\x9f \xef\x91-\xd1\xa9\xf0\xd1\x0c\x81\xd2\x18s\x93\
\xbf\xf33\x7f\x15\xcb\x92(e\xf0\xea\x7fF\xbd\xf6\x8d\xbe\x91\xd5\xebA\xeb\
\xf7\'\xf2\x19\x8a+\xd9\x89\x94N\x96%\xf9\xad\x7f\x97o\x89|\xc6\x18\xfc\x84\
\xd7I\xb6\x95N\xf6_\x03B\xc5\xd1\xfa\xefTq\xdec\x93\xcb:`\x1c\\\xe7$\x81R\
\x18\x03\x1b\x1b\xeb\x03\xe7\x9fj\xcd\xc7\xf7_\xc2qN\x02\xe1\xa5\x1ai^\xacQ\
\xa9x\x89\xf7\xab\xb5jPooS\xfeJ\x1d\xff\xfb\xcfc\xbf\xdb\xc6;\xeb\xb1\xfa\
\xf92R.`\xcc.\xeae\x83\xef\xd7\xa9\xd7\x87\x0f,+\x9f\xfd\x0f\x13\x9dE8\x08A\
\xa0(\xadU\x13\x7fw\xf0\xd4\xa9S\xa5\xa5\xa5%\x9e>\xb7\xc5\x85\x8b\x01J]A,\
\xdd\x03\x1c\xe0\xa1w\x1e\xe3\x97>\xf971\xc0\xab\xaf\xbc\xcag~\xf9\x9f\xa1\
\xf5\xabC#\xd4[\x86\xaf~\xf3\xfb\x1c:t\x18\xb1t\x88{\xe4\xd2\xd0\xf0\xa3\xa0\
\x94\xe6\xf7\xbeZg\xe53\xbf\x97hC\x83\xe38\\\xbdz\xb5\xddDwL\xb8\xdcQW5\xc6h\
\xc4\x92\xc5\xf7\x94\xc6\xf3\x15B\xc0\x1f\xfd\xfeF\xec9J)Mi\xf5\xbf%g3\x05\
\xb4\xa7\x89\xb7\xda\xf6\x03\xad\x15h\x85X\xb2\xf8li\x9dG\xff\x8a\xcd\xef\
\xfe\xe7\xdf\xdd\xa3,N\x86\xb6\x93\xf2[\x9d/\xc4!\x81X\n}\xdc_yES\\\xf9G\xb1\
"\xb4\xed9dCeQ\xab\xd5\xfa\x86\xd1\xda\xb0Q\xa9\xb7\x06\x8cQ\xe7\xaa5e\x84\
\xf0T\x92\xc1\xe1F\\\x89\xd2\xd9\xbe\xe7\x97,\x04\x82\xc5E\x81\xb5\x18\xaf\
\xed;6d\xdc\xf61a\xae3x\x04ul\x8b3\xab\x95\x81\xef\xdbqt^{[\xf3v\x08\x82\xf8\
\xb7+\xf75\x80\n!Yh\xcc=\xd6\xd1\x9b\xb1\xe7\xb2$s^6\x1boq\xec\xd8\x9d\x91\
\xca\x98\xda\xb8&\xfa\xae&\xe6\x1bK\x98\xa5\x058*\xde\xe8\x17dbX\x96\xa4V)\
\xa2\x94\xc6\xb6\xfb\x87\t=^&K\xa7\xd7A\xb2Q{K\x0b\xf0\xee\x13\xc9j%)l\xdbJ|\
(qRt5Q\x81h\xe8@\xa7Mn\x1c\x84\n\xafd\xdft\xd4\xa0\x94\x12AX{\xd3\x861\x86\
\xfc\xf2:Z\x1b\x8a\xc5\xe2\xe8\x0f\x98\xd0\xad@\x08\xd9R\x1f\xbc\xeb\xc4a\
\x84x3yl\t\xe0yA\xebX\xf74\xdc]\x07\xa1\xed\x01\xdah\x8f\x81_GN\x99\xdc,\x11\
!(\xd1Z\xe1{\xc9\x05\xda[\x19\x1d}P\xab`\xec\x88J\xa5\x12\x19\xd7n\x9dK?\n\
\xb5\xc8\xdaq\xdc\xf4\xe2\xa0Ep\xdb\x18\x82s\xc3\x97B\xa3\x90\xcb/\xb3Q\xd9\
\x061\xc7\xda\xda\x1a\xb6\xed\x10\x04\xfd\x05\xf44\x96T\x89\xaeDy\xfe\xfbU\
\xcc\xf6\xe4\x89\x1a\xe6(\x9d\t\x13Vj\xb2\x02\x8b\x8bXW\xa2\\|.\x9dc\xa1K\
\xa5\x12\x96%).g\xc9f\x9d\x89\xaf\xab\xd5\x1a\xfc\xc0\x0c\xbc1k\xe6W\xa28\
\x8e\xc5\xc6\xfar\xcf1)\xe3BJ\xc8\xb8\x02\xdb2T\xbb\x1a\xc4\x9e\\\x89\xb2\
\xbeZ\xe8 \x17L\xd0\xd7\xa2\x07\xf0X\x96@\xab*k\xeb\xb5Dq\xa4Z\x83\xb9l\xfbR\
\xe0\xa6\xa4\x92\xf4\x8e\x96n\xfcZ)\xcf\'\xf2\x19\x00\x96\x0b\x99\xc4\x04S\
\xdd/\x1a\xd5_nT\xea\x13\x93\x03xb\xb5\xd2\xd2\xc5H)q\xec\x93\x89\xbeO\x95`t\
\xfb\xc6\xa4G\xa94aL(\xd65!\xe4\xfc\xe0\xc0}0\x93]\xf7\x96%{FSo\x06\x97i\xc0\
\x8c\x08V7\x8a=\xc7\xc2\xaf\xad\'\x1f0\xc6\xc1L\xdc\n\xfa\x9dyo[\xd3]\xe86q\
\xc7_\xa8\xb1O0\r\xf4S\xb9GMm\xd3\xc4L\x06\x99\xc2\xf2z_\x0b\xef,0\xbd\xc3:n\
5\x0bo\x1a\xb8\xa5-\xbci\xe0\x96\xb6\xf0\xa6\x81[\xd1\xc2\x9b\xfar\xc9\x0b\
\xe2_\xdb\x9e\x04Z\x83\x11.\xa5\x92;0\xcc\x08\xebR:0\x06*5\xd3\xb8\xa8F\x0ft\
\x0cI\x12_\xa0\x0c\xf51\x15\x0eS\x19E\x8d\x81\xba\x07\xa5\xd2Z\xac\xf0w\xe4]\
\xd8\xb3B\xea5\x18\xb5\xf0\x16W\xb2}\xc3t[xG\xc6\x19\xd3\xc2\xdb\x0f\xa9\x12\
\xbcm,\xbc\xe3\xe2\xb6\xb1\xf0\xce\x02{f\xe1\x9d%\xf6\xc0\xc2{kcb\x0b\xef$(\
\x95J\x14W\xb2C\x07\x96(&\xb5\xf0&\xb6.\xa5\x81l6\x1b;l\x1a\x16\xde8\xd6\xa5\
\xdb\xaa\x89\x0e\xc20\xeb\xd2mOpTSM\x95\xe0\xeaj9v\xd8I-\xbc\x10\xaf\x1f\xce\
\x9d>}z\xd7\xb2,*\x95\xd1RE\x1c\xc8\x98W2Lc\xd3l\x14\xf9|\x1e\xa5T\xfa\xf3\
\xe0\xc4\xc7:\xa4\x8c\x94e\xd1\x93\x94\xd7\x7fq\xa4UWkM\xf1W+\xd4\xbe=Z\xb3\
\xe6:\x16\xab\xa5O \xe4\x02ZiV\xcel$\xaa\xfdT\xfb`.\xf7\xdeX&k)%+\xbf\x90\
\x89\x15g\xb1\x98\xc3qNb[\x12\xd7\xb5)\xe4\x07\xaf\xe8\xfba\xcfD\xb5L\xc6!\
\x9fs\x87Z\x80\x1d[\x92\xcdL\xe6\x93\xb8\xa7\xb2\xe8\xdaja\xeai\xdcv\xf3\xe0\
\x9e^4\x9c\x14\xb5Z2\x07-?P\x947j\x89\xd2\xd83\x82\xbe\xff\x12\xcb\xc5\xf2\
\xd4\xd3\xd9\xb3&\xaa\xf5\xd6\xe8@)\xe0\xb6\xeb\x83I\x91*\xc1i\x1f\n>\x0eR\
\xed\x83\xe5\x8d:\x96%\x91b\xf8d\xbf\x83\xe1\x8b_\xaa\xa5\x99\xf4@\xa4~]{i\
\xf5\xd6\xdaP{\xc7\xf7\xc1T\xadKR@.;Z\xd5\xd7-l\x0fKOJ\xc8\xbaa\x9c\xda@\xad\
n\x06\xf6\xdf~\xf1\xa4Z\x83q\xcdfI\x84\xed\x8c\x13\x92\x14\x02,\x19j\xcf\x93\
\xe0\x96\x10\xb6\x07-@\xa4\x0c\xb7QN\x82}a\xfb\x96\xc3\xed(lGo\xa0\x1b\x06\
\xa5\x19\xb8w{\x10n\ta\xfb\x8e\xb4\xf0\xee\x0b\xdb)!ea{:a\'A\xaa\xd6%!\xe0L1\
7\x13a{O\xacKg\x1a.=\x9a\xc1\xd6\x9e4\x91\xc8w)\xed\x84-K\xe2:\x82\\\x0c\x93\
\xda\xb0YBk\x18\xb5=-\x96\xefRZ(\x95J\x9c)\xe6XY\xce\xa6\x16\xa7i\x08\xd9\
\xfdT\xa83\xb5.M\x83\x1c\x84\x82v.+\xe8v\x87\x9a\xb9\xef\x92e\xc9\x0er\xb5\
\x9aO\xa5:\xbeW\x9bmK\x96\x0b\x99\x96/\x94%\x02\x8a\xa5r\xa28Rm\xa2\xf9\\\
\xbb3y^\x90\x8aZ\xb0^\x0fZ^\xa5\x99\x8c\x13\xdeh\x9e@\x97\x9aj\x13} \xb2%d\
\x92\x9a\x8b\xa2\xee\x05\x04\x91Q&\xe9\xb6\x93t\xef|\x89\x9c\x0b\x9b\xe6\x91\
b\x93\x18Kg"l\x97\xce\xe4z\xf6\x8d\xae\xad\xd7\xc2\xa3\xa5\xa7\x8c\x99\x10\\\
.d{\x9e}L\xe9\x99\x10\xdc3a{>\xcd\xb3\xa7\x87`\x7f5q\xbbc&}\xf0kOz<\xf4\x90\
\xdd~`\x0c\xd5Z:\xd3\xc8(\xcc\x84\xe0\xa7>\xbd1\x8bd\xfabjMt\xd2\x03\x02\xd2\
\x8a+U\x82~\x10\xb4~\x17\n\x99\xbe\x9e\x9fI\x91}\xcc\xe9\x90^\xf4f\xb2I?\xd5\
&Z\xa9\xfa\x14WLx\x00\xb1%\xa9n\x14\xa9T\xbd\xb1\xef <iI>\x9ak\xef\x8b\xf1\
\x03Epa\x0f\t*\xa5Y[\xafq\xa6\x18n\x8a\xed^]L\x8a\xd5_On\x9aK}\x90Y/\xd7\x10\
\x82T\x9d\xb2\x8c1\xac\xaeUcm\xfd\xeaF\xea\xceYMlT\r\xb5\xea\x06\xb6m\x8d\
\xb48\r\xdb)\xac\xcd\x1cJ\x81\xed\xe4(\x95\x86\x17\xdaL\x9c\xb3\x9a0&t\xb7\
\x83\xd1\xa5.\xad\xec\x90\xb7\xc9\xfc$\xba1U\x17\xd7B\xde\x8d5\xc4O\xaat\x1a\
\x86\xa9\x10l\xba\xe3\x14r\xe9\x98\xc7\x86)\x9dF!\xf5\x89>\xd3\xe5k\x94\x06\
\x06)\x9d\xe2 \xdd\xdb\xeb\x048QG\xaa\x98J\xa7\xfc\x80\x03\xff\xa5\x0c}\x97\
\x9a\x83T\xc6\xa1\xe7T\xa0QH\x95`t\xd1\xae4\xb1\x95Nnf\xc0\x8d\x06\n\x942\
\xe4\xb2\xedS\x81\x04\xdb\x18F_\xd3\xd7D\xaaM4\xda\x84\x82 \x1d\x9d\x8c\xd2\
\xe1@\xd3N#>9H\xdb\xbd.\xf2;M\xeb\xd1$\xd7M\xef+\x9d\xe2\xa2T*\xb1V*`\xdb\
\xa1p\x1c\xf5\xc3\x98\x86\xd2iO\xccg\xdd\x9b\t\x86\x99\xcf\xd2P:\xed\xads\
\x96\x98\x9b\xaam0\x8a\xbdq\xce2\x93\xc9\x90q1\xd3\xa3\xff<\xcfo\xf5\xc1(\
\xa6\xa5t\xda\x93\xa3\xff\xfaa\x94\xd2iTz\xce\xfarx\x01\x0e\xb0^.\':\xb2l_\
\xe9\x94\x04\xfbJ\xa7\x84\xd8W:\xc5\xc0\xff\x9fJ\xa7I\xb1\xb6^\xa3\xbcQ\'\
\xe3\xda\xb1\x94N\xc3\xa0\x94\xa6\x9a\xd0\xc7)\x8a\xa9\t\xdbZ\x9b\xd8J\xa7ib\
\xdf|\x96\x04\xfb>\xbc\r\xec\xfb\xf06\x90\x86\x0fo\xeb\xa0\x80;\x11\xad\x83\
\x02T\xd2CZn#\xfc?.P\xa9\xa6\xb8\x88\x1d\xa2\x00\x00\x00\x00IEND\xaeB`\x82'
def getscanprogress12Bitmap():
return wxBitmapFromImage(getscanprogress12Image())
def getscanprogress12Image():
stream = cStringIO.StringIO(getscanprogress12Data())
return wxImageFromStream(stream)
index.append('scanprogress12')
catalog['scanprogress12'] = ImageClass()
catalog['scanprogress12'].getData = getscanprogress12Data
catalog['scanprogress12'].getImage = getscanprogress12Image
catalog['scanprogress12'].getBitmap = getscanprogress12Bitmap
#----------------------------------------------------------------------
def getupdate01Data():
    """Return the raw PNG byte stream for the 'update01' image.

    img2py-generated embedded resource data; do not edit by hand.
    """
    return \
'\x89PNG\r\n\x1a\n\x00\x00\x00\rIHDR\x00\x00\x00.\x00\x00\x01,\x08\x06\x00\
\x00\x00\xbc\x06\x81\x88\x00\x00\x00\x04sBIT\x08\x08\x08\x08|\x08d\x88\x00\
\x00\x08\x9aIDATx\x9c\xed\x99\xb1n\xe3H\x12@\x1f\x07\x1bL \xe0\xb4\xa1\x94\n\
X\x7f@9\xf7~\xc0\xd8\xd9\xee\xfcA\xf3\x0f6=\xcc\x07\\x\x01\xfb\x03\x16\xd8\t\
\xed\xcd\xf6\x92\xb9M]\xd8x\x06p*]tC`\x1d8\x1a^\xd0l\x92\xa2$\xdb#\x95\xc35\xeb\
\xb9z\x80`\x89l\x92\xaf\x8b\xd7\xaa\x07n\x92\xa7\xd3)\xcf\x91\x17\x9fZ\xe0P\
\\xdc\x1a\x17\xb7\xc6\xc5\xad\xc4\xad\xc4\xc5\xadq\xf1\xfbX,\xea&\x7f\xea:}\
\x16\x8b\x1f\xdb\xbfuS\xb7\x9f\xcf9\xe77O%\x0b\xb0X\xfc\xd8\xa8F\x94\x00H\
\xda(\xda\xee\x8d \x81\xf4+\xed\xab\xeb\xb2\x11\t\xdc\xdc\xbc-\x1e:w\xf1\x14\
\x8b\xac\xc5\xa2nT\x01\t\xf7\xb4\x92\xed\x9bU\x81\x88\x08\xdc\xdc\xcc\xef\
\xed\xc0\xd1\xc5\xeb\xba\xden\n\xef\x90|\x08-\x99N\xb7\xcb\x1f-\xc7s\xee&\
\xe9\xa1h\xfe\xae\xed\xe7\x01\xd6:\xdd\xa4@l\xe1(\x11\xdf\x1ee\xd8\x9dN\x01$\
\xe58\x1a\x87;\xb6\x1f\xa7%\xc0Z\xf4\x8f3\x38\xf7\x96.\x11\x01\t\xbd\xbcj:R\
\x15T\x03\xa8\x00\xe9\xfc\x8bE\x9b\x9c\xf7\x07\x8b\xa7[)\xac\xa7\xc1\x16i\
\x1dtN\x02\xa2\x05\x04\x92\xb8\x0c\xa4\x07\x87\nq\xedt\x1a!'\xc8\xb3\x9d\x80\
\x0e\xca\xf1\xc5\xa2n\x94\x07*H\x8e\xf40\xe0\x14 \xeb\xd1\x1e7J\xf5\xbf=\x9b\
\xe6m\xe9\xfb\xcd\xcd\xb48(UR\xad\x1e\xa7\xc9\x00\t\xe4\x89f}{:,\xc6T0BH\x83\
/H\x9f\xeb"\x105\x1d\xaf\x83\xce\xe5t\xd9[\xbc\x8f\xf6\xaeJ\xd0J\x8f\xfb\xa2\
\x05\xd2\xe6v\x10\x88\x9a\xa2\xdbwt\xd0V"\xaa\xa1\xcfuH7E\x7fl\x0e\xc8\xf1\
\xfbfE\x06\x95#\xb5\x13\xca\xf4\xa9R\xa456(I\xbe'R\xc6\xaa\xfb\x9e\xae\x11\
\x89\xa7\x10OAc\xa0\n\r\x10\xf7\x8f\xb8\xea0\x056\xab\x88\x88"R\xa0\x15\xa0}\
\xd3\x18\x9b.\xaf\x05P):y%\x10B.\xd5\xf9\x8e\r\xee\x9cD\x90\n\xd5\x07Re\xd7\
\xac5R\x1cw\xa9\x1dt1\xe5r\xdbDZs\x91^Xh\x10\x1ab\x97\t\x05!\x04\xcaX1\xff\
\xf5\x82\xab+\x80Hu\xbd\xa9\xf1u\x96\xc3\xba\xae\x9b]!/\xa0\xab\x0e\x89\x9c6\
ie\xd7\xadb\x05dp\xbb\xcb.U\n\xa0Y\xbb_Q\x01)\x08\x12\xfa\x01\x0b@[}\xda\xc6\
\xa7E\xf1\xb8\x1c\xdf\xb5\xb6\xec*a`}\xe4\xe7\xcd\x92:\xa4\xc4V~\xb0S\x9a\
\x8d$\xabB\xeaX\xd4\xa2\x9fMi\xb6\x96\x81GE\xbc\x80\x8d\xe5e.\x87"\xeb\xd3\
\xb2\x0c\xbfK*eJ\x1fA\x8d\x15\x84@\xbe)\xc3\xf6\x99R\x01Mw\xa4\x1aY\xabB,\
\x8bG\xe4\xf8\xced\t\xed\x89\xd6\xcf\xac\xed]P\xf2\xec\xd7\xd6\xe1\xbc\xaa\
\xd5\xb0\xb6\xb2Q\xf2\x82\xaa\x17K\xfdHK \x1d\xccm\xfd\xbe\x03\x06\xe7|\xbe\
\xea\x96\xa4i\x15\x07\xc3%w\x1f\xc4\xd8N2\r\xaa\ry\xce\xda6\xdf\x0e%E\xd2S\
\x9e\x8c\xee\xc6\xe9iq\xd8\x94\xff\xfb\xef\xbf\x17"4]\xe4\xb2p\x97\x9b\xc3\
\xe5\xe9\xfa\xfc\xaa\xd2wl\xd7\xa3E\xde^\x96\x83\x8dR\xb4\x8ft\xc7(\x87\x1a[\
\x9d\x90>\xb9\x03YVA\xcb\xa6\xcf\xe7l\xa5\xa3t\x19\t\xcb\xb0GR\x80\x14h\xd9?\
\x87\x1e$~\xdf\xc3\xecSsp\xc4e\xb8\xee$\xe5\xbb\x0e\xf2\\5\xac/k\xb4\xff#m\
\xa8\x87wC\x86\xa9\xd5.\xc0\xf2\xf2W\x06\x8b\xb0\x87\xcba\xd3P\x14\xc5\xce\
\xa7\xed\xdcn\xbcn\x11)\x81\x80\xaa\xa4'\x9e\xa1\xf8\xb0\x16n\x8c\xd0\xbc>!\
\xd5\x7f)\xf3#\xe7\x9a\xc3Q\xc4;y\x00\xa9\xd6w\x8c"\xbeV$\xda\xc9k(\xaf\x11\
\xa4* V e\xbb\xfe\xde\xbc\xf6\xd1\xc4\xd7:0\x96\xcf\x1d\x80v\x96\xed\xbd\x13\
\xa9\xac\x8a\x94\xa8V\x08e\xd7\x89]\xd7=\xba8\x0c\xdfdU}\xc4\xf3\x8a@b\xb7M\
\xa4H\x193\\\rD\x88%\x87\xbd\xc9\xdaW|k\x07:\xb3\x80\\\xb7\xc2\x92$\x87\xd2\
\x0f\tg\xbe\xe2e\xed\x01\x11\xcf,\x06\xaf\x90uTE\xa4]A\x02\x8fzK\x9b1\x11\
\x7f\n\x9em\xaa\xb8\xb85.n\x8d\x8b[\xe3\xe2\xd6\xb8\xb85.n\x8d\x8b[\xe3\xe2\
\xd6\xb8\xb85.n\x8d\x8b[\xe3\xe2\xd6\xb8\xb85.n\x8d\x8b[\xe3\xe2\xd6\xb8\
\xb85.n\x8d\x8b[\xe3\xe2\xd6\xb8\xb85.n\x8d\x8b[\xe3\xe2\xd6\xb8\xb85.n\
\x8d\x8b[\xe3\xe2\xd6\xb8\xb85.n\x8d\x8b[\xe3\xe2\xd6\xb8\xb85.n\x8d\x8b[\
\xe3\xe2\xd6\xb8\xb85.n\x8d\x8b[\xe3\xe2\xd6\xb8\xb85.n\x8d\x8b[\xe3\xe2\xd6\
\xb8\xb85.n\x8d\x8b[\xe3\xe2\xd6\xb8\xb85.n\x8d\x8b[\xe3\xe2\xd6\xb8\xb85.n\
\x8d\x8b[\xe3\xe2\xd6\xb8\xb85.n\x8d\x8b[\xe3\xe2\xd6\xb8\xb85.n\x8d\x8b[\
\xe3\xe2\xd6\xb8\xb85.n\x8d\x8b[\xe3\xe2\xd6\xb8\xb85.n\x8d\x8b[\xe3\xe2\xd6\
\xb8\xb85.n\x8d\x8b[\xe3\xe2\xd6\xb8\xb85.n\x8d\x8b[\xe3\xe2\xd6\xb8\xb85.n\
\x8d\x8b[\xe3\xe2\xd6\xb8\xb85.n\x8d\x8b[\xe3\xe2\xd6\xb8\xb85.n\x8d\x8b[\
\xe3\xe2\xd6\xb8\xb85.n\x8d\x8b[\xe3\xe2\xd6\xb8\xb85.n\x8d\x8b[\xe3\xe2\xd6\
\xb8\xb85.n\x8d\x8b[\xe3\xe2\xd6\xb8\xb85.n\x8d\x8b[\xe3\xe2\xd6\xb8\xb85\
\xcfV\xfc\x9b}\x0e\xaa\xeb\xba\xf9\xe1\x87\x1f\xb8\xbb\xbbc6\x9b\xb1Z\xadx\
\xf9\xf2\xe5Z\x9b\xfb\xf6\r\xdb\x00\xdd\xfe\xbb\xbb\xbb\x8d\xef\xf9<@w\xae\
\xdf~\xfb\xad\xd8K<\xf3\xea\xd5+@\x81W[\xf6* \xa3\xdf\x8c\xb6\x81\xac\xff\
\x04\x02\x10\xbb_\x1f>\xbdf\xf2\x9f\x7f\x030\x9f\x9f\xf3\xe6\xcd\x05\xb0g\
\xc4\xfb\x8b\x86-\x17~<eY2\x9b\xbdb>?\x1fl\x8d\xdcNO\xf8\xfe\xf5{Rg\xfb\x00\
\\W}\x87\x9em\x8e\x1f$~;}w\xd0\xc5?~\xfc\xc8|~\x8ej\x89j\x89\x08D\x85I\x9d\
\xa3\x9d\xd1\xeew\x1e\x03\x07\xa5\xca\xa4>;\xe4p\xbe\xfd\xf6[\x96\xcb+B\xa8\
\x00\x88\xb1\xa4\n\x15\xaa\x00e\xdbJ\xe8;\x11\x80\x7f\x01\x07\x8a/\x97W\x88\
\x9c?\xdcp\x07\xab\xd5\x8a\xf9|E\x19S\xeeV\xa1"\x96\x11\t\xc3V\xc3\xc8\xc7\
\xae\x12\x1d\x94*\xeb\x83\xea\xf3Ie.\x10\x04\x82\x80j\xe4\xe4\xa7\x13\x96\
\xcb\x19}\xf5\xe9G\xff\x87O\xaf\x8f\x93*)\xc7\xf7O\x97\xd5j\xc5\xed\xf4\x1d\
\xcb\xff\xbe\x06\xe0\xbb\x17\x7fcR\xc3d\x0e}\xaa\xf4\x11\xff\xee\xc5/\xc7\
\x89\xf8\xa19>\x9b\xcd\x98\xd4g\xfc\xf9\xc7O\xfc\xf9\xc7O\x88\x80j\xc9ry\
\xd5\xb6\x18F]\x80\xd0E\xfc\xd9\x96\xc3/\x9e*\x10\xbb\xaaR\x96%UU\xa1\x9a'\
\x1a\x1d\xfd\xed\x07\xe7\x17-\x87\xb3\xd9\x8c\xdb\xe9\t1W\x95\xaa"\xc6\xd8\
\x0e\xdaa\x19L)\x93\x06m\xe2\xa0T\xe9sq?V\xab\x15\x93\xfa\x0c\x91\xb4fyw\
\xf3\x0e\x91\xd0V\xab\xcd\th>??NU9F9\xd4X\xa2mDE\xde\x83\xfe\x8cv\x15p\x18\
\xf5\x94\x9aGI\x95c\xe4\xb8\xfc\xfd\x92\x93v\xe90\xa9\xdf\x83T\x08\x10)\x19\
\xa7\xca\xa4>\xe3\xe5\xcb\x7f\x02\x7f\x81\x1c\x87\xc8\xa4\xce[\xd2\x92\xf6\
vz2j\x99:0\\\x1b}\xf1\x88C\xe0\xf4\xb4h\xb7\xa4I\xe7\xfa\xba\x01~f\x9c*\x93\
\xfa\x8c\xbb\xbb\x7f\x00\xff\xafu\xfc8\xe5\xf0\x1dM\xd3t\xdb\x8a\xa2\xa0\x7f\
\x02\x1a\x97\xc3\xab\xc3g\xce4\xba\xe3\x83\xed\xee#\x97\xc31}\x8e\xe7\xf2\
\xd2\x97\xc3\xcc\xde\xe2yew\x08yp\x8e\xe9;3\x8c\xb8\xecW\x0e\xeb\xban\x86\
\xbf\xf3t}\x88|\x1e\x9c\x9b\x8c;\x93:0\xa9\xf9\xbcTi\x9af\xed\x03)Z\x1f>\
\xbd\xde\xd7\xb9;\xc7\xb6\x88o\xa6J&<>\xe2i\xb0l\xb2Z\xad\xf8\xee\xc5/<E\
\xc4'\xf5\x19\xe8\xf7\\^^\x02\xaf\xb8x\xb3j\xf7|\x05O\xf9\xf7F|:\x9dn\rw]\
\xd7MN\x95\x03^\xabt\xe5ps\x12\xeb#;\x9f\xaf\xd8\xf62i\xef\x88\xf7\xa9\xb2?\
\x0f\x95\xc3\x8b\x8b\x0bNO\xcb\xc1\x9e#<\x01Y\x94\xc3\xcb\xcb\xcbv\xfa\xcf\
\x1c\xe1)\xff\xe3\xc7\x8f[/\xfa9<T\x0e\xd3\x843\xbcF\xdf\xf6\xc0\xc1\xf94\
\x11\xcf\xa9rzZ\xac\xa5\xcary\xd5\xbd\xb9=P\xfci"\x9eS\xe5\xfa\xbajKbb>?o\
\x8f\xf9\x0bD|\xdb\xfb\xc7\xfcH\xb8\\\xce6\xde\xe4f\xbe\xce:\xfe0\xb1}A\x19\
\xba\xc8\xa57\xad\xe9I\xa6\x8fX\xffT\x93\xf7\xdfN\xdf\x11\x82@}\xd6\xbd\x8e\
\xb8\x9d\x9e\xb4U$\xfd\xbe\xb8H/\xf1\xfb\x97\x89\x01\xf8\x15\x80b:\x9d~\xb6\
n]\xd7\xcd\xf9\xf9a\x0f\xca\x8f%\xff;%3\x9b\xcdx\xfb\xf6m\xb1\xb7\xf8\xe8\
\xa9j\x93\xf1\xfe\xe1\xd2\xfa\xb1\xec\xb8\xc6t:\xddO\xfc\xaf\xc0\xb3\x1d\
\x9c.n\x8d\x8b[\xf3l\xc5\xff\x07\x1c8\xab\xde\x1b\x0e\xd5\x8c\x00\x00\x00\
\x00IEND\xaeB`\x82'
def getupdate01Bitmap():
    """Return the embedded 'update01' resource as a wxBitmap."""
    image = getupdate01Image()
    return wxBitmapFromImage(image)
def getupdate01Image():
    """Decode the embedded 'update01' PNG bytes into a wxImage."""
    return wxImageFromStream(cStringIO.StringIO(getupdate01Data()))
# Register the 'update01' image in the module-wide resource catalog so
# callers can look up its data/image/bitmap accessors by name.
index.append('update01')
catalog['update01'] = ImageClass()
catalog['update01'].getData = getupdate01Data
catalog['update01'].getImage = getupdate01Image
catalog['update01'].getBitmap = getupdate01Bitmap
#----------------------------------------------------------------------
def getupdate02Data():
    """Return the raw PNG byte stream for the 'update02' image.

    img2py-generated embedded resource data; do not edit by hand.
    """
    return \
'\x89PNG\r\n\x1a\n\x00\x00\x00\rIHDR\x00\x00\x00.\x00\x00\x01,\x08\x06\x00\
\x00\x00\xbc\x06\x81\x88\x00\x00\x00\x04sBIT\x08\x08\x08\x08|\x08d\x88\x00\
\x00\x08\x9bIDATx\x9c\xed\x9a\xb1n\xe4F\x12@\x1f\x17\x0e6\x18\xe0f\xc3\x99t\
\x00\xeb\x03J\xb9\xfc\x01+e\xf6\xfeA\xf3\x0f\x9c\x1e\xfc\x01\x17^\xc0\xfe\
\x00\x03\xdePr\xe6K\xf6\x9c\xaa\xe0\xd8\x06\x94\xce\\tK\xc0\n\x14\x99\x174\
\x9b\xe4pG\xd2\xeepT\xbb\xda\xab\x07\x0c4C6\xc9\xd7\xc5\xea\xea&w\x8b\xf9|\
\xces\xe4\xc5\xa7\x168\x14\x17\xb7\xc6\xc5\xadqqk\\\xdc\x1a\x17\x7f\x88\xd5\
\xaan\xf2\xa7\xae\xd3g\xb5\xfa\xae\xfd[7u\xfb\xf9\x98s~\xf5T\xb2\x00\xab\xd5\
w\x8djD\t\x80\xa4\x8d\xa2\xed\xde\x08\x12H\xbf\xd2\xbe\xba.\x1b\x91\xc0\xcd\
\xcd\xdb\xe2\xb1s\x17O\xb1\xc8Z\xad\xeaF\x15\x90\xf0@+\xd9\xbfY\x15\x88\x88\
\xc0\xcd\xcd\xfc\xde\x0e\x1c]\xbc\xae\xebfW\xf8\x1e\xc9\xc7\xd0\x92\xf9|\xbf\
\xfc\xd1r<\xe7n\x92\x1e\x8a\xe6\xef\xda~\x1ea\xab\xd3M\n\xc4\x1e\x8e\x12\xf1\
\xfdQ\x86\xfb\xd3!\x80\xa4\x1cG\xe3p\xc7\xfe\xe3\xb4\x04\xd8\x8a\xfeq\x06\
\xe7\xc1\xd2%" \xa1\x97WMG\xaa\x82j\x00\x15 \x9d\x7f\xb5\x8aM\xce\xfb\xc9\
\xe2\xe9V\n\xdbi\xb0GZ\x07\x9d\x93\x80h\x01\x81$.\x03\xe9\xc1\xa1B\xdc:\x9dF\
\xc8\t\xf2l'\xa0I9\xbeZ\xd5\x8d\xf2H\x05\xc9\x91\x1e\x06\x9c\x02d;\xda\xe3F\
\xa9\xfe\xb7g\xd3\xbc-}\xbf\xb9\x99\x17\x93R%\xd5\xeaq\x9a\x0c\x90@\x9eh\xb6\
\xb7\xa7\xc3bL\x05#\x844\xf8\x82\xf4\xb9.\x02Q\xd3\xf1:\xe8\\N\x97\x83\xc5\
\xfbh\xdfW\tZ\xe9q_\xb4@\xda\xdc\x0e\x02QSt\xfb\x8e\x0e\xdaJD5\xf4\xb9\x0e\
\xe9\xa6\xe8w\xcd\x84\x1c\x7fhVdP9R;\xa1L\x9f*EZc\x83\x92\xe4{"e\xac\xba\xef\
\xe9\x1a\x91x\n\xf1\x144\x06\xaa\xd0\x00\xf1\xf0\x88\xab\x0eS`\xb7\x8a\x88"R\
\xa0\x15\xa0}\xd3\x18\x9b.\xaf\x05P):y%\x10B.\xd5\xf9\x8e\r\xee\x9cD\x90\n\
\xd5\x07Re\xd7\xac5R\x1cw\xa9\x1dt1\xe5r\xdbDZs\x91^Xh\x10\x1ab\x97\t\x05!\
\x04\xcaX1\xff\xf5\x82\xab+\x80Hu\xbd\xa9\xf1u\x96\xc3\xba\xae\x9b]!/\xa0\
\xab\x0e\x89\x9c6ie\xd7\xadb\x05dp\xbb\xcb.U\n\xa0Y\xbb_Q\x01)\x08\x12\xfa\
\x01\x0b@[}\xda\xc6\xa7E\xf1\xb8\x1c\xdf\xb5\xb6\xec*a`}\xe4\xe7\xcd\x92:\
\xa4\xc4V~\xb0S\x9a\x8d$\xabB\xeaX\xd4\xa2\x9fMi\xb6\x96\x81GE\xbc\x80\x8d\
\xe5e.\x87"\xeb\xd3\xb2\x0c\xbfK*eJ\x1fA\x8d\x15\x84@\xbe)\xc3\xf6\x99R\x01M\
w\xa4\x1aY\xabB,\x8bG\xe4\xf8\xced\t\xed\x89\xd6\xcf\xac\xed]P\xf2\xec\xd7\
\xd6\xe1\xbc\xaa\xd5\xb0\xb6\xb2Q\xf2\x82\xaa\x17K\xfdHK \x1d\xccm\xfd\xbe\
\x03\x06\xe7|\xbe\xea\x96\xa4i\x15\x07\xc3%w\x1f\xc4\xd8N2\r\xaa\ry\xce\xda6\
\xdf\x0e%E\xd2S\x9e\x8c\xee\xc6\xe9iq\xd8\x94\xff\xfb\xef\xbf\x17"4]\xe4\xb2\
p\x97\x9b\xc3\xe5\xe9\xfa\xfc\xaa\xd2wl\xd7\xa3E\xde^\x96\x83\x8dR\xb4\x8ft\
\xc7(\x87\x1a[\x9d\x90>\xb9\x03YVA\xcb\xa6\xcf\xe7l\xa5\xa3t\x19\t\xcb\xb0GR\
\x80\x14h\xd9?\x87\x1e$~\xdf\xc3\xecSsp\xc4e\xb8\xee$\xe5\xbb\x0e\xf2\\5\xac\
/k\xb4\xff#m\xa8\x87wC\x86\xa9\xd5.\xc0\xf2\xf2W\x06\x8b\xb0\x87\xcba\xd3P\
\x14\xc5\xce\xa7\xed\xdcn\xbcn\x11)\x81\x80\xaa\xa4'\x9e\xa1\xf8\xb0\x16n\
\x8c\xd0\xbc>!\xd5\x7f)\xf3#\xe7\x9a\xc3Q\xc4;y\x00\xa9\xd6w\x8c"\xbeV$\xda\
\xc9k(\xaf\x11\xa4* V e\xbb\xfe\xde\xbc\xf6\xd1\xc4\xd7:0\x96\xcf\x1d\x80v\
\x96\xed\xbd\x13\xa9\xac\x8a\x94\xa8V\x08e\xd7\x89]\xd7=\xba8\x0c\xdfdU}\xc4\
\xf3\x8a@b\xb7M\xa4H\x193\\\rD\x88%\x87\xbd\xc9\xdaW|k\x07:\xb3\x80\\\xb7\
\xc2\x92$\x87\xd2\x0f\tg\xbe\xe2e\xed\x01\x11\xcf,\x06\xaf\x90uTE\xa4]A\x02\
\x8fzK\x9b1\x11\x7f\n\x9em\xaa\xb8\xb85.n\x8d\x8b[\xe3\xe2\xd6\xb8\xb85.n\
\x8d\x8b[\xe3\xe2\xd6\xb8\xb85.n\x8d\x8b[\xe3\xe2\xd6\xb8\xb85.n\x8d\x8b[\
\xe3\xe2\xd6\xb8\xb85.n\x8d\x8b[\xe3\xe2\xd6\xb8\xb85.n\x8d\x8b[\xe3\xe2\xd6\
\xb8\xb85.n\x8d\x8b[\xe3\xe2\xd6\xb8\xb85.n\x8d\x8b[\xe3\xe2\xd6\xb8\xb85.n\
\x8d\x8b[\xe3\xe2\xd6\xb8\xb85.n\x8d\x8b[\xe3\xe2\xd6\xb8\xb85.n\x8d\x8b[\
\xe3\xe2\xd6\xb8\xb85.n\x8d\x8b[\xe3\xe2\xd6\xb8\xb85.n\x8d\x8b[\xe3\xe2\xd6\
\xb8\xb85.n\x8d\x8b[\xe3\xe2\xd6\xb8\xb85.n\x8d\x8b[\xe3\xe2\xd6\xb8\xb85.n\
\x8d\x8b[\xe3\xe2\xd6\xb8\xb85.n\x8d\x8b[\xe3\xe2\xd6\xb8\xb85.n\x8d\x8b[\
\xe3\xe2\xd6\xb8\xb85.n\x8d\x8b[\xe3\xe2\xd6\xb8\xb85.n\x8d\x8b[\xe3\xe2\xd6\
\xb8\xb85.n\x8d\x8b[\xe3\xe2\xd6\xb8\xb85.n\x8d\x8b[\xe3\xe2\xd6\xb8\xb85.n\
\x8d\x8b[\xe3\xe2\xd6\xb8\xb85.n\x8d\x8b[\xe3\xe2\xd6\xb8\xb85.n\x8d\x8b[\
\xe3\xe2\xd6\xb8\xb85.n\x8d\x8b[\xe3\xe2\xd6\xb8\xb85.n\x8d\x8b[\xe3\xe2\xd6\
\xb8\xb85.n\x8d\x8b[\xe3\xe2\xd6\xb8\xb85.n\x8d\x8b[\xe3\xe2\xd6\xb8\xb85.n\
\x8d\x8b[\xe3\xe2\xd6\xb8\xb85.n\x8d\x8b[\xe3\xe2\xd6\xb8\xb85\xcfV\xfc\xab\
C\x0e\xaa\xeb\xba\xf9\xf6\xdb\x6f\xb9\xbb\xbbc\xb1X\xb0\xd9l\x78\xf9\xf2\
\xe5V\x9b\x87\xf6\x0d\xdb\x00\xdd\xfe\xbb\xbb\xbb\xce\xf7|\x1e\xa0;\xd7/\
\xbf\xfcR\x1c$\x9ey\xfd\xfa5\xa0\xc0\xeb={\x15\x90\xd1oF\xdb@\xb6\x7f\x02\
\x01\x88\xdd\xaf?\xfez\xc3\xec?\xff\x06`\xb9<\xe7\x87\x1f.\x80\x03#\xde_4\
\xec\xb9\xf0\x87S\x96%\x8b\xc5k\x96\xcb\xf3\xc1\xd6\xc8\xed\xfc\x84o\xde\
\xfcN\xeal\x1f\x80\xeb\xaa\xef\xd0\xb3\xcd\xf1I\xe2\xb7\xf3w\x93.\xfe\xfe\
\xfd{\x96\xcbsTKTKD *\xcc\xea\x1c\xed\x8cv\xbf\xf3\x18\x98\x94*\xb3\xfal\
\xca\xe1\xbcz\xf5\x8a\xf5\xfa\x8a\x10*\x00b,\xa9B\x85*@\xd9\xb6\x12\xfaN\
\x04\xe0_\xc0D\xf1\xf5\xfa\n\x91\xf3\xc7\x1b\xde\xc3f\xb3a\xb9\xdcP\xc6\x94\
\xbbU\xa8\x88eD\xc2\xb0\xd50\xf2\xb1\xabD\x93Re{P}<\xa9\xcc\x05\x82@\x10P\
\x8d\x9c|\x7f\xc2z\xbd\xa0\xaf>\xfd\xe8\xff\xe3\xaf7\xc7I\x95\x94\xe3\x87\
\xa7\xcbf\xb3\xe1v\xfe\x8e\xf5\x7f\xdf\x00\xf0\xf5\x8b\xbf1\xaba\xb6\x84>U\
\xfa\x88\x7f\xfd\xe2\xa7\xe3D|j\x8e/\x16\x0bf\xf5\x19\x7f\xfe\xf6=\x7f\xfe\
\xf6="\xa0Z\xb2^_\xb5-\x86Q\x17 t\x11\x7f\xb6\xe5\xf0\x93\xa7\n\xc4\xae\xaa\
\x94eIUU\xa8\xe6\x89FG\x7f\xfb\xc1\xf9I\xcb\xe1b\xb1\xe0v~B\xccU\xa5\xaa\x88\
1\xb6\x83vX\x06S\xca\xa4A\x9b\x98\x94*}.\x1e\xc6f\xb3aV\x9f!\x92\xd6,\xefn\
\xde!\x12\xdaj\xb5;\x01-\x97\xe7\xc7\xa9*\xc7(\x87\x1aK\xb4\x8d\xa8\xc8\xef\
\xa0?\xa2]\x05\x1cF=\xa5\xe6QR\xe5\x189.\x7f\xbf\xe4\xa4]:\xcc\xea\xdfA*\x04\
\x88\x94\x8cSeV\x9f\xf1\xf2\xe5?\x81\xcf \xc7!2\xab\xf3\x96\xb4\xa4\xbd\x9d\
\x9f\x8cZ\xa6\x0e\x0c\xd7F\x9f<\xe2\x108=-\xda-i\xd2\xb9\xben\x80\x1f\x19\
\xa7\xca\xac>\xe3\xee\xee\x1f\xc0\xffk\x1d?N9|G\xd34\xdd\xb6\xa2(\xe8\x9f\
\x80\xc6\xe5\xf0j\xfa\xcc\x99Fw|\xb4\xddC\xe4r8\xa6\xcf\xf1\\^\xfar\x989X<\
\xaf\xec\xa6\x90\x07\xe7\x98\xbe3\xc3\x88\xcba\xe5\xb0\xae\xebf\xf8;O\xd7S\
\xe4\xf3\xe0\xdce\xdc\x99\xd4\x81Y\xcd\xc7\xa5J\xd34[\x1fH\xd1\xfa\xe3\xaf7\
\x87:w\xe7\xd8\x17\xf1\xddT\xc9\x84\x0f\x8fx\x1a,\xbbl6\x1b\xbe~\xf1\x13O\
\x11\xf1Y}\x06\xfa\r\x97\x97\x97\xc0k.~\xd8\xb4{\xbe\x80\xa7\xfc\x07#>\x9f\
\xcf\xf7\x86\xbb\xae\xeb&\xa7\xca\x84\xd7*]9\xdc\x9d\xc4\xfa\xc8.\x97\x1b\
\xf6\xbdL:8\xe2}\xaa\x1c\xcec\xe5\xf0\xe2\xe2\x82\xd3\xd3r\xb0\xe7\x08O@\x16\
\xe5\xf0\xf2\xf2\xb2\x9d\xfe3Gx\xca\x7f\xff\xfe\xfd\xde\x8b~\x0c\x8f\x95\xc3\
4\xe1\x0c\xaf\xd1\xb7\x9d88\x9f&\xe29UNO\x8b\xadTY\xaf\xaf\xba7\xb7\x13\xc5\
\x9f&\xe29U\xae\xaf\xab\xb6$&\x96\xcb\xf3\xf6\x98\xcf \xe2\xfb\xde?\xe6G\xc2\
\xf5z\xb1\xf3&7\xf3e\xd6\xf1\xc7\x89\xed\x0b\xca\xd0E.\xbdiMO2}\xc4\xfa\xa7\
\x9a\xbc\xffv\xfe\x8e\x10\x04\xea\xb3\xeeu\xc4\xed\xfc\xa4\xad"\xe9\xf7\xc5E\
z\x89\xdf\xbfL\x0c\xc0\xcf\x00\x14\xf3\xf9\xfc\xa3u\xeb\xban\xce\xcf\xa7=(\
\x7f(\xf9\x9fS2\x8b\xc5\x82\xb7o\xdf\x16\x07\x8b\x8f\x9e\xaav\x19\xef\x1f.\
\xad?\x94{\xae1\x9f\xcf\x0f\x13\xff\x1cx\xb6\x83\xd3\xc5\xadqqk\x9e\xad\xf8\
\xff\x00.6\xa4\xac;\xb4\x08\x88\x00\x00\x00\x00IEND\xaeB`\x82'
def getupdate02Bitmap():
    """Return the embedded 'update02' resource as a wxBitmap."""
    image = getupdate02Image()
    return wxBitmapFromImage(image)
def getupdate02Image():
    """Decode the embedded 'update02' PNG bytes into a wxImage."""
    return wxImageFromStream(cStringIO.StringIO(getupdate02Data()))
# Register the 'update02' image in the module-wide resource catalog so
# callers can look up its data/image/bitmap accessors by name.
index.append('update02')
catalog['update02'] = ImageClass()
catalog['update02'].getData = getupdate02Data
catalog['update02'].getImage = getupdate02Image
catalog['update02'].getBitmap = getupdate02Bitmap
| gpl-2.0 |
MPIBGC-TEE/CompartmentalSystems | notebooks/ELM_dask.py | 1 | 1730 | #from dask.distributed import Client
import xarray as xr
import numpy as np
import pandas as pd
import importlib
import ELMlib
importlib.reload(ELMlib)
#client = Client(n_workers=2, threads_per_worker=2, memory_limit='1GB')
#client
#ds = xr.open_dataset('../Data/14C_spinup_holger_fire.2x2_small.nc')
from netCDF4 import Dataset
ds = Dataset('../Data/14C_spinup_holger_fire.2x2_small.nc')
#lat, lon = ds.coords['lat'], ds.coords['lon']
lat, lon = ds['lat'][:], ds['lon'][:]
lat_indices, lon_indices = np.meshgrid(
range(len(lat)),
range(len(lon)),
indexing='ij'
)
lats, lons = np.meshgrid(lat, lon, indexing='ij')
df_pd = pd.DataFrame(
{
'cell_nr': range(len(lat)*len(lon)),
'lat_index': lat_indices.flatten(),
'lon_index': lon_indices.flatten(),
'lat': lats.flatten(),
'lon': lons.flatten()
}
)
import dask.array as da
import dask.dataframe as dask_df
df_dask = dask_df.from_pandas(df_pd, npartitions=4)
df_dask
parameter_set = ELMlib.load_parameter_set(
ds_filename = '../Data/14C_spinup_holger_fire.2x2_small.nc',
time_shift = -198*365,
nstep = 10
)
def func(line):
    """Load the 12C model data for the grid cell described by *line*.

    *line* is a dataframe row carrying cell_nr, lat_index and lon_index.
    """
    location = {
        'cell_nr': int(line.cell_nr),
        'lat_index': int(line.lat_index),
        'lon_index': int(line.lon_index)
    }
    cell_nr, log, xs_12C_data, us_12C_data, rs_12C_data = \
        ELMlib.load_model_12C_data(parameter_set, location)
    return cell_nr, log, xs_12C_data, us_12C_data, rs_12C_data
# Apply the per-cell loader across all rows; 'meta' declares the result dtype
# so dask does not have to sample the function to infer it.
df_dask_2 = df_dask.apply(func, axis=1, meta=('A', 'object'))
df_dask_2.compute()
# NOTE(review): the bare expressions below only display output in a notebook
# session; they have no effect when this file is run as a plain script.
type(df_dask_2)
df_dask_2
list(df_dask_2)
pd.DataFrame(list(df_dask_2), columns=('cell_nr', 'log', 'xs_12C_data', 'us_12C_data', 'rs_12C_data'))
| mit |
gusgollings/scbdo | scbdo/tod.py | 1 | 16340 |
# SCBdo : DISC Track Racing Management Software
# Copyright (C) 2010 Nathan Fraser
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""Time of Day (ToD) functions and abstract class.
This module defines the tod class and some utility functions.
ToD records are used to establish net times
Time of Day quantities are stored as a positive decimal number of
seconds in the range [0, 86400). The overflow value '24:00:00'
(equivalent to 86400 seconds) is forbidden and its presence
flags a programming error or an error in the attached timing device.
All time of day and net time values must be less than 24hrs.
'rounding' is by truncation toward zero. If a negative value is
specified by manually setting the timeval attribute, the resulting
timestring may not be what is expected. Arithmetic will still be
exact, however a negative result may not display as expected.
A time of day object includes:
- timeval : decimal tod in seconds (eg 1.2345, 4506.9023, etc)
- index : 4 character identifier string (eg '1' to '9999')
- chan : 3 character channel string from source (eg 'C0', 'C2M', etc)
- refid : string reference id, used for RFID tag events (eg '75ae7f')
Supported ToD String Patterns:
[[HH:]MM:]SS[.dcmz] Canonical
[[HH-]MM-]SS[.dcmz] Keypad
[[HHh]MM:]SS[.dcmz] Result
Arithmetic operations on ToD types:
The only supported arithmetic operations on ToD objects are
subtraction and addition. Subtraction obtains a net time from
two time of day values, while addition obtains a time of day
from a time of day and a net time. These conventions are assumed
and have the following peculiarities:
Given two tod objects a and b, the statement:
c = a - b
Creates a "net time" c such that:
c.timeval == (a.timeval - b.timeval) if a.timeval >= b.timeval
OR
c.timeval == (86400 - b.timeval + a.timeval) if a.timeval < b.timeval
'c' is a new tod object, whose timeval is the exact number of
seconds between tod 'b' and tod 'a'. 'b' is always
assumed to have happened before 'a', and so if the value of
'a.timeval' is less than the value of 'b.timeval', overflow
is assumed.
Given a tod object a and a "net time" b, the statement:
c = a + b
Creates a new tod c such that:
c.timeval == (a.timeval + b.timeval) % 86400
'c' is a new tod object, whose timeval is exactly the number of
seconds in net time 'b' after tod 'a'.
In both cases, the index chan and refid are set on 'c' as follows:
index = ''
chan = 'NET'
refid = ''
Normalised tod strings are printed as on the Timy receipt:
'NNNN CCC HH:MM:SS.dcmz REFID'
Where 'NNNN' is the index, 'CCC' is the chan and the time is
printed, space padded, according to the requested precision.
"""
import decimal # ToD internal representation
import re # used to scan ToD string: HH:MM:SS.dcmz
import time
# Quantisation exponents used with Decimal.quantize() to truncate a time
# value to a fixed number of decimal places.
QUANT_5PLACES = decimal.Decimal('0.00001') # does not work with Timy printer
QUANT_4PLACES = decimal.Decimal('0.0001')
QUANT_3PLACES = decimal.Decimal('0.001')
QUANT_2PLACES = decimal.Decimal('0.01')
QUANT_1PLACE = decimal.Decimal('0.1')
QUANT_0PLACES = decimal.Decimal('1')
# Lookup tables indexed by the number of decimal places [0..5]:
QUANT = [QUANT_0PLACES, QUANT_1PLACE, QUANT_2PLACES,
         QUANT_3PLACES, QUANT_4PLACES, QUANT_5PLACES]
QUANT_FW = [2, 4, 5, 6, 7, 8]            # field width of the seconds part
QUANT_TWID = [8, 10, 11, 12, 13, 14]     # total width of a padded time string
QUANT_PAD = [' ', ' ', ' ', ' ', '', '']  # trailing pad for short precisions
# Matches [[HH:]MM:]SS[.dcmz] with ':' or '-' separators ('h' after hours).
TOD_RE=re.compile(r'^(?:(?:(\d{1,2})[h:-])?(\d{1,2})[:-])?(\d{1,2}(?:\.\d+)?)$')
def str2tod(timeval=''):
    """Best-effort conversion of a string to a tod object.

    Returns a tod for any parseable, non-empty input and None otherwise;
    unlike the tod constructor it never raises for bad input.
    """
    ret = None
    if timeval is not None and timeval != '':
        try:
            ret = tod(timeval)
        except Exception:
            # A bare 'except:' here would also swallow KeyboardInterrupt and
            # SystemExit; catch Exception so only genuine parse/value errors
            # (decimal errors, the range assertion, TypeError) are ignored.
            pass
    return ret
def dec2str(dectod=None, places=4, zeros=False):
    """Return formatted string for given tod decimal value.

    Convert the decimal number dectod to a time string with the
    supplied number of decimal places.

    Note: negative timevals match case one or three depending on
    value of zeros flag, and are truncated toward zero.
    Oversized timevals will grow in width

    optional argument 'zeros' will use leading zero chars. eg:

         '00h00:01.2345'   zeros=True
         '1.2345'          zeros=False
    """
    strtod = None
    # places selects one of the QUANT quantisers; anything else is a bug.
    assert places >= 0 and places <= 5, 'places not in range [0, 5]'
    if dectod is not None:              # conditional here?
        # Seconds are truncated (ROUND_FLOOR), never rounded up, so a
        # displayed time is never later than the measured time.
        if zeros or dectod >= 3600:     # NOTE: equal compares fine w/decimal
            fmt = '{0}h{1:02}:{2:0{3}}'                 # 'HHhMM:SS.dcmz'
            if zeros:
                fmt = '{0:02}:{1:02}:{2:0{3}}'          # '00h00:0S.dcmz'
            strtod = fmt.format(int(dectod)//3600,
                                (int(dectod)%3600)//60,
                                dectod.quantize(QUANT[places],
                                          rounding=decimal.ROUND_FLOOR)%60,
                                QUANT_FW[places])
        elif dectod >= 60:      # MM:SS.dcmz
            strtod = '{0}:{1:0{2}}'.format(int(dectod)//60,
                                dectod.quantize(QUANT[places],
                                          rounding=decimal.ROUND_FLOOR)%60,
                                QUANT_FW[places])
        else:   # SS.dcmz or -SSSSS.dcmz
            strtod = '{0}'.format(dectod.quantize(QUANT[places],
                                  rounding=decimal.ROUND_FLOOR))
    return strtod
def str2dec(timestr=''):
    """Return decimal for given string.

    Convert the time of day value represented by the string supplied
    to a decimal number of seconds.

    Attempts to match against the common patterns:

        HHhMM:SS.dcmz           Result style
        HH:MM:SS.dcmz           Canonical
        HH-MM-SS.dcmz           Keypad

    In optional groups as follows:

        [[HH:]MM:]SS[.dcmz]

    NOTE: Now truncates all incoming times to 4 places to avoid
          inconsistencies.
    """
    dectod=None
    timestr=timestr.strip()
    if timestr == 'now':
        # Current local time of day in seconds since local midnight.
        ltoft = time.localtime().tm_isdst * 3600        # DST Hack
        dectod = decimal.Decimal(str(
                   (time.time() - (time.timezone - ltoft)) % 86400))
        # !!ERROR!! 2038, UTC etc -> check def Unix time
    else:
        m = TOD_RE.match(timestr)
        if m is not None:
            # group(3)=seconds, group(2)=minutes, group(1)=hours; the
            # latter two are optional and default to 0.
            dectod = decimal.Decimal(m.group(3))
            dectod += decimal.Decimal(m.group(2) or 0) * 60
            dectod += decimal.Decimal(m.group(1) or 0) * 3600
        else:
            # last attempt - try and handle as other decimal constructor
            dectod = decimal.Decimal(timestr)
    return dectod.quantize(QUANT[4], rounding=decimal.ROUND_FLOOR)
class tod(object):
    """A class for representing time of day and RFID events."""

    def __init__(self, timeval=0, index='', chan='', refid=''):
        """Construct tod object.

        Keyword arguments:
        timeval -- time value to be represented (string/int/decimal/tod)
        index -- tod index identifier string
        chan -- channel string
        refid -- a reference identifier string
        """
        # index and chan are clipped to the widths used on the receipt line
        self.index = str(index)[0:4]
        self.chan = str(chan)[0:3]
        self.refid = refid
        if type(timeval) is str:
            self.timeval = str2dec(timeval)
        elif type(timeval) is tod:
            self.timeval = timeval.timeval
        else:
            self.timeval = decimal.Decimal(timeval)
        # Invariant documented at module level: 0 <= timeval < 86400
        assert self.timeval >= 0 and self.timeval < 86400, 'timeval not in range [0, 86400)'

    def __str__(self):
        """Return a normalised tod string."""
        return self.refstr()

    def __repr__(self):
        """Return object representation string."""
        return "tod('{0}', '{1}', '{2}', '{3}')".format(str(self.timeval),
                   str(self.index), str(self.chan), str(self.refid))

    def refstr(self, places=4):
        """Return 'normalised' string form.

        'NNNN CCC HH:MM:SS.dcmz REFID'

        to the specified number of decimal places in the set
        [0, 1, 2, 3, 4, 5]
        """
        return '{0: >4} {1: <3} {2} {3}'.format(self.index, self.chan,
                                        self.timestr(places), self.refid)

    def truncate(self, places=4):
        """Return a new ToD object with a truncated time value."""
        # index/chan/refid are reset; only the time value carries over
        return tod(timeval=self.timeval.quantize(QUANT[places],
                   rounding=decimal.ROUND_FLOOR), index='', chan='ToD', refid='')

    def as_hours(self, places=0):
        """Return the tod value in hours, truncated to the desired places."""
        return (self.timeval / 3600).quantize(QUANT[places],
                   rounding=decimal.ROUND_FLOOR)

    def as_seconds(self, places=0):
        """Return the tod value in seconds, truncated to the desired places."""
        return self.timeval.quantize(QUANT[places],
                   rounding=decimal.ROUND_FLOOR)

    def as_minutes(self, places=0):
        """Return the tod value in minutes, truncated to the desired places."""
        return (self.timeval / 60).quantize(QUANT[places],
                   rounding=decimal.ROUND_FLOOR)

    def timestr(self, places=4, zeros=False):
        """Return time string component of the tod, whitespace padded."""
        return '{0: >{1}}{2}'.format(dec2str(self.timeval, places, zeros),
                   QUANT_TWID[places], QUANT_PAD[places])

    def rawtime(self, places=4, zeros=False):
        """Return time string component of the tod, without padding."""
        return dec2str(self.timeval, places, zeros)

    def speedstr(self, dist=200):
        """Return an average speed estimate for the provided distance."""
        if self.timeval == 0:
            # avoid division by zero for an unset/zero time
            return '---.--- km/h'
        return '{0:7.3f} km/h'.format(3.6 * float(dist) / float(self.timeval))

    def copy(self):
        """Return a copy of the supplied tod."""
        return tod(self.timeval, self.index, self.chan, self.refid)

    # Rich comparisons operate on the decimal timeval only; a non-tod
    # operand is compared against timeval directly.
    def __lt__(self, other):
        if type(other) is tod:
            return self.timeval < other.timeval
        else:
            return self.timeval < other

    def __le__(self, other):
        if type(other) is tod:
            return self.timeval <= other.timeval
        else:
            return self.timeval <= other

    def __eq__(self, other):
        if type(other) is tod:
            return self.timeval == other.timeval
        else:
            return self.timeval == other

    def __ne__(self, other):
        if type(other) is tod:
            return self.timeval != other.timeval
        else:
            return self.timeval != other

    def __gt__(self, other):
        if type(other) is tod:
            return self.timeval > other.timeval
        else:
            return self.timeval > other

    def __ge__(self, other):
        if type(other) is tod:
            return self.timeval >= other.timeval
        else:
            return self.timeval >= other

    def __sub__(self, other):
        """Compute time of day subtraction and return a NET tod object.

        NOTE: 'other' always happens _before_ self, so a smaller value
              for self implies rollover of the clock. This mods all net
              times by 24Hrs.
        """
        if type(other) is tod:
            oft = None
            if self.timeval >= other.timeval:
                oft = self.timeval - other.timeval
            else:
                # assume the 24h clock wrapped between other and self
                oft = 86400 - other.timeval + self.timeval
            return tod(timeval=oft, index='', chan='NET', refid='')
        else:
            raise TypeError('Cannot subtract {0} from tod.'.format(
                      str(type(other).__name__)))

    def __add__(self, other):
        """Compute time of day addition and return a new tod object.

        NOTE: 'other' is assumed to be a NET time interval. The returned
              tod will have a timeval mod 86400.
        """
        if type(other) is tod:
            oft = (self.timeval + other.timeval) % 86400
            return tod(timeval=oft, index='', chan='ToD', refid='')
        else:
            raise TypeError('Cannot add {0} to tod.'.format(
                      str(type(other).__name__)))
# ToD 'constants'
ZERO = tod()
MAX = tod('23h59:59.9999')
# Fake times for special cases. Each coded 'finish' gets a distinct value
# just above MAX so that, when inserted into a todlist, the ordering is
# catch < real times < caught < abort < dsq < dnf < dns. The direct
# timeval mutation below bypasses the range assertion in tod.__init__.
FAKETIMES = {
  'catch':ZERO,
  'max':MAX.copy(),
  'caught':MAX.copy(),
  'abort':MAX.copy(),
  'dsq':MAX.copy(),
  'dnf':MAX.copy(),
  'dns':MAX.copy()}
extra = decimal.Decimal('0.00001')
cof = decimal.Decimal('0.00001')
for c in ['caught', 'abort', 'dsq', 'dnf', 'dns']:
    FAKETIMES[c].timeval += cof
    cof += extra
class todlist():
    """Ordered collection of tod results with shared-rank lookup."""

    def __init__(self, lbl=''):
        self.__label = lbl
        self.__store = []

    def __iter__(self):
        return iter(self.__store)

    def __len__(self):
        return len(self.__store)

    def __getitem__(self, key):
        return self.__store[key]

    def rank(self, bib, series=''):
        """Return current 0-based rank for given bib (ties share a rank)."""
        place = 0
        prev = None
        for entry in self.__store:
            if prev is not None and entry != prev:
                place += 1
            if entry.refid == bib and entry.index == series:
                return place
            prev = entry
        return None

    def clear(self):
        """Discard all stored times."""
        self.__store = []

    def remove(self, bib, series=''):
        """Delete every entry matching the given bib and series."""
        self.__store = [e for e in self.__store
                        if e.refid != bib or e.index != series]

    def insert(self, t, bib=None, series=''):
        """Insert t into the list, keeping ascending time order."""
        if t in FAKETIMES:  # translate a coded 'finish' into its fake time
            t = FAKETIMES[t]
        if type(t) is tod:
            if bib is None:
                bib = t.index
            entry = tod(timeval=t.timeval, chan=self.__label,
                        refid=bib, index=series)
            for pos, existing in enumerate(self.__store):
                if entry < existing:
                    self.__store.insert(pos, entry)
                    return
            # larger than (or tied with) everything stored so far
            self.__store.append(entry)
if __name__ == "__main__":
    # Ad-hoc self test: exercise parsing of the supported source formats,
    # then tod arithmetic and string formatting.
    srcs = ['1:23:45.6789', '1:23-45.6789', '1-23-45.6789',
            '1:23:45', '1:23-45', '1-23-45',
            '3:45.6789', '3-45.6789',
            '3:45', '3-45',
            '45.6789', '5.6',
            '45',
            1.4, float('1.4'), decimal.Decimal('1.4'), '1.4',
            10123, float('10123'), decimal.Decimal('10123'), '10123',
            10123.456, float('10123.456'),
            decimal.Decimal('10123.456'), '10123.456',
            '-10234', '87012', '0', '86400', '86399.9999',
            'inf', 'nan', 'zero', 'now', '-inf',
            tod(0, 'ZERO'), tod('now', 'NOW') ]
    print ('1: Check Source Formats')
    for src in srcs:
        try:
            print ('\t' + repr(src) + ' =>\t' + str(tod(src)) + '/' + str(str2tod(src)))
        except Exception as e:
            # invalid sources are reported rather than aborting the run
            print ('\t' + repr(src) + ' =>\t' + str(e) + '/' + str(str2tod(src)))
    print ('2: ToD Subtraction')
    a = tod(0, '1', 'C0')
    print ('\t     a: '+ str(a))
    b = tod('12.1234', '2', 'C1')
    print ('\t     b: '+ str(b))
    print ('\t [b-a]: '+ str(b-a))
    print ('\t [b+a]: '+ str(b+a))
    print ('\t1/100s: '+ (b-a).refstr(2))
    print ('\t1/100s: '+ (b+a).refstr(2))
    print ('\t   NET: '+ (b-a).timestr(2))
    print ('\t   ToD: '+ (b+a).timestr(2))
    print ('\t [a-b]: '+ str(a-b))
    print ('\t [a+b]: '+ str(a+b))
    print ('\t1/100s: '+ (a-b).refstr(2))
    print ('\t1/100s: '+ (a+b).refstr(2))
    print ('3: Copy & Speedstr')
    c = b.copy()
    print ('\t     c: '+ str(c))
    print ('\t   avg: '+ (b-a).speedstr())
| gpl-3.0 |
CraigHarris/gpdb | src/test/tinc/tincrepo/mpp/lib/mpp_tl.py | 9 | 1172 | """
Copyright (C) 2004-2015 Pivotal Software, Inc. All rights reserved.
This program and the accompanying materials are made available under
the terms of the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from mpp.models.mpp_tc import _MPPMetaClassType
class MPPTestLib(object):
    """Base class for MPP test libraries.

    Derived classes may provide product-specific hidden variants; the
    metaclass rewires their bases accordingly.
    """
    # MPPTestLib class is of type MPPMetaClassType.
    # MPPMetaClassType will take care of reconfiguring the bases of all the
    # derived classes that have product-specific hidden libraries.
    __metaclass__ = _MPPMetaClassType

    def __init__(self):
        # make_me_product_agnostic() is presumably injected by the metaclass
        # (it is not defined in this module) -- TODO confirm.
        self.make_me_product_agnostic()
        super(MPPTestLib, self).__init__()
class __gpdbMPPTestLib__(MPPTestLib):
    # GPDB-specific hidden variant; selected/rewired by the metaclass.
    pass
class __hawqMPPTestLib__(MPPTestLib):
    # HAWQ-specific hidden variant; selected/rewired by the metaclass.
    pass
| apache-2.0 |
minhphung171093/GreenERP_V7 | openerp/addons/account_voucher/report/account_voucher_print.py | 61 | 3718 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import time
from openerp.report import report_sxw
from openerp.tools import amount_to_text_en
class report_voucher_print(report_sxw.rml_parse):
    """RML parser for the account voucher report.

    Exposes helpers to the report template: the voucher title, the rows
    built from the move lines, the "on account" narration and the
    amount-in-words conversion.
    """

    def __init__(self, cr, uid, name, context):
        super(report_voucher_print, self).__init__(cr, uid, name, context)
        self.localcontext.update({
            'time': time,
            'get_title': self.get_title,
            'get_lines': self.get_lines,
            'get_on_account': self.get_on_account,
            'convert': self.convert
        })

    def convert(self, amount, cur):
        """Return *amount* spelled out in English words for currency *cur*."""
        return amount_to_text_en.amount_to_text(amount, 'en', cur)

    def get_lines(self, voucher):
        """Return the report rows for *voucher*'s move lines.

        The original method had two near-identical branches that differed
        only in how the reference column was rendered; they are merged here
        behind a single flag.
        """
        result = []
        # Debit/credit side is taken from the first voucher line, if any.
        line_type = voucher.line_ids and voucher.line_ids[0].type or False
        # Payment/receipt vouchers prefix the move name with "Agst Ref".
        against_ref = voucher.type in ('payment', 'receipt')
        for move in voucher.move_ids:
            amount = move.debit if line_type == 'dr' else move.credit
            if amount > 0.0:
                if against_ref:
                    ref = 'Agst Ref' + " " + str(move.name)
                else:
                    ref = move.name
                result.append({
                    'pname': move.partner_id.name,
                    'ref': ref,
                    'aname': move.account_id.name,
                    'amount': amount,
                })
        return result

    def get_title(self, type):
        """Return e.g. "Payment Voucher" for voucher type ``type``."""
        title = ''
        if type:
            # swapcase() on the first character upper-cases the usual
            # all-lowercase voucher types without touching the rest.
            title = type[0].swapcase() + type[1:] + " Voucher"
        return title

    def get_on_account(self, voucher):
        """Return the narration line describing the counterparty."""
        name = ""
        if voucher.type == 'receipt':
            name = "Received cash from " + str(voucher.partner_id.name)
        elif voucher.type == 'payment':
            name = "Payment from " + str(voucher.partner_id.name)
        elif voucher.type == 'sale':
            name = "Sale to " + str(voucher.partner_id.name)
        elif voucher.type == 'purchase':
            name = "Purchase from " + str(voucher.partner_id.name)
        return name
# Register this parser as the 'voucher.print' RML report on account.voucher,
# rendered with the external page header.
report_sxw.report_sxw(
    'report.voucher.print',
    'account.voucher',
    'addons/account_voucher/report/account_voucher_print.rml',
    parser=report_voucher_print,header="external"
)
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
chouseknecht/ansible | lib/ansible/modules/cloud/amazon/ecs_service_info.py | 8 | 9276 | #!/usr/bin/python
# Copyright: Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: ecs_service_info
short_description: list or describe services in ecs
description:
- Lists or describes services in ecs.
- This module was called C(ecs_service_facts) before Ansible 2.9, returning C(ansible_facts).
Note that the M(ecs_service_info) module no longer returns C(ansible_facts)!
version_added: "2.1"
author:
- "Mark Chance (@Java1Guy)"
- "Darek Kaczynski (@kaczynskid)"
requirements: [ json, botocore, boto3 ]
options:
details:
description:
- Set this to true if you want detailed information about the services.
required: false
default: 'false'
type: bool
events:
description:
- Whether to return ECS service events. Only has an effect if C(details) is true.
required: false
default: 'true'
type: bool
version_added: "2.6"
cluster:
description:
- The cluster ARNS in which to list the services.
required: false
default: 'default'
service:
description:
- One or more services to get details for
required: false
extends_documentation_fragment:
- aws
- ec2
'''
EXAMPLES = '''
# Note: These examples do not set authentication details, see the AWS Guide for details.
# Basic listing example
- ecs_service_info:
cluster: test-cluster
service: console-test-service
details: true
register: output
# Basic listing example
- ecs_service_info:
cluster: test-cluster
register: output
'''
RETURN = '''
services:
description: When details is false, returns an array of service ARNs, otherwise an array of complex objects as described below.
returned: success
type: complex
contains:
clusterArn:
description: The Amazon Resource Name (ARN) of the of the cluster that hosts the service.
returned: always
type: str
desiredCount:
description: The desired number of instantiations of the task definition to keep running on the service.
returned: always
type: int
loadBalancers:
description: A list of load balancer objects
returned: always
type: complex
contains:
loadBalancerName:
description: the name
returned: always
type: str
containerName:
description: The name of the container to associate with the load balancer.
returned: always
type: str
containerPort:
description: The port on the container to associate with the load balancer.
returned: always
type: int
pendingCount:
description: The number of tasks in the cluster that are in the PENDING state.
returned: always
type: int
runningCount:
description: The number of tasks in the cluster that are in the RUNNING state.
returned: always
type: int
serviceArn:
description: The Amazon Resource Name (ARN) that identifies the service. The ARN contains the arn:aws:ecs namespace, followed by the region of the service, the AWS account ID of the service owner, the service namespace, and then the service name. For example, arn:aws:ecs:region :012345678910 :service/my-service .
returned: always
type: str
serviceName:
description: A user-generated string used to identify the service
returned: always
type: str
status:
description: The valid values are ACTIVE, DRAINING, or INACTIVE.
returned: always
type: str
taskDefinition:
description: The ARN of a task definition to use for tasks in the service.
returned: always
type: str
deployments:
description: list of service deployments
returned: always
type: list of complex
events:
description: list of service events
returned: when events is true
type: list of complex
''' # NOQA
try:
import botocore
except ImportError:
pass # handled by AnsibleAWSModule
from ansible.module_utils.aws.core import AnsibleAWSModule
from ansible.module_utils.ec2 import ec2_argument_spec, AWSRetry
class EcsServiceManager:
    """Handles ECS Services: thin wrapper over the boto3 ECS client adding
    retry/backoff and JSON-friendly output massaging."""

    def __init__(self, module):
        self.module = module
        self.ecs = module.client('ecs')

    @AWSRetry.backoff(tries=5, delay=5, backoff=2.0)
    def list_services_with_backoff(self, **kwargs):
        # Paginate so clusters with more services than one page are fully listed.
        paginator = self.ecs.get_paginator('list_services')
        try:
            return paginator.paginate(**kwargs).build_full_result()
        except botocore.exceptions.ClientError as e:
            if e.response['Error']['Code'] == 'ClusterNotFoundException':
                self.module.fail_json_aws(e, "Could not find cluster to list services")
            else:
                raise

    @AWSRetry.backoff(tries=5, delay=5, backoff=2.0)
    def describe_services_with_backoff(self, **kwargs):
        return self.ecs.describe_services(**kwargs)

    def list_services(self, cluster):
        """Return ``dict(services=[...ARNs...])`` for *cluster* (the account
        default cluster when *cluster* is falsy)."""
        fn_args = dict()
        # The original `cluster and cluster is not None` was redundant:
        # the second clause could never be reached with a None value.
        if cluster:
            fn_args['cluster'] = cluster
        try:
            response = self.list_services_with_backoff(**fn_args)
        except (botocore.exceptions.ClientError, botocore.exceptions.BotoCoreError) as e:
            self.module.fail_json_aws(e, msg="Couldn't list ECS services")
        return dict(services=response['serviceArns'])

    def describe_services(self, cluster, services):
        """Describe *services* in *cluster*.

        Returns a tuple ``(running_services, services_not_running)`` where the
        first element holds sanitized service dicts and the second the API
        'failures' entries.
        """
        fn_args = dict()
        if cluster:
            fn_args['cluster'] = cluster
        fn_args['services'] = services
        try:
            response = self.describe_services_with_backoff(**fn_args)
        except (botocore.exceptions.ClientError, botocore.exceptions.BotoCoreError) as e:
            self.module.fail_json_aws(e, msg="Couldn't describe ECS services")
        running_services = [self.extract_service_from(service) for service in response.get('services', [])]
        services_not_running = response.get('failures', [])
        return running_services, services_not_running

    @staticmethod
    def _stringify_dates(entry, keys):
        # Shared helper: datetime values are not JSON serializable, so
        # convert the named keys (when present) to strings in place.
        for key in keys:
            if key in entry:
                entry[key] = str(entry[key])

    def extract_service_from(self, service):
        """Sanitize one service dict for JSON output; honors the module's
        ``events`` option by dropping the events list when it is False."""
        if 'deployments' in service:
            for d in service['deployments']:
                self._stringify_dates(d, ('createdAt', 'updatedAt'))
        if 'events' in service:
            if not self.module.params['events']:
                del service['events']
            else:
                for e in service['events']:
                    self._stringify_dates(e, ('createdAt',))
        return service
def chunks(l, n):
    """Yield successive *n*-sized slices of the sequence *l*.

    Based on https://stackoverflow.com/a/312464
    """
    start = 0
    while start < len(l):
        yield l[start:start + n]
        start += n
def main():
    """Module entry point: list or describe ECS services and exit."""
    spec = ec2_argument_spec()
    spec.update(dict(
        details=dict(type='bool', default=False),
        events=dict(type='bool', default=True),
        cluster=dict(),
        service=dict(type='list')
    ))

    module = AnsibleAWSModule(argument_spec=spec, supports_check_mode=True)
    is_old_facts = module._name == 'ecs_service_facts'
    if is_old_facts:
        module.deprecate("The 'ecs_service_facts' module has been renamed to 'ecs_service_info', "
                         "and the renamed one no longer returns ansible_facts", version='2.13')

    manager = EcsServiceManager(module)
    cluster = module.params['cluster']

    if module.params.get('details'):
        # Describe in batches of 10 -- the DescribeServices API limit.
        names = module.params['service'] or manager.list_services(cluster)['services']
        result = dict(services=[], services_not_running=[])
        for batch in chunks(names, 10):
            running, not_running = manager.describe_services(cluster, batch)
            result['services'].extend(running)
            result['services_not_running'].extend(not_running)
    else:
        result = manager.list_services(cluster)

    if is_old_facts:
        module.exit_json(changed=False, ansible_facts=result, **result)
    else:
        module.exit_json(changed=False, **result)


if __name__ == '__main__':
    main()
| gpl-3.0 |
SteveHNH/ansible | lib/ansible/modules/packaging/os/portinstall.py | 29 | 6800 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# (c) 2013, berenddeboer
# Written by berenddeboer <berend@pobox.com>
# Based on pkgng module written by bleader <bleader at ratonland.org>
#
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: portinstall
short_description: Installing packages from FreeBSD's ports system
description:
- Manage packages for FreeBSD using 'portinstall'.
version_added: "1.3"
options:
name:
description:
- name of package to install/remove
required: true
state:
description:
- state of the package
choices: [ 'present', 'absent' ]
required: false
default: present
use_packages:
description:
- use packages instead of ports whenever available
choices: [ 'yes', 'no' ]
required: false
default: yes
author: "berenddeboer (@berenddeboer)"
'''
EXAMPLES = '''
# Install package foo
- portinstall:
name: foo
state: present
# Install package security/cyrus-sasl2-saslauthd
- portinstall:
name: security/cyrus-sasl2-saslauthd
state: present
# Remove packages foo and bar
- portinstall:
name: foo,bar
state: absent
'''
import os
import re
import sys
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.six.moves import shlex_quote
def query_package(module, name):
    """Return True if package *name* appears installed.

    Probes with the legacy pkg_info tools when available, otherwise with
    pkgng's ``pkg info``.  On a miss, retries once with digits stripped from
    the name (databases/mysql55-client installs as mysql-client).  The two
    retry branches in the original were byte-identical, so the pkgng flag is
    no longer tracked past the first probe.
    """
    pkg_info_path = module.get_bin_path('pkg_info', False)

    # Assume that if we have pkg_info, we haven't upgraded to pkgng
    if pkg_info_path:
        pkg_glob_path = module.get_bin_path('pkg_glob', True)
        rc, out, err = module.run_command("%s -e `pkg_glob %s`" % (pkg_info_path, shlex_quote(name)), use_unsafe_shell=True)
    else:
        pkg_info_path = module.get_bin_path('pkg', True) + " info"
        rc, out, err = module.run_command("%s %s" % (pkg_info_path, name))

    if rc == 0:
        return True

    # Retry without digits; pity FreeBSD doesn't have a fool-proof way of
    # checking whether some package is installed.
    name_without_digits = re.sub('[0-9]', '', name)
    if name != name_without_digits:
        rc, out, err = module.run_command("%s %s" % (pkg_info_path, name_without_digits))
    return rc == 0
def matching_packages(module, name):
    """Count the ports matching *name* via ports_glob, retrying with digits
    stripped from the name when nothing matched."""
    glob_bin = module.get_bin_path('ports_glob', True)
    rc, out, err = module.run_command("%s %s" % (glob_bin, name))

    # Each matching port is printed on its own line.
    count = out.count('\n')
    if count:
        return count

    stripped = re.sub('[0-9]', '', name)
    if stripped != name:
        rc, out, err = module.run_command("%s %s" % (glob_bin, stripped))
        count = out.count('\n')
    return count
def remove_packages(module, packages):
    """Remove each named package, failing the module on the first package
    that cannot be removed.

    Always terminates the module via ``exit_json``/``fail_json``; never
    returns to the caller.
    """
    remove_c = 0
    pkg_glob_path = module.get_bin_path('pkg_glob', True)

    # If pkg_delete not found, we assume pkgng
    pkg_delete_path = module.get_bin_path('pkg_delete', False)
    if not pkg_delete_path:
        pkg_delete_path = module.get_bin_path('pkg', True)
        pkg_delete_path = pkg_delete_path + " delete -y"

    # Using a for loop in case of error, we can report the package that failed
    for package in packages:
        # Query the package first, to see if we even need to remove
        if not query_package(module, package):
            continue
        rc, out, err = module.run_command("%s `%s %s`" % (pkg_delete_path, pkg_glob_path, shlex_quote(package)), use_unsafe_shell=True)
        if query_package(module, package):
            # Still installed: retry with digits stripped (e.g.
            # databases/mysql55-client installs as mysql-client) before
            # declaring failure.
            name_without_digits = re.sub('[0-9]', '', package)
            rc, out, err = module.run_command("%s `%s %s`" % (pkg_delete_path, pkg_glob_path,
                                                              shlex_quote(name_without_digits)),
                                              use_unsafe_shell=True)
            if query_package(module, package):
                module.fail_json(msg="failed to remove %s: %s" % (package, out))
        remove_c += 1

    if remove_c > 0:
        module.exit_json(changed=True, msg="removed %s package(s)" % remove_c)
    module.exit_json(changed=False, msg="package(s) already absent")
def install_packages(module, packages, use_packages):
    """Install each named package with portinstall, bootstrapping the
    portupgrade tooling via pkgng when portinstall is missing.

    Always terminates the module via ``exit_json``/``fail_json``; never
    returns to the caller.
    """
    install_c = 0

    # If portinstall not found, automagically install it; required=True on
    # the second lookup makes get_bin_path fail the module with a clear
    # message if the bootstrap did not work.
    portinstall_path = module.get_bin_path('portinstall', False)
    if not portinstall_path:
        pkg_path = module.get_bin_path('pkg', False)
        if pkg_path:
            module.run_command("pkg install -y portupgrade")
        portinstall_path = module.get_bin_path('portinstall', True)

    # BUG FIX: the argument_spec declares use_packages as type='bool', so the
    # old comparison `use_packages == "yes"` was never true and the
    # --use-packages flag was silently never passed.  Accept the boolean as
    # well as the legacy "yes" string for direct callers.
    if use_packages in (True, "yes"):
        portinstall_params = "--use-packages"
    else:
        portinstall_params = ""

    for package in packages:
        if query_package(module, package):
            continue

        # Refuse to guess between several matching ports.
        matches = matching_packages(module, package)
        if matches == 1:
            rc, out, err = module.run_command("%s --batch %s %s" % (portinstall_path, portinstall_params, package))
            if not query_package(module, package):
                module.fail_json(msg="failed to install %s: %s" % (package, out))
        elif matches == 0:
            module.fail_json(msg="no matches for package %s" % (package))
        else:
            module.fail_json(msg="%s matches found for package name %s" % (matches, package))

        install_c += 1

    if install_c > 0:
        module.exit_json(changed=True, msg="present %s package(s)" % (install_c))
    module.exit_json(changed=False, msg="package(s) already present")
def main():
    """Module entry point: dispatch to install or remove based on ``state``."""
    module = AnsibleModule(
        argument_spec=dict(
            state=dict(default="present", choices=["present", "absent"]),
            name=dict(aliases=["pkg"], required=True),
            use_packages=dict(type='bool', default='yes')))

    params = module.params
    packages = params["name"].split(",")

    if params["state"] == "present":
        install_packages(module, packages, params["use_packages"])
    elif params["state"] == "absent":
        remove_packages(module, packages)


if __name__ == '__main__':
    main()
| gpl-3.0 |
jgeskens/django | tests/generic_inline_admin/admin.py | 150 | 1100 | from __future__ import absolute_import
from django.contrib import admin
from django.contrib.contenttypes import generic
from .models import (Media, PhoneNumber, Episode, EpisodeExtra, Contact,
Category, EpisodePermanent, EpisodeMaxNum)
site = admin.AdminSite(name="admin")
class MediaInline(generic.GenericTabularInline):
    # Generic-FK tabular inline for editing Media rows on the parent page.
    model = Media
class EpisodeAdmin(admin.ModelAdmin):
    # Episode admin with its generically-related Media editable inline.
    inlines = [
        MediaInline,
    ]
class MediaExtraInline(generic.GenericTabularInline):
    # Like MediaInline, but renders no extra blank forms.
    model = Media
    extra = 0
class MediaMaxNumInline(generic.GenericTabularInline):
    # max_num caps the total number of forms even though extra asks for 5.
    model = Media
    extra = 5
    max_num = 2
class PhoneNumberInline(generic.GenericTabularInline):
    # Generic-FK tabular inline for PhoneNumber rows.
    model = PhoneNumber
class MediaPermanentInline(generic.GenericTabularInline):
    # can_delete=False: inline rows cannot be removed from the change page.
    model = Media
    can_delete = False
# Wire the test models into the custom admin site declared above.
site.register(Episode, EpisodeAdmin)
site.register(EpisodeExtra, inlines=[MediaExtraInline])
site.register(EpisodeMaxNum, inlines=[MediaMaxNumInline])
site.register(Contact, inlines=[PhoneNumberInline])
site.register(Category)
site.register(EpisodePermanent, inlines=[MediaPermanentInline])
| bsd-3-clause |
piotroxp/scibibscan | scib/lib/python3.5/site-packages/astropy/coordinates/baseframe.py | 1 | 45786 | # -*- coding: utf-8 -*-
# Licensed under a 3-clause BSD style license - see LICENSE.rst
"""
Framework and base classes for coordinate frames/"low-level" coordinate
classes.
"""
from __future__ import (absolute_import, unicode_literals, division,
print_function)
# Standard library
import inspect
import warnings
from copy import deepcopy
from collections import namedtuple
# Dependencies
import numpy as np
# Project
from ..utils.compat.misc import override__dir__
from ..extern import six
from ..utils.exceptions import AstropyDeprecationWarning, AstropyWarning
from .. import units as u
from ..utils import OrderedDict
from .transformations import TransformGraph
from .representation import (BaseRepresentation, CartesianRepresentation,
SphericalRepresentation,
UnitSphericalRepresentation,
REPRESENTATION_CLASSES)
__all__ = ['BaseCoordinateFrame', 'frame_transform_graph', 'GenericFrame',
'FrameAttribute', 'TimeFrameAttribute', 'QuantityFrameAttribute',
'EarthLocationAttribute', 'RepresentationMapping']
# the graph used for all transformations between frames
frame_transform_graph = TransformGraph()
def _get_repr_cls(value):
    """
    Return a valid representation class from ``value`` or raise exception.

    ``value`` may be a string alias registered in ``REPRESENTATION_CLASSES``
    or a `BaseRepresentation` subclass itself.
    """
    if value in REPRESENTATION_CLASSES:
        value = REPRESENTATION_CLASSES[value]
    # Validate explicitly rather than via ``assert`` (the original caught
    # AssertionError, but asserts are stripped under ``python -O``, which
    # would have let invalid values through).  The isinstance(type) guard
    # also covers the non-class case that previously raised TypeError.
    if not (isinstance(value, type) and issubclass(value, BaseRepresentation)):
        raise ValueError(
            'Representation is {0!r} but must be a BaseRepresentation class '
            'or one of the string aliases {1}'.format(
                value, list(REPRESENTATION_CLASSES)))
    return value
class FrameMeta(type):
    # Metaclass for BaseCoordinateFrame subclasses.  It removes the
    # `default_representation` and `frame_specific_representation_info`
    # entries from the class body (falling back to the underscored versions
    # found anywhere in the MRO) and re-exposes them as read-only properties
    # so they are immutable after class creation.
    def __new__(mcls, name, bases, members):
        if 'default_representation' in members:
            default_repr = members.pop('default_representation')
            found_default_repr = True
        else:
            default_repr = None
            found_default_repr = False
        if 'frame_specific_representation_info' in members:
            repr_info = members.pop('frame_specific_representation_info')
            found_repr_info = True
        else:
            repr_info = None
            found_repr_info = False
        # somewhat hacky, but this is the best way to get the MRO according to
        # https://mail.python.org/pipermail/python-list/2002-December/167861.html
        tmp_cls = super(FrameMeta, mcls).__new__(mcls, name, bases, members)
        # now look through the whole MRO for the class attributes, raw for
        # frame_attr_names, and leading underscore for others
        for m in (c.__dict__ for c in tmp_cls.__mro__):
            if not found_default_repr and '_default_representation' in m:
                default_repr = m['_default_representation']
                found_default_repr = True
            if (not found_repr_info and
                    '_frame_specific_representation_info' in m):
                repr_info = m['_frame_specific_representation_info']
                found_repr_info = True
            if found_default_repr and found_repr_info:
                break
        else:
            raise ValueError(
                'Could not find all expected BaseCoordinateFrame class '
                'attributes.  Are you mis-using FrameMeta?')
        # Make read-only properties for the frame class attributes that should
        # be read-only to make them immutable after creation.
        # We copy attributes instead of linking to make sure there's no
        # accidental cross-talk between classes
        mcls.readonly_prop_factory(members, 'default_representation',
                                   default_repr)
        mcls.readonly_prop_factory(members,
                                   'frame_specific_representation_info',
                                   deepcopy(repr_info))
        # now set the frame name as lower-case class name, if it isn't explicit
        if 'name' not in members:
            members['name'] = name.lower()
        return super(FrameMeta, mcls).__new__(mcls, name, bases, members)

    @staticmethod
    def readonly_prop_factory(members, attr, value):
        """Store *value* under ``'_' + attr`` in *members* and expose *attr*
        as a read-only property over it."""
        private_attr = '_' + attr

        def getter(self):
            return getattr(self, private_attr)

        members[private_attr] = value
        members[attr] = property(getter)
class FrameAttribute(object):
    """A non-mutable data descriptor to hold a frame attribute.

    This class must be used to define frame attributes (e.g. ``equinox`` or
    ``obstime``) that are included in a frame class definition.

    Examples
    --------
    The `~astropy.coordinates.FK4` class uses the following class attributes::

        class FK4(BaseCoordinateFrame):
            equinox = TimeFrameAttribute(default=_EQUINOX_B1950)
            obstime = TimeFrameAttribute(default=None,
                                         secondary_attribute='equinox')

    This means that ``equinox`` and ``obstime`` are available to be set as
    keyword arguments when creating an ``FK4`` class instance and are then
    accessible as instance attributes.  The instance value for the attribute
    must be stored in ``'_' + <attribute_name>`` by the frame ``__init__``
    method.

    Note in this example that ``equinox`` and ``obstime`` are time attributes
    and use the ``TimeAttributeFrame`` class.  This subclass overrides the
    ``convert_input`` method to validate and convert inputs into a ``Time``
    object.

    Parameters
    ----------
    default : object
        Default value for the attribute if not provided
    secondary_attribute : str
        Name of a secondary instance attribute which supplies the value if
        ``default is None`` and no value was supplied during initialization.
    """

    _nextid = 1
    """
    Used to ascribe some ordering to FrameAttribute instances so that the
    order they were assigned in a class body can be determined.
    """

    def __init__(self, default=None, secondary_attribute=''):
        self.default = default
        self.secondary_attribute = secondary_attribute
        # Use FrameAttribute._nextid explicitly so that subclasses of
        # FrameAttribute use the same counter
        self._order = FrameAttribute._nextid
        FrameAttribute._nextid += 1

    def convert_input(self, value):
        """
        Validate the input ``value`` and convert to expected attribute class.

        The base method here does nothing, but subclasses can implement this
        as needed.  The method should catch any internal exceptions and raise
        ValueError with an informative message.

        The method returns the validated input along with a boolean that
        indicates whether the input value was actually converted.  If the input
        value was already the correct type then the ``converted`` return value
        should be ``False``.

        Parameters
        ----------
        value : object
            Input value to be converted.

        Returns
        -------
        output_value
            The ``value`` converted to the correct type (or just ``value`` if
            ``converted`` is False)
        converted : bool
            True if the conversion was actually performed, False otherwise.

        Raises
        ------
        ValueError
            If the input is not valid for this attribute.
        """
        return value, False

    def __get__(self, instance, frame_cls=None):
        if not hasattr(self, 'name'):
            # Find attribute name of self by finding this object in the frame
            # class which is requesting this attribute or any of its
            # superclasses.
            for mro_cls in frame_cls.__mro__:
                for name, val in mro_cls.__dict__.items():
                    if val is self:
                        self.name = name
                        break
                if hasattr(self, 'name'):  # Can't nicely break out of two loops
                    break
            else:
                # Cannot think of a way to actually raise this exception.  This
                # instance containing this code must be in the class dict in
                # order to get excecuted by attribute access.  But leave this
                # here just in case...
                raise AttributeError(
                    'Unexpected inability to locate descriptor')
        out = None
        if instance is not None:
            out = getattr(instance, '_' + self.name, None)
            if out is None and self.default is None:
                # Fall back to the secondary attribute (e.g. obstime falling
                # back to equinox) when neither value nor default is set.
                out = getattr(instance, self.secondary_attribute, None)
        if out is None:
            out = self.default
        out, converted = self.convert_input(out)
        if instance is not None and converted:
            # Cache the converted value on the instance so conversion is
            # performed at most once per instance.
            setattr(instance, '_' + self.name, out)
        return out

    def __set__(self, instance, val):
        # Defining a raising __set__ makes this a data descriptor, so the
        # attribute is effectively read-only on instances.
        raise AttributeError('Cannot set frame attribute')
class TimeFrameAttribute(FrameAttribute):
    """
    Frame attribute descriptor for quantities that are Time objects.
    See the `~astropy.coordinates.FrameAttribute` API doc for further
    information.

    Parameters
    ----------
    default : object
        Default value for the attribute if not provided
    secondary_attribute : str
        Name of a secondary instance attribute which supplies the value if
        ``default is None`` and no value was supplied during initialization.
    """

    def convert_input(self, value):
        """
        Convert input value to a Time object and validate by running through
        the Time constructor.  (No scalar check is performed here, despite
        what an earlier version of this docstring claimed -- any
        Time-compatible input is accepted.)

        Parameters
        ----------
        value : object
            Input value to be converted.

        Returns
        -------
        out, converted : correctly-typed object, boolean
            Tuple consisting of the correctly-typed object and a boolean which
            indicates if conversion was actually performed.

        Raises
        ------
        ValueError
            If the input is not valid for this attribute.
        """
        # Imported here to avoid a circular import at module load time.
        from ..time import Time

        if value is None:
            return None, False

        if isinstance(value, Time):
            out = value
            converted = False
        else:
            try:
                out = Time(value)
            except Exception as err:
                raise ValueError(
                    'Invalid time input {0}={1!r}\n{2}'.format(self.name,
                                                               value, err))
            converted = True

        return out, converted
class QuantityFrameAttribute(FrameAttribute):
    """
    A frame attribute that is a quantity with specified units and shape
    (optionally).

    Parameters
    ----------
    default : object
        Default value for the attribute if not provided
    secondary_attribute : str
        Name of a secondary instance attribute which supplies the value if
        ``default is None`` and no value was supplied during initialization.
    unit : unit object or None
        Name of a unit that the input will be converted into. If None, no
        unit-checking or conversion is performed
    shape : tuple or None
        If given, specifies the shape the attribute must be
    """

    def __init__(self, default=None, secondary_attribute='', unit=None,
                 shape=None):
        super(QuantityFrameAttribute, self).__init__(default,
                                                     secondary_attribute)
        self.unit = unit
        self.shape = shape

    def convert_input(self, value):
        """
        Checks that the input is a Quantity with the necessary units (or the
        special value ``0``).

        Parameters
        ----------
        value : object
            Input value to be converted.

        Returns
        -------
        out, converted : correctly-typed object, boolean
            Tuple consisting of the correctly-typed object and a boolean which
            indicates if conversion was actually performed.

        Raises
        ------
        ValueError
            If the input is not valid for this attribute.
        """
        # The original condition repeated ``self.unit is not None`` twice;
        # the duplicate clause is removed here with no behavior change.
        if np.all(value == 0) and self.unit is not None:
            # NOTE(review): if ``shape`` is None, ``np.zeros(None)`` raises --
            # confirm that attributes allowing the 0 shortcut always declare
            # a shape.
            return u.Quantity(np.zeros(self.shape), self.unit), True
        else:
            converted = True
            if not hasattr(value, 'unit'):
                raise TypeError('Tried to set a QuantityFrameAttribute with '
                                'something that does not have a unit.')
            oldvalue = value
            value = u.Quantity(oldvalue, copy=False).to(self.unit)
            if self.shape is not None and value.shape != self.shape:
                raise ValueError('The provided value has shape "{0}", but '
                                 'should have shape "{1}"'.format(value.shape,
                                                                  self.shape))
            # Report "not converted" when the input was already an equivalent
            # Quantity, so callers can skip re-caching.
            if (oldvalue.unit == value.unit and hasattr(oldvalue, 'value') and
                    np.all(oldvalue.value == value.value)):
                converted = False
            return value, converted
class EarthLocationAttribute(FrameAttribute):
    """
    A frame attribute that can act as a `~astropy.coordinates.EarthLocation`.
    It can be created as anything that can be transformed to the
    `~astropy.coordinates.ITRS` frame, but always presents as an
    `EarthLocation` when accessed after creation.

    Parameters
    ----------
    default : object
        Default value for the attribute if not provided
    secondary_attribute : str
        Name of a secondary instance attribute which supplies the value if
        ``default is None`` and no value was supplied during initialization.
    """

    def convert_input(self, value):
        """
        Convert the input to an `EarthLocation`, transforming through ITRS
        when the input is a coordinate-like object.

        Parameters
        ----------
        value : object
            Input value to be converted.

        Returns
        -------
        out, converted : correctly-typed object, boolean
            Tuple consisting of the correctly-typed object and a boolean which
            indicates if conversion was actually performed.

        Raises
        ------
        ValueError
            If the input is not valid for this attribute.
        """
        # BUG FIX: ``EarthLocation`` was referenced below without being
        # imported anywhere in this module, which raised NameError for any
        # non-None value.  Import it lazily (like ITRS below) to avoid the
        # circular-dependency problem -- confirm the ``.earth`` module path
        # against the package layout.
        from .earth import EarthLocation

        if value is None:
            return None, False
        elif isinstance(value, EarthLocation):
            return value, False
        else:
            # we have to do the import here because of some tricky circular deps
            from .builtin_frames import ITRS

            if not hasattr(value, 'transform_to'):
                raise ValueError('"{0}" was passed into an '
                                 'EarthLocationAttribute, but it does not have '
                                 '"transform_to" method'.format(value))
            itrsobj = value.transform_to(ITRS)
            return itrsobj.earth_location, True
_RepresentationMappingBase = \
namedtuple('RepresentationMapping',
('reprname', 'framename', 'defaultunit'))
class RepresentationMapping(_RepresentationMappingBase):
"""
This `~collections.namedtuple` is used with the
``frame_specific_representation_info`` attribute to tell frames what
attribute names (and default units) to use for a particular representation.
``reprname`` and ``framename`` should be strings, while ``defaultunit`` can
be either an astropy unit, the string ``'recommended'`` (to use whatever
the representation's ``recommended_units`` is), or None (to indicate that
no unit mapping should be done).
"""
def __new__(cls, reprname, framename, defaultunit='recommended'):
# this trick just provides some defaults
return super(RepresentationMapping, cls).__new__(cls, reprname,
framename,
defaultunit)
@six.add_metaclass(FrameMeta)
class BaseCoordinateFrame(object):
"""
The base class for coordinate frames.
This class is intended to be subclassed to create instances of specific
systems. Subclasses can implement the following attributes:
* `default_representation`
A subclass of `~astropy.coordinates.BaseRepresentation` that will be
treated as the default representation of this frame. This is the
representation assumed by default when the frame is created.
* `~astropy.coordinates.FrameAttribute` class attributes
Frame attributes such as ``FK4.equinox`` or ``FK4.obstime`` are defined
using a descriptor class. See the narrative documentation or
built-in classes code for details.
* `frame_specific_representation_info`
A dictionary mapping the name or class of a representation to a list of
`~astropy.coordinates.RepresentationMapping` objects that tell what
names and default units should be used on this frame for the components
of that representation.
"""
default_representation = None
# specifies special names/units for representation attributes
frame_specific_representation_info = {}
# This __new__ provides for backward-compatibility with pre-0.4 API.
# TODO: remove in 1.0
    def __new__(cls, *args, **kwargs):
        """Backward-compatibility shim for the pre-0.4 coordinate API.

        If the constructor arguments look like the old string/scalar style
        (non-Quantity positional arguments or a ``unit`` keyword), emit an
        `AstropyDeprecationWarning` and return a ``SkyCoord`` instead of a
        frame instance.  Otherwise defer to the normal ``__new__``.
        """
        # Only do backward-compatibility if frame is previously defined one
        frame_name = cls.__name__.lower()
        if frame_name not in ['altaz', 'fk4', 'fk4noeterms', 'fk5',
                              'galactic', 'icrs']:
            return super(BaseCoordinateFrame, cls).__new__(cls)
        use_skycoord = False
        # Old-style call: more than one positional arg, or a single one that
        # is not already a representation object.
        if (len(args) > 1 or (len(args) == 1 and
                              not isinstance(args[0], BaseRepresentation))):
            for arg in args:
                if (not isinstance(arg, u.Quantity)
                        and not isinstance(arg, BaseRepresentation)):
                    msg = ('Initializing frame classes like "{0}" using string '
                           'or other non-Quantity arguments is deprecated, and '
                           'will be removed in the next version of Astropy. '
                           'Instead, you probably want to use the SkyCoord '
                           'class with the "frame={1}" keyword, or if you '
                           'really want to use the low-level frame classes, '
                           'create it with an Angle or Quantity.')
                    warnings.warn(msg.format(cls.__name__,
                                             cls.__name__.lower()),
                                  AstropyDeprecationWarning)
                    use_skycoord = True
                    break
        # The old API also accepted a ``unit`` keyword; that now belongs to
        # SkyCoord only.
        if 'unit' in kwargs and not use_skycoord:
            warnings.warn(
                "Initializing frames using the ``unit`` argument is "
                "now deprecated. Use SkyCoord or pass Quantity "
                "instances to frames instead.", AstropyDeprecationWarning)
            use_skycoord = True
        if not use_skycoord:
            # Check each component keyword (e.g. ra/dec): non-Quantity
            # values are the old API as well.
            representation = kwargs.get('representation',
                                        cls._default_representation)
            representation = _get_repr_cls(representation)
            repr_info = cls._get_representation_info()
            for key in repr_info[representation]['names']:
                if key in kwargs:
                    if not isinstance(kwargs[key], u.Quantity):
                        warnings.warn(
                            "Initializing frames using non-Quantity arguments "
                            "is now deprecated. Use SkyCoord or pass Quantity "
                            "instances instead.", AstropyDeprecationWarning)
                        use_skycoord = True
                        break
        if use_skycoord:
            # Delegate to the high-level class; local import avoids a
            # circular import at module load time.
            kwargs['frame'] = frame_name
            from .sky_coordinate import SkyCoord
            return SkyCoord(*args, **kwargs)
        else:
            return super(BaseCoordinateFrame, cls).__new__(cls)
    def __init__(self, *args, **kwargs):
        """Set up frame attributes and (optionally) coordinate data.

        Accepts either a single ``BaseRepresentation`` (or None) positional
        argument, or per-component positional/keyword values matching this
        frame's representation, plus any frame attributes as keywords.
        """
        self._attr_names_with_defaults = []
        if 'representation' in kwargs:
            self.representation = kwargs.pop('representation')
        # if not set below, this is a frame with no data
        representation_data = None
        for fnm, fdefault in self.get_frame_attr_names().items():
            # Read-only frame attributes are defined as FrameAttribute
            # descriptors which are not settable, so set 'real' attributes as
            # the name prefaced with an underscore.
            if fnm in kwargs:
                value = kwargs.pop(fnm)
                setattr(self, '_' + fnm, value)
            else:
                setattr(self, '_' + fnm, fdefault)
                self._attr_names_with_defaults.append(fnm)
            # Validate input by getting the attribute here.
            getattr(self, fnm)
        # NOTE(review): pref_rep itself is unused, but accessing the
        # ``representation`` property initializes self._representation on
        # first use -- confirm before removing.
        pref_rep = self.representation
        args = list(args)  # need to be able to pop them
        if (len(args) > 0) and (isinstance(args[0], BaseRepresentation) or
                                args[0] is None):
            # A ready-made representation (or explicit no-data marker) was
            # passed; no component args may accompany it.
            representation_data = args.pop(0)
            if len(args) > 0:
                raise TypeError(
                    'Cannot create a frame with both a representation and '
                    'other positional arguments')
        elif self.representation:
            # Assemble the representation from component values given
            # positionally (in representation order) or by frame-specific
            # keyword names (e.g. ra/dec).
            repr_kwargs = {}
            for nmkw, nmrep in self.representation_component_names.items():
                if len(args) > 0:
                    # first gather up positional args
                    repr_kwargs[nmrep] = args.pop(0)
                elif nmkw in kwargs:
                    repr_kwargs[nmrep] = kwargs.pop(nmkw)
            # special-case the Spherical->UnitSpherical if no `distance`
            # TODO: possibly generalize this somehow?
            if repr_kwargs:
                if repr_kwargs.get('distance', True) is None:
                    del repr_kwargs['distance']
                if (issubclass(self.representation, SphericalRepresentation) and
                        'distance' not in repr_kwargs):
                    representation_data = UnitSphericalRepresentation(**repr_kwargs)
                else:
                    representation_data = self.representation(**repr_kwargs)
        # Anything left over in args/kwargs is an error.
        if len(args) > 0:
            raise TypeError(
                '{0}.__init__ had {1} remaining unhandled arguments'.format(
                    self.__class__.__name__, len(args)))
        if kwargs:
            raise TypeError(
                'Coordinate frame got unexpected keywords: {0}'.format(
                    list(kwargs)))
        self._data = representation_data
        # We do ``is not None`` because self._data might evaluate to false for
        # empty arrays or data == 0
        if self._data is not None:
            # Cache of representations keyed by (class name, in_frame_units);
            # seeded with the raw input data.
            self._rep_cache = dict()
            self._rep_cache[self._data.__class__.__name__, False] = self._data
@property
def data(self):
"""
The coordinate data for this object. If this frame has no data, an
`~.exceptions.ValueError` will be raised. Use `has_data` to
check if data is present on this frame object.
"""
if self._data is None:
raise ValueError('The frame object "{0}" does not have associated '
'data'.format(repr(self)))
return self._data
@property
def has_data(self):
"""
True if this frame has `data`, False otherwise.
"""
return self._data is not None
    def __len__(self):
        # Length of the underlying data; the ``data`` property raises
        # ValueError if this frame has no data.
        return len(self.data)
    def __nonzero__(self):  # Py 2.x
        # Scalar frames are always truthy; array frames are truthy when
        # non-empty.
        return self.isscalar or len(self) != 0
    def __bool__(self):  # Py 3.x
        # Scalar frames are always truthy; array frames are truthy when
        # non-empty.
        return self.isscalar or len(self) != 0
    @property
    def shape(self):
        # Shape of the underlying data (raises via ``data`` if no data).
        return self.data.shape
    @property
    def isscalar(self):
        # True when the underlying data is scalar (raises if no data).
        return self.data.isscalar
@classmethod
def get_frame_attr_names(cls):
seen = set()
attributes = []
for mro_cls in cls.__mro__:
for name, val in mro_cls.__dict__.items():
if isinstance(val, FrameAttribute) and name not in seen:
seen.add(name)
# Add the sort order, name, and actual value of the frame
# attribute in question
attributes.append((val._order, name,
getattr(mro_cls, name)))
# Sort by the frame attribute order
attributes.sort(key=lambda a: a[0])
return OrderedDict((a[1], a[2]) for a in attributes)
@property
def representation(self):
"""
The representation of the data in this frame, as a class that is
subclassed from `~astropy.coordinates.BaseRepresentation`. Can
also be *set* using the string name of the representation.
"""
if not hasattr(self, '_representation'):
self._representation = self.default_representation
return self._representation
    @representation.setter
    def representation(self, value):
        # Accept either a representation class or its string name;
        # _get_repr_cls normalizes to the class.
        self._representation = _get_repr_cls(value)
    @classmethod
    def _get_representation_info(cls):
        # This exists as a class method only to support handling frame inputs
        # without units, which are deprecated and will be removed. This can be
        # moved into the representation_info property at that time.
        #
        # Returns a dict mapping every known representation class to
        # {'names': tuple, 'units': tuple}: the frame-preferred component
        # names and units for that representation.
        repr_attrs = {}
        # Start from each representation's own component names and
        # recommended units.
        for repr_cls in REPRESENTATION_CLASSES.values():
            repr_attrs[repr_cls] = {'names': [], 'units': []}
            for c in repr_cls.attr_classes.keys():
                repr_attrs[repr_cls]['names'].append(c)
                rec_unit = repr_cls.recommended_units.get(c, None)
                repr_attrs[repr_cls]['units'].append(rec_unit)
        # Then apply this frame's overrides.
        for repr_cls, mappings in cls._frame_specific_representation_info.items():
            # keys may be a class object or a name
            repr_cls = _get_repr_cls(repr_cls)
            # take the 'names' and 'units' tuples from repr_attrs,
            # and then use the RepresentationMapping objects
            # to update as needed for this frame.
            nms = repr_attrs[repr_cls]['names']
            uns = repr_attrs[repr_cls]['units']
            comptomap = dict([(m.reprname, m) for m in mappings])
            for i, c in enumerate(repr_cls.attr_classes.keys()):
                if c in comptomap:
                    mapp = comptomap[c]
                    nms[i] = mapp.framename
                    # need the isinstance because otherwise if it's a unit it
                    # will try to compare to the unit string representation
                    if not (isinstance(mapp.defaultunit, six.string_types) and
                            mapp.defaultunit == 'recommended'):
                        uns[i] = mapp.defaultunit
                    # else we just leave it as recommended_units says above
            # Convert to tuples so that this can't mess with frame internals
            repr_attrs[repr_cls]['names'] = tuple(nms)
            repr_attrs[repr_cls]['units'] = tuple(uns)
        return repr_attrs
    @property
    def representation_info(self):
        """
        A dictionary with the information of what attribute names for this frame
        apply to particular representations.
        """
        # Recomputed on every access (no caching); see the classmethod for
        # why this is not inlined here yet.
        return self._get_representation_info()
@property
def representation_component_names(self):
out = OrderedDict()
if self.representation is None:
return out
data_names = self.representation.attr_classes.keys()
repr_names = self.representation_info[self.representation]['names']
for repr_name, data_name in zip(repr_names, data_names):
out[repr_name] = data_name
return out
@property
def representation_component_units(self):
out = OrderedDict()
if self.representation is None:
return out
repr_attrs = self.representation_info[self.representation]
repr_names = repr_attrs['names']
repr_units = repr_attrs['units']
for repr_name, repr_unit in zip(repr_names, repr_units):
if repr_unit:
out[repr_name] = repr_unit
return out
def realize_frame(self, representation):
"""
Generates a new frame *with new data* from another frame (which may or
may not have data).
Parameters
----------
representation : BaseRepresentation
The representation to use as the data for the new frame.
Returns
-------
frameobj : same as this frame
A new object with the same frame attributes as this one, but
with the ``representation`` as the data.
"""
frattrs = dict([(attr, getattr(self, attr))
for attr in self.get_frame_attr_names()
if attr not in self._attr_names_with_defaults])
return self.__class__(representation, **frattrs)
    def represent_as(self, new_representation, in_frame_units=False):
        """
        Generate and return a new representation of this frame's `data`
        as a Representation object.

        Note: In order to make an in-place change of the representation
        of a Frame or SkyCoord object, set the ``representation``
        attribute of that object to the desired new representation.

        Parameters
        ----------
        new_representation : subclass of BaseRepresentation or string
            The type of representation to generate. May be a *class*
            (not an instance), or the string name of the representation
            class.
        in_frame_units : bool
            Force the representation units to match the specified units
            particular to this frame

        Returns
        -------
        newrep : BaseRepresentation-derived object
            A new representation object of this frame's `data`.

        Raises
        ------
        AttributeError
            If this object had no `data`

        Examples
        --------
        >>> from astropy import units as u
        >>> from astropy.coordinates import SkyCoord, CartesianRepresentation
        >>> coord = SkyCoord(0*u.deg, 0*u.deg)
        >>> coord.represent_as(CartesianRepresentation)
        <CartesianRepresentation (x, y, z) [dimensionless]
            (1.0, 0.0, 0.0)>

        >>> coord.representation = CartesianRepresentation
        >>> coord
        <SkyCoord (ICRS): (x, y, z) [dimensionless]
            (1.0, 0.0, 0.0)>
        """
        new_representation = _get_repr_cls(new_representation)
        # Results are memoized per (representation class name, in_frame_units)
        # in self._rep_cache (created in __init__ only when data is present).
        cached_repr = self._rep_cache.get((new_representation.__name__,
                                           in_frame_units))
        if not cached_repr:
            data = self.data.represent_as(new_representation)
            # If the new representation is known to this frame and has a defined
            # set of names and units, then use that.
            new_attrs = self.representation_info.get(new_representation)
            if new_attrs and in_frame_units:
                # Convert each component to this frame's preferred unit
                # before rebuilding the representation.
                datakwargs = dict((comp, getattr(data, comp))
                                  for comp in data.components)
                for comp, new_attr_unit in zip(data.components, new_attrs['units']):
                    if new_attr_unit:
                        datakwargs[comp] = datakwargs[comp].to(new_attr_unit)
                data = data.__class__(**datakwargs)
            self._rep_cache[new_representation.__name__, in_frame_units] = data
        return self._rep_cache[new_representation.__name__, in_frame_units]
def transform_to(self, new_frame):
"""
Transform this object's coordinate data to a new frame.
Parameters
----------
new_frame : class or frame object or SkyCoord object
The frame to transform this coordinate frame into.
Returns
-------
transframe
A new object with the coordinate data represented in the
``newframe`` system.
Raises
------
ValueError
If there is no possible transformation route.
"""
from .errors import ConvertError
if self._data is None:
raise ValueError('Cannot transform a frame with no data')
if inspect.isclass(new_frame):
#means use the defaults for this class
new_frame = new_frame()
if hasattr(new_frame, '_sky_coord_frame'):
# Input new_frame is not a frame instance or class and is most
# likely a SkyCoord object.
new_frame = new_frame._sky_coord_frame
trans = frame_transform_graph.get_transform(self.__class__,
new_frame.__class__)
if trans is None:
if new_frame is self.__class__:
# no special transform needed, but should update frame info
return new_frame.realize_frame(self.data)
msg = 'Cannot transform from {0} to {1}'
raise ConvertError(msg.format(self.__class__, new_frame.__class__))
return trans(self, new_frame)
def is_transformable_to(self, new_frame):
"""
Determines if this coordinate frame can be transformed to another
given frame.
Parameters
----------
new_frame : class or frame object
The proposed frame to transform into.
Returns
-------
transformable : bool or str
`True` if this can be transformed to ``new_frame``, `False` if
not, or the string 'same' if ``new_frame`` is the same system as
this object but no transformation is defined.
Notes
-----
A return value of 'same' means the transformation will work, but it will
just give back a copy of this object. The intended usage is::
if coord.is_transformable_to(some_unknown_frame):
coord2 = coord.transform_to(some_unknown_frame)
This will work even if ``some_unknown_frame`` turns out to be the same
frame class as ``coord``. This is intended for cases where the frame
is the same regardless of the frame attributes (e.g. ICRS), but be
aware that it *might* also indicate that someone forgot to define the
transformation between two objects of the same frame class but with
different attributes.
"""
new_frame_cls = new_frame if inspect.isclass(new_frame) else new_frame.__class__
trans = frame_transform_graph.get_transform(self.__class__, new_frame_cls)
if trans is None:
if new_frame_cls is self.__class__:
return 'same'
else:
return False
else:
return True
    def is_frame_attr_default(self, attrnm):
        """
        Determine whether or not a frame attribute has its value because it's
        the default value, or because this frame was created with that value
        explicitly requested.

        Parameters
        ----------
        attrnm : str
            The name of the attribute to check.

        Returns
        -------
        isdefault : bool
            True if the attribute ``attrnm`` has its value by default, False if
            it was specified at creation of this frame.
        """
        # _attr_names_with_defaults is populated in __init__ for every frame
        # attribute that was *not* passed in explicitly.
        return attrnm in self._attr_names_with_defaults
def is_equivalent_frame(self, other):
"""
Checks if this object is the same frame as the ``other`` object.
To be the same frame, two objects must be the same frame class and have
the same frame attributes. Note that it does *not* matter what, if any,
data either object has.
Parameters
----------
other : BaseCoordinateFrame
the other frame to check
Returns
-------
isequiv : bool
True if the frames are the same, False if not.
Raises
------
TypeError
If ``other`` isn't a `BaseCoordinateFrame` or subclass.
"""
if self.__class__ == other.__class__:
for frame_attr_name in self.get_frame_attr_names():
if getattr(self, frame_attr_name) != getattr(other, frame_attr_name):
return False
return True
elif not isinstance(other, BaseCoordinateFrame):
raise TypeError("Tried to do is_equivalent_frame on something that "
"isn't a frame")
else:
return False
def __repr__(self):
frameattrs = self._frame_attrs_repr()
data_repr = self._data_repr()
if frameattrs:
frameattrs = ' ({0})'.format(frameattrs)
if data_repr:
return '<{0} Coordinate{1}: {2}>'.format(self.__class__.__name__,
frameattrs, data_repr)
else:
return '<{0} Frame{1}>'.format(self.__class__.__name__,
frameattrs)
    def _data_repr(self):
        """Returns a string representation of the coordinate data."""
        if not self.has_data:
            return ''
        if self.representation:
            # A spherical frame whose data has no distance is stored as
            # UnitSpherical; render that class rather than converting.
            if (issubclass(self.representation, SphericalRepresentation) and
                    isinstance(self.data, UnitSphericalRepresentation)):
                data = self.represent_as(self.data.__class__,
                                         in_frame_units=True)
            else:
                data = self.represent_as(self.representation,
                                         in_frame_units=True)
            data_repr = repr(data)
            # Swap the representation's component names (e.g. 'lon') for
            # this frame's preferred names (e.g. 'ra') in the repr text.
            for nmpref, nmrepr in self.representation_component_names.items():
                data_repr = data_repr.replace(nmrepr, nmpref)
        else:
            data = self.data
            data_repr = repr(self.data)
        if data_repr.startswith('<' + data.__class__.__name__):
            # remove both the leading "<" and the space after the name, as well
            # as the trailing ">"
            data_repr = data_repr[(len(data.__class__.__name__) + 2):-1]
        else:
            data_repr = 'Data:\n' + data_repr
        return data_repr
def _frame_attrs_repr(self):
"""
Returns a string representation of the frame's attributes, if any.
"""
return ', '.join([attrnm + '=' + str(getattr(self, attrnm))
for attrnm in self.get_frame_attr_names()])
def __getitem__(self, view):
if self.has_data:
out = self.realize_frame(self.data[view])
out.representation = self.representation
return out
else:
raise ValueError('Cannot index a frame with no data')
    @override__dir__
    def __dir__(self):
        """
        Override the builtin `dir` behavior to include representation
        names.

        TODO: dynamic representation transforms (i.e. include cylindrical et al.).
        """
        # NOTE(review): override__dir__ presumably merges these names with
        # the default dir() listing -- confirm in astropy.utils.
        dir_values = set(self.representation_component_names)
        return dir_values
    def __getattr__(self, attr):
        """
        Allow access to attributes defined in
        ``self.representation_component_names``.

        TODO: dynamic representation transforms (i.e. include cylindrical et
        al.).
        """
        # attr == '_representation' is likely from the hasattr() test in the
        # representation property which is used for
        # self.representation_component_names.
        #
        # Prevent infinite recursion here.
        if (attr == '_representation' or
                attr not in self.representation_component_names):
            raise AttributeError("'{0}' object has no attribute '{1}'"
                                 .format(self.__class__.__name__, attr))
        # Component access (e.g. ``frame.ra``): convert the stored data to
        # the current representation in frame units and extract the piece.
        rep = self.represent_as(self.representation, in_frame_units=True)
        val = getattr(rep, self.representation_component_names[attr])
        return val
    def __setattr__(self, attr, value):
        # Forbid assigning to any representation component name (e.g.
        # ``frame.ra = ...``) across *all* known representations; all other
        # attributes are set normally.
        repr_attr_names = []
        if hasattr(self, 'representation_info'):
            for representation_attr in self.representation_info.values():
                repr_attr_names.extend(representation_attr['names'])
        if attr in repr_attr_names:
            raise AttributeError(
                'Cannot set any frame attribute {0}'.format(attr))
        else:
            super(BaseCoordinateFrame, self).__setattr__(attr, value)
    def separation(self, other):
        """
        Computes on-sky separation between this coordinate and another.

        Parameters
        ----------
        other : `~astropy.coordinates.BaseCoordinateFrame`
            The coordinate to get the separation to.

        Returns
        -------
        sep : `~astropy.coordinates.Angle`
            The on-sky separation between this and the ``other`` coordinate.

        Notes
        -----
        The separation is calculated using the Vincenty formula, which
        is stable at all locations, including poles and antipodes [1]_.

        .. [1] http://en.wikipedia.org/wiki/Great-circle_distance
        """
        from .angle_utilities import angular_separation
        from .angles import Angle
        # Work on the unit sphere: on-sky separation ignores distance, and
        # ``other`` must first be transformed into this frame.
        self_unit_sph = self.represent_as(UnitSphericalRepresentation)
        other_transformed = other.transform_to(self)
        other_unit_sph = other_transformed.represent_as(UnitSphericalRepresentation)
        # Get the separation as a Quantity, convert to Angle in degrees
        sep = angular_separation(self_unit_sph.lon, self_unit_sph.lat,
                                 other_unit_sph.lon, other_unit_sph.lat)
        return Angle(sep, unit=u.degree)
def separation_3d(self, other):
"""
Computes three dimensional separation between this coordinate
and another.
Parameters
----------
other : `~astropy.coordinates.BaseCoordinateFrame`
The coordinate system to get the distance to.
Returns
-------
sep : `~astropy.coordinates.Distance`
The real-space distance between these two coordinates.
Raises
------
ValueError
If this or the other coordinate do not have distances.
"""
from .distances import Distance
if self.data.__class__ == UnitSphericalRepresentation:
raise ValueError('This object does not have a distance; cannot '
'compute 3d separation.')
# do this first just in case the conversion somehow creates a distance
other_in_self_system = other.transform_to(self)
if other_in_self_system.__class__ == UnitSphericalRepresentation:
raise ValueError('The other object does not have a distance; '
'cannot compute 3d separation.')
dx = self.cartesian.x - other_in_self_system.cartesian.x
dy = self.cartesian.y - other_in_self_system.cartesian.y
dz = self.cartesian.z - other_in_self_system.cartesian.z
distval = (dx.value ** 2 + dy.value ** 2 + dz.value ** 2) ** 0.5
return Distance(distval, dx.unit)
    @property
    def cartesian(self):
        """
        Shorthand for a cartesian representation of the coordinates in this
        object.
        """
        # TODO: if representations are updated to use a full transform graph,
        # the representation aliases should not be hard-coded like this
        return self.represent_as(CartesianRepresentation, in_frame_units=True)
    @property
    def spherical(self):
        """
        Shorthand for a spherical representation of the coordinates in this object.
        """
        # TODO: if representations are updated to use a full transform graph,
        # the representation aliases should not be hard-coded like this
        return self.represent_as(SphericalRepresentation, in_frame_units=True)
class GenericFrame(BaseCoordinateFrame):
    """
    A dataless frame that can carry an arbitrary set of frame attributes.
    Mostly useful as a utility for the high-level class to store
    intermediate frame attributes.

    Parameters
    ----------
    frame_attrs : dict
        Mapping of attribute names to values to expose as this frame's
        frame attributes.
    """
    name = None  # it's not a "real" frame so it doesn't have a name

    def __init__(self, frame_attrs):
        # Bypass our own __setattr__ guard while recording which attribute
        # names this instance exposes, then initialize as a dataless frame.
        super(GenericFrame, self).__setattr__('_frame_attr_names', frame_attrs)
        super(GenericFrame, self).__init__(None)
        for name, value in frame_attrs.items():
            setattr(self, '_' + name, value)

    def get_frame_attr_names(self):
        return self._frame_attr_names

    def __getattr__(self, name):
        private = '_' + name
        if private in self.__dict__:
            return getattr(self, private)
        raise AttributeError('no {0}'.format(name))

    def __setattr__(self, name, value):
        # Frame attributes are read-only after construction.
        if name in self._frame_attr_names:
            raise AttributeError("can't set frame attribute '{0}'".format(name))
        super(GenericFrame, self).__setattr__(name, value)
# doing this import at the bottom prevents a circular import issue that is
# otherwise present due to EarthLocation needing to import ITRS
from .earth import EarthLocation
| mit |
tiagochiavericosta/edx-platform | lms/djangoapps/courseware/tests/test_credit_requirements.py | 41 | 5422 | """
Tests for credit requirement display on the progress page.
"""
import datetime
from mock import patch
from pytz import UTC
from django.conf import settings
from django.core.urlresolvers import reverse
from xmodule.modulestore.tests.django_utils import ModuleStoreTestCase
from xmodule.modulestore.tests.factories import CourseFactory
from student.tests.factories import UserFactory, CourseEnrollmentFactory
from util.date_utils import get_time_display, DEFAULT_SHORT_DATE_FORMAT
from openedx.core.djangoapps.credit import api as credit_api
from openedx.core.djangoapps.credit.models import CreditCourse
@patch.dict(settings.FEATURES, {"ENABLE_CREDIT_ELIGIBILITY": True})
class ProgressPageCreditRequirementsTest(ModuleStoreTestCase):
    """
    Tests for credit requirement display on the progress page.

    Each test creates a credit-enabled course with two requirements (a
    minimum final grade and an in-course reverification), enrolls a
    "verified" user, then checks the requirement-status messaging rendered
    on the progress page.
    """

    # Credentials and display name for the test user
    USERNAME = "bob"
    PASSWORD = "test"
    USER_FULL_NAME = "Bob"

    # Display names of the two configured credit requirements
    MIN_GRADE_REQ_DISPLAY = "Final Grade Credit Requirement"
    VERIFICATION_REQ_DISPLAY = "Midterm Exam Credit Requirement"

    def setUp(self):
        super(ProgressPageCreditRequirementsTest, self).setUp()

        # Create a course and configure it as a credit course
        self.course = CourseFactory.create()
        CreditCourse.objects.create(course_key=self.course.id, enabled=True)

        # Configure credit requirements (passing grade and in-course reverification)
        credit_api.set_credit_requirements(
            self.course.id,
            [
                {
                    "namespace": "grade",
                    "name": "grade",
                    "display_name": self.MIN_GRADE_REQ_DISPLAY,
                    "criteria": {
                        "min_grade": 0.8
                    }
                },
                {
                    "namespace": "reverification",
                    "name": "midterm",
                    "display_name": self.VERIFICATION_REQ_DISPLAY,
                    "criteria": {}
                }
            ]
        )

        # Create a user and log in
        self.user = UserFactory.create(username=self.USERNAME, password=self.PASSWORD)
        self.user.profile.name = self.USER_FULL_NAME
        self.user.profile.save()
        result = self.client.login(username=self.USERNAME, password=self.PASSWORD)
        self.assertTrue(result, msg="Could not log in")

        # Enroll the user in the course as "verified"
        self.enrollment = CourseEnrollmentFactory(
            user=self.user,
            course_id=self.course.id,
            mode="verified"
        )

    def test_credit_requirements_maybe_eligible(self):
        # The user hasn't satisfied any of the credit requirements yet, but she
        # also hasn't failed any.
        response = self._get_progress_page()

        # Expect that the requirements are displayed
        self.assertContains(response, self.MIN_GRADE_REQ_DISPLAY)
        self.assertContains(response, self.VERIFICATION_REQ_DISPLAY)
        self.assertContains(response, "Upcoming")
        self.assertContains(
            response,
            "{}, you have not yet met the requirements for credit".format(self.USER_FULL_NAME)
        )

    def test_credit_requirements_eligible(self):
        # Mark the user as eligible for all requirements
        credit_api.set_credit_requirement_status(
            self.user.username, self.course.id,
            "grade", "grade",
            status="satisfied",
            reason={"final_grade": 0.95}
        )
        credit_api.set_credit_requirement_status(
            self.user.username, self.course.id,
            "reverification", "midterm",
            status="satisfied", reason={}
        )

        # Check the progress page display
        response = self._get_progress_page()
        self.assertContains(response, self.MIN_GRADE_REQ_DISPLAY)
        self.assertContains(response, self.VERIFICATION_REQ_DISPLAY)
        self.assertContains(
            response,
            "{}, you have met the requirements for credit in this course.".format(self.USER_FULL_NAME)
        )
        self.assertContains(response, "Verified on {date}".format(date=self._now_formatted_date()))
        self.assertContains(response, "95%")

    def test_credit_requirements_not_eligible(self):
        # Mark the user as having failed the verification requirement.
        # (Only this one requirement is failed -- the original comment
        # claiming "both" was wrong; one failure suffices for the
        # "no longer eligible" message asserted below.)
        credit_api.set_credit_requirement_status(
            self.user.username, self.course.id,
            "reverification", "midterm",
            status="failed", reason={}
        )

        # Check the progress page display
        response = self._get_progress_page()
        self.assertContains(response, self.MIN_GRADE_REQ_DISPLAY)
        self.assertContains(response, self.VERIFICATION_REQ_DISPLAY)
        self.assertContains(
            response,
            "{}, you are no longer eligible for credit in this course.".format(self.USER_FULL_NAME)
        )
        self.assertContains(response, "Verification Failed")

    def _get_progress_page(self):
        """Load the progress page for the course the user is enrolled in. """
        url = reverse("progress", kwargs={"course_id": unicode(self.course.id)})
        return self.client.get(url)

    def _now_formatted_date(self):
        """Retrieve the formatted current date. """
        return get_time_display(
            datetime.datetime.now(UTC),
            DEFAULT_SHORT_DATE_FORMAT,
            settings.TIME_ZONE
        )
| agpl-3.0 |
Nephos/gitinspector | setup.py | 50 | 1900 | # coding: utf-8
#
# Copyright © 2013 Ejwa Software. All rights reserved.
#
# This file is part of gitinspector.
#
# gitinspector is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# gitinspector is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with gitinspector. If not, see <http://www.gnu.org/licenses/>.
import os
import sys
from gitinspector.version import __version__
from glob import glob
from setuptools import setup, find_packages
def read(fname):
    """Return the contents of *fname*, resolved relative to this file's directory.

    Uses a context manager so the file handle is closed deterministically
    (the original leaked the handle by relying on garbage collection).
    """
    path = os.path.join(os.path.dirname(__file__), fname)
    with open(path) as handle:
        return handle.read()
# Package metadata for gitinspector; executed at import time when run as
# ``python setup.py <command>``.
setup(
    name = "gitinspector",
    version = __version__,
    author = "Ejwa Software",
    author_email = "gitinspector@ejwa.se",
    description = ("A statistical analysis tool for git repositories."),
    license = "GNU GPL v3",
    keywords = "analysis analyzer git python statistics stats vc vcs timeline",
    url = "http://gitinspector.googlecode.com",
    long_description = read("DESCRIPTION.txt"),
    classifiers = [
        "Development Status :: 4 - Beta",
        "Environment :: Console",
        "Intended Audience :: Developers",
        "License :: OSI Approved :: GNU General Public License v3 or later (GPLv3+)",
        "Topic :: Software Development :: Version Control",
        "Topic :: Utilities"
    ],
    # Ship HTML templates and translations inside the package; *.txt docs
    # are installed under share/doc.  Tests are excluded from the dist.
    packages = find_packages(exclude = ['tests']),
    package_data = {"": ["html/*", "translations/*"]},
    data_files = [("share/doc/gitinspector", glob("*.txt"))],
    entry_points = {"console_scripts": ["gitinspector = gitinspector.gitinspector:main"]},
    zip_safe = False
)
| gpl-3.0 |
code-sauce/tensorflow | tensorflow/python/kernel_tests/conv_ops_3d_test.py | 20 | 15925 | # Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Functional tests for 3d convolutional operations."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import collections
import math
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import dtypes
from tensorflow.python.ops import gradient_checker
from tensorflow.python.ops import nn_ops
import tensorflow.python.ops.nn_grad # pylint: disable=unused-import
from tensorflow.python.platform import test
class Conv3DTest(test.TestCase):
  def _VerifyValues(self, tensor_in_sizes, filter_in_sizes, stride, padding,
                    expected):
    """Run conv3d on sequentially-numbered inputs and check the flat output.

    Args:
      tensor_in_sizes: input shape [batch, z, y, x, in_channels].
      filter_in_sizes: filter shape [z, y, x, in_channels, out_channels].
      stride: an int (same stride for every spatial dim) or an iterable of
        three per-dimension strides.
      padding: "SAME" or "VALID".
      expected: flattened expected output values.
    """
    total_size_1 = 1
    total_size_2 = 1
    for s in tensor_in_sizes:
      total_size_1 *= s
    for s in filter_in_sizes:
      total_size_2 *= s
    # NOTE(review): collections.Iterable is deprecated (removed in Python
    # 3.10); collections.abc.Iterable is the py3 spelling, but this file
    # appears to target py2/py3 compat -- confirm before changing.
    if isinstance(stride, collections.Iterable):
      strides = [1] + list(stride) + [1]
    else:
      strides = [1, stride, stride, stride, 1]
    # Initializes the input tensor with array containing incrementing
    # numbers from 1.
    x1 = [f * 1.0 for f in range(1, total_size_1 + 1)]
    x2 = [f * 1.0 for f in range(1, total_size_2 + 1)]
    with self.test_session(use_gpu=True) as sess:
      t1 = constant_op.constant(x1, shape=tensor_in_sizes)
      t2 = constant_op.constant(x2, shape=filter_in_sizes)
      conv = nn_ops.conv3d(t1, t2, strides, padding=padding)
      value = sess.run(conv)
    print("expected = ", expected)
    print("actual = ", value)
    self.assertArrayNear(expected, value.flatten(), 1e-5)
def testConv3D1x1x1Filter(self):
expected_output = [
30.0, 36.0, 42.0, 66.0, 81.0, 96.0, 102.0, 126.0, 150.0, 138.0, 171.0,
204.0, 174.0, 216.0, 258.0, 210.0, 261.0, 312.0
]
# These are equivalent to the Conv2D1x1 case.
self._VerifyValues(
tensor_in_sizes=[1, 2, 3, 1, 3],
filter_in_sizes=[1, 1, 1, 3, 3],
stride=1,
padding="VALID",
expected=expected_output)
self._VerifyValues(
tensor_in_sizes=[1, 2, 1, 3, 3],
filter_in_sizes=[1, 1, 1, 3, 3],
stride=1,
padding="VALID",
expected=expected_output)
self._VerifyValues(
tensor_in_sizes=[1, 1, 2, 3, 3],
filter_in_sizes=[1, 1, 1, 3, 3],
stride=1,
padding="VALID",
expected=expected_output)
  # Expected values computed using scipy's correlate function.
  def testConv3D2x2x2Filter(self):
    # 18 outputs = 3 z-positions x 1 y-position x 2 x-positions x 3 output
    # channels, i.e. output shape [1, 3, 1, 2, 3].  (The original
    # "expected_shape" comment ended in 5, but the filter has fout=3.)
    expected_output = [
        19554., 19962., 20370., 22110., 22590., 23070., 34890., 35730., 36570.,
        37446., 38358., 39270., 50226., 51498., 52770., 52782., 54126., 55470.
    ]
    # expected_shape = [1, 3, 1, 2, 3]
    self._VerifyValues(
        tensor_in_sizes=[1, 4, 2, 3, 3],  # b, z, y, x, fin
        filter_in_sizes=[2, 2, 2, 3, 3],  # z, y, x, fin, fout
        stride=1,
        padding="VALID",
        expected=expected_output)
def testConv3DStrides(self):
expected_output = [
102.,
151.,
172.,
193.,
214.,
235.,
142.,
438.,
592.,
613.,
634.,
655.,
676.,
394.,
774.,
1033.,
1054.,
1075.,
1096.,
1117.,
646.,
1894.,
2503.,
2524.,
2545.,
2566.,
2587.,
1486.,
2230.,
2944.,
2965.,
2986.,
3007.,
3028.,
1738.,
2566.,
3385.,
3406.,
3427.,
3448.,
3469.,
1990.,
3686.,
4855.,
4876.,
4897.,
4918.,
4939.,
2830.,
4022.,
5296.,
5317.,
5338.,
5359.,
5380.,
3082.,
4358.,
5737.,
5758.,
5779.,
5800.,
5821.,
3334.,
]
self._VerifyValues(
tensor_in_sizes=[1, 5, 8, 7, 1],
filter_in_sizes=[1, 2, 3, 1, 1],
stride=[2, 3, 1], # different stride for each spatial dimension
padding="SAME",
expected=expected_output)
  def testConv3D2x2x2FilterStride2(self):
    # Same input/filter as testConv3D2x2x2Filter but stride 2 everywhere;
    # only the z axis fits two windows, so the output is [1, 2, 1, 1, 3].
    expected_output = [19554., 19962., 20370., 50226., 51498., 52770.]
    self._VerifyValues(
        tensor_in_sizes=[1, 4, 2, 3, 3],
        filter_in_sizes=[2, 2, 2, 3, 3],
        stride=2,
        padding="VALID",
        expected=expected_output)
  def testConv3DStride3(self):
    # 3x2x1 kernel, stride 3, VALID padding over a 6x7x8x2 input ->
    # output shape [1, 2, 2, 3, 3] (36 values).
    expected_output = [
        36564., 38022., 39480., 37824., 39354., 40884., 39084., 40686., 42288.,
        46644., 48678., 50712., 47904., 50010., 52116., 49164., 51342., 53520.,
        107124., 112614., 118104., 108384., 113946., 119508., 109644., 115278.,
        120912., 117204., 123270., 129336., 118464., 124602., 130740., 119724.,
        125934., 132144.
    ]
    self._VerifyValues(
        tensor_in_sizes=[1, 6, 7, 8, 2],
        filter_in_sizes=[3, 2, 1, 2, 3],
        stride=3,
        padding="VALID",
        expected=expected_output)
  def testConv3D2x2x2FilterStride2Same(self):
    # As testConv3D2x2x2FilterStride2 but SAME padding keeps a partial
    # window along x, giving output shape [1, 2, 1, 2, 3] (12 values).
    expected_output = [
        19554., 19962., 20370., 10452., 10710., 10968., 50226., 51498., 52770.,
        23844., 24534., 25224.
    ]
    self._VerifyValues(
        tensor_in_sizes=[1, 4, 2, 3, 3],
        filter_in_sizes=[2, 2, 2, 3, 3],
        stride=2,
        padding="SAME",
        expected=expected_output)
  def testKernelSmallerThanStride(self):
    # 1x1x1 kernel with stride 2 just samples every other voxel, so SAME
    # and VALID padding agree exactly on a 3x3x3 input.
    expected_output = [1., 3., 7., 9., 19., 21., 25., 27.]
    self._VerifyValues(
        tensor_in_sizes=[1, 3, 3, 3, 1],
        filter_in_sizes=[1, 1, 1, 1, 1],
        stride=2,
        padding="SAME",
        expected=expected_output)
    self._VerifyValues(
        tensor_in_sizes=[1, 3, 3, 3, 1],
        filter_in_sizes=[1, 1, 1, 1, 1],
        stride=2,
        padding="VALID",
        expected=expected_output)

    # 2x2x2 kernel, stride 3 on a 7x7x7 input: SAME padding includes
    # partial windows at the far edges (the smaller sums below), while
    # VALID keeps only fully-covered windows.
    expected_output = [
        1484., 1592., 770., 2240., 2348., 1106., 1149., 1191., 539., 6776.,
        6884., 3122., 7532., 7640., 3458., 3207., 3249., 1421., 3005., 3035.,
        1225., 3215., 3245., 1309., 1013., 1022., 343.
    ]
    self._VerifyValues(
        tensor_in_sizes=[1, 7, 7, 7, 1],
        filter_in_sizes=[2, 2, 2, 1, 1],
        stride=3,
        padding="SAME",
        expected=expected_output)

    expected_output = [1484., 1592., 2240., 2348., 6776., 6884., 7532., 7640.]
    self._VerifyValues(
        tensor_in_sizes=[1, 7, 7, 7, 1],
        filter_in_sizes=[2, 2, 2, 1, 1],
        stride=3,
        padding="VALID",
        expected=expected_output)
def testKernelSizeMatchesInputSize(self):
    """A filter spanning the whole input yields a single output position."""
    self._VerifyValues(
        tensor_in_sizes=[1, 2, 1, 2, 1],
        filter_in_sizes=[2, 1, 2, 1, 2],
        stride=1, padding="VALID",
        expected=[50, 60])
def ConstructAndTestGradient(self, batch, input_planes, input_rows,
                             input_cols, filter_planes, filter_rows,
                             filter_cols, in_depth, out_depth, stride,
                             padding, test_input):
    """Numerically check the conv3d gradient for one configuration.

    Args:
      batch: batch size of the synthetic input.
      input_planes, input_rows, input_cols: spatial extent of the input.
      filter_planes, filter_rows, filter_cols: spatial extent of the filter.
      in_depth, out_depth: input and output channel counts.
      stride: an int applied to every spatial dimension, or an iterable of
        three per-dimension strides (planes, rows, cols).
      padding: "VALID" or "SAME".
      test_input: if True, check the gradient w.r.t. the input tensor,
        otherwise w.r.t. the filter tensor.
    """
    # collections.Iterable was removed in Python 3.10; use collections.abc.
    from collections.abc import Iterable
    input_shape = [batch, input_planes, input_rows, input_cols, in_depth]
    filter_shape = [
        filter_planes, filter_rows, filter_cols, in_depth, out_depth
    ]
    if isinstance(stride, Iterable):
        strides = [1] + list(stride) + [1]
    else:
        strides = [1, stride, stride, stride, 1]
    # Expected output extent per conv arithmetic for each padding mode.
    if padding == "VALID":
        output_planes = int(
            math.ceil((input_planes - filter_planes + 1.0) / strides[1]))
        output_rows = int(
            math.ceil((input_rows - filter_rows + 1.0) / strides[2]))
        output_cols = int(
            math.ceil((input_cols - filter_cols + 1.0) / strides[3]))
    else:
        output_planes = int(math.ceil(float(input_planes) / strides[1]))
        output_rows = int(math.ceil(float(input_rows) / strides[2]))
        output_cols = int(math.ceil(float(input_cols) / strides[3]))
    output_shape = [batch, output_planes, output_rows, output_cols, out_depth]
    input_size = 1
    for x in input_shape:
        input_size *= x
    filter_size = 1
    for x in filter_shape:
        filter_size *= x
    # Deterministic ramp data in [0, 1) so failures are reproducible.
    input_data = [x * 1.0 / input_size for x in range(0, input_size)]
    filter_data = [x * 1.0 / filter_size for x in range(0, filter_size)]
    if test.is_gpu_available():
        data_type = dtypes.float32
        # BUG FIX: the original code re-tested `test.is_gpu_available()` in a
        # nested `if` that duplicated the enclosing condition, so its `else`
        # branch (an 8e-3 CPU tolerance) was unreachable dead code.  The GPU
        # path always used 4e-3; keep that behavior and drop the dead branch.
        tolerance = 4e-3
    else:
        # float64 on CPU gives a much tighter numerical-gradient match.
        data_type = dtypes.float64
        tolerance = 1e-8
    with self.test_session(use_gpu=True):
        input_tensor = constant_op.constant(
            input_data, shape=input_shape, dtype=data_type, name="input")
        filter_tensor = constant_op.constant(
            filter_data, shape=filter_shape, dtype=data_type, name="filter")
        conv = nn_ops.conv3d(
            input_tensor, filter_tensor, strides, padding, name="conv")
        if test_input:
            err = gradient_checker.compute_gradient_error(
                input_tensor, input_shape, conv, output_shape)
        else:
            err = gradient_checker.compute_gradient_error(
                filter_tensor, filter_shape, conv, output_shape)
    print("conv3d gradient error = ", err)
    self.assertLess(err, tolerance)
def testInputGradientValidPaddingStrideOne(self):
    """Gradient w.r.t. the input: VALID padding, stride 1."""
    self.ConstructAndTestGradient(
        batch=2, input_planes=3, input_rows=5, input_cols=4,
        filter_planes=3, filter_rows=3, filter_cols=3,
        in_depth=2, out_depth=3, stride=1, padding="VALID",
        test_input=True)
def testFilterGradientValidPaddingStrideOne(self):
    """Gradient w.r.t. the filter: VALID padding, stride 1."""
    self.ConstructAndTestGradient(
        batch=4, input_planes=4, input_rows=6, input_cols=5,
        filter_planes=2, filter_rows=2, filter_cols=2,
        in_depth=2, out_depth=3, stride=1, padding="VALID",
        test_input=False)
def testInputGradientValidPaddingStrideTwo(self):
    """Gradient w.r.t. the input: VALID padding, stride 2."""
    self.ConstructAndTestGradient(
        batch=2, input_planes=6, input_rows=3, input_cols=5,
        filter_planes=3, filter_rows=3, filter_cols=3,
        in_depth=2, out_depth=3, stride=2, padding="VALID",
        test_input=True)
def testFilterGradientValidPaddingStrideTwo(self):
    """Gradient w.r.t. the filter: VALID padding, stride 2."""
    self.ConstructAndTestGradient(
        batch=2, input_planes=7, input_rows=6, input_cols=5,
        filter_planes=2, filter_rows=2, filter_cols=2,
        in_depth=2, out_depth=3, stride=2, padding="VALID",
        test_input=False)
def testInputGradientValidPaddingStrideThree(self):
    """Gradient w.r.t. the input: VALID padding, stride 3."""
    self.ConstructAndTestGradient(
        batch=2, input_planes=3, input_rows=7, input_cols=6,
        filter_planes=3, filter_rows=3, filter_cols=3,
        in_depth=2, out_depth=3, stride=3, padding="VALID",
        test_input=True)
def testFilterGradientValidPaddingStrideThree(self):
    """Gradient w.r.t. the filter: VALID padding, stride 3."""
    self.ConstructAndTestGradient(
        batch=2, input_planes=4, input_rows=4, input_cols=7,
        filter_planes=4, filter_rows=4, filter_cols=4,
        in_depth=2, out_depth=3, stride=3, padding="VALID",
        test_input=False)
def testInputGradientSamePaddingStrideOne(self):
    """Gradient w.r.t. the input: SAME padding, stride 1."""
    self.ConstructAndTestGradient(
        batch=2, input_planes=3, input_rows=2, input_cols=2,
        filter_planes=3, filter_rows=2, filter_cols=1,
        in_depth=2, out_depth=1, stride=1, padding="SAME",
        test_input=True)
def testFilterGradientSamePaddingStrideOne(self):
    """Gradient w.r.t. the filter: SAME padding, stride 1."""
    self.ConstructAndTestGradient(
        batch=2, input_planes=3, input_rows=6, input_cols=5,
        filter_planes=2, filter_rows=2, filter_cols=2,
        in_depth=2, out_depth=3, stride=1, padding="SAME",
        test_input=False)
def testInputGradientSamePaddingStrideTwo(self):
    """Gradient w.r.t. the input: SAME padding, stride 2."""
    self.ConstructAndTestGradient(
        batch=2, input_planes=6, input_rows=3, input_cols=4,
        filter_planes=3, filter_rows=3, filter_cols=3,
        in_depth=2, out_depth=3, stride=2, padding="SAME",
        test_input=True)
def testFilterGradientSamePaddingStrideTwo(self):
    """Gradient w.r.t. the filter: SAME padding, stride 2."""
    self.ConstructAndTestGradient(
        batch=4, input_planes=7, input_rows=3, input_cols=5,
        filter_planes=2, filter_rows=2, filter_cols=2,
        in_depth=2, out_depth=3, stride=2, padding="SAME",
        test_input=False)
def testInputGradientSamePaddingStrideThree(self):
    """Gradient w.r.t. the input: SAME padding, stride 3."""
    self.ConstructAndTestGradient(
        batch=2, input_planes=9, input_rows=3, input_cols=6,
        filter_planes=3, filter_rows=3, filter_cols=3,
        in_depth=2, out_depth=3, stride=3, padding="SAME",
        test_input=True)
def testFilterGradientSamePaddingStrideThree(self):
    """Gradient w.r.t. the filter: SAME padding, stride 3."""
    self.ConstructAndTestGradient(
        batch=2, input_planes=9, input_rows=4, input_cols=7,
        filter_planes=4, filter_rows=4, filter_cols=4,
        in_depth=2, out_depth=3, stride=3, padding="SAME",
        test_input=False)
def testInputGradientSamePaddingDifferentStrides(self):
    """Gradient w.r.t. the input with a different stride per spatial axis."""
    self.ConstructAndTestGradient(
        batch=1, input_planes=5, input_rows=8, input_cols=7,
        filter_planes=1, filter_rows=2, filter_cols=3,
        in_depth=2, out_depth=3, stride=[2, 3, 1], padding="SAME",
        test_input=True)
def testFilterGradientKernelSizeMatchesInputSize(self):
    """Filter gradient when the kernel covers the entire input volume."""
    self.ConstructAndTestGradient(
        batch=2, input_planes=5, input_rows=4, input_cols=3,
        filter_planes=5, filter_rows=4, filter_cols=3,
        in_depth=2, out_depth=3, stride=1, padding="VALID",
        test_input=False)
def testInputGradientKernelSizeMatchesInputSize(self):
    """Input gradient when the kernel covers the entire input volume."""
    self.ConstructAndTestGradient(
        batch=2, input_planes=5, input_rows=4, input_cols=3,
        filter_planes=5, filter_rows=4, filter_cols=3,
        in_depth=2, out_depth=3, stride=1, padding="VALID",
        test_input=True)
def disabledtestFilterGradientSamePaddingDifferentStrides(self):
    """Filter gradient with per-axis strides.

    Deliberately NOT discovered by the runner: the name lacks the
    'test' prefix, which is how this case is kept disabled.
    """
    self.ConstructAndTestGradient(
        batch=1, input_planes=5, input_rows=8, input_cols=7,
        filter_planes=1, filter_rows=2, filter_cols=3,
        in_depth=2, out_depth=3, stride=[2, 3, 1], padding="SAME",
        test_input=False)
# Run the TensorFlow test runner when invoked directly.
if __name__ == "__main__":
    test.main()
| apache-2.0 |
bigendiansmalls/capstone | bindings/python/test_x86.py | 32 | 5316 | #!/usr/bin/env python
# Capstone Python bindings, by Nguyen Anh Quynnh <aquynh@gmail.com>
from __future__ import print_function
from capstone import *
from capstone.x86 import *
from xprint import to_hex, to_x, to_x_32
# Raw machine-code fixtures to disassemble (16/32-bit samples are identical).
X86_CODE64 = b"\x55\x48\x8b\x05\xb8\x13\x00\x00"
X86_CODE16 = b"\x8d\x4c\x32\x08\x01\xd8\x81\xc6\x34\x12\x00\x00\x05\x23\x01\x00\x00\x36\x8b\x84\x91\x23\x01\x00\x00\x41\x8d\x84\x39\x89\x67\x00\x00\x8d\x87\x89\x67\x00\x00\xb4\xc6"
X86_CODE32 = b"\x8d\x4c\x32\x08\x01\xd8\x81\xc6\x34\x12\x00\x00\x05\x23\x01\x00\x00\x36\x8b\x84\x91\x23\x01\x00\x00\x41\x8d\x84\x39\x89\x67\x00\x00\x8d\x87\x89\x67\x00\x00\xb4\xc6"
# One (arch, mode, code, human-readable comment, syntax option) tuple per
# platform exercised by test_class(); syntax 0 means the capstone default.
all_tests = (
    (CS_ARCH_X86, CS_MODE_16, X86_CODE16, "X86 16bit (Intel syntax)", 0),
    (CS_ARCH_X86, CS_MODE_32, X86_CODE32, "X86 32 (AT&T syntax)", CS_OPT_SYNTAX_ATT),
    (CS_ARCH_X86, CS_MODE_32, X86_CODE32, "X86 32 (Intel syntax)", 0),
    (CS_ARCH_X86, CS_MODE_64, X86_CODE64, "X86 64 (Intel syntax)", 0),
)
def print_insn_detail(mode, insn):
    """Print one disassembled instruction and its x86-specific details.

    mode: the CS_MODE_* constant the code was decoded with (needed because
        the SIB byte does not exist in 16-bit mode).
    insn: a capstone CsInsn produced with detail mode enabled.

    The exact printed format is the contract of this demo script.
    """
    def print_string_hex(comment, str):
        # Dump a byte sequence as space-separated hex, prefixed by *comment*.
        print(comment, end=' '),
        for c in str:
            print("0x%02x " % c, end=''),
        print()
    # print address, mnemonic and operands
    print("0x%x:\t%s\t%s" % (insn.address, insn.mnemonic, insn.op_str))
    # "data" instruction generated by SKIPDATA option has no detail
    if insn.id == 0:
        return
    # print instruction prefix
    print_string_hex("\tPrefix:", insn.prefix)
    # print instruction's opcode
    print_string_hex("\tOpcode:", insn.opcode)
    # print operand's REX prefix (non-zero value is relevant for x86_64 instructions)
    print("\trex: 0x%x" % (insn.rex))
    # print operand's address size
    print("\taddr_size: %u" % (insn.addr_size))
    # print modRM byte
    print("\tmodrm: 0x%x" % (insn.modrm))
    # print displacement value
    print("\tdisp: 0x%s" % to_x_32(insn.disp))
    # SIB is not available in 16-bit mode
    if (mode & CS_MODE_16 == 0):
        # print SIB byte
        print("\tsib: 0x%x" % (insn.sib))
        if (insn.sib):
            if insn.sib_base != 0:
                print("\t\tsib_base: %s" % (insn.reg_name(insn.sib_base)))
            if insn.sib_index != 0:
                print("\t\tsib_index: %s" % (insn.reg_name(insn.sib_index)))
            if insn.sib_scale != 0:
                print("\t\tsib_scale: %d" % (insn.sib_scale))
    # SSE CC type
    if insn.sse_cc != X86_SSE_CC_INVALID:
        print("\tsse_cc: %u" % (insn.sse_cc))
    # AVX CC type
    if insn.avx_cc != X86_AVX_CC_INVALID:
        print("\tavx_cc: %u" % (insn.avx_cc))
    # AVX Suppress All Exception
    if insn.avx_sae:
        print("\tavx_sae: TRUE")
    # AVX Rounding Mode type
    if insn.avx_rm != X86_AVX_RM_INVALID:
        print("\tavx_rm: %u" % (insn.avx_rm))
    # Immediate operands are listed first, 1-indexed per the capstone API.
    count = insn.op_count(X86_OP_IMM)
    if count > 0:
        print("\timm_count: %u" % count)
        for i in range(count):
            op = insn.op_find(X86_OP_IMM, i + 1)
            print("\t\timms[%u]: 0x%s" % (i + 1, to_x(op.imm)))
    # Then every operand with its type-specific fields.
    if len(insn.operands) > 0:
        print("\top_count: %u" % len(insn.operands))
        c = -1
        for i in insn.operands:
            c += 1
            if i.type == X86_OP_REG:
                print("\t\toperands[%u].type: REG = %s" % (c, insn.reg_name(i.reg)))
            if i.type == X86_OP_IMM:
                print("\t\toperands[%u].type: IMM = 0x%s" % (c, to_x(i.imm)))
            if i.type == X86_OP_FP:
                print("\t\toperands[%u].type: FP = %f" % (c, i.fp))
            if i.type == X86_OP_MEM:
                print("\t\toperands[%u].type: MEM" % c)
                if i.mem.segment != 0:
                    print("\t\t\toperands[%u].mem.segment: REG = %s" % (c, insn.reg_name(i.mem.segment)))
                if i.mem.base != 0:
                    print("\t\t\toperands[%u].mem.base: REG = %s" % (c, insn.reg_name(i.mem.base)))
                if i.mem.index != 0:
                    print("\t\t\toperands[%u].mem.index: REG = %s" % (c, insn.reg_name(i.mem.index)))
                if i.mem.scale != 1:
                    print("\t\t\toperands[%u].mem.scale: %u" % (c, i.mem.scale))
                if i.mem.disp != 0:
                    print("\t\t\toperands[%u].mem.disp: 0x%s" % (c, to_x(i.mem.disp)))
            # AVX broadcast type
            if i.avx_bcast != X86_AVX_BCAST_INVALID:
                print("\t\toperands[%u].avx_bcast: %u" % (c, i.avx_bcast))
            # AVX zero opmask {z}
            if i.avx_zero_opmask:
                print("\t\toperands[%u].avx_zero_opmask: TRUE" % (c))
            print("\t\toperands[%u].size: %u" % (c, i.size))
# ## Test class Cs
def test_class():
    """Disassemble every fixture platform and dump instruction details."""
    for arch, mode, code, comment, syntax in all_tests:
        print("*" * 16)
        print("Platform: %s" % comment)
        print("Code: %s" % to_hex(code))
        print("Disasm:")
        try:
            disassembler = Cs(arch, mode)
            disassembler.detail = True  # required for print_insn_detail
            if syntax != 0:
                disassembler.syntax = syntax
            for insn in disassembler.disasm(code, 0x1000):
                print_insn_detail(mode, insn)
                print ()
            # After the loop, report the address just past the last insn.
            print ("0x%x:\n" % (insn.address + insn.size))
        except CsError as e:
            print("ERROR: %s" % e)
# Run the demonstration when invoked as a script.
if __name__ == '__main__':
    test_class()
| bsd-3-clause |
sonofeft/ODSCharts | docs/fulltoc.py | 10 | 3362 | # -*- encoding: utf-8 -*-
#
# Copyright 2012 New Dream Network, LLC (DreamHost)
#
# Author: Doug Hellmann <doug.hellmann@dreamhost.com>
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from sphinx import addnodes
def html_page_context(app, pagename, templatename, context, doctree):
    """Event handler for the html-page-context signal.

    Mutates *context* in place so every page shows the complete document
    tree: replaces the HTML builder's 'toc' value with a fully rendered
    toctree, and forces 'display_toc' on so even pages with no local
    headings still get a sidebar table of contents.
    """
    context['toc'] = get_rendered_toctree(app.builder, pagename)
    context['display_toc'] = True  # force toctree to display
    # NOTE: a custom 'toctree' context override used to be installed here,
    # but it was removed (Sept 5, 2015) because ReadTheDocs failed with
    # "TypeError: <function make_toctree ...> is not JSON serializable"
    # while serializing the page context.
def get_rendered_toctree(builder, docname, prune=False, collapse=True):
    """Build the toctree relative to *docname* with the given parameters
    and return the rendered HTML fragment."""
    fulltoc = build_full_toctree(
        builder, docname, prune=prune, collapse=collapse)
    return builder.render_partial(fulltoc)['fragment']
def build_full_toctree(builder, docname, prune, collapse):
    """Return a single toctree rooted at the master document that contains
    all sub-document doctrees, or None if the master has no toctrees."""
    env = builder.env
    master = env.get_doctree(env.config.master_doc)
    resolved = [
        env.resolve_toctree(docname, builder, node,
                            collapse=collapse, prune=prune)
        for node in master.traverse(addnodes.toctree)
    ]
    if not resolved:
        return None
    # Merge every subsequent toctree's children into the first one.
    merged = resolved[0]
    for extra in resolved[1:]:
        if extra:
            merged.extend(extra.children)
    env.resolve_references(merged, docname, builder)
    return merged
def setup(app):
    """Sphinx extension entry point: hook the html-page-context event."""
    app.connect('html-page-context', html_page_context)
| gpl-3.0 |
atilag/qiskit-sdk-py | qiskit/_jobprocessor.py | 1 | 4559 | # -*- coding: utf-8 -*-
# Copyright 2017 IBM RESEARCH. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# =============================================================================
"""Processor for running Quantum Jobs in the different backends."""
import logging
import pprint
from concurrent import futures
from threading import Lock
from ._qiskiterror import QISKitError
from ._compiler import compile_circuit
from ._result import Result
logger = logging.getLogger(__name__)
def run_backend(q_job):
    """Run a program of compiled quantum circuits on a backend.

    Args:
        q_job (QuantumJob): job object

    Returns:
        Result: Result object.

    Raises:
        QISKitError: if the backend is missing or does not match the Qobj.
    """
    backend, qobj = q_job.backend, q_job.qobj
    declared_name = qobj['config']['backend_name']
    if not backend:
        raise QISKitError("No backend instance to run on.")
    if declared_name != backend.configuration['name']:
        raise QISKitError('non-matching backends specified in Qobj '
                          'object and json')
    # Local simulators need circuits compiled client-side; remote backends
    # compile server-side.  (remove condition when api gets qobj)
    if backend.configuration.get('local'):
        for circuit in qobj['circuits']:
            if circuit['compiled_circuit'] is None:
                circuit['compiled_circuit'] = compile_circuit(
                    circuit['circuit'], format='json')
    return backend.run(q_job)
class JobProcessor:
    """
    Process a series of jobs and collect the results
    """
    def __init__(self, q_jobs, callback, max_workers=1):
        """
        Args:
            q_jobs (list(QuantumJob)): List of QuantumJob objects.
            callback (fn(results)): The function that will be called when all
                jobs finish. The signature of the function must be:
                fn(results)
                results: A list of Result objects.
            max_workers (int): The maximum number of workers to use.
        Raises:
            QISKitError: if any of the job backends could not be found.
        """
        self.q_jobs = q_jobs
        self.max_workers = max_workers
        # check whether any jobs are remote
        self.online = any(not q_job.backend.configuration.get('local')
                          for q_job in q_jobs)
        self.futures = {}
        # Guards jobs_results/num_jobs, mutated from worker callbacks.
        self.lock = Lock()
        # Set a default dummy callback just in case the user doesn't want
        # to pass any callback.
        self.callback = (lambda rs: ()) if callback is None else callback
        self.num_jobs = len(self.q_jobs)
        self.jobs_results = []
        if self.online:
            # I/O intensive -> use ThreadedPoolExecutor
            self.executor_class = futures.ThreadPoolExecutor
        else:
            # CPU intensive -> use ProcessPoolExecutor
            self.executor_class = futures.ProcessPoolExecutor
    def _job_done_callback(self, future):
        # Collect one finished future; wrap any failure in an ERROR Result
        # so a crashing job still produces an entry for the user callback.
        try:
            result = future.result()
        except Exception as ex:  # pylint: disable=broad-except
            result = Result({'job_id': '0', 'status': 'ERROR',
                             'result': ex},
                            future.qobj)
        with self.lock:
            logger.debug("Have a Result: %s", pprint.pformat(result))
            self.jobs_results.append(result)
            if self.num_jobs != 0:
                self.num_jobs -= 1
                logger.debug("Jobs left count decreased: %d", self.num_jobs)
        # Call the callback when all jobs have finished
        if self.num_jobs == 0:
            logger.debug("No more jobs in queue, returning results")
            self.callback(self.jobs_results)
    def submit(self):
        """Process/submit jobs"""
        executor = self.executor_class(max_workers=self.max_workers)
        for q_job in self.q_jobs:
            future = executor.submit(run_backend, q_job)
            # Stash the qobj on the future so the error path can build a Result.
            future.qobj = q_job.qobj
            self.futures[future] = q_job.qobj
            future.add_done_callback(self._job_done_callback)
| apache-2.0 |
sfrenza/test-for-bot | venv/Lib/site-packages/nltk/compat.py | 2 | 11863 | # -*- coding: utf-8 -*-
# Natural Language Toolkit: Compatibility
#
# Copyright (C) 2001-2017 NLTK Project
#
# URL: <http://nltk.org/>
# For license information, see LICENSE.TXT
from __future__ import absolute_import, print_function
import os
import sys
from functools import update_wrapper, wraps
import fractions
import unicodedata
from six import string_types, text_type
# Python 2/3 compatibility layer. Based on six.
PY3 = sys.version_info[0] == 3  # True when running under Python 3.x
if PY3:
    def get_im_class(meth):
        # Class of a bound method: Python 3 spells it __self__.__class__.
        return meth.__self__.__class__
    import io
    StringIO = io.StringIO
    BytesIO = io.BytesIO
    from datetime import timezone
    UTC = timezone.utc  # tzinfo singleton for UTC
    from tempfile import TemporaryDirectory
else:
    def get_im_class(meth):
        # Class of a bound method: Python 2 exposes it as im_class.
        return meth.im_class
    try:
        from cStringIO import StringIO  # prefer the C-accelerated version
    except ImportError:
        from StringIO import StringIO
    BytesIO = StringIO  # Py2 str is bytes, so one class covers both roles
    from datetime import tzinfo, timedelta
    ZERO = timedelta(0)
    HOUR = timedelta(hours=1)
    # A UTC class for python 2.7
    class UTC(tzinfo):
        """UTC"""
        def utcoffset(self, dt):
            return ZERO
        def tzname(self, dt):
            return "UTC"
        def dst(self, dt):
            return ZERO
    UTC = UTC()  # replace the class with its singleton instance
    import csv
    import codecs
    import cStringIO
    class UnicodeWriter:
        """
        A CSV writer which will write rows to CSV file "f",
        which is encoded in the given encoding.
        see https://docs.python.org/2/library/csv.html
        """
        def __init__(self, f, dialect=csv.excel, encoding="utf-8",
                     errors='replace', **kwds):
            # Redirect output to a queue
            self.queue = cStringIO.StringIO()
            self.writer = csv.writer(self.queue, dialect=dialect, **kwds)
            self.stream = f
            encoder_cls = codecs.getincrementalencoder(encoding)
            self.encoder = encoder_cls(errors=errors)
        def encode(self, data):
            # Text values are written to the queue as UTF-8 bytes first.
            if isinstance(data, string_types):
                return data.encode("utf-8")
            else:
                return data
        def writerow(self, row):
            self.writer.writerow([self.encode(s) for s in row])
            # Fetch UTF-8 output from the queue ...
            data = self.queue.getvalue()
            data = data.decode("utf-8")
            # ... and reencode it into the target encoding
            data = self.encoder.encode(data, 'replace')
            # write to the target stream
            self.stream.write(data)
            # empty queue
            self.queue.truncate(0)
    import warnings as _warnings
    import os as _os
    from tempfile import mkdtemp
    # Backport of tempfile.TemporaryDirectory for Python 2.7.
    class TemporaryDirectory(object):
        """Create and return a temporary directory. This has the same
        behavior as mkdtemp but can be used as a context manager. For
        example:
        with TemporaryDirectory() as tmpdir:
            ...
        Upon exiting the context, the directory and everything contained
        in it are removed.
        http://stackoverflow.com/questions/19296146/tempfile-temporarydirectory-context-manager-in-python-2-7
        """
        def __init__(self, suffix="", prefix="tmp", dir=None):
            self._closed = False
            self.name = None # Handle mkdtemp raising an exception
            self.name = mkdtemp(suffix, prefix, dir)
        def __repr__(self):
            return "<{} {!r}>".format(self.__class__.__name__, self.name)
        def __enter__(self):
            return self.name
        def cleanup(self, _warn=False):
            if self.name and not self._closed:
                try:
                    self._rmtree(self.name)
                except (TypeError, AttributeError) as ex:
                    # Issue #10188: Emit a warning on stderr
                    # if the directory could not be cleaned
                    # up due to missing globals
                    if "None" not in str(ex):
                        raise
                    print("ERROR: {!r} while cleaning up {!r}".format(ex,
                                                                      self),
                          file=sys.stderr)
                    return
                self._closed = True
                if _warn:
                    self._warn("Implicitly cleaning up {!r}".format(self),
                               Warning)
        def __exit__(self, exc, value, tb):
            self.cleanup()
        def __del__(self):
            # Issue a Warning if implicit cleanup needed
            self.cleanup(_warn=True)
        # XXX (ncoghlan): The following code attempts to make
        # this class tolerant of the module nulling out process
        # that happens during CPython interpreter shutdown
        # Alas, it doesn't actually manage it. See issue #10188
        _listdir = staticmethod(_os.listdir)
        _path_join = staticmethod(_os.path.join)
        _isdir = staticmethod(_os.path.isdir)
        _islink = staticmethod(_os.path.islink)
        _remove = staticmethod(_os.remove)
        _rmdir = staticmethod(_os.rmdir)
        _warn = _warnings.warn
        def _rmtree(self, path):
            # Essentially a stripped down version of shutil.rmtree. We can't
            # use globals because they may be None'ed out at shutdown.
            for name in self._listdir(path):
                fullname = self._path_join(path, name)
                try:
                    isdir = (self._isdir(fullname) and not
                             self._islink(fullname))
                except OSError:
                    isdir = False
                if isdir:
                    self._rmtree(fullname)
                else:
                    try:
                        self._remove(fullname)
                    except OSError:
                        pass
            try:
                self._rmdir(path)
            except OSError:
                pass
# ======= Compatibility for datasets that care about Python versions ========
# The following datasets have a /PY3 subdirectory containing
# a full copy of the data which has been re-encoded or repickled.
DATA_UPDATES = [("chunkers", "maxent_ne_chunker"),
                ("help", "tagsets"),
                ("taggers", "maxent_treebank_pos_tagger"),
                ("tokenizers", "punkt")]
# Same entries joined into platform-native relative paths,
# e.g. "chunkers/maxent_ne_chunker".
_PY3_DATA_UPDATES = [os.path.join(*path_list) for path_list in DATA_UPDATES]
def add_py3_data(path):
    """On Python 3, rewrite *path* so it points at the /PY3 re-encoded copy
    of a known dataset; return the path unchanged otherwise."""
    if not PY3:
        return path
    path_str = str(path)
    for item in _PY3_DATA_UPDATES:
        if item in path_str and "/PY3" not in path_str:
            pos = path.index(item) + len(item)
            # Insert after the .zip suffix when the dataset is zipped.
            if path[pos:pos + 4] == ".zip":
                pos += 4
            return path[:pos] + "/PY3" + path[pos:]
    return path
# for use in adding /PY3 to the second (filename) argument
# of the file pointers in data.py
def py3_data(init_func):
    """Decorator: patch the second positional argument through add_py3_data
    before delegating to *init_func*."""
    @wraps(init_func)
    def _decorator(*args, **kwargs):
        patched = (args[0], add_py3_data(args[1])) + args[2:]
        return init_func(*patched, **kwargs)
    return _decorator
# ======= Compatibility layer for __str__ and __repr__ ==========
def remove_accents(text):
    """Strip combining marks from *text* via NFKD decomposition.

    ASCII-only bytes input is decoded to text first.
    """
    if isinstance(text, bytes):
        text = text.decode('ascii')
    decomposed = unicodedata.normalize('NFKD', text)
    category = unicodedata.category  # local alias: small (~10%) speedup
    return ''.join(c for c in decomposed if category(c) != 'Mn')
# Select the best transliteration method:
try:
    # Older versions of Unidecode are licensed under Artistic License;
    # assume an older version is installed.
    from unidecode import unidecode as transliterate
except ImportError:
    try:
        # text-unidecode implementation is worse than Unidecode
        # implementation so Unidecode is preferred.
        from text_unidecode import unidecode as transliterate
    except ImportError:
        # This transliteration method should be enough
        # for many Western languages.
        transliterate = remove_accents
def python_2_unicode_compatible(klass):
    """
    This decorator defines __unicode__ method and fixes
    __repr__ and __str__ methods under Python 2.
    To support Python 2 and 3 with a single code base,
    define __str__ and __repr__ methods returning unicode
    text and apply this decorator to the class.
    Original __repr__ and __str__ would be available
    as unicode_repr and __unicode__ (under both Python 2
    and Python 3).
    """
    if not issubclass(klass, object):
        raise ValueError("This decorator doesn't work for old-style classes")
    # both __unicode__ and unicode_repr are public because they
    # may be useful in console under Python 2.x
    # if __str__ or __repr__ are not overriden in a subclass,
    # they may be already fixed by this decorator in a parent class
    # and we shouldn't fix them again
    if not _was_fixed(klass.__str__):
        klass.__unicode__ = klass.__str__
        if not PY3:
            # Py2 __str__ must return bytes: transliterate then force 7-bit.
            klass.__str__ = _7bit(_transliterated(klass.__unicode__))
    if not _was_fixed(klass.__repr__):
        klass.unicode_repr = klass.__repr__
        if not PY3:
            klass.__repr__ = _7bit(klass.unicode_repr)
    return klass
def unicode_repr(obj):
    """
    For classes that was fixed with @python_2_unicode_compatible
    ``unicode_repr`` returns ``obj.unicode_repr()``; for unicode strings
    the result is returned without "u" letter (to make output the
    same under Python 2.x and Python 3.x); for other variables
    it is the same as ``repr``.
    """
    if PY3:
        return repr(obj)
    # Python 2.x
    if hasattr(obj, 'unicode_repr'):
        return obj.unicode_repr()
    if isinstance(obj, text_type):
        return repr(obj)[1:]  # strip "u" letter from output
    return repr(obj)
def _transliterated(method):
    """Wrap *method* so its unicode result is transliterated toward ASCII,
    propagating the compat marker flags used by _was_fixed()."""
    def wrapper(self):
        return transliterate(method(self))
    update_wrapper(wrapper, method, ["__name__", "__doc__"])
    _missing = object()
    inherited = getattr(method, "_nltk_compat_7bit", _missing)
    if inherited is not _missing:
        wrapper._nltk_compat_7bit = inherited
    wrapper._nltk_compat_transliterated = True
    return wrapper
def _7bit(method):
def wrapper(self):
return method(self).encode('ascii', 'backslashreplace')
update_wrapper(wrapper, method, ["__name__", "__doc__"])
if hasattr(method, "_nltk_compat_transliterated"):
wrapper._nltk_compat_transliterated = (
method._nltk_compat_transliterated
)
wrapper._nltk_compat_7bit = True
return wrapper
def _was_fixed(method):
return (getattr(method, "_nltk_compat_7bit", False) or
getattr(method, "_nltk_compat_transliterated", False))
class Fraction(fractions.Fraction):
    """A backwards-compatible simplification of fractions.Fraction from
    Python >= 3.5.

    Adds the `_normalize` parameter so that a 0 numerator (or any
    numerator/denominator pair) is NOT reduced by the gcd, keeping the
    raw counts intact.  Primarily used by nltk.translate.bleu_score, where
    the numerators and denominators of the ngram precisions are mutable;
    see http://stackoverflow.com/questions/34561265.

    Should be deprecated once NLTK stops supporting Python < 3.5
    (https://github.com/nltk/nltk/issues/1330).
    """

    def __new__(cls, numerator=0, denominator=None, _normalize=True):
        self = super(Fraction, cls).__new__(cls, numerator, denominator)
        # To emulate fraction.Fraction.from_float across Python >= 2.7,
        # only bypass normalization for a true int numerator paired with
        # an explicit (truthy) denominator.
        if not _normalize and type(numerator) is int and denominator:
            self._numerator = numerator
            self._denominator = denominator
        return self
| mit |
resmo/ansible | lib/ansible/modules/network/check_point/cp_mgmt_network.py | 20 | 7591 | #!/usr/bin/python
# -*- coding: utf-8 -*-
#
# Ansible module to manage Check Point Firewall (c) 2019
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = """
---
module: cp_mgmt_network
short_description: Manages network objects on Check Point over Web Services API
description:
- Manages network objects on Check Point devices including creating, updating and removing objects.
- All operations are performed over Web Services API.
version_added: "2.9"
author: "Or Soffer (@chkp-orso)"
options:
name:
description:
- Object name.
type: str
required: True
subnet:
description:
- IPv4 or IPv6 network address. If both addresses are required use subnet4 and subnet6 fields explicitly.
type: str
subnet4:
description:
- IPv4 network address.
type: str
subnet6:
description:
- IPv6 network address.
type: str
mask_length:
description:
- IPv4 or IPv6 network mask length. If both masks are required use mask-length4 and mask-length6 fields explicitly. Instead of IPv4 mask length
it is possible to specify IPv4 mask itself in subnet-mask field.
type: int
mask_length4:
description:
- IPv4 network mask length.
type: int
mask_length6:
description:
- IPv6 network mask length.
type: int
subnet_mask:
description:
- IPv4 network mask.
type: str
nat_settings:
description:
- NAT settings.
type: dict
suboptions:
auto_rule:
description:
- Whether to add automatic address translation rules.
type: bool
ip_address:
description:
- IPv4 or IPv6 address. If both addresses are required use ipv4-address and ipv6-address fields explicitly. This parameter is not
required in case "method" parameter is "hide" and "hide-behind" parameter is "gateway".
type: str
ipv4_address:
description:
- IPv4 address.
type: str
ipv6_address:
description:
- IPv6 address.
type: str
hide_behind:
description:
- Hide behind method. This parameter is not required in case "method" parameter is "static".
type: str
choices: ['gateway', 'ip-address']
install_on:
description:
- Which gateway should apply the NAT translation.
type: str
method:
description:
- NAT translation method.
type: str
choices: ['hide', 'static']
tags:
description:
- Collection of tag identifiers.
type: list
broadcast:
description:
- Allow broadcast address inclusion.
type: str
choices: ['disallow', 'allow']
color:
description:
- Color of the object. Should be one of existing colors.
type: str
choices: ['aquamarine', 'black', 'blue', 'crete blue', 'burlywood', 'cyan', 'dark green', 'khaki', 'orchid', 'dark orange', 'dark sea green',
'pink', 'turquoise', 'dark blue', 'firebrick', 'brown', 'forest green', 'gold', 'dark gold', 'gray', 'dark gray', 'light green', 'lemon chiffon',
'coral', 'sea green', 'sky blue', 'magenta', 'purple', 'slate blue', 'violet red', 'navy blue', 'olive', 'orange', 'red', 'sienna', 'yellow']
comments:
description:
- Comments string.
type: str
details_level:
description:
- The level of detail for some of the fields in the response can vary from showing only the UID value of the object to a fully detailed
representation of the object.
type: str
choices: ['uid', 'standard', 'full']
groups:
description:
- Collection of group identifiers.
type: list
ignore_warnings:
description:
- Apply changes ignoring warnings.
type: bool
ignore_errors:
description:
- Apply changes ignoring errors. You won't be able to publish such changes. If the ignore-warnings flag was omitted, warnings will also be ignored.
type: bool
extends_documentation_fragment: checkpoint_objects
"""
EXAMPLES = """
- name: add-network
cp_mgmt_network:
name: New Network 1
state: present
subnet: 192.0.2.0
subnet_mask: 255.255.255.0
- name: set-network
cp_mgmt_network:
color: green
mask_length: 16
name: New Network 1
new_name: New Network 2
state: present
subnet: 192.0.0.0
- name: delete-network
cp_mgmt_network:
name: New Network 2
state: absent
"""
RETURN = """
cp_mgmt_network:
description: The checkpoint object created or updated.
returned: always, except when deleting the object.
type: dict
"""
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.network.checkpoint.checkpoint import checkpoint_argument_spec_for_objects, api_call
def main():
argument_spec = dict(
name=dict(type='str', required=True),
subnet=dict(type='str'),
subnet4=dict(type='str'),
subnet6=dict(type='str'),
mask_length=dict(type='int'),
mask_length4=dict(type='int'),
mask_length6=dict(type='int'),
subnet_mask=dict(type='str'),
nat_settings=dict(type='dict', options=dict(
auto_rule=dict(type='bool'),
ip_address=dict(type='str'),
ipv4_address=dict(type='str'),
ipv6_address=dict(type='str'),
hide_behind=dict(type='str', choices=['gateway', 'ip-address']),
install_on=dict(type='str'),
method=dict(type='str', choices=['hide', 'static'])
)),
tags=dict(type='list'),
broadcast=dict(type='str', choices=['disallow', 'allow']),
color=dict(type='str', choices=['aquamarine', 'black', 'blue', 'crete blue', 'burlywood', 'cyan', 'dark green',
'khaki', 'orchid', 'dark orange', 'dark sea green', 'pink', 'turquoise', 'dark blue', 'firebrick', 'brown',
'forest green', 'gold', 'dark gold', 'gray', 'dark gray', 'light green', 'lemon chiffon', 'coral', 'sea green',
'sky blue', 'magenta', 'purple', 'slate blue', 'violet red', 'navy blue', 'olive', 'orange', 'red', 'sienna',
'yellow']),
comments=dict(type='str'),
details_level=dict(type='str', choices=['uid', 'standard', 'full']),
groups=dict(type='list'),
ignore_warnings=dict(type='bool'),
ignore_errors=dict(type='bool')
)
argument_spec.update(checkpoint_argument_spec_for_objects)
module = AnsibleModule(argument_spec=argument_spec, supports_check_mode=True)
api_call_object = 'network'
result = api_call(module, api_call_object)
module.exit_json(**result)
if __name__ == '__main__':
main()
| gpl-3.0 |
Darkmer/masterchief | CourseBuilderenv/lib/python2.7/site-packages/pip/_vendor/requests/packages/charade/langhebrewmodel.py | 2763 | 11318 | ######################## BEGIN LICENSE BLOCK ########################
# The Original Code is Mozilla Universal charset detector code.
#
# The Initial Developer of the Original Code is
# Simon Montagu
# Portions created by the Initial Developer are Copyright (C) 2005
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
# Mark Pilgrim - port to Python
# Shy Shalom - original C code
# Shoshannah Forbes - original C code (?)
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301 USA
######################### END LICENSE BLOCK #########################
# 255: Control characters that usually does not exist in any text
# 254: Carriage/Return
# 253: symbol (punctuation) that does not belong to word
# 252: 0 - 9
# Windows-1255 language model
# Character Mapping Table:
win1255_CharToOrderMap = (
255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00
255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10
253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20
252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30
253, 69, 91, 79, 80, 92, 89, 97, 90, 68,111,112, 82, 73, 95, 85, # 40
78,121, 86, 71, 67,102,107, 84,114,103,115,253,253,253,253,253, # 50
253, 50, 74, 60, 61, 42, 76, 70, 64, 53,105, 93, 56, 65, 54, 49, # 60
66,110, 51, 43, 44, 63, 81, 77, 98, 75,108,253,253,253,253,253, # 70
124,202,203,204,205, 40, 58,206,207,208,209,210,211,212,213,214,
215, 83, 52, 47, 46, 72, 32, 94,216,113,217,109,218,219,220,221,
34,116,222,118,100,223,224,117,119,104,125,225,226, 87, 99,227,
106,122,123,228, 55,229,230,101,231,232,120,233, 48, 39, 57,234,
30, 59, 41, 88, 33, 37, 36, 31, 29, 35,235, 62, 28,236,126,237,
238, 38, 45,239,240,241,242,243,127,244,245,246,247,248,249,250,
9, 8, 20, 16, 3, 2, 24, 14, 22, 1, 25, 15, 4, 11, 6, 23,
12, 19, 13, 26, 18, 27, 21, 17, 7, 10, 5,251,252,128, 96,253,
)
# Model Table:
# total sequences: 100%
# first 512 sequences: 98.4004%
# first 1024 sequences: 1.5981%
# rest sequences: 0.087%
# negative sequences: 0.0015%
HebrewLangModel = (
0,3,3,3,3,3,3,3,3,3,3,2,3,3,3,3,3,3,3,3,3,3,3,2,3,2,1,2,0,1,0,0,
3,0,3,1,0,0,1,3,2,0,1,1,2,0,2,2,2,1,1,1,1,2,1,1,1,2,0,0,2,2,0,1,
3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,2,2,2,
1,2,1,2,1,2,0,0,2,0,0,0,0,0,1,0,1,0,0,0,0,0,0,1,0,0,0,0,0,0,1,0,
3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,2,2,
1,2,1,3,1,1,0,0,2,0,0,0,1,0,1,0,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,
3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,1,0,1,2,2,1,3,
1,2,1,1,2,2,0,0,2,2,0,0,0,0,1,0,1,0,0,0,1,0,0,0,0,0,0,1,0,1,1,0,
3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,3,3,2,2,2,2,3,2,
1,2,1,2,2,2,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,1,0,
3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,3,3,2,3,2,2,3,2,2,2,1,2,2,2,2,
1,2,1,1,2,2,0,1,2,0,0,0,0,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0,
3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,0,2,2,2,2,2,
0,2,0,2,2,2,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,1,0,
3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,3,0,2,2,2,
0,2,1,2,2,2,0,0,2,1,0,0,0,0,1,0,1,0,0,0,0,0,0,2,0,0,0,0,0,0,1,0,
3,3,3,3,3,3,3,3,3,3,3,2,3,3,3,3,3,3,3,3,3,3,3,3,3,2,1,2,3,2,2,2,
1,2,1,2,2,2,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0,1,1,0,
3,3,3,3,3,3,3,3,3,2,3,3,3,2,3,3,3,3,3,3,3,3,3,3,3,3,3,1,0,2,0,2,
0,2,1,2,2,2,0,0,1,2,0,0,0,0,1,0,1,0,0,0,0,0,0,1,0,0,0,2,0,0,1,0,
3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,3,2,3,2,2,3,2,1,2,1,1,1,
0,1,1,1,1,1,3,0,1,0,0,0,0,2,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,
3,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,1,1,0,1,1,0,0,1,0,0,1,0,0,0,0,
0,0,1,0,0,0,0,0,2,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,2,2,2,2,2,2,
0,2,0,1,2,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,
3,3,3,3,3,3,3,3,3,2,3,3,3,2,1,2,3,3,2,3,3,3,3,2,3,2,1,2,0,2,1,2,
0,2,0,2,2,2,0,0,1,2,0,0,0,0,1,0,1,0,0,0,0,0,0,0,0,0,0,1,0,0,1,0,
3,3,3,3,3,3,3,3,3,2,3,3,3,1,2,2,3,3,2,3,2,3,2,2,3,1,2,2,0,2,2,2,
0,2,1,2,2,2,0,0,1,2,0,0,0,0,1,0,0,0,0,0,1,0,0,1,0,0,0,1,0,0,1,0,
3,3,3,3,3,3,3,3,3,3,3,3,3,2,3,3,3,2,3,3,2,2,2,3,3,3,3,1,3,2,2,2,
0,2,0,1,2,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,
3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,2,3,3,3,2,3,2,2,2,1,2,2,0,2,2,2,2,
0,2,0,2,2,2,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,
3,3,3,3,3,3,3,3,3,3,3,2,3,3,3,1,3,2,3,3,2,3,3,2,2,1,2,2,2,2,2,2,
0,2,1,2,1,2,0,0,1,0,0,0,0,0,1,0,0,0,0,0,1,0,0,1,0,0,0,0,0,0,1,0,
3,3,3,3,3,3,2,3,2,3,3,2,3,3,3,3,2,3,2,3,3,3,3,3,2,2,2,2,2,2,2,1,
0,2,0,1,2,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,1,0,
3,3,3,3,3,3,3,3,3,2,1,2,3,3,3,3,3,3,3,2,3,2,3,2,1,2,3,0,2,1,2,2,
0,2,1,1,2,1,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,2,0,
3,3,3,3,3,3,3,3,3,2,3,3,3,3,2,1,3,1,2,2,2,1,2,3,3,1,2,1,2,2,2,2,
0,1,1,1,1,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0,1,0,0,2,0,0,0,0,0,0,0,0,
3,3,3,3,3,3,3,3,3,3,0,2,3,3,3,1,3,3,3,1,2,2,2,2,1,1,2,2,2,2,2,2,
0,2,0,1,1,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,1,0,
3,3,3,3,3,3,2,3,3,3,2,2,3,3,3,2,1,2,3,2,3,2,2,2,2,1,2,1,1,1,2,2,
0,2,1,1,1,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,
3,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,1,0,0,0,1,0,0,0,0,0,
1,0,1,0,0,0,0,0,2,0,0,0,0,0,1,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,3,3,3,3,2,3,3,2,3,1,2,2,2,2,3,2,3,1,1,2,2,1,2,2,1,1,0,2,2,2,2,
0,1,0,1,2,2,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,1,0,
3,0,0,1,1,0,1,0,0,1,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,2,2,0,
0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,0,1,0,1,0,1,1,0,1,1,0,0,0,1,1,0,1,1,1,0,0,0,0,0,0,1,0,0,0,0,0,
0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,0,0,0,1,1,0,1,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,
3,2,2,1,2,2,2,2,2,2,2,1,2,2,1,2,2,1,1,1,1,1,1,1,1,2,1,1,0,3,3,3,
0,3,0,2,2,2,2,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,
2,2,2,3,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,1,2,2,1,2,2,2,1,1,1,2,0,1,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
2,2,2,2,2,2,2,2,2,2,2,1,2,2,2,2,2,2,2,2,2,2,2,0,2,2,0,0,0,0,0,0,
0,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
2,3,1,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,1,2,1,0,2,1,0,
0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,1,1,1,1,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,
0,3,1,1,2,2,2,2,2,1,2,2,2,1,1,2,2,2,2,2,2,2,1,2,2,1,0,1,1,1,1,0,
0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,2,1,1,1,1,2,1,1,2,1,0,1,1,1,1,1,1,1,1,1,1,1,0,1,0,0,0,0,0,0,0,
0,0,2,0,0,0,0,0,0,0,0,1,1,0,0,0,0,1,1,0,0,1,1,0,0,0,0,0,0,1,0,0,
2,1,1,2,2,2,2,2,2,2,2,2,2,2,1,2,2,2,2,2,1,2,1,2,1,1,1,1,0,0,0,0,
0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
1,2,1,2,2,2,2,2,2,2,2,2,2,1,2,1,2,1,1,2,1,1,1,2,1,2,1,2,0,1,0,1,
0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,3,1,2,2,2,1,2,2,2,2,2,2,2,2,1,2,1,1,1,1,1,1,2,1,2,1,1,0,1,0,1,
0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
2,1,2,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,2,2,
0,2,0,1,2,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,
3,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
2,1,1,1,1,1,1,1,0,1,1,0,1,0,0,1,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,2,0,1,1,1,0,1,0,0,0,1,1,0,1,1,0,0,0,0,0,1,1,0,0,
0,1,1,1,2,1,2,2,2,0,2,0,2,0,1,1,2,1,1,1,1,2,1,0,1,1,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,
1,0,1,0,0,0,0,0,1,0,1,2,2,0,1,0,0,1,1,2,2,1,2,0,2,0,0,0,1,2,0,1,
2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,2,0,2,1,2,0,2,0,0,1,1,1,1,1,1,0,1,0,0,0,1,0,0,1,
2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,1,0,0,0,0,0,1,0,2,1,1,0,1,0,0,1,1,1,2,2,0,0,1,0,0,0,1,0,0,1,
1,1,2,1,0,1,1,1,0,1,0,1,1,1,1,0,0,0,1,0,1,0,0,0,0,0,0,0,0,2,2,1,
0,2,0,1,2,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
2,1,0,0,1,0,1,1,1,1,0,0,0,0,0,1,0,0,0,0,1,1,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
1,1,1,1,1,1,1,1,1,2,1,0,1,1,1,1,1,1,1,1,1,1,1,0,1,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,1,1,1,0,0,0,0,1,1,1,0,1,1,0,1,0,0,0,1,1,0,1,
2,0,1,0,1,0,1,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,1,0,1,1,1,0,1,0,0,1,1,2,1,1,2,0,1,0,0,0,1,1,0,1,
1,0,0,1,0,0,1,0,0,0,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,1,0,1,1,2,0,1,0,0,0,0,2,1,1,2,0,2,0,0,0,1,1,0,1,
1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,1,0,2,1,1,0,1,0,0,2,2,1,2,1,1,0,1,0,0,0,1,1,0,1,
2,0,1,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,1,2,2,0,0,0,0,0,1,1,0,1,0,0,1,0,0,0,0,1,0,1,
1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,1,2,2,0,0,0,0,2,1,1,1,0,2,1,1,0,0,0,2,1,0,1,
1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,1,0,1,1,2,0,1,0,0,1,1,0,2,1,1,0,1,0,0,0,1,1,0,1,
2,2,1,1,1,0,1,1,0,1,1,0,1,0,0,0,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,1,0,2,1,1,0,1,0,0,1,1,0,1,2,1,0,2,0,0,0,1,1,0,1,
2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,
0,1,0,0,2,0,2,1,1,0,1,0,1,0,0,1,0,0,0,0,1,0,0,0,1,0,0,0,0,0,1,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,1,0,1,1,2,0,1,0,0,1,1,1,0,1,0,0,1,0,0,0,1,0,0,1,
1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
1,0,0,0,0,0,0,0,1,0,1,1,0,0,1,0,0,2,1,1,1,1,1,0,1,0,0,0,0,1,0,1,
0,1,1,1,2,1,1,1,1,0,1,1,1,1,1,1,1,1,1,1,1,1,0,1,1,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,1,2,1,0,0,0,0,0,1,1,1,1,1,0,1,0,0,0,1,1,0,0,
)
Win1255HebrewModel = {
'charToOrderMap': win1255_CharToOrderMap,
'precedenceMatrix': HebrewLangModel,
'mTypicalPositiveRatio': 0.984004,
'keepEnglishLetter': False,
'charsetName': "windows-1255"
}
# flake8: noqa
| mit |
tadebayo/myedge | myvenv/Lib/site-packages/django/contrib/gis/db/backends/oracle/adapter.py | 273 | 1866 | from cx_Oracle import CLOB
from django.contrib.gis.db.backends.base.adapter import WKTAdapter
from django.contrib.gis.geos import GeometryCollection, Polygon
from django.utils.six.moves import range
class OracleSpatialAdapter(WKTAdapter):
input_size = CLOB
def __init__(self, geom):
"""
Oracle requires that polygon rings are in proper orientation. This
affects spatial operations and an invalid orientation may cause
failures. Correct orientations are:
* Outer ring - counter clockwise
* Inner ring(s) - clockwise
"""
if isinstance(geom, Polygon):
self._fix_polygon(geom)
elif isinstance(geom, GeometryCollection):
self._fix_geometry_collection(geom)
self.wkt = geom.wkt
self.srid = geom.srid
def _fix_polygon(self, poly):
# Fix single polygon orientation as described in __init__()
if self._isClockwise(poly.exterior_ring):
poly.exterior_ring = list(reversed(poly.exterior_ring))
for i in range(1, len(poly)):
if not self._isClockwise(poly[i]):
poly[i] = list(reversed(poly[i]))
return poly
def _fix_geometry_collection(self, coll):
# Fix polygon orientations in geometry collections as described in
# __init__()
for i, geom in enumerate(coll):
if isinstance(geom, Polygon):
coll[i] = self._fix_polygon(geom)
def _isClockwise(self, coords):
# A modified shoelace algorithm to determine polygon orientation.
# See https://en.wikipedia.org/wiki/Shoelace_formula
n = len(coords)
area = 0.0
for i in range(n):
j = (i + 1) % n
area += coords[i][0] * coords[j][1]
area -= coords[j][0] * coords[i][1]
return area < 0.0
| mit |
tyagiarpit/servo | tests/wpt/harness/wptrunner/testloader.py | 97 | 22234 | import json
import os
import sys
import urlparse
from abc import ABCMeta, abstractmethod
from Queue import Empty
from collections import defaultdict, OrderedDict, deque
from multiprocessing import Queue
import manifestinclude
import manifestexpected
import wpttest
from mozlog import structured
manifest = None
manifest_update = None
def do_delayed_imports():
# This relies on an already loaded module having set the sys.path correctly :(
global manifest, manifest_update
from manifest import manifest
from manifest import update as manifest_update
class TestChunker(object):
def __init__(self, total_chunks, chunk_number):
self.total_chunks = total_chunks
self.chunk_number = chunk_number
assert self.chunk_number <= self.total_chunks
self.logger = structured.get_default_logger()
def __call__(self, manifest):
raise NotImplementedError
class Unchunked(TestChunker):
def __init__(self, *args, **kwargs):
TestChunker.__init__(self, *args, **kwargs)
assert self.total_chunks == 1
def __call__(self, manifest):
for item in manifest:
yield item
class HashChunker(TestChunker):
def __call__(self):
chunk_index = self.chunk_number - 1
for test_path, tests in manifest:
if hash(test_path) % self.total_chunks == chunk_index:
yield test_path, tests
class EqualTimeChunker(TestChunker):
def _group_by_directory(self, manifest_items):
"""Split the list of manifest items into a ordered dict that groups tests in
so that anything in the same subdirectory beyond a depth of 3 is in the same
group. So all tests in a/b/c, a/b/c/d and a/b/c/e will be grouped together
and separate to tests in a/b/f
Returns: tuple (ordered dict of {test_dir: PathData}, total estimated runtime)
"""
class PathData(object):
def __init__(self, path):
self.path = path
self.time = 0
self.tests = []
by_dir = OrderedDict()
total_time = 0
for i, (test_path, tests) in enumerate(manifest_items):
test_dir = tuple(os.path.split(test_path)[0].split(os.path.sep)[:3])
if not test_dir in by_dir:
by_dir[test_dir] = PathData(test_dir)
data = by_dir[test_dir]
time = sum(wpttest.DEFAULT_TIMEOUT if test.timeout !=
"long" else wpttest.LONG_TIMEOUT for test in tests)
data.time += time
total_time += time
data.tests.append((test_path, tests))
return by_dir, total_time
def _maybe_remove(self, chunks, i, direction):
"""Trial removing a chunk from one chunk to an adjacent one.
:param chunks: - the list of all chunks
:param i: - the chunk index in the list of chunks to try removing from
:param direction: either "next" if we are going to move from the end to
the subsequent chunk, or "prev" if we are going to move
from the start into the previous chunk.
:returns bool: Did a chunk get moved?"""
source_chunk = chunks[i]
if direction == "next":
target_chunk = chunks[i+1]
path_index = -1
move_func = lambda: target_chunk.appendleft(source_chunk.pop())
elif direction == "prev":
target_chunk = chunks[i-1]
path_index = 0
move_func = lambda: target_chunk.append(source_chunk.popleft())
else:
raise ValueError("Unexpected move direction %s" % direction)
return self._maybe_move(source_chunk, target_chunk, path_index, move_func)
def _maybe_add(self, chunks, i, direction):
"""Trial adding a chunk from one chunk to an adjacent one.
:param chunks: - the list of all chunks
:param i: - the chunk index in the list of chunks to try adding to
:param direction: either "next" if we are going to remove from the
the subsequent chunk, or "prev" if we are going to remove
from the the previous chunk.
:returns bool: Did a chunk get moved?"""
target_chunk = chunks[i]
if direction == "next":
source_chunk = chunks[i+1]
path_index = 0
move_func = lambda: target_chunk.append(source_chunk.popleft())
elif direction == "prev":
source_chunk = chunks[i-1]
path_index = -1
move_func = lambda: target_chunk.appendleft(source_chunk.pop())
else:
raise ValueError("Unexpected move direction %s" % direction)
return self._maybe_move(source_chunk, target_chunk, path_index, move_func)
def _maybe_move(self, source_chunk, target_chunk, path_index, move_func):
"""Move from one chunk to another, assess the change in badness,
and keep the move iff it decreases the badness score.
:param source_chunk: chunk to move from
:param target_chunk: chunk to move to
:param path_index: 0 if we are moving from the start or -1 if we are moving from the
end
:param move_func: Function that actually moves between chunks"""
if len(source_chunk.paths) <= 1:
return False
move_time = source_chunk.paths[path_index].time
new_source_badness = self._badness(source_chunk.time - move_time)
new_target_badness = self._badness(target_chunk.time + move_time)
delta_badness = ((new_source_badness + new_target_badness) -
(source_chunk.badness + target_chunk.badness))
if delta_badness < 0:
move_func()
return True
return False
def _badness(self, time):
"""Metric of badness for a specific chunk
:param time: the time for a specific chunk"""
return (time - self.expected_time)**2
def _get_chunk(self, manifest_items):
by_dir, total_time = self._group_by_directory(manifest_items)
if len(by_dir) < self.total_chunks:
raise ValueError("Tried to split into %i chunks, but only %i subdirectories included" % (
self.total_chunks, len(by_dir)))
self.expected_time = float(total_time) / self.total_chunks
chunks = self._create_initial_chunks(by_dir)
while True:
# Move a test from one chunk to the next until doing so no longer
# reduces the badness
got_improvement = self._update_chunks(chunks)
if not got_improvement:
break
self.logger.debug(self.expected_time)
for i, chunk in chunks.iteritems():
self.logger.debug("%i: %i, %i" % (i + 1, chunk.time, chunk.badness))
assert self._all_tests(by_dir) == self._chunked_tests(chunks)
return self._get_tests(chunks)
@staticmethod
def _all_tests(by_dir):
"""Return a set of all tests in the manifest from a grouping by directory"""
return set(x[0] for item in by_dir.itervalues()
for x in item.tests)
@staticmethod
def _chunked_tests(chunks):
"""Return a set of all tests in the manifest from the chunk list"""
return set(x[0] for chunk in chunks.itervalues()
for path in chunk.paths
for x in path.tests)
def _create_initial_chunks(self, by_dir):
"""Create an initial unbalanced list of chunks.
:param by_dir: All tests in the manifest grouped by subdirectory
:returns list: A list of Chunk objects"""
class Chunk(object):
def __init__(self, paths, index):
"""List of PathData objects that together form a single chunk of
tests"""
self.paths = deque(paths)
self.time = sum(item.time for item in paths)
self.index = index
def appendleft(self, path):
"""Add a PathData object to the start of the chunk"""
self.paths.appendleft(path)
self.time += path.time
def append(self, path):
"""Add a PathData object to the end of the chunk"""
self.paths.append(path)
self.time += path.time
def pop(self):
"""Remove PathData object from the end of the chunk"""
assert len(self.paths) > 1
self.time -= self.paths[-1].time
return self.paths.pop()
def popleft(self):
"""Remove PathData object from the start of the chunk"""
assert len(self.paths) > 1
self.time -= self.paths[0].time
return self.paths.popleft()
@property
def badness(self_):
"""Badness metric for this chunk"""
return self._badness(self_.time)
initial_size = len(by_dir) / self.total_chunks
chunk_boundaries = [initial_size * i
for i in xrange(self.total_chunks)] + [len(by_dir)]
chunks = OrderedDict()
for i, lower in enumerate(chunk_boundaries[:-1]):
upper = chunk_boundaries[i + 1]
paths = by_dir.values()[lower:upper]
chunks[i] = Chunk(paths, i)
assert self._all_tests(by_dir) == self._chunked_tests(chunks)
return chunks
def _update_chunks(self, chunks):
"""Run a single iteration of the chunk update algorithm.
:param chunks: - List of chunks
"""
#TODO: consider replacing this with a heap
sorted_chunks = sorted(chunks.values(), key=lambda x:-x.badness)
got_improvement = False
for chunk in sorted_chunks:
if chunk.time < self.expected_time:
f = self._maybe_add
else:
f = self._maybe_remove
if chunk.index == 0:
order = ["next"]
elif chunk.index == self.total_chunks - 1:
order = ["prev"]
else:
if chunk.time < self.expected_time:
# First try to add a test from the neighboring chunk with the
# greatest total time
if chunks[chunk.index + 1].time > chunks[chunk.index - 1].time:
order = ["next", "prev"]
else:
order = ["prev", "next"]
else:
# First try to remove a test and add to the neighboring chunk with the
# lowest total time
if chunks[chunk.index + 1].time > chunks[chunk.index - 1].time:
order = ["prev", "next"]
else:
order = ["next", "prev"]
for direction in order:
if f(chunks, chunk.index, direction):
got_improvement = True
break
if got_improvement:
break
return got_improvement
def _get_tests(self, chunks):
"""Return the list of tests corresponding to the chunk number we are running.
:param chunks: List of chunks"""
tests = []
for path in chunks[self.chunk_number - 1].paths:
tests.extend(path.tests)
return tests
def __call__(self, manifest_iter):
manifest = list(manifest_iter)
tests = self._get_chunk(manifest)
for item in tests:
yield item
class TestFilter(object):
def __init__(self, test_manifests, include=None, exclude=None, manifest_path=None):
if manifest_path is not None and include is None:
self.manifest = manifestinclude.get_manifest(manifest_path)
else:
self.manifest = manifestinclude.IncludeManifest.create()
if include:
self.manifest.set("skip", "true")
for item in include:
self.manifest.add_include(test_manifests, item)
if exclude:
for item in exclude:
self.manifest.add_exclude(test_manifests, item)
def __call__(self, manifest_iter):
for test_path, tests in manifest_iter:
include_tests = set()
for test in tests:
if self.manifest.include(test):
include_tests.add(test)
if include_tests:
yield test_path, include_tests
class TagFilter(object):
def __init__(self, tags):
self.tags = set(tags)
def __call__(self, test_iter):
for test in test_iter:
if test.tags & self.tags:
yield test
class ManifestLoader(object):
def __init__(self, test_paths, force_manifest_update=False):
do_delayed_imports()
self.test_paths = test_paths
self.force_manifest_update = force_manifest_update
self.logger = structured.get_default_logger()
if self.logger is None:
self.logger = structured.structuredlog.StructuredLogger("ManifestLoader")
def load(self):
rv = {}
for url_base, paths in self.test_paths.iteritems():
manifest_file = self.load_manifest(url_base=url_base,
**paths)
path_data = {"url_base": url_base}
path_data.update(paths)
rv[manifest_file] = path_data
return rv
def create_manifest(self, manifest_path, tests_path, url_base="/"):
self.update_manifest(manifest_path, tests_path, url_base, recreate=True)
def update_manifest(self, manifest_path, tests_path, url_base="/",
recreate=False):
self.logger.info("Updating test manifest %s" % manifest_path)
json_data = None
if not recreate:
try:
with open(manifest_path) as f:
json_data = json.load(f)
except IOError:
#If the existing file doesn't exist just create one from scratch
pass
if not json_data:
manifest_file = manifest.Manifest(None, url_base)
else:
try:
manifest_file = manifest.Manifest.from_json(tests_path, json_data)
except manifest.ManifestVersionMismatch:
manifest_file = manifest.Manifest(None, url_base)
manifest_update.update(tests_path, url_base, manifest_file)
manifest.write(manifest_file, manifest_path)
def load_manifest(self, tests_path, metadata_path, url_base="/"):
manifest_path = os.path.join(metadata_path, "MANIFEST.json")
if (not os.path.exists(manifest_path) or
self.force_manifest_update):
self.update_manifest(manifest_path, tests_path, url_base)
manifest_file = manifest.load(tests_path, manifest_path)
if manifest_file.url_base != url_base:
self.logger.info("Updating url_base in manifest from %s to %s" % (manifest_file.url_base,
url_base))
manifest_file.url_base = url_base
manifest.write(manifest_file, manifest_path)
return manifest_file
def iterfilter(filters, iter):
for f in filters:
iter = f(iter)
for item in iter:
yield item
class TestLoader(object):
def __init__(self,
test_manifests,
test_types,
run_info,
manifest_filters=None,
meta_filters=None,
chunk_type="none",
total_chunks=1,
chunk_number=1,
include_https=True):
self.test_types = test_types
self.run_info = run_info
self.manifest_filters = manifest_filters if manifest_filters is not None else []
self.meta_filters = meta_filters if meta_filters is not None else []
self.manifests = test_manifests
self.tests = None
self.disabled_tests = None
self.include_https = include_https
self.chunk_type = chunk_type
self.total_chunks = total_chunks
self.chunk_number = chunk_number
self.chunker = {"none": Unchunked,
"hash": HashChunker,
"equal_time": EqualTimeChunker}[chunk_type](total_chunks,
chunk_number)
self._test_ids = None
self.directory_manifests = {}
self._load_tests()
@property
def test_ids(self):
if self._test_ids is None:
self._test_ids = []
for test_dict in [self.disabled_tests, self.tests]:
for test_type in self.test_types:
self._test_ids += [item.id for item in test_dict[test_type]]
return self._test_ids
def get_test(self, manifest_test, inherit_metadata, test_metadata):
if test_metadata is not None:
inherit_metadata.append(test_metadata)
test_metadata = test_metadata.get_test(manifest_test.id)
return wpttest.from_manifest(manifest_test, inherit_metadata, test_metadata)
def load_dir_metadata(self, test_manifest, metadata_path, test_path):
rv = []
path_parts = os.path.dirname(test_path).split(os.path.sep)
for i in xrange(1,len(path_parts) + 1):
path = os.path.join(os.path.sep.join(path_parts[:i]), "__dir__.ini")
if path not in self.directory_manifests:
self.directory_manifests[path] = manifestexpected.get_dir_manifest(
metadata_path, path, self.run_info)
manifest = self.directory_manifests[path]
if manifest is not None:
rv.append(manifest)
return rv
def load_metadata(self, test_manifest, metadata_path, test_path):
inherit_metadata = self.load_dir_metadata(test_manifest, metadata_path, test_path)
test_metadata = manifestexpected.get_manifest(
metadata_path, test_path, test_manifest.url_base, self.run_info)
return inherit_metadata, test_metadata
def iter_tests(self):
manifest_items = []
for manifest in sorted(self.manifests.keys(), key=lambda x:x.url_base):
manifest_iter = iterfilter(self.manifest_filters,
manifest.itertypes(*self.test_types))
manifest_items.extend(manifest_iter)
if self.chunker is not None:
manifest_items = self.chunker(manifest_items)
for test_path, tests in manifest_items:
manifest_file = iter(tests).next().manifest
metadata_path = self.manifests[manifest_file]["metadata_path"]
inherit_metadata, test_metadata = self.load_metadata(manifest_file, metadata_path, test_path)
for test in iterfilter(self.meta_filters,
self.iter_wpttest(inherit_metadata, test_metadata, tests)):
yield test_path, test.test_type, test
def iter_wpttest(self, inherit_metadata, test_metadata, tests):
for manifest_test in tests:
yield self.get_test(manifest_test, inherit_metadata, test_metadata)
def _load_tests(self):
"""Read in the tests from the manifest file and add them to a queue"""
tests = {"enabled":defaultdict(list),
"disabled":defaultdict(list)}
for test_path, test_type, test in self.iter_tests():
enabled = not test.disabled()
if not self.include_https and test.environment["protocol"] == "https":
enabled = False
key = "enabled" if enabled else "disabled"
tests[key][test_type].append(test)
self.tests = tests["enabled"]
self.disabled_tests = tests["disabled"]
def groups(self, test_types, chunk_type="none", total_chunks=1, chunk_number=1):
groups = set()
for test_type in test_types:
for test in self.tests[test_type]:
group = test.url.split("/")[1]
groups.add(group)
return groups
class TestSource(object):
__metaclass__ = ABCMeta
@abstractmethod
def queue_tests(self, test_queue):
pass
@abstractmethod
def requeue_test(self, test):
pass
def __enter__(self):
return self
def __exit__(self, *args, **kwargs):
pass
class SingleTestSource(TestSource):
def __init__(self, test_queue):
self.test_queue = test_queue
@classmethod
def queue_tests(cls, test_queue, test_type, tests):
for test in tests[test_type]:
test_queue.put(test)
def get_queue(self):
if self.test_queue.empty():
return None
return self.test_queue
def requeue_test(self, test):
self.test_queue.put(test)
class PathGroupedSource(TestSource):
def __init__(self, test_queue):
self.test_queue = test_queue
self.current_queue = None
@classmethod
def queue_tests(cls, test_queue, test_type, tests, depth=None):
if depth is True:
depth = None
prev_path = None
group = None
for test in tests[test_type]:
path = urlparse.urlsplit(test.url).path.split("/")[1:-1][:depth]
if path != prev_path:
group = []
test_queue.put(group)
prev_path = path
group.append(test)
def get_queue(self):
if not self.current_queue or self.current_queue.empty():
try:
data = self.test_queue.get(block=True, timeout=1)
self.current_queue = Queue()
for item in data:
self.current_queue.put(item)
except Empty:
return None
return self.current_queue
def requeue_test(self, test):
self.current_queue.put(test)
def __exit__(self, *args, **kwargs):
if self.current_queue:
self.current_queue.close()
| mpl-2.0 |
SimVascular/VTK | Filters/Core/Testing/Python/multipleComponentContour.py | 20 | 1879 | #!/usr/bin/env python
import vtk
from vtk.test import Testing
from vtk.util.misc import vtkGetDataRoot
VTK_DATA_ROOT = vtkGetDataRoot()
# get the interactor ui
## Graphics stuff
# Create the RenderWindow, Renderer and both Actors
#
ren1 = vtk.vtkRenderer()
renWin = vtk.vtkRenderWindow()
renWin.AddRenderer(ren1)
iren = vtk.vtkRenderWindowInteractor()
iren.SetRenderWindow(renWin)
# Two Gaussian blobs in the same 32x32x32 volume, offset along x so each
# produces its own iso-surface.
gs1 = vtk.vtkImageGaussianSource()
gs1.SetWholeExtent(0,31,0,31,0,31)
gs1.SetCenter(10,16,16)
gs1.SetMaximum(1000)
gs1.SetStandardDeviation(7)
gs2 = vtk.vtkImageGaussianSource()
gs2.SetWholeExtent(0,31,0,31,0,31)
gs2.SetCenter(22,16,16)
gs2.SetMaximum(1000)
gs2.SetStandardDeviation(7)
# Combine the two scalar fields into one image with two components.
iac = vtk.vtkImageAppendComponents()
iac.AddInputConnection(gs1.GetOutputPort())
iac.AddInputConnection(gs2.GetOutputPort())
# Contour each component separately at the same iso-value (500).
cf1 = vtk.vtkContourFilter()
cf1.SetInputConnection(iac.GetOutputPort())
cf1.SetValue(0,500)
cf1.SetArrayComponent(0)
cf2 = vtk.vtkContourFilter()
cf2.SetInputConnection(iac.GetOutputPort())
cf2.SetValue(0,500)
cf2.SetArrayComponent(1)
mapper1 = vtk.vtkPolyDataMapper()
mapper1.SetInputConnection(cf1.GetOutputPort())
mapper1.SetImmediateModeRendering(1)
mapper1.SetScalarRange(0,1)
mapper1.SetScalarVisibility(0)
mapper1.Update()
mapper2 = vtk.vtkPolyDataMapper()
mapper2.SetInputConnection(cf2.GetOutputPort())
mapper2.SetImmediateModeRendering(1)
mapper2.SetScalarRange(0,1)
mapper2.SetScalarVisibility(0)
# White surface for component 0, red surface for component 1.
actor1 = vtk.vtkActor()
actor1.SetMapper(mapper1)
actor1.GetProperty().SetColor(1,1,1)
ren1.AddActor(actor1)
actor2 = vtk.vtkActor()
actor2.SetMapper(mapper2)
actor2.GetProperty().SetColor(1,0,0)
ren1.AddActor(actor2)
# Add the actors to the renderer, set the background and size
#
ren1.SetBackground(.3,.3,.3)
renWin.SetSize(400,400)
# enable user interface interactor
iren.Initialize()
# prevent the tk window from showing up then start the event loop
# --- end of script --
| bsd-3-clause |
SeniorLimpio/ldroid_kernel | tools/perf/scripts/python/syscall-counts.py | 11181 | 1522 | # system call counts
# (c) 2010, Tom Zanussi <tzanussi@gmail.com>
# Licensed under the terms of the GNU GPL License version 2
#
# Displays system-wide system call totals, broken down by syscall.
# If a [comm] arg is specified, only syscalls called by [comm] are displayed.
import os
import sys
sys.path.append(os.environ['PERF_EXEC_PATH'] + \
'/scripts/python/Perf-Trace-Util/lib/Perf/Trace')
from perf_trace_context import *
from Core import *
from Util import syscall_name
# Usage string shown when too many arguments are supplied.
usage = "perf script -s syscall-counts.py [comm]\n";

# Optional process-name filter; None means count syscalls system-wide.
for_comm = None
if len(sys.argv) > 2:
    sys.exit(usage)
if len(sys.argv) > 1:
    for_comm = sys.argv[1]

# Per-syscall-id event counters (autodict comes from the perf Core helpers).
syscalls = autodict()
def trace_begin():
    """Called once by perf when the script starts."""
    print "Press control+C to stop and show the summary"
def trace_end():
    """Called once by perf when tracing stops; emit the summary table."""
    print_syscall_totals()
def raw_syscalls__sys_enter(event_name, context, common_cpu,
    common_secs, common_nsecs, common_pid, common_comm,
    id, args):
    """perf callback fired on every raw_syscalls:sys_enter event."""
    if for_comm is not None:
        # A comm filter is active; skip events from other processes.
        if common_comm != for_comm:
            return
    try:
        syscalls[id] += 1
    except TypeError:
        # First hit for this syscall id: the autodict slot is not an int yet.
        syscalls[id] = 1
def print_syscall_totals():
    """Print a table of per-syscall event counts, highest count first."""
    if for_comm is not None:
        print "\nsyscall events for %s:\n\n" % (for_comm),
    else:
        print "\nsyscall events:\n\n",
    print "%-40s %10s\n" % ("event", "count"),
    print "%-40s %10s\n" % ("----------------------------------------", \
        "-----------"),
    # Sort by (count, id) descending; Python 2 tuple-parameter lambda.
    for id, val in sorted(syscalls.iteritems(), key = lambda(k, v): (v, k), \
        reverse = True):
        print "%-40s %10d\n" % (syscall_name(id), val),
| gpl-2.0 |
watspidererik/testenv | flask/lib/python2.7/site-packages/pip/_vendor/colorama/ansi.py | 527 | 1039 | # Copyright Jonathan Hartley 2013. BSD 3-Clause license, see LICENSE file.
'''
This module generates ANSI character codes to printing colors to terminals.
See: http://en.wikipedia.org/wiki/ANSI_escape_code
'''
# Control Sequence Introducer: every SGR escape starts with ESC [.
CSI = '\033['


def code_to_chars(code):
    """Return the terminal escape sequence (SGR) for a numeric code."""
    return '%s%sm' % (CSI, code)


class AnsiCodes(object):
    """Expose each public numeric attribute of *codes* as a ready-to-print
    ANSI escape string attribute of the same name."""

    def __init__(self, codes):
        for name in dir(codes):
            if name.startswith('_'):
                continue
            setattr(self, name, code_to_chars(getattr(codes, name)))
class AnsiFore:
    # SGR foreground colour codes (30-37); 39 restores the default colour.
    BLACK = 30
    RED = 31
    GREEN = 32
    YELLOW = 33
    BLUE = 34
    MAGENTA = 35
    CYAN = 36
    WHITE = 37
    RESET = 39

class AnsiBack:
    # SGR background colour codes (40-47); 49 restores the default colour.
    BLACK = 40
    RED = 41
    GREEN = 42
    YELLOW = 43
    BLUE = 44
    MAGENTA = 45
    CYAN = 46
    WHITE = 47
    RESET = 49

class AnsiStyle:
    # SGR intensity codes; RESET_ALL (0) clears colour and style together.
    BRIGHT = 1
    DIM = 2
    NORMAL = 22
    RESET_ALL = 0

# Ready-to-print escape strings, e.g. Fore.RED == '\033[31m'.
Fore = AnsiCodes( AnsiFore )
Back = AnsiCodes( AnsiBack )
Style = AnsiCodes( AnsiStyle )
| mit |
jiangzhixiao/odoo | addons/account/wizard/account_period_close.py | 341 | 2646 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp.osv import fields, osv
from openerp.tools.translate import _
class account_period_close(osv.osv_memory):
    """
    Wizard that closes one or more accounting periods.
    """
    _name = "account.period.close"
    _description = "period close"
    # Confirmation checkbox; data_save() is a no-op until it is ticked.
    _columns = {
        'sure': fields.boolean('Check this box'),
    }

    def data_save(self, cr, uid, ids, context=None):
        """
        Close every period listed in context['active_ids'].

        A period can only be closed once all of its journal entries are
        posted; otherwise the user is told to post them first.

        @param cr: the current row, from the database cursor,
        @param uid: the current user's ID for security checks,
        @param ids: account period close wizard ID or list of IDs
        """
        journal_period_pool = self.pool.get('account.journal.period')
        period_pool = self.pool.get('account.period')
        account_move_obj = self.pool.get('account.move')
        # Target state for both the periods and their journal periods.
        mode = 'done'
        for form in self.read(cr, uid, ids, context=context):
            if form['sure']:
                for id in context['active_ids']:
                    # Refuse to close while draft journal entries remain.
                    account_move_ids = account_move_obj.search(cr, uid, [('period_id', '=', id), ('state', '=', "draft")], context=context)
                    if account_move_ids:
                        raise osv.except_osv(_('Invalid Action!'), _('In order to close a period, you must first post related journal entries.'))
                    cr.execute('update account_journal_period set state=%s where period_id=%s', (mode, id))
                    cr.execute('update account_period set state=%s where id=%s', (mode, id))
        # Raw SQL bypasses the ORM, so drop any stale cached records.
        self.invalidate_cache(cr, uid, context=context)
        return {'type': 'ir.actions.act_window_close'}
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
fredrik-johansson/mpmath | mpmath/tests/test_ode.py | 15 | 1822 | #from mpmath.calculus import ODE_step_euler, ODE_step_rk4, odeint, arange
from mpmath import odefun, cos, sin, mpf, sinc, mp
'''
solvers = [ODE_step_euler, ODE_step_rk4]
def test_ode1():
"""
Let's solve:
x'' + w**2 * x = 0
i.e. x1 = x, x2 = x1':
x1' = x2
x2' = -x1
"""
def derivs((x1, x2), t):
return x2, -x1
for solver in solvers:
t = arange(0, 3.1415926, 0.005)
sol = odeint(derivs, (0., 1.), t, solver)
x1 = [a[0] for a in sol]
x2 = [a[1] for a in sol]
# the result is x1 = sin(t), x2 = cos(t)
# let's just check the end points for t = pi
assert abs(x1[-1]) < 1e-2
assert abs(x2[-1] - (-1)) < 1e-2
def test_ode2():
"""
Let's solve:
x' - x = 0
i.e. x = exp(x)
"""
def derivs((x), t):
return x
for solver in solvers:
t = arange(0, 1, 1e-3)
sol = odeint(derivs, (1.,), t, solver)
x = [a[0] for a in sol]
# the result is x = exp(t)
# let's just check the end point for t = 1, i.e. x = e
assert abs(x[-1] - 2.718281828) < 1e-2
'''
def test_odefun_rational():
    # y' = -2*x*y**2 with y(0) = 1 has the exact solution y = 1/(1+x**2);
    # check the ODE solver against it at x = 2.
    mp.dps = 15
    # A rational function
    f = lambda t: 1/(1+mpf(t)**2)
    g = odefun(lambda x, y: [-2*x*y[0]**2], 0, [f(0)])
    assert f(2).ae(g(2)[0])
def test_odefun_sinc_large():
    # sinc satisfies y' = (cos(x) - y)/x; integrate from x = 1 out to
    # x = 100 with loose tolerance and check ~1% relative accuracy there.
    mp.dps = 15
    # Sinc function; test for large x
    f = sinc
    g = odefun(lambda x, y: [(cos(x)-y[0])/x], 1, [f(1)], tol=0.01, degree=5)
    assert abs(f(100) - g(100)[0])/f(100) < 0.01
def test_odefun_harmonic():
    # The system y0' = -y1, y1' = y0 with y(0) = (1, 0) is (cos, sin).
    mp.dps = 15
    # Harmonic oscillator
    f = odefun(lambda x, y: [-y[1], y[0]], 0, [1, 0])
    for x in [0, 1, 2.5, 8, 3.7]:  # we go back to 3.7 to check caching
        c, s = f(x)
        assert c.ae(cos(x))
        assert s.ae(sin(x))
| bsd-3-clause |
theflofly/tensorflow | tensorflow/examples/speech_commands/train.py | 4 | 17458 | # Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
r"""Simple speech recognition to spot a limited number of keywords.
This is a self-contained example script that will train a very basic audio
recognition model in TensorFlow. It downloads the necessary training data and
runs with reasonable defaults to train within a few hours even only using a CPU.
For more information, please see
https://www.tensorflow.org/tutorials/audio_recognition.
It is intended as an introduction to using neural networks for audio
recognition, and is not a full speech recognition system. For more advanced
speech systems, I recommend looking into Kaldi. This network uses a keyword
detection style to spot discrete words from a small vocabulary, consisting of
"yes", "no", "up", "down", "left", "right", "on", "off", "stop", and "go".
To run the training process, use:
bazel run tensorflow/examples/speech_commands:train
This will write out checkpoints to /tmp/speech_commands_train/, and will
download over 1GB of open source training data, so you'll need enough free space
and a good internet connection. The default data is a collection of thousands of
one-second .wav files, each containing one spoken word. This data set is
collected from https://aiyprojects.withgoogle.com/open_speech_recording, please
consider contributing to help improve this and other models!
As training progresses, it will print out its accuracy metrics, which should
rise above 90% by the end. Once it's complete, you can run the freeze script to
get a binary GraphDef that you can easily deploy on mobile applications.
If you want to train on your own data, you'll need to create .wavs with your
recordings, all at a consistent length, and then arrange them into subfolders
organized by label. For example, here's a possible file structure:
my_wavs >
up >
audio_0.wav
audio_1.wav
down >
audio_2.wav
audio_3.wav
other>
audio_4.wav
audio_5.wav
You'll also need to tell the script what labels to look for, using the
`--wanted_words` argument. In this case, 'up,down' might be what you want, and
the audio in the 'other' folder would be used to train an 'unknown' category.
To pull this all together, you'd run:
bazel run tensorflow/examples/speech_commands:train -- \
--data_dir=my_wavs --wanted_words=up,down
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import argparse
import os.path
import sys
import numpy as np
from six.moves import xrange # pylint: disable=redefined-builtin
import tensorflow as tf
import input_data
import models
from tensorflow.python.platform import gfile
FLAGS = None
def main(_):
  """Build, train, evaluate and checkpoint the keyword-spotting model.

  All configuration comes from the module-level FLAGS namespace populated
  by the argument parser in the __main__ block.
  """
  # We want to see all the logging messages for this tutorial.
  tf.logging.set_verbosity(tf.logging.INFO)
  # Start a new TensorFlow session.
  sess = tf.InteractiveSession()
  # Begin by making sure we have the training data we need. If you already have
  # training data of your own, use `--data_url= ` on the command line to avoid
  # downloading.
  model_settings = models.prepare_model_settings(
      len(input_data.prepare_words_list(FLAGS.wanted_words.split(','))),
      FLAGS.sample_rate, FLAGS.clip_duration_ms, FLAGS.window_size_ms,
      FLAGS.window_stride_ms, FLAGS.feature_bin_count, FLAGS.preprocess)
  audio_processor = input_data.AudioProcessor(
      FLAGS.data_url, FLAGS.data_dir,
      FLAGS.silence_percentage, FLAGS.unknown_percentage,
      FLAGS.wanted_words.split(','), FLAGS.validation_percentage,
      FLAGS.testing_percentage, model_settings, FLAGS.summaries_dir)
  fingerprint_size = model_settings['fingerprint_size']
  label_count = model_settings['label_count']
  time_shift_samples = int((FLAGS.time_shift_ms * FLAGS.sample_rate) / 1000)
  # Figure out the learning rates for each training phase. Since it's often
  # effective to have high learning rates at the start of training, followed by
  # lower levels towards the end, the number of steps and learning rates can be
  # specified as comma-separated lists to define the rate at each stage. For
  # example --how_many_training_steps=10000,3000 --learning_rate=0.001,0.0001
  # will run 13,000 training loops in total, with a rate of 0.001 for the first
  # 10,000, and 0.0001 for the final 3,000.
  training_steps_list = list(map(int, FLAGS.how_many_training_steps.split(',')))
  learning_rates_list = list(map(float, FLAGS.learning_rate.split(',')))
  if len(training_steps_list) != len(learning_rates_list):
    raise Exception(
        '--how_many_training_steps and --learning_rate must be equal length '
        'lists, but are %d and %d long instead' % (len(training_steps_list),
                                                   len(learning_rates_list)))
  input_placeholder = tf.placeholder(
      tf.float32, [None, fingerprint_size], name='fingerprint_input')
  if FLAGS.quantize:
    # Fake-quantize the input so the graph matches eight-bit deployment.
    fingerprint_min, fingerprint_max = input_data.get_features_range(
        model_settings)
    fingerprint_input = tf.fake_quant_with_min_max_args(
        input_placeholder, fingerprint_min, fingerprint_max)
  else:
    fingerprint_input = input_placeholder
  logits, dropout_prob = models.create_model(
      fingerprint_input,
      model_settings,
      FLAGS.model_architecture,
      is_training=True)
  # Define loss and optimizer
  ground_truth_input = tf.placeholder(
      tf.int64, [None], name='groundtruth_input')
  # Optionally we can add runtime checks to spot when NaNs or other symptoms of
  # numerical errors start occurring during training.
  control_dependencies = []
  if FLAGS.check_nans:
    checks = tf.add_check_numerics_ops()
    control_dependencies = [checks]
  # Create the back propagation and training evaluation machinery in the graph.
  with tf.name_scope('cross_entropy'):
    cross_entropy_mean = tf.losses.sparse_softmax_cross_entropy(
        labels=ground_truth_input, logits=logits)
  if FLAGS.quantize:
    tf.contrib.quantize.create_training_graph(quant_delay=0)
  with tf.name_scope('train'), tf.control_dependencies(control_dependencies):
    learning_rate_input = tf.placeholder(
        tf.float32, [], name='learning_rate_input')
    train_step = tf.train.GradientDescentOptimizer(
        learning_rate_input).minimize(cross_entropy_mean)
  predicted_indices = tf.argmax(logits, 1)
  correct_prediction = tf.equal(predicted_indices, ground_truth_input)
  confusion_matrix = tf.confusion_matrix(
      ground_truth_input, predicted_indices, num_classes=label_count)
  evaluation_step = tf.reduce_mean(tf.cast(correct_prediction, tf.float32))
  with tf.get_default_graph().name_scope('eval'):
    tf.summary.scalar('cross_entropy', cross_entropy_mean)
    tf.summary.scalar('accuracy', evaluation_step)
  global_step = tf.train.get_or_create_global_step()
  increment_global_step = tf.assign(global_step, global_step + 1)
  saver = tf.train.Saver(tf.global_variables())
  # Merge all the summaries and write them out to /tmp/retrain_logs (by default)
  merged_summaries = tf.summary.merge_all(scope='eval')
  train_writer = tf.summary.FileWriter(FLAGS.summaries_dir + '/train',
                                       sess.graph)
  validation_writer = tf.summary.FileWriter(FLAGS.summaries_dir + '/validation')
  tf.global_variables_initializer().run()
  start_step = 1
  if FLAGS.start_checkpoint:
    # Resume from a saved checkpoint, including its step counter.
    models.load_variables_from_checkpoint(sess, FLAGS.start_checkpoint)
    start_step = global_step.eval(session=sess)
  tf.logging.info('Training from step: %d ', start_step)
  # Save graph.pbtxt.
  tf.train.write_graph(sess.graph_def, FLAGS.train_dir,
                       FLAGS.model_architecture + '.pbtxt')
  # Save list of words.
  with gfile.GFile(
      os.path.join(FLAGS.train_dir, FLAGS.model_architecture + '_labels.txt'),
      'w') as f:
    f.write('\n'.join(audio_processor.words_list))
  # Training loop.
  training_steps_max = np.sum(training_steps_list)
  for training_step in xrange(start_step, training_steps_max + 1):
    # Figure out what the current learning rate is.
    training_steps_sum = 0
    for i in range(len(training_steps_list)):
      training_steps_sum += training_steps_list[i]
      if training_step <= training_steps_sum:
        learning_rate_value = learning_rates_list[i]
        break
    # Pull the audio samples we'll use for training.
    train_fingerprints, train_ground_truth = audio_processor.get_data(
        FLAGS.batch_size, 0, model_settings, FLAGS.background_frequency,
        FLAGS.background_volume, time_shift_samples, 'training', sess)
    # Run the graph with this batch of training data.
    train_summary, train_accuracy, cross_entropy_value, _, _ = sess.run(
        [
            merged_summaries,
            evaluation_step,
            cross_entropy_mean,
            train_step,
            increment_global_step,
        ],
        feed_dict={
            fingerprint_input: train_fingerprints,
            ground_truth_input: train_ground_truth,
            learning_rate_input: learning_rate_value,
            dropout_prob: 0.5
        })
    train_writer.add_summary(train_summary, training_step)
    tf.logging.info('Step #%d: rate %f, accuracy %.1f%%, cross entropy %f' %
                    (training_step, learning_rate_value, train_accuracy * 100,
                     cross_entropy_value))
    is_last_step = (training_step == training_steps_max)
    if (training_step % FLAGS.eval_step_interval) == 0 or is_last_step:
      set_size = audio_processor.set_size('validation')
      total_accuracy = 0
      total_conf_matrix = None
      for i in xrange(0, set_size, FLAGS.batch_size):
        validation_fingerprints, validation_ground_truth = (
            audio_processor.get_data(FLAGS.batch_size, i, model_settings, 0.0,
                                     0.0, 0, 'validation', sess))
        # Run a validation step and capture training summaries for TensorBoard
        # with the `merged` op.
        validation_summary, validation_accuracy, conf_matrix = sess.run(
            [merged_summaries, evaluation_step, confusion_matrix],
            feed_dict={
                fingerprint_input: validation_fingerprints,
                ground_truth_input: validation_ground_truth,
                dropout_prob: 1.0
            })
        validation_writer.add_summary(validation_summary, training_step)
        # The last batch may be smaller; weight its accuracy accordingly.
        batch_size = min(FLAGS.batch_size, set_size - i)
        total_accuracy += (validation_accuracy * batch_size) / set_size
        if total_conf_matrix is None:
          total_conf_matrix = conf_matrix
        else:
          total_conf_matrix += conf_matrix
      tf.logging.info('Confusion Matrix:\n %s' % (total_conf_matrix))
      tf.logging.info('Step %d: Validation accuracy = %.1f%% (N=%d)' %
                      (training_step, total_accuracy * 100, set_size))
    # Save the model checkpoint periodically.
    if (training_step % FLAGS.save_step_interval == 0 or
        training_step == training_steps_max):
      checkpoint_path = os.path.join(FLAGS.train_dir,
                                     FLAGS.model_architecture + '.ckpt')
      tf.logging.info('Saving to "%s-%d"', checkpoint_path, training_step)
      saver.save(sess, checkpoint_path, global_step=training_step)
  # Final evaluation on the held-out testing set.
  set_size = audio_processor.set_size('testing')
  tf.logging.info('set_size=%d', set_size)
  total_accuracy = 0
  total_conf_matrix = None
  for i in xrange(0, set_size, FLAGS.batch_size):
    test_fingerprints, test_ground_truth = audio_processor.get_data(
        FLAGS.batch_size, i, model_settings, 0.0, 0.0, 0, 'testing', sess)
    test_accuracy, conf_matrix = sess.run(
        [evaluation_step, confusion_matrix],
        feed_dict={
            fingerprint_input: test_fingerprints,
            ground_truth_input: test_ground_truth,
            dropout_prob: 1.0
        })
    batch_size = min(FLAGS.batch_size, set_size - i)
    total_accuracy += (test_accuracy * batch_size) / set_size
    if total_conf_matrix is None:
      total_conf_matrix = conf_matrix
    else:
      total_conf_matrix += conf_matrix
  tf.logging.info('Confusion Matrix:\n %s' % (total_conf_matrix))
  tf.logging.info('Final test accuracy = %.1f%% (N=%d)' % (total_accuracy * 100,
                                                           set_size))
if __name__ == '__main__':
  # Command-line interface; parsed values land in the module-level FLAGS.
  parser = argparse.ArgumentParser()
  parser.add_argument(
      '--data_url',
      type=str,
      # pylint: disable=line-too-long
      default='http://download.tensorflow.org/data/speech_commands_v0.02.tar.gz',
      # pylint: enable=line-too-long
      help='Location of speech training data archive on the web.')
  parser.add_argument(
      '--data_dir',
      type=str,
      default='/tmp/speech_dataset/',
      help="""\
      Where to download the speech training data to.
      """)
  parser.add_argument(
      '--background_volume',
      type=float,
      default=0.1,
      help="""\
      How loud the background noise should be, between 0 and 1.
      """)
  parser.add_argument(
      '--background_frequency',
      type=float,
      default=0.8,
      help="""\
      How many of the training samples have background noise mixed in.
      """)
  parser.add_argument(
      '--silence_percentage',
      type=float,
      default=10.0,
      help="""\
      How much of the training data should be silence.
      """)
  parser.add_argument(
      '--unknown_percentage',
      type=float,
      default=10.0,
      help="""\
      How much of the training data should be unknown words.
      """)
  parser.add_argument(
      '--time_shift_ms',
      type=float,
      default=100.0,
      help="""\
      Range to randomly shift the training audio by in time.
      """)
  parser.add_argument(
      '--testing_percentage',
      type=int,
      default=10,
      help='What percentage of wavs to use as a test set.')
  parser.add_argument(
      '--validation_percentage',
      type=int,
      default=10,
      help='What percentage of wavs to use as a validation set.')
  parser.add_argument(
      '--sample_rate',
      type=int,
      default=16000,
      help='Expected sample rate of the wavs',)
  parser.add_argument(
      '--clip_duration_ms',
      type=int,
      default=1000,
      help='Expected duration in milliseconds of the wavs',)
  parser.add_argument(
      '--window_size_ms',
      type=float,
      default=30.0,
      help='How long each spectrogram timeslice is.',)
  parser.add_argument(
      '--window_stride_ms',
      type=float,
      default=10.0,
      help='How far to move in time between spectogram timeslices.',)
  parser.add_argument(
      '--feature_bin_count',
      type=int,
      default=40,
      help='How many bins to use for the MFCC fingerprint',
  )
  parser.add_argument(
      '--how_many_training_steps',
      type=str,
      default='15000,3000',
      help='How many training loops to run',)
  parser.add_argument(
      '--eval_step_interval',
      type=int,
      default=400,
      help='How often to evaluate the training results.')
  parser.add_argument(
      '--learning_rate',
      type=str,
      default='0.001,0.0001',
      help='How large a learning rate to use when training.')
  parser.add_argument(
      '--batch_size',
      type=int,
      default=100,
      help='How many items to train with at once',)
  parser.add_argument(
      '--summaries_dir',
      type=str,
      default='/tmp/retrain_logs',
      help='Where to save summary logs for TensorBoard.')
  parser.add_argument(
      '--wanted_words',
      type=str,
      default='yes,no,up,down,left,right,on,off,stop,go',
      help='Words to use (others will be added to an unknown label)',)
  parser.add_argument(
      '--train_dir',
      type=str,
      default='/tmp/speech_commands_train',
      help='Directory to write event logs and checkpoint.')
  parser.add_argument(
      '--save_step_interval',
      type=int,
      default=100,
      help='Save model checkpoint every save_steps.')
  parser.add_argument(
      '--start_checkpoint',
      type=str,
      default='',
      help='If specified, restore this pretrained model before any training.')
  parser.add_argument(
      '--model_architecture',
      type=str,
      default='conv',
      help='What model architecture to use')
  # NOTE(review): argparse's type=bool treats ANY non-empty string as True
  # (e.g. --check_nans=False is truthy); action='store_true' would be the
  # conventional fix, but it changes the accepted CLI syntax.
  parser.add_argument(
      '--check_nans',
      type=bool,
      default=False,
      help='Whether to check for invalid numbers during processing')
  parser.add_argument(
      '--quantize',
      type=bool,
      default=False,
      help='Whether to train the model for eight-bit deployment')
  parser.add_argument(
      '--preprocess',
      type=str,
      default='mfcc',
      help='Spectrogram processing mode. Can be "mfcc", "average", or "micro"')

  # Unrecognized flags are forwarded to tf.app.run untouched.
  FLAGS, unparsed = parser.parse_known_args()
  tf.app.run(main=main, argv=[sys.argv[0]] + unparsed)
| apache-2.0 |
d40223223/2015cd_midterm | static/Brython3.1.1-20150328-091302/Lib/VFS_import.py | 738 | 3059 | import os
from browser import doc
# Historical approach (kept for reference): inject py_VFS.js via a <script>
# element instead of reading it from __BRYTHON__ directly.
#_scripts=doc.createElement('script')
#_scripts.src="/src/py_VFS.js"
#_scripts.type="text/javascript"
#doc.get(tag='head')[0].appendChild(_scripts)

# Snapshot of the Brython virtual file system: maps VFS paths to sources.
VFS = dict(JSObject(__BRYTHON__.py_VFS))
class VFSModuleFinder:
    """PEP 302 path-hook finder that locates modules in the Brython virtual
    file system (the module-level ``VFS`` dict).

    Only path entries under /libs or /Lib are handled; any other entry is
    rejected so the import machinery moves on to the next hook.
    """

    def __init__(self, path_entry):
        print("in VFSModuleFinder")
        if path_entry.startswith('/libs') or path_entry.startswith('/Lib'):
            self.path_entry = path_entry
        else:
            raise ImportError()

    def __str__(self):
        return '<%s for "%s">' % (self.__class__.__name__, self.path_entry)

    def find_module(self, fullname, path=None):
        # NOTE(review): `path` is normalised here but the search below always
        # joins against self.path_entry; confirm whether callers ever pass a
        # different path.
        path = path or self.path_entry
        # JavaScript, "python-in-js" (.pyj) and plain Python sources are all
        # importable from the VFS; the first extension found wins.
        for _ext in ['js', 'pyj', 'py']:
            _filepath = os.path.join(self.path_entry, '%s.%s' % (fullname, _ext))
            if _filepath in VFS:
                print("module found at %s:%s" % (_filepath, fullname))
                return VFSModuleLoader(_filepath, fullname)
        print('module %s not found' % fullname)
        # Signal "not found" to the import machinery.  (The original had an
        # unreachable `return None` after this raise; it has been removed.)
        raise ImportError()
class VFSModuleLoader:
    """PEP 302 loader that creates module objects from Brython VFS sources."""

    def __init__(self, filepath, name):
        self._filepath = filepath
        self._name = name

    def get_source(self):
        """Return the source text stored in the VFS for this module.

        Raises ImportError if the path is no longer present in the VFS.
        """
        if self._filepath in VFS:
            return JSObject(readFromVFS(self._filepath))
        # Bug fix: the original referenced the undefined name `fullname`
        # here, turning a missing source into a NameError.
        raise ImportError('could not find source for %s' % self._name)

    def is_package(self):
        # A dotted module name means the module lives inside a package.
        return '.' in self._name

    def load_module(self):
        if self._name in sys.modules:
            # Reuse the module created by a previous import.
            mod = sys.modules[self._name]
            return mod
        _src = self.get_source()
        # Dispatch on the file extension chosen by the finder.
        if self._filepath.endswith('.js'):
            mod = JSObject(import_js_module(_src, self._filepath, self._name))
        elif self._filepath.endswith('.py'):
            mod = JSObject(import_py_module(_src, self._filepath, self._name))
        elif self._filepath.endswith('.pyj'):
            mod = JSObject(import_pyj_module(_src, self._filepath, self._name))
        else:
            raise ImportError('Invalid Module: %s' % self._filepath)
        # Set a few properties required by PEP 302.
        mod.__file__ = self._filepath
        mod.__name__ = self._name
        mod.__path__ = os.path.abspath(self._filepath)
        mod.__loader__ = self
        mod.__package__ = '.'.join(self._name.split('.')[:-1])
        if self.is_package():
            print('adding path for package')
            # Set __path__ for packages so we can find the sub-modules.
            # Bug fix: the original read `self.path_entry`, an attribute this
            # class never defines (AttributeError at runtime); use the
            # directory containing the module file instead.
            mod.__path__ = [os.path.dirname(self._filepath)]
        else:
            print('imported as regular module')
        print('creating a new module object for "%s"' % self._name)
        sys.modules.setdefault(self._name, mod)
        JSObject(__BRYTHON__.imported)[self._name] = mod
        return mod
JSObject(__BRYTHON__.path_hooks.insert(0, VFSModuleFinder))
| gpl-3.0 |
vicky2135/lucious | oscar/lib/python2.7/site-packages/django/forms/utils.py | 241 | 6131 | from __future__ import unicode_literals
import json
import sys
from django.conf import settings
from django.core.exceptions import ValidationError # backwards compatibility
from django.utils import six, timezone
from django.utils.encoding import force_text, python_2_unicode_compatible
from django.utils.html import escape, format_html, format_html_join, html_safe
from django.utils.translation import ugettext_lazy as _
try:
from collections import UserList
except ImportError: # Python 2
from UserList import UserList
def pretty_name(name):
    """Turn a field name like 'first_name' into 'First name'."""
    return name.replace('_', ' ').capitalize() if name else ''
def flatatt(attrs):
    """
    Render a dict of attributes as a single HTML attribute string.

    The result carries a leading space and key="value", XML-style pairs,
    sorted by attribute name.  A boolean True value renders the bare
    attribute name; a boolean False value drops the attribute entirely.
    Keys are assumed not to need XML escaping.  An empty dict yields an
    empty string.

    The result is passed through 'mark_safe' (by way of 'format_html_join').
    """
    pairs = [(name, value) for name, value in attrs.items()
             if not isinstance(value, bool)]
    flags = [(name,) for name, value in attrs.items()
             if isinstance(value, bool) and value]
    return (
        format_html_join('', ' {}="{}"', sorted(pairs)) +
        format_html_join('', ' {}', sorted(flags))
    )
@html_safe
@python_2_unicode_compatible
class ErrorDict(dict):
    """
    A collection of errors that knows how to display itself in various formats.

    The dictionary keys are the field names, and the values are the errors.
    """
    def as_data(self):
        # Map each field to its underlying list of ValidationError instances.
        return {f: e.as_data() for f, e in self.items()}

    def as_json(self, escape_html=False):
        return json.dumps({f: e.get_json_data(escape_html) for f, e in self.items()})

    def as_ul(self):
        if not self:
            return ''
        return format_html(
            '<ul class="errorlist">{}</ul>',
            format_html_join('', '<li>{}{}</li>', ((k, force_text(v)) for k, v in self.items()))
        )

    def as_text(self):
        # Plain-text rendering: one bullet per field, indented sub-bullets
        # for each of that field's messages.
        output = []
        for field, errors in self.items():
            output.append('* %s' % field)
            output.append('\n'.join(' * %s' % e for e in errors))
        return '\n'.join(output)

    def __str__(self):
        # HTML (<ul>) is the default string form, used by template rendering.
        return self.as_ul()
@html_safe
@python_2_unicode_compatible
class ErrorList(UserList, list):
    """
    A collection of errors that knows how to display itself in various formats.
    """
    def __init__(self, initlist=None, error_class=None):
        super(ErrorList, self).__init__(initlist)
        # CSS class(es) applied to the <ul> wrapper in as_ul().
        if error_class is None:
            self.error_class = 'errorlist'
        else:
            self.error_class = 'errorlist {}'.format(error_class)

    def as_data(self):
        # Normalize the stored data into a flat list of ValidationErrors.
        return ValidationError(self.data).error_list

    def get_json_data(self, escape_html=False):
        errors = []
        for error in self.as_data():
            message = list(error)[0]
            errors.append({
                'message': escape(message) if escape_html else message,
                'code': error.code or '',
            })
        return errors

    def as_json(self, escape_html=False):
        return json.dumps(self.get_json_data(escape_html))

    def as_ul(self):
        if not self.data:
            return ''
        return format_html(
            '<ul class="{}">{}</ul>',
            self.error_class,
            format_html_join('', '<li>{}</li>', ((force_text(e),) for e in self))
        )

    def as_text(self):
        return '\n'.join('* %s' % e for e in self)

    def __str__(self):
        return self.as_ul()

    def __repr__(self):
        return repr(list(self))

    def __contains__(self, item):
        return item in list(self)

    def __eq__(self, other):
        return list(self) == other

    def __ne__(self, other):
        return list(self) != other

    def __getitem__(self, i):
        # Indexing yields the message text, not the ValidationError object.
        error = self.data[i]
        if isinstance(error, ValidationError):
            return list(error)[0]
        return force_text(error)

    def __reduce_ex__(self, *args, **kwargs):
        # The `list` reduce function returns an iterator as the fourth element
        # that is normally used for repopulating. Since we only inherit from
        # `list` for `isinstance` backward compatibility (Refs #17413) we
        # nullify this iterator as it would otherwise result in duplicate
        # entries. (Refs #23594)
        info = super(UserList, self).__reduce_ex__(*args, **kwargs)
        return info[:3] + (None, None)
# Utilities for time zone support in DateTimeField et al.
def from_current_timezone(value):
    """
    When time zone support is enabled, convert naive datetimes
    entered in the current time zone to aware datetimes.
    """
    if settings.USE_TZ and value is not None and timezone.is_naive(value):
        current_timezone = timezone.get_current_timezone()
        try:
            return timezone.make_aware(value, current_timezone)
        except Exception:
            # make_aware() raises for wall-clock times skipped or repeated by
            # a DST transition; re-raise as a ValidationError while keeping
            # the original traceback (six.reraise).
            message = _(
                '%(datetime)s couldn\'t be interpreted '
                'in time zone %(current_timezone)s; it '
                'may be ambiguous or it may not exist.'
            )
            params = {'datetime': value, 'current_timezone': current_timezone}
            six.reraise(ValidationError, ValidationError(
                message,
                code='ambiguous_timezone',
                params=params,
            ), sys.exc_info()[2])
    return value
def to_current_timezone(value):
    """
    When time zone support is enabled, convert aware datetimes
    to naive datetimes in the current time zone for display.
    """
    if not settings.USE_TZ or value is None or not timezone.is_aware(value):
        return value
    return timezone.make_naive(value, timezone.get_current_timezone())
| bsd-3-clause |
meganlkm/faker | faker/providers/person/fi_FI/__init__.py | 19 | 14403 | # coding=utf-8
from __future__ import unicode_literals
from .. import Provider as PersonProvider
class Provider(PersonProvider):
    """Finnish (fi_FI) person-name provider for Faker."""

    # "First Last" appears twice so it is sampled twice as often as
    # the "Last, First" form.
    formats = (
        '{{first_name}} {{last_name}}',
        '{{first_name}} {{last_name}}',
        '{{last_name}}, {{first_name}}'
    )

    # Honorifics and their common Finnish abbreviations.
    prefixes = (
        'Herra', 'hra', 'Rouva', 'rva', 'Tohtori', 'tri', 'prof.', 'arkkit.'
    )

    # Academic degree suffixes.
    suffixes = ('DI', 'PhD', 'MSc', 'BSc')

    first_names = (
        'Aake', 'Aapeli', 'Aapo', 'Aarne', 'Aarni', 'Aarno', 'Aaro', 'Aaron',
        'Aarre', 'Aatami', 'Aatos', 'Aatto', 'Aatu', 'Ahti', 'Aimo', 'Aki',
        'Aksel', 'Akseli', 'Aku', 'Alarik', 'Aleksanteri', 'Aleksi', 'Aleksis',
        'Ali', 'Allan', 'Alpo', 'Altti', 'Alvar', 'Alvari', 'Anselmi', 'Anssi',
        'Antero', 'Anton', 'Antti', 'Antton', 'Anttoni', 'Ari', 'Ari-Pekka',
        'Armas', 'Arto', 'Arttu', 'Arttur', 'Artturi', 'Arvi', 'Arvo', 'Asko',
        'Aslak', 'Asser', 'Asseri', 'Atte', 'Aukusti', 'Aulis', 'Auvo', 'Benjam',
        'Benjamin', 'Daniel', 'Eeli', 'Eelis', 'Eemeli', 'Eemil', 'Eerik',
        'Eerikki', 'Eero', 'Eetu', 'Eevert', 'Eevertti', 'Einari', 'Eino',
        'Elias', 'Eliel', 'Eljas', 'Elmeri', 'Elmo', 'Ensio', 'Erkki', 'Erno',
        'Esa', 'Esaias', 'Esko', 'Hannes', 'Hannu', 'Harri', 'Harry', 'Heikki',
        'Heimo', 'Heino', 'Hemmo', 'Henri', 'Henrik', 'Henrikki', 'Herman',
        'Hermanni', 'Huugo', 'Iikka', 'Iiro', 'Iisak', 'Iisakki', 'Iivari',
        'Ilari', 'Ilkka', 'Ilmari', 'Ilmo', 'Ilpo', 'Immanuel', 'Into', 'Ismo',
        'Isto', 'Jaakko', 'Jalmar', 'Jalmari', 'Jalo', 'Jami', 'Jani', 'Janne',
        'Jari', 'Jari-Pekka', 'Jarkko', 'Jarmo', 'Jarno', 'Jasper', 'Jere',
        'Jeremias', 'Jesse', 'Jimi', 'Joel', 'Johannes', 'Joni', 'Jonne',
        'Joona', 'Joonas', 'Joonatan', 'Jooseppi', 'Jori', 'Jorma', 'Jouko',
        'Jouni', 'Juha', 'Juha-Matti', 'Juha-Pekka', 'Juhana', 'Juhani', 'Juho',
        'Jukka', 'Jukka-Pekka', 'Julius', 'Jussi', 'Juuso', 'Jyri', 'Jyrki',
        'Kaapo', 'Kaarle', 'Kaarlo', 'Kai', 'Kaino', 'Kalervo', 'Kaleva',
        'Kalevi', 'Kalle', 'Kari', 'Karri', 'Kasper', 'Kasperi', 'Kauko',
        'Kauno', 'Keijo', 'Kimi', 'Kimmo', 'Klaus', 'Konsta', 'Konstantin',
        'Kosti', 'Kristian', 'Kullervo', 'Kustaa', 'Kusti', 'Kyösti', 'Lari',
        'Lasse', 'Lassi', 'Lauri', 'Leevi', 'Lenni', 'Leo', 'Luka', 'Luukas',
        'Manu', 'Markku', 'Marko', 'Markus', 'Martti', 'Matias', 'Matti',
        'Mauno', 'Maunu', 'Mauri', 'Miika', 'Miikka', 'Mika', 'Mikael', 'Mikko',
        'Miko', 'Miro', 'Miska', 'Nestor', 'Nestori', 'Niilo', 'Niklas', 'Niko',
        'Nikolai', 'Nuutti', 'Oiva', 'Olavi', 'Oliver', 'Olli', 'Olli-Pekka',
        'Onni', 'Orvo', 'Oskar', 'Oskari', 'Osmo', 'Ossi', 'Ossian', 'Otso',
        'Otto', 'Paavali', 'Paavo', 'Panu', 'Pasi', 'Paul', 'Pauli', 'Pekka',
        'Pellervo', 'Pentti', 'Pertti', 'Perttu', 'Petri', 'Petter', 'Petteri',
        'Pietari', 'Pyry', 'Päiviö', 'Rafael', 'Raimo', 'Raine', 'Rainer',
        'Rami', 'Rasmus', 'Rauli', 'Rauni', 'Rauno', 'Reijo', 'Reima', 'Reino',
        'Riku', 'Risto', 'Roni', 'Sakari', 'Saku', 'Salomon', 'Sami', 'Sampo',
        'Sampsa', 'Samu', 'Samuel', 'Samuli', 'Santeri', 'Santtu', 'Sauli',
        'Sebastian', 'Seppo', 'Severi', 'Simo', 'Soini', 'Sulevi', 'Sulo',
        'Taavetti', 'Taavi', 'Taisto', 'Taito', 'Taneli', 'Tapani', 'Tapio',
        'Tarmo', 'Tatu', 'Tauno', 'Teemu', 'Teijo', 'Tenho', 'Teppo',
        'Terho', 'Tero', 'Teuvo', 'Timo', 'Tino', 'Toimi', 'Toivo', 'Tomi',
        'Tommi', 'Toni', 'Topi', 'Topias', 'Torsti', 'Touko', 'Tuomas', 'Tuomo',
        'Tuukka', 'Tuure', 'Ukko', 'Uljas', 'Untamo', 'Unto', 'Uolevi', 'Urho',
        'Urpo', 'Usko', 'Uuno', 'Valde', 'Valdemar', 'Valentin', 'Valto',
        'Valtteri', 'Waltteri', 'Veeti', 'Veijo', 'Veikka', 'Veikko', 'Veli',
        'Veli-Matti', 'Veli-Pekka', 'Verner', 'Verneri', 'Vesa', 'Vieno',
        'Vilhelmi', 'Vilho', 'Vili', 'Viljam', 'Viljami', 'Viljo', 'Ville',
        'Voitto', 'Väinämö', 'Väinö', 'Ylermi', 'Yrjö', 'Aada', 'Aallotar',
        'Aija', 'Aila', 'Aili', 'Aina', 'Aini', 'Aino', 'Aira', 'Airi',
        'Aleksandra', 'Aliina', 'Aliisa', 'Alina', 'Alisa', 'Alli', 'Alma',
        'Amalia', 'Amanda', 'Anastasia', 'Anelma', 'Anette', 'Anita', 'Anitta',
        'Anja', 'Anna', 'Anna-Kaisa', 'Anna-Leena', 'Anna-Liisa', 'Anna-Maija',
        'Anna-Mari', 'Anna-Maria', 'Anne', 'Anne-Mari', 'Anne-Maria', 'Annele',
        'Anneli', 'Anni', 'Anniina', 'Annika', 'Annikki', 'Annukka', 'Anu',
        'Arja', 'Armi', 'Asta', 'Auli', 'Aulikki', 'Aune', 'Aura', 'Aurora',
        'Bertta', 'Eedit', 'Eelin', 'Eerika', 'Eeva', 'Eeva-Liisa', 'Eevi',
        'Eija', 'Eila', 'Eine', 'Eini', 'Eira', 'Elena', 'Eliina', 'Eliisa',
        'Eliisabet', 'Elina', 'Elisa', 'Elisabet', 'Elise', 'Ella', 'Ellen',
        'Elli', 'Elma', 'Elna', 'Elsa', 'Else', 'Elsi', 'Elvi', 'Elviira',
        'Emilia', 'Emma', 'Emmi', 'Enni', 'Eriika', 'Erja', 'Essi',
        'Ester', 'Esteri', 'Eveliina', 'Fanni', 'Hanna', 'Hanna-Leena',
        'Hanna-Mari', 'Hanne', 'Hannele', 'Heidi', 'Heini', 'Heleena', 'Helena',
        'Heli', 'Helinä', 'Heljä', 'Helka', 'Hellevi', 'Helli', 'Hellin',
        'Hellä', 'Helmi', 'Helmiina', 'Helvi', 'Hely', 'Henna', 'Henrietta',
        'Henriikka', 'Hertta', 'Heta', 'Hilda', 'Hilja', 'Hilkka', 'Hilla',
        'Hillevi', 'Hilma', 'Iida', 'Iina', 'Iines', 'Iiris', 'Ilma', 'Ilmi',
        'Ilona', 'Ilta', 'Impi', 'Inari', 'Inka', 'Inkeri', 'Ira', 'Irene',
        'Irina', 'Iris', 'Irja', 'Irma', 'Irmeli', 'Isabella', 'Jaana', 'Jade',
        'Janette', 'Janika', 'Janina', 'Janita', 'Janna', 'Jasmiina', 'Jasmin',
        'Jemina', 'Jenna', 'Jenni', 'Joanna', 'Johanna', 'Jonna', 'Josefiina',
        'Julia', 'Justiina', 'Jutta', 'Juulia', 'Kaarin', 'Kaarina', 'Kaija',
        'Kaino', 'Kaisa', 'Kaisu', 'Kanerva', 'Karita', 'Karoliina', 'Katariina',
        'Kati', 'Katja', 'Katri', 'Katriina', 'Kerttu', 'Kiia', 'Kirsi',
        'Kirsi-Marja', 'Kirsti', 'Kreeta', 'Krista', 'Kristiina', 'Kustaava',
        'Kyllikki', 'Lahja', 'Laila', 'Laimi', 'Laina', 'Laura', 'Lea', 'Leea',
        'Leena', 'Leila', 'Lemmikki', 'Lemmitty', 'Lempi', 'Liisa', 'Liisi',
        'Lilja', 'Lilli', 'Linda', 'Linnea', 'Lotta', 'Loviisa', 'Lumi', 'Lyydi',
        'Lyydia', 'Lyyli', 'Maaret', 'Maaria', 'Maarit', 'Maija', 'Maija-Leena',
        'Maija-Liisa', 'Maiju', 'Maila', 'Maire', 'Margareeta', 'Margareetta',
        'Mari', 'Maria', 'Marianna', 'Marianne', 'Mariia', 'Mariitta', 'Marika',
        'Marita', 'Maritta', 'Marja', 'Marja-Leena', 'Marja-Liisa',
        'Marja-Riitta', 'Marja-Terttu', 'Marjaana', 'Marjatta', 'Marjo',
        'Marjo-Riitta', 'Marjukka', 'Marjut', 'Marketta', 'Marleena', 'Martta',
        'Matilda', 'Matleena', 'Meeri', 'Meri', 'Merja', 'Mervi', 'Miia', 'Miina',
        'Mikaela', 'Milja', 'Milka', 'Milla', 'Mimosa', 'Minea', 'Minja', 'Minna',
        'Minttu', 'Mira', 'Mirja', 'Mirjam', 'Mirjami', 'Mirka', 'Mirva', 'Moona',
        'Naima', 'Natalia', 'Nea', 'Neea', 'Nella', 'Nelli', 'Netta', 'Niina',
        'Noora', 'Oili', 'Olivia', 'Onerva', 'Oona', 'Orvokki', 'Outi', 'Paula',
        'Pauliina', 'Peppi', 'Petra', 'Pihla', 'Piia', 'Pilvi', 'Pinja', 'Pirita',
        'Piritta', 'Pirjo', 'Pirkko', 'Pirkko-Liisa', 'Päivi', 'Päivikki',
        'Raakel', 'Raija', 'Raila', 'Raili', 'Raisa', 'Rauha', 'Rauni',
        'Rebekka', 'Reeta', 'Reetta', 'Reija', 'Riikka', 'Riina', 'Riitta',
        'Riitta-Liisa', 'Ritva', 'Ronja', 'Roosa', 'Saana', 'Saara', 'Saija',
        'Saila', 'Saima', 'Saimi', 'Saini', 'Salla', 'Salli', 'Salme', 'Sanelma',
        'Sanna', 'Sanni', 'Sara', 'Sari', 'Sarita', 'Satu', 'Seija', 'Selma',
        'Senja', 'Senni', 'Siiri', 'Silja', 'Sini', 'Sinikka', 'Sirkka',
        'Sirkka-Liisa', 'Sirkku', 'Sirpa', 'Sisko', 'Siviä', 'Sofia', 'Sohvi',
        'Soile', 'Soili', 'Soilikki', 'Sointu', 'Sonja', 'Stiina', 'Suoma',
        'Susan', 'Susanna', 'Susanne', 'Suvi', 'Sylvi', 'Sylvia', 'Säde', 'Taija',
        'Taimi', 'Taina', 'Talvikki', 'Tanja', 'Tarja', 'Taru', 'Tea', 'Teija',
        'Tekla', 'Tellervo', 'Teresa', 'Terhi', 'Terhikki', 'Terttu', 'Tiia',
        'Tiina', 'Tilda', 'Titta', 'Toini', 'Tuija', 'Tuire', 'Tuovi', 'Tuuli',
        'Tuulia', 'Tuulikki', 'Tytti', 'Tyyne', 'Tyyni', 'Ulla', 'Ulla-Maija',
        'Unelma', 'Ursula', 'Valma', 'Valpuri', 'Vappu', 'Varpu', 'Veera',
        'Vellamo', 'Venla', 'Vieno', 'Viivi', 'Vilhelmiina', 'Wilhelmiina',
        'Vilja', 'Vilma', 'Wilma', 'Viola', 'Virpi', 'Virva', 'Virve', 'Vuokko'
    )

    last_names = (
        'Aalto', 'Aaltonen', 'Aarnio', 'Ahde', 'Aho', 'Ahopalo', 'Ahokas',
        'Ahokangas', 'Ahola', 'Ahonen', 'Ahti', 'Ahtisaari', 'Alanen',
        'Alasalmi', 'Alho', 'Annala', 'Anttila', 'Anttonen', 'Arajärvi', 'Aro',
        'Aromaa', 'Asikainen', 'Askola', 'Astala', 'Asunmaa', 'Aura', 'Autio',
        'Auvinen', 'Eerikäinen', 'Eerola', 'Einiö', 'Ekola', 'Elo', 'Elomaa',
        'Eloranta', 'Elsilä', 'Ernamo', 'Erola', 'Eronen', 'Eskelinen', 'Eskola',
        'Haanpää', 'Haapakoski', 'Haapasalo', 'Haataja', 'Hakala', 'Hannula',
        'Harju', 'Harjula', 'Hartikainen', 'Hautala', 'Heikkilä', 'Heikkinen',
        'Heinonen', 'Heiskanen', 'Helminen', 'Hietanen', 'Hiltunen', 'Hinkkanen',
        'Hintikka', 'Hirsjärvi', 'Hirvelä', 'Hirvonen', 'Holappa', 'Hujanen',
        'Huotari', 'Huovinen', 'Huttunen', 'Huusko', 'Huuskonen', 'Hynninen',
        'Hyttinen', 'Häkkinen', 'Häkämies', 'Hämäläinen', 'Hänninen',
        'Ihalainen', 'Ihamäki', 'Iivonen', 'Ijäs', 'Ikola', 'Ikonen', 'Ikäheimo',
        'Ilmola', 'Ilmonen', 'Ilvonen', 'Immonen', 'Inkinen', 'Innanen',
        'Isokangas', 'Isokallio', 'Isokoski', 'Isometsä', 'Isomäki', 'Isotalo',
        'Issakainen', 'Itkonen', 'Itälä', 'Jaakkola', 'Jaakonsaari', 'Jaatinen',
        'Jalava', 'Jalkanen', 'Jalonen', 'Jantunen', 'Jarva', 'Jokela',
        'Jokelainen', 'Jokinen', 'Juhola', 'Jukarainen', 'Jukola', 'Julkunen',
        'Juntunen', 'Jussila', 'Jutila', 'Juva', 'Juvonen', 'Jylhä',
        'Jäntti', 'Järvi', 'Järvilehto', 'Järvinen', 'Jääskeläinen',
        'Kainulainen', 'Kallio', 'Kalliokoski', 'Kangas', 'Karhu', 'Karjalainen',
        'Karonen', 'Karppanen', 'Karppinen', 'Kataisto', 'Kataja', 'Kemppainen',
        'Keskinen', 'Keto', 'Ketola', 'Kettunen', 'Kinnunen', 'Kivelä',
        'Kiviluoto', 'Kivinen', 'Kivistö', 'Koistinen', 'Koivula', 'Kokkonen',
        # NOTE: 'Krouvi', 'Kulmala' were previously written without a comma,
        # which silently concatenated them into the bogus name 'KrouviKulmala'.
        'Korhonen', 'Korpela', 'Koskinen', 'Kosonen', 'Krouvi', 'Kulmala',
        'Kunnas', 'Kuosmanen', 'Kuparinen', 'Kurkela', 'Kurkinen', 'Kuusela',
        'Kuusisto', 'Laaksonen', 'Laatikainen', 'Lahtela', 'Lahti', 'Lahtinen',
        'Laiho', 'Laine', 'Laitinen', 'Lamminen', 'Lampinen', 'Lankinen',
        'Lappalainen', 'Larivaara', 'Lassila', 'Latvanen', 'Laukkanen',
        'Laurila', 'Lauronen', 'Lehtinen', 'Lehto', 'Lehtonen', 'Leino',
        'Leinonen', 'Lepistö', 'Leppälä', 'Leppänen', 'Leskinen', 'Lesonen',
        'Liikanen', 'Lilja', 'Linna', 'Lipponen', 'Litmanen', 'Liukkonen',
        'Loponen', 'Luhtanen', 'Lumme', 'Luostarinen', 'Makkonen', 'Manninen',
        'Markku', 'Martikainen', 'Matikainen', 'Matinsalo', 'Mattila',
        'Meriluoto', 'Miettunen', 'Mieto', 'Mikkola', 'Moilanen', 'Mustonen',
        'Muukkonen', 'Myllylä', 'Myllyniemi', 'Mäenpää', 'Mäkelä', 'Mäki',
        'Mäkilä', 'Mäkinen', 'Mänttäri', 'Mänty', 'Määttä', 'Naarajärvi',
        'Narinen', 'Nenonen', 'Neuvonen', 'Nevala', 'Nevalainen', 'Niemelä',
        'Niemi', 'Nieminen', 'Niinisalo', 'Niinistö', 'Niiranen', 'Nikkinen',
        'Nikkola', 'Nikkonen', 'Nikula', 'Niskala', 'Nissinen', 'Nousiainen',
        'Nummi', 'Numminen', 'Nurmela', 'Nurmi', 'Nurminen', 'Nuutinen',
        'Nykänen', 'Närhi', 'Oikarinen', 'Oittinen', 'Ojakangas', 'Ojala',
        'Ojalehto', 'Ojanen', 'Oksanen', 'Ollikainen', 'Ollila', 'Onnela',
        'Oranen', 'Otila', 'Outila', 'Outinen', 'Ovaska', 'Paananen', 'Paasio',
        'Pajari', 'Pale', 'Pakarinen', 'Parras', 'Parviainen', 'Pasanen',
        'Pekkanen', 'Pekkala', 'Pelkonen', 'Peltola', 'Peltonen', 'Peltoniemi',
        'Peltosaari', 'Pennanen', 'Pesonen', 'Pesälä', 'Petäjä', 'Pirhonen',
        'Pirinen', 'Pitkänen', 'Pohjola', 'Pohjonen', 'Pokka', 'Porkka', 'Poso',
        'Poutanen', 'Puikkonen', 'Pulkkinen', 'Purho', 'Pyysalo', 'Päätalo',
        'Rahikainen', 'Rahkamo', 'Rahnasto', 'Rajala', 'Rannisto', 'Rantala',
        'Rantanen', 'Rauhala', 'Rautakorpi', 'Rautanen', 'Rautavaara',
        'Rautiainen', 'Rautio', 'Reinikainen', 'Rekunen', 'Repo', 'Repomem',
        'Riihelä', 'Riihijärvi', 'Riikonen', 'Rinne', 'Rintala', 'Rissanen',
        'Ritala', 'Roimola', 'Roivas', 'Ronni', 'Ruohonen', 'Ruutu', 'Ryttäri',
        'Räikkönen', 'Räisänen', 'Räty', 'Saarela', 'Saari', 'Saarikoski',
        'Saario', 'Saarinen', 'Saikkonen', 'Sainio', 'Saisio', 'Saksala',
        'Salli', 'Sallinen', 'Salmi', 'Salminen', 'Salo', 'Salonen',
        'Savolainen', 'Seppälä', 'Seppänen', 'Setälä', 'Sillanpää', 'Sinisalo',
        'Sipilä', 'Sipinen', 'Sirkiä', 'Sirviö', 'Soikkeli', 'Soinio',
        'Sormunen', 'Suhonen', 'Sulkanen', 'Suntila', 'Suominen', 'Supinen',
        'Takala', 'Tammisto', 'Tamminen', 'Tanskanen', 'Taipale', 'Tarkka',
        'Tenhunen', 'Tiainen', 'Tiihonen', 'Tiilikainen', 'Tikkanen', 'Timonen',
        'Tirkkonen', 'Toivonen', 'Tuomi', 'Tuominen', 'Tuomioja', 'Tuomola',
        'Turunen', 'Tuuri', 'Ukkola', 'Ulmanen', 'Uosukainen', 'Uotila',
        'Uotinen', 'Uronen', 'Utriainen', 'Utrio', 'Uusitalo', 'Vainikainen',
        'Vainio', 'Vanhala', 'Varala', 'Varis', 'Varvikko', 'Vatanen', 'Veintie',
        'Venäläinen', 'Viitala', 'Viitanen', 'Viljanen', 'Vinni', 'Virolainen',
        'Virtanen', 'Vitikka', 'Voutilainen', 'Vuolle', 'Vuorela', 'Vuorinen',
        'Vuoristo', 'Välkkynen', 'Väänänen', 'Väätäinen', 'Ykspetäjä',
        'Ylikangas', 'Ylikoski', 'Ylimäki', 'Ylinen', 'Yliniemi', 'Ylioja',
        'Ylisuvanto', 'Ylitalo', 'Ylämäki', 'Ylänne', 'Ylätalo', 'Ylönen',
        'Ylöstalo', 'Yrjälä', 'Yrjänä', 'Yrjänäinen', 'Yrjölä', 'Yrjönen'
    )
| mit |
teslaji/homebase | venv/HomeBase/lib/python3.5/site-packages/django/db/migrations/autodetector.py | 41 | 56981 | from __future__ import unicode_literals
import functools
import re
from itertools import chain
from django.conf import settings
from django.db import models
from django.db.migrations import operations
from django.db.migrations.migration import Migration
from django.db.migrations.operations.models import AlterModelOptions
from django.db.migrations.optimizer import MigrationOptimizer
from django.db.migrations.questioner import MigrationQuestioner
from django.db.migrations.utils import (
COMPILED_REGEX_TYPE, RegexObject, get_migration_name_timestamp,
)
from django.utils import six
from .topological_sort import stable_topological_sort
class MigrationAutodetector(object):
"""
Takes a pair of ProjectStates, and compares them to see what the
first would need doing to make it match the second (the second
usually being the project's current state).
Note that this naturally operates on entire projects at a time,
as it's likely that changes interact (for example, you can't
add a ForeignKey without having a migration to add the table it
depends on first). A user interface may offer single-app usage
if it wishes, with the caveat that it may not always be possible.
"""
def __init__(self, from_state, to_state, questioner=None):
self.from_state = from_state
self.to_state = to_state
self.questioner = questioner or MigrationQuestioner()
self.existing_apps = {app for app, model in from_state.models}
def changes(self, graph, trim_to_apps=None, convert_apps=None, migration_name=None):
"""
Main entry point to produce a list of applicable changes.
Takes a graph to base names on and an optional set of apps
to try and restrict to (restriction is not guaranteed)
"""
changes = self._detect_changes(convert_apps, graph)
changes = self.arrange_for_graph(changes, graph, migration_name)
if trim_to_apps:
changes = self._trim_to_apps(changes, trim_to_apps)
return changes
    def deep_deconstruct(self, obj):
        """
        Recursive deconstruction for a field and its arguments.
        Used for full comparison for rename/alter; sometimes a single-level
        deconstruction will not compare correctly.
        """
        # Containers are deconstructed element-wise so nested deconstructible
        # values (defaults, validators, choices, ...) compare structurally.
        if isinstance(obj, list):
            return [self.deep_deconstruct(value) for value in obj]
        elif isinstance(obj, tuple):
            return tuple(self.deep_deconstruct(value) for value in obj)
        elif isinstance(obj, dict):
            return {
                key: self.deep_deconstruct(value)
                for key, value in obj.items()
            }
        elif isinstance(obj, functools.partial):
            # Compare partials by function plus deconstructed args/kwargs.
            return (obj.func, self.deep_deconstruct(obj.args), self.deep_deconstruct(obj.keywords))
        elif isinstance(obj, COMPILED_REGEX_TYPE):
            # Compiled regexes don't compare equal by value; wrap them in a
            # comparable proxy object.
            return RegexObject(obj)
        elif isinstance(obj, type):
            # If this is a type that implements 'deconstruct' as an instance method,
            # avoid treating this as being deconstructible itself - see #22951
            return obj
        elif hasattr(obj, 'deconstruct'):
            deconstructed = obj.deconstruct()
            if isinstance(obj, models.Field):
                # we have a field which also returns a name
                deconstructed = deconstructed[1:]
            path, args, kwargs = deconstructed
            return (
                path,
                [self.deep_deconstruct(value) for value in args],
                {
                    key: self.deep_deconstruct(value)
                    for key, value in kwargs.items()
                },
            )
        else:
            # Plain values (ints, strings, None, ...) are returned as-is.
            return obj
def only_relation_agnostic_fields(self, fields):
"""
Return a definition of the fields that ignores field names and
what related fields actually relate to.
Used for detecting renames (as, of course, the related fields
change during renames)
"""
fields_def = []
for name, field in sorted(fields):
deconstruction = self.deep_deconstruct(field)
if field.remote_field and field.remote_field.model:
del deconstruction[2]['to']
fields_def.append(deconstruction)
return fields_def
    def _detect_changes(self, convert_apps=None, graph=None):
        """
        Returns a dict of migration plans which will achieve the
        change from from_state to to_state. The dict has app labels
        as keys and a list of migrations as values.
        The resulting migrations aren't specially named, but the names
        do matter for dependencies inside the set.
        convert_apps is the list of apps to convert to use migrations
        (i.e. to make initial migrations for, in the usual case)
        graph is an optional argument that, if provided, can help improve
        dependency generation and avoid potential circular dependencies.
        """
        # The first phase is generating all the operations for each app
        # and gathering them into a big per-app list.
        # We'll then go through that list later and order it and split
        # into migrations to resolve dependencies caused by M2Ms and FKs.
        self.generated_operations = {}
        # Prepare some old/new state and model lists, separating
        # proxy models and ignoring unmigrated apps.
        self.old_apps = self.from_state.concrete_apps
        self.new_apps = self.to_state.apps
        self.old_model_keys = []
        self.old_proxy_keys = []
        self.old_unmanaged_keys = []
        self.new_model_keys = []
        self.new_proxy_keys = []
        self.new_unmanaged_keys = []
        # Bucket every old-state model as unmanaged / proxy / concrete,
        # skipping apps without migrations ("real_apps").
        for al, mn in sorted(self.from_state.models.keys()):
            model = self.old_apps.get_model(al, mn)
            if not model._meta.managed:
                self.old_unmanaged_keys.append((al, mn))
            elif al not in self.from_state.real_apps:
                if model._meta.proxy:
                    self.old_proxy_keys.append((al, mn))
                else:
                    self.old_model_keys.append((al, mn))
        # Same bucketing for the new state; convert_apps are included even
        # if they were previously unmigrated.
        for al, mn in sorted(self.to_state.models.keys()):
            model = self.new_apps.get_model(al, mn)
            if not model._meta.managed:
                self.new_unmanaged_keys.append((al, mn))
            elif (
                al not in self.from_state.real_apps or
                (convert_apps and al in convert_apps)
            ):
                if model._meta.proxy:
                    self.new_proxy_keys.append((al, mn))
                else:
                    self.new_model_keys.append((al, mn))
        # Renames have to come first
        self.generate_renamed_models()
        # Prepare lists of fields and generate through model map
        self._prepare_field_lists()
        self._generate_through_model_map()
        # Generate non-rename model operations
        self.generate_deleted_models()
        self.generate_created_models()
        self.generate_deleted_proxies()
        self.generate_created_proxies()
        self.generate_altered_options()
        self.generate_altered_managers()
        # Generate field operations
        self.generate_renamed_fields()
        self.generate_removed_fields()
        self.generate_added_fields()
        self.generate_altered_fields()
        self.generate_altered_unique_together()
        self.generate_altered_index_together()
        self.generate_altered_db_table()
        self.generate_altered_order_with_respect_to()
        # Order the operations, chop them into Migration instances and let
        # the optimizer collapse redundant pairs.
        self._sort_migrations()
        self._build_migration_list(graph)
        self._optimize_migrations()
        return self.migrations
    def _prepare_field_lists(self):
        """
        Prepare field lists, and prepare a list of the fields that used
        through models in the old state so we can make dependencies
        from the through model deletion to the field that uses it.
        """
        # Models/proxies/unmanaged models that exist in both states.
        self.kept_model_keys = set(self.old_model_keys).intersection(self.new_model_keys)
        self.kept_proxy_keys = set(self.old_proxy_keys).intersection(self.new_proxy_keys)
        self.kept_unmanaged_keys = set(self.old_unmanaged_keys).intersection(self.new_unmanaged_keys)
        # Filled in by _generate_through_model_map().
        self.through_users = {}
        # (app_label, model_name, field_name) triples for kept models.
        self.old_field_keys = set()
        self.new_field_keys = set()
        for app_label, model_name in sorted(self.kept_model_keys):
            # Resolve renames so old fields are read from the old model name,
            # but keyed under the new model name for comparison.
            old_model_name = self.renamed_models.get((app_label, model_name), model_name)
            old_model_state = self.from_state.models[app_label, old_model_name]
            new_model_state = self.to_state.models[app_label, model_name]
            self.old_field_keys.update((app_label, model_name, x) for x, y in old_model_state.fields)
            self.new_field_keys.update((app_label, model_name, x) for x, y in new_model_state.fields)
    def _generate_through_model_map(self):
        """
        Through model map generation

        Record, for each explicit (non-auto-created) M2M through model in the
        old state, the (app_label, model_name, field_name) that uses it, so
        through-model deletions can depend on the using field.
        """
        for app_label, model_name in sorted(self.old_model_keys):
            old_model_name = self.renamed_models.get((app_label, model_name), model_name)
            old_model_state = self.from_state.models[app_label, old_model_name]
            for field_name, field in old_model_state.fields:
                old_field = self.old_apps.get_model(app_label, old_model_name)._meta.get_field(field_name)
                # Auto-created through models are handled with the M2M field
                # itself; only explicit ones need tracking here.
                if (hasattr(old_field, "remote_field") and getattr(old_field.remote_field, "through", None) and
                        not old_field.remote_field.through._meta.auto_created):
                    through_key = (
                        old_field.remote_field.through._meta.app_label,
                        old_field.remote_field.through._meta.model_name,
                    )
                    self.through_users[through_key] = (app_label, old_model_name, field_name)
    def _build_migration_list(self, graph=None):
        """
        We need to chop the lists of operations up into migrations with
        dependencies on each other. We do this by stepping up an app's list of
        operations until we find one that has an outgoing dependency that isn't
        in another app's migration yet (hasn't been chopped off its list). We
        then chop off the operations before it into a migration and move onto
        the next app. If we loop back around without doing anything, there's a
        circular dependency (which _should_ be impossible as the operations are
        all split at this point so they can't depend and be depended on).
        """
        self.migrations = {}
        num_ops = sum(len(x) for x in self.generated_operations.values())
        chop_mode = False
        while num_ops:
            # On every iteration, we step through all the apps and see if there
            # is a completed set of operations.
            # If we find that a subset of the operations are complete we can
            # try to chop it off from the rest and continue, but we only
            # do this if we've already been through the list once before
            # without any chopping and nothing has changed.
            for app_label in sorted(self.generated_operations.keys()):
                chopped = []
                dependencies = set()
                for operation in list(self.generated_operations[app_label]):
                    deps_satisfied = True
                    operation_dependencies = set()
                    for dep in operation._auto_deps:
                        is_swappable_dep = False
                        if dep[0] == "__setting__":
                            # We need to temporarily resolve the swappable dependency to prevent
                            # circular references. While keeping the dependency checks on the
                            # resolved model we still add the swappable dependencies.
                            # See #23322
                            resolved_app_label, resolved_object_name = getattr(settings, dep[1]).split('.')
                            original_dep = dep
                            dep = (resolved_app_label, resolved_object_name.lower(), dep[2], dep[3])
                            is_swappable_dep = True
                        if dep[0] != app_label and dep[0] != "__setting__":
                            # External app dependency. See if it's not yet
                            # satisfied.
                            for other_operation in self.generated_operations.get(dep[0], []):
                                if self.check_dependency(other_operation, dep):
                                    deps_satisfied = False
                                    break
                            if not deps_satisfied:
                                break
                            else:
                                if is_swappable_dep:
                                    operation_dependencies.add((original_dep[0], original_dep[1]))
                                elif dep[0] in self.migrations:
                                    operation_dependencies.add((dep[0], self.migrations[dep[0]][-1].name))
                                else:
                                    # If we can't find the other app, we add a first/last dependency,
                                    # but only if we've already been through once and checked everything
                                    if chop_mode:
                                        # If the app already exists, we add a dependency on the last migration,
                                        # as we don't know which migration contains the target field.
                                        # If it's not yet migrated or has no migrations, we use __first__
                                        if graph and graph.leaf_nodes(dep[0]):
                                            operation_dependencies.add(graph.leaf_nodes(dep[0])[0])
                                        else:
                                            operation_dependencies.add((dep[0], "__first__"))
                                    else:
                                        deps_satisfied = False
                    if deps_satisfied:
                        # Move the operation into this app's prospective
                        # migration and pop it off the pending list.
                        chopped.append(operation)
                        dependencies.update(operation_dependencies)
                        self.generated_operations[app_label] = self.generated_operations[app_label][1:]
                    else:
                        break
                # Make a migration! Well, only if there's stuff to put in it
                if dependencies or chopped:
                    if not self.generated_operations[app_label] or chop_mode:
                        subclass = type(str("Migration"), (Migration,), {"operations": [], "dependencies": []})
                        instance = subclass("auto_%i" % (len(self.migrations.get(app_label, [])) + 1), app_label)
                        instance.dependencies = list(dependencies)
                        instance.operations = chopped
                        instance.initial = app_label not in self.existing_apps
                        self.migrations.setdefault(app_label, []).append(instance)
                        chop_mode = False
                    else:
                        # Not a complete set yet; push the operations back.
                        self.generated_operations[app_label] = chopped + self.generated_operations[app_label]
            new_num_ops = sum(len(x) for x in self.generated_operations.values())
            if new_num_ops == num_ops:
                # No progress this pass: first enable chop mode, then give up.
                if not chop_mode:
                    chop_mode = True
                else:
                    raise ValueError("Cannot resolve operation dependencies: %r" % self.generated_operations)
            num_ops = new_num_ops
    def _sort_migrations(self):
        """
        Reorder to make things possible. The order we have already isn't bad,
        but we need to pull a few things around so FKs work nicely inside the
        same app
        """
        for app_label, ops in sorted(self.generated_operations.items()):
            # construct a dependency graph for intra-app dependencies
            # (an edge op -> op2 means op depends on op2)
            dependency_graph = {op: set() for op in ops}
            for op in ops:
                for dep in op._auto_deps:
                    if dep[0] == app_label:
                        for op2 in ops:
                            if self.check_dependency(op2, dep):
                                dependency_graph[op].add(op2)
            # we use a stable sort for deterministic tests & general behavior
            self.generated_operations[app_label] = stable_topological_sort(ops, dependency_graph)
def _optimize_migrations(self):
# Add in internal dependencies among the migrations
for app_label, migrations in self.migrations.items():
for m1, m2 in zip(migrations, migrations[1:]):
m2.dependencies.append((app_label, m1.name))
# De-dupe dependencies
for app_label, migrations in self.migrations.items():
for migration in migrations:
migration.dependencies = list(set(migration.dependencies))
# Optimize migrations
for app_label, migrations in self.migrations.items():
for migration in migrations:
migration.operations = MigrationOptimizer().optimize(migration.operations, app_label=app_label)
    def check_dependency(self, operation, dependency):
        """
        Returns ``True`` if the given operation depends on the given dependency,
        ``False`` otherwise.

        ``dependency`` is a 4-tuple of (app_label, model_name, field_name,
        type) where type is True (created), False (removed), "alter",
        "order_wrt_unset" or "foo_together_change", as encoded by the
        generator methods.
        """
        # Created model
        if dependency[2] is None and dependency[3] is True:
            return (
                isinstance(operation, operations.CreateModel) and
                operation.name_lower == dependency[1].lower()
            )
        # Created field
        elif dependency[2] is not None and dependency[3] is True:
            # Either the field is in the model's initial CreateModel, or it
            # is added later by a dedicated AddField.
            return (
                (
                    isinstance(operation, operations.CreateModel) and
                    operation.name_lower == dependency[1].lower() and
                    any(dependency[2] == x for x, y in operation.fields)
                ) or
                (
                    isinstance(operation, operations.AddField) and
                    operation.model_name_lower == dependency[1].lower() and
                    operation.name_lower == dependency[2].lower()
                )
            )
        # Removed field
        elif dependency[2] is not None and dependency[3] is False:
            return (
                isinstance(operation, operations.RemoveField) and
                operation.model_name_lower == dependency[1].lower() and
                operation.name_lower == dependency[2].lower()
            )
        # Removed model
        elif dependency[2] is None and dependency[3] is False:
            return (
                isinstance(operation, operations.DeleteModel) and
                operation.name_lower == dependency[1].lower()
            )
        # Field being altered
        elif dependency[2] is not None and dependency[3] == "alter":
            return (
                isinstance(operation, operations.AlterField) and
                operation.model_name_lower == dependency[1].lower() and
                operation.name_lower == dependency[2].lower()
            )
        # order_with_respect_to being unset for a field
        elif dependency[2] is not None and dependency[3] == "order_wrt_unset":
            return (
                isinstance(operation, operations.AlterOrderWithRespectTo) and
                operation.name_lower == dependency[1].lower() and
                (operation.order_with_respect_to or "").lower() != dependency[2].lower()
            )
        # Field is removed and part of an index/unique_together
        elif dependency[2] is not None and dependency[3] == "foo_together_change":
            return (
                isinstance(operation, (operations.AlterUniqueTogether,
                                       operations.AlterIndexTogether)) and
                operation.name_lower == dependency[1].lower()
            )
        # Unknown dependency. Raise an error.
        else:
            raise ValueError("Can't handle dependency %r" % (dependency, ))
def add_operation(self, app_label, operation, dependencies=None, beginning=False):
# Dependencies are (app_label, model_name, field_name, create/delete as True/False)
operation._auto_deps = dependencies or []
if beginning:
self.generated_operations.setdefault(app_label, []).insert(0, operation)
else:
self.generated_operations.setdefault(app_label, []).append(operation)
    def swappable_first_key(self, item):
        """
        Sorting key function that places potential swappable models first in
        lists of created models (only real way to solve #22783)
        """
        try:
            model = self.new_apps.get_model(item[0], item[1])
            base_names = [base.__name__ for base in model.__bases__]
            string_version = "%s.%s" % (item[0], item[1])
            # Declared-swappable models, user-model bases, and the configured
            # AUTH_USER_MODEL itself all count as potentially swappable.
            if (
                model._meta.swappable or
                "AbstractUser" in base_names or
                "AbstractBaseUser" in base_names or
                settings.AUTH_USER_MODEL.lower() == string_version.lower()
            ):
                # "___" sorts before regular labels, pushing these first.
                return ("___" + item[0], "___" + item[1])
        except LookupError:
            # Model not found in the new app registry; sort it normally.
            pass
        return item
    def generate_renamed_models(self):
        """
        Finds any renamed models, and generates the operations for them,
        and removes the old entry from the model lists.
        Must be run before other model-level generation.

        A rename is detected when an added model in the same app has the
        same relation-agnostic field definitions as a removed model and the
        questioner confirms the rename.
        """
        self.renamed_models = {}
        self.renamed_models_rel = {}
        added_models = set(self.new_model_keys) - set(self.old_model_keys)
        for app_label, model_name in sorted(added_models):
            model_state = self.to_state.models[app_label, model_name]
            model_fields_def = self.only_relation_agnostic_fields(model_state.fields)
            removed_models = set(self.old_model_keys) - set(self.new_model_keys)
            for rem_app_label, rem_model_name in removed_models:
                # Only consider renames within the same app.
                if rem_app_label == app_label:
                    rem_model_state = self.from_state.models[rem_app_label, rem_model_name]
                    rem_model_fields_def = self.only_relation_agnostic_fields(rem_model_state.fields)
                    if model_fields_def == rem_model_fields_def:
                        if self.questioner.ask_rename_model(rem_model_state, model_state):
                            self.add_operation(
                                app_label,
                                operations.RenameModel(
                                    old_name=rem_model_state.name,
                                    new_name=model_state.name,
                                )
                            )
                            # Record the rename both by key tuple and by
                            # "app_label.ModelName" string for relation fixup.
                            self.renamed_models[app_label, model_name] = rem_model_name
                            renamed_models_rel_key = '%s.%s' % (rem_model_state.app_label, rem_model_state.name)
                            self.renamed_models_rel[renamed_models_rel_key] = '%s.%s' % (
                                model_state.app_label,
                                model_state.name,
                            )
                            # Treat the old model as kept under its new name.
                            self.old_model_keys.remove((rem_app_label, rem_model_name))
                            self.old_model_keys.append((app_label, model_name))
                            break
def generate_created_models(self):
    """
    Find all new models (both managed and unmanaged) and make create
    operations for them as well as separate operations to create any
    foreign key or M2M relationships (we'll optimize these back in later
    if we can).

    We also defer any model options that refer to collections of fields
    that might be deferred (e.g. unique_together, index_together).
    """
    old_keys = set(self.old_model_keys).union(self.old_unmanaged_keys)
    added_models = set(self.new_model_keys) - old_keys
    added_unmanaged_models = set(self.new_unmanaged_keys) - old_keys
    # Swappable-looking models are sorted first (see swappable_first_key)
    # so other created models can safely depend on them.
    all_added_models = chain(
        sorted(added_models, key=self.swappable_first_key, reverse=True),
        sorted(added_unmanaged_models, key=self.swappable_first_key, reverse=True)
    )
    for app_label, model_name in all_added_models:
        model_state = self.to_state.models[app_label, model_name]
        model_opts = self.new_apps.get_model(app_label, model_name)._meta
        # Gather related fields (FKs/M2Ms split out into their own AddField
        # operations after the CreateModel).
        related_fields = {}
        primary_key_rel = None
        for field in model_opts.local_fields:
            if field.remote_field:
                if field.remote_field.model:
                    if field.primary_key:
                        primary_key_rel = field.remote_field.model
                    elif not field.remote_field.parent_link:
                        related_fields[field.name] = field
                # through will be none on M2Ms on swapped-out models;
                # we can treat lack of through as auto_created=True, though.
                if (getattr(field.remote_field, "through", None) and
                        not field.remote_field.through._meta.auto_created):
                    related_fields[field.name] = field
        for field in model_opts.local_many_to_many:
            if field.remote_field.model:
                related_fields[field.name] = field
            if getattr(field.remote_field, "through", None) and not field.remote_field.through._meta.auto_created:
                related_fields[field.name] = field
        # Are there unique/index_together to defer? Popped so CreateModel's
        # options don't reference fields we add separately below.
        unique_together = model_state.options.pop('unique_together', None)
        index_together = model_state.options.pop('index_together', None)
        order_with_respect_to = model_state.options.pop('order_with_respect_to', None)
        # Depend on the deletion of any possible proxy version of us
        dependencies = [
            (app_label, model_name, None, False),
        ]
        # Depend on all bases
        for base in model_state.bases:
            if isinstance(base, six.string_types) and "." in base:
                base_app_label, base_name = base.split(".", 1)
                dependencies.append((base_app_label, base_name, None, True))
        # Depend on the other end of the primary key if it's a relation
        if primary_key_rel:
            dependencies.append((
                primary_key_rel._meta.app_label,
                primary_key_rel._meta.object_name,
                None,
                True
            ))
        # Generate creation operation (related fields stripped; added later).
        self.add_operation(
            app_label,
            operations.CreateModel(
                name=model_state.name,
                fields=[d for d in model_state.fields if d[0] not in related_fields],
                options=model_state.options,
                bases=model_state.bases,
                managers=model_state.managers,
            ),
            dependencies=dependencies,
            beginning=True,
        )
        # Don't add operations which modify the database for unmanaged models
        if not model_opts.managed:
            continue
        # Generate operations for each related field
        for name, field in sorted(related_fields.items()):
            dependencies = self._get_dependecies_for_foreign_key(field)
            # Depend on our own model being created
            dependencies.append((app_label, model_name, None, True))
            # Make operation
            self.add_operation(
                app_label,
                operations.AddField(
                    model_name=model_name,
                    name=name,
                    field=field,
                ),
                dependencies=list(set(dependencies)),
            )
        # Generate other opns
        related_dependencies = [
            (app_label, model_name, name, True)
            for name, field in sorted(related_fields.items())
        ]
        related_dependencies.append((app_label, model_name, None, True))
        if unique_together:
            self.add_operation(
                app_label,
                operations.AlterUniqueTogether(
                    name=model_name,
                    unique_together=unique_together,
                ),
                dependencies=related_dependencies
            )
        if index_together:
            self.add_operation(
                app_label,
                operations.AlterIndexTogether(
                    name=model_name,
                    index_together=index_together,
                ),
                dependencies=related_dependencies
            )
        if order_with_respect_to:
            self.add_operation(
                app_label,
                operations.AlterOrderWithRespectTo(
                    name=model_name,
                    order_with_respect_to=order_with_respect_to,
                ),
                dependencies=[
                    (app_label, model_name, order_with_respect_to, True),
                    (app_label, model_name, None, True),
                ]
            )
def generate_created_proxies(self):
    """
    Makes CreateModel statements for proxy models.
    We use the same statements as that way there's less code duplication,
    but of course for proxy models we can skip all that pointless field
    stuff and just chuck out an operation.
    """
    added = set(self.new_proxy_keys) - set(self.old_proxy_keys)
    for app_label, model_name in sorted(added):
        model_state = self.to_state.models[app_label, model_name]
        # Only proxy models should ever reach this generator.
        assert model_state.options.get("proxy")
        # Depend on the deletion of any possible non-proxy version of us
        dependencies = [
            (app_label, model_name, None, False),
        ]
        # Depend on all bases
        for base in model_state.bases:
            if isinstance(base, six.string_types) and "." in base:
                base_app_label, base_name = base.split(".", 1)
                dependencies.append((base_app_label, base_name, None, True))
        # Generate creation operation (no fields: proxies have none of their own).
        self.add_operation(
            app_label,
            operations.CreateModel(
                name=model_state.name,
                fields=[],
                options=model_state.options,
                bases=model_state.bases,
                managers=model_state.managers,
            ),
            # Depend on the deletion of any possible non-proxy version of us
            dependencies=dependencies,
        )
def generate_deleted_models(self):
    """
    Find all deleted models (managed and unmanaged) and make delete
    operations for them as well as separate operations to delete any
    foreign key or M2M relationships (we'll optimize these back in later
    if we can).

    We also bring forward removal of any model options that refer to
    collections of fields - the inverse of generate_created_models().
    """
    new_keys = set(self.new_model_keys).union(self.new_unmanaged_keys)
    deleted_models = set(self.old_model_keys) - new_keys
    deleted_unmanaged_models = set(self.old_unmanaged_keys) - new_keys
    all_deleted_models = chain(sorted(deleted_models), sorted(deleted_unmanaged_models))
    for app_label, model_name in all_deleted_models:
        model_state = self.from_state.models[app_label, model_name]
        model = self.old_apps.get_model(app_label, model_name)
        if not model._meta.managed:
            # Skip here, no need to handle fields for unmanaged models
            continue
        # Gather related fields (each gets its own RemoveField below).
        related_fields = {}
        for field in model._meta.local_fields:
            if field.remote_field:
                if field.remote_field.model:
                    related_fields[field.name] = field
                # through will be none on M2Ms on swapped-out models;
                # we can treat lack of through as auto_created=True, though.
                if (getattr(field.remote_field, "through", None) and
                        not field.remote_field.through._meta.auto_created):
                    related_fields[field.name] = field
        for field in model._meta.local_many_to_many:
            if field.remote_field.model:
                related_fields[field.name] = field
            if getattr(field.remote_field, "through", None) and not field.remote_field.through._meta.auto_created:
                related_fields[field.name] = field
        # Generate option removal first (options may reference fields we
        # are about to remove).
        unique_together = model_state.options.pop('unique_together', None)
        index_together = model_state.options.pop('index_together', None)
        if unique_together:
            self.add_operation(
                app_label,
                operations.AlterUniqueTogether(
                    name=model_name,
                    unique_together=None,
                )
            )
        if index_together:
            self.add_operation(
                app_label,
                operations.AlterIndexTogether(
                    name=model_name,
                    index_together=None,
                )
            )
        # Then remove each related field
        for name, field in sorted(related_fields.items()):
            self.add_operation(
                app_label,
                operations.RemoveField(
                    model_name=model_name,
                    name=name,
                )
            )
        # Finally, remove the model.
        # This depends on both the removal/alteration of all incoming fields
        # and the removal of all its own related fields, and if it's
        # a through model the field that references it.
        dependencies = []
        for related_object in model._meta.related_objects:
            related_object_app_label = related_object.related_model._meta.app_label
            object_name = related_object.related_model._meta.object_name
            field_name = related_object.field.name
            dependencies.append((related_object_app_label, object_name, field_name, False))
            if not related_object.many_to_many:
                dependencies.append((related_object_app_label, object_name, field_name, "alter"))
        for name, field in sorted(related_fields.items()):
            dependencies.append((app_label, model_name, name, False))
        # We're referenced in another field's through=
        through_user = self.through_users.get((app_label, model_state.name_lower))
        if through_user:
            dependencies.append((through_user[0], through_user[1], through_user[2], False))
        # Finally, make the operation, deduping any dependencies
        self.add_operation(
            app_label,
            operations.DeleteModel(
                name=model_state.name,
            ),
            dependencies=list(set(dependencies)),
        )
def generate_deleted_proxies(self):
    """
    Emits a DeleteModel operation for every proxy model that has
    disappeared between the old and new project states.
    """
    removed_proxies = set(self.old_proxy_keys) - set(self.new_proxy_keys)
    for app_label, model_name in sorted(removed_proxies):
        state = self.from_state.models[app_label, model_name]
        # Only proxy models should ever reach this generator.
        assert state.options.get("proxy")
        self.add_operation(
            app_label,
            operations.DeleteModel(
                name=state.name,
            ),
        )
def generate_renamed_fields(self):
    """
    Works out renamed fields.
    Must run before added/removed field generation, since it moves keys
    from the removed set into the kept set.
    """
    self.renamed_fields = {}
    for app_label, model_name, field_name in sorted(self.new_field_keys - self.old_field_keys):
        old_model_name = self.renamed_models.get((app_label, model_name), model_name)
        old_model_state = self.from_state.models[app_label, old_model_name]
        field = self.new_apps.get_model(app_label, model_name)._meta.get_field(field_name)
        # Scan to see if this is actually a rename!
        field_dec = self.deep_deconstruct(field)
        for rem_app_label, rem_model_name, rem_field_name in sorted(self.old_field_keys - self.new_field_keys):
            if rem_app_label == app_label and rem_model_name == model_name:
                old_field_dec = self.deep_deconstruct(old_model_state.get_field_by_name(rem_field_name))
                # Translate any model renames in the old field's "to" target
                # so a renamed relation target doesn't mask a field rename.
                if field.remote_field and field.remote_field.model and 'to' in old_field_dec[2]:
                    old_rel_to = old_field_dec[2]['to']
                    if old_rel_to in self.renamed_models_rel:
                        old_field_dec[2]['to'] = self.renamed_models_rel[old_rel_to]
                if old_field_dec == field_dec:
                    if self.questioner.ask_rename(model_name, rem_field_name, field_name, field):
                        self.add_operation(
                            app_label,
                            operations.RenameField(
                                model_name=model_name,
                                old_name=rem_field_name,
                                new_name=field_name,
                            )
                        )
                        # Reclassify the key as "kept" so later phases diff
                        # it instead of removing and re-adding it.
                        self.old_field_keys.remove((rem_app_label, rem_model_name, rem_field_name))
                        self.old_field_keys.add((app_label, model_name, field_name))
                        self.renamed_fields[app_label, model_name, field_name] = rem_field_name
                        break
def generate_added_fields(self):
    """
    Fields that have been added.
    """
    newly_added = sorted(self.new_field_keys - self.old_field_keys)
    for added_key in newly_added:
        # added_key is an (app_label, model_name, field_name) triple.
        self._generate_added_field(*added_key)
def _generate_added_field(self, app_label, model_name, field_name):
    """Emit an AddField operation for a single newly added field."""
    field = self.new_apps.get_model(app_label, model_name)._meta.get_field(field_name)
    # Fields that are foreignkeys/m2ms depend on stuff
    dependencies = []
    if field.remote_field and field.remote_field.model:
        dependencies.extend(self._get_dependecies_for_foreign_key(field))
    # You can't just add NOT NULL fields with no default or fields
    # which don't allow empty strings as default.
    preserve_default = True
    time_fields = (models.DateField, models.DateTimeField, models.TimeField)
    if (not field.null and not field.has_default() and
            not field.many_to_many and
            not (field.blank and field.empty_strings_allowed) and
            not (isinstance(field, time_fields) and field.auto_now)):
        # Clone before mutating so the real model field stays untouched.
        field = field.clone()
        if isinstance(field, time_fields) and field.auto_now_add:
            field.default = self.questioner.ask_auto_now_add_addition(field_name, model_name)
        else:
            field.default = self.questioner.ask_not_null_addition(field_name, model_name)
        preserve_default = False
    self.add_operation(
        app_label,
        operations.AddField(
            model_name=model_name,
            name=field_name,
            field=field,
            preserve_default=preserve_default,
        ),
        dependencies=dependencies,
    )
def generate_removed_fields(self):
    """
    Fields that have been removed.
    """
    newly_removed = sorted(self.old_field_keys - self.new_field_keys)
    for removed_key in newly_removed:
        # removed_key is an (app_label, model_name, field_name) triple.
        self._generate_removed_field(*removed_key)
def _generate_removed_field(self, app_label, model_name, field_name):
    """Emit a RemoveField operation for a single removed field."""
    self.add_operation(
        app_label,
        operations.RemoveField(
            model_name=model_name,
            name=field_name,
        ),
        # We might need to depend on the removal of an
        # order_with_respect_to or index/unique_together operation;
        # this is safely ignored if there isn't one
        dependencies=[
            (app_label, model_name, field_name, "order_wrt_unset"),
            (app_label, model_name, field_name, "foo_together_change"),
        ],
    )
def generate_altered_fields(self):
    """
    Fields that have been altered.
    """
    for app_label, model_name, field_name in sorted(self.old_field_keys.intersection(self.new_field_keys)):
        # Did the field change?
        old_model_name = self.renamed_models.get((app_label, model_name), model_name)
        old_field_name = self.renamed_fields.get((app_label, model_name, field_name), field_name)
        old_field = self.old_apps.get_model(app_label, old_model_name)._meta.get_field(old_field_name)
        new_field = self.new_apps.get_model(app_label, model_name)._meta.get_field(field_name)
        # Implement any model renames on relations; these are handled by RenameModel
        # so we need to exclude them from the comparison
        if hasattr(new_field, "remote_field") and getattr(new_field.remote_field, "model", None):
            rename_key = (
                new_field.remote_field.model._meta.app_label,
                new_field.remote_field.model._meta.model_name,
            )
            if rename_key in self.renamed_models:
                new_field.remote_field.model = old_field.remote_field.model
        if hasattr(new_field, "remote_field") and getattr(new_field.remote_field, "through", None):
            rename_key = (
                new_field.remote_field.through._meta.app_label,
                new_field.remote_field.through._meta.model_name,
            )
            if rename_key in self.renamed_models:
                new_field.remote_field.through = old_field.remote_field.through
        old_field_dec = self.deep_deconstruct(old_field)
        new_field_dec = self.deep_deconstruct(new_field)
        if old_field_dec != new_field_dec:
            both_m2m = old_field.many_to_many and new_field.many_to_many
            neither_m2m = not old_field.many_to_many and not new_field.many_to_many
            if both_m2m or neither_m2m:
                # Either both fields are m2m or neither is
                preserve_default = True
                # Going from null=True to null=False with no default needs
                # a one-off default from the user.
                if (old_field.null and not new_field.null and not new_field.has_default() and
                        not new_field.many_to_many):
                    field = new_field.clone()
                    new_default = self.questioner.ask_not_null_alteration(field_name, model_name)
                    if new_default is not models.NOT_PROVIDED:
                        field.default = new_default
                        preserve_default = False
                else:
                    field = new_field
                self.add_operation(
                    app_label,
                    operations.AlterField(
                        model_name=model_name,
                        name=field_name,
                        field=field,
                        preserve_default=preserve_default,
                    )
                )
            else:
                # We cannot alter between m2m and concrete fields
                self._generate_removed_field(app_label, model_name, field_name)
                self._generate_added_field(app_label, model_name, field_name)
def _get_dependecies_for_foreign_key(self, field):
    """
    Return the dependency tuples an FK/M2M field needs: its target model
    (or the swappable setting standing in for it) and any explicit
    through model.

    NOTE(review): the name is historically misspelled ("dependecies");
    it is kept as-is because the other generator methods call it by this
    exact name.
    """
    # Account for FKs to swappable models
    swappable_setting = getattr(field, 'swappable_setting', None)
    if swappable_setting is not None:
        dep_app_label = "__setting__"
        dep_object_name = swappable_setting
    else:
        dep_app_label = field.remote_field.model._meta.app_label
        dep_object_name = field.remote_field.model._meta.object_name
    dependencies = [(dep_app_label, dep_object_name, None, True)]
    # Explicit (non-auto-created) through models must exist first too.
    if getattr(field.remote_field, "through", None) and not field.remote_field.through._meta.auto_created:
        dependencies.append((
            field.remote_field.through._meta.app_label,
            field.remote_field.through._meta.object_name,
            None,
            True,
        ))
    return dependencies
def _generate_altered_foo_together(self, operation):
    """
    Shared implementation for unique_together / index_together changes;
    `operation` is the operation class to instantiate (it provides
    option_name).
    """
    option_name = operation.option_name
    for app_label, model_name in sorted(self.kept_model_keys):
        old_model_name = self.renamed_models.get((app_label, model_name), model_name)
        old_model_state = self.from_state.models[app_label, old_model_name]
        new_model_state = self.to_state.models[app_label, model_name]
        # We run the old version through the field renames to account for those
        old_value = old_model_state.options.get(option_name) or set()
        if old_value:
            old_value = {
                tuple(
                    self.renamed_fields.get((app_label, model_name, n), n)
                    for n in unique
                )
                for unique in old_value
            }
        new_value = new_model_state.options.get(option_name) or set()
        if new_value:
            new_value = set(new_value)
        if old_value != new_value:
            # The new option must wait for any FK fields it mentions.
            dependencies = []
            for foo_togethers in new_value:
                for field_name in foo_togethers:
                    field = self.new_apps.get_model(app_label, model_name)._meta.get_field(field_name)
                    if field.remote_field and field.remote_field.model:
                        dependencies.extend(self._get_dependecies_for_foreign_key(field))
            self.add_operation(
                app_label,
                operation(
                    name=model_name,
                    **{option_name: new_value}
                ),
                dependencies=dependencies,
            )
def generate_altered_unique_together(self):
    """Emit AlterUniqueTogether operations via the shared helper."""
    self._generate_altered_foo_together(operations.AlterUniqueTogether)
def generate_altered_index_together(self):
    """Emit AlterIndexTogether operations via the shared helper."""
    self._generate_altered_foo_together(operations.AlterIndexTogether)
def generate_altered_db_table(self):
    """Emit AlterModelTable for every kept model whose db_table changed."""
    candidates = self.kept_model_keys.union(self.kept_proxy_keys, self.kept_unmanaged_keys)
    for app_label, model_name in sorted(candidates):
        before_name = self.renamed_models.get((app_label, model_name), model_name)
        before_state = self.from_state.models[app_label, before_name]
        after_state = self.to_state.models[app_label, model_name]
        old_table = before_state.options.get('db_table')
        new_table = after_state.options.get('db_table')
        if old_table == new_table:
            continue
        self.add_operation(
            app_label,
            operations.AlterModelTable(
                name=model_name,
                table=new_table,
            )
        )
def generate_altered_options(self):
    """
    Works out if any non-schema-affecting options have changed and
    makes an operation to represent them in state changes (in case
    Python code in migrations needs them).
    """
    models_to_check = self.kept_model_keys.union(
        self.kept_proxy_keys,
        self.kept_unmanaged_keys,
        # unmanaged converted to managed
        set(self.old_unmanaged_keys).intersection(self.new_model_keys),
        # managed converted to unmanaged
        set(self.old_model_keys).intersection(self.new_unmanaged_keys),
    )
    for app_label, model_name in sorted(models_to_check):
        old_model_name = self.renamed_models.get((app_label, model_name), model_name)
        before_state = self.from_state.models[app_label, old_model_name]
        after_state = self.to_state.models[app_label, model_name]
        # Only compare the options AlterModelOptions actually tracks.
        old_options = {
            key: value for key, value in before_state.options.items()
            if key in AlterModelOptions.ALTER_OPTION_KEYS
        }
        new_options = {
            key: value for key, value in after_state.options.items()
            if key in AlterModelOptions.ALTER_OPTION_KEYS
        }
        if old_options == new_options:
            continue
        self.add_operation(
            app_label,
            operations.AlterModelOptions(
                name=model_name,
                options=new_options,
            )
        )
def generate_altered_order_with_respect_to(self):
    """Emit AlterOrderWithRespectTo for kept models where the option changed."""
    for app_label, model_name in sorted(self.kept_model_keys):
        old_model_name = self.renamed_models.get((app_label, model_name), model_name)
        old_model_state = self.from_state.models[app_label, old_model_name]
        new_model_state = self.to_state.models[app_label, model_name]
        if (old_model_state.options.get("order_with_respect_to") !=
                new_model_state.options.get("order_with_respect_to")):
            # Make sure it comes second if we're adding
            # (removal dependency is part of RemoveField)
            dependencies = []
            if new_model_state.options.get("order_with_respect_to"):
                dependencies.append((
                    app_label,
                    model_name,
                    new_model_state.options["order_with_respect_to"],
                    True,
                ))
            # Actually generate the operation
            self.add_operation(
                app_label,
                operations.AlterOrderWithRespectTo(
                    name=model_name,
                    order_with_respect_to=new_model_state.options.get('order_with_respect_to'),
                ),
                dependencies=dependencies,
            )
def generate_altered_managers(self):
    """Emit AlterModelManagers for kept models whose managers changed."""
    for app_label, model_name in sorted(self.kept_model_keys):
        before_name = self.renamed_models.get((app_label, model_name), model_name)
        before_state = self.from_state.models[app_label, before_name]
        after_state = self.to_state.models[app_label, model_name]
        if before_state.managers == after_state.managers:
            continue
        self.add_operation(
            app_label,
            operations.AlterModelManagers(
                name=model_name,
                managers=after_state.managers,
            )
        )
def arrange_for_graph(self, changes, graph, migration_name=None):
    """
    Takes in a result from changes() and a MigrationGraph,
    and fixes the names and dependencies of the changes so they
    extend the graph from the leaf nodes for each app.
    """
    leaves = graph.leaf_nodes()
    name_map = {}
    # list() because changes may be mutated (del) inside the loop.
    for app_label, migrations in list(changes.items()):
        if not migrations:
            continue
        # Find the app label's current leaf node
        app_leaf = None
        for leaf in leaves:
            if leaf[0] == app_label:
                app_leaf = leaf
                break
        # Do they want an initial migration for this app?
        if app_leaf is None and not self.questioner.ask_initial(app_label):
            # They don't; map these migrations to the __first__ sentinel
            # so other apps' dependencies still resolve.
            for migration in migrations:
                name_map[(app_label, migration.name)] = (app_label, "__first__")
            del changes[app_label]
            continue
        # Work out the next number in the sequence
        if app_leaf is None:
            next_number = 1
        else:
            next_number = (self.parse_number(app_leaf[1]) or 0) + 1
        # Name each migration
        for i, migration in enumerate(migrations):
            if i == 0 and app_leaf:
                # The first new migration chains onto the existing leaf.
                migration.dependencies.append(app_leaf)
            if i == 0 and not app_leaf:
                new_name = "0001_%s" % migration_name if migration_name else "0001_initial"
            else:
                # Suggested names are capped at 100 chars to stay filesystem-safe.
                new_name = "%04i_%s" % (
                    next_number,
                    migration_name or self.suggest_name(migration.operations)[:100],
                )
            name_map[(app_label, migration.name)] = (app_label, new_name)
            next_number += 1
            migration.name = new_name
    # Now fix dependencies to point at the final names.
    for app_label, migrations in changes.items():
        for migration in migrations:
            migration.dependencies = [name_map.get(d, d) for d in migration.dependencies]
    return changes
def _trim_to_apps(self, changes, app_labels):
    """
    Takes changes from arrange_for_graph and a set of app labels and
    returns a modified set of changes which trims out as many migrations
    that are not in app_labels as possible.
    Note that some other migrations may still be present, as they may be
    required dependencies.
    """
    # First pass: record, per app, which other apps its migrations depend on.
    deps_by_app = {}
    for label, migrations in changes.items():
        for migration in migrations:
            for dep_label, _name in migration.dependencies:
                deps_by_app.setdefault(label, set()).add(dep_label)
    # Expand the requested set to a fixed point over those dependencies.
    needed = set(app_labels)
    previous = None
    while previous != needed:
        previous = set(needed)
        for label in list(needed):
            needed.update(deps_by_app.get(label, set()))
    # Drop every app that is not (transitively) required.
    for label in list(changes):
        if label not in needed:
            del changes[label]
    return changes
@classmethod
def suggest_name(cls, ops):
    """
    Given a set of operations, suggests a name for the migration
    they might represent. Names are not guaranteed to be unique,
    but we put some effort in to the fallback name to avoid VCS conflicts
    if we can.
    """
    if len(ops) == 1:
        op = ops[0]
        if isinstance(op, operations.CreateModel):
            return op.name_lower
        if isinstance(op, operations.DeleteModel):
            return "delete_%s" % op.name_lower
        if isinstance(op, operations.AddField):
            return "%s_%s" % (op.model_name_lower, op.name_lower)
        if isinstance(op, operations.RemoveField):
            return "remove_%s_%s" % (op.model_name_lower, op.name_lower)
    elif len(ops) > 1 and all(isinstance(o, operations.CreateModel) for o in ops):
        # Several model creations: join their names deterministically.
        return "_".join(sorted(o.name_lower for o in ops))
    # Fallback: timestamp-based name, which also avoids VCS conflicts.
    return "auto_%s" % get_migration_name_timestamp()
@classmethod
def parse_number(cls, name):
    """
    Given a migration name, tries to extract a number from the
    beginning of it. If no number is found, returns None.
    """
    match = re.match(r'^\d+', name)
    return int(match.group()) if match else None
| gpl-3.0 |
SkySkimmer/coq | doc/tools/coqrst/notations/TacticNotationsLexer.py | 6 | 3961 | # Generated from TacticNotations.g by ANTLR 4.7.2
from antlr4 import *
from io import StringIO
from typing.io import TextIO
import sys
def serializedATN():
    # Auto-generated by ANTLR 4.7.2 from TacticNotations.g; do not edit by
    # hand. Returns the serialized ATN (lexer state machine) as one string.
    with StringIO() as buf:
        buf.write("\3\u608b\ua72a\u8133\ub9ed\u417c\u3be7\u7786\u5964\2\f")
        buf.write("f\b\1\4\2\t\2\4\3\t\3\4\4\t\4\4\5\t\5\4\6\t\6\4\7\t\7")
        buf.write("\4\b\t\b\4\t\t\t\4\n\t\n\4\13\t\13\3\2\3\2\3\2\3\3\3\3")
        buf.write("\3\3\3\3\3\3\3\3\5\3!\n\3\3\4\3\4\3\5\3\5\3\6\3\6\3\6")
        buf.write("\3\6\3\6\3\6\3\6\3\6\3\6\3\6\3\6\3\6\3\6\3\6\3\6\3\6\3")
        buf.write("\6\3\6\3\6\3\6\3\6\3\6\3\6\3\6\3\6\3\6\3\6\3\6\3\6\3\6")
        buf.write("\3\6\5\6F\n\6\3\7\3\7\3\b\3\b\6\bL\n\b\r\b\16\bM\5\bP")
        buf.write("\n\b\3\t\3\t\5\tT\n\t\3\t\6\tW\n\t\r\t\16\tX\3\n\3\n\3")
        buf.write("\n\6\n^\n\n\r\n\16\n_\3\13\6\13c\n\13\r\13\16\13d\2\2")
        buf.write("\f\3\3\5\4\7\5\t\6\13\7\r\b\17\t\21\n\23\13\25\f\3\2\5")
        buf.write("\4\2BBaa\6\2\"\"BBaa}\177\5\2\62;C\\c|\2v\2\3\3\2\2\2")
        buf.write("\2\5\3\2\2\2\2\7\3\2\2\2\2\t\3\2\2\2\2\13\3\2\2\2\2\r")
        buf.write("\3\2\2\2\2\17\3\2\2\2\2\21\3\2\2\2\2\23\3\2\2\2\2\25\3")
        buf.write("\2\2\2\3\27\3\2\2\2\5 \3\2\2\2\7\"\3\2\2\2\t$\3\2\2\2")
        buf.write("\13E\3\2\2\2\rG\3\2\2\2\17O\3\2\2\2\21Q\3\2\2\2\23Z\3")
        buf.write("\2\2\2\25b\3\2\2\2\27\30\7}\2\2\30\31\7~\2\2\31\4\3\2")
        buf.write("\2\2\32\33\7}\2\2\33!\7-\2\2\34\35\7}\2\2\35!\7,\2\2\36")
        buf.write("\37\7}\2\2\37!\7A\2\2 \32\3\2\2\2 \34\3\2\2\2 \36\3\2")
        buf.write("\2\2!\6\3\2\2\2\"#\7}\2\2#\b\3\2\2\2$%\7\177\2\2%\n\3")
        buf.write("\2\2\2&\'\7\'\2\2\'F\7}\2\2()\7\'\2\2)F\7\177\2\2*+\7")
        buf.write("\'\2\2+F\7~\2\2,-\7b\2\2-.\7\'\2\2.F\7}\2\2/\60\7B\2\2")
        buf.write("\60\61\7\'\2\2\61F\7}\2\2\62\63\7\'\2\2\63\64\7~\2\2\64")
        buf.write("F\7/\2\2\65\66\7\'\2\2\66\67\7~\2\2\678\7/\2\28F\7@\2")
        buf.write("\29:\7\'\2\2:;\7~\2\2;F\7~\2\2<=\7\'\2\2=>\7~\2\2>?\7")
        buf.write("~\2\2?F\7~\2\2@A\7\'\2\2AB\7~\2\2BC\7~\2\2CD\7~\2\2DF")
        buf.write("\7~\2\2E&\3\2\2\2E(\3\2\2\2E*\3\2\2\2E,\3\2\2\2E/\3\2")
        buf.write("\2\2E\62\3\2\2\2E\65\3\2\2\2E9\3\2\2\2E<\3\2\2\2E@\3\2")
        buf.write("\2\2F\f\3\2\2\2GH\7~\2\2H\16\3\2\2\2IP\t\2\2\2JL\n\3\2")
        buf.write("\2KJ\3\2\2\2LM\3\2\2\2MK\3\2\2\2MN\3\2\2\2NP\3\2\2\2O")
        buf.write("I\3\2\2\2OK\3\2\2\2P\20\3\2\2\2QV\7B\2\2RT\7a\2\2SR\3")
        buf.write("\2\2\2ST\3\2\2\2TU\3\2\2\2UW\t\4\2\2VS\3\2\2\2WX\3\2\2")
        buf.write("\2XV\3\2\2\2XY\3\2\2\2Y\22\3\2\2\2Z[\7a\2\2[]\7a\2\2\\")
        buf.write("^\t\4\2\2]\\\3\2\2\2^_\3\2\2\2_]\3\2\2\2_`\3\2\2\2`\24")
        buf.write("\3\2\2\2ac\7\"\2\2ba\3\2\2\2cd\3\2\2\2db\3\2\2\2de\3\2")
        buf.write("\2\2e\26\3\2\2\2\13\2 EMOSX_d\2")
        return buf.getvalue()
class TacticNotationsLexer(Lexer):
    # Auto-generated by ANTLR 4.7.2 from TacticNotations.g; do not edit by
    # hand — regenerate from the grammar instead.

    atn = ATNDeserializer().deserialize(serializedATN())

    decisionsToDFA = [ DFA(ds, i) for i, ds in enumerate(atn.decisionToState) ]

    # Token type constants; values must match the serialized ATN above.
    LALT = 1
    LGROUP = 2
    LBRACE = 3
    RBRACE = 4
    ESCAPED = 5
    PIPE = 6
    ATOM = 7
    ID = 8
    SUB = 9
    WHITESPACE = 10

    channelNames = [ u"DEFAULT_TOKEN_CHANNEL", u"HIDDEN" ]

    modeNames = [ "DEFAULT_MODE" ]

    literalNames = [ "<INVALID>",
            "'{|'", "'{'", "'}'", "'|'" ]

    symbolicNames = [ "<INVALID>",
            "LALT", "LGROUP", "LBRACE", "RBRACE", "ESCAPED", "PIPE", "ATOM",
            "ID", "SUB", "WHITESPACE" ]

    ruleNames = [ "LALT", "LGROUP", "LBRACE", "RBRACE", "ESCAPED", "PIPE",
                  "ATOM", "ID", "SUB", "WHITESPACE" ]

    grammarFileName = "TacticNotations.g"

    def __init__(self, input=None, output:TextIO = sys.stdout):
        super().__init__(input, output)
        self.checkVersion("4.7.2")
        self._interp = LexerATNSimulator(self, self.atn, self.decisionsToDFA, PredictionContextCache())
        self._actions = None
        self._predicates = None
| lgpl-2.1 |
donspaulding/adspygoogle | examples/adspygoogle/dfp/v201206/get_all_roles.py | 2 | 1477 | #!/usr/bin/python
#
# Copyright 2012 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""This code example gets all roles. This sample can be used to determine which
role id is needed when getting and creating users."""
__author__ = 'api.shamjeff@gmail.com (Jeff Sham)'
# Locate the client library. If module was installed via "setup.py" script, then
# the following two lines are not needed.
import os
import sys
sys.path.insert(0, os.path.join('..', '..', '..', '..'))
# Import appropriate classes from the client library.
from adspygoogle import DfpClient
# Initialize client object. The path presumably points at the directory
# holding the client library's stored auth/config files (four levels up
# from this example) — TODO confirm against the library's docs.
client = DfpClient(path=os.path.join('..', '..', '..', '..'))
# Initialize appropriate service.
user_service = client.GetService('UserService', version='v201206')
# Get all roles.
roles = user_service.GetAllRoles()
# Display results: one line per role with its id and name.
for role in roles:
    print ('Role with id \'%s\' and name \'%s\' was found.'
           % (role['id'], role['name']))
| apache-2.0 |
rlr/fjord | fjord/base/browsers.py | 2 | 5980 | import re
from collections import namedtuple
# From http://msdn.microsoft.com/en-us/library/ms537503(v=vs.85).aspx
# Maps the OS token found in a Windows user-agent string to a
# (platform, platform_version) pair.
WINDOWS_VERSION = {
    'Windows NT 10.0': ('Windows', '10'),
    'Windows NT 6.4': ('Windows', '10'),
    'Windows NT 6.3': ('Windows', '8.1'),
    'Windows NT 6.2': ('Windows', '8'),
    'Windows NT 6.1': ('Windows', '7'),
    'Windows NT 6.0': ('Windows', 'Vista'),
    'Windows NT 5.2': ('Windows', 'XP'),
    'Windows NT 5.1': ('Windows', 'XP'),
    'Windows NT 5.01': ('Windows', '2000'),
    'Windows NT 5.0': ('Windows', '2000'),
    'Windows NT 4.0': ('Windows', 'NT'),
    'Windows 98; Win 9x 4.90': ('Windows', 'ME'),
    'Windows 98': ('Windows', '98'),
    'Windows 95': ('Windows', '95'),
}

# Maps a Gecko engine version to the Firefox OS release that shipped it.
GECKO_TO_FIREFOXOS_VERSION = {
    '18.0': '1.0',
    '18.1': '1.1',
    '26.0': '1.2',
    '28.0': '1.3'
}

# Sentinel meaning "value could not be determined".
UNKNOWN = ''

# Parsed user-agent result: browser name and version, platform name and
# version, and whether the UA looks mobile (True / False / None = unknown).
Browser = namedtuple('Browser', [
    'browser', 'browser_version', 'platform', 'platform_version',
    'mobile'])
def parse_ua(ua):
"""Parse user agents from Firefox and friends.
:arg ua: a User-Agent string
:returns: Browser namedtuple with attributes:
- browser: "Unknown" or a browser like "Firefox", "Iceweasel",
"Firefox for Android", etc.
- browser_version: "Unknown" or a 3 dotted section like "14.0.1",
"4.0.0", etc.
- platform: "Unknown" or a platform like "Windows", "OS X",
"Linux", "Android", etc.
- platform_version: "Unknown or something like "Vista" or "7" for
Windows or something like "10.6.2" or "10.4.0" for OSX.
- mobile: True if the user agent represents a mobile browser.
False if it's definitely not a mobile browser. None if we
don't know.
.. Note::
This should never throw an exception. If it does, that's
a bug.
"""
mobile = 'mobile' in ua.lower()
# Unknown values are UNKNOWN. If we are sure something is mobile,
# say True. Otherwise say None.
no_browser = Browser(UNKNOWN, UNKNOWN, UNKNOWN, UNKNOWN,
mobile or None)
if 'firefox' not in ua.lower():
return no_browser
# For reference, a normal Firefox on android user agent looks like
# Mozilla/5.0 (Android; Mobile; rv:14.0) Gecko/14.0 Firefox/14.0.2
# Extract the part within the parenthesis, and the part after the
# parenthesis. Inside has information about the platform, and
# outside has information about the browser.
match = re.match(r'^Mozilla[^(]+\(([^)]+)\) (.+)', ua)
if match is None:
# If match is None here, then this is not a UA we can infer
# browser information from, so we return unknown.
return no_browser
# The part in parenthesis is semi-colon seperated
# Result: ['Android', 'Mobile', 'rv:14.0']
platform_parts = [p.strip() for p in match.group(1).split(';')]
# The rest is space seperated A/B pairs. Pull out both sides of
# the slash.
# Result: [['Gecko', '14.0'], ['Firefox', '14.0.2']]
browser_parts = [p.split('/') for p in match.group(2).split(' ')]
browser = 'Firefox'
browser_version = UNKNOWN
for part in browser_parts:
if 'Firefox' in part and len(part) > 1:
browser_version = part[1]
elif 'Iceweasel' in part:
browser = 'Iceweasel'
platform = platform_parts.pop(0)
platform_version = UNKNOWN
while platform in ['X11', 'Ubuntu', 'U']:
platform = platform_parts.pop(0)
if platform == 'Windows':
# Firefox 3.6 has the Windows version later in the parts. So
# skim the parts to find a version that's in WINDOWS_VERSION.
# If we don't find anything, just leave things as is.
possible_platforms = [p for p in platform_parts
if p in WINDOWS_VERSION]
if possible_platforms:
platform = possible_platforms[0]
if platform in WINDOWS_VERSION.keys():
platform, platform_version = WINDOWS_VERSION[platform]
elif platform.startswith('Linux'):
platform = 'Linux'
elif platform.startswith('FreeBSD'):
platform = 'FreeBSD'
elif platform in ('OS X', 'Macintosh'):
for part in platform_parts:
if 'OS X' in part:
# If OS X is in one of the parts, then we normalize
# the platform to 'OS X'.
platform = 'OS X'
platform_version = part.split(' ')[-1]
break
if platform_version:
platform_version = platform_version.replace('_', '.')
elif browser == 'Firefox' and platform == 'Android':
browser = 'Firefox for Android'
# Firefox OS doesn't list a platform because "The web is the
# platform." It is the only platform to do this, so we can still
# uniquely identify it.
if platform == 'Mobile':
platform = 'Firefox OS'
browser = 'Firefox OS'
# Set versions to UNKNOWN. This handles the case where the
# version of Gecko doesn't line up with a Firefox OS product
# release.
browser_version = UNKNOWN
platform_version = UNKNOWN
# Now try to infer the Firefox OS version from the Gecko
# version. If we can, then we set the browser_version and
# platform_version.
for part in browser_parts:
if 'Gecko' in part and len(part) > 1:
fxos_version = GECKO_TO_FIREFOXOS_VERSION.get(part[1])
if fxos_version is not None:
browser_version = fxos_version
platform_version = fxos_version
break
# Make sure browser_version is at least x.y for non-Firefox OS
# browsers.
if (browser != 'Firefox OS'
and browser_version != UNKNOWN
and browser_version.count('.') < 1):
browser_version += '.0'
return Browser(browser, browser_version, platform, platform_version,
mobile)
| bsd-3-clause |
ewbankkit/cloud-custodian | tools/c7n_gcp/tests/test_bigquery.py | 5 | 4353 | # Copyright 2018 Capital One Services, LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from gcp_common import BaseTest, event_data
class BigQueryDataSetTest(BaseTest):
    """Tests for the gcp.bq-dataset resource."""

    def test_query(self):
        """A dataset resolved from an audit event exposes its metadata."""
        session_factory = self.replay_flight_data('bq-dataset-query')
        policy = self.load_policy(
            {'name': 'bq-get', 'resource': 'gcp.bq-dataset'},
            session_factory=session_factory)

        event = event_data('bq-dataset-create.json')
        dataset = policy.resource_manager.get_resource(event)

        self.assertEqual('devxyz',
                         dataset['datasetReference']['datasetId'])
        self.assertIn('access', dataset)
        self.assertEqual({'env': 'dev'}, dataset['labels'])
class BigQueryJobTest(BaseTest):
    """Tests for the gcp.bq-job resource."""

    def test_query(self):
        """Listing jobs returns the single recorded DONE job."""
        project_id = 'cloud-custodian'
        session_factory = self.replay_flight_data(
            'bq-job-query', project_id=project_id)
        policy = self.load_policy(
            {'name': 'bq-job-get', 'resource': 'gcp.bq-job'},
            session_factory=session_factory)

        resources = policy.run()

        self.assertEqual(1, len(resources))
        job = resources[0]
        self.assertEqual('DONE', job['status']['state'])
        self.assertEqual('US', job['jobReference']['location'])
        self.assertEqual(project_id, job['jobReference']['projectId'])

    def test_job_get(self):
        """An InsertJob audit event resolves to the matching job."""
        project_id = 'cloud-custodian'
        job_id = 'bquxjob_4c28c9a7_16958c2791d'
        location = 'US'
        session_factory = self.replay_flight_data(
            'bq-job-get', project_id=project_id)
        policy = self.load_policy(
            {'name': 'bq-job-get',
             'resource': 'gcp.bq-job',
             'mode': {
                 'type': 'gcp-audit',
                 'methods': [
                     'google.cloud.bigquery.v2.JobService.InsertJob'],
             }},
            session_factory=session_factory)

        jobs = policy.get_execution_mode().run(
            event_data('bq-job-create.json'), None)

        reference = jobs[0]['jobReference']
        self.assertEqual(job_id, reference['jobId'])
        self.assertEqual(location, reference['location'])
        self.assertEqual(project_id, reference['projectId'])
        self.assertEqual("{}:{}.{}".format(project_id, location, job_id),
                         jobs[0]['id'])
class BigQueryProjectTest(BaseTest):
    """Tests for the gcp.bq-project resource."""

    def test_query(self):
        """Listing projects returns the single recorded project."""
        session_factory = self.replay_flight_data('bq-project-query')
        policy = self.load_policy(
            {'name': 'bq-get', 'resource': 'gcp.bq-project'},
            session_factory=session_factory)

        resources = policy.run()

        self.assertEqual(1, len(resources))
        project = resources[0]
        self.assertEqual('test project', project['friendlyName'])
        self.assertEqual('cloud-custodian', project['id'])
class BigQueryTableTest(BaseTest):
    """Tests for the gcp.bq-table resource."""

    def test_query(self):
        """Listing tables returns resources with table metadata."""
        project_id = 'cloud-custodian'
        session_factory = self.replay_flight_data(
            'bq-table-query', project_id=project_id)
        policy = self.load_policy(
            {'name': 'bq-table-query', 'resource': 'gcp.bq-table'},
            session_factory=session_factory)

        resources = policy.run()

        table = resources[0]
        self.assertIn('tableReference', table)
        self.assertEqual('TABLE', table['type'])

    def test_table_get(self):
        """An InsertTable audit event resolves to the matching table."""
        project_id = 'cloud-custodian'
        session_factory = self.replay_flight_data(
            'bq-table-get', project_id=project_id)
        policy = self.load_policy(
            {'name': 'bq-table-get',
             'resource': 'gcp.bq-table',
             'mode': {
                 'type': 'gcp-audit',
                 'methods': [
                     'google.cloud.bigquery.v2.TableService.InsertTable'],
             }},
            session_factory=session_factory)

        tables = policy.get_execution_mode().run(
            event_data('bq-table-create.json'), None)

        self.assertIn('tableReference', tables[0])
| apache-2.0 |
mcanthony/nupic | tests/integration/nupic/algorithms/knn_classifier_test/categories_test.py | 35 | 3887 | #!/usr/bin/env python
# ----------------------------------------------------------------------
# Numenta Platform for Intelligent Computing (NuPIC)
# Copyright (C) 2014, Numenta, Inc. Unless you have purchased from
# Numenta, Inc. a separate commercial license for this software code, the
# following terms and conditions apply:
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero Public License version 3 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
# See the GNU Affero Public License for more details.
#
# You should have received a copy of the GNU Affero Public License
# along with this program. If not, see http://www.gnu.org/licenses.
#
# http://numenta.org/licenses/
# ----------------------------------------------------------------------
import logging
import unittest2 as unittest
import numpy
from nupic.algorithms.KNNClassifier import KNNClassifier
LOGGER = logging.getLogger(__name__)
class KNNCategoriesTest(unittest.TestCase):
  """Checks that the KNN classifier copes with many disjoint categories."""


  def testCategories(self):
    # Seed the RNG so the simulated categories and patterns are
    # reproducible from run to run.
    numpy.random.seed(42)

    failureReport, _classifier = simulateCategories()
    self.assertEqual(0, len(failureReport),
                     "Tests failed: \n" + failureReport)
def simulateCategories(numSamples=100, numDimensions=500):
  """Exercise the sparse KNN classifier on many disjoint categories.

  Returns a (failures, classifier) pair, where ``failures`` is an empty
  string when every check passed (otherwise it lists the failed checks)
  and ``classifier`` is the trained KNNClassifier.
  """
  failures = ""
  LOGGER.info("Testing the sparse KNN Classifier on many disjoint categories")

  classifier = KNNClassifier(k=1, distanceNorm=1.0, useSparseMemory=True)

  def randomCategory():
    # Categories are even numbers in [50, 148].
    return 2 * numpy.random.randint(0, 50) + 50

  # Train on randomly selected categories.
  for _ in range(numSamples):
    category = randomCategory()
    classifier.learn(createPattern(category, numDimensions), category)

  # Guarantee that every category has been seen at least once.
  for i in range(50):
    category = 2 * i + 50
    classifier.learn(createPattern(category, numDimensions), category)

  # Inference should recover the category each pattern was built from.
  mistakes = 0
  for _ in range(numSamples):
    category = randomCategory()
    pattern = createPattern(category, numDimensions)

    inferredCategory, _kir, _kd, _kcd = classifier.infer(pattern)
    if inferredCategory != category:
      LOGGER.info("Mistake with %s %s %s %s %s",
                  pattern[pattern.nonzero()],
                  "mapped to category", inferredCategory,
                  "instead of category", category)
      LOGGER.info(" %s", pattern.nonzero())
      mistakes += 1
  if mistakes:
    failures += "Failure in handling non-consecutive category indices\n"

  # closestTrainingPattern should hand back a stored pattern from the
  # requested category.
  mistakes = 0
  for _ in range(10):
    category = randomCategory()
    pattern = createPattern(category, numDimensions)

    closest = classifier.closestTrainingPattern(pattern, category)
    if category not in closest.nonzero()[0]:
      LOGGER.info("Mistake %s %s", closest.nonzero(), pattern.nonzero())
      LOGGER.info("%s %s",
                  closest[closest.nonzero()], pattern[pattern.nonzero()])
      mistakes += 1
  if mistakes:
    failures += "Failure in closestTrainingPattern method\n"

  return failures, classifier
def createPattern(c, numDimensions):
  """
  Create a sparse pattern from category c with the given number of dimensions.
  The pattern is created by setting element c to be a high random number.
  Elements c-1 and c+1 (when they exist) are set to low random numbers.
  numDimensions must be > c.

  :param c: (int) category index; also the index of the pattern's peak value.
  :param numDimensions: (int) length of the returned vector.
  :returns: (numpy.ndarray) dense vector with at most three nonzero entries.
  """
  v = numpy.zeros(numDimensions)
  # Peak value in [10, 15).
  v[c] = 5*numpy.random.random() + 10
  # Guard the upper neighbor: when c is the last index, c+1 is out of
  # bounds and previously raised IndexError.
  if c + 1 < numDimensions:
    v[c+1] = numpy.random.random()
  if c > 0:
    v[c-1] = numpy.random.random()
  return v
if __name__ == "__main__":
unittest.main()
| agpl-3.0 |
zkraime/osf.io | website/identifiers/metadata.py | 51 | 1953 | # -*- coding: utf-8 -*-
import lxml.etree
import lxml.builder
# XML namespaces for the DataCite 3 metadata kernel.
NAMESPACE = 'http://datacite.org/schema/kernel-3'
XSI = 'http://www.w3.org/2001/XMLSchema-instance'
# Value for xsi:schemaLocation: the namespace URI followed by the schema URL.
SCHEMA_LOCATION = 'http://datacite.org/schema/kernel-3 http://schema.datacite.org/meta/kernel-3/metadata.xsd'
# Element factory emitting elements in the DataCite namespace by default,
# with the xsi prefix bound for schemaLocation.
E = lxml.builder.ElementMaker(nsmap={
    None: NAMESPACE,
    'xsi': XSI},
)
# Shorthand builders for <creator> and <creatorName> elements.
CREATOR = E.creator
CREATOR_NAME = E.creatorName
# This function is not OSF-specific
def datacite_metadata(doi, title, creators, publisher, publication_year, pretty_print=False):
    """Build a DataCite metadata XML document and return it as a string.

    :param str doi: DOI recorded as the resource identifier
    :param str title: Title of the resource
    :param list creators: Creator names, formatted like 'Shakespeare, William'
    :param str publisher: Publisher name.
    :param int publication_year: Year of publication
    :param bool pretty_print: Whether to indent the serialized XML
    """
    creator_elements = [
        CREATOR(CREATOR_NAME(name))
        for name in creators
    ]
    root = E.resource(
        E.identifier(doi, identifierType='DOI'),
        E.creators(*creator_elements),
        E.titles(E.title(title)),
        E.publisher(publisher),
        E.publicationYear(str(publication_year)),
    )
    # Stamp xsi:schemaLocation onto the root element.
    root.attrib['{%s}schemaLocation' % XSI] = SCHEMA_LOCATION
    return lxml.etree.tostring(root, pretty_print=pretty_print)
# This function is OSF specific.
def datacite_metadata_for_node(node, doi, pretty_print=False):
    """Return the DataCite metadata XML document for a node as a string.

    :param Node node: Node whose metadata should be serialized
    :param str doi: DOI assigned to the node
    :param bool pretty_print: Whether to indent the serialized XML
    """
    # Only visible contributors are credited, formatted 'Family, Given'.
    creators = [
        u'{}, {}'.format(contributor.family_name, contributor.given_name)
        for contributor in node.visible_contributors
    ]

    return datacite_metadata(
        doi=doi,
        title=node.title,
        creators=creators,
        publisher='Open Science Framework',
        publication_year=node.registered_date.year,
        pretty_print=pretty_print
    )
| apache-2.0 |
reviewboard/reviewboard | reviewboard/reviews/views.py | 2 | 90019 | from __future__ import unicode_literals
import io
import json
import logging
import re
import struct
import dateutil.parser
from django.conf import settings
from django.contrib.sites.models import Site
from django.core.exceptions import MultipleObjectsReturned, ObjectDoesNotExist
from django.db.models import Q
from django.http import (Http404,
HttpResponse,
HttpResponseBadRequest,
HttpResponseNotFound)
from django.shortcuts import get_object_or_404, get_list_or_404, render
from django.template.defaultfilters import date
from django.utils import six, timezone
from django.utils.formats import localize
from django.utils.html import escape, format_html, strip_tags
from django.utils.safestring import mark_safe
from django.utils.timezone import is_aware, localtime, make_aware, utc
from django.utils.translation import ugettext_lazy as _, ugettext
from django.views.generic.base import (ContextMixin, RedirectView,
TemplateView, View)
from djblets.siteconfig.models import SiteConfiguration
from djblets.util.compat.django.template.loader import render_to_string
from djblets.util.dates import get_latest_timestamp
from djblets.util.http import set_last_modified
from djblets.views.generic.base import (CheckRequestMethodViewMixin,
PrePostDispatchViewMixin)
from djblets.views.generic.etag import ETagViewMixin
from reviewboard.accounts.mixins import (CheckLoginRequiredViewMixin,
LoginRequiredViewMixin,
UserProfileRequiredViewMixin)
from reviewboard.accounts.models import ReviewRequestVisit, Profile
from reviewboard.admin.decorators import check_read_only
from reviewboard.admin.mixins import CheckReadOnlyViewMixin
from reviewboard.admin.read_only import is_site_read_only_for
from reviewboard.attachments.models import (FileAttachment,
get_latest_file_attachments)
from reviewboard.diffviewer.diffutils import (convert_to_unicode,
get_file_chunks_in_range,
get_filediff_encodings,
get_last_header_before_line,
get_last_line_number_in_diff,
get_original_file,
get_patched_file)
from reviewboard.diffviewer.models import DiffSet
from reviewboard.diffviewer.views import (DiffFragmentView,
DiffViewerView,
DownloadPatchErrorBundleView,
exception_traceback_string)
from reviewboard.hostingsvcs.bugtracker import BugTracker
from reviewboard.notifications.email.message import (
prepare_reply_published_mail,
prepare_review_published_mail,
prepare_review_request_mail)
from reviewboard.notifications.email.views import BasePreviewEmailView
from reviewboard.reviews.ui.screenshot import LegacyScreenshotReviewUI
from reviewboard.reviews.context import (comment_counts,
diffsets_with_comments,
has_comments_in_diffsets_excluding,
interdiffs_with_comments,
make_review_request_context)
from reviewboard.reviews.detail import ReviewRequestPageData, entry_registry
from reviewboard.reviews.markdown_utils import (is_rich_text_default_for_user,
render_markdown)
from reviewboard.reviews.models import (Comment,
Review,
ReviewRequest,
Screenshot)
from reviewboard.reviews.ui.base import FileAttachmentReviewUI
from reviewboard.scmtools.errors import FileNotFoundError
from reviewboard.scmtools.models import Repository
from reviewboard.site.mixins import CheckLocalSiteAccessViewMixin
from reviewboard.site.urlresolvers import local_site_reverse
logger = logging.getLogger(__name__)
class ReviewRequestViewMixin(CheckRequestMethodViewMixin,
                             CheckLoginRequiredViewMixin,
                             CheckLocalSiteAccessViewMixin,
                             PrePostDispatchViewMixin):
    """Common functionality for all review request-related pages.

    This performs checks to ensure that the user has access to the page,
    returning an error page if not. It also provides common functionality
    for fetching a review request for the given page, returning suitable
    context for the template, and generating an image used to represent
    the site when posting to social media sites.
    """

    #: Template rendered when the user cannot access the review request.
    permission_denied_template_name = \
        'reviews/review_request_permission_denied.html'

    def pre_dispatch(self, request, review_request_id, *args, **kwargs):
        """Look up objects and permissions before dispatching the request.

        This will first look up the review request, returning an error page
        if it's not accessible. It will then store the review request before
        calling the handler for the HTTP request.

        Args:
            request (django.http.HttpRequest):
                The HTTP request from the client.

            review_request_id (int):
                The ID of the review request being accessed.

            *args (tuple):
                Positional arguments to pass to the handler.

            **kwargs (dict):
                Keyword arguments to pass to the handler.

                These will be arguments provided by the URL pattern.

        Returns:
            django.http.HttpResponse:
            The resulting HTTP response to send to the client, if there's
            a Permission Denied.
        """
        self.review_request = self.get_review_request(
            review_request_id=review_request_id,
            local_site=self.local_site)

        if not self.review_request.is_accessible_by(request.user):
            return self.render_permission_denied(request)

        return None

    def render_permission_denied(self, request):
        """Render a Permission Denied page.

        This will be shown to the user if they're not able to view the
        review request.

        Args:
            request (django.http.HttpRequest):
                The HTTP request from the client.

        Returns:
            django.http.HttpResponse:
            The resulting HTTP response to send to the client.
        """
        return render(request,
                      self.permission_denied_template_name,
                      status=403)

    def get_review_request(self, review_request_id, local_site=None):
        """Return the review request for the given display ID.

        Args:
            review_request_id (int):
                The review request's display ID.

            local_site (reviewboard.site.models.LocalSite):
                The Local Site the review request is on.

        Returns:
            reviewboard.reviews.models.review_request.ReviewRequest:
            The review request for the given display ID and Local Site.

        Raises:
            django.http.Http404:
                The review request could not be found.
        """
        q = ReviewRequest.objects.all()

        if local_site:
            q = q.filter(local_site=local_site,
                         local_id=review_request_id)
        else:
            q = q.filter(pk=review_request_id)

        q = q.select_related('submitter', 'repository')

        return get_object_or_404(q)

    def get_diff(self, revision=None, draft=None):
        """Return a diff on the review request matching the given criteria.

        If a draft is provided, and ``revision`` is either ``None`` or matches
        the revision on the draft's DiffSet, that DiffSet will be returned.

        Args:
            revision (int, optional):
                The revision of the diff to retrieve. If not provided, the
                latest DiffSet will be returned.

            draft (reviewboard.reviews.models.review_request_draft.
                   ReviewRequestDraft, optional):
                The draft of the review request.

        Returns:
            reviewboard.diffviewer.models.diffset.DiffSet:
            The resulting DiffSet.

        Raises:
            django.http.Http404:
                The diff does not exist.
        """
        # Normalize the revision, since it might come in as a string.
        if revision:
            revision = int(revision)

        # This will try to grab the diff associated with a draft if the review
        # request has an associated draft and is either the revision being
        # requested or no revision is being requested.
        if (draft and draft.diffset_id and
            (revision is None or draft.diffset.revision == revision)):
            return draft.diffset

        query = Q(history=self.review_request.diffset_history_id)

        # Grab a revision if requested.
        if revision is not None:
            query = query & Q(revision=revision)

        try:
            return DiffSet.objects.filter(query).latest()
        except DiffSet.DoesNotExist:
            raise Http404

    def get_social_page_image_url(self, file_attachments):
        """Return the URL to an image used for social media sharing.

        This will look for the first attachment in a list of attachments that
        can be used to represent the review request on social media sites and
        chat services. If a suitable attachment is found, its URL will be
        returned.

        Args:
            file_attachments (list of reviewboard.attachments.models.
                              FileAttachment):
                A list of file attachments used on a review request.

        Returns:
            unicode:
            The URL to the first image file attachment, if found, or ``None``
            if no suitable attachments were found.
        """
        for file_attachment in file_attachments:
            if file_attachment.mimetype.startswith('image/'):
                return file_attachment.get_absolute_url()

        return None

    def get_review_request_status_html(self, review_request_details,
                                       close_info, extra_info=None):
        """Return HTML describing the current status of a review request.

        This will return a description of the submitted, discarded, or open
        state for the review request, for use in the rendering of the page.

        Args:
            review_request_details (reviewboard.reviews.models
                                    .base_review_request_details
                                    .BaseReviewRequestDetails):
                The review request or draft being viewed.

            close_info (dict):
                A dictionary of information on the closed state of the
                review request.

            extra_info (list of dict, optional):
                A list of dictionaries showing additional status information.
                Each must have a ``text`` field containing a format string
                using ``{keyword}``-formatted variables, a ``timestamp`` field
                (which will be normalized to the local timestamp), and an
                optional ``extra_vars`` for the format string.

        Returns:
            unicode:
            The status text as HTML for the page.
        """
        # This used to default to a shared mutable list literal
        # (``extra_info=[]``); default to None and build a fresh list per
        # call instead.
        if extra_info is None:
            extra_info = []

        review_request = self.review_request
        status = review_request.status

        if status == ReviewRequest.SUBMITTED:
            timestamp = close_info['timestamp']

            if timestamp:
                text = ugettext('Created {created_time} and submitted '
                                '{timestamp}')
            else:
                text = ugettext('Created {created_time} and submitted')
        elif status == ReviewRequest.DISCARDED:
            timestamp = close_info['timestamp']

            if timestamp:
                text = ugettext('Created {created_time} and discarded '
                                '{timestamp}')
            else:
                text = ugettext('Created {created_time} and discarded')
        elif status == ReviewRequest.PENDING_REVIEW:
            text = ugettext('Created {created_time} and updated {timestamp}')
            timestamp = review_request_details.last_updated
        else:
            logger.error('Unexpected review request status %r for '
                         'review request %s',
                         status, review_request.display_id,
                         request=self.request)

            return ''

        parts = [
            {
                'text': text,
                'timestamp': timestamp,
                'extra_vars': {
                    'created_time': date(localtime(review_request.time_added)),
                },
            },
        ] + extra_info

        html_parts = []

        for part in parts:
            if part['timestamp']:
                timestamp = localtime(part['timestamp'])
                timestamp_html = format_html(
                    '<time class="timesince" datetime="{0}">{1}</time>',
                    timestamp.isoformat(),
                    localize(timestamp))
            else:
                timestamp_html = ''

            html_parts.append(format_html(
                part['text'],
                timestamp=timestamp_html,
                **part.get('extra_vars', {})))

        return mark_safe(' — '.join(html_parts))
#
# Helper functions
#
def build_diff_comment_fragments(
        comments,
        context,
        comment_template_name='reviews/diff_comment_fragment.html',
        error_template_name='diffviewer/diff_fragment_error.html',
        lines_of_context=None,
        show_controls=False,
        request=None):
    """Render the HTML fragment for each diff comment.

    For every comment, the referenced chunk of the diff (optionally widened
    by ``lines_of_context`` lines above/below) is fetched and rendered with
    ``comment_template_name``. If rendering a comment fails, an error
    fragment is rendered with ``error_template_name`` instead and the
    failure is reported in the first element of the return value.

    Returns a ``(had_error, comment_entries)`` tuple, where each entry is a
    dict with ``comment``, ``html``, and ``chunks`` keys.
    """
    comment_entries = []
    had_error = False
    siteconfig = SiteConfiguration.objects.get_current()

    # [lines above, lines below] of extra context around each comment.
    if lines_of_context is None:
        lines_of_context = [0, 0]

    for comment in comments:
        try:
            max_line = get_last_line_number_in_diff(context, comment.filediff,
                                                    comment.interfilediff)

            # Clamp the expanded range to the bounds of the diff.
            first_line = max(1, comment.first_line - lines_of_context[0])
            last_line = min(comment.last_line + lines_of_context[1], max_line)
            num_lines = last_line - first_line + 1

            chunks = list(get_file_chunks_in_range(context,
                                                   comment.filediff,
                                                   comment.interfilediff,
                                                   first_line,
                                                   num_lines))

            comment_context = {
                'comment': comment,
                'header': get_last_header_before_line(context,
                                                      comment.filediff,
                                                      comment.interfilediff,
                                                      first_line),
                'chunks': chunks,
                'domain': Site.objects.get_current().domain,
                'domain_method': siteconfig.get('site_domain_method'),
                'lines_of_context': lines_of_context,
                'expandable_above': show_controls and first_line != 1,
                'expandable_below': show_controls and last_line != max_line,
                'collapsible': lines_of_context != [0, 0],
                'lines_above': first_line - 1,
                'lines_below': max_line - last_line,
                'first_line': first_line,
            }
            comment_context.update(context)
            content = render_to_string(template_name=comment_template_name,
                                       context=comment_context,
                                       request=request)
        except Exception as e:
            content = exception_traceback_string(
                None, e, error_template_name, {
                    'comment': comment,
                    'file': {
                        'depot_filename': comment.filediff.source_file,
                        'index': None,
                        'filediff': comment.filediff,
                    },
                    'domain': Site.objects.get_current().domain,
                    'domain_method': siteconfig.get("site_domain_method"),
                })

            # It's bad that we failed, and we'll return a 500, but we'll
            # still return content for anything we have. This will prevent any
            # caching.
            had_error = True
            chunks = []

        comment_entries.append({
            'comment': comment,
            'html': content,
            'chunks': chunks,
        })

    return had_error, comment_entries
#
# View functions
#
class RootView(CheckLoginRequiredViewMixin,
               UserProfileRequiredViewMixin,
               CheckLocalSiteAccessViewMixin,
               RedirectView):
    """Handles the root URL of Review Board or a Local Site.

    Authenticated users are redirected to their Dashboard, and anonymous
    visitors to the All Review Requests page. Either target may itself
    redirect for login or show a Permission Denied, depending on the
    settings.
    """

    permanent = False

    def get_redirect_url(self, *args, **kwargs):
        """Return the URL to redirect to.

        Args:
            *args (tuple):
                Positional arguments passed to the view.

            **kwargs (dict):
                Keyword arguments passed to the view.

        Returns:
            unicode:
            The Dashboard URL if the user is authenticated, or the
            All Review Requests URL otherwise.
        """
        target = ('dashboard'
                  if self.request.user.is_authenticated()
                  else 'all-review-requests')

        return local_site_reverse(target, local_site=self.local_site)
class NewReviewRequestView(LoginRequiredViewMixin,
                           CheckLocalSiteAccessViewMixin,
                           UserProfileRequiredViewMixin,
                           CheckReadOnlyViewMixin,
                           TemplateView):
    """View for the New Review Request page.

    This provides the user with a UI consisting of all their repositories,
    allowing them to manually upload a diff against the repository or,
    depending on the repository's capabilities, to browse for an existing
    commit to post.
    """

    template_name = 'reviews/new_review_request.html'

    def get_context_data(self, **kwargs):
        """Return data for the template.

        This will return information on each repository shown on the page.

        Args:
            **kwargs (dict):
                Additional keyword arguments passed to the view.

        Returns:
            dict:
            Context data for the template.
        """
        local_site = self.local_site
        local_site_prefix = ('s/%s/' % local_site.name) if local_site else ''

        # Start with the pseudo-repository that allows posting file
        # attachments without a diff.
        valid_repos = [{
            'name': _('(None - File attachments only)'),
            'scmtoolName': '',
            'supportsPostCommit': False,
            'filesOnly': True,
            'localSitePrefix': local_site_prefix,
        }]

        accessible = Repository.objects.accessible(self.request.user,
                                                   local_site=local_site)

        for repository in accessible.order_by('name'):
            try:
                valid_repos.append({
                    'id': repository.pk,
                    'name': repository.name,
                    'scmtoolName': repository.scmtool_class.name,
                    'localSitePrefix': local_site_prefix,
                    'supportsPostCommit': repository.supports_post_commit,
                    'requiresChangeNumber':
                        repository.supports_pending_changesets,
                    'requiresBasedir':
                        not repository.diffs_use_absolute_paths,
                    'filesOnly': False,
                })
            except Exception:
                # A broken repository configuration shouldn't take down
                # the whole page; log it and move on.
                logger.exception(
                    'Error loading information for repository "%s" (ID %d) '
                    'for the New Review Request page.',
                    repository.name, repository.pk)

        return {
            'page_model_attrs': {
                'repositories': valid_repos,
            }
        }
class ReviewRequestDetailView(ReviewRequestViewMixin,
UserProfileRequiredViewMixin,
ETagViewMixin,
TemplateView):
"""A view for the main review request page.
This page shows information on the review request, all the reviews and
issues that have been posted, and the status updates made on uploaded
changes.
"""
template_name = 'reviews/review_detail.html'
def __init__(self, **kwargs):
"""Initialize a view for the request.
Args:
**kwargs (dict):
Keyword arguments passed to :py:meth:`as_view`.
"""
super(ReviewRequestDetailView, self).__init__(**kwargs)
self.data = None
self.visited = None
self.blocks = None
self.last_activity_time = None
self.last_visited = None
    def get_etag_data(self, request, *args, **kwargs):
        """Return an ETag for the view.

        This will look up state needed for the request and generate a
        suitable ETag. Some of the information will be stored for later
        computation of the template context.

        Args:
            request (django.http.HttpRequest):
                The HTTP request from the client.

            *args (tuple, unused):
                Positional arguments passed to the handler.

            **kwargs (dict, unused):
                Keyword arguments passed to the handler.

        Returns:
            unicode:
            The ETag for the page.
        """
        review_request = self.review_request

        # Track the visit to this review request, so the dashboard can
        # reflect whether there are new updates.
        self.visited, self.last_visited = self.track_review_request_visit()

        # Begin building data for the contents of the page. This will include
        # the reviews, change descriptions, and other content shown on the
        # page.
        data = ReviewRequestPageData(review_request=review_request,
                                     request=request,
                                     last_visited=self.last_visited)
        self.data = data

        data.query_data_pre_etag()

        self.blocks = review_request.get_blocks()

        # Prepare data used in both the page and the ETag.
        starred = self.is_review_request_starred()

        self.last_activity_time = review_request.get_last_activity_info(
            data.diffsets, data.reviews)['timestamp']
        etag_timestamp = self.last_activity_time

        # Each registered page entry contributes its own component to the
        # ETag, so entry-specific state changes also invalidate the page.
        entry_etags = ':'.join(
            entry_cls.build_etag_data(data)
            for entry_cls in entry_registry
        )

        if data.draft:
            draft_timestamp = data.draft.last_updated
        else:
            draft_timestamp = ''

        # Combine everything that can affect the rendered page into one
        # colon-separated ETag string.
        return ':'.join(six.text_type(value) for value in (
            request.user,
            etag_timestamp,
            draft_timestamp,
            data.latest_changedesc_timestamp,
            entry_etags,
            data.latest_review_timestamp,
            review_request.last_review_activity_timestamp,
            is_rich_text_default_for_user(request.user),
            is_site_read_only_for(request.user),
            [r.pk for r in self.blocks],
            starred,
            self.visited and self.visited.visibility,
            (self.last_visited and
             self.last_visited < self.last_activity_time),
            settings.AJAX_SERIAL,
        ))
def track_review_request_visit(self):
"""Track a visit to the review request.
If the user is authenticated, their visit to this page will be
recorded. That information is used to provide an indicator in the
dashboard when a review request is later updated.
Returns:
tuple:
A tuple containing the following items:
1. The resulting
:py:class:`~reviewboard.accounts.models.ReviewRequestVisit`,
if the user is authenticated and the visit could be returned or
created.
2. The timestamp when the user had last visited the site, prior to
this visit (or 0 if they haven't).
"""
user = self.request.user
visited = None
last_visited = None
if user.is_authenticated():
review_request = self.review_request
try:
visited, visited_is_new = \
ReviewRequestVisit.objects.get_or_create(
user=user, review_request=review_request)
last_visited = visited.timestamp.replace(tzinfo=utc)
except ReviewRequestVisit.DoesNotExist:
# Somehow, this visit was seen as created but then not
# accessible. We need to log this and then continue on.
logger.error('Unable to get or create ReviewRequestVisit '
'for user "%s" on review request at %s',
user.username,
review_request.get_absolute_url())
visited = None
# If the review request is public and pending review and if the user
# is logged in, mark that they've visited this review request.
if (visited and
review_request.public and
review_request.status == review_request.PENDING_REVIEW):
visited.timestamp = timezone.now()
visited.save()
return visited, last_visited
def is_review_request_starred(self):
"""Return whether the review request has been starred by the user.
Returns:
bool:
``True`` if the user has starred the review request.
``False`` if they have not.
"""
user = self.request.user
if user.is_authenticated():
try:
return (
user.get_profile(create_if_missing=False)
.starred_review_requests
.filter(pk=self.review_request.pk)
.exists()
)
except Profile.DoesNotExist:
pass
return False
    def get_context_data(self, **kwargs):
        """Return data for the template.

        This will return information on the review request, the entries to
        show, file attachments, issues, metadata to use when sharing the
        review request on social networks, and everything else needed to
        render the page.

        Args:
            **kwargs (dict):
                Additional keyword arguments passed to the view.

        Returns:
            dict:
            Context data for the template.
        """
        review_request = self.review_request
        request = self.request
        data = self.data

        # Finish the queries that were deferred during ETag generation, now
        # that we know the page actually has to be rendered.
        data.query_data_post_etag()
        entries = data.get_entries()

        review = review_request.get_pending_review(request.user)
        close_info = review_request.get_close_info()
        review_request_status_html = self.get_review_request_status_html(
            review_request_details=data.review_request_details,
            close_info=close_info)

        # Only the most recent revision of each file attachment is shown in
        # the attachments area; the full set is still passed along below.
        file_attachments = \
            get_latest_file_attachments(data.active_file_attachments)
        social_page_image_url = self.get_social_page_image_url(
            file_attachments)

        context = \
            super(ReviewRequestDetailView, self).get_context_data(**kwargs)
        context.update(make_review_request_context(request, review_request))
        context.update({
            'blocks': self.blocks,
            'draft': data.draft,
            'review_request_details': data.review_request_details,
            'review_request_visit': self.visited,
            'review_request_status_html': review_request_status_html,
            'entries': entries,
            'last_activity_time': self.last_activity_time,
            'last_visited': self.last_visited,
            'review': review,
            'request': request,
            'close_description': close_info['close_description'],
            'close_description_rich_text': close_info['is_rich_text'],
            'close_timestamp': close_info['timestamp'],
            'issue_counts': data.issue_counts,
            'issues': data.issues,
            'file_attachments': file_attachments,
            'all_file_attachments': data.all_file_attachments,
            'screenshots': data.active_screenshots,
            'social_page_image_url': social_page_image_url,
            'social_page_title': (
                'Review Request #%s: %s'
                % (review_request.display_id, review_request.summary)
            ),
        })

        return context
class ReviewRequestUpdatesView(ReviewRequestViewMixin, ETagViewMixin,
                               ContextMixin, View):
    """Internal view for sending data for updating the review request page.

    This view serializes data representing components of the review request
    page (the issue summary table and entries) that need to periodically
    update without a full page reload. It's used internally by the page to
    request and handle updates.

    The resulting format is a custom, condensed format containing metadata
    information and HTML for each component being updated. It's designed
    to be quick to parse and reduces the amount of data to send across the
    wire (unlike a format like JSON, which would add overhead to the
    serialization/deserialization time and data size when storing HTML).

    Each entry in the payload is in the following format (see
    :py:meth:`_write_update`), with all entries joined together:

        <metadata length - 4-byte little-endian unsigned integer>
        <metadata content - UTF-8-encoded JSON>
        <html length - 4-byte little-endian unsigned integer>
        <html content - UTF-8-encoded HTML>

    The format is subject to change without notice, and should not be
    relied upon by third parties.
    """

    def __init__(self, **kwargs):
        """Initialize the view.

        Args:
            **kwargs (tuple):
                Keyword arguments passed to :py:meth:`as_view`.
        """
        super(ReviewRequestUpdatesView, self).__init__(**kwargs)

        # Maps each requested entry type ID to the set of requested entry
        # IDs of that type. Populated in pre_dispatch().
        self.entry_ids = {}

        # Page data computed for the entry classes being updated.
        self.data = None

        # Optional timestamp string from ?since=; only entries updated
        # after this point are serialized.
        self.since = None

    def pre_dispatch(self, request, *args, **kwargs):
        """Look up and validate state before dispatching the request.

        This looks up information based on the request before performing any
        ETag generation or otherwise handling the HTTP request.

        Args:
            request (django.http.HttpRequest):
                The HTTP request from the client.

            *args (tuple, unused):
                Positional arguments passed to the view.

            **kwargs (dict, unused):
                Keyword arguments passed to the view.

        Returns:
            django.http.HttpResponse:
            An error response if the ``?entries=`` value was invalid, or
            ``None`` to continue dispatching the request normally.

        Raises:
            django.http.Http404:
                No requested entry type could be resolved to a registered
                entry class.
        """
        super(ReviewRequestUpdatesView, self).pre_dispatch(request, *args,
                                                           **kwargs)

        # Find out which entries and IDs (if any) that the caller is most
        # interested in.
        #
        # The expected format is: ?entries=<type>:<id>,<id>;<type>:<id>...
        entries_str = request.GET.get('entries')

        if entries_str:
            try:
                for entry_part in entries_str.split(';'):
                    entry_type, entry_ids = entry_part.split(':')
                    self.entry_ids[entry_type] = set(entry_ids.split(','))
            except ValueError as e:
                return HttpResponseBadRequest('Invalid ?entries= value: %s'
                                              % e)

        if self.entry_ids:
            entry_classes = []

            for entry_type in six.iterkeys(self.entry_ids):
                entry_cls = entry_registry.get_entry(entry_type)

                if entry_cls:
                    entry_classes.append(entry_cls)
        else:
            # No filter was given, so consider every registered entry class.
            entry_classes = list(entry_registry)

        if not entry_classes:
            raise Http404

        self.since = request.GET.get('since')

        self.data = ReviewRequestPageData(self.review_request, request,
                                          entry_classes=entry_classes)

    def get_etag_data(self, request, *args, **kwargs):
        """Return an ETag for the view.

        This will look up state needed for the request and generate a
        suitable ETag. Some of the information will be stored for later
        computation of the payload.

        Args:
            request (django.http.HttpRequest):
                The HTTP request from the client.

            *args (tuple, unused):
                Positional arguments passed to the handler.

            **kwargs (dict, unused):
                Keyword arguments passed to the handler.

        Returns:
            unicode:
            The ETag for the page.
        """
        review_request = self.review_request
        data = self.data

        # Build page data only for the entry we care about.
        data.query_data_pre_etag()

        last_activity_time = review_request.get_last_activity_info(
            data.diffsets, data.reviews)['timestamp']

        entry_etags = ':'.join(
            entry_cls.build_etag_data(data)
            for entry_cls in entry_registry
        )

        return ':'.join(six.text_type(value) for value in (
            request.user,
            last_activity_time,
            data.latest_review_timestamp,
            review_request.last_review_activity_timestamp,
            entry_etags,
            is_rich_text_default_for_user(request.user),
            settings.AJAX_SERIAL,
        ))

    def get(self, request, **kwargs):
        """Handle HTTP GET requests for this view.

        Args:
            request (django.http.HttpRequest):
                The HTTP request from the client.

            **kwargs (dict):
                Keyword arguments passed to the handler.

        Returns:
            django.http.HttpResponse:
            The HTTP response to send to the client. This will contain the
            custom update payload content.
        """
        request = self.request
        review_request = self.review_request
        data = self.data
        since = self.since

        # Finish any querying needed by entries on this page.
        self.data.query_data_post_etag()

        # Gather all the entries into a single list.
        #
        # Note that the order in which we build the resulting list of entries
        # doesn't matter at this stage, but it does need to be consistent.
        # The current order (main, initial) is based on Python 2.7 sort order,
        # which our tests are based on. This could be changed in the future.
        all_entries = data.get_entries()
        entries = all_entries['main'] + all_entries['initial']

        if self.entry_ids:
            # If specific entry IDs have been requested, limit the results
            # to those.
            entries = (
                entry
                for entry in entries
                if (entry.entry_type_id in self.entry_ids and
                    entry.entry_id in self.entry_ids[entry.entry_type_id])
            )

        # See if the caller only wants to fetch entries updated since a given
        # timestamp.
        if since:
            since = dateutil.parser.parse(since)

            if not is_aware(since):
                since = make_aware(since, utc)

            entries = (
                entry
                for entry in entries
                if (entry.updated_timestamp is not None and
                    entry.updated_timestamp > since)
            )

        # We can now begin to serialize the payload for all the updates.
        payload = io.BytesIO()
        base_entry_context = None
        needs_issue_summary_table = False

        for entry in entries:
            metadata = {
                'type': 'entry',
                'entryType': entry.entry_type_id,
                'entryID': entry.entry_id,
                'addedTimestamp': six.text_type(entry.added_timestamp),
                'updatedTimestamp': six.text_type(entry.updated_timestamp),
                'modelData': entry.get_js_model_data(),
                'viewOptions': entry.get_js_view_data(),
            }

            if base_entry_context is None:
                # Now that we know the context is needed for entries,
                # we can construct and populate it.
                base_entry_context = (
                    super(ReviewRequestUpdatesView, self)
                    .get_context_data(**kwargs)
                )
                base_entry_context.update(
                    make_review_request_context(request, review_request))

            try:
                html = render_to_string(
                    template_name=entry.template_name,
                    context=dict({
                        'show_entry_statuses_area': (
                            entry.entry_pos == entry.ENTRY_POS_MAIN),
                        'entry': entry,
                    }, **base_entry_context),
                    request=request)
            except Exception as e:
                logger.error('Error rendering review request page entry '
                             '%r: %s',
                             entry, e, request=request)

                # Skip this entry entirely. Previously, execution fell
                # through to the write below, referencing an ``html`` value
                # that was either unbound (raising NameError on the first
                # entry) or left over from a previous iteration, sending
                # another entry's HTML for this entry.
                continue

            self._write_update(payload, metadata, html)

            if entry.needs_reviews:
                needs_issue_summary_table = True

        # If any of the entries required any information on reviews, then
        # the state of the issue summary table may have changed. We'll need
        # to send this along as well.
        if needs_issue_summary_table:
            metadata = {
                'type': 'issue-summary-table',
            }

            html = render_to_string(
                template_name='reviews/review_issue_summary_table.html',
                context={
                    'issue_counts': data.issue_counts,
                    'issues': data.issues,
                },
                request=request)

            self._write_update(payload, metadata, html)

        # The payload's complete. Close it out and send to the client.
        result = payload.getvalue()
        payload.close()

        return HttpResponse(result, content_type='text/plain; charset=utf-8')

    def _write_update(self, payload, metadata, html):
        """Write an update to the payload.

        This will format the metadata and HTML for the update and write it.

        Each update is written as a 4-byte little-endian unsigned length
        followed by the UTF-8-encoded JSON metadata, then a 4-byte
        little-endian unsigned length followed by the UTF-8-encoded HTML.

        Args:
            payload (io.BytesIO):
                The payload to write to.

            metadata (dict):
                The JSON-serializable metadata to write.

            html (unicode):
                The HTML to write.
        """
        metadata = json.dumps(metadata).encode('utf-8')
        html = html.strip().encode('utf-8')

        payload.write(struct.pack(b'<L', len(metadata)))
        payload.write(metadata)
        payload.write(struct.pack(b'<L', len(html)))
        payload.write(html)
class ReviewsDiffViewerView(ReviewRequestViewMixin,
                            UserProfileRequiredViewMixin,
                            DiffViewerView):
    """Renders the diff viewer for a review request.

    This wraps the base
    :py:class:`~reviewboard.diffviewer.views.DiffViewerView` to display a diff
    for the given review request and the given diff revision or range.

    The view expects the following parameters to be provided:

    ``review_request_id``:
        The ID of the ReviewRequest containing the diff to render.

    The following may also be provided:

    ``revision``:
        The DiffSet revision to render.

    ``interdiff_revision``:
        The second DiffSet revision in an interdiff revision range.

    ``local_site``:
        The LocalSite the ReviewRequest must be on, if any.

    See :py:class:`~reviewboard.diffviewer.views.DiffViewerView`'s
    documentation for the accepted query parameters.
    """

    def __init__(self, **kwargs):
        """Initialize a view for the request.

        Args:
            **kwargs (dict):
                Keyword arguments passed to :py:meth:`as_view`.
        """
        super(ReviewsDiffViewerView, self).__init__(**kwargs)

        # State computed in get() and consumed by get_context_data().
        self.draft = None
        self.diffset = None
        self.interdiffset = None

    def get(self, request, revision=None, interdiff_revision=None, *args,
            **kwargs):
        """Handle HTTP GET requests for this view.

        This will look up the review request and DiffSets, given the
        provided information, and pass them to the parent class for rendering.

        Args:
            request (django.http.HttpRequest):
                The HTTP request from the client.

            revision (int, optional):
                The revision of the diff to view. This defaults to the latest
                diff.

            interdiff_revision (int, optional):
                The revision to use for an interdiff, if viewing an interdiff.

            *args (tuple):
                Positional arguments passed to the handler.

            **kwargs (dict):
                Keyword arguments passed to the handler.

        Returns:
            django.http.HttpResponse:
            The HTTP response to send to the client.
        """
        review_request = self.review_request

        # The draft is looked up for the submitter, then discarded if the
        # requesting user isn't allowed to see it.
        self.draft = review_request.get_draft(review_request.submitter)

        if self.draft and not self.draft.is_accessible_by(request.user):
            self.draft = None

        self.diffset = self.get_diff(revision, self.draft)

        if interdiff_revision and interdiff_revision != revision:
            # An interdiff revision was specified. Try to find a matching
            # diffset.
            self.interdiffset = self.get_diff(interdiff_revision, self.draft)

        return super(ReviewsDiffViewerView, self).get(
            request=request,
            diffset=self.diffset,
            interdiffset=self.interdiffset,
            *args,
            **kwargs)

    def get_context_data(self, **kwargs):
        """Return additional context data for the template.

        This provides some additional data used for rendering the diff
        viewer. This data is more specific to the reviewing functionality,
        as opposed to the data calculated by
        :py:meth:`DiffViewerView.get_context_data
        <reviewboard.diffviewer.views.DiffViewerView.get_context_data>`
        which is more focused on the actual diff.

        Args:
            **kwargs (dict):
                Keyword arguments passed to the handler.

        Returns:
            dict:
            Context data used to render the template.
        """
        # Try to find an existing pending review of this diff from the
        # current user.
        pending_review = \
            self.review_request.get_pending_review(self.request.user)

        has_draft_diff = self.draft and self.draft.diffset
        is_draft_diff = has_draft_diff and self.draft.diffset == self.diffset
        is_draft_interdiff = (has_draft_diff and self.interdiffset and
                              self.draft.diffset == self.interdiffset)

        # Get the list of diffsets. We only want to calculate this once.
        diffsets = self.review_request.get_diffsets()
        num_diffs = len(diffsets)

        if num_diffs > 0:
            latest_diffset = diffsets[-1]
        else:
            latest_diffset = None

        # A draft diff isn't in the published diffsets list, but still
        # counts toward the total shown to the user.
        if self.draft and self.draft.diffset:
            num_diffs += 1

        last_activity_time = self.review_request.get_last_activity_info(
            diffsets)['timestamp']

        review_request_details = self.draft or self.review_request

        file_attachments = list(review_request_details.get_file_attachments())
        screenshots = list(review_request_details.get_screenshots())
        latest_file_attachments = get_latest_file_attachments(file_attachments)
        social_page_image_url = self.get_social_page_image_url(
            latest_file_attachments)

        # Compute the lists of comments based on filediffs and interfilediffs.
        # We do this using the 'through' table so that we can select_related
        # the reviews and comments.
        comments = {}
        q = (
            Review.comments.through.objects
            .filter(review__review_request=self.review_request)
            .select_related()
        )

        for obj in q:
            comment = obj.comment
            comment.review_obj = obj.review
            key = (comment.filediff_id, comment.interfilediff_id)
            comments.setdefault(key, []).append(comment)

        # Build the status information shown below the summary.
        close_info = self.review_request.get_close_info()

        if latest_diffset:
            status_extra_info = [{
                'text': ugettext('Latest diff uploaded {timestamp}'),
                'timestamp': latest_diffset.timestamp,
            }]
        else:
            status_extra_info = []

        review_request_status_html = self.get_review_request_status_html(
            review_request_details=review_request_details,
            close_info=close_info,
            extra_info=status_extra_info)

        # Build the final context for the page.
        context = super(ReviewsDiffViewerView, self).get_context_data(**kwargs)
        context.update({
            'close_description': close_info['close_description'],
            'close_description_rich_text': close_info['is_rich_text'],
            'close_timestamp': close_info['timestamp'],
            'diffsets': diffsets,
            'review': pending_review,
            'review_request_details': review_request_details,
            'review_request_status_html': review_request_status_html,
            'draft': self.draft,
            'last_activity_time': last_activity_time,
            'file_attachments': latest_file_attachments,
            'all_file_attachments': file_attachments,
            'screenshots': screenshots,
            'comments': comments,
            'social_page_image_url': social_page_image_url,
            'social_page_title': (
                'Diff for Review Request #%s: %s'
                % (self.review_request.display_id,
                   review_request_details.summary)
            ),
        })
        context.update(make_review_request_context(self.request,
                                                   self.review_request,
                                                   is_diff_view=True))

        diffset_pair = context['diffset_pair']
        diff_context = context['diff_context']

        diff_context.update({
            'num_diffs': num_diffs,
            'comments_hint': {
                'has_other_comments': has_comments_in_diffsets_excluding(
                    pending_review, diffset_pair),
                'diffsets_with_comments': [
                    {
                        'revision': diffset_info['diffset'].revision,
                        'is_current': diffset_info['is_current'],
                    }
                    for diffset_info in diffsets_with_comments(
                        pending_review, diffset_pair)
                ],
                'interdiffs_with_comments': [
                    {
                        'old_revision': pair['diffset'].revision,
                        'new_revision': pair['interdiff'].revision,
                        'is_current': pair['is_current'],
                    }
                    for pair in interdiffs_with_comments(
                        pending_review, diffset_pair)
                ],
            },
        })
        diff_context['revision'].update({
            'latest_revision': (latest_diffset.revision
                                if latest_diffset else None),
            'is_draft_diff': is_draft_diff,
            'is_draft_interdiff': is_draft_interdiff,
        })

        # Serialize the per-file information consumed by the client-side
        # diff viewer.
        files = []

        for f in context['files']:
            filediff = f['filediff']
            interfilediff = f['interfilediff']
            base_filediff = f['base_filediff']

            if base_filediff:
                base_filediff_id = base_filediff.pk
            else:
                base_filediff_id = None

            data = {
                'newfile': f['newfile'],
                'binary': f['binary'],
                'deleted': f['deleted'],
                'id': filediff.pk,
                'depot_filename': f['depot_filename'],
                'dest_filename': f['dest_filename'],
                'dest_revision': f['dest_revision'],
                'revision': f['revision'],
                'filediff': {
                    'id': filediff.pk,
                    'revision': filediff.diffset.revision,
                },
                'base_filediff_id': base_filediff_id,
                'index': f['index'],
                'comment_counts': comment_counts(self.request.user, comments,
                                                 filediff, interfilediff),
            }

            if interfilediff:
                data['interfilediff'] = {
                    'id': interfilediff.pk,
                    'revision': interfilediff.diffset.revision,
                }

            if f['force_interdiff']:
                data['force_interdiff'] = True
                data['interdiff_revision'] = f['force_interdiff_revision']

            files.append(data)

        diff_context['files'] = files

        return context
class DownloadRawDiffView(ReviewRequestViewMixin, View):
    """View for downloading a raw diff from a review request.

    This will generate a single raw diff file spanning all the FileDiffs
    in a diffset for the revision specified in the URL.
    """

    def get(self, request, revision=None, *args, **kwargs):
        """Handle HTTP GET requests for this view.

        This will generate the raw diff file and send it to the client.

        Args:
            request (django.http.HttpRequest):
                The HTTP request from the client.

            revision (int, optional):
                The revision of the diff to download. Defaults to the latest
                revision.

            *args (tuple):
                Positional arguments passed to the handler.

            **kwargs (dict):
                Keyword arguments passed to the handler.

        Returns:
            django.http.HttpResponse:
            The HTTP response to send to the client, containing the diff
            as a ``text/x-patch`` attachment.
        """
        review_request = self.review_request

        draft = review_request.get_draft(request.user)
        diffset = self.get_diff(revision, draft)

        # Reconstruct the raw diff text from the stored FileDiffs, using
        # the repository's SCMTool parser.
        tool = review_request.repository.get_scmtool()
        data = tool.get_parser(b'').raw_diff(diffset)

        resp = HttpResponse(data, content_type='text/x-patch')

        if diffset.name == 'diff':
            # The diffset has no meaningful name; synthesize one from the
            # review request's display ID.
            filename = 'rb%d.patch' % review_request.display_id
        else:
            # Get rid of any Unicode characters that may be in the filename.
            filename = diffset.name.encode('ascii', 'ignore').decode('ascii')

            # Content-Disposition headers containing commas break on Chrome 16
            # and newer. To avoid this, replace any commas in the filename with
            # an underscore. Was bug 3704.
            filename = filename.replace(',', '_')

        resp['Content-Disposition'] = 'attachment; filename=%s' % filename
        set_last_modified(resp, diffset.timestamp)

        return resp
class CommentDiffFragmentsView(ReviewRequestViewMixin, ETagViewMixin,
                               ContextMixin, View):
    """View for rendering a section of a diff that a comment pertains to.

    This takes in one or more
    :py:class:`~reviewboard.reviews.models.diff_comment.Comment` IDs
    (comma-separated) as part of the URL and returns a payload containing
    data and HTML for each comment's diff fragment, which the client can
    parse in order to dynamically load the fragments into the page.

    The resulting format is a custom, condensed format containing the comment
    ID and HTML for each diff fragment. It's designed to be quick to parse and
    reduces the amount of data to send across the wire (unlike a format like
    JSON, which would add overhead to the serialization/deserialization time
    and data size when storing HTML, or JavaScript, which releases prior to
    3.0 used to handle injecting fragments into the DOM).

    Each entry in the payload is in the following format (see :py:meth:`get`),
    with all entries joined together:

        <comment ID - 4-byte little-endian unsigned integer>
        <html length - 4-byte little-endian unsigned integer>
        <html content - UTF-8-encoded HTML>

    The format is subject to change without notice, and should not be relied
    upon by third parties.

    The following URL query options are supported:

    ``allow_expansion``:
        Whether expansion controls should be shown to the user. To enable
        this, the caller must pass a value of ``1``. This is disabled by
        default.

    ``lines_of_context``:
        The number of lines of context before and after the commented region
        of the diff. This is in the form of ``pre,post``, where both are the
        numbers of lines. This defaults to ``0,0``.
    """

    comment_template_name = 'reviews/diff_comment_fragment.html'
    error_template_name = 'diffviewer/diff_fragment_error.html'

    # NOTE(review): get() below responds with text/plain; this attribute
    # doesn't appear to be used within this class — confirm whether a
    # base class consumes it.
    content_type = 'application/javascript'

    EXPIRATION_SECONDS = 60 * 60 * 24 * 365 # 1 year

    def get_etag_data(self, request, comment_ids, *args, **kwargs):
        """Return an ETag for the view.

        This will look up state needed for the request and generate a
        suitable ETag. Some of the information will be stored for later
        computation of the template context.

        Args:
            request (django.http.HttpRequest):
                The HTTP request from the client.

            comment_ids (unicode):
                A comma-separated list of comment IDs to render.

            *args (tuple, unused):
                Positional arguments passed to the handler.

            **kwargs (dict, unused):
                Keyword arguments passed to the handler.

        Returns:
            unicode:
            The ETag for the page.
        """
        # Restrict lookups to comments on this review request that the
        # requesting user is allowed to see (public reviews, plus their
        # own unpublished ones when authenticated).
        q = (Q(pk__in=comment_ids.split(',')) &
             Q(review__review_request=self.review_request))

        if request.user.is_authenticated():
            q &= Q(review__public=True) | Q(review__user=request.user)
        else:
            q &= Q(review__public=True)

        # Stored for later use by get().
        self.comments = get_list_or_404(Comment, q)

        latest_timestamp = get_latest_timestamp(
            comment.timestamp
            for comment in self.comments
        )

        return '%s:%s:%s' % (comment_ids, latest_timestamp,
                             settings.TEMPLATE_SERIAL)

    def get(self, request, **kwargs):
        """Handle HTTP GET requests for this view.

        This will generate a payload for the diff comments being loaded and
        pass them in a format that can be parsed by the client.

        Args:
            request (django.http.HttpRequest):
                The HTTP request from the client.

            **kwargs (dict):
                Keyword arguments passed to the view.

        Returns:
            django.http.HttpResponse:
            The HTTP response containing the fragments payload.
        """
        lines_of_context = request.GET.get('lines_of_context', '0,0')
        allow_expansion = (request.GET.get('allow_expansion') == '1')

        try:
            lines_of_context = [int(i) for i in lines_of_context.split(',')]

            # Ensure that we have 2 values for lines_of_context. If only one is
            # given, assume it is both the before and after context. If more
            # than two are given, only consider the first two. If somehow we
            # get no lines of context value, we will default to [0, 0].
            if len(lines_of_context) == 1:
                lines_of_context.append(lines_of_context[0])
            elif len(lines_of_context) > 2:
                lines_of_context = lines_of_context[0:2]
            elif len(lines_of_context) == 0:
                raise ValueError
        except ValueError:
            lines_of_context = [0, 0]

        context = \
            super(CommentDiffFragmentsView, self).get_context_data(**kwargs)
        context.update({
            'request': request,
            'user': request.user,
        })

        payload = io.BytesIO()

        # self.comments was populated during ETag generation in
        # get_etag_data().
        comment_entries = build_diff_comment_fragments(
            comments=self.comments,
            context=context,
            comment_template_name=self.comment_template_name,
            error_template_name=self.error_template_name,
            lines_of_context=lines_of_context,
            show_controls=allow_expansion)[1]

        for entry in comment_entries:
            html = entry['html'].strip().encode('utf-8')

            payload.write(struct.pack(b'<LL', entry['comment'].pk, len(html)))
            payload.write(html)

        result = payload.getvalue()
        payload.close()

        return HttpResponse(result, content_type='text/plain; charset=utf-8')
class ReviewsDiffFragmentView(ReviewRequestViewMixin, DiffFragmentView):
    """Renders a fragment from a file in the diff viewer.

    Displays just a fragment of a diff or interdiff owned by the given
    review request. The fragment is identified by the chunk index in the
    diff.

    ``review_request_id``:
        The ID of the ReviewRequest containing the diff to render.

    ``revision``:
        The DiffSet revision to render.

    ``filediff_id``:
        The ID of the FileDiff within the DiffSet.

    The following may also be provided:

    ``interdiff_revision``:
        The second DiffSet revision in an interdiff revision range.

    ``chunk_index``:
        The index (0-based) of the chunk to render. If left out, the
        entire file will be rendered.

    ``local_site``:
        The LocalSite the ReviewRequest must be on, if any.

    See :py:class:`~reviewboard.diffviewer.views.DiffFragmentView` for the
    accepted query parameters.
    """

    def process_diffset_info(self, revision, interdiff_revision=None,
                             **kwargs):
        """Process and return information on the desired diff.

        The diff IDs and other data passed to the view can be processed and
        converted into DiffSets. A dictionary with the DiffSet and FileDiff
        information will be returned.

        If the review request cannot be accessed by the user, an HttpResponse
        will be returned instead.

        Args:
            revision (int):
                The revision of the diff to view.

            interdiff_revision (int, optional):
                The second diff revision if viewing an interdiff.

            **kwargs (dict):
                Keyword arguments passed to the view.

        Returns:
            dict:
            Information on the diff for use in the template and in queries.
        """
        user = self.request.user
        draft = self.review_request.get_draft(user)

        if interdiff_revision is not None:
            interdiffset = self.get_diff(interdiff_revision, draft)
        else:
            interdiffset = None

        diffset = self.get_diff(revision, draft)

        return super(ReviewsDiffFragmentView, self).process_diffset_info(
            diffset_or_id=diffset,
            interdiffset_or_id=interdiffset,
            **kwargs)

    def create_renderer(self, diff_file, *args, **kwargs):
        """Create the DiffRenderer for this fragment.

        This will augment the renderer for binary files by looking up
        file attachments, if review UIs are involved, disabling caching.

        Args:
            diff_file (dict):
                The information on the diff file to render.

            *args (tuple):
                Additional positional arguments from the parent class.

            **kwargs (dict):
                Additional keyword arguments from the parent class.

        Returns:
            reviewboard.diffviewer.renderers.DiffRenderer:
            The resulting diff renderer.
        """
        renderer = super(ReviewsDiffFragmentView, self).create_renderer(
            diff_file=diff_file, *args, **kwargs)

        if diff_file['binary']:
            # Determine the file attachments to display in the diff viewer,
            # if any.
            filediff = diff_file['filediff']
            interfilediff = diff_file['interfilediff']

            orig_attachment = None
            modified_attachment = None

            if diff_file['force_interdiff']:
                orig_attachment = self._get_diff_file_attachment(filediff)
                modified_attachment = \
                    self._get_diff_file_attachment(interfilediff)
            else:
                modified_attachment = self._get_diff_file_attachment(filediff)

                if not diff_file['is_new_file']:
                    orig_attachment = \
                        self._get_diff_file_attachment(filediff, False)

            diff_review_ui = None
            diff_review_ui_html = None
            orig_review_ui = None
            orig_review_ui_html = None
            modified_review_ui = None
            modified_review_ui_html = None

            if orig_attachment:
                orig_review_ui = orig_attachment.review_ui

            if modified_attachment:
                modified_review_ui = modified_attachment.review_ui

            # See if we're able to generate a diff review UI for these files.
            if (orig_review_ui and modified_review_ui and
                orig_review_ui.__class__ is modified_review_ui.__class__ and
                modified_review_ui.supports_diffing):
                # Both files are able to be diffed by this review UI.
                # We'll display a special diff review UI instead of two
                # side-by-side review UIs.
                diff_review_ui = modified_review_ui
                diff_review_ui.set_diff_against(orig_attachment)
                diff_review_ui_html = \
                    self._render_review_ui(diff_review_ui, False)
            else:
                # We won't be showing a diff of these files. Instead, just
                # grab the review UIs and render them.
                orig_review_ui_html = \
                    self._render_review_ui(orig_review_ui)
                modified_review_ui_html = \
                    self._render_review_ui(modified_review_ui)

            if (diff_review_ui_html or orig_review_ui_html or
                modified_review_ui_html):
                # Don't cache the view, because the Review UI may care about
                # state that we can't anticipate. At the least, it may have
                # comments or other data that change between renders, and we
                # don't want that to go stale.
                renderer.allow_caching = False

            renderer.extra_context.update({
                'orig_diff_file_attachment': orig_attachment,
                'modified_diff_file_attachment': modified_attachment,
                'orig_attachment_review_ui_html': orig_review_ui_html,
                'modified_attachment_review_ui_html': modified_review_ui_html,
                'diff_attachment_review_ui_html': diff_review_ui_html,
            })

        renderer.extra_context.update(
            self._get_download_links(renderer, diff_file))

        return renderer

    def get_context_data(self, **kwargs):
        """Return context data for rendering the fragment.

        Args:
            **kwargs (dict, unused):
                Keyword arguments passed to the view.

        Returns:
            dict:
            Context data providing the review request to the templates.
        """
        return {
            'review_request': self.review_request,
        }

    def _get_download_links(self, renderer, diff_file):
        """Return download URLs for the original and modified files.

        For binary files, the URLs come from the associated file
        attachments (if any). For text files, URLs to the file download
        views are built.

        Args:
            renderer (reviewboard.diffviewer.renderers.DiffRenderer):
                The renderer whose ``extra_context`` holds the file
                attachments looked up in :py:meth:`create_renderer`.

            diff_file (dict):
                The information on the diff file being rendered.

        Returns:
            dict:
            A dictionary with ``download_orig_url`` and
            ``download_modified_url`` keys (either may be ``None``).
        """
        if diff_file['binary']:
            orig_attachment = \
                renderer.extra_context['orig_diff_file_attachment']
            modified_attachment = \
                renderer.extra_context['modified_diff_file_attachment']

            if orig_attachment:
                download_orig_url = orig_attachment.get_absolute_url()
            else:
                download_orig_url = None

            if modified_attachment:
                download_modified_url = modified_attachment.get_absolute_url()
            else:
                download_modified_url = None
        else:
            filediff = diff_file['filediff']
            interfilediff = diff_file['interfilediff']
            diffset = filediff.diffset

            if interfilediff:
                # NOTE(review): in an interdiff, the left-hand side is the
                # modified file of the older revision, hence the
                # 'download-modified-file' URL name here — confirm.
                orig_url_name = 'download-modified-file'
                modified_revision = interfilediff.diffset.revision
                modified_filediff_id = interfilediff.pk
            else:
                orig_url_name = 'download-orig-file'
                modified_revision = diffset.revision
                modified_filediff_id = filediff.pk

            download_orig_url = local_site_reverse(
                orig_url_name,
                request=self.request,
                kwargs={
                    'review_request_id': self.review_request.display_id,
                    'revision': diffset.revision,
                    'filediff_id': filediff.pk,
                })

            download_modified_url = local_site_reverse(
                'download-modified-file',
                request=self.request,
                kwargs={
                    'review_request_id': self.review_request.display_id,
                    'revision': modified_revision,
                    'filediff_id': modified_filediff_id,
                })

        return {
            'download_orig_url': download_orig_url,
            'download_modified_url': download_modified_url,
        }

    def _render_review_ui(self, review_ui, inline_only=True):
        """Render the review UI for a file attachment.

        Returns the rendered HTML (marked safe), or ``None`` if there is no
        review UI or it can't be rendered inline when ``inline_only`` is set.
        """
        if review_ui and (not inline_only or review_ui.allow_inline):
            return mark_safe(review_ui.render_to_string(self.request))

        return None

    def _get_diff_file_attachment(self, filediff, use_modified=True):
        """Fetch the FileAttachment associated with a FileDiff.

        This will query for the FileAttachment based on the provided
        ``filediff``.

        If ``use_modified`` is True, the FileAttachment returned will be from
        the modified version of the new file. Otherwise, it's the original
        file that's being modified.

        If no matching FileAttachment is found or if there is more than one
        FileAttachment associated with one FileDiff, None is returned. An
        error is logged in the latter case.
        """
        if not filediff:
            return None

        try:
            return FileAttachment.objects.get_for_filediff(filediff,
                                                           use_modified)
        except ObjectDoesNotExist:
            return None
        except MultipleObjectsReturned:
            # Only one FileAttachment should be associated with a FileDiff
            logger.error('More than one FileAttachments associated with '
                         'FileDiff %s',
                         filediff.pk,
                         exc_info=1)
            return None
class ReviewsDownloadPatchErrorBundleView(DownloadPatchErrorBundleView,
                                          ReviewsDiffFragmentView):
    """A view to download the patch error bundle.

    This view allows users to download a bundle containing data to help debug
    issues when a patch fails to apply. The bundle will contain the diff, the
    original file (as returned by the SCMTool), and the rejects file, if
    applicable.

    All behavior comes from the two parent classes; this class only combines
    them for this URL.
    """
class PreviewReviewRequestEmailView(ReviewRequestViewMixin,
                                    BasePreviewEmailView):
    """Display a preview of an e-mail for a review request.

    This can be used to see what an HTML or plain text e-mail will look like
    for a newly-posted review request or an update to a review request.
    """

    build_email = staticmethod(prepare_review_request_mail)

    def get_email_data(self, request, changedesc_id=None, *args, **kwargs):
        """Return data used for the e-mail builder.

        The data returned will be passed to :py:attr:`build_email` to handle
        rendering the e-mail.

        This can also return a :py:class:`~django.http.HttpResponse`, which
        is useful for returning errors.

        Args:
            request (django.http.HttpRequest):
                The HTTP request from the client.

            changedesc_id (int, optional):
                The ID of a change description used when previewing a
                Review Request Updated e-mail.

            *args (tuple):
                Additional positional arguments passed to the handler.

            **kwargs (dict):
                Additional keyword arguments passed to the handler.

        Returns:
            object:
            The dictionary data to pass as keyword arguments to
            :py:attr:`build_email`, or an instance of
            :py:class:`~django.http.HttpResponse` to immediately return to
            the client.
        """
        review_request = self.review_request

        # Default to previewing a "new review request" e-mail, attributed
        # to the submitter.
        changedesc = None
        user = review_request.submitter
        close_type = None

        if changedesc_id:
            # A change description was given, so preview a "review request
            # updated" e-mail instead, attributed to whoever made the change.
            changedesc = get_object_or_404(review_request.changedescs,
                                           pk=changedesc_id)
            user = changedesc.get_user(review_request)

            fields_changed = changedesc.fields_changed

            if 'status' in fields_changed:
                close_type = fields_changed['status']['new'][0]

        return {
            'user': user,
            'review_request': review_request,
            'changedesc': changedesc,
            'close_type': close_type,
        }
class PreviewReviewEmailView(ReviewRequestViewMixin, BasePreviewEmailView):
    """Display a preview of an e-mail for a review.

    This can be used to see what an HTML or plain text e-mail will look like
    for a review.
    """
    build_email = staticmethod(prepare_review_published_mail)
    def get_email_data(self, request, review_id, *args, **kwargs):
        """Return data used for the e-mail builder.

        The data returned will be passed to :py:attr:`build_email` to handle
        rendering the e-mail.

        This can also return a :py:class:`~django.http.HttpResponse`, which
        is useful for returning errors.

        Args:
            request (django.http.HttpRequest):
                The HTTP request from the client.

            review_id (int):
                The ID of the review to preview. The review must belong to
                this review request.

            *args (tuple):
                Additional positional arguments passed to the handler.

            **kwargs (dict):
                Additional keyword arguments passed to the handler.

        Returns:
            object:
                The dictionary data to pass as keyword arguments to
                :py:attr:`build_email`, or an instance of
                :py:class:`~django.http.HttpResponse` to immediately return
                to the client.
        """
        review = get_object_or_404(Review,
                                   pk=review_id,
                                   review_request=self.review_request)
        return {
            'user': review.user,
            'review': review,
            'review_request': self.review_request,
            # Previews always render the e-mail sent to all recipients,
            # never the owner-only variant.
            'to_owner_only': False,
            'request': request,
        }
class PreviewReplyEmailView(ReviewRequestViewMixin, BasePreviewEmailView):
    """Display a preview of an e-mail for a reply to a review.

    This can be used to see what an HTML or plain text e-mail will look like
    for a reply to a review.
    """
    build_email = staticmethod(prepare_reply_published_mail)
    def get_email_data(self, request, review_id, reply_id, *args, **kwargs):
        """Return data used for the e-mail builder.

        The data returned will be passed to :py:attr:`build_email` to handle
        rendering the e-mail.

        This can also return a :py:class:`~django.http.HttpResponse`, which
        is useful for returning errors.

        Args:
            request (django.http.HttpRequest):
                The HTTP request from the client.

            review_id (int):
                The ID of the review the reply is for.

            reply_id (int):
                The ID of the reply to preview.

        *args (tuple):
                Additional positional arguments passed to the handler.

            **kwargs (dict):
                Additional keyword arguments passed to the handler.

        Returns:
            object:
                The dictionary data to pass as keyword arguments to
                :py:attr:`build_email`, or an instance of
                :py:class:`~django.http.HttpResponse` to immediately return
                to the client.
        """
        # The review must belong to this review request, and the reply must
        # actually be a reply to that review.
        review = get_object_or_404(Review,
                                   pk=review_id,
                                   review_request=self.review_request)
        reply = get_object_or_404(Review, pk=reply_id, base_reply_to=review)
        return {
            'user': reply.user,
            'reply': reply,
            'review': review,
            'review_request': self.review_request,
        }
class ReviewFileAttachmentView(ReviewRequestViewMixin,
                               UserProfileRequiredViewMixin,
                               View):
    """Displays a file attachment with a review UI."""
    def get(self, request, file_attachment_id, file_attachment_diff_id=None,
            *args, **kwargs):
        """Handle a HTTP GET request.

        Args:
            request (django.http.HttpRequest):
                The HTTP request from the client.

            file_attachment_id (int):
                The ID of the file attachment to review.

            file_attachment_diff_id (int, optional):
                The ID of the file attachment to diff against. It must be
                a revision in the same attachment history.

            *args (tuple):
                Positional arguments passed to the handler.

            **kwargs (dict):
                Keyword arguments passed to the handler.

        Returns:
            django.http.HttpResponse:
                The resulting HTTP response from the handler.
        """
        review_request = self.review_request
        draft = review_request.get_draft(request.user)
        # Make sure the attachment returned is part of either the review request
        # or an accessible draft.
        review_request_q = (Q(review_request=review_request) |
                            Q(inactive_review_request=review_request))
        if draft:
            review_request_q |= Q(drafts=draft) | Q(inactive_drafts=draft)
        file_attachment = get_object_or_404(
            FileAttachment,
            Q(pk=file_attachment_id) & review_request_q)
        review_ui = file_attachment.review_ui
        if not review_ui:
            # No type-specific review UI is registered for this attachment,
            # so fall back to the generic file attachment review UI.
            review_ui = FileAttachmentReviewUI(review_request, file_attachment)
        if file_attachment_diff_id:
            # The attachment to diff against must be part of the same
            # attachment history and visible to this user.
            file_attachment_revision = get_object_or_404(
                FileAttachment,
                Q(pk=file_attachment_diff_id) &
                Q(attachment_history=file_attachment.attachment_history) &
                review_request_q)
            review_ui.set_diff_against(file_attachment_revision)
        try:
            is_enabled_for = review_ui.is_enabled_for(
                user=request.user,
                review_request=review_request,
                file_attachment=file_attachment)
        except Exception as e:
            # A buggy review UI shouldn't break the page. Log the failure
            # and treat the UI as unavailable.
            logger.error('Error when calling is_enabled_for for '
                         'FileAttachmentReviewUI %r: %s',
                         review_ui, e, exc_info=1)
            is_enabled_for = False
        if review_ui and is_enabled_for:
            return review_ui.render_to_response(request)
        else:
            raise Http404
class ReviewScreenshotView(ReviewRequestViewMixin,
                           UserProfileRequiredViewMixin,
                           View):
    """Displays a review UI for a screenshot.

    Screenshots are a legacy feature, predating file attachments. While they
    can't be created anymore, this view does allow for reviewing screenshots
    uploaded in old versions.
    """
    def get(self, request, screenshot_id, *args, **kwargs):
        """Handle a HTTP GET request.

        Args:
            request (django.http.HttpRequest):
                The HTTP request from the client.

            screenshot_id (int):
                The ID of the screenshot to review.

            *args (tuple):
                Positional arguments passed to the handler.

            **kwargs (dict):
                Keyword arguments passed to the handler.

        Returns:
            django.http.HttpResponse:
                The resulting HTTP response from the handler.
        """
        review_request = self.review_request
        draft = review_request.get_draft(request.user)
        # Make sure the screenshot returned is part of either the review
        # request or an accessible draft.
        review_request_q = (Q(review_request=review_request) |
                            Q(inactive_review_request=review_request))
        if draft:
            review_request_q |= Q(drafts=draft) | Q(inactive_drafts=draft)
        screenshot = get_object_or_404(Screenshot,
                                       Q(pk=screenshot_id) & review_request_q)
        # Screenshots always use the legacy screenshot review UI.
        review_ui = LegacyScreenshotReviewUI(review_request, screenshot)
        return review_ui.render_to_response(request)
class BugURLRedirectView(ReviewRequestViewMixin, View):
    """Redirects the user to an external bug report."""
    def get(self, request, bug_id, **kwargs):
        """Handle HTTP GET requests for this view.

        Args:
            request (django.http.HttpRequest):
                The HTTP request from the client.

            bug_id (unicode):
                The ID of the bug report to redirect to.

            **kwargs (dict):
                Keyword arguments passed to the handler.

        Returns:
            django.http.HttpResponse:
                The HTTP response redirecting the client.
        """
        # Need to create a custom HttpResponse because a non-HTTP url scheme
        # will cause HttpResponseRedirect to fail with a "Disallowed Redirect".
        response = HttpResponse(status=302)
        # The repository's bug tracker URL is a format template with a '%s'
        # placeholder for the bug ID.
        response['Location'] = \
            self.review_request.repository.bug_tracker % bug_id
        return response
class BugInfoboxView(ReviewRequestViewMixin, TemplateView):
    """Displays information on a bug, for use in bug pop-up infoboxes.

    This is meant to be embedded in other pages, rather than being
    a standalone page.
    """

    template_name = 'reviews/bug_infobox.html'

    # Matches HTML entities (e.g. "&amp;") remaining after tags have been
    # stripped, so the common ones below can be restored.
    HTML_ENTITY_RE = re.compile(r'(&[a-z]+;)')

    # Entities to turn back into literal characters. The keys must be the
    # full entity text matched by HTML_ENTITY_RE; with bare characters as
    # keys (as before), no match could ever be found and the substitution
    # was a no-op.
    HTML_ENTITY_MAP = {
        '&quot;': '"',
        '&lt;': '<',
        '&gt;': '>',
        '&amp;': '&',
    }

    def get(self, request, bug_id, **kwargs):
        """Handle HTTP GET requests for this view.

        Args:
            request (django.http.HttpRequest):
                The HTTP request from the client.

            bug_id (unicode):
                The ID of the bug to view.

            **kwargs (dict):
                Keyword arguments passed to the handler.

        Returns:
            django.http.HttpResponse:
                The HTTP response to send to the client.

                If details on a bug could not be found or fetching bug
                information is not supported, this will return a
                :http:`404`.
        """
        request = self.request
        review_request = self.review_request
        repository = review_request.repository
        bug_tracker = repository.bug_tracker_service

        if not bug_tracker:
            return HttpResponseNotFound(
                _('Unable to find bug tracker service'))

        if not isinstance(bug_tracker, BugTracker):
            return HttpResponseNotFound(
                _('Bug tracker %s does not support metadata')
                % bug_tracker.name)

        # Stash these for get_context_data(), which TemplateView.get()
        # (called below) will invoke.
        self.bug_id = bug_id
        self.bug_info = bug_tracker.get_bug_info(repository, bug_id)

        if (not self.bug_info.get('summary') and
            not self.bug_info.get('description')):
            return HttpResponseNotFound(
                _('No bug metadata found for bug %(bug_id)s on bug tracker '
                  '%(bug_tracker)s') % {
                    'bug_id': bug_id,
                    'bug_tracker': bug_tracker.name,
                })

        return super(BugInfoboxView, self).get(request, **kwargs)

    def get_context_data(self, **kwargs):
        """Return context data for the template.

        Args:
            **kwargs (dict):
                Keyword arguments passed to the view.

        Returns:
            dict:
                The resulting context data for the template.
        """
        description_text_format = self.bug_info.get('description_text_format',
                                                    'plain')
        description = self.normalize_text(self.bug_info['description'],
                                          description_text_format)

        bug_url = local_site_reverse(
            'bug_url',
            args=[self.review_request.display_id, self.bug_id])

        context_data = super(BugInfoboxView, self).get_context_data(**kwargs)
        context_data.update({
            'bug_id': self.bug_id,
            'bug_url': bug_url,
            'bug_description': description,
            'bug_description_rich_text': description_text_format == 'markdown',
            'bug_status': self.bug_info['status'],
            'bug_summary': self.bug_info['summary'],
        })

        return context_data

    def normalize_text(self, text, text_format):
        """Normalize the text for display.

        Based on the text format, this will sanitize and normalize the text
        so it's suitable for rendering to HTML.

        HTML text will have tags stripped away and certain common entities
        replaced.

        Markdown text will be rendered using our default Markdown parser
        rules.

        Plain text (or any unknown text format) will simply be escaped and
        wrapped, with paragraphs left intact.

        Args:
            text (unicode):
                The text to normalize for display.

            text_format (unicode):
                The text format. This should be one of ``html``,
                ``markdown``, or ``plain``.

        Returns:
            django.utils.safestring.SafeText:
                The resulting text, safe for rendering in HTML.
        """
        if text_format == 'html':
            # We want to strip the tags away, but keep certain common entities.
            text = (
                escape(self.HTML_ENTITY_RE.sub(
                    lambda m: (self.HTML_ENTITY_MAP.get(m.group(0)) or
                               m.group(0)),
                    strip_tags(text)))
                .replace('\n\n', '<br><br>'))
        elif text_format == 'markdown':
            # This might not know every bit of Markdown that's thrown at us,
            # but we'll do the best we can.
            text = render_markdown(text)
        else:
            # Should be plain text, but don't trust it.
            text = escape(text).replace('\n\n', '<br><br>')

        return mark_safe(text)
class ReviewRequestInfoboxView(ReviewRequestViewMixin, TemplateView):
    """Display a review request info popup.

    This produces the information needed to be displayed in a summarized
    information box upon hovering over a link to a review request.

    This is meant to be embedded in other pages, rather than being
    a standalone page.
    """
    template_name = 'reviews/review_request_infobox.html'
    # Maximum number of reviews shown in the infobox.
    MAX_REVIEWS = 3
    def get_context_data(self, **kwargs):
        """Return context data for rendering the infobox template.

        Args:
            **kwargs (dict):
                Keyword arguments passed to the view.

        Returns:
            dict:
                Context data describing the review request: its label,
                the details to display (draft or public), issue and review
                counts, and the latest diff information.
        """
        review_request = self.review_request
        draft = review_request.get_draft(self.request.user)
        # We only want to show one label. If there's a draft, then that's
        # the most important information, so we'll only show that. Otherwise,
        # we'll show the submitted/discarded state.
        label = None
        if draft:
            label = ('review-request-infobox-label-draft', _('Draft'))
        elif review_request.status == ReviewRequest.SUBMITTED:
            label = ('review-request-infobox-label-submitted', _('Submitted'))
        elif review_request.status == ReviewRequest.DISCARDED:
            label = ('review-request-infobox-label-discarded', _('Discarded'))
        if label:
            label = format_html('<label class="{0}">{1}</label>', *label)
        # Fetch information on the reviews for this review request.
        review_count = (
            review_request.reviews
            .filter(public=True, base_reply_to__isnull=True)
            .count()
        )
        # Fetch information on the draft for this review request.
        diffset = None
        if draft and draft.diffset_id:
            diffset = draft.diffset
        if not diffset and review_request.diffset_history_id:
            # Fall back to the latest diff in the review request's history.
            try:
                diffset = (
                    DiffSet.objects
                    .filter(history__pk=review_request.diffset_history_id)
                    .latest()
                )
            except DiffSet.DoesNotExist:
                pass
        if diffset:
            diff_url = '%s#index_header' % local_site_reverse(
                'view-diff-revision',
                args=[review_request.display_id, diffset.revision],
                local_site=review_request.local_site)
        else:
            diff_url = None
        return {
            'review_request': review_request,
            'review_request_label': label or '',
            'review_request_details': draft or review_request,
            'issue_total_count': (review_request.issue_open_count +
                                  review_request.issue_resolved_count +
                                  review_request.issue_dropped_count +
                                  review_request.issue_verifying_count),
            'review_count': review_count,
            'diffset': diffset,
            'diff_url': diff_url,
        }
class DownloadDiffFileView(ReviewRequestViewMixin, View):
    """Downloads an original or modified file from a diff.

    This will fetch the file from a FileDiff, optionally patching it,
    and return the result as an HttpResponse.
    """
    # Serve the original (pre-patch) version of the file.
    TYPE_ORIG = 0
    # Serve the modified (patched) version of the file.
    TYPE_MODIFIED = 1
    # Which version this view serves (one of the TYPE_* values above).
    file_type = TYPE_ORIG
    def get(self, request, revision, filediff_id, *args, **kwargs):
        """Handle HTTP GET requests for this view.

        Args:
            request (django.http.HttpRequest):
                The HTTP request from the client.

            revision (int):
                The revision of the diff to download the file from.

            filediff_id (int):
                The ID of the FileDiff corresponding to the file to
                download.

            *args (tuple):
                Positional arguments passed to the handler.

            **kwargs (dict):
                Keyword arguments passed to the handler.

        Returns:
            django.http.HttpResponse:
                The HTTP response to send to the client.

        Raises:
            django.http.Http404:
                The FileDiff could not be found, or the original file
                could not be retrieved.
        """
        review_request = self.review_request
        draft = review_request.get_draft(request.user)
        diffset = self.get_diff(revision, draft)
        filediff = get_object_or_404(diffset.files, pk=filediff_id)
        try:
            data = get_original_file(filediff=filediff,
                                     request=request)
        except FileNotFoundError:
            logger.exception(
                'Could not retrieve file "%s" (revision %s) for filediff '
                'ID %s',
                filediff.dest_detail, revision, filediff_id)
            raise Http404
        if self.file_type == self.TYPE_MODIFIED:
            # Apply the FileDiff's patch to produce the modified version.
            data = get_patched_file(source_data=data,
                                    filediff=filediff,
                                    request=request)
        # Decode using the encodings configured for this FileDiff, then
        # serve the result as UTF-8 plain text.
        encoding_list = get_filediff_encodings(filediff)
        data = convert_to_unicode(data, encoding_list)[1]
        return HttpResponse(data, content_type='text/plain; charset=utf-8')
| mit |
morta-code/YAX | setup.py | 1 | 1245 | from setuptools import setup, find_packages
# To use a consistent encoding
from codecs import open
from os import path
# Directory containing this setup script.
here = path.abspath(path.dirname(__file__))
# Get the long description from the README file
with open(path.join(here, 'README.rst'), encoding='utf-8') as f:
    long_description = f.read()
# Package metadata for distribution (PyPI).
setup(
    name='YAX',
    version='1.2.0',
    packages=['yax'],
    url='https://github.com/morta-code/YAX',
    license='LGPLv3',
    author='Móréh Tamás, MTA-PPKE-NLPG',
    author_email='morta@digitus.itk.ppke.hu',
    description='Yet Another XML parser with the power of event-based memory-safe mechanism.',
    long_description=long_description,
    keywords="xml lxml parser event-based record-oriented",
    classifiers=[
        "Development Status :: 5 - Production/Stable",
        "Environment :: Console",
        "Intended Audience :: Information Technology",
        "Intended Audience :: Science/Research",
        "Intended Audience :: Developers",
        "License :: OSI Approved :: GNU Lesser General Public License v3 (LGPLv3)",
        "Programming Language :: Python :: 3",
        "Topic :: Software Development :: Libraries :: Python Modules",
        "Topic :: Text Processing :: Markup :: XML"
    ]
)
| gpl-3.0 |
lutianming/leetcode | reorder_list.py | 1 | 1288 | # Definition for singly-linked list.
class ListNode:
    """A node in a singly-linked list."""

    def __init__(self, x):
        """Store the payload; the node starts with no successor."""
        self.next = None
        self.val = x
class Solution:
    """Reorder a singly-linked list as L0 -> Ln -> L1 -> Ln-1 -> ... in place."""

    # @param head, a ListNode
    # @return nothing
    def reorderList(self, head):
        """Interleave the list from both ends.

        Splits the list at its midpoint, reverses the back half, and
        weaves the two halves together. Runs in O(n) time with O(1)
        extra space. Returns the head node.
        """
        if head is None or head.next is None:
            return head
        # Locate the midpoint with a fast/slow pointer pair.
        runner = walker = head
        while runner and runner.next:
            runner = runner.next.next
            walker = walker.next
        # Detach everything after the midpoint and reverse it.
        back = self.reverse(walker.next)
        walker.next = None
        # Weave the reversed back half into the front half.
        front = head
        while back:
            front_rest = front.next
            back_rest = back.next
            front.next = back
            back.next = front_rest
            front = front_rest
            back = back_rest
        return head

    def reverse(self, head):
        """Reverse a linked list iteratively; return the new head."""
        if head is None:
            return head
        pending = head.next
        head.next = None
        while pending:
            upcoming = pending.next
            pending.next = head
            head = pending
            pending = upcoming
        return head
# Ad-hoc smoke test: build a short list, reorder it, and print the values
# in their final order.
head = ListNode(1)
head.next = ListNode(2)
# node = head.next
# node.next = ListNode(3)
# node = node.next
# node.next = ListNode(4)
solution = Solution()
head = solution.reorderList(head)
while head:
    print(head.val)
    head = head.next
| mit |
magyarm/periphondemand-code | src/bin/code/intercon.py | 1 | 5000 | #! /usr/bin/python
# -*- coding: utf-8 -*-
#-----------------------------------------------------------------------------
# Name: Intercon.py
# Purpose:
# Author: Fabien Marteau <fabien.marteau@armadeus.com>
# Created: 13/05/2008
#-----------------------------------------------------------------------------
# Copyright (2008) Armadeus Systems
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
#
#-----------------------------------------------------------------------------
# Revision list :
#
# Date By Changes
#
#-----------------------------------------------------------------------------
__doc__ = ""
__version__ = "1.0.0"
__versionTime__ = "13/05/2008"
__author__ = "Fabien Marteau <fabien.marteau@armadeus.com>"
import periphondemand.bin.define
from periphondemand.bin.define import *
from periphondemand.bin.utils.settings import Settings
from periphondemand.bin.utils.error import Error
from periphondemand.bin.utils import wrappersystem as sy
from periphondemand.bin.utils.display import Display
from periphondemand.bin.core.component import Component
from periphondemand.bin.core.port import Port
from periphondemand.bin.core.interface import Interface
from periphondemand.bin.core.hdl_file import Hdl_file
# Shared helper instances used throughout this module: project settings
# access and console/message output.
settings = Settings()
display = Display()
class Intercon(Component):
    """ A generated component connecting one master interface to its
        slaves (the bus interconnect).
    """
    def __init__(self,masterinterface,project):
        """ Build the intercon for the given master interface: write its
            XML description, then generate the HDL code for its bus.
        """
        masterinstancename = masterinterface.getParent().getInstanceName()
        masterinterfacename = masterinterface.getName()
        Component.__init__(self,project)
        self.interfaceslist = []
        self.addNode(nodename="component")
        # Re-fetch the master interface through the parent project, so we
        # work on the instance the project owns.
        masterinstance = self.parent.getInstance(masterinstancename)
        masterinterface = masterinstance.getInterface(masterinterfacename)
        # Write xml description
        self.generateXML(masterinterface)
        # Write Code for component
        masterinterface.getBus().generateIntercon(self)
        display.msg("Intercon with name : "+self.getInstanceName()+" Done")
    def generateXML(self,masterinterface):
        """ Generate the intercon's XML description: its name/description
            and one intercon-class interface (with inverted-direction
            ports) per connected master or slave interface.
        """
        masterinstance = masterinterface.getParent()
        # set name and description
        self.setName(str(masterinstance.getInstanceName()) \
                + "_" \
                + str(masterinterface.getName()))
        self.setInstanceName(str(masterinstance.getInstanceName())\
                + "_"\
                +str(masterinterface.getName())\
                + "_intercon")
        self.setDescription("Connect slaves to "\
                + masterinterface.getName()\
                + " from "\
                + masterinstance.getInstanceName())
        # Save to make directories
        self.saveInstance()
        #####
        # Create interface for each component connected on intercon
        # for slaves and master:
        slaveslist = masterinterface.getSlavesList()
        interfaceslist = [slave.getInterface() for slave in slaveslist]
        interfaceslist.append(masterinterface)
        # For each slave and master interface, create interface in intercon
        for interface in interfaceslist:
            instance = interface.getParent()
            #######
            # bus (wishbone,...)
            bus = Interface(self,
                            name=instance.getInstanceName()\
                            +"_"+interface.getName())
            bus.setClass("intercon")
            # Adding bus interface on intercon
            self.addInterface(bus)
            #Creating port with invert direction value
            for port in interface.getPortsList():
                newport = Port(bus,
                               name=instance.getInstanceName()\
                               +"_"+port.getName())
                newport.setDir(self.invertDir(port.getDir()))
                newport.setSize(port.getSize())
                # adding port on bus interface
                bus.addPort(newport)
                #connect port new port on instance interface
                port.connectAllPin(newport)
        bus.setClass("intercon")
        self.setNum("0")
| lgpl-2.1 |
aimas/TuniErp-8.0 | openerp/tools/cache.py | 226 | 6865 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2013 OpenERP (<http://www.openerp.com>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
# decorator makes wrappers that have the same API as their wrapped function;
# this is important for the openerp.api.guess() that relies on signatures
from collections import defaultdict
from decorator import decorator
from inspect import getargspec
import logging
_logger = logging.getLogger(__name__)
class ormcache_counter(object):
    """Hit/miss/error statistics for one cached method."""
    __slots__ = ['hit', 'miss', 'err']
    def __init__(self):
        # All counters start from zero.
        self.hit = self.miss = self.err = 0
    @property
    def ratio(self):
        """Hit ratio as a percentage of all hit/miss lookups."""
        lookups = self.hit + self.miss
        return 100.0 * self.hit / (lookups or 1)
# Statistic counters dictionary, mapping (dbname, model name, method) to an
# ormcache_counter. Entries are created lazily on first access.
STAT = defaultdict(ormcache_counter)
class ormcache(object):
    """ LRU cache decorator for orm methods. """
    def __init__(self, skiparg=2, size=8192, multi=None, timeout=None):
        # Only ``skiparg`` is used here; the remaining parameters are
        # accepted for backward compatibility and ignored (the cache itself
        # lives on the model's registry pool).
        self.skiparg = skiparg
    def __call__(self, method):
        # Wrap ``method`` so every call goes through lookup(). decorator()
        # preserves the original signature; clear_cache is exposed on the
        # wrapper for invalidation.
        self.method = method
        lookup = decorator(self.lookup, method)
        lookup.clear_cache = self.clear
        return lookup
    def lru(self, model):
        # Return (cache mapping, key prefix, statistics counter) for the
        # given model's registry.
        counter = STAT[(model.pool.db_name, model._name, self.method)]
        return model.pool.cache, (model._name, self.method), counter
    def lookup(self, method, *args, **kwargs):
        # args[0] is the model (the decorated method's self). The cache key
        # is the per-model prefix plus the arguments after ``skiparg``.
        d, key0, counter = self.lru(args[0])
        key = key0 + args[self.skiparg:]
        try:
            r = d[key]
            counter.hit += 1
            return r
        except KeyError:
            # Not cached yet: compute, store, and return the value.
            counter.miss += 1
            value = d[key] = self.method(*args, **kwargs)
            return value
        except TypeError:
            # Unhashable arguments can't form a cache key; call the method
            # without caching.
            counter.err += 1
            return self.method(*args, **kwargs)
    def clear(self, model, *args):
        """ Clear all cache entries for this method; any extra *args are
        deprecated and ignored. """
        d, key0, _ = self.lru(model)
        if args:
            _logger.warn("ormcache.clear arguments are deprecated and ignored "
                         "(while clearing caches on (%s).%s)",
                         model._name, self.method.__name__)
        d.clear_prefix(key0)
        model.pool._any_cache_cleared = True
class ormcache_context(ormcache):
    """ LRU cache decorator that additionally keys on a whitelist of
    entries from the ``context`` argument. """
    def __init__(self, skiparg=2, size=8192, accepted_keys=()):
        super(ormcache_context,self).__init__(skiparg,size)
        # Only these context keys participate in the cache key.
        self.accepted_keys = accepted_keys
    def __call__(self, method):
        # remember which argument is context
        args = getargspec(method)[0]
        self.context_pos = args.index('context')
        return super(ormcache_context, self).__call__(method)
    def lookup(self, method, *args, **kwargs):
        d, key0, counter = self.lru(args[0])
        # Note. The decorator() wrapper (used in __call__ above) will resolve
        # arguments, and pass them positionally to lookup(). This is why context
        # is not passed through kwargs!
        if self.context_pos < len(args):
            context = args[self.context_pos] or {}
        else:
            context = kwargs.get('context') or {}
        # Keep only the accepted context keys, as sorted-by-whitelist
        # (key, value) pairs.
        ckey = [(k, context[k]) for k in self.accepted_keys if k in context]
        # Beware: do not take the context from args!
        key = key0 + args[self.skiparg:self.context_pos] + tuple(ckey)
        try:
            r = d[key]
            counter.hit += 1
            return r
        except KeyError:
            counter.miss += 1
            value = d[key] = self.method(*args, **kwargs)
            return value
        except TypeError:
            # Unhashable key parts: call the method without caching.
            counter.err += 1
            return self.method(*args, **kwargs)
class ormcache_multi(ormcache):
    """ LRU cache decorator for methods whose argument at position ``multi``
    is a list of ids; each id is cached individually. """
    def __init__(self, skiparg=2, size=8192, multi=3):
        assert skiparg <= multi
        super(ormcache_multi, self).__init__(skiparg, size)
        self.multi = multi
    def lookup(self, method, *args, **kwargs):
        d, key0, counter = self.lru(args[0])
        # Base key: every cached argument except the ids at position multi;
        # each id is appended individually below.
        base_key = key0 + args[self.skiparg:self.multi] + args[self.multi+1:]
        ids = args[self.multi]
        result = {}
        missed = []
        # first take what is available in the cache
        for i in ids:
            key = base_key + (i,)
            try:
                result[i] = d[key]
                counter.hit += 1
            except Exception:
                # Missing (KeyError) or unhashable (TypeError) entry: the
                # id will be recomputed below.
                counter.miss += 1
                missed.append(i)
        if missed:
            # call the method for the ids that were not in the cache
            args = list(args)
            args[self.multi] = missed
            result.update(method(*args, **kwargs))
            # store those new results back in the cache
            for i in missed:
                key = base_key + (i,)
                d[key] = result[i]
        return result
class dummy_cache(object):
    """No-op stand-in for the cache decorators: nothing is ever cached."""
    def __init__(self, *args, **kwargs):
        # Accept (and discard) any decorator configuration.
        pass
    def __call__(self, fn):
        # Return the function unchanged, but keep the clear_cache API so
        # callers can still "invalidate" the cache.
        fn.clear_cache = self.clear
        return fn
    def clear(self, *args, **kwargs):
        # Nothing is cached, so there is nothing to clear.
        pass
def log_ormcache_stats(sig=None, frame=None):
    """ Log statistics of ormcache usage by database, model, and method.

    The ``(sig, frame)`` parameters allow this to be installed as a signal
    handler; both are ignored.
    """
    from openerp.modules.registry import RegistryManager
    import threading
    me = threading.currentThread()
    me_dbname = me.dbname
    # Count the current number of cache entries per (dbname, model, method).
    entries = defaultdict(int)
    for dbname, reg in RegistryManager.registries.iteritems():
        for key in reg.cache.iterkeys():
            entries[(dbname,) + key[:2]] += 1
    for key, count in sorted(entries.items()):
        dbname, model_name, method = key
        # Temporarily switch the thread's dbname so each line is logged in
        # the context of its own database; restored below.
        me.dbname = dbname
        stat = STAT[key]
        _logger.info("%6d entries, %6d hit, %6d miss, %6d err, %4.1f%% ratio, for %s.%s",
                     count, stat.hit, stat.miss, stat.err, stat.ratio, model_name, method.__name__)
    me.dbname = me_dbname
# For backward compatibility: ``cache`` is the historical name of the plain
# ormcache decorator.
cache = ormcache
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
H1ghT0p/kitsune | kitsune/users/migrations/0008_auto_20150610_2214.py | 18 | 3304 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
import kitsune.sumo.models
class Migration(migrations.Migration):
    """Add ``Profile.involved_from`` and refresh the locale choice list.

    NOTE(review): the choices list below looks auto-generated from the
    locale configuration -- confirm before editing it by hand.
    """
    dependencies = [
        ('users', '0007_auto_add_screen_share_permission'),
    ]
    operations = [
        # New optional date field recording when the user got involved.
        migrations.AddField(
            model_name='profile',
            name='involved_from',
            field=models.DateField(null=True, verbose_name='Involved with Mozilla from', blank=True),
            preserve_default=True,
        ),
        # Rebuild the locale field with the updated set of choices.
        migrations.AlterField(
            model_name='profile',
            name='locale',
            field=kitsune.sumo.models.LocaleField(default=b'en-US', max_length=7, verbose_name='Preferred language', choices=[(b'af', 'Afrikaans'), (b'ar', '\u0639\u0631\u0628\u064a'), (b'az', 'Az\u0259rbaycanca'), (b'bg', '\u0411\u044a\u043b\u0433\u0430\u0440\u0441\u043a\u0438'), (b'bn-BD', '\u09ac\u09be\u0982\u09b2\u09be (\u09ac\u09be\u0982\u09b2\u09be\u09a6\u09c7\u09b6)'), (b'bn-IN', '\u09ac\u09be\u0982\u09b2\u09be (\u09ad\u09be\u09b0\u09a4)'), (b'bs', 'Bosanski'), (b'ca', 'catal\xe0'), (b'cs', '\u010ce\u0161tina'), (b'da', 'Dansk'), (b'de', 'Deutsch'), (b'ee', '\xc8\u028begbe'), (b'el', '\u0395\u03bb\u03bb\u03b7\u03bd\u03b9\u03ba\u03ac'), (b'en-US', 'English'), (b'es', 'Espa\xf1ol'), (b'et', 'eesti keel'), (b'eu', 'Euskara'), (b'fa', '\u0641\u0627\u0631\u0633\u06cc'), (b'fi', 'suomi'), (b'fr', 'Fran\xe7ais'), (b'fy-NL', 'Frysk'), (b'ga-IE', 'Gaeilge (\xc9ire)'), (b'gl', 'Galego'), (b'gu-IN', '\u0a97\u0ac1\u0a9c\u0ab0\u0abe\u0aa4\u0ac0'), (b'ha', '\u0647\u064e\u0631\u0652\u0634\u064e\u0646 \u0647\u064e\u0648\u0652\u0633\u064e'), (b'he', '\u05e2\u05d1\u05e8\u05d9\u05ea'), (b'hi-IN', '\u0939\u093f\u0928\u094d\u0926\u0940 (\u092d\u093e\u0930\u0924)'), (b'hr', 'Hrvatski'), (b'hu', 'Magyar'), (b'id', 'Bahasa Indonesia'), (b'ig', 'As\u1ee5s\u1ee5 Igbo'), (b'it', 'Italiano'), (b'ja', '\u65e5\u672c\u8a9e'), (b'km', '\u1781\u17d2\u1798\u17c2\u179a'), (b'kn', '\u0c95\u0ca8\u0ccd\u0ca8\u0ca1'), (b'ko', '\ud55c\uad6d\uc5b4'), (b'ln', 'Ling\xe1la'), (b'lt', 'lietuvi\u0173 kalba'), (b'ml', '\u0d2e\u0d32\u0d2f\u0d3e\u0d33\u0d02'), (b'ne-NP', '\u0928\u0947\u092a\u093e\u0932\u0940'), (b'nl', 'Nederlands'), (b'no', 'Norsk'), (b'pl', 'Polski'), (b'pt-BR', 'Portugu\xeas (do Brasil)'), (b'pt-PT', 'Portugu\xeas (Europeu)'), (b'ro', 'rom\xe2n\u0103'), (b'ru', '\u0420\u0443\u0441\u0441\u043a\u0438\u0439'), (b'si', '\u0dc3\u0dd2\u0d82\u0dc4\u0dbd'), (b'sk', 'sloven\u010dina'), (b'sl', 'sloven\u0161\u010dina'), (b'sq', 'Shqip'), (b'sr-Cyrl', '\u0421\u0440\u043f\u0441\u043a\u0438'), (b'sw', 
            'Kiswahili'), (b'sv', 'Svenska'), (b'ta', '\u0ba4\u0bae\u0bbf\u0bb4\u0bcd'), (b'ta-LK', '\u0ba4\u0bae\u0bbf\u0bb4\u0bcd (\u0b87\u0bb2\u0b99\u0bcd\u0b95\u0bc8)'), (b'te', '\u0c24\u0c46\u0c32\u0c41\u0c17\u0c41'), (b'th', '\u0e44\u0e17\u0e22'), (b'tn', 'Setswana'), (b'tr', 'T\xfcrk\xe7e'), (b'uk', '\u0423\u043a\u0440\u0430\u0457\u043d\u0441\u044c\u043a\u0430'), (b'ur', '\u0627\u064f\u0631\u062f\u0648'), (b'vi', 'Ti\u1ebfng Vi\u1ec7t'), (b'wo', 'Wolof'), (b'xh', 'isiXhosa'), (b'yo', '\xe8d\xe8 Yor\xf9b\xe1'), (b'zh-CN', '\u4e2d\u6587 (\u7b80\u4f53)'), (b'zh-TW', '\u6b63\u9ad4\u4e2d\u6587 (\u7e41\u9ad4)'), (b'zu', 'isiZulu')]),
            preserve_default=True,
        ),
    ]
| bsd-3-clause |
calvingit21/h2o-2 | R/tests/run.py | 8 | 49355 | #!/usr/bin/python
import sys
import os
import shutil
import signal
import time
import random
import getpass
import re
import subprocess
import ConfigParser
class H2OUseCloudNode:
    """
    One node of an H2O cloud that was specified by the user.

    The node's lifecycle is managed externally, so the start/stop/terminate
    operations are deliberate no-ops; this object only remembers where the
    node lives.

    use_ip: The given ip of the cloud.
    use_port: The given port of the cloud.
    """

    def __init__(self, use_ip, use_port):
        self.use_ip = use_ip
        self.use_port = use_port

    def start(self):
        """No-op: the user manages this node."""
        pass

    def stop(self):
        """No-op: the user manages this node."""
        pass

    def terminate(self):
        """No-op: the user manages this node."""
        pass

    def get_ip(self):
        """Return the node's IP address."""
        return self.use_ip

    def get_port(self):
        """Return the node's port."""
        return self.use_port
class H2OUseCloud:
    """
    An H2O cloud that was specified by the user.

    The cloud already exists, so the lifecycle operations are no-ops. It is
    modeled as a single H2OUseCloudNode at the given address.
    """

    def __init__(self, cloud_num, use_ip, use_port):
        self.cloud_num = cloud_num
        self.use_ip = use_ip
        self.use_port = use_port
        self.nodes = [H2OUseCloudNode(self.use_ip, self.use_port)]

    def start(self):
        """No-op: the user manages this cloud."""
        pass

    def wait_for_cloud_to_be_up(self):
        """No-op: the cloud is assumed to already be up."""
        pass

    def stop(self):
        """No-op: the user manages this cloud."""
        pass

    def terminate(self):
        """No-op: the user manages this cloud."""
        pass

    def get_ip(self):
        """Return the cloud's IP address (from its single node)."""
        return self.nodes[0].get_ip()

    def get_port(self):
        """Return the cloud's port (from its single node)."""
        return self.nodes[0].get_port()
class H2OCloudNode:
    """
    A class representing one node in an H2O cloud.
    Note that the base_port is only a request for H2O.
    H2O may choose to ignore our request and pick any port it likes.
    So we have to scrape the real port number from stdout as part of cloud startup.

    port: The actual port chosen at run time.
    pid: The process id of the node.
    output_file_name: Where stdout and stderr go. They are merged.
    child: subprocess.Popen object.
    terminated: Only from a signal. Not normal shutdown.
    """

    def __init__(self, cloud_num, nodes_per_cloud, node_num, cloud_name, h2o_jar, ip, base_port, xmx, output_dir):
        """
        Create a node in a cloud.

        @param cloud_num: Dense 0-based cloud index number.
        @param nodes_per_cloud: How many H2O java instances are in a cloud. Clouds are symmetric.
        @param node_num: This node's dense 0-based node index number.
        @param cloud_name: The H2O -name command-line argument.
        @param h2o_jar: Path to H2O jar file.
        @param ip: IP address to report for this node (stored, not passed to the JVM here).
        @param base_port: The starting port number we are trying to get our nodes to listen on.
        @param xmx: Java memory parameter.
        @param output_dir: The directory where we can create an output file for this process.
        @return: The node object.
        """
        self.cloud_num = cloud_num
        self.nodes_per_cloud = nodes_per_cloud
        self.node_num = node_num
        self.cloud_name = cloud_name
        self.h2o_jar = h2o_jar
        self.ip = ip
        self.base_port = base_port
        self.xmx = xmx
        self.output_dir = output_dir

        # Filled in as the node starts up.
        self.port = -1
        self.pid = -1
        self.output_file_name = ""
        self.child = None
        self.terminated = False

        # Choose my base port number here. All math is done here. Every node has the same
        # base_port and calculates it's own my_base_port.
        # Each node is given a window of ports_per_node ports, laid out densely
        # across clouds first, then nodes within a cloud.
        ports_per_node = 2
        self.my_base_port = \
            self.base_port + \
            (self.cloud_num * self.nodes_per_cloud * ports_per_node) + \
            (self.node_num * ports_per_node)

    def start(self):
        """
        Start one node of H2O.
        (Stash away the self.child and self.pid internally here.)

        @return: none
        """
        # there is no hdfs currently in ec2, except s3n/hdfs
        # the core-site.xml provides s3n info
        # it's possible that we can just always hardware the hdfs version
        # to match the cdh4 cluster we're hardwiring tests to
        # i.e. it won't make s3n/s3 break on ec2
        cmd = ["java",
               "-Xmx" + self.xmx,
               "-ea",
               "-jar", self.h2o_jar,
               "-name", self.cloud_name,
               "-baseport", str(self.my_base_port),
               # apparently we don't need -hdfs=hdfs://<namenode> anymore on the java startup?
               "-hdfs_version", "cdh4"]

        # Add S3N credentials to cmd if they exist.
        # Only attached when the user has an EC2 core-site.xml on this machine.
        ec2_hdfs_config_file_name = os.path.expanduser("~/.ec2/core-site.xml")
        if (os.path.exists(ec2_hdfs_config_file_name)):
            cmd.append("-hdfs_config")
            cmd.append(ec2_hdfs_config_file_name)

        # stdout and stderr are merged into a single per-node output file;
        # the scrape_* methods below read this file back.
        self.output_file_name = \
            os.path.join(self.output_dir, "java_" + str(self.cloud_num) + "_" + str(self.node_num) + ".out.txt")
        f = open(self.output_file_name, "w")
        self.child = subprocess.Popen(args=cmd,
                                      stdout=f,
                                      stderr=subprocess.STDOUT,
                                      cwd=self.output_dir)
        self.pid = self.child.pid
        print("+ CMD: " + ' '.join(cmd))

    def scrape_port_from_stdout(self):
        """
        Look at the stdout log and figure out which port the JVM chose.
        Write this to self.port.
        This call is blocking.
        Exit if this fails.

        @return: none
        """
        # Re-read the output file from the top once per second, up to 30 times,
        # bailing out early if a signal terminated this node.
        retries = 30
        while (retries > 0):
            if (self.terminated):
                return
            f = open(self.output_file_name, "r")
            s = f.readline()
            while (len(s) > 0):
                if (self.terminated):
                    return
                match_groups = re.search(r"Listening for HTTP and REST traffic on http://(\S+):(\d+)", s)
                if (match_groups is not None):
                    port = match_groups.group(2)
                    if (port is not None):
                        # NOTE: self.port is stored as the scraped string here,
                        # not converted to int.
                        self.port = port
                        f.close()
                        print("H2O Cloud {} Node {} started with output file {}".format(self.cloud_num,
                                                                                        self.node_num,
                                                                                        self.output_file_name))
                        return

                s = f.readline()

            f.close()
            retries -= 1
            if (self.terminated):
                return
            time.sleep(1)

        print("")
        print("ERROR: Too many retries starting cloud.")
        print("")
        sys.exit(1)

    def scrape_cloudsize_from_stdout(self, nodes_per_cloud):
        """
        Look at the stdout log and wait until the cloud of proper size is formed.
        This call is blocking.
        Exit if this fails.

        @param nodes_per_cloud: The cloud size to wait for.
        @return: none
        """
        # Same polling scheme as scrape_port_from_stdout, but with a longer
        # 60-second budget since cloud formation follows port binding.
        retries = 60
        while (retries > 0):
            if (self.terminated):
                return
            f = open(self.output_file_name, "r")
            s = f.readline()
            while (len(s) > 0):
                if (self.terminated):
                    return
                match_groups = re.search(r"Cloud of size (\d+) formed", s)
                if (match_groups is not None):
                    size = match_groups.group(1)
                    if (size is not None):
                        size = int(size)
                        if (size == nodes_per_cloud):
                            f.close()
                            return

                s = f.readline()

            f.close()
            retries -= 1
            if (self.terminated):
                return
            time.sleep(1)

        print("")
        print("ERROR: Too many retries starting cloud.")
        print("")
        sys.exit(1)

    def stop(self):
        """
        Normal node shutdown.
        Ignore failures for now.

        @return: none
        """
        if (self.pid > 0):
            print("Killing JVM with PID {}".format(self.pid))
            try:
                self.child.terminate()
            except OSError:
                pass
            self.pid = -1

    def terminate(self):
        """
        Terminate a running node. (Due to a signal.)

        @return: none
        """
        # Setting the flag first makes the scrape_* polling loops bail out.
        self.terminated = True
        self.stop()

    def get_ip(self):
        """ Return the ip address this node is really listening on. """
        return self.ip

    def get_port(self):
        """ Return the port this node is really listening on. """
        return self.port

    def __str__(self):
        s = ""
        s += " node {}\n".format(self.node_num)
        s += " xmx: {}\n".format(self.xmx)
        s += " my_base_port: {}\n".format(self.my_base_port)
        s += " port: {}\n".format(self.port)
        s += " pid: {}\n".format(self.pid)
        return s
class H2OCloud:
    """
    One runner-managed H2O cloud: a uniquely named group of H2OCloudNode
    instances that are launched, waited on, and torn down together.
    """

    def __init__(self, cloud_num, nodes_per_cloud, h2o_jar, base_port, xmx, output_dir):
        """
        Create a cloud.
        See node definition above for argument descriptions.

        @return: The cloud object.
        """
        self.cloud_num = cloud_num
        self.nodes_per_cloud = nodes_per_cloud
        self.h2o_jar = h2o_jar
        self.base_port = base_port
        self.xmx = xmx
        self.output_dir = output_dir

        # Build a cloud name that is unlikely to collide with other runs:
        # a random seven-digit number plus the whitespace-stripped user name.
        n = random.randint(1000000, 9999999)
        user = ''.join(getpass.getuser().split())
        self.cloud_name = "H2O_runit_{}_{}".format(user, n)

        self.jobs_run = 0
        self.nodes = [
            H2OCloudNode(self.cloud_num, self.nodes_per_cloud, idx,
                         self.cloud_name,
                         self.h2o_jar,
                         "127.0.0.1", self.base_port,
                         self.xmx, self.output_dir)
            for idx in range(self.nodes_per_cloud)
        ]

    def start(self):
        """
        Start H2O cloud.
        The cloud is not up until wait_for_cloud_to_be_up() is called and returns.

        @return: none
        """
        for member in self.nodes:
            member.start()

    def wait_for_cloud_to_be_up(self):
        """
        Blocking call ensuring the cloud is available.

        @return: none
        """
        self._scrape_port_from_stdout()
        self._scrape_cloudsize_from_stdout()

    def stop(self):
        """
        Normal cloud shutdown.

        @return: none
        """
        for member in self.nodes:
            member.stop()

    def terminate(self):
        """
        Terminate a running cloud. (Due to a signal.)

        @return: none
        """
        for member in self.nodes:
            member.terminate()

    def get_ip(self):
        """ Return an ip to use to talk to this cloud. """
        return self.nodes[0].get_ip()

    def get_port(self):
        """ Return a port to use to talk to this cloud. """
        return self.nodes[0].get_port()

    def _scrape_port_from_stdout(self):
        # Blocks until every node has reported its chosen port.
        for member in self.nodes:
            member.scrape_port_from_stdout()

    def _scrape_cloudsize_from_stdout(self):
        # Blocks until every node has seen the full-size cloud form.
        for member in self.nodes:
            member.scrape_cloudsize_from_stdout(self.nodes_per_cloud)

    def __str__(self):
        pieces = ["cloud {}\n".format(self.cloud_num),
                  " name: {}\n".format(self.cloud_name),
                  " jobs_run: {}\n".format(self.jobs_run)]
        pieces.extend(str(member) for member in self.nodes)
        return "".join(pieces)
class Test:
    """
    A class representing one Test.

    cancelled: Don't start this test.
    terminated: Test killed due to signal.
    returncode: Exit code of child.
    pid: Process id of the test.
    ip: IP of cloud to run test.
    port: Port of cloud to run test.
    child: subprocess.Popen object.
    """

    @staticmethod
    def test_did_not_complete():
        """
        returncode marker to know if the test ran or not.
        """
        return -9999999

    def __init__(self, test_dir, test_short_dir, test_name, output_dir):
        """
        Create a Test.

        @param test_dir: Full absolute path to the test directory.
        @param test_short_dir: Path from h2o/R/tests to the test directory.
        @param test_name: Test filename with the directory removed.
        @param output_dir: The directory where we can create an output file for this process.
        @return: The test object.
        """
        self.test_dir = test_dir
        self.test_short_dir = test_short_dir
        self.test_name = test_name
        self.output_dir = output_dir
        self.output_file_name = ""

        self.cancelled = False
        self.terminated = False
        self.returncode = Test.test_did_not_complete()
        self.start_seconds = -1
        self.pid = -1
        self.ip = None
        self.port = -1
        self.child = None

    def start(self, ip, port):
        """
        Start the test in a non-blocking fashion.

        @param ip: IP address of cloud to run on.
        @param port: Port of cloud to run on.
        @return: none
        """
        if (self.cancelled or self.terminated):
            return

        self.start_seconds = time.time()
        self.ip = ip
        self.port = port

        cmd = ["R",
               "-f",
               self.test_name,
               "--args",
               self.ip + ":" + str(self.port)]
        # Flatten the test's relative directory into the file name so every
        # output file can live in the single flat output_dir.
        test_short_dir_with_no_slashes = re.sub(r'[\\/]', "_", self.test_short_dir)
        self.output_file_name = \
            os.path.join(self.output_dir, test_short_dir_with_no_slashes + "_" + self.test_name + ".out.txt")
        f = open(self.output_file_name, "w")
        self.child = subprocess.Popen(args=cmd,
                                      stdout=f,
                                      stderr=subprocess.STDOUT,
                                      cwd=self.test_dir)
        self.pid = self.child.pid

    def is_completed(self):
        """
        Check if test has completed.

        This has side effects and MUST be called for the normal test queueing to work.
        Specifically, child.poll().

        @return: True if the test completed, False otherwise.
        """
        child = self.child
        if (child is None):
            return False
        child.poll()
        if (child.returncode is None):
            return False
        self.pid = -1
        self.returncode = child.returncode
        return True

    def cancel(self):
        """
        Mark this test as cancelled so it never tries to start.

        @return: none
        """
        if (self.pid <= 0):
            self.cancelled = True

    def terminate(self):
        """
        Terminate a running test. (Due to a signal.)

        @return: none
        """
        self.terminated = True
        if (self.pid > 0):
            print("Killing Test with PID {}".format(self.pid))
            try:
                self.child.terminate()
            except OSError:
                pass
        self.pid = -1

    def get_test_dir_file_name(self):
        """
        @return: The full absolute path of this test.
        """
        return os.path.join(self.test_dir, self.test_name)

    def get_test_name(self):
        """
        @return: The file name (no directory) of this test.
        """
        return self.test_name

    def get_seed_used(self):
        """
        @return: The seed used by this test.
        """
        return self._scrape_output_for_seed()

    def get_ip(self):
        """
        @return: IP of the cloud where this test ran.
        """
        return self.ip

    def get_port(self):
        """
        @return: Integer port number of the cloud where this test ran.
        """
        return int(self.port)

    def get_passed(self):
        """
        @return: True if the test passed, False otherwise.
        """
        return (self.returncode == 0)

    def get_nopass(self):
        """
        Some tests are known not to pass and even if they don't pass we don't want
        to fail the overall regression PASS/FAIL status.

        @return: True if the test has been marked as NOPASS, False otherwise.
        """
        # Fix: the previous implementation compiled a regex on every call and
        # returned a MatchObject/None instead of a bool.  A substring test is
        # equivalent in every boolean context and returns a real bool.
        return "NOPASS" in self.test_name

    def get_completed(self):
        """
        @return: True if the test completed (pass or fail), False otherwise.
        """
        return (self.returncode > Test.test_did_not_complete())

    def get_output_dir_file_name(self):
        """
        @return: Full path to the output file which you can paste to a terminal window.
        """
        # output_file_name is already absolute once start() has run, in which
        # case os.path.join simply returns it unchanged.
        return (os.path.join(self.output_dir, self.output_file_name))

    def _scrape_output_for_seed(self):
        """
        @return: The seed scraped from the output file, or "" if none was logged.
        """
        res = ""
        with open(self.get_output_dir_file_name(), "r") as f:
            for line in f:
                # The R test harness logs a line containing "SEED used ... <seed>".
                if "SEED used" in line:
                    line = line.strip().split(' ')
                    res = line[-1]
                    break
        return res

    def __str__(self):
        s = ""
        s += "Test: {}/{}\n".format(self.test_dir, self.test_name)
        return s
class RUnitRunner:
    """
    A class for running the RUnit tests.

    The tests list contains an object for every test.
    The tests_not_started list acts as a job queue.
    The tests_running list is polled for jobs that have finished.
    """

    def __init__(self,
                 test_root_dir,
                 use_cloud, use_cloud2, cloud_config, use_ip, use_port,
                 num_clouds, nodes_per_cloud, h2o_jar, base_port, xmx, output_dir, failed_output_dir):
        """
        Create a runner.

        @param test_root_dir: h2o/R/tests directory.
        @param use_cloud: Use this one user-specified cloud. Overrides num_clouds.
        @param use_cloud2: Use the cloud_config to define the list of H2O clouds.
        @param cloud_config: (if use_cloud2) the config file listing the H2O clouds.
        @param use_ip: (if use_cloud) IP of one cloud to use.
        @param use_port: (if use_cloud) Port of one cloud to use.
        @param num_clouds: Number of H2O clouds to start.
        @param nodes_per_cloud: Number of H2O nodes to start per cloud.
        @param h2o_jar: Path to H2O jar file to run.
        @param base_port: Base H2O port (e.g. 54321) to start choosing from.
        @param xmx: Java -Xmx parameter.
        @param output_dir: Directory for output files.
        @param failed_output_dir: Directory where output of failed tests is copied.
        @return: The runner object.
        """
        self.test_root_dir = test_root_dir
        self.use_cloud = use_cloud
        self.use_cloud2 = use_cloud2

        # Valid if use_cloud is True
        self.use_ip = use_ip
        self.use_port = use_port

        # Valid if use_cloud is False
        self.num_clouds = num_clouds
        self.nodes_per_cloud = nodes_per_cloud
        self.h2o_jar = h2o_jar
        self.base_port = base_port
        self.output_dir = output_dir
        self.failed_output_dir = failed_output_dir

        self.start_seconds = time.time()
        self.terminated = False
        self.clouds = []
        self.tests = []
        self.tests_not_started = []
        self.tests_running = []
        self.regression_passed = False
        # Both directory creations exit the program if the directory already
        # exists (the --wipe flag is the supported way to clear them).
        self._create_output_dir()
        self._create_failed_output_dir()

        # Build the cloud list from one of three sources: a single
        # user-specified ip:port, a config file of clouds, or freshly
        # launched local clouds.
        if (use_cloud):
            node_num = 0
            cloud = H2OUseCloud(node_num, use_ip, use_port)
            self.clouds.append(cloud)
        elif (use_cloud2):
            clouds = RUnitRunner.read_config(cloud_config)
            node_num = 0
            for c in clouds:
                cloud = H2OUseCloud(node_num, c[0], c[1])
                self.clouds.append(cloud)
                node_num += 1
        else:
            for i in range(self.num_clouds):
                cloud = H2OCloud(i, self.nodes_per_cloud, h2o_jar, self.base_port, xmx, self.output_dir)
                self.clouds.append(cloud)

    @staticmethod
    def find_test(test_to_run):
        """
        Be nice and try to help find the test if possible.
        If the test is actually found without looking, then just use it.

        Otherwise, search from the script's down directory down.
        """
        if (os.path.exists(test_to_run)):
            abspath_test = os.path.abspath(test_to_run)
            return abspath_test

        # Recursively search under this script's own directory for a file
        # whose basename matches exactly.
        for d, subdirs, files in os.walk(os.path.dirname(os.path.realpath(__file__))):
            for f in files:
                if (f == test_to_run):
                    return os.path.join(d, f)

        # Not found, return the file, which will result in an error downstream when it can't be found.
        # NOTE(review): despite the comment above, this path exits the program
        # here rather than returning -- confirm which behavior is intended.
        print("")
        print("ERROR: Test does not exist: " + test_to_run)
        print("")
        sys.exit(1)

    @staticmethod
    def read_config(config_file):
        """
        Parse a ConfigParser-format cloud config file.

        Each section is expected to list an ip entry followed by a port entry.

        @param config_file: Path to the config file.
        @return: A list of lists; inner lists are [ip, port].
        """
        clouds = []  # a list of lists. Inner lists are [ip, port].
        cfg = ConfigParser.RawConfigParser()
        cfg.read(config_file)
        for s in cfg.sections():
            items = cfg.items(s)
            cloud = [items[0][1], int(items[1][1])]
            clouds.append(cloud)
        return clouds

    def read_test_list_file(self, test_list_file):
        """
        Read in a test list file line by line. Each line in the file is a test
        to add to the test run.

        @param test_list_file: Filesystem path to a file with a list of tests to run.
        @return: none
        """
        try:
            f = open(test_list_file, "r")
            s = f.readline()
            while (len(s) != 0):
                stripped = s.strip()
                # Blank lines and '#' comment lines are skipped.
                if (len(stripped) == 0):
                    s = f.readline()
                    continue
                if (stripped.startswith("#")):
                    s = f.readline()
                    continue
                found_stripped = RUnitRunner.find_test(stripped)
                self.add_test(found_stripped)
                s = f.readline()
            f.close()
        except IOError as e:
            print("")
            print("ERROR: Failure reading test list: " + test_list_file)
            print(" (errno {0}): {1}".format(e.errno, e.strerror))
            print("")
            sys.exit(1)

    def build_test_list(self, test_group, run_small, run_medium, run_large, run_xlarge):
        """
        Recursively find the list of tests to run and store them in the object.
        Fills in self.tests and self.tests_not_started.

        @param test_group: Name of the test group of tests to run.
        @param run_small: Include tests with no size tag in their file name.
        @param run_medium: Include tests whose name contains 'medium'.
        @param run_large: Include tests whose name contains 'large'.
        @param run_xlarge: Include tests whose name contains 'xlarge'.
        @return: none
        """
        if (self.terminated):
            return

        for root, dirs, files in os.walk(self.test_root_dir):
            # The Util directory holds helpers, not runnable tests.
            if (root.endswith("Util")):
                continue

            for f in files:
                # Only files that look like runit tests (e.g. runit_*.R).
                if (not re.match(".*runit.*\.[rR]$", f)):
                    continue

                # Classify by size tag in the file name; "xlarge" must be
                # checked before "large" since the former contains the latter.
                is_small = False
                is_medium = False
                is_large = False
                is_xlarge= False
                if "xlarge" in f: is_xlarge = True
                elif "medium" in f: is_medium = True
                elif "large" in f: is_large = True
                else: is_small = True

                if is_small and not run_small: continue
                if is_medium and not run_medium: continue
                if is_large and not run_large: continue
                if is_xlarge and not run_xlarge: continue

                # Optional group filter matches on directory or file name.
                if (test_group is not None):
                    test_short_dir = self._calc_test_short_dir(os.path.join(root, f))
                    if (test_group.lower() not in test_short_dir) and test_group.lower() not in f:
                        continue
                self.add_test(os.path.join(root, f))

    def add_test(self, test_path):
        """
        Add one test to the list of tests to run.

        @param test_path: File system path to the test.
        @return: none
        """
        abs_test_path = os.path.abspath(test_path)
        abs_test_dir = os.path.dirname(abs_test_path)
        test_file = os.path.basename(abs_test_path)

        if (not os.path.exists(abs_test_path)):
            print("")
            print("ERROR: Test does not exist: " + abs_test_path)
            print("")
            sys.exit(1)

        test_short_dir = self._calc_test_short_dir(test_path)

        test = Test(abs_test_dir, test_short_dir, test_file, self.output_dir)
        self.tests.append(test)
        self.tests_not_started.append(test)

    def start_clouds(self):
        """
        Start all H2O clouds.

        @return: none
        """
        if (self.terminated):
            return

        # User-specified clouds are already up; nothing to start.
        if (self.use_cloud):
            return

        print("")
        print("Starting clouds...")
        print("")
        for cloud in self.clouds:
            if (self.terminated):
                return
            cloud.start()

        print("")
        print("Waiting for H2O nodes to come up...")
        print("")
        for cloud in self.clouds:
            if (self.terminated):
                return
            cloud.wait_for_cloud_to_be_up()

    def run_tests(self):
        """
        Run all tests.

        @return: none
        """
        if (self.terminated):
            return

        self._log("")
        self._log("Setting up R H2O package...")
        # Disabled block: formerly ran Utils/runnerSetupPackage.R against the
        # first cloud before starting the tests; kept for reference.
        if (False):
            out_file_name = os.path.join(self.output_dir, "runnerSetupPackage.out.txt")
            out = open(out_file_name, "w")
            cloud = self.clouds[0]
            port = cloud.get_port()
            ip = "127.0.0.1:"
            if g_use_cloud or g_use_cloud2:
                ip = cloud.get_ip()+":"
            cmd = ["R",
                   "--quiet",
                   "-f",
                   os.path.join(self.test_root_dir, "Utils/runnerSetupPackage.R"),
                   "--args",
                   ip + str(port)]
            child = subprocess.Popen(args=cmd,
                                     stdout=out,
                                     stderr=subprocess.STDOUT)
            rv = child.wait()
            if (self.terminated):
                return
            if (rv != 0):
                print("")
                print("ERROR: Utils/runnerSetupPackage.R failed.")
                print(" (See " + out_file_name + ")")
                print("")
                sys.exit(1)
            out.close()

        num_tests = len(self.tests)
        num_nodes = self.num_clouds * self.nodes_per_cloud
        self._log("")
        if (self.use_cloud):
            self._log("Starting {} tests...".format(num_tests))
        elif (self.use_cloud2):
            self._log("Starting {} tests on {} clouds...".format(num_tests, len(self.clouds)))
        else:
            self._log("Starting {} tests on {} clouds with {} total H2O nodes...".format(num_tests,
                                                                                         self.num_clouds,
                                                                                         num_nodes))
        self._log("")

        # Start the first n tests, where n is the lesser of the total number of tests and the total number of clouds.
        # NOTE(review): the hard caps (30 here, 75 below) don't agree with the
        # inline comment about "30 processes locally" -- confirm intended limits.
        start_count = min(len(self.tests_not_started), len(self.clouds), 30)
        if (g_use_cloud2):
            start_count = min(start_count, 75)  # only open up 30 processes locally
        for i in range(start_count):
            cloud = self.clouds[i]
            ip = cloud.get_ip()
            port = cloud.get_port()
            self._start_next_test_on_ip_port(ip, port)

        # As each test finishes, send a new one to the cloud that just freed up.
        while (len(self.tests_not_started) > 0):
            if (self.terminated):
                return
            completed_test = self._wait_for_one_test_to_complete()
            if (self.terminated):
                return
            self._report_test_result(completed_test)
            ip_of_completed_test = completed_test.get_ip()
            port_of_completed_test = completed_test.get_port()
            self._start_next_test_on_ip_port(ip_of_completed_test, port_of_completed_test)

        # Wait for remaining running tests to complete.
        while (len(self.tests_running) > 0):
            if (self.terminated):
                return
            completed_test = self._wait_for_one_test_to_complete()
            if (self.terminated):
                return
            self._report_test_result(completed_test)

    def stop_clouds(self):
        """
        Stop all H2O clouds.

        @return: none
        """
        if (self.terminated):
            return

        # User-managed clouds are never torn down by the runner.
        if (self.use_cloud or self.use_cloud2):
            print("")
            print("All tests completed...")
            print("")
            return

        print("")
        print("All tests completed; tearing down clouds...")
        print("")
        for cloud in self.clouds:
            cloud.stop()

    def report_summary(self):
        """
        Report some summary information when the tests have finished running.

        @return: none
        """
        passed = 0
        nopass_but_tolerate = 0
        failed = 0
        notrun = 0
        total = 0
        true_fail_list = []
        for test in self.tests:
            if (test.get_passed()):
                passed += 1
            else:
                if (test.get_nopass()):
                    nopass_but_tolerate += 1

                if (test.get_completed()):
                    failed += 1
                    if (not test.get_nopass()):
                        true_fail_list.append(test.test_name)
                else:
                    notrun += 1
            total += 1

        # NOPASS tests are tolerated: the regression passes when everything
        # that is not marked NOPASS passed.
        if ((passed + nopass_but_tolerate) == total):
            self.regression_passed = True
        else:
            self.regression_passed = False

        end_seconds = time.time()
        delta_seconds = end_seconds - self.start_seconds
        run = total - notrun
        self._log("")
        self._log("----------------------------------------------------------------------")
        self._log("")
        self._log("SUMMARY OF RESULTS")
        self._log("")
        self._log("----------------------------------------------------------------------")
        self._log("")
        self._log("Total tests: " + str(total))
        self._log("Passed: " + str(passed))
        self._log("Did not pass: " + str(failed))
        self._log("Did not complete: " + str(notrun))
        self._log("Tolerated NOPASS: " + str(nopass_but_tolerate))
        self._log("")
        self._log("Total time: %.2f sec" % delta_seconds)
        if (run > 0):
            self._log("Time/completed test: %.2f sec" % (delta_seconds / run))
        else:
            self._log("Time/completed test: N/A")
        self._log("")
        self._log("True fail list: " + ", ".join(true_fail_list))
        self._log("")

    def terminate(self):
        """
        Terminate all running clouds. (Due to a signal.)

        @return: none
        """
        self.terminated = True
        # Cancel queued tests first so nothing new starts, then kill the
        # running ones, then tear down the clouds.
        for test in self.tests:
            test.cancel()
        for test in self.tests:
            test.terminate()

        for cloud in self.clouds:
            cloud.terminate()

    def get_regression_passed(self):
        """
        Return whether the overall regression passed or not.

        @return: true if the exit value should be 0, false otherwise.
        """
        return self.regression_passed

    #--------------------------------------------------------------------
    # Private methods below this line.
    #--------------------------------------------------------------------

    def _calc_test_short_dir(self, test_path):
        """
        Calculate directory of test relative to test_root_dir.

        @param test_path: Path to test file.
        @return: test_short_dir, relative directory containing test (relative to test_root_dir).
        """
        abs_test_root_dir = os.path.abspath(self.test_root_dir)
        abs_test_path = os.path.abspath(test_path)
        abs_test_dir = os.path.dirname(abs_test_path)

        test_short_dir = abs_test_dir

        # Strip the test root prefix (joined with "" to ensure a trailing slash).
        prefix = os.path.join(abs_test_root_dir, "")
        if (test_short_dir.startswith(prefix)):
            test_short_dir = test_short_dir.replace(prefix, "", 1)

        return test_short_dir

    def _create_failed_output_dir(self):
        # Exits the program if the directory cannot be created (e.g. when it
        # already exists from a previous run and --wipe was not given).
        try:
            os.makedirs(self.failed_output_dir)
        except OSError as e:
            print("")
            print("mkdir failed (errno {0}): {1}".format(e.errno, e.strerror))
            print(" " + self.failed_output_dir)
            print("")
            print("(try adding --wipe)")
            print("")
            sys.exit(1)

    def _create_output_dir(self):
        # Exits the program if the directory cannot be created (e.g. when it
        # already exists from a previous run and --wipe was not given).
        try:
            os.makedirs(self.output_dir)
        except OSError as e:
            print("")
            print("mkdir failed (errno {0}): {1}".format(e.errno, e.strerror))
            print(" " + self.output_dir)
            print("")
            print("(try adding --wipe)")
            print("")
            sys.exit(1)

    def _start_next_test_on_ip_port(self, ip, port):
        # Pop the next queued test and launch it against the given cloud.
        test = self.tests_not_started.pop(0)
        self.tests_running.append(test)
        test.start(ip, port)

    def _wait_for_one_test_to_complete(self):
        # Poll the running tests once per second until one finishes.
        # Returns None when the runner is terminated while waiting.
        while (True):
            for test in self.tests_running:
                if (self.terminated):
                    return None
                if (test.is_completed()):
                    self.tests_running.remove(test)
                    return test
            if (self.terminated):
                return
            time.sleep(1)

    def _report_test_result(self, test):
        # Log PASS/FAIL; on FAIL, record the test in failed.txt and copy its
        # output into the failed directory (unless it is a NOPASS test).
        port = test.get_port()
        now = time.time()
        duration = now - test.start_seconds
        if (test.get_passed()):
            s = "PASS %d %4ds %-60s" % (port, duration, test.get_test_name())
            self._log(s)
        else:
            s = " FAIL %d %4ds %-60s %s %s" % \
                (port, duration, test.get_test_name(), test.get_output_dir_file_name(), test.get_seed_used())
            self._log(s)
            f = self._get_failed_filehandle_for_appending()
            f.write(test.get_test_dir_file_name() + "\n")
            f.close()

            # Copy failed test output into directory failed
            if not test.get_nopass():
                shutil.copy(test.get_output_dir_file_name(), self.failed_output_dir)

    def _log(self, s):
        # Echo to stdout and append to the persistent summary.txt file.
        f = self._get_summary_filehandle_for_appending()
        print(s)
        sys.stdout.flush()
        f.write(s + "\n")
        f.close()

    def _get_summary_filehandle_for_appending(self):
        summary_file_name = os.path.join(self.output_dir, "summary.txt")
        f = open(summary_file_name, "a")
        return f

    def _get_failed_filehandle_for_appending(self):
        summary_file_name = os.path.join(self.output_dir, "failed.txt")
        f = open(summary_file_name, "a")
        return f

    def __str__(self):
        s = "\n"
        s += "test_root_dir: {}\n".format(self.test_root_dir)
        s += "output_dir: {}\n".format(self.output_dir)
        s += "h2o_jar: {}\n".format(self.h2o_jar)
        s += "num_clouds: {}\n".format(self.num_clouds)
        s += "nodes_per_cloud: {}\n".format(self.nodes_per_cloud)
        s += "base_port: {}\n".format(self.base_port)
        s += "\n"
        for c in self.clouds:
            s += str(c)
        s += "\n"
        # for t in self.tests:
        #     s += str(t)
        return s
#--------------------------------------------------------------------
# Main program
#--------------------------------------------------------------------
# Global variables that can be set by the user.
g_script_name = ""            # basename of this script (filled in by main)
g_base_port = 40000           # --baseport: first port H2O searches from
g_num_clouds = 5              # --numclouds: number of clouds to launch
g_nodes_per_cloud = 1         # --numnodes: H2O nodes per cloud
g_wipe_test_state = False     # --wipeall: remove seeds/Rsandboxes before running
g_wipe_output_dir = False     # --wipe: remove the results dir before running
g_test_to_run = None          # --test: run just this one test
g_test_list_file = None       # --testlist: file listing tests to run
g_test_group = None           # --testgroup: restrict to one functional group
g_run_small = True            # --testsize s
g_run_medium = True           # --testsize m
g_run_large = True            # --testsize l
g_run_xlarge = True           # --testsize x
g_use_cloud = False           # --usecloud: run against one pre-existing cloud
g_use_cloud2 = False          # --usecloud2: run against clouds from a config file
g_config = None               # (if g_use_cloud2) path to the cloud config file
g_use_ip = None               # (if g_use_cloud) IP of the pre-existing cloud
g_use_port = None             # (if g_use_cloud) port of the pre-existing cloud
g_no_run = False              # --norun: do side effects (wipe) but skip tests
g_jvm_xmx = "1g"              # --jvm.xmx: -Xmx value for each launched H2O JVM

# Global variables that are set internally.
g_output_dir = None           # results directory (computed in main)
g_runner = None               # the RUnitRunner instance (set in main)
g_handling_signal = False     # reentrancy guard used by signal_handler
def use(x):
    """ Hack to remove compiler warning: consume *x* without doing anything. """
    del x
def signal_handler(signum, stackframe):
    """Tear down all clouds when a signal arrives (guarded against reentry)."""
    global g_runner
    global g_handling_signal

    use(stackframe)
    if (g_handling_signal):
        # A teardown is already in progress; ignore nested signals.
        return
    g_handling_signal = True

    divider = "----------------------------------------------------------------------"
    print("")
    print(divider)
    print("")
    print("SIGNAL CAUGHT (" + str(signum) + "). TEARING DOWN CLOUDS.")
    print("")
    print(divider)
    g_runner.terminate()
def usage():
    """Print the command-line help text and exit with status 1."""
    print("")
    print("Usage: " + g_script_name +
          " [--wipeall]"
          " [--wipe]"
          " [--baseport port]"
          " [--numclouds n]"
          " [--nodespercloud n]"
          " [--test path/to/test.R]"
          " [--testlist path/to/list/file]"
          " [--testgroup group]"
          " [--testsize (s|m|l)]"
          " [--usecloud ip:port]"
          " [--norun]")
    print("")
    print(" (Output dir is: " + g_output_dir + ")")
    print(" (Default number of clouds is: " + str(g_num_clouds) + ")")
    print("")
    print(" --wipeall Remove all prior test state before starting, particularly")
    print(" random seeds.")
    print(" (Removes master_seed file and all Rsandbox directories.")
    print(" Also wipes the output dir before starting.)")
    print("")
    print(" --wipe Wipes the output dir before starting. Keeps old random seeds.")
    print("")
    print(" --baseport The first port at which H2O starts searching for free ports.")
    print("")
    print(" --numclouds The number of clouds to start.")
    print(" Each test is randomly assigned to a cloud.")
    print("")
    print(" --numnodes The number of nodes in the cloud.")
    print(" When this is specified, numclouds must be 1.")
    print("")
    print(" --test If you only want to run one test, specify it like this.")
    print("")
    print(" --testlist A file containing a list of tests to run (for example the")
    print(" 'failed.txt' file from the output directory).")
    print("")
    print(" --testgroup Test a group of tests by function:")
    print(" pca, coxph, glm, kmeans, gbm, rf, deeplearning, algos, golden, munging")
    print("")
    print(" --testsize Sizes (and by extension length) of tests to run:")
    print(" s=small (seconds), m=medium (a minute or two), l=large (longer)")
    print(" (Default is to run all tests.)")
    print("")
    print(" --usecloud ip:port of cloud to send tests to instead of starting clouds.")
    print(" (When this is specified, numclouds is ignored.)")
    print("")
    print(" --usecloud2 cloud.cfg: Use a set clouds defined in cloud.config to run tests on.")
    print(" (When this is specified, numclouds, numnodes, and usecloud are ignored.)")
    print("")
    print(" --norun Perform side effects like wipe, but don't actually run tests.")
    print("")
    print(" --jvm.xmx Configure size of launched JVM running H2O. E.g. '--jvm.xmx 3g'")
    print("")
    print(" If neither --test nor --testlist is specified, then the list of tests is")
    print(" discovered automatically as files matching '*runit*.R'.")
    print("")
    print("")
    print("Examples:")
    print("")
    print(" Just accept the defaults and go (note: output dir must not exist):")
    print(" "+g_script_name)
    print("")
    print(" Remove all random seeds (i.e. make new ones) but don't run any tests:")
    print(" "+g_script_name+" --wipeall --norun")
    print("")
    print(" For a powerful laptop with 8 cores (keep default numclouds):")
    print(" "+g_script_name+" --wipeall")
    print("")
    print(" For a big server with 32 cores:")
    print(" "+g_script_name+" --wipeall --numclouds 16")
    print("")
    print(" Just run the tests that finish quickly")
    print(" "+g_script_name+" --wipeall --testsize s")
    print("")
    print(" Run one specific test, keeping old random seeds:")
    print(" "+g_script_name+" --wipe --test path/to/test.R")
    print("")
    print(" Rerunning failures from a previous run, keeping old random seeds:")
    print(" # Copy failures.txt, otherwise --wipe removes the directory with the list!")
    print(" cp " + os.path.join(g_output_dir, "failures.txt") + " .")
    print(" "+g_script_name+" --wipe --numclouds 16 --testlist failed.txt")
    print("")
    print(" Run tests on a pre-existing cloud (e.g. in a debugger), keeping old random seeds:")
    print(" "+g_script_name+" --wipe --usecloud ip:port")
    sys.exit(1)
def unknown_arg(s):
    """Report an unrecognized command-line argument, then show usage (exits)."""
    print("\nERROR: Unknown argument: " + s + "\n")
    usage()
def bad_arg(s):
    """Report a misused (but otherwise valid) argument, then show usage (exits)."""
    print("\nERROR: Illegal use of (otherwise valid) argument: " + s + "\n")
    usage()
def error(s):
    """Report a generic error message, then show usage (exits)."""
    print("\nERROR: " + s + "\n")
    usage()
def parse_args(argv):
    """
    Parse command-line arguments into the module-level g_* configuration globals.

    Exits (via usage()/bad_arg()/unknown_arg()) on malformed input.

    @param argv: Full argument vector; argv[0] is the program name.
    @return: none
    """
    global g_base_port
    global g_num_clouds
    global g_nodes_per_cloud
    global g_wipe_test_state
    global g_wipe_output_dir
    global g_test_to_run
    global g_test_list_file
    global g_test_group
    global g_run_small
    global g_run_medium
    global g_run_large
    global g_run_xlarge
    global g_use_cloud
    global g_use_cloud2
    global g_config
    global g_use_ip
    global g_use_port
    global g_no_run
    global g_jvm_xmx

    i = 1
    while (i < len(argv)):
        s = argv[i]

        if (s == "--baseport"):
            i += 1
            # Bug fix: this bound was 'i > len(argv)' throughout, so a flag
            # given as the last argument raised IndexError on argv[i] instead
            # of printing usage.  '>=' catches the missing-value case.
            if (i >= len(argv)):
                usage()
            g_base_port = int(argv[i])
        elif (s == "--numclouds"):
            i += 1
            if (i >= len(argv)):
                usage()
            g_num_clouds = int(argv[i])
        elif (s == "--numnodes"):
            i += 1
            if (i >= len(argv)):
                usage()
            g_nodes_per_cloud = int(argv[i])
        elif (s == "--wipeall"):
            # --wipeall implies --wipe.
            g_wipe_test_state = True
            g_wipe_output_dir = True
        elif (s == "--wipe"):
            g_wipe_output_dir = True
        elif (s == "--test"):
            i += 1
            if (i >= len(argv)):
                usage()
            g_test_to_run = RUnitRunner.find_test(argv[i])
        elif (s == "--testlist"):
            i += 1
            if (i >= len(argv)):
                usage()
            g_test_list_file = argv[i]
        elif (s == "--testgroup"):
            i += 1
            if (i >= len(argv)):
                usage()
            g_test_group = argv[i]
        elif (s == "--testsize"):
            i += 1
            if (i >= len(argv)):
                usage()
            v = argv[i]
            # Bug fix: the old pattern r'(s)?(m)?(l)?' matched every string
            # (all groups optional), making bad_arg() unreachable.  Require
            # the value to consist only of the known size letters s/m/l/x.
            if (re.match(r'^[smlx]+$', v)):
                if (not 's' in v):
                    g_run_small = False
                if (not 'm' in v):
                    g_run_medium = False
                if (not 'l' in v):
                    g_run_large = False
                if not "x" in v:
                    g_run_xlarge = False
            else:
                bad_arg(s)
        elif (s == "--usecloud"):
            i += 1
            if (i >= len(argv)):
                usage()
            s = argv[i]
            # Expect "ip:port" with a positive, non-zero-prefixed port.
            m = re.match(r'(\S+):([1-9][0-9]*)', s)
            if (m is None):
                unknown_arg(s)
            g_use_cloud = True
            g_use_ip = m.group(1)
            port_string = m.group(2)
            g_use_port = int(port_string)
        elif (s == "--usecloud2"):
            i += 1
            if (i >= len(argv)):
                usage()
            s = argv[i]
            # Defensive check retained from the original; argv entries are
            # never None in practice.
            if (s is None):
                unknown_arg(s)
            g_use_cloud2 = True
            g_config = s
        elif (s == "--jvm.xmx"):
            i += 1
            if (i >= len(argv)):
                usage()
            g_jvm_xmx = argv[i]
        elif (s == "--norun"):
            g_no_run = True
        elif (s == "-h" or s == "--h" or s == "-help" or s == "--help"):
            usage()
        else:
            unknown_arg(s)

        i += 1
def wipe_output_dir():
    """Delete the whole results output directory (g_output_dir), if present.

    On failure, prints a diagnostic and terminates the process with
    exit status 1.
    """
    print("")
    print("Wiping output directory...")
    try:
        # A missing directory is not an error; only attempt removal when
        # something is actually there.
        if os.path.exists(g_output_dir):
            shutil.rmtree(g_output_dir)
    except OSError as err:
        print("")
        print("ERROR: Removing output directory failed: " + g_output_dir)
        print("       (errno {0}): {1}".format(err.errno, err.strerror))
        print("")
        sys.exit(1)
def wipe_test_state(test_root_dir):
    """Remove persisted test state beneath test_root_dir.

    Deletes the "master_seed" file directly under test_root_dir (if it
    exists) and every directory anywhere under the tree whose name
    contains "Rsandbox".  On any removal failure, prints a diagnostic
    and terminates the process with exit status 1.

    Fixes applied: removed the dead `if (True):` wrapper that added a
    pointless indentation level, and the redundant str(...) wrapper
    around a string literal.
    """
    print("")
    print("Wiping test state (including random seeds)...")
    possible_seed_file = os.path.join(test_root_dir, "master_seed")
    if os.path.exists(possible_seed_file):
        try:
            os.remove(possible_seed_file)
        except OSError as e:
            print("")
            print("ERROR: Removing seed file failed: " + possible_seed_file)
            print("       (errno {0}): {1}".format(e.errno, e.strerror))
            print("")
            sys.exit(1)
    for d, subdirs, files in os.walk(test_root_dir):
        for s in subdirs:
            if "Rsandbox" in s:
                rsandbox_dir = os.path.join(d, s)
                try:
                    shutil.rmtree(rsandbox_dir)
                except OSError as e:
                    print("")
                    print("ERROR: Removing RSandbox directory failed: " + rsandbox_dir)
                    print("       (errno {0}): {1}".format(e.errno, e.strerror))
                    print("")
                    sys.exit(1)
def main(argv):
    """
    Main program: drive the R unit-test regression end to end.

    Computes output paths relative to this script, parses command-line
    arguments into module globals, optionally wipes previous output /
    test state, builds an RUnitRunner with the selected tests, starts
    the clouds, runs the tests, and reports a summary.  Exits with a
    nonzero status when the h2o.jar is missing or the regression fails.

    @return: none
    """
    global g_script_name
    global g_num_clouds
    global g_nodes_per_cloud
    global g_output_dir
    global g_failed_output_dir
    global g_test_to_run
    global g_test_list_file
    global g_test_group
    global g_runner
    g_script_name = os.path.basename(argv[0])
    # All paths are anchored at this script's real (symlink-resolved) dir.
    test_root_dir = os.path.dirname(os.path.realpath(__file__))
    # Calculate global variables.
    g_output_dir = os.path.join(test_root_dir, str("results"))
    g_failed_output_dir = os.path.join(g_output_dir, str("failed"))
    # Calculate and set other variables.
    # h2o.jar is expected two directories up from here, under target/.
    h2o_jar = os.path.abspath(
        os.path.join(os.path.join(os.path.join(os.path.join(
            test_root_dir, ".."), ".."), "target"), "h2o.jar"))
    # Override any defaults with the user's choices.
    parse_args(argv)
    # Wipe output directory if requested.
    if (g_wipe_output_dir):
        wipe_output_dir()
    # Wipe persistent test state if requested.
    if (g_wipe_test_state):
        wipe_test_state(test_root_dir)
    # Create runner object.
    # Just create one cloud if we're only running one test, even if the user specified more.
    if (g_test_to_run is not None):
        g_num_clouds = 1
    g_runner = RUnitRunner(test_root_dir,
                           g_use_cloud, g_use_cloud2, g_config, g_use_ip, g_use_port,
                           g_num_clouds, g_nodes_per_cloud, h2o_jar, g_base_port, g_jvm_xmx,
                           g_output_dir, g_failed_output_dir)
    # Build test list.  Priority: single test > test list file > group filter.
    if (g_test_to_run is not None):
        g_runner.add_test(g_test_to_run)
    elif (g_test_list_file is not None):
        g_runner.read_test_list_file(g_test_list_file)
    else:
        # Test group can be None or not.
        g_runner.build_test_list(g_test_group, g_run_small, g_run_medium, g_run_large, g_run_xlarge)
    # If no run is specified, then do an early exit here.
    if (g_no_run):
        sys.exit(0)
    # Handle killing the runner.
    # NOTE(review): signal_handler is defined elsewhere in this file;
    # presumably it stops the clouds on Ctrl-C / SIGTERM — confirm there.
    signal.signal(signal.SIGINT, signal_handler)
    signal.signal(signal.SIGTERM, signal_handler)
    # Sanity check existence of H2O jar file before starting the cloud.
    if (not os.path.exists(h2o_jar)):
        print("")
        print("ERROR: H2O jar not found: " + h2o_jar)
        print("")
        sys.exit(1)
    # Run.  The finally clause guarantees clouds are torn down and a
    # summary is printed even when run_tests() raises.
    try:
        g_runner.start_clouds()
        g_runner.run_tests()
    finally:
        g_runner.stop_clouds()
        g_runner.report_summary()
    # If the overall regression did not pass then exit with a failure status code.
    if (not g_runner.get_regression_passed()):
        sys.exit(1)
# Script entry point: pass the full argv (including the script name,
# which parse_args skips) through to main().
if __name__ == "__main__":
    main(sys.argv)
| apache-2.0 |
cosmo-ethz/hope | hope/_library.py | 1 | 67054 | # Copyright (c) 2014 ETH Zurich, Institute of Astronomy, Lukas Gamper <lukas.gamper@usystems.ch>
from __future__ import print_function, division, absolute_import, unicode_literals
LIBRARY_HOPE_EXP = """
#ifndef M_LN2
#define M_LN2 0.693147180559945309417
#endif
static const double hope_exp_data[] = {1.04097186427,1.04089394448,1.04081609252,1.04073830834,1.04066059189,1.04058294314,1.04050536203,1.04042784852,1.04035040257,1.04027302414,1.04019571317,1.04011846963,1.04004129347,1.03996418465,1.03988714312,1.03981016883,
1.03973326176,1.03965642184,1.03957964904,1.03950294331,1.03942630461,1.03934973289,1.03927322812,1.03919679024,1.03912041922,1.039044115,1.03896787756,1.03889170684,1.03881560279,1.03873956538,1.03866359457,1.0385876903,
1.03851185254,1.03843608125,1.03836037637,1.03828473787,1.0382091657,1.03813365982,1.03805822019,1.03798284676,1.0379075395,1.03783229835,1.03775712328,1.03768201424,1.03760697118,1.03753199408,1.03745708288,1.03738223754,
1.03730745801,1.03723274427,1.03715809626,1.03708351394,1.03700899726,1.0369345462,1.03686016069,1.03678584071,1.03671158621,1.03663739714,1.03656327347,1.03648921514,1.03641522213,1.03634129439,1.03626743187,1.03619363454,
1.03611990235,1.03604623526,1.03597263323,1.03589909621,1.03582562417,1.03575221706,1.03567887484,1.03560559747,1.0355323849,1.03545923711,1.03538615403,1.03531313564,1.03524018189,1.03516729275,1.03509446816,1.03502170808,
1.03494901248,1.03487638132,1.03480381455,1.03473131213,1.03465887402,1.03458650019,1.03451419058,1.03444194516,1.03436976388,1.03429764671,1.03422559361,1.03415360453,1.03408167943,1.03400981827,1.03393802102,1.03386628762,
1.03379461805,1.03372301225,1.03365147019,1.03357999184,1.03350857714,1.03343722605,1.03336593854,1.03329471457,1.0332235541,1.03315245708,1.03308142347,1.03301045324,1.03293954635,1.03286870275,1.0327979224,1.03272720527,
1.03265655131,1.03258596049,1.03251543276,1.03244496809,1.03237456643,1.03230422774,1.03223395199,1.03216373914,1.03209358913,1.03202350195,1.03195347754,1.03188351587,1.03181361689,1.03174378057,1.03167400687,1.03160429574,
1.03153464716,1.03146506107,1.03139553745,1.03132607624,1.03125667742,1.03118734093,1.03111806675,1.03104885484,1.03097970514,1.03091061764,1.03084159228,1.03077262902,1.03070372784,1.03063488868,1.03056611152,1.0304973963,
1.030428743,1.03036015157,1.03029162198,1.03022315419,1.03015474815,1.03008640383,1.03001812119,1.02994990019,1.0298817408,1.02981364297,1.02974560667,1.02967763186,1.02960971849,1.02954186654,1.02947407596,1.02940634671,
1.02933867876,1.02927107207,1.0292035266,1.02913604231,1.02906861916,1.02900125712,1.02893395615,1.02886671621,1.02879953726,1.02873241926,1.02866536218,1.02859836598,1.02853143061,1.02846455605,1.02839774226,1.02833098919,
1.02826429681,1.02819766508,1.02813109397,1.02806458344,1.02799813344,1.02793174394,1.02786541491,1.02779914631,1.02773293809,1.02766679023,1.02760070269,1.02753467542,1.02746870839,1.02740280156,1.0273369549,1.02727116837,
1.02720544193,1.02713977554,1.02707416918,1.02700862279,1.02694313635,1.02687770981,1.02681234315,1.02674703632,1.02668178928,1.026616602,1.02655147445,1.02648640658,1.02642139837,1.02635644976,1.02629156073,1.02622673124,
1.02616196126,1.02609725074,1.02603259965,1.02596800796,1.02590347562,1.0258390026,1.02577458887,1.02571023439,1.02564593912,1.02558170302,1.02551752607,1.02545340822,1.02538934944,1.02532534969,1.02526140893,1.02519752713,
1.02513370426,1.02506994027,1.02500623514,1.02494258882,1.02487900129,1.02481547249,1.02475200241,1.02468859099,1.02462523822,1.02456194404,1.02449870844,1.02443553136,1.02437241277,1.02430935265,1.02424635095,1.02418340763,
1.02412052267,1.02405769603,1.02399492767,1.02393221755,1.02386956565,1.02380697192,1.02374443633,1.02368195885,1.02361953944,1.02355717806,1.02349487468,1.02343262927,1.02337044179,1.02330831221,1.02324624048,1.02318422658,
1.02312227047,1.02306037211,1.02299853148,1.02293674853,1.02287502323,1.02281335555,1.02275174545,1.02269019289,1.02262869785,1.02256726029,1.02250588017,1.02244455745,1.02238329211,1.02232208412,1.02226093342,1.02219984,
1.02213880381,1.02207782483,1.02201690301,1.02195603833,1.02189523074,1.02183448022,1.02177378673,1.02171315024,1.02165257071,1.02159204811,1.0215315824,1.02147117355,1.02141082153,1.0213505263,1.02129028783,1.02123010608,
1.02116998103,1.02110991263,1.02104990085,1.02098994567,1.02093004703,1.02087020493,1.0208104193,1.02075069014,1.02069101739,1.02063140103,1.02057184103,1.02051233734,1.02045288994,1.0203934988,1.02033416387,1.02027488513,
1.02021566255,1.02015649608,1.0200973857,1.02003833138,1.01997933307,1.01992039075,1.01986150438,1.01980267394,1.01974389938,1.01968518067,1.01962651779,1.0195679107,1.01950935936,1.01945086374,1.01939242381,1.01933403954,
1.01927571089,1.01921743783,1.01915922033,1.01910105836,1.01904295187,1.01898490085,1.01892690525,1.01886896505,1.01881108021,1.0187532507,1.01869547648,1.01863775753,1.01858009381,1.01852248529,1.01846493194,1.01840743372,
1.0183499906,1.01829260255,1.01823526954,1.01817799153,1.0181207685,1.0180636004,1.01800648722,1.01794942891,1.01789242544,1.01783547679,1.01777858291,1.01772174378,1.01766495937,1.01760822964,1.01755155457,1.01749493411,
1.01743836824,1.01738185692,1.01732540013,1.01726899784,1.01721265,1.01715635659,1.01710011758,1.01704393293,1.01698780262,1.01693172661,1.01687570487,1.01681973736,1.01676382407,1.01670796495,1.01665215997,1.01659640911,
1.01654071233,1.01648506959,1.01642948088,1.01637394615,1.01631846538,1.01626303854,1.01620766558,1.01615234649,1.01609708123,1.01604186977,1.01598671208,1.01593160812,1.01587655787,1.0158215613,1.01576661837,1.01571172905,
1.01565689332,1.01560211114,1.01554738247,1.0154927073,1.01543808558,1.0153835173,1.0153290024,1.01527454088,1.01522013269,1.01516577781,1.0151114762,1.01505722783,1.01500303267,1.0149488907,1.01489480188,1.01484076618,
1.01478678357,1.01473285402,1.0146789775,1.01462515397,1.01457138342,1.01451766581,1.0144640011,1.01441038927,1.01435683029,1.01430332412,1.01424987074,1.01419647012,1.01414312222,1.01408982702,1.01403658448,1.01398339459,
1.01393025729,1.01387717258,1.0138241404,1.01377116075,1.01371823358,1.01366535886,1.01361253657,1.01355976668,1.01350704915,1.01345438396,1.01340177108,1.01334921047,1.01329670211,1.01324424597,1.01319184201,1.01313949021,
1.01308719054,1.01303494297,1.01298274747,1.012930604,1.01287851255,1.01282647308,1.01277448556,1.01272254996,1.01267066625,1.0126188344,1.01256705439,1.01251532618,1.01246364974,1.01241202505,1.01236045208,1.01230893079,
1.01225746117,1.01220604316,1.01215467676,1.01210336193,1.01205209864,1.01200088686,1.01194972656,1.01189861772,1.0118475603,1.01179655428,1.01174559963,1.01169469631,1.0116438443,1.01159304357,1.0115422941,1.01149159584,
1.01144094878,1.01139035288,1.01133980812,1.01128931447,1.0112388719,1.01118848037,1.01113813987,1.01108785036,1.01103761182,1.01098742421,1.0109372875,1.01088720168,1.01083716671,1.01078718255,1.01073724919,1.0106873666,
1.01063753474,1.01058775359,1.01053802312,1.0104883433,1.01043871411,1.01038913551,1.01033960747,1.01029012998,1.01024070299,1.01019132649,1.01014200044,1.01009272482,1.01004349959,1.00999432474,1.00994520023,1.00989612603,
1.00984710211,1.00979812846,1.00974920503,1.00970033181,1.00965150876,1.00960273585,1.00955401307,1.00950534037,1.00945671774,1.00940814515,1.00935962256,1.00931114995,1.00926272729,1.00921435456,1.00916603172,1.00911775876,
1.00906953563,1.00902136232,1.0089732388,1.00892516503,1.008877141,1.00882916667,1.00878124201,1.00873336701,1.00868554163,1.00863776584,1.00859003962,1.00854236294,1.00849473578,1.00844715809,1.00839962987,1.00835215108,
1.00830472169,1.00825734168,1.00821001101,1.00816272967,1.00811549763,1.00806831485,1.00802118132,1.00797409699,1.00792706186,1.00788007589,1.00783313905,1.00778625131,1.00773941266,1.00769262305,1.00764588248,1.0075991909,
1.00755254829,1.00750595463,1.00745940989,1.00741291404,1.00736646706,1.00732006891,1.00727371958,1.00722741903,1.00718116724,1.00713496419,1.00708880983,1.00704270416,1.00699664714,1.00695063874,1.00690467895,1.00685876773,
1.00681290505,1.0067670909,1.00672132524,1.00667560804,1.00662993929,1.00658431895,1.006538747,1.00649322342,1.00644774817,1.00640232123,1.00635694257,1.00631161217,1.00626633001,1.00622109604,1.00617591026,1.00613077263,
1.00608568313,1.00604064173,1.0059956484,1.00595070313,1.00590580588,1.00586095662,1.00581615534,1.005771402,1.00572669658,1.00568203905,1.00563742939,1.00559286758,1.00554835358,1.00550388737,1.00545946893,1.00541509823,
1.00537077524,1.00532649994,1.0052822723,1.00523809229,1.0051939599,1.0051498751,1.00510583785,1.00506184814,1.00501790594,1.00497401122,1.00493016396,1.00488636413,1.00484261171,1.00479890667,1.00475524899,1.00471163864,
1.0046680756,1.00462455983,1.00458109132,1.00453767004,1.00449429597,1.00445096908,1.00440768934,1.00436445673,1.00432127122,1.00427813279,1.00423504142,1.00419199708,1.00414899974,1.00410604937,1.00406314597,1.00402028949,
1.00397747991,1.00393471722,1.00389200138,1.00384933236,1.00380671016,1.00376413473,1.00372160605,1.00367912411,1.00363668887,1.00359430031,1.0035519584,1.00350966313,1.00346741446,1.00342521238,1.00338305685,1.00334094785,
1.00329888536,1.00325686936,1.00321489981,1.0031729767,1.00313109999,1.00308926967,1.00304748571,1.00300574809,1.00296405677,1.00292241175,1.00288081298,1.00283926046,1.00279775415,1.00275629403,1.00271488007,1.00267351226,
1.00263219056,1.00259091495,1.00254968541,1.00250850192,1.00246736444,1.00242627296,1.00238522746,1.0023442279,1.00230327426,1.00226236652,1.00222150466,1.00218068865,1.00213991846,1.00209919408,1.00205851548,1.00201788263,
1.00197729552,1.00193675411,1.00189625838,1.00185580832,1.00181540389,1.00177504507,1.00173473184,1.00169446417,1.00165424205,1.00161406544,1.00157393433,1.00153384868,1.00149380848,1.0014538137,1.00141386432,1.00137396032,
1.00133410166,1.00129428833,1.00125452031,1.00121479757,1.00117512008,1.00113548783,1.00109590079,1.00105635894,1.00101686224,1.00097741069,1.00093800426,1.00089864291,1.00085932664,1.00082005541,1.00078082921,1.000741648,
1.00070251177,1.0006634205,1.00062437415,1.00058537271,1.00054641616,1.00050750446,1.0004686376,1.00042981556,1.0003910383,1.00035230582,1.00031361808,1.00027497506,1.00023637674,1.00019782309,1.0001593141,1.00012084974,
1.00008242998,1.00004405481,1.00000572419,0.999967438121,0.999929196562,0.999890999496,0.999852846898,0.999814738748,0.999776675021,0.999738655695,0.999700680748,0.999662750156,0.999624863896,0.999587021947,0.999549224285,0.999511470889,
0.999473761734,0.999436096798,0.99939847606,0.999360899496,0.999323367084,0.999285878801,0.999248434624,0.999211034532,0.999173678502,0.999136366511,0.999099098536,0.999061874556,0.999024694548,0.99898755849,0.998950466359,0.998913418133,
0.99887641379,0.998839453306,0.998802536661,0.998765663832,0.998728834795,0.998692049531,0.998655308015,0.998618610226,0.998581956142,0.99854534574,0.998508778998,0.998472255895,0.998435776408,0.998399340515,0.998362948194,0.998326599423,
0.99829029418,0.998254032443,0.99821781419,0.998181639399,0.998145508048,0.998109420115,0.998073375579,0.998037374416,0.998001416606,0.997965502127,0.997929630957,0.997893803073,0.997858018455,0.99782227708,0.997786578927,0.997750923974,
0.997715312198,0.99767974358,0.997644218096,0.997608735725,0.997573296446,0.997537900236,0.997502547075,0.997467236941,0.997431969811,0.997396745665,0.997361564481,0.997326426238,0.997291330913,0.997256278486,0.997221268936,0.997186302239,
0.997151378376,0.997116497325,0.997081659064,0.997046863571,0.997012110827,0.996977400809,0.996942733496,0.996908108866,0.9968735269,0.996838987574,0.996804490868,0.996770036761,0.996735625231,0.996701256258,0.996666929819,0.996632645895,
0.996598404464,0.996564205504,0.996530048995,0.996495934915,0.996461863244,0.99642783396,0.996393847043,0.996359902471,0.996326000224,0.99629214028,0.996258322619,0.996224547219,0.996190814059,0.99615712312,0.996123474379,0.996089867816,
0.996056303411,0.996022781142,0.995989300988,0.995955862929,0.995922466943,0.995889113011,0.995855801112,0.995822531224,0.995789303327,0.9957561174,0.995722973423,0.995689871375,0.995656811236,0.995623792984,0.995590816599,0.99555788206,
0.995524989348,0.995492138441,0.995459329319,0.995426561961,0.995393836347,0.995361152457,0.995328510269,0.995295909764,0.995263350921,0.99523083372,0.995198358139,0.99516592416,0.995133531762,0.995101180923,0.995068871624,0.995036603845,
0.995004377565,0.994972192764,0.994940049422,0.994907947519,0.994875887033,0.994843867946,0.994811890236,0.994779953885,0.994748058871,0.994716205174,0.994684392774,0.994652621652,0.994620891787,0.994589203159,0.994557555748,0.994525949534,
0.994494384497,0.994462860617,0.994431377873,0.994399936247,0.994368535718,0.994337176266,0.994305857871,0.994274580514,0.994243344174,0.994212148831,0.994180994466,0.994149881059,0.994118808591,0.99408777704,0.994056786388,0.994025836614,
0.9939949277,0.993964059625,0.993933232369,0.993902445913,0.993871700238,0.993840995323,0.993810331148,0.993779707695,0.993749124943,0.993718582874,0.993688081467,0.993657620702,0.993627200561,0.993596821024,0.993566482071,0.993536183683,
0.99350592584,0.993475708522,0.993445531711,0.993415395387,0.993385299531,0.993355244122,0.993325229142,0.993295254572,0.993265320391,0.993235426581,0.993205573123,0.993175759996,0.993145987182,0.993116254662,0.993086562416,0.993056910424,
0.993027298669,0.99299772713,0.992968195788,0.992938704624,0.99290925362,0.992879842755,0.992850472011,0.992821141369,0.992791850809,0.992762600312,0.99273338986,0.992704219433,0.992675089013,0.992645998579,0.992616948114,0.992587937599,
0.992558967013,0.992530036339,0.992501145558,0.99247229465,0.992443483596,0.992414712378,0.992385980977,0.992357289374,0.992328637551,0.992300025487,0.992271453165,0.992242920566,0.992214427671,0.99218597446,0.992157560917,0.992129187021,
0.992100852754,0.992072558097,0.992044303032,0.992016087539,0.991987911601,0.991959775199,0.991931678314,0.991903620927,0.991875603019,0.991847624573,0.99181968557,0.991791785991,0.991763925817,0.991736105031,0.991708323613,0.991680581545,
0.991652878809,0.991625215386,0.991597591258,0.991570006406,0.991542460812,0.991514954457,0.991487487324,0.991460059393,0.991432670647,0.991405321067,0.991378010635,0.991350739332,0.991323507141,0.991296314042,0.991269160018,0.991242045051,
0.991214969121,0.991187932212,0.991160934305,0.991133975381,0.991107055423,0.991080174412,0.99105333233,0.991026529159,0.990999764881,0.990973039479,0.990946352932,0.990919705225,0.990893096339,0.990866526255,0.990839994955,0.990813502423,
0.990787048639,0.990760633586,0.990734257245,0.9907079196,0.990681620631,0.990655360321,0.990629138653,0.990602955607,0.990576811167,0.990550705315,0.990524638032,0.990498609301,0.990472619104,0.990446667423,0.990420754241,0.99039487954,
0.990369043301,0.990343245508,0.990317486142,0.990291765186,0.990266082622,0.990240438433,0.9902148326,0.990189265106,0.990163735934,0.990138245066,0.990112792485,0.990087378172,0.99006200211,0.990036664281,0.990011364669,0.989986103255,
0.989960880023,0.989935694953,0.98991054803,0.989885439235,0.989860368552,0.989835335962,0.989810341448,0.989785384993,0.98976046658,0.98973558619,0.989710743807,0.989685939414,0.989661172992,0.989636444525,0.989611753996,0.989587101386,
0.989562486679,0.989537909858,0.989513370905,0.989488869802,0.989464406534,0.989439981082,0.989415593429,0.989391243559,0.989366931454,0.989342657096,0.989318420469,0.989294221555,0.989270060338,0.989245936801,0.989221850925,0.989197802695,
0.989173792093,0.989149819102,0.989125883705,0.989101985885,0.989078125625,0.989054302908,0.989030517717,0.989006770035,0.988983059845,0.988959387131,0.988935751875,0.98891215406,0.98888859367,0.988865070687,0.988841585095,0.988818136877,
0.988794726017,0.988771352496,0.988748016299,0.988724717408,0.988701455808,0.98867823148,0.988655044409,0.988631894577,0.988608781969,0.988585706566,0.988562668353,0.988539667313,0.988516703429,0.988493776684,0.988470887062,0.988448034547,
0.988425219121,0.988402440768,0.988379699471,0.988356995214,0.988334327981,0.988311697754,0.988289104517,0.988266548254,0.988244028948,0.988221546582,0.988199101141,0.988176692607,0.988154320965,0.988131986198,0.988109688288,0.988087427221,
0.98806520298,0.988043015547,0.988020864907,0.987998751044,0.987976673941,0.987954633582,0.987932629949,0.987910663029,0.987888732802,0.987866839255,0.98784498237,0.98782316213,0.987801378521,0.987779631525,0.987757921127,0.987736247309,
0.987714610057,0.987693009353,0.987671445182,0.987649917527,0.987628426373,0.987606971703,0.987585553501,0.987564171751,0.987542826437,0.987521517543,0.987500245052,0.98747900895,0.987457809219,0.987436645843,0.987415518808,0.987394428096,
0.987373373692,0.987352355579,0.987331373743,0.987310428166,0.987289518833,0.987268645729,0.987247808836,0.98722700814,0.987206243624,0.987185515272,0.98716482307,0.987144167,0.987123547046,0.987102963195,0.987082415428,0.987061903731,
0.987041428088,0.987020988483,0.987000584901,0.986980217325,0.986959885739,0.986939590129,0.986919330479,0.986899106772,0.986878918993,0.986858767127,0.986838651158,0.986818571069,0.986798526847,0.986778518474,0.986758545936,0.986738609216,
0.986718708299,0.986698843171,0.986679013814,0.986659220214,0.986639462355,0.986619740221,0.986600053798,0.986580403069,0.986560788019,0.986541208633,0.986521664895,0.98650215679,0.986482684302,0.986463247416,0.986443846117,0.986424480388,
0.986405150216,0.986385855584,0.986366596477,0.98634737288,0.986328184778,0.986309032154,0.986289914994,0.986270833283,0.986251787006,0.986232776146,0.986213800689,0.98619486062,0.986175955923,0.986157086583,0.986138252585,0.986119453914,
0.986100690554,0.986081962491,0.986063269709,0.986044612193,0.986025989928,0.986007402899,0.985988851091,0.985970334489,0.985951853077,0.985933406841,0.985914995765,0.985896619835,0.985878279036,0.985859973352,0.985841702768,0.98582346727,
0.985805266842,0.98578710147,0.985768971138,0.985750875832,0.985732815537,0.985714790237,0.985696799918,0.985678844566,0.985660924164,0.985643038699,0.985625188155,0.985607372517,0.985589591772,0.985571845903,0.985554134896,0.985536458736,
0.985518817409,0.9855012109,0.985483639194,0.985466102275,0.985448600131,0.985431132745,0.985413700103,0.98539630219,0.985378938992,0.985361610494,0.985344316681,0.985327057539,0.985309833052,0.985292643207,0.985275487989,0.985258367383,
0.985241281375,0.985224229949,0.985207213092,0.985190230788,0.985173283024,0.985156369784,0.985139491055,0.985122646821,0.985105837069,0.985089061783,0.985072320949,0.985055614553,0.985038942581,0.985022305017,0.985005701847,0.984989133058,
0.984972598634,0.984956098561,0.984939632825,0.984923201411,0.984906804305,0.984890441493,0.98487411296,0.984857818692,0.984841558675,0.984825332895,0.984809141336,0.984792983985,0.984776860828,0.984760771849,0.984744717036,0.984728696374,
0.984712709848,0.984696757444,0.984680839149,0.984664954947,0.984649104825,0.984633288769,0.984617506764,0.984601758796,0.984586044852,0.984570364916,0.984554718976,0.984539107016,0.984523529022,0.984507984982,0.984492474879,0.984476998702,
0.984461556434,0.984446148063,0.984430773575,0.984415432954,0.984400126188,0.984384853263,0.984369614163,0.984354408877,0.984339237388,0.984324099684,0.984308995751,0.984293925574,0.98427888914,0.984263886435,0.984248917444,0.984233982155,
0.984219080553,0.984204212624,0.984189378354,0.984174577731,0.984159810739,0.984145077365,0.984130377595,0.984115711415,0.984101078812,0.984086479772,0.984071914281,0.984057382326,0.984042883892,0.984028418966,0.984013987533,0.983999589582,
0.983985225097,0.983970894065,0.983956596473,0.983942332306,0.983928101551,0.983913904195,0.983899740224,0.983885609623,0.983871512381,0.983857448482,0.983843417914,0.983829420662,0.983815456714,0.983801526055,0.983787628673,0.983773764553,
0.983759933682,0.983746136047,0.983732371634,0.98371864043,0.983704942421,0.983691277593,0.983677645934,0.983664047429,0.983650482066,0.98363694983,0.983623450709,0.983609984689,0.983596551757,0.983583151899,0.983569785102,0.983556451352,
0.983543150637,0.983529882943,0.983516648256,0.983503446563,0.983490277851,0.983477142107,0.983464039317,0.983450969468,0.983437932547,0.983424928541,0.983411957436,0.983399019218,0.983386113876,0.983373241395,0.983360401763,0.983347594965,
0.98333482099,0.983322079824,0.983309371453,0.983296695865,0.983284053046,0.983271442984,0.983258865664,0.983246321075,0.983233809202,0.983221330034,0.983208883556,0.983196469756,0.98318408862,0.983171740136,0.983159424291,0.983147141071,
0.983134890463,0.983122672455,0.983110487034,0.983098334185,0.983086213898,0.983074126158,0.983062070953,0.983050048269,0.983038058094,0.983026100415,0.983014175218,0.983002282492,0.982990422223,0.982978594398,0.982966799004,0.982955036028,
0.982943305459,0.982931607282,0.982919941484,0.982908308054,0.982896706979,0.982885138244,0.982873601839,0.982862097749,0.982850625962,0.982839186466,0.982827779247,0.982816404293,0.982805061592,0.982793751129,0.982782472894,0.982771226872,
0.982760013052,0.98274883142,0.982737681964,0.982726564671,0.982715479529,0.982704426525,0.982693405647,0.982682416881,0.982671460215,0.982660535636,0.982649643133,0.982638782692,0.9826279543,0.982617157946,0.982606393616,0.982595661298,
0.98258496098,0.982574292649,0.982563656292,0.982553051898,0.982542479453,0.982531938944,0.982521430361,0.982510953689,0.982500508917,0.982490096032,0.982479715022,0.982469365874,0.982459048576,0.982448763116,0.98243850948,0.982428287657,
0.982418097635,0.9824079394,0.982397812941,0.982387718245,0.9823776553,0.982367624094,0.982357624614,0.982347656848,0.982337720783,0.982327816408,0.982317943709,0.982308102676,0.982298293295,0.982288515554,0.982278769441,0.982269054944,
0.98225937205,0.982249720748,0.982240101025,0.982230512869,0.982220956267,0.982211431209,0.98220193768,0.98219247567,0.982183045166,0.982173646155,0.982164278627,0.982154942568,0.982145637967,0.982136364811,0.982127123089,0.982117912788,
0.982108733896,0.982099586402,0.982090470292,0.982081385556,0.98207233218,0.982063310154,0.982054319465,0.9820453601,0.982036432049,0.982027535299,0.982018669837,0.982009835653,0.982001032734,0.981992261067,0.981983520643,0.981974811447,
0.981966133469,0.981957486696,0.981948871116,0.981940286718,0.98193173349,0.98192321142,0.981914720496,0.981906260706,0.981897832038,0.981889434481,0.981881068022,0.98187273265,0.981864428354,0.98185615512,0.981847912937,0.981839701795,
0.98183152168,0.981823372581,0.981815254486,0.981807167384,0.981799111263,0.981791086111,0.981783091916,0.981775128667,0.981767196352,0.981759294959,0.981751424477,0.981743584893,0.981735776197,0.981727998377,0.98172025142,0.981712535316,
0.981704850052,0.981697195617,0.981689572,0.981681979188,0.981674417171,0.981666885937,0.981659385473,0.981651915769,0.981644476813,0.981637068593,0.981629691098,0.981622344317,0.981615028237,0.981607742848,0.981600488137,0.981593264094,
0.981586070706,0.981578907963,0.981571775853,0.981564674364,0.981557603485,0.981550563204,0.981543553511,0.981536574393,0.98152962584,0.981522707839,0.98151582038,0.98150896345,0.98150213704,0.981495341137,0.981488575729,0.981481840806,
0.981475136357,0.981468462369,0.981461818832,0.981455205734,0.981448623065,0.981442070811,0.981435548963,0.98142905751,0.981422596439,0.981416165739,0.9814097654,0.98140339541,0.981397055757,0.981390746432,0.981384467421,0.981378218715,
0.981372000301,0.981365812169,0.981359654308,0.981353526706,0.981347429353,0.981341362236,0.981335325345,0.981329318669,0.981323342196,0.981317395916,0.981311479818,0.981305593889,0.98129973812,0.981293912498,0.981288117014,0.981282351656,
0.981276616412,0.981270911272,0.981265236225,0.981259591259,0.981253976364,0.981248391529,0.981242836742,0.981237311993,0.981231817271,0.981226352564,0.981220917861,0.981215513153,0.981210138427,0.981204793672,0.981199478879,0.981194194035,
0.98118893913,0.981183714153,0.981178519094,0.98117335394,0.981168218682,0.981163113308,0.981158037807,0.981152992169,0.981147976383,0.981142990438,0.981138034323,0.981133108027,0.981128211539,0.981123344849,0.981118507946,0.981113700819,
0.981108923457,0.981104175849,0.981099457985,0.981094769854,0.981090111444,0.981085482746,0.981080883749,0.981076314441,0.981071774812,0.981067264852,0.981062784549,0.981058333894,0.981053912874,0.98104952148,0.981045159701,0.981040827527,
0.981036524945,0.981032251947,0.981028008521,0.981023794656,0.981019610342,0.981015455569,0.981011330325,0.981007234601,0.981003168385,0.980999131666,0.980995124436,0.980991146682,0.980987198394,0.980983279562,0.980979390174,0.980975530222,
0.980971699693,0.980967898578,0.980964126866,0.980960384546,0.980956671609,0.980952988043,0.980949333837,0.980945708982,0.980942113468,0.980938547283,0.980935010416,0.980931502859,0.9809280246,0.980924575628,0.980921155934,0.980917765507,
0.980914404337,0.980911072413,0.980907769724,0.980904496261,0.980901252013,0.980898036969,0.98089485112,0.980891694455,0.980888566963,0.980885468635,0.98088239946,0.980879359427,0.980876348526,0.980873366748,0.980870414081,0.980867490516,
0.980864596042,0.980861730649,0.980858894326,0.980856087064,0.980853308852,0.98085055968,0.980847839537,0.980845148414,0.9808424863,0.980839853185,0.980837249059,0.980834673912,0.980832127733,0.980829610512,0.980827122239,0.980824662904,
0.980822232497,0.980819831008,0.980817458426,0.980815114742,0.980812799944,0.980810514024,0.980808256971,0.980806028775,0.980803829426,0.980801658913,0.980799517227,0.980797404358,0.980795320295,0.980793265029,0.980791238549,0.980789240845,
0.980787271908,0.980785331727,0.980783420293,0.980781537595,0.980779683623,0.980777858368,0.980776061819,0.980774293967,0.9807725548,0.980770844311,0.980769162487,0.980767509321,0.980765884801,0.980764288918,0.980762721661,0.980761183021,
0.980759672989,0.980758191553,0.980756738704,0.980755314433,0.980753918729,0.980752551583,0.980751212984,0.980749902924,0.980748621391,0.980747368376,0.98074614387,0.980744947862,0.980743780342,0.980742641302,0.98074153073,0.980740448618,
0.980739394955,0.980738369732,0.980737372939,0.980736404566,0.980735464603,0.980734553041,0.98073366987,0.98073281508,0.980731988662,0.980731190605,0.9807304209,0.980729679538,0.980728966508,0.980728281801,0.980727625407,0.980726997317,
0.980726397521,0.980725826008,0.980725282771,0.980724767798,0.980724281081,0.980723822609,0.980723392373,0.980722990364,0.980722616572,0.980722270986,0.980721953599,0.980721664399,0.980721403378,0.980721170525,0.980720965832,0.980720789289,
0.980720640886,0.980720520613,0.980720428462,0.980720364422,0.980720328484,0.980720320639,0.980720340877,0.980720389188,0.980720465563,0.980720569993,0.980720702468,0.980720862979,0.980721051515,0.980721268069,0.98072151263,0.980721785188,
0.980722085735,0.98072241426,0.980722770756,0.980723155211,0.980723567617,0.980724007964,0.980724476243,0.980724972445,0.98072549656,0.980726048578,0.980726628491,0.980727236289,0.980727871963,0.980728535503,0.9807292269,0.980729946145,
0.980730693228,0.98073146814,0.980732270872,0.980733101414,0.980733959758,0.980734845893,0.980735759811,0.980736701502,0.980737670958,0.980738668168,0.980739693123,0.980740745815,0.980741826234,0.980742934371,0.980744070216,0.980745233761,
0.980746424996,0.980747643912,0.9807488905,0.98075016475,0.980751466654,0.980752796202,0.980754153385,0.980755538194,0.980756950619,0.980758390653,0.980759858284,0.980761353506,0.980762876307,0.98076442668,0.980766004614,0.980767610101,
0.980769243133,0.980770903698,0.98077259179,0.980774307398,0.980776050513,0.980777821127,0.980779619231,0.980781444814,0.980783297869,0.980785178386,0.980787086356,0.980789021771,0.98079098462,0.980792974896,0.980794992589,0.98079703769,
0.98079911019,0.98080121008,0.980803337352,0.980805491996,0.980807674003,0.980809883364,0.980812120071,0.980814384115,0.980816675485,0.980818994175,0.980821340174,0.980823713473,0.980826114065,0.98082854194,0.980830997088,0.980833479502,
0.980835989172,0.98083852609,0.980841090246,0.980843681631,0.980846300238,0.980848946057,0.980851619078,0.980854319294,0.980857046696,0.980859801274,0.98086258302,0.980865391925,0.98086822798,0.980871091177,0.980873981507,0.98087689896,
0.980879843529,0.980882815204,0.980885813976,0.980888839837,0.980891892779,0.980894972792,0.980898079867,0.980901213997,0.980904375171,0.980907563382,0.980910778621,0.980914020879,0.980917290147,0.980920586417,0.98092390968,0.980927259927,
0.98093063715,0.98093404134,0.980937472488,0.980940930586,0.980944415625,0.980947927596,0.980951466492,0.98095493117,0.980958536205,0.980962155819,0.980965802323,0.980969475708,0.980973175965,0.980976903085,0.980980657061,0.980984437883,
0.980988245543,0.980992080032,0.980995941342,0.980999829464,0.98100374439,0.981007686111,0.981011654619,0.981015649904,0.98101967196,0.981023720776,0.981027796345,0.981031898659,0.981036027707,0.981040183483,0.981044365978,0.981048575183,
0.98105281109,0.98105707369,0.981061362974,0.981065678935,0.981070021565,0.981074390853,0.981078786793,0.981083209376,0.981087658593,0.981092134435,0.981096636896,0.981101165965,0.981105721635,0.981110303897,0.981114912744,0.981119548166,
0.981124210155,0.981128898703,0.981133613802,0.981138355443,0.981143123618,0.981147918318,0.981152739536,0.981157587263,0.98116246149,0.981167362209,0.981172289413,0.981177243092,0.981182223239,0.981187229845,0.981192262901,0.981197322401,
0.981202408335,0.981207520695,0.981212659472,0.98121782466,0.981223016249,0.981228234231,0.981233478597,0.981238749341,0.981244046453,0.981249369926,0.981254719751,0.981260095919,0.981265498424,0.981270927256,0.981276382407,0.98128186387,
0.981287371635,0.981292905696,0.981298466044,0.98130405267,0.981309665566,0.981315304725,0.981320970138,0.981326661798,0.981332379695,0.981338123823,0.981343894172,0.981349690735,0.981355513503,0.981361362469,0.981367237625,0.981373138962,
0.981379066473,0.981385020148,0.981390999981,0.981397005964,0.981403038087,0.981409096344,0.981415180726,0.981421291224,0.981427427832,0.981433590541,0.981439779343,0.981445994231,0.981452235195,0.981458502228,0.981464795323,0.981471114471,
0.981477459664,0.981483830894,0.981490228153,0.981496651434,0.981503100729,0.981509576028,0.981516077326,0.981522604613,0.981529157881,0.981535737124,0.981542342332,0.981548973498,0.981555630614,0.981562313673,0.981569022666,0.981575757585,
0.981582518423,0.981589305172,0.981596117823,0.98160295637,0.981609820803,0.981616711116,0.981623627301,0.981630569349,0.981637537253,0.981644531005,0.981651550597,0.981658596022,0.981665667271,0.981672764337,0.981679887212,0.981687035888,
0.981694210358,0.981701410613,0.981708636646,0.98171588845,0.981723166016,0.981730469336,0.981737798404,0.981745153211,0.981752533749,0.981759940011,0.981767371989,0.981774829675,0.981782313062,0.981789822142,0.981797356907,0.981804917349,
0.981812503461,0.981820115236,0.981827752665,0.981835415741,0.981843104456,0.981850818802,0.981858558773,0.981866324359,0.981874115554,0.981881932351,0.98188977474,0.981897642715,0.981905536269,0.981913455392,0.981921400079,0.981929370321,
0.981937366111,0.98194538744,0.981953434303,0.98196150669,0.981969604594,0.981977728009,0.981985876926,0.981994051337,0.982002251236,0.982010476614,0.982018727464,0.982027003779,0.982035305551,0.982043632773,0.982051985436,0.982060363534,
0.982068767059,0.982077196004,0.982085650361,0.982094130122,0.98210263528,0.982111165828,0.982119721758,0.982128303062,0.982136909734,0.982145541765,0.982154199149,0.982162881878,0.982171589944,0.98218032334,0.982189082059,0.982197866093,
0.982206675435,0.982215510077,0.982224370012,0.982233255233,0.982242165731,0.982251101501,0.982260062534,0.982269048823,0.982278060361,0.98228709714,0.982296159153,0.982305246392,0.982314358851,0.982323496522,0.982332659397,0.98234184747,
0.982351060732,0.982360299178,0.982369562798,0.982378851587,0.982388165536,0.982397504638,0.982406868887,0.982416258275,0.982425672794,0.982435112437,0.982444577198,0.982454067068,0.98246358204,0.982473122108,0.982482687264,0.982492277501,
0.982501892811,0.982511533188,0.982521198623,0.98253088911,0.982540604642,0.982550345212,0.982560110812,0.982569901434,0.982579717073,0.98258955772,0.982599423368,0.982609314011,0.982619229641,0.982629170251,0.982639135834,0.982649126382,
0.982659141889,0.982669182348,0.98267924775,0.982689338089,0.982699453359,0.982709593551,0.982719758659,0.982729948676,0.982740163594,0.982750403406,0.982760668106,0.982770957686,0.982781272139,0.982791611458,0.982801975636,0.982812364666,
0.98282277854,0.982833217253,0.982843680796,0.982854169163,0.982864682346,0.982875220339,0.982885783135,0.982896370726,0.982906983106,0.982917620267,0.982928282202,0.982938968906,0.982949680369,0.982960416586,0.98297117755,0.982981963253,
0.982992773688,0.983003608849,0.983014468729,0.98302535332,0.983036262616,0.983047196609,0.983058155293,0.983069138662,0.983080146706,0.983091179421,0.983102236799,0.983113318833,0.983124425516,0.983135556842,0.983146712802,0.983157893392,
0.983169098602,0.983180328428,0.983191582861,0.983202861895,0.983214165523,0.983225493738,0.983236846533,0.983248223902,0.983259625837,0.983271052332,0.98328250338,0.983293978974,0.983305479107,0.983317003773,0.983328552964,0.983340126674,
0.983351724895,0.983363347622,0.983374994847,0.983386666563,0.983398362764,0.983410083443,0.983421828593,0.983433598208,0.98344539228,0.983457210803,0.983469053769,0.983480921173,0.983492813008,0.983504729266,0.983516669942,0.983528635027,
0.983540624517,0.983552638403,0.983564676679,0.983576739338,0.983588826374,0.98360093778,0.98361307355,0.983625233675,0.983637418151,0.98364962697,0.983661860125,0.983674117611,0.983686399419,0.983698705544,0.983711035978,0.983723390716,
0.98373576975,0.983748173074,0.983760600681,0.983773052565,0.983785528719,0.983798029136,0.98381055381,0.983823102733,0.9838356759,0.983848273304,0.983860894939,0.983873540796,0.983886210871,0.983898905157,0.983911623646,0.983924366332,
0.983937133209,0.983949924271,0.983962739509,0.983975578919,0.983988442494,0.984001330226,0.984014242109,0.984027178138,0.984040138304,0.984053122603,0.984066131027,0.984079163569,0.984092220223,0.984105300984,0.984118405843,0.984131534795,
0.984144687833,0.984157864951,0.984171066142,0.9841842914,0.984197540718,0.98421081409,0.984224111509,0.984237432969,0.984250778464,0.984264147986,0.98427754153,0.984290959089,0.984304400656,0.984317866226,0.984331355791,0.984344869346,
0.984358406883,0.984371968397,0.984385553881,0.984399163329,0.984412796734,0.98442645409,0.98444013539,0.984453840629,0.984467569799,0.984481322894,0.984495099909,0.984508900836,0.984522725669,0.984536574402,0.984550447029,0.984564343543,
0.984578263938,0.984592208207,0.984606176344,0.984620168343,0.984634184198,0.984648223902,0.984662287449,0.984676374832,0.984690486046,0.984704621084,0.984718779939,0.984732962606,0.984747169078,0.984761399348,0.984775653411,0.984789931261,
0.98480423289,0.984818558293,0.984832907464,0.984847280396,0.984861677083,0.984876097518,0.984890541696,0.984905009611,0.984919501255,0.984934016623,0.984948555709,0.984963118506,0.984977705008,0.984992315209,0.985006949103,0.985021606684,
0.985036287945,0.98505099288,0.985065721483,0.985080473747,0.985095249668,0.985110049238,0.985124872451,0.985139719301,0.985154589783,0.985169483889,0.985184401614,0.985199342951,0.985214307895,0.985229296439,0.985244308577,0.985259344303,
0.985274403611,0.985289486495,0.985304592949,0.985319722966,0.985334876541,0.985350053667,0.985365254338,0.985380478548,0.985395726292,0.985410997562,0.985426292354,0.98544161066,0.985456952476,0.985472317794,0.985487706609,0.985503118914,
0.985518554704,0.985534013973,0.985549496714,0.985565002921,0.98558053259,0.985596085712,0.985611662283,0.985627262297,0.985642885746,0.985658532627,0.985674202931,0.985689896654,0.98570561379,0.985721354332,0.985737118274,0.985752905611,
0.985768716336,0.985784550444,0.985800407928,0.985816288783,0.985832193003,0.985848120581,0.985864071512,0.98588004579,0.985896043408,0.985912064362,0.985928108644,0.98594417625,0.985960267173,0.985976381407,0.985992518946,0.986008679785,
0.986024863917,0.986041071337,0.986057302038,0.986073556016,0.986089833263,0.986106133774,0.986122457544,0.986138804565,0.986155174834,0.986171568342,0.986187985086,0.986204425058,0.986220888254,0.986237374666,0.98625388429,0.986270417119,
0.986286973148,0.986303552371,0.986320154782,0.986336780375,0.986353429144,0.986370101084,0.986386796189,0.986403514452,0.986420255869,0.986437020433,0.986453808139,0.98647061898,0.986487452952,0.986504310047,0.986521190261,0.986538093588,
0.986555020022,0.986571969557,0.986588942187,0.986605937907,0.98662295671,0.986639998592,0.986657063547,0.986674151568,0.98669126265,0.986708396787,0.986725553974,0.986742734204,0.986759937473,0.986777163774,0.986794413102,0.98681168545,
0.986828980814,0.986846299188,0.986863640565,0.986881004941,0.986898392309,0.986915802664,0.986933236,0.986950692312,0.986968171594,0.98698567384,0.987003199044,0.987020747202,0.987038318306,0.987055912353,0.987073529335,0.987091169248,
0.987108832085,0.987126517841,0.987144226511,0.987161958089,0.98717971257,0.987197489946,0.987215290214,0.987233113368,0.987250959401,0.987268828309,0.987286720085,0.987304634724,0.987322572221,0.98734053257,0.987358515765,0.987376521802,
0.987394550673,0.987412602374,0.987430676899,0.987448774243,0.9874668944,0.987485037365,0.987503203132,0.987521391695,0.987539603049,0.987557837188,0.987576094107,0.987594373801,0.987612676264,0.98763100149,0.987649349474,0.98766772021,
0.987686113693,0.987704529918,0.987722968878,0.987741430569,0.987759914985,0.98777842212,0.987796951969,0.987815504527,0.987834079788,0.987852677747,0.987871298397,0.987889941735,0.987908607754,0.987927296448,0.987946007813,0.987964741843,
0.987983498533,0.988002277876,0.988021079869,0.988039904504,0.988058751778,0.988077621684,0.988096514217,0.988115429371,0.988134367142,0.988153327524,0.988172310512,0.988191316099,0.988210344281,0.988229395053,0.988248468408,0.988267564342,
0.988286682849,0.988305823925,0.988324987562,0.988344173757,0.988363382504,0.988382613797,0.988401867632,0.988421144002,0.988440442903,0.988459764329,0.988479108274,0.988498474734,0.988517863704,0.988537275177,0.988556709149,0.988576165614,
0.988595644567,0.988615146003,0.988634669916,0.988654216302,0.988673785154,0.988693376467,0.988712990237,0.988732626458,0.988752285125,0.988771966232,0.988791669774,0.988811395747,0.988831144144,0.98885091496,0.988870708191,0.98889052383,
0.988910361874,0.988930222316,0.988950105151,0.988970010374,0.98898993798,0.989009887963,0.989029860319,0.989049855042,0.989069872128,0.98908991157,0.989109973363,0.989130057503,0.989150163984,0.989170292801,0.989190443949,0.989210617423,
0.989230813217,0.989251031326,0.989271271746,0.989291534471,0.989311819495,0.989332126814,0.989352456423,0.989372808316,0.989393182488,0.989413578934,0.98943399765,0.989454438629,0.989474901867,0.989495387358,0.989515895098,0.989536425081,
0.989556977303,0.989577551757,0.98959814844,0.989618767346,0.989639408469,0.989660071805,0.989680757349,0.989701465096,0.98972219504,0.989742947176,0.9897637215,0.989784518005,0.989805336689,0.989826177544,0.989847040566,0.989867925751,
0.989888833092,0.989909762585,0.989930714225,0.989951688007,0.989972683926,0.989993701977,0.990014742154,0.990035804454,0.990056888869,0.990077995397,0.990099124031,0.990120274767,0.9901414476,0.990162642524,0.990183859535,0.990205098627,
0.990226359797,0.990247643038,0.990268948346,0.990290275715,0.990311625141,0.990332996619,0.990354390144,0.990375805711,0.990397243315,0.99041870295,0.990440184613,0.990461688298,0.990483213999,0.990504761713,0.990526331434,0.990547923158,
0.990569536879,0.990591172592,0.990612830293,0.990634509976,0.990656211637,0.990677935271,0.990699680873,0.990721448438,0.99074323796,0.990765049436,0.990786882861,0.990808738228,0.990830615534,0.990852514774,0.990874435942,0.990896379035,
0.990918344046,0.990940330971,0.990962339806,0.990984370545,0.991006423184,0.991028497717,0.99105059414,0.991072712448,0.991094852636,0.9911170147,0.991139198634,0.991161404434,0.991183632095,0.991205881611,0.991228152979,0.991250446193,
0.991272761249,0.991295098142,0.991317456866,0.991339837418,0.991362239792,0.991384663983,0.991407109987,0.9914295778,0.991452067415,0.991474578829,0.991497112036,0.991519667033,0.991542243813,0.991564842373,0.991587462707,0.991610104811,
0.991632768681,0.99165545431,0.991678161695,0.991700890831,0.991723641713,0.991746414336,0.991769208696,0.991792024788,0.991814862607,0.991837722149,0.991860603408,0.99188350638,0.99190643106,0.991929377444,0.991952345527,0.991975335304,
0.991998346771,0.992021379922,0.992044434753,0.99206751126,0.992090609438,0.992113729281,0.992136870786,0.992160033948,0.992183218762,0.992206425223,0.992229653326,0.992252903068,0.992276174443,0.992299467447,0.992322782075,0.992346118323,
0.992369476185,0.992392855657,0.992416256735,0.992439679414,0.992463123689,0.992486589556,0.99251007701,0.992533586046,0.992557116661,0.992580668848,0.992604242604,0.992627837924,0.992651454803,0.992675093237,0.992698753221,0.992722434751,
0.992746137822,0.99276986243,0.992793608569,0.992817376235,0.992841165425,0.992864976132,0.992888808353,0.992912662084,0.992936537318,0.992960434053,0.992984352283,0.993008292004,0.993032253211,0.993056235899,0.993080240066,0.993104265704,
0.993128312811,0.993152381382,0.993176471411,0.993200582896,0.99322471583,0.99324887021,0.993273046031,0.993297243288,0.993321461978,0.993345702095,0.993369963635,0.993394246594,0.993418550967,0.993442876749,0.993467223936,0.993491592525,
0.993515982509,0.993540393885,0.993564826648,0.993589280794,0.993613756318,0.993638253216,0.993662771484,0.993687311116,0.993711872109,0.993736454458,0.993761058159,0.993785683206,0.993810329597,0.993834997326,0.993859686388,0.99388439678,
0.993909128497,0.993933881535,0.993958655889,0.993983451555,0.994008268528,0.994033106804,0.994057966379,0.994082847247,0.994107749406,0.99413267285,0.994157617575,0.994182583577,0.994207570851,0.994232579393,0.994257609198,0.994282660262,
0.994307732582,0.994332826151,0.994357940967,0.994383077024,0.994408234319,0.994433412847,0.994458612603,0.994483833584,0.994509075784,0.9945343392,0.994559623828,0.994584929662,0.994610256699,0.994635604935,0.994660974364,0.994686364983,
0.994711776787,0.994737209773,0.994762663935,0.994788139269,0.994813635772,0.994839153439,0.994864692265,0.994890252246,0.994915833378,0.994941435657,0.994967059079,0.994992703638,0.995018369331,0.995044056154,0.995069764103,0.995095493172,
0.995121243358,0.995147014656,0.995172807063,0.995198620574,0.995224455185,0.995250310891,0.995276187688,0.995302085573,0.99532800454,0.995353944586,0.995379905706,0.995405887897,0.995431891153,0.995457915471,0.995483960846,0.995510027275,
0.995536114753,0.995562223275,0.995588352838,0.995614503438,0.99564067507,0.99566686773,0.995693081414,0.995719316117,0.995745571836,0.995771848566,0.995798146304,0.995824465044,0.995850804783,0.995877165517,0.995903547241,0.995929949952,
0.995956373644,0.995982818315,0.99600928396,0.996035770574,0.996062278153,0.996088806694,0.996115356193,0.996141926644,0.996168518045,0.99619513039,0.996221763676,0.996248417899,0.996275093054,0.996301789137,0.996328506145,0.996355244073,
0.996382002917,0.996408782673,0.996435583337,0.996462404904,0.996489247371,0.996516110734,0.996542994988,0.99656990013,0.996596826155,0.996623773059,0.996650740838,0.996677729488,0.996704739005,0.996731769386,0.996758820625,0.996785892719,
0.996812985663,0.996840099455,0.996867234089,0.996894389561,0.996921565868,0.996948763006,0.99697598097,0.997003219757,0.997030479362,0.997057759781,0.997085061011,0.997112383047,0.997139725886,0.997167089523,0.997194473954,0.997221879175,
0.997249305182,0.997276751972,0.99730421954,0.997331707883,0.997359216995,0.997386746874,0.997414297516,0.997441868915,0.997469461069,0.997497073973,0.997524707624,0.997552362017,0.997580037148,0.997607733014,0.99763544961,0.997663186933,
0.997690944979,0.997718723743,0.997746523221,0.997774343411,0.997802184307,0.997830045906,0.997857928204,0.997885831197,0.997913754881,0.997941699252,0.997969664306,0.997997650039,0.998025656448,0.998053683528,0.998081731276,0.998109799687,
0.998137888758,0.998165998484,0.998194128863,0.998222279889,0.998250451559,0.99827864387,0.998306856817,0.998335090396,0.998363344604,0.998391619436,0.998419914889,0.998448230959,0.998476567642,0.998504924934,0.998533302831,0.99856170133,
0.998590120426,0.998618560116,0.998647020395,0.998675501261,0.998704002709,0.998732524735,0.998761067335,0.998789630506,0.998818214244,0.998846818544,0.998875443404,0.998904088819,0.998932754785,0.998961441299,0.998990148357,0.999018875954,
0.999047624088,0.999076392754,0.999105181949,0.999133991668,0.999162821909,0.999191672666,0.999220543937,0.999249435717,0.999278348003,0.999307280791,0.999336234077,0.999365207858,0.999394202129,0.999423216887,0.999452252128,0.999481307848,
0.999510384044,0.999539480712,0.999568597847,0.999597735447,0.999626893508,0.999656072025,0.999685270995,0.999714490415,0.999743730279,0.999772990586,0.999802271331,0.99983157251,0.999860894119,0.999890236156,0.999919598615,0.999948981494,
0.999978384789,1.0000078085,1.00003725261,1.00006671713,1.00009620205,1.00012570737,1.00015523308,1.00018477918,1.00021434566,1.00024393253,1.00027353978,1.0003031674,1.00033281539,1.00036248376,1.00039217248,1.00042188157,
1.00045161101,1.0004813608,1.00051113095,1.00054092144,1.00057073227,1.00060056344,1.00063041494,1.00066028677,1.00069017894,1.00072009142,1.00075002423,1.00077997735,1.00080995078,1.00083994453,1.00086995858,1.00089999293,
1.00093004758,1.00096012252,1.00099021776,1.00102033328,1.00105046908,1.00108062517,1.00111080153,1.00114099816,1.00117121507,1.00120145224,1.00123170967,1.00126198736,1.0012922853,1.0013226035,1.00135294194,1.00138330063,
1.00141367956,1.00144407873,1.00147449813,1.00150493775,1.00153539761,1.00156587769,1.00159637798,1.00162689849,1.00165743922,1.00168800015,1.00171858129,1.00174918262,1.00177980416,1.00181044589,1.00184110781,1.00187178992,
1.00190249221,1.00193321468,1.00196395733,1.00199472015,1.00202550314,1.0020563063,1.00208712962,1.00211797309,1.00214883673,1.00217972051,1.00221062444,1.00224154852,1.00227249274,1.0023034571,1.00233444159,1.00236544621,
1.00239647096,1.00242751583,1.00245858082,1.00248966594,1.00252077116,1.00255189649,1.00258304193,1.00261420748,1.00264539312,1.00267659886,1.00270782469,1.00273907061,1.00277033661,1.0028016227,1.00283292887,1.00286425511,
1.00289560142,1.0029269678,1.00295835425,1.00298976076,1.00302118732,1.00305263395,1.00308410062,1.00311558734,1.0031470941,1.00317862091,1.00321016775,1.00324173462,1.00327332153,1.00330492847,1.00333655542,1.0033682024,
1.0033998694,1.00343155641,1.00346326343,1.00349499045,1.00352673748,1.00355850451,1.00359029154,1.00362209855,1.00365392556,1.00368577256,1.00371763954,1.00374952649,1.00378143343,1.00381336034,1.00384530721,1.00387727405,
1.00390926086,1.00394126763,1.00397329435,1.00400534102,1.00403740764,1.00406949421,1.00410160073,1.00413372718,1.00416587357,1.00419803989,1.00423022614,1.00426243231,1.00429465841,1.00432690443,1.00435917036,1.00439145621,
1.00442376196,1.00445608763,1.00448843319,1.00452079865,1.00455318401,1.00458558927,1.00461801441,1.00465045944,1.00468292435,1.00471540914,1.00474791381,1.00478043835,1.00481298276,1.00484554704,1.00487813118,1.00491073518,
1.00494335903,1.00497600274,1.0050086663,1.00504134971,1.00507405296,1.00510677605,1.00513951898,1.00517228174,1.00520506433,1.00523786675,1.00527068899,1.00530353105,1.00533639293,1.00536927463,1.00540217614,1.00543509745,
1.00546803857,1.00550099949,1.0055339802,1.00556698072,1.00560000102,1.00563304111,1.00566610099,1.00569918065,1.00573228009,1.0057653993,1.00579853829,1.00583169704,1.00586487556,1.00589807385,1.00593129189,1.00596452969,
1.00599778724,1.00603106454,1.00606436159,1.00609767838,1.00613101492,1.00616437119,1.00619774719,1.00623114292,1.00626455839,1.00629799357,1.00633144848,1.0063649231,1.00639841745,1.0064319315,1.00646546526,1.00649901872,
1.00653259189,1.00656618476,1.00659979732,1.00663342958,1.00666708153,1.00670075316,1.00673444448,1.00676815547,1.00680188615,1.00683563649,1.00686940651,1.0069031962,1.00693700555,1.00697083456,1.00700468323,1.00703855156,
1.00707243953,1.00710634716,1.00714027443,1.00717422135,1.0072081879,1.0072421741,1.00727617992,1.00731020538,1.00734425046,1.00737831517,1.0074123995,1.00744650344,1.007480627,1.00751477018,1.00754893296,1.00758311535,
1.00761731734,1.00765153893,1.00768578012,1.0077200409,1.00775432127,1.00778862122,1.00782294077,1.00785727989,1.00789163859,1.00792601687,1.00796041471,1.00799483213,1.00802926912,1.00806372566,1.00809820177,1.00813269743,
1.00816721265,1.00820174741,1.00823630173,1.00827087559,1.00830546899,1.00834008193,1.00837471441,1.00840936642,1.00844403795,1.00847872902,1.00851343961,1.00854816972,1.00858291934,1.00861768848,1.00865247714,1.0086872853,
1.00872211296,1.00875696013,1.0087918268,1.00882671297,1.00886161863,1.00889654377,1.00893148841,1.00896645253,1.00900143614,1.00903643922,1.00907146178,1.00910650381,1.00914156531,1.00917664628,1.00921174671,1.0092468666,
1.00928200595,1.00931716476,1.00935234301,1.00938754072,1.00942275787,1.00945799447,1.0094932505,1.00952852598,1.00956382089,1.00959913522,1.00963446899,1.00966982219,1.0097051948,1.00974058684,1.00977599829,1.00981142916,
1.00984687943,1.00988234912,1.00991783821,1.0099533467,1.0099888746,1.01002442189,1.01005998857,1.01009557464,1.0101311801,1.01016680495,1.01020244917,1.01023811278,1.01027379576,1.01030949812,1.01034521985,1.01038096094,
1.0104167214,1.01045250122,1.0104883004,1.01052411893,1.01055995682,1.01059581406,1.01063169065,1.01066758658,1.01070350185,1.01073943646,1.01077539041,1.01081136368,1.0108473563,1.01088336823,1.01091939949,1.01095545008,
1.01099151998,1.0110276092,1.01106371773,1.01109984557,1.01113599272,1.01117215918,1.01120834493,1.01124454999,1.01128077434,1.01131701799,1.01135328092,1.01138956314,1.01142586465,1.01146218544,1.01149852551,1.01153488486,
1.01157126348,1.01160766137,1.01164407853,1.01168051495,1.01171697064,1.01175344558,1.01178993979,1.01182645324,1.01186298595,1.01189953791,1.01193610911,1.01197269956,1.01200930924,1.01204593817,1.01208258633,1.01211925372,
1.01215594034,1.01219264618,1.01222937125,1.01226611554,1.01230287905,1.01233966177,1.01237646371,1.01241328485,1.01245012521,1.01248698476,1.01252386352,1.01256076148,1.01259767864,1.01263461498,1.01267157052,1.01270854525,
1.01274553916,1.01278255225,1.01281958452,1.01285663597,1.0128937066,1.01293079639,1.01296790536,1.01300503349,1.01304218078,1.01307934724,1.01311653285,1.01315373762,1.01319096154,1.01322820461,1.01326546683,1.01330274819,
1.01334004869,1.01337736834,1.01341470711,1.01345206503,1.01348944207,1.01352683824,1.01356425354,1.01360168796,1.0136391415,1.01367661416,1.01371410594,1.01375161682,1.01378914682,1.01382669592,1.01386426413,1.01390185144,
1.01393945784,1.01397708335,1.01401472794,1.01405239163,1.01409007441,1.01412777627,1.01416549722,1.01420323724,1.01424099635,1.01427877453,1.01431657178,1.0143543881,1.01439222349,1.01443007794,1.01446795145,1.01450584403,
1.01454375566,1.01458168634,1.01461963608,1.01465760487,1.0146955927,1.01473359957,1.01477162549,1.01480967044,1.01484773443,1.01488581746,1.01492391951,1.01496204059,1.0150001807,1.01503833983,1.01507651798,1.01511471515,
1.01515293133,1.01519116653,1.01522942073,1.01526769395,1.01530598617,1.01534429738,1.0153826276,1.01542097682,1.01545934503,1.01549773223,1.01553613843,1.0155745636,1.01561300777,1.01565147091,1.01568995303,1.01572845413,
1.01576697421,1.01580551325,1.01584407127,1.01588264825,1.01592124419,1.0159598591,1.01599849296,1.01603714578,1.01607581755,1.01611450828,1.01615321795,1.01619194657,1.01623069413,1.01626946063,1.01630824608,1.01634705045,
1.01638587377,1.01642471601,1.01646357718,1.01650245727,1.01654135629,1.01658027423,1.01661921109,1.01665816687,1.01669714155,1.01673613515,1.01677514766,1.01681417907,1.01685322939,1.0168922986,1.01693138672,1.01697049373,
1.01700961963,1.01704876443,1.01708792811,1.01712711068,1.01716631213,1.01720553247,1.01724477168,1.01728402977,1.01732330673,1.01736260256,1.01740191726,1.01744125083,1.01748060326,1.01751997456,1.01755936471,1.01759877372,
1.01763820158,1.01767764829,1.01771711385,1.01775659826,1.01779610152,1.01783562361,1.01787516454,1.01791472431,1.01795430292,1.01799390036,1.01803351662,1.01807315172,1.01811280564,1.01815247838,1.01819216994,1.01823188031,
1.01827160951,1.01831135751,1.01835112433,1.01839090995,1.01843071438,1.01847053761,1.01851037964,1.01855024047,1.0185901201,1.01863001852,1.01866993573,1.01870987173,1.01874982651,1.01878980008,1.01882979243,1.01886980356,
1.01890983347,1.01894988215,1.0189899496,1.01903003582,1.01907014081,1.01911026456,1.01915040708,1.01919056835,1.01923074838,1.01927094717,1.01931116471,1.019351401,1.01939165604,1.01943192983,1.01947222236,1.01951253363,
1.01955286364,1.01959321238,1.01963357986,1.01967396607,1.01971437101,1.01975479468,1.01979523707,1.01983569819,1.01987617802,1.01991667658,1.01995719384,1.01999772982,1.02003828451,1.02007885791,1.02011945002,1.02016006083,
1.02020069034,1.02024133855,1.02028200546,1.02032269106,1.02036339535,1.02040411834,1.02044486001,1.02048562036,1.0205263994,1.02056719712,1.02060801352,1.0206488486,1.02068970234,1.02073057476,1.02077146585,1.02081237561,
1.02085330403,1.02089425111,1.02093521685,1.02097620125,1.02101720431,1.02105822602,1.02109926638,1.02114032539,1.02118140304,1.02122249934,1.02126361428,1.02130474786,1.02134590008,1.02138707094,1.02142826042,1.02146946854,
1.02151069528,1.02155194066,1.02159320465,1.02163448727,1.0216757885,1.02171710836,1.02175844682,1.02179980391,1.0218411796,1.0218825739,1.0219239868,1.02196541831,1.02200686842,1.02204833713,1.02208982444,1.02213133034,
1.02217285483,1.02221439791,1.02225595959,1.02229753984,1.02233913869,1.02238075611,1.02242239211,1.02246404669,1.02250571985,1.02254741157,1.02258912187,1.02263085074,1.02267259817,1.02271436417,1.02275614872,1.02279795184,
1.02283977351,1.02288161374,1.02292347253,1.02296534986,1.02300724574,1.02304916017,1.02309109314,1.02313304465,1.02317501471,1.0232170033,1.02325901043,1.02330103609,1.02334308028,1.023385143,1.02342722425,1.02346932402,
1.02351144231,1.02355357913,1.02359573446,1.02363790831,1.02368010068,1.02372231155,1.02376454094,1.02380678883,1.02384905523,1.02389134013,1.02393364353,1.02397596543,1.02401830583,1.02406066472,1.0241030421,1.02414543798,
1.02418785234,1.02423028519,1.02427273652,1.02431520634,1.02435769463,1.02440020141,1.02444272665,1.02448527038,1.02452783257,1.02457041323,1.02461301236,1.02465562995,1.02469826601,1.02474092053,1.02478359351,1.02482628494,
1.02486899483,1.02491172317,1.02495446996,1.02499723519,1.02504001888,1.02508282101,1.02512564158,1.02516848059,1.02521133804,1.02525421392,1.02529710824,1.02534002099,1.02538295217,1.02542590177,1.0254688698,1.02551185626,
1.02555486113,1.02559788443,1.02564092614,1.02568398627,1.02572706481,1.02577016176,1.02581327712,1.02585641088,1.02589956305,1.02594273363,1.0259859226,1.02602912997,1.02607235574,1.02611559991,1.02615886246,1.02620214341,
1.02624544275,1.02628876047,1.02633209657,1.02637545106,1.02641882393,1.02646221518,1.0265056248,1.0265490528,1.02659249917,1.02663596391,1.02667944701,1.02672294849,1.02676646832,1.02681000652,1.02685356308,1.026897138,
1.02694073128,1.02698434291,1.02702797289,1.02707162122,1.02711528789,1.02715897292,1.02720267629,1.027246398,1.02729013805,1.02733389644,1.02737767316,1.02742146822,1.02746528161,1.02750911333,1.02755296338,1.02759683176,
1.02764071846,1.02768462348,1.02772854682,1.02777248848,1.02781644846,1.02786042675,1.02790442336,1.02794843827,1.02799247149,1.02803652302,1.02808059286,1.02812468099,1.02816878743,1.02821291217,1.0282570552,1.02830121653,
1.02834539615,1.02838959406,1.02843381026,1.02847804474,1.02852229752,1.02856656857,1.02861085791,1.02865516552,1.02869949142,1.02874383558,1.02878819803,1.02883257874,1.02887697772,1.02892139497,1.02896583049,1.02901028427,
1.02905475631,1.02909924662,1.02914375518,1.02918828199,1.02923282706,1.02927739039,1.02932197196,1.02936657178,1.02941118985,1.02945582617,1.02950048072,1.02954515352,1.02958984456,1.02963455383,1.02967928134,1.02972402708,
1.02976879106,1.02981357326,1.02985956509,1.0299030991,1.02994793598,1.02999279108,1.0300376644,1.03008255594,1.03012746569,1.03017239366,1.03021733984,1.03026230424,1.03030728684,1.03035228765,1.03039730666,1.03044234388,
1.03048739929,1.03053247291,1.03057756473,1.03062267474,1.03066780295,1.03071294934,1.03075811393,1.03080329671,1.03084849767,1.03089371682,1.03093895415,1.03098420967,1.03102948336,1.03107477523,1.03112008527,1.03116541349,
1.03121075988,1.03125612445,1.03130150718,1.03134690807,1.03139232713,1.03143776436,1.03148321974,1.03152869328,1.03157418499,1.03161969484,1.03166522285,1.03171076902,1.03175633333,1.03180191579,1.0318475164,1.03189313515,
1.03193877204,1.03198442708,1.03203010026,1.03207579157,1.03212150102,1.0321672286,1.03221297431,1.03225873816,1.03230452013,1.03235032023,1.03239613846,1.03244197481,1.03248782928,1.03253370187,1.03257959257,1.0326255014,
1.03267142833,1.03271737338,1.03276333655,1.03280931782,1.03285531719,1.03290133468,1.03294737026,1.03299342395,1.03303949574,1.03308558562,1.03313169361,1.03317781968,1.03322396386,1.03327012612,1.03331630647,1.03336250491,
1.03340872144,1.03345495605,1.03350120874,1.03354747951,1.03359376837,1.0336400753,1.0336864003,1.03373274338,1.03377910453,1.03382548376,1.03387188105,1.0339182964,1.03396472983,1.03401118131,1.03405765086,1.03410413847,
1.03415064414,1.03419716786,1.03424370964,1.03429026947,1.03433684735,1.03438344329,1.03443005727,1.0344766893,1.03452333937,1.03457000748,1.03461669364,1.03466339783,1.03471012007,1.03475686034,1.03480361864,1.03485039498,
1.03489718935,1.03494400174,1.03499083217,1.03503768062,1.03508454709,1.03513143159,1.03517833411,1.03522525465,1.0352721932,1.03531914977,1.03536612436,1.03541311696,1.03546012756,1.03550715618,1.03555420281,1.03560126744,
1.03564835007,1.03569545071,1.03574256935,1.03578970599,1.03583686062,1.03588403325,1.03593122388,1.0359784325,1.0360256591,1.0360729037,1.03612016629,1.03616744686,1.03621474541,1.03626206195,1.03630939647,1.03635674897,
1.03640411945,1.0364515079,1.03649891433,1.03654633873,1.0365937811,1.03664124144,1.03668871975,1.03673621602,1.03678373026,1.03683126246,1.03687881262,1.03692638075,1.03697396683,1.03702157087,1.03706919286,1.03711683281,
1.0371644907,1.03721216655,1.03725986035,1.03730757209,1.03735530178,1.03740304941,1.03745081499,1.0374985985,1.03754639995,1.03759421934,1.03764205667,1.03768991193,1.03773778512,1.03778567625,1.0378335853,1.03788151228,
1.03792945719,1.03797742002,1.03802540077,1.03807339945,1.03812141604,1.03816945055,1.03821750298,1.03826557333,1.03831366159,1.03836176776,1.03840989184,1.03845803383,1.03850619372,1.03855437152,1.03860256723,1.03865078084,
1.03869901235,1.03874726176,1.03879552906,1.03884381427,1.03889211736,1.03894043835,1.03898877723,1.03903713401,1.03908550867,1.03913390121,1.03918231165,1.03923073996,1.03927918616,1.03932765024,1.0393761322,1.03942463204,
1.03947314975,1.03952168533,1.03957023879,1.03961881013,1.03966739933,1.0397160064,1.03976463133,1.03981327414,1.0398619348,1.03991061333,1.03995930972,1.04000802397,1.04005675608,1.04010550604,1.04015427386,1.04020305953,
1.04025186305,1.04030068442,1.04034952365,1.04039838071,1.04044725563,1.04049614839,1.04054505899,1.04059398743,1.04064293372,1.04069189784,1.04074087979,1.04078987959,1.04083889721,1.04088793267,1.04093698596,1.04098605708,
};
static const double hope_exp_a = (double)(1 << 20) / M_LN2;
static const double hope_exp_b = 1023 * (1 << 20) - 60801;
enum { hope_exp_mask = (1 << 12) - 1 };
union hope_exp_union_t {
hope_exp_union_t(int32_t i0, int32_t i1) { i[0] = i0; i[1] = i1; };
int32_t i[2];
double d;
} hope_exp_bounds(0x7ff << 20, 0);
inline double hope_exp(double x) {
if(x > 1022 * M_LN2)
return hope_exp_bounds.d;
int32_t tmp = hope_exp_a * x + hope_exp_b;
hope_exp_union_t y(0, tmp);
y.d *= hope_exp_data[hope_exp_mask & (tmp >> (20 - 12))];
return y.d;
};
"""
# C++ code template: native_mod(a, b).
# The unsigned overload is plain `%`; the signed overload adjusts the
# remainder so it carries the sign of the divisor b (Python-style modulo
# semantics rather than C's truncation-toward-zero `%`), e.g.
# native_mod(-5, 3) == 1 and native_mod(5, -3) == -1.
# NOTE: this is a runtime string emitted into generated C++ source; do not
# reformat its contents.
LIBRARY_NATIVE_MOD = """
template<typename T> inline typename std::enable_if<std::is_unsigned<T>::value, T>::type native_mod(T a, T b) {
return a % b;
}
template<typename T> inline typename std::enable_if<std::is_signed<T>::value, T>::type native_mod(T a, T b) {
T aa = b < 0 ? -a : a;
T bb = std::abs(b);
T c = aa < 0 ? ((-aa % bb) == 0 ? 0 : bb - (-aa % bb)) : aa % bb;
return b < 0 ? -c : c;
}
"""
# C++ code template: native_sign(x) -> -1, 0 or +1, computed branch-free
# as (0 < x) - (x < 0).
LIBRARY_NATIVE_SIGN = """
template<typename T> inline T native_sign(T arg) {
return T(T(0) < arg) - T(arg < T(0));
}
"""
# C++ code template: native_rangecheck(x, u, l, idxname, varname).
# Validates u <= x < l; on failure it sets a Python ValueError and throws a
# C++ exception so the generated code can unwind back to the CPython layer.
# NOTE(review): despite the parameter names, `u` is the LOWER bound and `l`
# is the exclusive UPPER bound — see the comparison `x < u || x >= l`.
# The names live inside a runtime string, so they are left untouched here.
LIBRARY_NATIVE_RANGECHECK = """
#include <string>
inline int native_rangecheck(int x, int u, int l, std::string idxname, std::string varname) {
if (x < u || x >= l) {
PyErr_Format(PyExc_ValueError, "%s (%d) not in range %d to %d of %s", idxname.c_str(), x, u, l, varname.c_str());
throw std::exception();
}
return x;
}
"""
# C++ code template: numpy_interp(x0, x, y, size).
# Binary-searches x (assumed sorted ascending — TODO confirm at call sites)
# for the interval bracketing x0, then linearly interpolates between the
# corresponding y values. Unlike numpy.interp, no visible clamping is done
# for x0 outside [x[0], x[size-1]]; out-of-range inputs extrapolate from the
# nearest end interval.
LIBRARY_NUMPY_INTERP = """
template<typename X0, typename X, typename Y> inline Y numpy_interp(X0 x0, X * x, Y * y, npy_intp size) {
npy_intp l, r, m;
for (l = 0, r = size - 1, m = (l + r) / 2; 1 < r - l; m = (l + r) / 2)
if (x0 < x[m])
r = m;
else
l = m;
auto b = (x0 - x[l]) / (x[r] - x[l]);
return (1 - b) * y[l] + b * y[r];
}
"""
# C++ code template: common header block for every generated extension
# module — CPython and NumPy C-API headers plus the C++ standard-library
# includes the other templates rely on. NPY_NO_DEPRECATED_API pins the
# NumPy API surface to the 1.7 level.
LIBRARY_IMPORTS = """
#define PY_ARRAY_UNIQUE_SYMBOL fkt_ARRAY_API
#define NPY_NO_DEPRECATED_API NPY_1_7_API_VERSION
#include <Python.h>
#include <numpy/arrayobject.h>
#include <numpy/arrayscalars.h>
#include <cmath>
#include <tuple>
#include <numeric>
#include <cstdint>
#include <cstdlib>
#include <exception>
#include <functional>
#include <type_traits>
"""
# C++ code template: PyObj, a minimal PyObject* holder.
# Plain construction/assignment stores a borrowed reference (dec == false);
# incref() marks the pointer as owned so the destructor Py_DECREFs it.
# Implicit conversions to PyObject* / PyArrayObject* / bool let generated
# code use it interchangeably with raw pointers.
LIBRARY_PYOBJ_DEF = """
struct PyObj {
typedef PyObject * ptr_t;
typedef PyArrayObject * arrptr_t;
PyObj(): dec(false), ptr(NULL) {}
PyObj(ptr_t p): dec(false), ptr(p) {}
~PyObj() { if(dec) Py_DECREF(ptr); }
PyObj & operator=(ptr_t p) { if(dec) Py_DECREF(ptr); ptr = p; dec = false; return *this; }
PyObj & incref(ptr_t p) { if(dec) Py_DECREF(ptr); ptr = p; dec = (p != NULL); return *this; }
operator bool() const { return ptr; }
operator ptr_t() const { return ptr; }
operator arrptr_t() const { return (arrptr_t)ptr; }
bool dec;
ptr_t ptr;
};
"""
# C code template: PyMethodDef table for the generated module, Python 2
# flavour. Rendered with str.format(fktname=...); literal C braces are
# escaped as {{ }}.
LIBRARY_METHODS_DECL_PY2 ="""
PyMethodDef {fktname}Methods[] = {{
{{ \"set_create_signature\", (PyCFunction)set_create_signature, METH_VARARGS }},
{{ \"run\", (PyCFunction)run, METH_VARARGS }},
{{ NULL, NULL }}
}};
"""
# C code template: Python 2 module entry point. Rendered with
# str.format(fktname=...) (literal C braces are doubled as {{ }}), so the
# exported symbol becomes init<fktname> as the Python 2 import machinery
# requires, and Py_InitModule registers the matching module name.
# Fix: the placeholder had been mangled to the literal text "(unknown)",
# which is not a valid C identifier and is inconsistent with the {fktname}
# placeholders used by every sibling template (e.g. {fktname}Methods here).
LIBRARY_INIT_DECL_PY2 = """
PyMODINIT_FUNC init{fktname}(void) {{
import_array();
PyImport_ImportModule(\"numpy\");
(void)Py_InitModule(\"{fktname}\", {fktname}Methods);
}}
"""
# C code template: PyMethodDef table for the generated module, Python 3
# flavour (four-field entries including the docstring). Rendered with
# str.format(fktname=...); literal C braces are escaped as {{ }}.
LIBRARY_METHODS_DECL_PY3 = """
PyMethodDef {fktname}Methods[] = {{
{{ \"set_create_signature\", set_create_signature, METH_VARARGS, \"signal handler\" }},
{{ \"run\", run, METH_VARARGS, \"module function\" }},
{{ NULL, NULL, 0, NULL }}
}};
"""
# C code template: PyModuleDef for the Python 3 module. m_size of -1 means
# the module keeps its state in globals and does not support sub-interpreter
# copies. Rendered with str.format(fktname=...).
LIBRARY_MODULE_DECL_PY3 = """
static struct PyModuleDef {fktname}module = {{
PyModuleDef_HEAD_INIT,
\"{fktname}\",
NULL,
-1,
{fktname}Methods
}};
"""
# C code template: Python 3 module entry point (PyInit_<fktname> /
# PyModule_Create). Rendered with str.format(fktname=...); literal C
# braces are doubled as {{ }}.
# Fix: the placeholder had been mangled to the literal text "(unknown)",
# which would emit the invalid C symbol PyInit_(unknown) and could never
# match the {fktname}module definition from LIBRARY_MODULE_DECL_PY3.
LIBRARY_INIT_DECL_PY3 = """
PyMODINIT_FUNC PyInit_{fktname}(void) {{
import_array();
PyImport_ImportModule(\"numpy\");
return PyModule_Create(&{fktname}module);
}}
"""
# C++ code template: SIGSEGV/SIGBUS handler. Collects a backtrace, demangles
# each frame with abi::__cxa_demangle, writes the report to stderr in one
# shot (built in an ostringstream first), and exits the process.
# NOTE(review): the hard-coded column offset 59 assumes a fixed
# backtrace_symbols() line layout (looks like the macOS format) — confirm on
# the target platform. Frame 0 (the handler itself) is skipped by starting
# the loop at i = 1.
LIBRARY_SIGHANDLER = """
#include <string>
#include <sstream>
#include <iostream>
#include <cxxabi.h>
#include <execinfo.h>
#include <signal.h>
void sighandler(int sig);
void sighandler(int sig) {
std::ostringstream buffer;
buffer << "Abort by " << (sig == SIGSEGV ? "segfault" : "bus error") << std::endl;
void * stack[64];
std::size_t depth = backtrace(stack, 64);
if (!depth)
buffer << " <empty stacktrace, possibly corrupt>" << std::endl;
else {
char ** symbols = backtrace_symbols(stack, depth);
for (std::size_t i = 1; i < depth; ++i) {
std::string symbol = symbols[i];
if (symbol.find_first_of(' ', 59) != std::string::npos) {
std::string name = symbol.substr(59, symbol.find_first_of(' ', 59) - 59);
int status;
char * demangled = abi::__cxa_demangle(name.c_str(), NULL, NULL, &status);
if (!status) {
buffer << " "
<< symbol.substr(0, 59)
<< demangled
<< symbol.substr(59 + name.size())
<< std::endl;
free(demangled);
} else
buffer << " " << symbol << std::endl;
} else
buffer << " " << symbol << std::endl;
}
free(symbols);
}
std::cerr << buffer.str();
std::exit(EXIT_FAILURE);
}
"""
LIBRARY_CREATE_SIGNATURE = """
PyObject * create_signature;
struct sigaction slot;
PyObject * set_create_signature(PyObject * self, PyObject * args) {
if (!PyArg_ParseTuple(args, "O", &create_signature)) {
PyErr_SetString(PyExc_ValueError, "Invalid Argument to set_create_signature!");
return NULL;
}
Py_INCREF(create_signature);
memset(&slot, 0, sizeof(slot));
slot.sa_handler = &sighandler;
sigaction(SIGSEGV, &slot, NULL);
sigaction(SIGBUS, &slot, NULL);
Py_INCREF(Py_None);
return Py_None;
}
""" | gpl-3.0 |
zdrjson/DDKit | python/iMacFirstPythonPragrammer/FindSameNameImage.py | 1 | 1755 | import os, sys, re, shutil
if __name__ == '__main__':
used_map = {}
resPath = "./MagnetPF/Res/"
depDir = "Deprecated"
skipDir = ["message"]
for root, dirs, files in os.walk("./"):
for file in files:
if file.endswith(".m"):
filepath = os.path.join(root, file)
f = open(filepath, "r")
for line in f:
match = re.findall(".*?@\"(Res.*?.png)\".*?", line)
if match:
for image in match:
used_map[image] = 1
skipDir.append(depDir)
for root, dirs, files in os.walk(resPath):
for file in files:
orginfile = os.path.join(root, file)
match = re.findall(".*?(Res.*?.png).*?", orginfile)
if match:
matchfile = match[0].replace("@2x","").replace("@3x","")
print matchfile
if not used_map.has_key(matchfile):
filename = orginfile.split(os.path.sep)[-1]
relPath = orginfile.replace(resPath,"")
originDir = relPath.split(os.path.sep)[0]
tofile = resPath + depDir + "/" + relPath
topath = tofile.replace(filename,"")
if not originDir in skipDir:
if not os.path.exists(topath):
os.mkdir(topath)
print "from: " + orginfile
print " to:" + tofile
print ""
shutil.move(orginfile, tofile) | mit |
owlabs/incubator-airflow | airflow/contrib/operators/snowflake_operator.py | 1 | 3034 | # -*- coding: utf-8 -*-
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from airflow.contrib.hooks.snowflake_hook import SnowflakeHook
from airflow.models import BaseOperator
from airflow.utils.decorators import apply_defaults
class SnowflakeOperator(BaseOperator):
    """
    Runs SQL against a Snowflake database.

    :param snowflake_conn_id: reference to specific snowflake connection id
    :type snowflake_conn_id: str
    :param sql: the sql code to be executed. (templated)
    :type sql: Can receive a str representing a sql statement,
        a list of str (sql statements), or reference to a template file.
        Template reference are recognized by str ending in '.sql'
    :param warehouse: name of warehouse (will overwrite any warehouse
        defined in the connection's extra JSON)
    :type warehouse: str
    :param database: name of database (will overwrite database defined
        in connection)
    :type database: str
    :param schema: name of schema (will overwrite schema defined in
        connection)
    :type schema: str
    :param role: name of role (will overwrite any role defined in
        connection's extra JSON)
    """

    template_fields = ('sql',)
    template_ext = ('.sql',)
    ui_color = '#ededed'

    @apply_defaults
    def __init__(
            self, sql, snowflake_conn_id='snowflake_default', parameters=None,
            autocommit=True, warehouse=None, database=None, role=None,
            schema=None, *args, **kwargs):
        super(SnowflakeOperator, self).__init__(*args, **kwargs)
        # What to run and how.
        self.sql = sql
        self.parameters = parameters
        self.autocommit = autocommit
        # Connection settings; each overrides its counterpart on the
        # connection when not None.
        self.snowflake_conn_id = snowflake_conn_id
        self.warehouse = warehouse
        self.database = database
        self.schema = schema
        self.role = role

    def get_hook(self):
        """Build a SnowflakeHook carrying this operator's connection overrides."""
        hook_kwargs = {
            'snowflake_conn_id': self.snowflake_conn_id,
            'warehouse': self.warehouse,
            'database': self.database,
            'role': self.role,
            'schema': self.schema,
        }
        return SnowflakeHook(**hook_kwargs)

    def execute(self, context):
        """Render nothing extra -- just log the SQL and hand it to the hook."""
        self.log.info('Executing: %s', self.sql)
        self.get_hook().run(
            self.sql,
            autocommit=self.autocommit,
            parameters=self.parameters)
| apache-2.0 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.