repo_name stringlengths 5 100 | path stringlengths 4 294 | copies stringclasses 990
values | size stringlengths 4 7 | content stringlengths 666 1M | license stringclasses 15
values |
|---|---|---|---|---|---|
axt/angr | angr/analyses/identifier/functions/malloc.py | 5 | 3826 |
from ..func import Func, TestData
from ....errors import SimMemoryError
class malloc(Func):
    """Identifier heuristic that recognizes a binary's malloc() implementation.

    Runs the candidate function through a series of allocation probes and
    rejects it if any observed behavior is inconsistent with malloc.
    """

    def __init__(self):
        super(malloc, self).__init__()  # pylint: disable=useless-super-delegation

    def num_args(self):
        # malloc takes exactly one argument: the requested allocation size.
        return 1

    def get_name(self):
        return "malloc"

    def gen_input_output_pair(self):
        # No fixed input/output pair exists for an allocator; identification
        # relies entirely on the behavioral checks in pre_test().
        return None

    def pre_test(self, func, runner):
        # we should not get a real output from the function with a value this large
        num = 0xffff0000
        test_input = [num]
        test_output = [None]
        return_val = None
        max_steps = 10
        test = TestData(test_input, test_output, return_val, max_steps)
        state = runner.get_out_state(func, test, concrete_rand=True)
        # eax in (0x10, 0xfffffff0) looks like a real pointer; a huge request
        # should have failed, so that rules out malloc.
        if state is not None and 0x10 < state.se.eval(state.regs.eax) < 0xfffffff0:
            return False
        # we should be able to get different outputs if we call malloc multiple times
        num = 0x80
        test_input = [num]
        test_output = [None]
        return_val = None
        max_steps = 40
        test = TestData(test_input, test_output, return_val, max_steps)
        returned_locs = []
        state = runner.get_out_state(func, test, concrete_rand=True)
        if state is None:
            return False
        returned_locs.append(state.se.eval(state.regs.eax))
        # Chain six more calls through the same state so the allocator's
        # internal bookkeeping persists between calls.
        for i in range(6):  # pylint: disable=unused-variable
            state = runner.get_out_state(func, test, initial_state=state, concrete_rand=True)
            if state is None:
                return False
            returned_locs.append(state.se.eval(state.regs.eax))
        # Addresses below 0x3000 are implausible heap pointers.
        if any(a < 0x3000 for a in returned_locs):
            return False
        # if we got the same value 2x it didnt work
        if len(set(returned_locs)) != len(returned_locs):
            return False
        # if we got 0 it didn't work
        if any(a == 0 for a in returned_locs):
            return False
        # if they all share the same offset mod 0x1000 it seems to be always calling allocate
        if all(a % 0x1000 == returned_locs[0] % 0x1000 for a in returned_locs):
            return False
        # they all should be writable/readable (permission bits 1|2 == 3)
        try:
            if any(state.se.eval(state.memory.permissions(a)) & 3 != 3 for a in returned_locs):
                return False
        except SimMemoryError:
            return False
        # we should be able to call malloc 0xf00 afterwards
        num = 0xf00
        test_input = [num]
        test_output = [None]
        return_val = None
        max_steps = 40
        test = TestData(test_input, test_output, return_val, max_steps)
        # NOTE(review): this reassignment clobbers the 0x80-run addresses, so
        # the `returned_locs == returned_locs2` comparison at the end can never
        # be equal (empty vs. non-empty) -- the "different size gives different
        # values" check is vacuous. Confirm intent before changing behavior.
        returned_locs = []
        state = runner.get_out_state(func, test, initial_state=state, concrete_rand=True)
        if state is None:
            return False
        res = state.se.eval(state.regs.eax)
        if res < 0x10 or res > 0xfffffff0:
            return False
        # we should get different values if we try with a different size
        num = 0x320
        test_input = [num]
        test_output = [None]
        return_val = None
        max_steps = 10
        test = TestData(test_input, test_output, return_val, max_steps)
        returned_locs2 = []
        state = runner.get_out_state(func, test, concrete_rand=True)
        if state is None:
            return False
        returned_locs2.append(state.se.eval(state.regs.eax))
        for i in range(10):
            state = runner.get_out_state(func, test, initial_state=state, concrete_rand=True)
            if state is None:
                return False
            returned_locs2.append(state.se.eval(state.regs.eax))
        if any(a < 0x3000 for a in returned_locs2):
            return False
        if returned_locs == returned_locs2:
            return False
        return True
| bsd-2-clause |
makerbot/ReplicatorG | skein_engines/skeinforge-50/fabmetheus_utilities/geometry/manipulation_shapes/mirror.py | 12 | 2021 | """
Add material to support overhang or remove material at the overhang angle.
"""
from __future__ import absolute_import
#Init has to be imported first because it has code to workaround the python bug where relative imports don't work if the module is imported as a main module.
import __init__
from fabmetheus_utilities.geometry.creation import solid
from fabmetheus_utilities.geometry.geometry_tools import face
from fabmetheus_utilities.geometry.geometry_utilities import evaluate
from fabmetheus_utilities.geometry.geometry_utilities import matrix
from fabmetheus_utilities.geometry.manipulation_shapes import flip
from fabmetheus_utilities.geometry.solids import triangle_mesh
from fabmetheus_utilities.vector3 import Vector3
from fabmetheus_utilities import euclidean
__author__ = 'Enrique Perez (perez_enrique@yahoo.com)'
__credits__ = 'Art of Illusion <http://www.artofillusion.org/>'
__date__ = '$Date: 2008/02/05 $'
__license__ = 'GNU Affero General Public License http://www.gnu.org/licenses/agpl.html'
globalExecutionOrder = 200
def getManipulatedGeometryOutput(elementNode, geometryOutput, prefix):
    'Get the mirrored geometry output unioned with the original.'
    # Flip a copy so the original geometry survives for the union below.
    flippedGeometryOutput = triangle_mesh.getGeometryOutputCopy(geometryOutput)
    flip.flipPoints(elementNode, matrix.getVertexes(flippedGeometryOutput), prefix)
    if flip.getShouldReverse(elementNode, prefix):
        # Reverse face winding so the mirrored mesh keeps outward-facing normals.
        flippedFaces = face.getFaces(flippedGeometryOutput)
        for flippedFace in flippedFaces:
            flippedFace.vertexIndexes.reverse()
    return {'union' : {'shapes' : [flippedGeometryOutput, geometryOutput]}}
def getManipulatedPaths(close, elementNode, loop, prefix, sideLength):
    'Get the loop concatenated with its flipped copy, as a single path.'
    return [loop + flip.getFlippedLoop(elementNode, euclidean.getPathCopy(loop), prefix)]
def getNewDerivation(elementNode, prefix, sideLength):
    'Get new derivation.  Mirror needs no derivation state, so an empty object suffices.'
    return evaluate.EmptyObject()
def processElementNode(elementNode):
    'Process the xml element by dispatching to the geometry/path manipulators above.'
    solid.processElementNodeByFunctionPair(elementNode, getManipulatedGeometryOutput, getManipulatedPaths)
| gpl-2.0 |
NorfolkDataSci/presentations | 2018-01_chatbot/serverless-chatbots-workshop-master/LambdaFunctions/nlp/nltk/twitter/util.py | 7 | 4541 | # -*- coding: utf-8 -*-
# Natural Language Toolkit: Twitter client
#
# Copyright (C) 2001-2016 NLTK Project
# Author: Ewan Klein <ewan@inf.ed.ac.uk>
# Lorenzo Rubio <lrnzcig@gmail.com>
# URL: <http://nltk.org/>
# For license information, see LICENSE.TXT
"""
Authentication utilities to accompany :module:`twitterclient`.
"""
from __future__ import print_function
import os
import pprint
from twython import Twython
def credsfromfile(creds_file=None, subdir=None, verbose=False):
    """
    Convenience function for authentication.

    Delegates to :meth:`Authenticate.load_creds`; see that method for the
    credentials-file format and parameter semantics.
    """
    return Authenticate().load_creds(creds_file=creds_file, subdir=subdir, verbose=verbose)
class Authenticate(object):
    """
    Methods for authenticating with Twitter.
    """

    def __init__(self):
        # Default credentials filename; may be overridden in load_creds().
        self.creds_file = 'credentials.txt'
        self.creds_fullpath = None
        # Parsed name=value OAuth pairs from the credentials file.
        self.oauth = {}
        try:
            # The TWITTER environment variable, when set, names the default
            # directory containing the credentials file.
            self.twitter_dir = os.environ['TWITTER']
            self.creds_subdir = self.twitter_dir
        except KeyError:
            self.twitter_dir = None
            self.creds_subdir = None

    def load_creds(self, creds_file=None, subdir=None, verbose=False):
        """
        Read OAuth credentials from a text file.

        ::
           File format for OAuth 1
           =======================
           app_key=YOUR_APP_KEY
           app_secret=YOUR_APP_SECRET
           oauth_token=OAUTH_TOKEN
           oauth_token_secret=OAUTH_TOKEN_SECRET

        ::
           File format for OAuth 2
           =======================
           app_key=YOUR_APP_KEY
           app_secret=YOUR_APP_SECRET
           access_token=ACCESS_TOKEN

        :param str creds_file: File containing credentials. ``None`` (default) reads\
        data from `TWITTER/'credentials.txt'`
        :param str subdir: Directory containing the file; defaults to the
            TWITTER environment variable.
        :raises ValueError: if no directory can be determined, or the file
            lacks a complete OAuth 1 / OAuth 2 key set.
        :raises OSError: if the resolved path is not an existing file.
        """
        if creds_file is not None:
            self.creds_file = creds_file
        if subdir is None:
            if self.creds_subdir is None:
                # Neither an explicit subdir nor the TWITTER env var exists.
                msg = "Supply a value to the 'subdir' parameter or" +\
                      " set the TWITTER environment variable."
                raise ValueError(msg)
        else:
            self.creds_subdir = subdir
        self.creds_fullpath =\
            os.path.normpath(os.path.join(self.creds_subdir, self.creds_file))
        if not os.path.isfile(self.creds_fullpath):
            raise OSError('Cannot find file {}'.format(self.creds_fullpath))
        with open(self.creds_fullpath) as infile:
            if verbose:
                print('Reading credentials file {}'.format(self.creds_fullpath))
            for line in infile:
                # Split on the first '=' only; values may contain '='.
                if '=' in line:
                    name, value = line.split('=', 1)
                    self.oauth[name.strip()] = value.strip()
        self._validate_creds_file(verbose=verbose)
        return self.oauth

    def _validate_creds_file(self, verbose=False):
        """Check validity of a credentials file."""
        oauth1 = False
        oauth1_keys = ['app_key', 'app_secret', 'oauth_token', 'oauth_token_secret']
        oauth2 = False
        oauth2_keys = ['app_key', 'app_secret', 'access_token']
        # A file is valid if it contains a full OAuth 1 key set, or failing
        # that, a full OAuth 2 key set.
        if all(k in self.oauth for k in oauth1_keys):
            oauth1 = True
        elif all(k in self.oauth for k in oauth2_keys):
            oauth2 = True
        if not (oauth1 or oauth2):
            msg = 'Missing or incorrect entries in {}\n'.format(self.creds_file)
            msg += pprint.pformat(self.oauth)
            raise ValueError(msg)
        elif verbose:
            print('Credentials file "{}" looks good'.format(self.creds_file))
def add_access_token(creds_file=None):
    """
    For OAuth 2, retrieve an access token for an app and append it to a
    credentials file.

    :param str creds_file: Credentials file to read and append to; defaults
        to ``credentials2.txt`` next to this module.
    """
    if creds_file is None:
        creds_file = os.path.join(os.path.dirname(__file__), 'credentials2.txt')
    oauth2 = credsfromfile(creds_file=creds_file)
    twitter = Twython(oauth2['app_key'], oauth2['app_secret'], oauth_version=2)
    token_line = 'access_token={}\n'.format(twitter.obtain_access_token())
    # Append so existing app_key/app_secret entries are preserved.
    with open(creds_file, 'a') as outfile:
        print(token_line, file=outfile)
def guess_path(pth):
    """
    If the path is not absolute, guess that it is a subdirectory of the
    user's home directory.

    :param str pth: The pathname of the directory where files of tweets should be written
    """
    # Relative paths are anchored under '~'; absolute paths pass through.
    if not os.path.isabs(pth):
        return os.path.expanduser(os.path.join("~", pth))
    return pth
| mit |
appier/h5sparse | setup.py | 1 | 1432 | #!/usr/bin/env python
from setuptools import setup

# Dependencies needed only at build/test time.
setup_requires = [
    'nose',
    'coverage',
]

# Runtime dependencies.
install_requires = [
    'h5py',
    'numpy',
    'scipy',
    'six',
]

tests_require = []

description = "Scipy sparse matrix in HDF5."
# Long description shown on PyPI: a GitHub pointer followed by the README.
long_description = """\
Please visit the `Github repository <https://github.com/appier/h5sparse>`_
for more information.\n
"""
with open('README.rst') as fp:
    long_description += fp.read()

setup(
    name='h5sparse',
    version="0.1.0",
    description=description,
    long_description=long_description,
    author='Appier Inc.',
    url='https://github.com/appier/h5sparse',
    setup_requires=setup_requires,
    install_requires=install_requires,
    tests_require=tests_require,
    license="MIT",
    classifiers=[
        'Development Status :: 3 - Alpha',
        'Intended Audience :: Developers',
        'Intended Audience :: Information Technology',
        'Intended Audience :: Science/Research',
        'Programming Language :: Python :: 2.7',
        'Programming Language :: Python :: 3.5',
        'Programming Language :: Python :: 3.6',
        'Programming Language :: Python :: 3.7',
        'Topic :: Scientific/Engineering',
        'Topic :: Database',
        'Topic :: Software Development :: Libraries :: Python Modules',
        'License :: OSI Approved :: MIT License',
    ],
    test_suite='nose.collector',
    packages=[
        'h5sparse',
    ],
)
| mit |
macarthur-lab/xbrowse | xbrowse/utils/basic_utils_tests.py | 1 | 1045 | from django.test import TestCase
from xbrowse.utils.basic_utils import _encode_name, _decode_name
class BasicUtilsTest(TestCase):
    """Round-trip tests for the _encode_name/_decode_name pair."""

    def test_encode_decode_name(self):
        # test special cases: strings containing characters the encoder
        # escapes, plus strings that already look like escape sequences.
        for test_string in [
            "SPECIAL_CHARS_TEST1_.,#*$[]{}()_1_.,#*$[]{}()/\\",
            "SPECIAL_CHARS_TEST2_..,,##**$$[[]]{{}}(())////\\\\",
            "SPECIAL_CHARS_TEST3__$dot$__$comma$__$hash$___$star$__$lp$__$rp$__$lsb$__$rsb$__$lcb$__$rcb$_",
            ".SPECIAL_START_CHAR",
            "*SPECIAL_START_CHAR",
            "__SPECIAL_START_CHAR",
            "__$SPECIAL_START_CHAR",
            "_$SPECIAL_START_CHAR",
        ]:
            decoded_test_string = _decode_name(_encode_name(test_string))
            self.assertEqual(test_string, decoded_test_string)
        # test all ascii chars: printable ASCII is 32 (space) through 126 ('~').
        for acii_code in range(32, 127):
            test_char = chr(acii_code)
            decoded_test_char = _decode_name(_encode_name(test_char))
            self.assertEqual(test_char, decoded_test_char)
| agpl-3.0 |
heeraj123/oh-mainline | vendor/packages/Django/django/contrib/redirects/middleware.py | 215 | 1649 | from __future__ import unicode_literals
from django.conf import settings
from django.contrib.redirects.models import Redirect
from django.contrib.sites.models import get_current_site
from django.core.exceptions import ImproperlyConfigured
from django import http
class RedirectFallbackMiddleware(object):
    """Serve entries from django.contrib.redirects for responses that 404."""

    def __init__(self):
        # Redirect records are stored per-site, so the sites framework is a
        # hard requirement.
        if 'django.contrib.sites' not in settings.INSTALLED_APPS:
            raise ImproperlyConfigured(
                "You cannot use RedirectFallbackMiddleware when "
                "django.contrib.sites is not installed."
            )

    def process_response(self, request, response):
        if response.status_code != 404:
            return response # No need to check for a redirect for non-404 responses.
        full_path = request.get_full_path()
        current_site = get_current_site(request)
        r = None
        try:
            r = Redirect.objects.get(site=current_site, old_path=full_path)
        except Redirect.DoesNotExist:
            pass
        # NOTE(review): this slash lookup runs even when an exact match was
        # already found, and a hit here overwrites it -- confirm that is the
        # intended precedence.
        if settings.APPEND_SLASH and not request.path.endswith('/'):
            # Try appending a trailing slash.
            # Insert the slash at the end of the path component only, so any
            # query string stays after it.
            path_len = len(request.path)
            full_path = full_path[:path_len] + '/' + full_path[path_len:]
            try:
                r = Redirect.objects.get(site=current_site, old_path=full_path)
            except Redirect.DoesNotExist:
                pass
        if r is not None:
            # An empty new_path means the resource is permanently gone (410).
            if r.new_path == '':
                return http.HttpResponseGone()
            return http.HttpResponsePermanentRedirect(r.new_path)
        # No redirect was found. Return the response.
        return response
| agpl-3.0 |
baroquebobcat/pants | src/python/pants/cache/artifact_cache.py | 2 | 6002 | # coding=utf-8
# Copyright 2014 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
from __future__ import (absolute_import, division, generators, nested_scopes, print_function,
unicode_literals, with_statement)
import logging
import os
import sys
# Note throughout the distinction between the artifact_root (which is where the artifacts are
# originally built and where the cache restores them to) and the cache root path/URL (which is
# where the artifacts are cached).
logger = logging.getLogger(__name__)
class ArtifactCacheError(Exception):
    """Fatal error raised while interacting with the artifact cache."""
    pass
class NonfatalArtifactCacheError(Exception):
    """Recoverable cache error; callers log it and continue without the cache."""
    pass
class UnreadableArtifact(object):
    """A False-y value to indicate a read-failure (vs a normal cache-miss)

    See docstring on `ArtifactCache.use_cached_files` for details.
    """

    def __init__(self, key, err=None):
        """
        :param CacheKey key: The key of the artifact that encountered an error
        :param err: Any additional information on the nature of the read error.
        """
        self.key = key
        self.err = err

    def __bool__(self):
        # Always falsy so callers can treat "unreadable" like a cache miss.
        return False

    # Python 2 spells the truthiness hook __nonzero__; alias it.
    __nonzero__ = __bool__

    def __str__(self):
        return "key={} err={}".format(self.key, self.err)
class ArtifactCache(object):
    """A map from cache key to a set of build artifacts.

    The cache key must uniquely identify the inputs (sources, compiler flags etc.) needed to
    build the artifacts. Cache keys are typically obtained from a CacheKeyGenerator.

    Subclasses implement the methods below to provide this functionality.
    """

    def __init__(self, artifact_root):
        """Create an ArtifactCache.

        All artifacts must be under artifact_root.
        """
        self.artifact_root = artifact_root

    def prune(self):
        """Prune stale cache files

        Remove old unused cache files
        :return:
        """
        pass

    def insert(self, cache_key, paths, overwrite=False):
        """Cache the output of a build.

        By default, checks cache.has(key) first, only proceeding to create and insert an artifact
        if it is not already in the cache (though `overwrite` can be used to skip the check and
        unconditionally insert).

        :param CacheKey cache_key: A CacheKey object.
        :param list<str> paths: List of absolute paths to generated dirs/files.
                                These must be under the artifact_root.
        :param bool overwrite: Skip check for existing, insert even if already in cache.
        """
        # Fix: filter() returns a lazy iterator on Python 3, which is always
        # truthy -- the old `if filter(...)` raised even when nothing was
        # missing. Materialize a list so the emptiness check is correct on
        # both Python 2 and 3.
        missing_files = [f for f in paths if not os.path.exists(f)]
        if missing_files:
            raise ArtifactCacheError('Tried to cache nonexistent files {0}'.format(missing_files))

        if not overwrite:
            if self.has(cache_key):
                logger.debug('Skipping insert of existing artifact: {0}'.format(cache_key))
                return False
        try:
            self.try_insert(cache_key, paths)
            return True
        except NonfatalArtifactCacheError as e:
            logger.error('Error while writing to artifact cache: {0}'.format(e))
            return False

    def try_insert(self, cache_key, paths):
        """Attempt to cache the output of a build, without error-handling.

        :param CacheKey cache_key: A CacheKey object.
        :param list<str> paths: List of absolute paths to generated dirs/files. These must be under the artifact_root.
        """
        pass

    def has(self, cache_key):
        """Subclasses return a truthy value iff an artifact for `cache_key` is cached."""
        pass

    def use_cached_files(self, cache_key, results_dir=None):
        """Use the files cached for the given key.

        Returned result indicates whether or not an artifact was successfully found
        and decompressed to the `artifact_root`:
        `True` if artifact was found and successfully decompressed
        `False` if not in the cache

        Implementations may choose to return an UnreadableArtifact instance instead
        of `False` to indicate an artifact was in the cache but could not be read,
        due to an error or corruption. UnreadableArtifact evaluates as False-y, so
        callers can treat the result as a boolean if they are only concerned with
        whether or not an artifact was read.

        Callers may also choose to attempt to repair or report corrupted artifacts
        differently, as these are unexpected, unlike normal cache misses.

        :param CacheKey cache_key: A CacheKey object.
        """
        pass

    def delete(self, cache_key):
        """Delete the artifacts for the specified key.

        Deleting non-existent artifacts is a no-op.
        :param CacheKey cache_key: A CacheKey object.
        """
        pass
def call_use_cached_files(tup):
    """Importable helper for multi-proc calling of ArtifactCache.use_cached_files on a cache instance.

    Multiprocessing map/apply/etc require functions which can be imported, not bound methods.
    To call a bound method, instead call a helper like this and pass tuple of the instance and args.
    The helper can then call the original method on the deserialized instance.

    :param tup: A tuple of an ArtifactCache and args (eg CacheKey) for ArtifactCache.use_cached_files.
    """
    try:
        cache, key, results_dir = tup
        res = cache.use_cached_files(key, results_dir)
        # Progress indicator: '.' marks a cache hit, ' ' a miss.
        sys.stderr.write('.' if res else ' ')
        sys.stderr.flush()
        return res
    except NonfatalArtifactCacheError as e:
        # Logger.warn is a deprecated alias; warning() is the supported spelling.
        logger.warning('Error calling use_cached_files in artifact cache: {0}'.format(e))
        return False
def call_insert(tup):
    """Importable helper for multi-proc calling of ArtifactCache.insert on an ArtifactCache instance.

    See docstring on call_use_cached_files explaining why this is useful.

    :param tup: A 4-tuple of an ArtifactCache and the 3 args passed to ArtifactCache.insert:
                eg (some_cache_instance, cache_key, [some_file, another_file], False)
    """
    try:
        cache, key, files, overwrite = tup
        return cache.insert(key, files, overwrite)
    except NonfatalArtifactCacheError as e:
        # Logger.warn is a deprecated alias; warning() is the supported spelling.
        logger.warning('Error while inserting into artifact cache: {0}'.format(e))
        return False
| apache-2.0 |
Kongsea/tensorflow | tensorflow/compiler/tests/image_ops_test.py | 3 | 5612 | # Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for image ops."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
from tensorflow.compiler.tests.xla_test import XLATestCase
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import gen_image_ops
from tensorflow.python.platform import test
class ResizeBilinearTest(XLATestCase):
    """Tests the XLA-compiled resize_bilinear forward and gradient ops."""

    def _assertForwardOpMatchesExpected(self,
                                        image_np,
                                        target_shape,
                                        expected=None):
        # Resizes the HxW array `image_np` to `target_shape` with
        # align_corners=True and checks the result against `expected`.
        if expected is None:
            self.fail("expected must be specified")
        with self.test_session() as sess, self.test_scope():
            image = array_ops.placeholder(image_np.dtype)
            resized = gen_image_ops.resize_bilinear(
                image, target_shape, align_corners=True)
            # Add singleton batch/channel dims: HxW -> 1xHxWx1.
            out = sess.run(resized, {image: image_np[np.newaxis, :, :, np.newaxis]})
            self.assertAllClose(expected[np.newaxis, :, :, np.newaxis], out)

    def _assertBackwardOpMatchesExpected(self,
                                         grads_np,
                                         input_shape=None,
                                         dtype=None,
                                         expected=None):
        # Back-propagates `grads_np` through resize_bilinear for an input of
        # `input_shape` and checks the accumulated input gradients.
        if input_shape is None:
            self.fail("input_shape must be specified")
        if expected is None:
            self.fail("expected must be specified")
        with self.test_session() as sess, self.test_scope():
            dtype = dtype or np.float32
            grads = array_ops.placeholder(np.float32)
            resized = gen_image_ops._resize_bilinear_grad(
                grads,
                np.zeros([1, input_shape[0], input_shape[1], 1], dtype=dtype),
                align_corners=True)
            out = sess.run(resized, {grads: grads_np[np.newaxis, :, :, np.newaxis]})
            self.assertAllClose(expected[np.newaxis, :, :, np.newaxis], out)

    # NOTE(review): the name says "1x2To3x2" but the target shape is [3, 3]
    # and the expected output is 3x3 -- likely a stale test name.
    def testAlignCorners1x2To3x2(self):
        for dtype in self.float_types:
            self._assertForwardOpMatchesExpected(
                np.array([[1, 2]], dtype=dtype), [3, 3],
                expected=np.array(
                    [[1, 1.5, 2], [1, 1.5, 2], [1, 1.5, 2]], dtype=np.float32))

    def testAlignCorners1x2To3x2Grad(self):
        for dtype in self.float_types:
            self._assertBackwardOpMatchesExpected(
                np.array([[1, 2], [3, 4], [5, 6]], dtype=np.float32),
                input_shape=[1, 2],
                dtype=dtype,
                expected=np.array([[9, 12]], dtype=np.float32))

    def testAlignCorners2x2To1x1(self):
        for dtype in self.float_types:
            self._assertForwardOpMatchesExpected(
                np.array([[1, 2], [3, 4]], dtype=dtype), [1, 1],
                expected=np.array([[1]], dtype=np.float32))

    def testAlignCorners2x2To1x1Grad(self):
        for dtype in self.float_types:
            self._assertBackwardOpMatchesExpected(
                np.array([[7]], dtype=np.float32),
                input_shape=[2, 2],
                dtype=dtype,
                expected=np.array([[7, 0], [0, 0]], dtype=np.float32))

    def testAlignCorners2x2To3x3(self):
        for dtype in self.float_types:
            self._assertForwardOpMatchesExpected(
                np.array([[1, 2], [3, 4]], dtype=dtype), [3, 3],
                expected=np.array(
                    [[1, 1.5, 2], [2, 2.5, 3], [3, 3.5, 4]], dtype=np.float32))

    def testAlignCorners2x2To3x3Grad(self):
        self._assertBackwardOpMatchesExpected(
            np.array([[1, 2, 3], [4, 5, 6], [7, 8, 9]], dtype=np.float32),
            input_shape=[2, 2],
            expected=np.array([[5.25, 8.25], [14.25, 17.25]], dtype=np.float32))

    def testAlignCorners3x3To2x2(self):
        for dtype in self.float_types:
            self._assertForwardOpMatchesExpected(
                np.array([[1, 2, 3], [4, 5, 6], [7, 8, 9]], dtype=dtype), [2, 2],
                expected=np.array([[1, 3], [7, 9]], dtype=np.float32))

    def testAlignCorners3x3To2x2Grad(self):
        for dtype in self.float_types:
            self._assertBackwardOpMatchesExpected(
                np.array([[7, 13], [22, 4]], dtype=np.float32),
                input_shape=[3, 3],
                dtype=dtype,
                expected=np.array(
                    [[7, 0, 13], [0, 0, 0], [22, 0, 4]], dtype=np.float32))

    def testAlignCorners4x4To3x3(self):
        for dtype in self.float_types:
            self._assertForwardOpMatchesExpected(
                np.array(
                    [[1, 2, 3, 4], [5, 6, 7, 8], [9, 10, 11, 12], [13, 14, 15, 16]],
                    dtype=dtype), [3, 3],
                expected=np.array(
                    [[1, 2.5, 4], [7, 8.5, 10], [13, 14.5, 16]], dtype=np.float32))

    def testAlignCorners4x4To3x3Grad(self):
        for dtype in self.float_types:
            self._assertBackwardOpMatchesExpected(
                np.array([[1, 2, 3], [4, 5, 6], [7, 8, 9]], dtype=np.float32),
                input_shape=[4, 4],
                dtype=dtype,
                expected=np.array(
                    [[1, 1, 1, 3], [2, 1.25, 1.25, 3], [2, 1.25, 1.25, 3],
                     [7, 4, 4, 9]],
                    dtype=np.float32))
if __name__ == "__main__":
    # Run under the standard TensorFlow test harness when executed directly.
    test.main()
| apache-2.0 |
amarouni/incubator-beam | sdks/python/apache_beam/transforms/timeutil.py | 9 | 4081 | #
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Timestamp utilities."""
from __future__ import absolute_import
from abc import ABCMeta
from abc import abstractmethod
__all__ = [
'TimeDomain',
]
class TimeDomain(object):
    """Time domain for streaming timers."""

    WATERMARK = 'WATERMARK'
    REAL_TIME = 'REAL_TIME'
    DEPENDENT_REAL_TIME = 'DEPENDENT_REAL_TIME'

    @staticmethod
    def from_string(domain):
        """Return *domain* unchanged when it names a known time domain."""
        known_domains = (TimeDomain.WATERMARK,
                         TimeDomain.REAL_TIME,
                         TimeDomain.DEPENDENT_REAL_TIME)
        if domain not in known_domains:
            raise ValueError('Unknown time domain: %s' % domain)
        return domain
class TimestampCombinerImpl(object):
    """Implementation of TimestampCombiner."""

    __metaclass__ = ABCMeta

    @abstractmethod
    def assign_output_time(self, window, input_timestamp):
        pass

    @abstractmethod
    def combine(self, output_timestamp, other_output_timestamp):
        pass

    def combine_all(self, merging_timestamps):
        """Fold combine() over the timestamps; None for an empty input."""
        accumulated = None
        for candidate in merging_timestamps:
            accumulated = (candidate if accumulated is None
                           else self.combine(accumulated, candidate))
        return accumulated

    def merge(self, unused_result_window, merging_timestamps):
        """Default to returning the result of combine_all."""
        return self.combine_all(merging_timestamps)
class DependsOnlyOnWindow(TimestampCombinerImpl):
    """TimestampCombinerImpl that only depends on the window."""

    __metaclass__ = ABCMeta

    def combine(self, output_timestamp, other_output_timestamp):
        # All outputs for a window are identical, so keep the first.
        return output_timestamp

    def merge(self, result_window, unused_merging_timestamps):
        # Since we know that the result only depends on the window, we can ignore
        # the given timestamps.
        return self.assign_output_time(result_window, None)
class OutputAtEarliestInputTimestampImpl(TimestampCombinerImpl):
    """TimestampCombinerImpl outputting at earliest input timestamp."""

    def assign_output_time(self, window, input_timestamp):
        # Output time is the input's own timestamp, unmodified.
        return input_timestamp

    def combine(self, output_timestamp, other_output_timestamp):
        """Default to returning the earlier of two timestamps."""
        return min(output_timestamp, other_output_timestamp)
class OutputAtEarliestTransformedInputTimestampImpl(TimestampCombinerImpl):
    """TimestampCombinerImpl outputting at earliest transformed input timestamp.

    The window_fn supplied at construction maps each input timestamp to an
    output timestamp; the earliest such value wins when combining.
    """

    def __init__(self, window_fn):
        self.window_fn = window_fn

    def assign_output_time(self, window, input_timestamp):
        return self.window_fn.get_transformed_output_time(window, input_timestamp)

    def combine(self, output_timestamp, other_output_timestamp):
        return min(output_timestamp, other_output_timestamp)
class OutputAtLatestInputTimestampImpl(TimestampCombinerImpl):
    """TimestampCombinerImpl outputting at latest input timestamp."""

    def assign_output_time(self, window, input_timestamp):
        return input_timestamp

    def combine(self, output_timestamp, other_output_timestamp):
        # The later of the two timestamps wins.
        return max(output_timestamp, other_output_timestamp)
class OutputAtEndOfWindowImpl(DependsOnlyOnWindow):
    """TimestampCombinerImpl outputting at end of window."""

    def assign_output_time(self, window, unused_input_timestamp):
        # Output time is fixed at the window's end, regardless of input.
        return window.end
| apache-2.0 |
mxOBS/deb-pkg_trusty_chromium-browser | tools/gyp/test/ios/gyptest-app-ios-assets-catalog.py | 107 | 1726 | #!/usr/bin/env python
# Copyright (c) 2012 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""
Verifies that ios app bundles are built correctly.
"""
import TestGyp
import TestMac
import os.path
import sys
# Xcode supports for assets catalog was introduced in Xcode 6.0
if sys.platform == 'darwin' and TestMac.Xcode.Version() >= '0600':
    test_gyp_path = 'test-assets-catalog.gyp'
    test_app_path = 'Test App Assets Catalog Gyp.app'

    # Build the app bundle with both generators, then verify its contents.
    test = TestGyp.TestGyp(formats=['xcode', 'ninja'])
    test.run_gyp(test_gyp_path, chdir='app-bundle')
    test.build(test_gyp_path, test.ALL, chdir='app-bundle')
    # Test that the extension is .bundle
    test.built_file_must_exist(
        os.path.join(test_app_path, 'Test App Assets Catalog Gyp'),
        chdir='app-bundle')
    # Info.plist
    # NOTE(review): info_plist is assigned but never checked afterwards --
    # confirm whether a must_exist/must_match assertion was intended here.
    info_plist = test.built_file_path(
        os.path.join(test_app_path, 'Info.plist'),
        chdir='app-bundle')
    # Resources: localized strings, nibs, storyboards, compiled asset catalog.
    test.built_file_must_exist(
        os.path.join(test_app_path, 'English.lproj/InfoPlist.strings'),
        chdir='app-bundle')
    test.built_file_must_exist(
        os.path.join(test_app_path, 'English.lproj/MainMenu.nib'),
        chdir='app-bundle')
    test.built_file_must_exist(
        os.path.join(test_app_path, 'English.lproj/Main_iPhone.storyboardc'),
        chdir='app-bundle')
    test.built_file_must_exist(
        os.path.join(test_app_path, 'Assets.car'),
        chdir='app-bundle')
    # Packaging: PkgInfo must exist and carry the APPL creator signature.
    test.built_file_must_exist(
        os.path.join(test_app_path, 'PkgInfo'),
        chdir='app-bundle')
    test.built_file_must_match(
        os.path.join(test_app_path, 'PkgInfo'), 'APPLause',
        chdir='app-bundle')
    test.pass_test()
| bsd-3-clause |
minzastro/lookup | providers/vizier.py | 1 | 1290 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Created on Thu Jun 9 10:02:47 2016
@author: mints
"""
from providers.basic import BasicLookup
class VizierLookup(BasicLookup):
    """
    Provider for Vizier data.
    """

    # Mapping from friendly survey name to VizieR catalog table identifier.
    CATALOGS = {
        '2MASS': 'II/246/out',
        'AllWISE': 'II/328/allwise',
        'SDSS': 'V/139/sdss9',
        'GLIMPSE': 'II/293/glimpse',
        'PPMXL': 'I/317/sample',
        'NOMAD': 'I/297/out',
        'UKIDSS': 'II/319/las9',
        'APASS': 'II/336/apass9',
        'URAT1': 'I/329/urat1'
    }
    #DEBUG=True
    URL = 'http://vizier.u-strasbg.fr/viz-bin/VizieR'
    # XPath locating the result table inside the returned VizieR HTML page.
    XPATH = '//div[@id="CDScore"]/table[@class="sort"]'

    def _prepare_request_data(self, catalog, ra, dec, radius):
        # Cone-search query: '-c.ra'/'-c.dec' give the center, '-c.rs' the
        # search radius (units per VizieR's '-c.rs' convention, presumably
        # arcsec -- TODO confirm), and '-out.add': '_r' appends an
        # angular-distance column to the output.
        return {'-source': self.CATALOGS[catalog],
                '-out.add': '_r',
                '-c.ra': ra,
                '-c.dec': dec,
                '-c.rs': radius}

    def _post_process_table(self, table):
        """
        First column contains references to full record on vizier - need
        to correct the URL there.
        """
        # Links in the page are relative; prefix them so they resolve when
        # used outside the VizieR site context.
        for element in table.xpath('//a[@class="full"]'):
            element.attrib['href'] = 'http://vizier.u-strasbg.fr/viz-bin/' + \
                element.attrib['href']
        return table
| gpl-3.0 |
rajalokan/nova | nova/virt/vmwareapi/vim_util.py | 8 | 6020 | # Copyright (c) 2011 Citrix Systems, Inc.
# Copyright 2011 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
The VMware API utility module.
"""
from oslo_vmware import vim_util as vutil
import nova.conf
CONF = nova.conf.CONF
def object_to_dict(obj, list_depth=1):
    """Convert Suds object into serializable format.

    The calling function can limit the amount of list entries that
    are converted.
    """
    result = {}
    for key, value in dict(obj).items():
        if hasattr(value, '__keylist__'):
            # Nested Suds object: recurse with the same list limit.
            result[key] = object_to_dict(value, list_depth=list_depth)
        elif isinstance(value, list):
            # Convert at most list_depth leading entries of the list.
            converted = []
            for index, item in enumerate(value):
                if index >= list_depth:
                    break
                if hasattr(item, '__keylist__'):
                    converted.append(object_to_dict(item, list_depth=list_depth))
                else:
                    converted.append(item)
            result[key] = converted
        else:
            result[key] = value
    return result
def get_object_properties(vim, collector, mobj, type, properties):
    """Gets the properties of the Managed object specified."""
    client_factory = vim.client.factory
    if mobj is None:
        return None
    usecoll = collector
    if usecoll is None:
        # Fall back to the session's default property collector.
        usecoll = vim.service_content.propertyCollector
    property_filter_spec = client_factory.create('ns0:PropertyFilterSpec')
    property_spec = client_factory.create('ns0:PropertySpec')
    # An empty or missing property list means "retrieve all properties".
    property_spec.all = (properties is None or len(properties) == 0)
    property_spec.pathSet = properties
    property_spec.type = type
    object_spec = client_factory.create('ns0:ObjectSpec')
    object_spec.obj = mobj
    object_spec.skip = False
    property_filter_spec.propSet = [property_spec]
    property_filter_spec.objectSet = [object_spec]
    options = client_factory.create('ns0:RetrieveOptions')
    # Cap the result set at the configured maximum object count.
    options.maxObjects = CONF.vmware.maximum_objects
    return vim.RetrievePropertiesEx(usecoll, specSet=[property_filter_spec],
                                    options=options)
def get_objects(vim, type, properties_to_collect=None, all=False):
    """Gets the list of objects of the type specified.

    Thin wrapper over oslo.vmware's ``vutil.get_objects`` that injects the
    configured maximum result count.  ``type`` and ``all`` shadow builtins
    (kept for backward compatibility with existing callers).
    """
    return vutil.get_objects(vim, type, CONF.vmware.maximum_objects,
                             properties_to_collect, all)
def get_inner_objects(vim, base_obj, path, inner_type,
                      properties_to_collect=None, all=False):
    """Gets the list of inner objects of the type specified.

    Traverses from ``base_obj`` along the property named ``path`` and
    collects objects of type ``inner_type`` found there.  ``all`` shadows
    the builtin; kept for backward compatibility.
    """
    client_factory = vim.client.factory
    base_type = base_obj._type
    # One-step traversal from the base object through ``path``; no select
    # set, so the traversal does not recurse further.
    traversal_spec = vutil.build_traversal_spec(client_factory, 'inner',
                                                base_type, path, False, [])
    object_spec = vutil.build_object_spec(client_factory,
                                          base_obj,
                                          [traversal_spec])
    property_spec = vutil.build_property_spec(client_factory, type_=inner_type,
                                properties_to_collect=properties_to_collect,
                                all_properties=all)
    property_filter_spec = vutil.build_property_filter_spec(client_factory,
                                [property_spec], [object_spec])
    options = client_factory.create('ns0:RetrieveOptions')
    # Bound the result-set size to the configured maximum.
    options.maxObjects = CONF.vmware.maximum_objects
    return vim.RetrievePropertiesEx(
        vim.service_content.propertyCollector,
        specSet=[property_filter_spec], options=options)
def get_prop_spec(client_factory, spec_type, properties):
    """Build and return a ``PropertySpec`` for the given type and paths."""
    spec = client_factory.create('ns0:PropertySpec')
    spec.type = spec_type
    spec.pathSet = properties
    return spec
def get_obj_spec(client_factory, obj, select_set=None):
    """Build and return an ``ObjectSpec`` rooted at ``obj``.

    ``selectSet`` is only attached when a select set is actually given.
    """
    spec = client_factory.create('ns0:ObjectSpec')
    spec.obj = obj
    spec.skip = False
    if select_set is not None:
        spec.selectSet = select_set
    return spec
def get_prop_filter_spec(client_factory, obj_spec, prop_spec):
    """Build and return a ``PropertyFilterSpec`` tying object specs
    to property specs.
    """
    filter_spec = client_factory.create('ns0:PropertyFilterSpec')
    filter_spec.propSet = prop_spec
    filter_spec.objectSet = obj_spec
    return filter_spec
def get_properties_for_a_collection_of_objects(vim, type,
                                               obj_list, properties):
    """Gets the list of properties for the collection of
    objects of the type specified.

    :param vim: Vim connection used to build specs and run the retrieval
    :param type: managed object type name (shadows the builtin; kept for
        backward compatibility with existing callers)
    :param obj_list: managed object references to inspect
    :param properties: property path names to collect for each object
    :returns: [] when obj_list is empty, otherwise the result of
        RetrievePropertiesEx
    """
    client_factory = vim.client.factory
    # Nothing to retrieve -- avoid issuing an empty filter spec.
    if not obj_list:
        return []
    prop_spec = get_prop_spec(client_factory, type, properties)
    # One ObjectSpec per managed object, all sharing the same PropertySpec.
    lst_obj_specs = [get_obj_spec(client_factory, obj) for obj in obj_list]
    prop_filter_spec = get_prop_filter_spec(client_factory,
                                            lst_obj_specs, [prop_spec])
    options = client_factory.create('ns0:RetrieveOptions')
    # Bound the result-set size to the configured maximum.
    options.maxObjects = CONF.vmware.maximum_objects
    return vim.RetrievePropertiesEx(
        vim.service_content.propertyCollector,
        specSet=[prop_filter_spec], options=options)
def get_about_info(vim):
    """Get the About Info from the service content.

    Simply returns the ``about`` attribute of the connection's service
    content object.
    """
    return vim.service_content.about
def get_entity_name(session, entity):
    """Return the 'name' property of a managed entity.

    Delegates to the session's private ``_call_method`` with oslo.vmware's
    ``vim_util.get_object_property`` helper.
    """
    return session._call_method(vutil, 'get_object_property',
                                entity, 'name')
| apache-2.0 |
Rokt33r/electron | script/upload-node-headers.py | 141 | 4490 | #!/usr/bin/env python
import argparse
import glob
import os
import shutil
import sys
import tarfile
from lib.config import PLATFORM, get_target_arch, s3_config
from lib.util import execute, safe_mkdir, scoped_cwd, s3put
SOURCE_ROOT = os.path.abspath(os.path.dirname(os.path.dirname(__file__)))
DIST_DIR = os.path.join(SOURCE_ROOT, 'dist')
NODE_DIR = os.path.join(SOURCE_ROOT, 'vendor', 'node')
OUT_DIR = os.path.join(SOURCE_ROOT, 'out', 'R')
HEADERS_SUFFIX = [
'.h',
'.gypi',
]
HEADERS_DIRS = [
'src',
'deps/http_parser',
'deps/zlib',
'deps/uv',
'deps/npm',
'deps/mdb_v8',
]
HEADERS_FILES = [
'common.gypi',
'config.gypi',
]
def main():
    """Build the node/iojs header tarballs for a release and upload them.

    The identical header set is published under three historical names
    (node-<v>, iojs-<v>, iojs-<v>-headers) so node-gyp-style tooling can
    find it whichever runtime flavor it asks for.  The original code
    repeated the copy+tarball sequence verbatim three times; a loop over
    the names removes the duplication without changing behavior.
    """
    safe_mkdir(DIST_DIR)
    args = parse_args()

    dist_names = [
        'node-{0}'.format(args.version),
        'iojs-{0}'.format(args.version),
        'iojs-{0}-headers'.format(args.version),
    ]
    for dist_name in dist_names:
        headers_dir = os.path.join(DIST_DIR, dist_name)
        copy_headers(headers_dir)
        create_header_tarball(headers_dir)

    # Upload node's headers to S3.
    bucket, access_key, secret_key = s3_config()
    upload_node(bucket, access_key, secret_key, args.version)
def parse_args():
    """Parse the command-line options; ``-v/--version`` is mandatory."""
    arg_parser = argparse.ArgumentParser(description='upload sumsha file')
    arg_parser.add_argument('-v', '--version',
                            required=True,
                            help='Specify the version')
    return arg_parser.parse_args()
def copy_headers(dist_headers_dir):
    """Assemble the node development headers under ``dist_headers_dir``.

    Collects .h/.gypi files from the bundled node checkout plus the V8
    headers shipped with libchromiumcontent, preserving each file's path
    relative to its source tree.
    """
    safe_mkdir(dist_headers_dir)
    # Copy standard node headers from node. repository.
    for include_path in HEADERS_DIRS:
        abs_path = os.path.join(NODE_DIR, include_path)
        for dirpath, _, filenames in os.walk(abs_path):
            for filename in filenames:
                extension = os.path.splitext(filename)[1]
                # Only headers and gyp includes are shipped.
                if extension not in HEADERS_SUFFIX:
                    continue
                copy_source_file(os.path.join(dirpath, filename), NODE_DIR,
                                 dist_headers_dir)
    # Top-level gyp files that node-gyp expects alongside the headers.
    for other_file in HEADERS_FILES:
        copy_source_file(os.path.join(NODE_DIR, other_file), NODE_DIR,
                         dist_headers_dir)
    # Copy V8 headers from chromium's repository.
    src = os.path.join(SOURCE_ROOT, 'vendor', 'brightray', 'vendor', 'download',
                       'libchromiumcontent', 'src')
    for dirpath, _, filenames in os.walk(os.path.join(src, 'v8')):
        for filename in filenames:
            extension = os.path.splitext(filename)[1]
            if extension not in HEADERS_SUFFIX:
                continue
            # V8 headers land under deps/ inside the headers tree.
            copy_source_file(os.path.join(dirpath, filename), src,
                             os.path.join(dist_headers_dir, 'deps'))
def create_header_tarball(dist_headers_dir):
    """Create ``<dist_headers_dir>.tar.gz`` from the headers directory.

    The archive is written from inside DIST_DIR so member paths stay
    relative.  Using the TarFile context manager guarantees the archive
    is finalized and closed even when ``add()`` raises; the original
    leaked the open handle on error.
    """
    target = dist_headers_dir + '.tar.gz'
    with scoped_cwd(DIST_DIR):
        with tarfile.open(name=target, mode='w:gz') as tarball:
            tarball.add(os.path.relpath(dist_headers_dir))
def copy_source_file(source, start, destination):
    """Copy ``source`` under ``destination``, preserving its path
    relative to ``start``; parent directories are created as needed.
    """
    relative = os.path.relpath(source, start=start)
    final_destination = os.path.join(destination, relative)
    safe_mkdir(os.path.dirname(final_destination))
    # copy2 also preserves file metadata (mtime etc.).
    shutil.copy2(source, final_destination)
def upload_node(bucket, access_key, secret_key, version):
    """Upload the header tarballs (and, on Windows, import libs) to S3.

    Publishes every node-*/iojs-* tarball in DIST_DIR under
    atom-shell/dist/<version>.  On Windows the built node.dll import
    library is additionally copied to the arch-specific node.lib /
    iojs.lib paths and uploaded as well.
    """
    with scoped_cwd(DIST_DIR):
        s3put(bucket, access_key, secret_key, DIST_DIR,
              'atom-shell/dist/{0}'.format(version), glob.glob('node-*.tar.gz'))
        s3put(bucket, access_key, secret_key, DIST_DIR,
              'atom-shell/dist/{0}'.format(version), glob.glob('iojs-*.tar.gz'))
    if PLATFORM == 'win32':
        # Arch-dependent destination layout for the import libraries.
        if get_target_arch() == 'ia32':
            node_lib = os.path.join(DIST_DIR, 'node.lib')
            iojs_lib = os.path.join(DIST_DIR, 'win-x86', 'iojs.lib')
        else:
            node_lib = os.path.join(DIST_DIR, 'x64', 'node.lib')
            iojs_lib = os.path.join(DIST_DIR, 'win-x64', 'iojs.lib')
        safe_mkdir(os.path.dirname(node_lib))
        safe_mkdir(os.path.dirname(iojs_lib))
        # Copy atom.lib to node.lib and iojs.lib.
        atom_lib = os.path.join(OUT_DIR, 'node.dll.lib')
        shutil.copy2(atom_lib, node_lib)
        shutil.copy2(atom_lib, iojs_lib)
        # Upload the node.lib.
        s3put(bucket, access_key, secret_key, DIST_DIR,
              'atom-shell/dist/{0}'.format(version), [node_lib])
        # Upload the iojs.lib.
        s3put(bucket, access_key, secret_key, DIST_DIR,
              'atom-shell/dist/{0}'.format(version), [iojs_lib])
# Script entry point: propagate main()'s return value as the exit code.
if __name__ == '__main__':
    sys.exit(main())
| mit |
WhySoGeeky/DroidPot | venv/lib/python2.7/site-packages/django/contrib/postgres/operations.py | 111 | 1298 | from django.contrib.postgres.signals import register_hstore_handler
from django.db.migrations.operations.base import Operation
class CreateExtension(Operation):
    """Migration operation that creates a PostgreSQL extension.

    NOTE(review): the extension name is interpolated directly into the
    SQL statement; names are expected to come from trusted migration
    code, never from user input -- confirm before reusing elsewhere.
    """
    reversible = True

    def __init__(self, name):
        # Name of the PostgreSQL extension, e.g. 'hstore'.
        self.name = name

    def state_forwards(self, app_label, state):
        # Extensions do not alter Django's in-memory model state.
        pass

    def database_forwards(self, app_label, schema_editor, from_state, to_state):
        # IF NOT EXISTS makes the forward operation idempotent on reruns.
        schema_editor.execute("CREATE EXTENSION IF NOT EXISTS %s" % self.name)

    def database_backwards(self, app_label, schema_editor, from_state, to_state):
        schema_editor.execute("DROP EXTENSION %s" % self.name)

    def describe(self):
        return "Creates extension %s" % self.name
class HStoreExtension(CreateExtension):
    """CreateExtension specialized for the 'hstore' key/value extension."""

    def __init__(self):
        self.name = 'hstore'

    def database_forwards(self, app_label, schema_editor, from_state, to_state):
        super(HStoreExtension, self).database_forwards(app_label, schema_editor, from_state, to_state)
        # Register hstore straight away as it cannot be done before the
        # extension is installed, a subsequent data migration would use the
        # same connection
        register_hstore_handler(schema_editor.connection)
class UnaccentExtension(CreateExtension):
    """CreateExtension specialized for the 'unaccent' search extension."""

    def __init__(self):
        self.name = 'unaccent'
| mit |
40123151ChengYu/2015cd_midterm | static/Brython3.1.1-20150328-091302/Lib/tarfile.py | 728 | 88474 | #!/usr/bin/env python3
#-------------------------------------------------------------------
# tarfile.py
#-------------------------------------------------------------------
# Copyright (C) 2002 Lars Gustaebel <lars@gustaebel.de>
# All rights reserved.
#
# Permission is hereby granted, free of charge, to any person
# obtaining a copy of this software and associated documentation
# files (the "Software"), to deal in the Software without
# restriction, including without limitation the rights to use,
# copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following
# conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
# OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
# HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
# OTHER DEALINGS IN THE SOFTWARE.
#
"""Read from and write to tar format archives.
"""
version = "0.9.0"
__author__ = "Lars Gust\u00e4bel (lars@gustaebel.de)"
__date__ = "$Date: 2011-02-25 17:42:01 +0200 (Fri, 25 Feb 2011) $"
__cvsid__ = "$Id: tarfile.py 88586 2011-02-25 15:42:01Z marc-andre.lemburg $"
__credits__ = "Gustavo Niemeyer, Niels Gust\u00e4bel, Richard Townsend."
#---------
# Imports
#---------
import sys
import os
import io
import shutil
import stat
import time
import struct
import copy
import re
try:
import grp, pwd
except ImportError:
grp = pwd = None
# os.symlink on Windows prior to 6.0 raises NotImplementedError
symlink_exception = (AttributeError, NotImplementedError)
try:
# WindowsError (1314) will be raised if the caller does not hold the
# SeCreateSymbolicLinkPrivilege privilege
symlink_exception += (WindowsError,)
except NameError:
pass
# from tarfile import *
__all__ = ["TarFile", "TarInfo", "is_tarfile", "TarError"]
from builtins import open as _open # Since 'open' is TarFile.open
#---------------------------------------------------------
# tar constants
#---------------------------------------------------------
NUL = b"\0" # the null character
BLOCKSIZE = 512 # length of processing blocks
RECORDSIZE = BLOCKSIZE * 20 # length of records
GNU_MAGIC = b"ustar \0" # magic gnu tar string
POSIX_MAGIC = b"ustar\x0000" # magic posix tar string
LENGTH_NAME = 100 # maximum length of a filename
LENGTH_LINK = 100 # maximum length of a linkname
LENGTH_PREFIX = 155 # maximum length of the prefix field
REGTYPE = b"0" # regular file
AREGTYPE = b"\0" # regular file
LNKTYPE = b"1" # link (inside tarfile)
SYMTYPE = b"2" # symbolic link
CHRTYPE = b"3" # character special device
BLKTYPE = b"4" # block special device
DIRTYPE = b"5" # directory
FIFOTYPE = b"6" # fifo special device
CONTTYPE = b"7" # contiguous file
GNUTYPE_LONGNAME = b"L" # GNU tar longname
GNUTYPE_LONGLINK = b"K" # GNU tar longlink
GNUTYPE_SPARSE = b"S" # GNU tar sparse file
XHDTYPE = b"x" # POSIX.1-2001 extended header
XGLTYPE = b"g" # POSIX.1-2001 global header
SOLARIS_XHDTYPE = b"X" # Solaris extended header
USTAR_FORMAT = 0 # POSIX.1-1988 (ustar) format
GNU_FORMAT = 1 # GNU tar format
PAX_FORMAT = 2 # POSIX.1-2001 (pax) format
DEFAULT_FORMAT = GNU_FORMAT
#---------------------------------------------------------
# tarfile constants
#---------------------------------------------------------
# File types that tarfile supports:
SUPPORTED_TYPES = (REGTYPE, AREGTYPE, LNKTYPE,
SYMTYPE, DIRTYPE, FIFOTYPE,
CONTTYPE, CHRTYPE, BLKTYPE,
GNUTYPE_LONGNAME, GNUTYPE_LONGLINK,
GNUTYPE_SPARSE)
# File types that will be treated as a regular file.
REGULAR_TYPES = (REGTYPE, AREGTYPE,
CONTTYPE, GNUTYPE_SPARSE)
# File types that are part of the GNU tar format.
GNU_TYPES = (GNUTYPE_LONGNAME, GNUTYPE_LONGLINK,
GNUTYPE_SPARSE)
# Fields from a pax header that override a TarInfo attribute.
PAX_FIELDS = ("path", "linkpath", "size", "mtime",
"uid", "gid", "uname", "gname")
# Fields from a pax header that are affected by hdrcharset.
PAX_NAME_FIELDS = {"path", "linkpath", "uname", "gname"}
# Fields in a pax header that are numbers, all other fields
# are treated as strings.
PAX_NUMBER_FIELDS = {
"atime": float,
"ctime": float,
"mtime": float,
"uid": int,
"gid": int,
"size": int
}
#---------------------------------------------------------
# Bits used in the mode field, values in octal.
#---------------------------------------------------------
S_IFLNK = 0o120000 # symbolic link
S_IFREG = 0o100000 # regular file
S_IFBLK = 0o060000 # block device
S_IFDIR = 0o040000 # directory
S_IFCHR = 0o020000 # character device
S_IFIFO = 0o010000 # fifo
TSUID = 0o4000 # set UID on execution
TSGID = 0o2000 # set GID on execution
TSVTX = 0o1000 # reserved
TUREAD = 0o400 # read by owner
TUWRITE = 0o200 # write by owner
TUEXEC = 0o100 # execute/search by owner
TGREAD = 0o040 # read by group
TGWRITE = 0o020 # write by group
TGEXEC = 0o010 # execute/search by group
TOREAD = 0o004 # read by other
TOWRITE = 0o002 # write by other
TOEXEC = 0o001 # execute/search by other
#---------------------------------------------------------
# initialization
#---------------------------------------------------------
if os.name in ("nt", "ce"):
ENCODING = "utf-8"
else:
ENCODING = sys.getfilesystemencoding()
#---------------------------------------------------------
# Some useful functions
#---------------------------------------------------------
def stn(s, length, encoding, errors):
    """Convert a string to a null-terminated bytes object.

    The encoded string is truncated or NUL-padded to exactly ``length``
    bytes.
    """
    data = s.encode(encoding, errors)[:length]
    return data + b"\0" * (length - len(data))
def nts(s, encoding, errors):
    """Convert a null-terminated bytes object to a string.

    Everything from the first NUL byte onwards is discarded; when there
    is no NUL the whole buffer is decoded.
    """
    return s.split(b"\0", 1)[0].decode(encoding, errors)
def nti(s):
    """Convert a number field to a python number.

    Tar stores numbers either as NUL-terminated octal text or -- for
    values that do not fit (GNU extension) -- as big-endian base-256
    preceded by a 0o200 (positive) or 0o377 (negative) marker byte.
    See itn() for the writer side.
    """
    marker = s[0]
    if marker in (0o200, 0o377):
        # Base-256 payload follows the marker byte.
        n = 0
        for byte in s[1:]:
            n = (n << 8) | byte
        if marker == 0o377:
            # Negative values are stored in two's-complement form.
            n = -(256 ** (len(s) - 1) - n)
        return n
    try:
        return int(nts(s, "ascii", "strict") or "0", 8)
    except ValueError:
        raise InvalidHeaderError("invalid header")
def itn(n, digits=8, format=DEFAULT_FORMAT):
    """Convert a python number to a number field.

    :param n: integer value to encode
    :param digits: total field width in bytes (terminator/marker included)
    :param format: one of the *_FORMAT constants; only GNU_FORMAT may use
        the base-256 extension for out-of-range values
    :raises ValueError: if n does not fit the field in the given format
    """
    # POSIX 1003.1-1988 requires numbers to be encoded as a string of
    # octal digits followed by a null-byte, this allows values up to
    # (8**(digits-1))-1. GNU tar allows storing numbers greater than
    # that if necessary. A leading 0o200 or 0o377 byte indicate this
    # particular encoding, the following digits-1 bytes are a big-endian
    # base-256 representation. This allows values up to (256**(digits-1))-1.
    # A 0o200 byte indicates a positive number, a 0o377 byte a negative
    # number.
    if 0 <= n < 8 ** (digits - 1):
        s = bytes("%0*o" % (digits - 1, n), "ascii") + NUL
    elif format == GNU_FORMAT and -256 ** (digits - 1) <= n < 256 ** (digits - 1):
        if n >= 0:
            s = bytearray([0o200])
        else:
            s = bytearray([0o377])
            # Two's complement; only the low digits-1 bytes are kept below.
            n = 256 ** digits + n
        # Build the big-endian payload by repeatedly inserting the low
        # byte just after the marker.
        for i in range(digits - 1):
            s.insert(1, n & 0o377)
            n >>= 8
    else:
        raise ValueError("overflow in number field")
    return s
def calc_chksums(buf):
    """Calculate both checksums for a member's 512-byte header.

    The chksum field itself (8 bytes at offset 148) is treated as if it
    were filled with spaces (hence the constant 256 == 8 * 0x20).  Some
    historical tars (Sun, NeXT) summed signed chars, so both the
    unsigned and the signed sum are returned.
    """
    unsigned = 256 + sum(struct.unpack_from("148B8x356B", buf))
    signed = 256 + sum(struct.unpack_from("148b8x356b", buf))
    return unsigned, signed
def copyfileobj(src, dst, length=None):
    """Copy ``length`` bytes from fileobj ``src`` to fileobj ``dst``.

    With ``length=None`` the whole remaining content is copied.  Raises
    IOError when ``src`` runs out before ``length`` bytes were read.
    """
    if length == 0:
        return
    if length is None:
        # Unbounded copy: defer to the stdlib implementation.
        shutil.copyfileobj(src, dst)
        return
    bufsize = 16 * 1024
    full_blocks, tail = divmod(length, bufsize)
    for _ in range(full_blocks):
        chunk = src.read(bufsize)
        if len(chunk) < bufsize:
            raise IOError("end of file reached")
        dst.write(chunk)
    if tail:
        chunk = src.read(tail)
        if len(chunk) < tail:
            raise IOError("end of file reached")
        dst.write(chunk)
    return
def filemode(mode):
    """Deprecated in this location; use stat.filemode."""
    # Backward-compatibility shim: warn, then delegate to the stdlib helper.
    import warnings
    warnings.warn("deprecated in favor of stat.filemode",
                  DeprecationWarning, 2)
    return stat.filemode(mode)
#---------------------------------------------------------
# Exception hierarchy.  TarError is the root; HeaderError and its
# subclasses classify problems found while parsing member headers.
#---------------------------------------------------------
class TarError(Exception):
    """Base exception."""
    pass

class ExtractError(TarError):
    """General exception for extract errors."""
    pass

class ReadError(TarError):
    """Exception for unreadable tar archives."""
    pass

class CompressionError(TarError):
    """Exception for unavailable compression methods."""
    pass

class StreamError(TarError):
    """Exception for unsupported operations on stream-like TarFiles."""
    pass

class HeaderError(TarError):
    """Base exception for header errors."""
    pass

class EmptyHeaderError(HeaderError):
    """Exception for empty headers."""
    pass

class TruncatedHeaderError(HeaderError):
    """Exception for truncated headers."""
    pass

class EOFHeaderError(HeaderError):
    """Exception for end of file headers."""
    pass

class InvalidHeaderError(HeaderError):
    """Exception for invalid headers."""
    pass

class SubsequentHeaderError(HeaderError):
    """Exception for missing and invalid extended headers."""
#---------------------------
# internal stream interface
#---------------------------
class _LowLevelFile:
    """Low-level file object. Supports reading and writing.
    It is used instead of a regular file object for streaming
    access.
    """

    def __init__(self, name, mode):
        # Translate the high-level "r"/"w" mode into os.open() flags.
        flags = {
            "r": os.O_RDONLY,
            "w": os.O_WRONLY | os.O_CREAT | os.O_TRUNC,
        }[mode]
        if hasattr(os, "O_BINARY"):
            # Windows: suppress newline translation.
            flags |= os.O_BINARY
        self.fd = os.open(name, flags, 0o666)

    def close(self):
        os.close(self.fd)

    def read(self, size):
        return os.read(self.fd, size)

    def write(self, s):
        os.write(self.fd, s)
class _Stream:
    """Class that serves as an adapter between TarFile and
    a stream-like object. The stream-like object only
    needs to have a read() or write() method and is accessed
    blockwise. Use of gzip or bzip2 compression is possible.
    A stream-like object could be for example: sys.stdin,
    sys.stdout, a socket, a tape device etc.
    _Stream is intended to be used only internally.

    Fix vs. the original: read(size=None) collected bytes chunks but
    joined them with a str separator (``"".join``), which raises
    TypeError the moment that path is hit; it now uses ``b"".join``
    (the same change was made upstream in CPython).
    """

    def __init__(self, name, mode, comptype, fileobj, bufsize):
        """Construct a _Stream object.
        """
        # _extfileobj: True when the caller owns fileobj (we must not
        # close it); False when we opened the file ourselves.
        self._extfileobj = True
        if fileobj is None:
            fileobj = _LowLevelFile(name, mode)
            self._extfileobj = False

        if comptype == '*':
            # Enable transparent compression detection for the
            # stream interface
            fileobj = _StreamProxy(fileobj)
            comptype = fileobj.getcomptype()

        self.name = name or ""
        self.mode = mode
        self.comptype = comptype
        self.fileobj = fileobj
        self.bufsize = bufsize
        self.buf = b""
        self.pos = 0
        self.closed = False

        try:
            # Set up the (de)compressor matching comptype.  Each branch
            # imports its codec lazily so a missing module only fails
            # when that compression is actually requested.
            if comptype == "gz":
                try:
                    import zlib
                except ImportError:
                    raise CompressionError("zlib module is not available")
                self.zlib = zlib
                self.crc = zlib.crc32(b"")
                if mode == "r":
                    self._init_read_gz()
                    self.exception = zlib.error
                else:
                    self._init_write_gz()

            elif comptype == "bz2":
                try:
                    import bz2
                except ImportError:
                    raise CompressionError("bz2 module is not available")
                if mode == "r":
                    self.dbuf = b""
                    self.cmp = bz2.BZ2Decompressor()
                    self.exception = IOError
                else:
                    self.cmp = bz2.BZ2Compressor()

            elif comptype == "xz":
                try:
                    import lzma
                except ImportError:
                    raise CompressionError("lzma module is not available")
                if mode == "r":
                    self.dbuf = b""
                    self.cmp = lzma.LZMADecompressor()
                    self.exception = lzma.LZMAError
                else:
                    self.cmp = lzma.LZMACompressor()

            elif comptype != "tar":
                raise CompressionError("unknown compression type %r" % comptype)

        except:
            # Clean up the file we opened ourselves, then re-raise.
            if not self._extfileobj:
                self.fileobj.close()
            self.closed = True
            raise

    def __del__(self):
        if hasattr(self, "closed") and not self.closed:
            self.close()

    def _init_write_gz(self):
        """Initialize for writing with gzip compression.
        """
        self.cmp = self.zlib.compressobj(9, self.zlib.DEFLATED,
                                         -self.zlib.MAX_WBITS,
                                         self.zlib.DEF_MEM_LEVEL,
                                         0)
        timestamp = struct.pack("<L", int(time.time()))
        self.__write(b"\037\213\010\010" + timestamp + b"\002\377")
        if self.name.endswith(".gz"):
            self.name = self.name[:-3]
        # RFC1952 says we must use ISO-8859-1 for the FNAME field.
        self.__write(self.name.encode("iso-8859-1", "replace") + NUL)

    def write(self, s):
        """Write string s to the stream.
        """
        if self.comptype == "gz":
            self.crc = self.zlib.crc32(s, self.crc)
        self.pos += len(s)
        if self.comptype != "tar":
            s = self.cmp.compress(s)
        self.__write(s)

    def __write(self, s):
        """Write string s to the stream if a whole new block
        is ready to be written.
        """
        self.buf += s
        while len(self.buf) > self.bufsize:
            self.fileobj.write(self.buf[:self.bufsize])
            self.buf = self.buf[self.bufsize:]

    def close(self):
        """Close the _Stream object. No operation should be
        done on it afterwards.
        """
        if self.closed:
            return
        if self.mode == "w" and self.comptype != "tar":
            self.buf += self.cmp.flush()
        if self.mode == "w" and self.buf:
            self.fileobj.write(self.buf)
            self.buf = b""
            if self.comptype == "gz":
                # The native zlib crc is an unsigned 32-bit integer, but
                # the Python wrapper implicitly casts that to a signed C
                # long. So, on a 32-bit box self.crc may "look negative",
                # while the same crc on a 64-bit box may "look positive".
                # To avoid irksome warnings from the `struct` module, force
                # it to look positive on all boxes.
                self.fileobj.write(struct.pack("<L", self.crc & 0xffffffff))
                self.fileobj.write(struct.pack("<L", self.pos & 0xffffFFFF))
        if not self._extfileobj:
            self.fileobj.close()
        self.closed = True

    def _init_read_gz(self):
        """Initialize for reading a gzip compressed fileobj.
        """
        self.cmp = self.zlib.decompressobj(-self.zlib.MAX_WBITS)
        self.dbuf = b""

        # taken from gzip.GzipFile with some alterations
        if self.__read(2) != b"\037\213":
            raise ReadError("not a gzip file")
        if self.__read(1) != b"\010":
            raise CompressionError("unsupported compression method")

        flag = ord(self.__read(1))
        # Skip the remaining fixed header fields (mtime, XFL, OS).
        self.__read(6)

        if flag & 4:
            # FEXTRA: skip the extra field (2-byte little-endian length).
            xlen = ord(self.__read(1)) + 256 * ord(self.__read(1))
            self.read(xlen)
        if flag & 8:
            # FNAME: skip the zero-terminated original file name.
            while True:
                s = self.__read(1)
                if not s or s == NUL:
                    break
        if flag & 16:
            # FCOMMENT: skip the zero-terminated comment.
            while True:
                s = self.__read(1)
                if not s or s == NUL:
                    break
        if flag & 2:
            # FHCRC: skip the header CRC16.
            self.__read(2)

    def tell(self):
        """Return the stream's file pointer position.
        """
        return self.pos

    def seek(self, pos=0):
        """Set the stream's file pointer to pos. Negative seeking
        is forbidden.
        """
        if pos - self.pos >= 0:
            # Forward seeks are emulated by reading and discarding data.
            blocks, remainder = divmod(pos - self.pos, self.bufsize)
            for i in range(blocks):
                self.read(self.bufsize)
            self.read(remainder)
        else:
            raise StreamError("seeking backwards is not allowed")
        return self.pos

    def read(self, size=None):
        """Return the next size number of bytes from the stream.
        If size is not defined, return all bytes of the stream
        up to EOF.
        """
        if size is None:
            t = []
            while True:
                buf = self._read(self.bufsize)
                if not buf:
                    break
                t.append(buf)
            # BUGFIX: chunks are bytes, so join with a bytes separator
            # (the original used "".join, which raises TypeError here).
            buf = b"".join(t)
        else:
            buf = self._read(size)
        self.pos += len(buf)
        return buf

    def _read(self, size):
        """Return size bytes from the stream.
        """
        if self.comptype == "tar":
            return self.__read(size)

        # Decompress until at least ``size`` bytes are buffered or EOF.
        c = len(self.dbuf)
        while c < size:
            buf = self.__read(self.bufsize)
            if not buf:
                break
            try:
                buf = self.cmp.decompress(buf)
            except self.exception:
                raise ReadError("invalid compressed data")
            self.dbuf += buf
            c += len(buf)
        buf = self.dbuf[:size]
        self.dbuf = self.dbuf[size:]
        return buf

    def __read(self, size):
        """Return size bytes from stream. If internal buffer is empty,
        read another block from the stream.
        """
        c = len(self.buf)
        while c < size:
            buf = self.fileobj.read(self.bufsize)
            if not buf:
                break
            self.buf += buf
            c += len(buf)
        buf = self.buf[:size]
        self.buf = self.buf[size:]
        return buf
# class _Stream
class _StreamProxy(object):
    """Small proxy class that enables transparent compression
    detection for the Stream interface (mode 'r|*').
    """

    def __init__(self, fileobj):
        self.fileobj = fileobj
        # Read one block up front so the magic bytes can be sniffed.
        self.buf = self.fileobj.read(BLOCKSIZE)

    def read(self, size):
        # First call hands back the sniffed block; the method then
        # rebinds itself so later reads go straight to the file object.
        self.read = self.fileobj.read
        return self.buf

    def getcomptype(self):
        # Identify the compression from well-known magic numbers.
        if self.buf.startswith(b"\x1f\x8b\x08"):
            return "gz"
        elif self.buf[0:3] == b"BZh" and self.buf[4:10] == b"1AY&SY":
            return "bz2"
        elif self.buf.startswith((b"\x5d\x00\x00\x80", b"\xfd7zXZ")):
            return "xz"
        else:
            return "tar"

    def close(self):
        self.fileobj.close()
# class StreamProxy
#------------------------
# Extraction file object
#------------------------
class _FileInFile(object):
    """A thin wrapper around an existing file object that
    provides a part of its data as an individual file
    object.

    Supports sparse members: ``blockinfo`` lists (offset, size) data
    runs within the member; gaps between runs read back as NUL bytes.
    """

    def __init__(self, fileobj, offset, size, blockinfo=None):
        self.fileobj = fileobj
        self.offset = offset
        self.size = size
        self.position = 0
        self.name = getattr(fileobj, "name", None)
        self.closed = False

        if blockinfo is None:
            # Non-sparse member: one contiguous data block.
            blockinfo = [(0, size)]

        # Construct a map with data and zero blocks.
        # Each entry is (is_data, start, stop, realpos) where start/stop
        # are member-relative and realpos is the archive offset of a
        # data run (None for zero-fill gaps).
        self.map_index = 0
        self.map = []
        lastpos = 0
        realpos = self.offset
        for offset, size in blockinfo:
            if offset > lastpos:
                self.map.append((False, lastpos, offset, None))
            self.map.append((True, offset, offset + size, realpos))
            realpos += size
            lastpos = offset + size
        if lastpos < self.size:
            # Trailing hole up to the member's logical size.
            self.map.append((False, lastpos, self.size, None))

    def flush(self):
        pass

    def readable(self):
        return True

    def writable(self):
        return False

    def seekable(self):
        return self.fileobj.seekable()

    def tell(self):
        """Return the current file position.
        """
        return self.position

    def seek(self, position, whence=io.SEEK_SET):
        """Seek to a position in the file.
        """
        # Position is always clamped to [0, size].
        if whence == io.SEEK_SET:
            self.position = min(max(position, 0), self.size)
        elif whence == io.SEEK_CUR:
            if position < 0:
                self.position = max(self.position + position, 0)
            else:
                self.position = min(self.position + position, self.size)
        elif whence == io.SEEK_END:
            self.position = max(min(self.size + position, self.size), 0)
        else:
            raise ValueError("Invalid argument")
        return self.position

    def read(self, size=None):
        """Read data from the file.
        """
        if size is None:
            size = self.size - self.position
        else:
            size = min(size, self.size - self.position)

        buf = b""
        while size > 0:
            # Find the map entry covering the current position; the scan
            # wraps around because seeks may move backwards.
            while True:
                data, start, stop, offset = self.map[self.map_index]
                if start <= self.position < stop:
                    break
                else:
                    self.map_index += 1
                    if self.map_index == len(self.map):
                        self.map_index = 0
            length = min(size, stop - self.position)
            if data:
                # Data run: read from the archive at the mapped offset.
                self.fileobj.seek(offset + (self.position - start))
                buf += self.fileobj.read(length)
            else:
                # Hole: synthesize NUL bytes.
                buf += NUL * length
            size -= length
            self.position += length
        return buf

    def readinto(self, b):
        buf = self.read(len(b))
        b[:len(buf)] = buf
        return len(buf)

    def close(self):
        self.closed = True
#class _FileInFile
class ExFileObject(io.BufferedReader):
    """Buffered, read-only file object exposing one archive member's data."""

    def __init__(self, tarfile, tarinfo):
        # _FileInFile maps the member's (possibly sparse) data region
        # inside the archive; BufferedReader layers buffering/readline
        # on top of it.
        fileobj = _FileInFile(tarfile.fileobj, tarinfo.offset_data,
                              tarinfo.size, tarinfo.sparse)
        super().__init__(fileobj)
#class ExFileObject
#------------------
# Exported Classes
#------------------
class TarInfo(object):
"""Informational class which holds the details about an
archive member given by a tar header block.
TarInfo objects are returned by TarFile.getmember(),
TarFile.getmembers() and TarFile.gettarinfo() and are
usually created internally.
"""
__slots__ = ("name", "mode", "uid", "gid", "size", "mtime",
"chksum", "type", "linkname", "uname", "gname",
"devmajor", "devminor",
"offset", "offset_data", "pax_headers", "sparse",
"tarfile", "_sparse_structs", "_link_target")
def __init__(self, name=""):
"""Construct a TarInfo object. name is the optional name
of the member.
"""
self.name = name # member name
self.mode = 0o644 # file permissions
self.uid = 0 # user id
self.gid = 0 # group id
self.size = 0 # file size
self.mtime = 0 # modification time
self.chksum = 0 # header checksum
self.type = REGTYPE # member type
self.linkname = "" # link name
self.uname = "" # user name
self.gname = "" # group name
self.devmajor = 0 # device major number
self.devminor = 0 # device minor number
self.offset = 0 # the tar header starts here
self.offset_data = 0 # the file's data starts here
self.sparse = None # sparse member information
self.pax_headers = {} # pax header information
# In pax headers the "name" and "linkname" field are called
# "path" and "linkpath".
def _getpath(self):
return self.name
def _setpath(self, name):
self.name = name
path = property(_getpath, _setpath)
def _getlinkpath(self):
return self.linkname
def _setlinkpath(self, linkname):
self.linkname = linkname
linkpath = property(_getlinkpath, _setlinkpath)
def __repr__(self):
return "<%s %r at %#x>" % (self.__class__.__name__,self.name,id(self))
def get_info(self):
"""Return the TarInfo's attributes as a dictionary.
"""
info = {
"name": self.name,
"mode": self.mode & 0o7777,
"uid": self.uid,
"gid": self.gid,
"size": self.size,
"mtime": self.mtime,
"chksum": self.chksum,
"type": self.type,
"linkname": self.linkname,
"uname": self.uname,
"gname": self.gname,
"devmajor": self.devmajor,
"devminor": self.devminor
}
if info["type"] == DIRTYPE and not info["name"].endswith("/"):
info["name"] += "/"
return info
def tobuf(self, format=DEFAULT_FORMAT, encoding=ENCODING, errors="surrogateescape"):
"""Return a tar header as a string of 512 byte blocks.
"""
info = self.get_info()
if format == USTAR_FORMAT:
return self.create_ustar_header(info, encoding, errors)
elif format == GNU_FORMAT:
return self.create_gnu_header(info, encoding, errors)
elif format == PAX_FORMAT:
return self.create_pax_header(info, encoding)
else:
raise ValueError("invalid format")
def create_ustar_header(self, info, encoding, errors):
"""Return the object as a ustar header block.
"""
info["magic"] = POSIX_MAGIC
if len(info["linkname"]) > LENGTH_LINK:
raise ValueError("linkname is too long")
if len(info["name"]) > LENGTH_NAME:
info["prefix"], info["name"] = self._posix_split_name(info["name"])
return self._create_header(info, USTAR_FORMAT, encoding, errors)
def create_gnu_header(self, info, encoding, errors):
"""Return the object as a GNU header block sequence.
"""
info["magic"] = GNU_MAGIC
buf = b""
if len(info["linkname"]) > LENGTH_LINK:
buf += self._create_gnu_long_header(info["linkname"], GNUTYPE_LONGLINK, encoding, errors)
if len(info["name"]) > LENGTH_NAME:
buf += self._create_gnu_long_header(info["name"], GNUTYPE_LONGNAME, encoding, errors)
return buf + self._create_header(info, GNU_FORMAT, encoding, errors)
def create_pax_header(self, info, encoding):
"""Return the object as a ustar header block. If it cannot be
represented this way, prepend a pax extended header sequence
with supplement information.
"""
info["magic"] = POSIX_MAGIC
pax_headers = self.pax_headers.copy()
# Test string fields for values that exceed the field length or cannot
# be represented in ASCII encoding.
for name, hname, length in (
("name", "path", LENGTH_NAME), ("linkname", "linkpath", LENGTH_LINK),
("uname", "uname", 32), ("gname", "gname", 32)):
if hname in pax_headers:
# The pax header has priority.
continue
# Try to encode the string as ASCII.
try:
info[name].encode("ascii", "strict")
except UnicodeEncodeError:
pax_headers[hname] = info[name]
continue
if len(info[name]) > length:
pax_headers[hname] = info[name]
# Test number fields for values that exceed the field limit or values
# that like to be stored as float.
for name, digits in (("uid", 8), ("gid", 8), ("size", 12), ("mtime", 12)):
if name in pax_headers:
# The pax header has priority. Avoid overflow.
info[name] = 0
continue
val = info[name]
if not 0 <= val < 8 ** (digits - 1) or isinstance(val, float):
pax_headers[name] = str(val)
info[name] = 0
# Create a pax extended header if necessary.
if pax_headers:
buf = self._create_pax_generic_header(pax_headers, XHDTYPE, encoding)
else:
buf = b""
return buf + self._create_header(info, USTAR_FORMAT, "ascii", "replace")
@classmethod
def create_pax_global_header(cls, pax_headers):
"""Return the object as a pax global header block sequence.
"""
return cls._create_pax_generic_header(pax_headers, XGLTYPE, "utf-8")
def _posix_split_name(self, name):
"""Split a name longer than 100 chars into a prefix
and a name part.
"""
prefix = name[:LENGTH_PREFIX + 1]
while prefix and prefix[-1] != "/":
prefix = prefix[:-1]
name = name[len(prefix):]
prefix = prefix[:-1]
if not prefix or len(name) > LENGTH_NAME:
raise ValueError("name is too long")
return prefix, name
    @staticmethod
    def _create_header(info, format, encoding, errors):
        """Return a header block. info is a dictionary with file
        information, format must be one of the *_FORMAT constants.

        Missing fields fall back to benign defaults so partial info
        dicts (e.g. for pseudo-members) can be serialized too.
        """
        # Field layout follows the ustar specification; each stn()/itn()
        # call emits one fixed-width field at its required size.
        parts = [
            stn(info.get("name", ""), 100, encoding, errors),
            itn(info.get("mode", 0) & 0o7777, 8, format),
            itn(info.get("uid", 0), 8, format),
            itn(info.get("gid", 0), 8, format),
            itn(info.get("size", 0), 12, format),
            itn(info.get("mtime", 0), 12, format),
            b"        ", # checksum field (8 bytes, blanked for the calculation)
            info.get("type", REGTYPE),
            stn(info.get("linkname", ""), 100, encoding, errors),
            info.get("magic", POSIX_MAGIC),
            stn(info.get("uname", ""), 32, encoding, errors),
            stn(info.get("gname", ""), 32, encoding, errors),
            itn(info.get("devmajor", 0), 8, format),
            itn(info.get("devminor", 0), 8, format),
            stn(info.get("prefix", ""), 155, encoding, errors)
        ]
        # struct.pack pads the joined fields with NULs up to BLOCKSIZE.
        buf = struct.pack("%ds" % BLOCKSIZE, b"".join(parts))
        # Compute the checksum over the block (checksum field still blank)
        # and patch it into bytes 148..155 of the header:
        # BLOCKSIZE - 148 == 364, BLOCKSIZE - 155 == 357.
        chksum = calc_chksums(buf[-BLOCKSIZE:])[0]
        buf = buf[:-364] + bytes("%06o\0" % chksum, "ascii") + buf[-357:]
        return buf
@staticmethod
def _create_payload(payload):
"""Return the string payload filled with zero bytes
up to the next 512 byte border.
"""
blocks, remainder = divmod(len(payload), BLOCKSIZE)
if remainder > 0:
payload += (BLOCKSIZE - remainder) * NUL
return payload
@classmethod
def _create_gnu_long_header(cls, name, type, encoding, errors):
"""Return a GNUTYPE_LONGNAME or GNUTYPE_LONGLINK sequence
for name.
"""
name = name.encode(encoding, errors) + NUL
info = {}
info["name"] = "././@LongLink"
info["type"] = type
info["size"] = len(name)
info["magic"] = GNU_MAGIC
# create extended header + name blocks.
return cls._create_header(info, USTAR_FORMAT, encoding, errors) + \
cls._create_payload(name)
    @classmethod
    def _create_pax_generic_header(cls, pax_headers, type, encoding):
        """Return a POSIX.1-2008 extended or global header sequence
        that contains a list of keyword, value pairs. The values
        must be strings.
        """
        # Check if one of the fields contains surrogate characters and thereby
        # forces hdrcharset=BINARY, see _proc_pax() for more information.
        binary = False
        for keyword, value in pax_headers.items():
            try:
                value.encode("utf-8", "strict")
            except UnicodeEncodeError:
                binary = True
                break
        records = b""
        if binary:
            # Put the hdrcharset field at the beginning of the header.
            records += b"21 hdrcharset=BINARY\n"
        for keyword, value in pax_headers.items():
            keyword = keyword.encode("utf-8")
            if binary:
                # Try to restore the original byte representation of `value'.
                # Needless to say, that the encoding must match the string.
                value = value.encode(encoding, "surrogateescape")
            else:
                value = value.encode("utf-8")
            # A record is "%d %s=%s\n" % (length, keyword, value) where
            # length counts the whole record including the length field
            # itself -- so the length depends on its own number of digits.
            l = len(keyword) + len(value) + 3 # ' ' + '=' + '\n'
            n = p = 0
            # Fixed-point iteration: grow the candidate length until adding
            # its own decimal representation no longer changes it.
            while True:
                n = l + len(str(p))
                if n == p:
                    break
                p = n
            records += bytes(str(p), "ascii") + b" " + keyword + b"=" + value + b"\n"
        # We use a hardcoded "././@PaxHeader" name like star does
        # instead of the one that POSIX recommends.
        info = {}
        info["name"] = "././@PaxHeader"
        info["type"] = type
        info["size"] = len(records)
        info["magic"] = POSIX_MAGIC
        # Create pax header + record blocks.
        return cls._create_header(info, USTAR_FORMAT, "ascii", "replace") + \
                cls._create_payload(records)
    @classmethod
    def frombuf(cls, buf, encoding, errors):
        """Construct a TarInfo object from a 512 byte bytes object.

        Raises EmptyHeaderError, TruncatedHeaderError, EOFHeaderError
        or InvalidHeaderError for the corresponding malformed input.
        """
        if len(buf) == 0:
            raise EmptyHeaderError("empty header")
        if len(buf) != BLOCKSIZE:
            raise TruncatedHeaderError("truncated header")
        # An all-NUL block marks the end of the archive.
        if buf.count(NUL) == BLOCKSIZE:
            raise EOFHeaderError("end of file header")
        # The checksum field occupies bytes 148..155.
        chksum = nti(buf[148:156])
        if chksum not in calc_chksums(buf):
            raise InvalidHeaderError("bad checksum")
        obj = cls()
        # Decode the fixed-width ustar fields by byte offset.
        obj.name = nts(buf[0:100], encoding, errors)
        obj.mode = nti(buf[100:108])
        obj.uid = nti(buf[108:116])
        obj.gid = nti(buf[116:124])
        obj.size = nti(buf[124:136])
        obj.mtime = nti(buf[136:148])
        obj.chksum = chksum
        obj.type = buf[156:157]
        obj.linkname = nts(buf[157:257], encoding, errors)
        obj.uname = nts(buf[265:297], encoding, errors)
        obj.gname = nts(buf[297:329], encoding, errors)
        obj.devmajor = nti(buf[329:337])
        obj.devminor = nti(buf[337:345])
        prefix = nts(buf[345:500], encoding, errors)
        # Old V7 tar format represents a directory as a regular
        # file with a trailing slash.
        if obj.type == AREGTYPE and obj.name.endswith("/"):
            obj.type = DIRTYPE
        # The old GNU sparse format occupies some of the unused
        # space in the buffer for up to 4 sparse structures.
        # Save the them for later processing in _proc_sparse().
        if obj.type == GNUTYPE_SPARSE:
            pos = 386
            structs = []
            # Each sparse struct is 12 bytes offset + 12 bytes size.
            for i in range(4):
                try:
                    offset = nti(buf[pos:pos + 12])
                    numbytes = nti(buf[pos + 12:pos + 24])
                except ValueError:
                    break
                structs.append((offset, numbytes))
                pos += 24
            # Byte 482 flags whether extension blocks follow the header.
            isextended = bool(buf[482])
            origsize = nti(buf[483:495])
            obj._sparse_structs = (structs, isextended, origsize)
        # Remove redundant slashes from directories.
        if obj.isdir():
            obj.name = obj.name.rstrip("/")
        # Reconstruct a ustar longname.
        if prefix and obj.type not in GNU_TYPES:
            obj.name = prefix + "/" + obj.name
        return obj
@classmethod
def fromtarfile(cls, tarfile):
"""Return the next TarInfo object from TarFile object
tarfile.
"""
buf = tarfile.fileobj.read(BLOCKSIZE)
obj = cls.frombuf(buf, tarfile.encoding, tarfile.errors)
obj.offset = tarfile.fileobj.tell() - BLOCKSIZE
return obj._proc_member(tarfile)
#--------------------------------------------------------------------------
# The following are methods that are called depending on the type of a
# member. The entry point is _proc_member() which can be overridden in a
# subclass to add custom _proc_*() methods. A _proc_*() method MUST
# implement the following
# operations:
# 1. Set self.offset_data to the position where the data blocks begin,
# if there is data that follows.
# 2. Set tarfile.offset to the position where the next member's header will
# begin.
# 3. Return self or another valid TarInfo object.
def _proc_member(self, tarfile):
"""Choose the right processing method depending on
the type and call it.
"""
if self.type in (GNUTYPE_LONGNAME, GNUTYPE_LONGLINK):
return self._proc_gnulong(tarfile)
elif self.type == GNUTYPE_SPARSE:
return self._proc_sparse(tarfile)
elif self.type in (XHDTYPE, XGLTYPE, SOLARIS_XHDTYPE):
return self._proc_pax(tarfile)
else:
return self._proc_builtin(tarfile)
def _proc_builtin(self, tarfile):
"""Process a builtin type or an unknown type which
will be treated as a regular file.
"""
self.offset_data = tarfile.fileobj.tell()
offset = self.offset_data
if self.isreg() or self.type not in SUPPORTED_TYPES:
# Skip the following data blocks.
offset += self._block(self.size)
tarfile.offset = offset
# Patch the TarInfo object with saved global
# header information.
self._apply_pax_info(tarfile.pax_headers, tarfile.encoding, tarfile.errors)
return self
def _proc_gnulong(self, tarfile):
"""Process the blocks that hold a GNU longname
or longlink member.
"""
buf = tarfile.fileobj.read(self._block(self.size))
# Fetch the next header and process it.
try:
next = self.fromtarfile(tarfile)
except HeaderError:
raise SubsequentHeaderError("missing or bad subsequent header")
# Patch the TarInfo object from the next header with
# the longname information.
next.offset = self.offset
if self.type == GNUTYPE_LONGNAME:
next.name = nts(buf, tarfile.encoding, tarfile.errors)
elif self.type == GNUTYPE_LONGLINK:
next.linkname = nts(buf, tarfile.encoding, tarfile.errors)
return next
    def _proc_sparse(self, tarfile):
        """Process a GNU sparse header plus extra headers.
        """
        # We already collected some sparse structures in frombuf().
        structs, isextended, origsize = self._sparse_structs
        del self._sparse_structs
        # Collect sparse structures from extended header blocks.
        while isextended:
            buf = tarfile.fileobj.read(BLOCKSIZE)
            pos = 0
            # An extension block holds up to 21 structs of 24 bytes each
            # (21 * 24 == 504), followed by the continuation flag byte.
            for i in range(21):
                try:
                    offset = nti(buf[pos:pos + 12])
                    numbytes = nti(buf[pos + 12:pos + 24])
                except ValueError:
                    break
                if offset and numbytes:
                    structs.append((offset, numbytes))
                pos += 24
            # Byte 504 flags whether yet another extension block follows.
            isextended = bool(buf[504])
        self.sparse = structs
        self.offset_data = tarfile.fileobj.tell()
        tarfile.offset = self.offset_data + self._block(self.size)
        # Report the real (expanded) file size, not the stored size.
        self.size = origsize
        return self
    def _proc_pax(self, tarfile):
        """Process an extended or global header as described in
        POSIX.1-2008.

        Returns the TarInfo for the *following* real member, patched
        with the information from this header.
        """
        # Read the header information.
        buf = tarfile.fileobj.read(self._block(self.size))
        # A pax header stores supplemental information for either
        # the following file (extended) or all following files
        # (global).
        if self.type == XGLTYPE:
            pax_headers = tarfile.pax_headers
        else:
            pax_headers = tarfile.pax_headers.copy()
        # Check if the pax header contains a hdrcharset field. This tells us
        # the encoding of the path, linkpath, uname and gname fields. Normally,
        # these fields are UTF-8 encoded but since POSIX.1-2008 tar
        # implementations are allowed to store them as raw binary strings if
        # the translation to UTF-8 fails.
        match = re.search(br"\d+ hdrcharset=([^\n]+)\n", buf)
        if match is not None:
            pax_headers["hdrcharset"] = match.group(1).decode("utf-8")
        # For the time being, we don't care about anything other than "BINARY".
        # The only other value that is currently allowed by the standard is
        # "ISO-IR 10646 2000 UTF-8" in other words UTF-8.
        hdrcharset = pax_headers.get("hdrcharset")
        if hdrcharset == "BINARY":
            encoding = tarfile.encoding
        else:
            encoding = "utf-8"
        # Parse pax header information. A record looks like that:
        # "%d %s=%s\n" % (length, keyword, value). length is the size
        # of the complete record including the length field itself and
        # the newline. keyword and value are both UTF-8 encoded strings.
        regex = re.compile(br"(\d+) ([^=]+)=")
        pos = 0
        while True:
            match = regex.match(buf, pos)
            if not match:
                break
            length, keyword = match.groups()
            length = int(length)
            # The value spans from just past '=' to just before '\n'.
            value = buf[match.end(2) + 1:match.start(1) + length - 1]
            # Normally, we could just use "utf-8" as the encoding and "strict"
            # as the error handler, but we better not take the risk. For
            # example, GNU tar <= 1.23 is known to store filenames it cannot
            # translate to UTF-8 as raw strings (unfortunately without a
            # hdrcharset=BINARY header).
            # We first try the strict standard encoding, and if that fails we
            # fall back on the user's encoding and error handler.
            keyword = self._decode_pax_field(keyword, "utf-8", "utf-8",
                tarfile.errors)
            if keyword in PAX_NAME_FIELDS:
                value = self._decode_pax_field(value, encoding, tarfile.encoding,
                    tarfile.errors)
            else:
                value = self._decode_pax_field(value, "utf-8", "utf-8",
                    tarfile.errors)
            pax_headers[keyword] = value
            pos += length
        # Fetch the next header.
        try:
            next = self.fromtarfile(tarfile)
        except HeaderError:
            raise SubsequentHeaderError("missing or bad subsequent header")
        # Process GNU sparse information.
        if "GNU.sparse.map" in pax_headers:
            # GNU extended sparse format version 0.1.
            self._proc_gnusparse_01(next, pax_headers)
        elif "GNU.sparse.size" in pax_headers:
            # GNU extended sparse format version 0.0.
            self._proc_gnusparse_00(next, pax_headers, buf)
        elif pax_headers.get("GNU.sparse.major") == "1" and pax_headers.get("GNU.sparse.minor") == "0":
            # GNU extended sparse format version 1.0.
            self._proc_gnusparse_10(next, pax_headers, tarfile)
        if self.type in (XHDTYPE, SOLARIS_XHDTYPE):
            # Patch the TarInfo object with the extended header info.
            next._apply_pax_info(pax_headers, tarfile.encoding, tarfile.errors)
            next.offset = self.offset
            if "size" in pax_headers:
                # If the extended header replaces the size field,
                # we need to recalculate the offset where the next
                # header starts.
                offset = next.offset_data
                if next.isreg() or next.type not in SUPPORTED_TYPES:
                    offset += next._block(next.size)
                tarfile.offset = offset
        return next
def _proc_gnusparse_00(self, next, pax_headers, buf):
"""Process a GNU tar extended sparse header, version 0.0.
"""
offsets = []
for match in re.finditer(br"\d+ GNU.sparse.offset=(\d+)\n", buf):
offsets.append(int(match.group(1)))
numbytes = []
for match in re.finditer(br"\d+ GNU.sparse.numbytes=(\d+)\n", buf):
numbytes.append(int(match.group(1)))
next.sparse = list(zip(offsets, numbytes))
def _proc_gnusparse_01(self, next, pax_headers):
"""Process a GNU tar extended sparse header, version 0.1.
"""
sparse = [int(x) for x in pax_headers["GNU.sparse.map"].split(",")]
next.sparse = list(zip(sparse[::2], sparse[1::2]))
    def _proc_gnusparse_10(self, next, pax_headers, tarfile):
        """Process a GNU tar extended sparse header, version 1.0.

        The sparse map is stored as newline-separated decimal numbers in
        the data blocks that precede the member's actual data: first a
        count of (offset, size) pairs, then the pairs themselves.
        """
        fields = None
        sparse = []
        buf = tarfile.fileobj.read(BLOCKSIZE)
        # First line holds the number of map entries.
        fields, buf = buf.split(b"\n", 1)
        fields = int(fields)
        # Read numbers until `fields' pairs have been collected,
        # refilling the buffer block by block as needed.
        while len(sparse) < fields * 2:
            if b"\n" not in buf:
                buf += tarfile.fileobj.read(BLOCKSIZE)
            number, buf = buf.split(b"\n", 1)
            sparse.append(int(number))
        # The member's real data starts after the map blocks.
        next.offset_data = tarfile.fileobj.tell()
        next.sparse = list(zip(sparse[::2], sparse[1::2]))
def _apply_pax_info(self, pax_headers, encoding, errors):
"""Replace fields with supplemental information from a previous
pax extended or global header.
"""
for keyword, value in pax_headers.items():
if keyword == "GNU.sparse.name":
setattr(self, "path", value)
elif keyword == "GNU.sparse.size":
setattr(self, "size", int(value))
elif keyword == "GNU.sparse.realsize":
setattr(self, "size", int(value))
elif keyword in PAX_FIELDS:
if keyword in PAX_NUMBER_FIELDS:
try:
value = PAX_NUMBER_FIELDS[keyword](value)
except ValueError:
value = 0
if keyword == "path":
value = value.rstrip("/")
setattr(self, keyword, value)
self.pax_headers = pax_headers.copy()
def _decode_pax_field(self, value, encoding, fallback_encoding, fallback_errors):
"""Decode a single field from a pax record.
"""
try:
return value.decode(encoding, "strict")
except UnicodeDecodeError:
return value.decode(fallback_encoding, fallback_errors)
def _block(self, count):
"""Round up a byte count by BLOCKSIZE and return it,
e.g. _block(834) => 1024.
"""
blocks, remainder = divmod(count, BLOCKSIZE)
if remainder:
blocks += 1
return blocks * BLOCKSIZE
    def isreg(self):
        """Return True if the member is a regular file."""
        return self.type in REGULAR_TYPES
    def isfile(self):
        """Return True if the member is a regular file (alias of isreg)."""
        return self.isreg()
    def isdir(self):
        """Return True if the member is a directory."""
        return self.type == DIRTYPE
    def issym(self):
        """Return True if the member is a symbolic link."""
        return self.type == SYMTYPE
    def islnk(self):
        """Return True if the member is a hard link."""
        return self.type == LNKTYPE
    def ischr(self):
        """Return True if the member is a character device."""
        return self.type == CHRTYPE
    def isblk(self):
        """Return True if the member is a block device."""
        return self.type == BLKTYPE
    def isfifo(self):
        """Return True if the member is a FIFO."""
        return self.type == FIFOTYPE
    def issparse(self):
        """Return True if the member has a sparse map attached."""
        return self.sparse is not None
    def isdev(self):
        """Return True if the member is a character/block device or FIFO."""
        return self.type in (CHRTYPE, BLKTYPE, FIFOTYPE)
# class TarInfo
class TarFile(object):
    """The TarFile Class provides an interface to tar archives.

    The class attributes below act as overridable defaults; each may be
    replaced per-instance through the corresponding __init__ argument.
    """
    debug = 0 # May be set from 0 (no msgs) to 3 (all msgs)
    dereference = False # If true, add content of linked file to the
    # tar file, else the link.
    ignore_zeros = False # If true, skips empty or invalid blocks and
    # continues processing.
    errorlevel = 1 # If 0, fatal errors only appear in debug
    # messages (if debug >= 0). If > 0, errors
    # are passed to the caller as exceptions.
    format = DEFAULT_FORMAT # The format to use when creating an archive.
    encoding = ENCODING # Encoding for 8-bit character strings.
    errors = None # Error handler for unicode conversion.
    tarinfo = TarInfo # The default TarInfo class to use.
    fileobject = ExFileObject # The file-object for extractfile().
    def __init__(self, name=None, mode="r", fileobj=None, format=None,
            tarinfo=None, dereference=None, ignore_zeros=None, encoding=None,
            errors="surrogateescape", pax_headers=None, debug=None, errorlevel=None):
        """Open an (uncompressed) tar archive `name'. `mode' is either 'r' to
        read from an existing archive, 'a' to append data to an existing
        file or 'w' to create a new file overwriting an existing one. `mode'
        defaults to 'r'.
        If `fileobj' is given, it is used for reading or writing data. If it
        can be determined, `mode' is overridden by `fileobj's mode.
        `fileobj' is not closed, when TarFile is closed.
        """
        if len(mode) > 1 or mode not in "raw":
            raise ValueError("mode must be 'r', 'a' or 'w'")
        self.mode = mode
        # Map the logical mode to the underlying binary file mode.
        self._mode = {"r": "rb", "a": "r+b", "w": "wb"}[mode]
        if not fileobj:
            if self.mode == "a" and not os.path.exists(name):
                # Create nonexistent files in append mode.
                self.mode = "w"
                self._mode = "wb"
            fileobj = bltn_open(name, self._mode)
            self._extfileobj = False
        else:
            if name is None and hasattr(fileobj, "name"):
                name = fileobj.name
            if hasattr(fileobj, "mode"):
                self._mode = fileobj.mode
            # A caller-supplied file object must not be closed by us.
            self._extfileobj = True
        self.name = os.path.abspath(name) if name else None
        self.fileobj = fileobj
        # Init attributes.
        if format is not None:
            self.format = format
        if tarinfo is not None:
            self.tarinfo = tarinfo
        if dereference is not None:
            self.dereference = dereference
        if ignore_zeros is not None:
            self.ignore_zeros = ignore_zeros
        if encoding is not None:
            self.encoding = encoding
        self.errors = errors
        # pax_headers only apply when writing PAX_FORMAT archives.
        if pax_headers is not None and self.format == PAX_FORMAT:
            self.pax_headers = pax_headers
        else:
            self.pax_headers = {}
        if debug is not None:
            self.debug = debug
        if errorlevel is not None:
            self.errorlevel = errorlevel
        # Init datastructures.
        self.closed = False
        self.members = [] # list of members as TarInfo objects
        self._loaded = False # flag if all members have been read
        self.offset = self.fileobj.tell()
        # current position in the archive file
        self.inodes = {} # dictionary caching the inodes of
        # archive members already added
        try:
            if self.mode == "r":
                self.firstmember = None
                self.firstmember = self.next()
            if self.mode == "a":
                # Move to the end of the archive,
                # before the first empty block.
                while True:
                    self.fileobj.seek(self.offset)
                    try:
                        tarinfo = self.tarinfo.fromtarfile(self)
                        self.members.append(tarinfo)
                    except EOFHeaderError:
                        self.fileobj.seek(self.offset)
                        break
                    except HeaderError as e:
                        raise ReadError(str(e))
            if self.mode in "aw":
                self._loaded = True
                if self.pax_headers:
                    buf = self.tarinfo.create_pax_global_header(self.pax_headers.copy())
                    self.fileobj.write(buf)
                    self.offset += len(buf)
        except:
            # Undo partial construction: close what we opened, mark the
            # archive closed, and re-raise the original exception.
            if not self._extfileobj:
                self.fileobj.close()
            self.closed = True
            raise
#--------------------------------------------------------------------------
# Below are the classmethods which act as alternate constructors to the
# TarFile class. The open() method is the only one that is needed for
# public use; it is the "super"-constructor and is able to select an
# adequate "sub"-constructor for a particular compression using the mapping
# from OPEN_METH.
#
# This concept allows one to subclass TarFile without losing the comfort of
# the super-constructor. A sub-constructor is registered and made available
# by adding it to the mapping in OPEN_METH.
    @classmethod
    def open(cls, name=None, mode="r", fileobj=None, bufsize=RECORDSIZE, **kwargs):
        """Open a tar archive for reading, writing or appending. Return
        an appropriate TarFile class.
        mode:
        'r' or 'r:*' open for reading with transparent compression
        'r:' open for reading exclusively uncompressed
        'r:gz' open for reading with gzip compression
        'r:bz2' open for reading with bzip2 compression
        'r:xz' open for reading with lzma compression
        'a' or 'a:' open for appending, creating the file if necessary
        'w' or 'w:' open for writing without compression
        'w:gz' open for writing with gzip compression
        'w:bz2' open for writing with bzip2 compression
        'w:xz' open for writing with lzma compression
        'r|*' open a stream of tar blocks with transparent compression
        'r|' open an uncompressed stream of tar blocks for reading
        'r|gz' open a gzip compressed stream of tar blocks
        'r|bz2' open a bzip2 compressed stream of tar blocks
        'r|xz' open an lzma compressed stream of tar blocks
        'w|' open an uncompressed stream for writing
        'w|gz' open a gzip compressed stream for writing
        'w|bz2' open a bzip2 compressed stream for writing
        'w|xz' open an lzma compressed stream for writing
        """
        if not name and not fileobj:
            raise ValueError("nothing to open")
        if mode in ("r", "r:*"):
            # Find out which *open() is appropriate for opening the file.
            for comptype in cls.OPEN_METH:
                func = getattr(cls, cls.OPEN_METH[comptype])
                if fileobj is not None:
                    # Remember the position so a failed attempt can rewind
                    # before trying the next compression method.
                    saved_pos = fileobj.tell()
                try:
                    return func(name, "r", fileobj, **kwargs)
                except (ReadError, CompressionError) as e:
                    if fileobj is not None:
                        fileobj.seek(saved_pos)
                    continue
            raise ReadError("file could not be opened successfully")
        elif ":" in mode:
            # Explicit compression, e.g. "w:gz".
            filemode, comptype = mode.split(":", 1)
            filemode = filemode or "r"
            comptype = comptype or "tar"
            # Select the *open() function according to
            # given compression.
            if comptype in cls.OPEN_METH:
                func = getattr(cls, cls.OPEN_METH[comptype])
            else:
                raise CompressionError("unknown compression type %r" % comptype)
            return func(name, filemode, fileobj, **kwargs)
        elif "|" in mode:
            # Non-seekable stream of tar blocks, e.g. "r|gz".
            filemode, comptype = mode.split("|", 1)
            filemode = filemode or "r"
            comptype = comptype or "tar"
            if filemode not in "rw":
                raise ValueError("mode must be 'r' or 'w'")
            stream = _Stream(name, filemode, comptype, fileobj, bufsize)
            try:
                t = cls(name, filemode, stream, **kwargs)
            except:
                stream.close()
                raise
            # The stream was created here, so closing the TarFile
            # must close it too.
            t._extfileobj = False
            return t
        elif mode in "aw":
            return cls.taropen(name, mode, fileobj, **kwargs)
        raise ValueError("undiscernible mode")
@classmethod
def taropen(cls, name, mode="r", fileobj=None, **kwargs):
"""Open uncompressed tar archive name for reading or writing.
"""
if len(mode) > 1 or mode not in "raw":
raise ValueError("mode must be 'r', 'a' or 'w'")
return cls(name, mode, fileobj, **kwargs)
    @classmethod
    def gzopen(cls, name, mode="r", fileobj=None, compresslevel=9, **kwargs):
        """Open gzip compressed tar archive name for reading or writing.
        Appending is not allowed.
        """
        if len(mode) > 1 or mode not in "rw":
            raise ValueError("mode must be 'r' or 'w'")
        try:
            import gzip
            # Attribute access guards against a broken/partial gzip module.
            gzip.GzipFile
        except (ImportError, AttributeError):
            raise CompressionError("gzip module is not available")
        # Remember whether the caller supplied the file object, so we
        # never close an object we did not open ourselves.
        extfileobj = fileobj is not None
        try:
            fileobj = gzip.GzipFile(name, mode + "b", compresslevel, fileobj)
            t = cls.taropen(name, mode, fileobj, **kwargs)
        except IOError:
            if not extfileobj and fileobj is not None:
                fileobj.close()
            if fileobj is None:
                # GzipFile() itself failed to open the path; propagate.
                raise
            raise ReadError("not a gzip file")
        except:
            if not extfileobj and fileobj is not None:
                fileobj.close()
            raise
        t._extfileobj = extfileobj
        return t
    @classmethod
    def bz2open(cls, name, mode="r", fileobj=None, compresslevel=9, **kwargs):
        """Open bzip2 compressed tar archive name for reading or writing.
        Appending is not allowed.
        """
        if len(mode) > 1 or mode not in "rw":
            raise ValueError("mode must be 'r' or 'w'.")
        try:
            import bz2
        except ImportError:
            raise CompressionError("bz2 module is not available")
        # BZ2File accepts either a file object or a path.
        fileobj = bz2.BZ2File(fileobj or name, mode,
                compresslevel=compresslevel)
        try:
            t = cls.taropen(name, mode, fileobj, **kwargs)
        except (IOError, EOFError):
            # Decompression failed: not valid bzip2 data.
            fileobj.close()
            raise ReadError("not a bzip2 file")
        # The BZ2File wrapper was created here, so close it with the TarFile.
        t._extfileobj = False
        return t
    @classmethod
    def xzopen(cls, name, mode="r", fileobj=None, preset=None, **kwargs):
        """Open lzma compressed tar archive name for reading or writing.
        Appending is not allowed.
        """
        if mode not in ("r", "w"):
            raise ValueError("mode must be 'r' or 'w'")
        try:
            import lzma
        except ImportError:
            raise CompressionError("lzma module is not available")
        # LZMAFile accepts either a file object or a path.
        fileobj = lzma.LZMAFile(fileobj or name, mode, preset=preset)
        try:
            t = cls.taropen(name, mode, fileobj, **kwargs)
        except (lzma.LZMAError, EOFError):
            # Decompression failed: not valid lzma/xz data.
            fileobj.close()
            raise ReadError("not an lzma file")
        # The LZMAFile wrapper was created here, so close it with the TarFile.
        t._extfileobj = False
        return t
# All *open() methods are registered here.
OPEN_METH = {
"tar": "taropen", # uncompressed tar
"gz": "gzopen", # gzip compressed tar
"bz2": "bz2open", # bzip2 compressed tar
"xz": "xzopen" # lzma compressed tar
}
#--------------------------------------------------------------------------
# The public methods which TarFile provides:
def close(self):
"""Close the TarFile. In write-mode, two finishing zero blocks are
appended to the archive.
"""
if self.closed:
return
if self.mode in "aw":
self.fileobj.write(NUL * (BLOCKSIZE * 2))
self.offset += (BLOCKSIZE * 2)
# fill up the end with zero-blocks
# (like option -b20 for tar does)
blocks, remainder = divmod(self.offset, RECORDSIZE)
if remainder > 0:
self.fileobj.write(NUL * (RECORDSIZE - remainder))
if not self._extfileobj:
self.fileobj.close()
self.closed = True
def getmember(self, name):
"""Return a TarInfo object for member `name'. If `name' can not be
found in the archive, KeyError is raised. If a member occurs more
than once in the archive, its last occurrence is assumed to be the
most up-to-date version.
"""
tarinfo = self._getmember(name)
if tarinfo is None:
raise KeyError("filename %r not found" % name)
return tarinfo
def getmembers(self):
"""Return the members of the archive as a list of TarInfo objects. The
list has the same order as the members in the archive.
"""
self._check()
if not self._loaded: # if we want to obtain a list of
self._load() # all members, we first have to
# scan the whole archive.
return self.members
def getnames(self):
"""Return the members of the archive as a list of their names. It has
the same order as the list returned by getmembers().
"""
return [tarinfo.name for tarinfo in self.getmembers()]
    def gettarinfo(self, name=None, arcname=None, fileobj=None):
        """Create a TarInfo object for either the file `name' or the file
        object `fileobj' (using os.fstat on its file descriptor). You can
        modify some of the TarInfo's attributes before you add it using
        addfile(). If given, `arcname' specifies an alternative name for the
        file in the archive.
        """
        self._check("aw")
        # When fileobj is given, replace name by
        # fileobj's real name.
        if fileobj is not None:
            name = fileobj.name
        # Building the name of the member in the archive.
        # Backward slashes are converted to forward slashes,
        # Absolute paths are turned to relative paths.
        if arcname is None:
            arcname = name
        drv, arcname = os.path.splitdrive(arcname)
        arcname = arcname.replace(os.sep, "/")
        arcname = arcname.lstrip("/")
        # Now, fill the TarInfo object with
        # information specific for the file.
        tarinfo = self.tarinfo()
        tarinfo.tarfile = self
        # Use os.stat or os.lstat, depending on platform
        # and if symlinks shall be resolved.
        if fileobj is None:
            if hasattr(os, "lstat") and not self.dereference:
                statres = os.lstat(name)
            else:
                statres = os.stat(name)
        else:
            statres = os.fstat(fileobj.fileno())
        linkname = ""
        # Map the stat mode to a tar member type.
        stmd = statres.st_mode
        if stat.S_ISREG(stmd):
            inode = (statres.st_ino, statres.st_dev)
            if not self.dereference and statres.st_nlink > 1 and \
                    inode in self.inodes and arcname != self.inodes[inode]:
                # Is it a hardlink to an already
                # archived file?
                type = LNKTYPE
                linkname = self.inodes[inode]
            else:
                # The inode is added only if its valid.
                # For win32 it is always 0.
                type = REGTYPE
                if inode[0]:
                    self.inodes[inode] = arcname
        elif stat.S_ISDIR(stmd):
            type = DIRTYPE
        elif stat.S_ISFIFO(stmd):
            type = FIFOTYPE
        elif stat.S_ISLNK(stmd):
            type = SYMTYPE
            linkname = os.readlink(name)
        elif stat.S_ISCHR(stmd):
            type = CHRTYPE
        elif stat.S_ISBLK(stmd):
            type = BLKTYPE
        else:
            # Sockets and other unsupported file types cannot be archived.
            return None
        # Fill the TarInfo object with all
        # information we can get.
        tarinfo.name = arcname
        tarinfo.mode = stmd
        tarinfo.uid = statres.st_uid
        tarinfo.gid = statres.st_gid
        # Only regular files carry data, so only they have a size.
        if type == REGTYPE:
            tarinfo.size = statres.st_size
        else:
            tarinfo.size = 0
        tarinfo.mtime = statres.st_mtime
        tarinfo.type = type
        tarinfo.linkname = linkname
        # Resolve numeric owner ids to names when the platform supports it.
        if pwd:
            try:
                tarinfo.uname = pwd.getpwuid(tarinfo.uid)[0]
            except KeyError:
                pass
        if grp:
            try:
                tarinfo.gname = grp.getgrgid(tarinfo.gid)[0]
            except KeyError:
                pass
        if type in (CHRTYPE, BLKTYPE):
            # Device members additionally store major/minor numbers.
            if hasattr(os, "major") and hasattr(os, "minor"):
                tarinfo.devmajor = os.major(statres.st_rdev)
                tarinfo.devminor = os.minor(statres.st_rdev)
        return tarinfo
    def list(self, verbose=True):
        """Print a table of contents to sys.stdout. If `verbose' is False, only
        the names of the members are printed. If it is True, an `ls -l'-like
        output is produced.
        """
        self._check()
        for tarinfo in self:
            if verbose:
                # Permission bits, owner/group, size (or device numbers),
                # and timestamp -- mirroring `ls -l' columns.
                print(stat.filemode(tarinfo.mode), end=' ')
                print("%s/%s" % (tarinfo.uname or tarinfo.uid,
                                 tarinfo.gname or tarinfo.gid), end=' ')
                if tarinfo.ischr() or tarinfo.isblk():
                    # Devices show "major,minor" instead of a byte size.
                    print("%10s" % ("%d,%d" \
                                    % (tarinfo.devmajor, tarinfo.devminor)), end=' ')
                else:
                    print("%10d" % tarinfo.size, end=' ')
                print("%d-%02d-%02d %02d:%02d:%02d" \
                      % time.localtime(tarinfo.mtime)[:6], end=' ')
            # Directories get a trailing slash appended for display.
            print(tarinfo.name + ("/" if tarinfo.isdir() else ""), end=' ')
            if verbose:
                if tarinfo.issym():
                    print("->", tarinfo.linkname, end=' ')
                if tarinfo.islnk():
                    print("link to", tarinfo.linkname, end=' ')
            print()
    def add(self, name, arcname=None, recursive=True, exclude=None, *, filter=None):
        """Add the file `name' to the archive. `name' may be any type of file
        (directory, fifo, symbolic link, etc.). If given, `arcname'
        specifies an alternative name for the file in the archive.
        Directories are added recursively by default. This can be avoided by
        setting `recursive' to False. `exclude' is a function that should
        return True for each filename to be excluded. `filter' is a function
        that expects a TarInfo object argument and returns the changed
        TarInfo object, if it returns None the TarInfo object will be
        excluded from the archive.
        """
        self._check("aw")
        if arcname is None:
            arcname = name
        # Exclude pathnames.
        if exclude is not None:
            # `exclude' is deprecated in favour of `filter'.
            import warnings
            warnings.warn("use the filter argument instead",
                    DeprecationWarning, 2)
            if exclude(name):
                self._dbg(2, "tarfile: Excluded %r" % name)
                return
        # Skip if somebody tries to archive the archive...
        if self.name is not None and os.path.abspath(name) == self.name:
            self._dbg(2, "tarfile: Skipped %r" % name)
            return
        self._dbg(1, name)
        # Create a TarInfo object from the file.
        tarinfo = self.gettarinfo(name, arcname)
        if tarinfo is None:
            self._dbg(1, "tarfile: Unsupported type %r" % name)
            return
        # Change or exclude the TarInfo object.
        if filter is not None:
            tarinfo = filter(tarinfo)
            if tarinfo is None:
                self._dbg(2, "tarfile: Excluded %r" % name)
                return
        # Append the tar header and data to the archive.
        if tarinfo.isreg():
            with bltn_open(name, "rb") as f:
                self.addfile(tarinfo, f)
        elif tarinfo.isdir():
            self.addfile(tarinfo)
            # Recurse into directory contents unless disabled.
            if recursive:
                for f in os.listdir(name):
                    self.add(os.path.join(name, f), os.path.join(arcname, f),
                            recursive, exclude, filter=filter)
        else:
            # Symlinks, fifos, devices etc. have a header but no data.
            self.addfile(tarinfo)
    def addfile(self, tarinfo, fileobj=None):
        """Add the TarInfo object `tarinfo' to the archive. If `fileobj' is
        given, tarinfo.size bytes are read from it and added to the archive.
        You can create TarInfo objects using gettarinfo().
        On Windows platforms, `fileobj' should always be opened with mode
        'rb' to avoid irritation about the file size.
        """
        self._check("aw")
        # Work on a copy so the caller's TarInfo object stays untouched.
        tarinfo = copy.copy(tarinfo)
        buf = tarinfo.tobuf(self.format, self.encoding, self.errors)
        self.fileobj.write(buf)
        self.offset += len(buf)
        # If there's data to follow, append it.
        if fileobj is not None:
            copyfileobj(fileobj, self.fileobj, tarinfo.size)
            # Pad the data up to the next 512-byte block boundary and
            # advance the archive offset accordingly.
            blocks, remainder = divmod(tarinfo.size, BLOCKSIZE)
            if remainder > 0:
                self.fileobj.write(NUL * (BLOCKSIZE - remainder))
                blocks += 1
            self.offset += blocks * BLOCKSIZE
        self.members.append(tarinfo)
    def extractall(self, path=".", members=None):
        """Extract all members from the archive to the current working
        directory and set owner, modification time and permissions on
        directories afterwards. `path' specifies a different directory
        to extract to. `members' is optional and must be a subset of the
        list returned by getmembers().
        """
        directories = []
        if members is None:
            members = self
        for tarinfo in members:
            if tarinfo.isdir():
                # Extract directories with a safe mode.
                directories.append(tarinfo)
                tarinfo = copy.copy(tarinfo)
                tarinfo.mode = 0o700
            # Do not set_attrs directories, as we will do that further down
            self.extract(tarinfo, path, set_attrs=not tarinfo.isdir())
        # Reverse sort directories.
        # Deepest-first order so that a child's metadata is set before
        # its parent's permissions are tightened.
        directories.sort(key=lambda a: a.name)
        directories.reverse()
        # Set correct owner, mtime and filemode on directories.
        for tarinfo in directories:
            dirpath = os.path.join(path, tarinfo.name)
            try:
                self.chown(tarinfo, dirpath)
                self.utime(tarinfo, dirpath)
                self.chmod(tarinfo, dirpath)
            except ExtractError as e:
                if self.errorlevel > 1:
                    raise
                else:
                    self._dbg(1, "tarfile: %s" % e)
def extract(self, member, path="", set_attrs=True):
    """Extract a member from the archive to the current working directory,
    using its full name. Its file information is extracted as accurately
    as possible. `member' may be a filename or a TarInfo object. You can
    specify a different directory using `path'. File attributes (owner,
    mtime, mode) are set unless `set_attrs' is False.

    NOTE(review): tarinfo.name is joined to `path` unchecked, so an
    archive containing ".." or absolute names can write outside `path`
    (CVE-2007-4559) -- only extract trusted archives.
    """
    self._check("r")
    # Accept either a member name or a ready TarInfo object.
    if isinstance(member, str):
        tarinfo = self.getmember(member)
    else:
        tarinfo = member
    # Prepare the link target for makelink().
    if tarinfo.islnk():
        tarinfo._link_target = os.path.join(path, tarinfo.linkname)
    try:
        self._extract_member(tarinfo, os.path.join(path, tarinfo.name),
                             set_attrs=set_attrs)
    except EnvironmentError as e:
        # errorlevel 0: OS errors are only logged.
        if self.errorlevel > 0:
            raise
        else:
            if e.filename is None:
                self._dbg(1, "tarfile: %s" % e.strerror)
            else:
                self._dbg(1, "tarfile: %s %r" % (e.strerror, e.filename))
    except ExtractError as e:
        # errorlevel <= 1: non-fatal extraction problems are only logged.
        if self.errorlevel > 1:
            raise
        else:
            self._dbg(1, "tarfile: %s" % e)
def extractfile(self, member):
    """Extract a member from the archive as a file object. `member' may be
    a filename or a TarInfo object. If `member' is a regular file or a
    link, an io.BufferedReader object is returned. Otherwise, None is
    returned.
    """
    self._check("r")
    if isinstance(member, str):
        tarinfo = self.getmember(member)
    else:
        tarinfo = member
    if tarinfo.isreg() or tarinfo.type not in SUPPORTED_TYPES:
        # Members with unknown types are treated as regular files.
        return self.fileobject(self, tarinfo)
    elif tarinfo.islnk() or tarinfo.issym():
        if isinstance(self.fileobj, _Stream):
            # A small but ugly workaround for the case that someone tries
            # to extract a (sym)link as a file-object from a non-seekable
            # stream of tar blocks.
            raise StreamError("cannot extract (sym)link as file object")
        else:
            # A (sym)link's file object is its target's file object.
            return self.extractfile(self._find_link_target(tarinfo))
    else:
        # If there's no data associated with the member (directory, chrdev,
        # blkdev, etc.), return None instead of a file object.
        return None
def _extract_member(self, tarinfo, targetpath, set_attrs=True):
    """Extract the TarInfo object tarinfo to a physical
    file called targetpath.

    Dispatches on the member type to the appropriate make*() method,
    then optionally restores owner, mode and mtime.
    """
    # Fetch the TarInfo object for the given name
    # and build the destination pathname, replacing
    # forward slashes to platform specific separators.
    targetpath = targetpath.rstrip("/")
    targetpath = targetpath.replace("/", os.sep)
    # Create all upper directories.
    upperdirs = os.path.dirname(targetpath)
    if upperdirs and not os.path.exists(upperdirs):
        # Create directories that are not part of the archive with
        # default permissions.
        os.makedirs(upperdirs)
    if tarinfo.islnk() or tarinfo.issym():
        self._dbg(1, "%s -> %s" % (tarinfo.name, tarinfo.linkname))
    else:
        self._dbg(1, tarinfo.name)
    # Type dispatch: the final `else` treats anything unexpected as a
    # regular file, mirroring extractfile()'s behaviour.
    if tarinfo.isreg():
        self.makefile(tarinfo, targetpath)
    elif tarinfo.isdir():
        self.makedir(tarinfo, targetpath)
    elif tarinfo.isfifo():
        self.makefifo(tarinfo, targetpath)
    elif tarinfo.ischr() or tarinfo.isblk():
        self.makedev(tarinfo, targetpath)
    elif tarinfo.islnk() or tarinfo.issym():
        self.makelink(tarinfo, targetpath)
    elif tarinfo.type not in SUPPORTED_TYPES:
        self.makeunknown(tarinfo, targetpath)
    else:
        self.makefile(tarinfo, targetpath)
    if set_attrs:
        self.chown(tarinfo, targetpath)
        if not tarinfo.issym():
            # chmod/utime on a symlink would affect its target.
            self.chmod(tarinfo, targetpath)
            self.utime(tarinfo, targetpath)
#--------------------------------------------------------------------------
# Below are the different file methods. They are called via
# _extract_member() when extract() is called. They can be replaced in a
# subclass to implement other functionality.
def makedir(self, tarinfo, targetpath):
    """Create the directory *targetpath*; an existing one is accepted."""
    try:
        # Deliberately restrictive permissions here -- the member's real
        # mode is applied later by _extract_member().
        os.mkdir(targetpath, 0o700)
    except FileExistsError:
        pass
def makefile(self, tarinfo, targetpath):
    """Make a file called targetpath.

    Copies tarinfo.size bytes of member data from the archive's file
    object into targetpath, honouring sparse maps if present.
    """
    source = self.fileobj
    # Position the archive file object at the member's data.
    source.seek(tarinfo.offset_data)
    with bltn_open(targetpath, "wb") as target:
        if tarinfo.sparse is not None:
            # Sparse member: write each data segment at its offset, then
            # extend/trim the file to the logical size so holes stay holes.
            for offset, size in tarinfo.sparse:
                target.seek(offset)
                copyfileobj(source, target, size)
        else:
            copyfileobj(source, target, tarinfo.size)
        target.seek(tarinfo.size)
        target.truncate()
def makeunknown(self, tarinfo, targetpath):
    """Extract a member of unrecognized type as a plain file and log it."""
    self.makefile(tarinfo, targetpath)
    self._dbg(1, "tarfile: Unknown file type %r, "
                 "extracted as regular file." % tarinfo.type)
def makefifo(self, tarinfo, targetpath):
    """Create a FIFO at *targetpath*; raise ExtractError when unsupported."""
    if not hasattr(os, "mkfifo"):
        raise ExtractError("fifo not supported by system")
    os.mkfifo(targetpath)
def makedev(self, tarinfo, targetpath):
    """Recreate a character or block device node at *targetpath*."""
    if not (hasattr(os, "mknod") and hasattr(os, "makedev")):
        raise ExtractError("special devices not supported by system")
    # Combine the member's permission bits with the device-type flag.
    devtype = stat.S_IFBLK if tarinfo.isblk() else stat.S_IFCHR
    os.mknod(targetpath, tarinfo.mode | devtype,
             os.makedev(tarinfo.devmajor, tarinfo.devminor))
def makelink(self, tarinfo, targetpath):
    """Make a (symbolic) link called targetpath. If it cannot be created
    (platform limitation), we try to make a copy of the referenced file
    instead of a link.
    """
    try:
        # For systems that support symbolic and hard links.
        if tarinfo.issym():
            os.symlink(tarinfo.linkname, targetpath)
        else:
            # See extract(): _link_target was prepared there.
            if os.path.exists(tarinfo._link_target):
                os.link(tarinfo._link_target, targetpath)
            else:
                # Hard-link target not on disk: extract the referenced
                # member's data directly to targetpath instead.
                self._extract_member(self._find_link_target(tarinfo),
                                     targetpath)
    except symlink_exception:
        # Platform cannot create links at all; fall back to copying the
        # referenced member's content.
        try:
            self._extract_member(self._find_link_target(tarinfo),
                                 targetpath)
        except KeyError:
            raise ExtractError("unable to resolve link inside archive")
def chown(self, tarinfo, targetpath):
    """Set owner of targetpath according to tarinfo.

    Only effective when running as root; otherwise this is a silent
    no-op. Raises ExtractError if the ownership change fails.
    """
    if pwd and hasattr(os, "geteuid") and os.geteuid() == 0:
        # We have to be root to do so.
        # Resolve names first, falling back to the numeric ids stored
        # in the archive when the name is unknown on this system.
        try:
            g = grp.getgrnam(tarinfo.gname)[2]
        except KeyError:
            g = tarinfo.gid
        try:
            u = pwd.getpwnam(tarinfo.uname)[2]
        except KeyError:
            u = tarinfo.uid
        try:
            if tarinfo.issym() and hasattr(os, "lchown"):
                # Change ownership of the link itself, not its target.
                os.lchown(targetpath, u, g)
            else:
                if sys.platform != "os2emx":
                    os.chown(targetpath, u, g)
        except EnvironmentError as e:
            # Chain the OS error so the real cause stays diagnosable
            # (the original discarded `e`).
            raise ExtractError("could not change owner") from e
def chmod(self, tarinfo, targetpath):
    """Set file permissions of targetpath according to tarinfo.

    No-op on platforms without os.chmod. Raises ExtractError on failure.
    """
    if hasattr(os, 'chmod'):
        try:
            os.chmod(targetpath, tarinfo.mode)
        except EnvironmentError as e:
            # Chain the OS error so the real cause stays diagnosable
            # (the original discarded `e`).
            raise ExtractError("could not change mode") from e
def utime(self, tarinfo, targetpath):
    """Set modification time of targetpath according to tarinfo.

    No-op on platforms without os.utime. Raises ExtractError on failure.
    """
    if not hasattr(os, 'utime'):
        return
    try:
        # atime is set to mtime as well -- tar archives store only mtime.
        os.utime(targetpath, (tarinfo.mtime, tarinfo.mtime))
    except EnvironmentError as e:
        # Chain the OS error so the real cause stays diagnosable
        # (the original discarded `e`).
        raise ExtractError("could not change modification time") from e
#--------------------------------------------------------------------------
def next(self):
    """Return the next member of the archive as a TarInfo object, when
    TarFile is opened for reading. Return None if there is no more
    available.
    """
    self._check("ra")
    # A member may have been read ahead (e.g. by open()); hand it out.
    if self.firstmember is not None:
        m = self.firstmember
        self.firstmember = None
        return m
    # Read the next block.
    self.fileobj.seek(self.offset)
    tarinfo = None
    while True:
        try:
            tarinfo = self.tarinfo.fromtarfile(self)
        except EOFHeaderError as e:
            # Zero-filled block: with ignore_zeros, skip it and retry;
            # otherwise fall through to `break` and return None (EOF).
            if self.ignore_zeros:
                self._dbg(2, "0x%X: %s" % (self.offset, e))
                self.offset += BLOCKSIZE
                continue
        except InvalidHeaderError as e:
            if self.ignore_zeros:
                self._dbg(2, "0x%X: %s" % (self.offset, e))
                self.offset += BLOCKSIZE
                continue
            elif self.offset == 0:
                # A bad very first header means this is not a tar file.
                raise ReadError(str(e))
        except EmptyHeaderError:
            if self.offset == 0:
                raise ReadError("empty file")
        except TruncatedHeaderError as e:
            if self.offset == 0:
                raise ReadError(str(e))
        except SubsequentHeaderError as e:
            # Corruption after valid members is always fatal.
            raise ReadError(str(e))
        break
    if tarinfo is not None:
        self.members.append(tarinfo)
    else:
        # No more members: mark the member list as fully loaded.
        self._loaded = True
    return tarinfo
#--------------------------------------------------------------------------
# Little helper methods:
def _getmember(self, name, tarinfo=None, normalize=False):
    """Find an archive member by name from bottom to top.
    If tarinfo is given, it is used as the starting point.
    """
    # Ensure that all members have been loaded.
    members = self.getmembers()
    # Restrict the search to entries that precede *tarinfo*.
    if tarinfo is not None:
        members = members[:members.index(tarinfo)]
    if normalize:
        name = os.path.normpath(name)
    # Scan newest-first so later duplicates shadow earlier ones.
    for member in reversed(members):
        candidate = os.path.normpath(member.name) if normalize else member.name
        if candidate == name:
            return member
def _load(self):
    """Read through the entire archive file and look for readable
    members.
    """
    # Drain next() until it signals end-of-archive, then mark the
    # member list as complete.
    while self.next() is not None:
        pass
    self._loaded = True
def _check(self, mode=None):
    """Check if TarFile is still open, and if the operation's mode
    corresponds to TarFile's mode.
    """
    if self.closed:
        raise IOError("%s is closed" % self.__class__.__name__)
    if mode is None or self.mode in mode:
        return
    raise IOError("bad operation for mode %r" % self.mode)
def _find_link_target(self, tarinfo):
    """Find the target member of a symlink or hardlink member in the
    archive.
    """
    if tarinfo.issym():
        # Symlink names are relative to the link's own directory; such a
        # target may appear anywhere, so search the entire archive.
        linkname = "/".join(filter(None, (os.path.dirname(tarinfo.name), tarinfo.linkname)))
        limit = None
    else:
        # A hard link can only reference an already-archived file, so
        # limit the search to the members before the link itself.
        linkname, limit = tarinfo.linkname, tarinfo
    member = self._getmember(linkname, tarinfo=limit, normalize=True)
    if member is None:
        raise KeyError("linkname %r not found" % linkname)
    return member
def __iter__(self):
    """Provide an iterator object.
    """
    # Once fully loaded, iterate the cached list; otherwise stream
    # members lazily through TarIter.
    return iter(self.members) if self._loaded else TarIter(self)
def _dbg(self, level, msg):
    """Write debugging output to sys.stderr.
    """
    # Messages above the configured verbosity are dropped.
    if level > self.debug:
        return
    print(msg, file=sys.stderr)
def __enter__(self):
    """Context-manager entry: verify the archive is still open and
    return it."""
    self._check()
    return self
def __exit__(self, type, value, traceback):
    """Context-manager exit: close normally on success; on error, only
    close the underlying file object we own."""
    if type is None:
        self.close()
    else:
        # An exception occurred. We must not call close() because
        # it would try to write end-of-archive blocks and padding.
        if not self._extfileobj:
            self.fileobj.close()
        self.closed = True
# class TarFile
class TarIter:
    """Iterator over the members of a TarFile.

        for tarinfo in TarFile(...):
            suite...
    """

    def __init__(self, tarfile):
        """Remember the TarFile and start at the first member."""
        self.tarfile = tarfile
        self.index = 0

    def __iter__(self):
        """An iterator is its own iterator."""
        return self

    def __next__(self):
        """Return the next item using TarFile's next() method.
        When all members have been read, set TarFile as _loaded.
        """
        tf = self.tarfile
        # Fix for SF #1100429: Under rare circumstances it can
        # happen that getmembers() is called during iteration,
        # which will cause TarIter to stop prematurely.
        if self.index == 0 and tf.firstmember is not None:
            tarinfo = tf.next()
        elif self.index < len(tf.members):
            # Already-parsed member: serve it from the cache.
            tarinfo = tf.members[self.index]
        elif not tf._loaded:
            # Not exhausted yet: pull the next member from the archive.
            tarinfo = tf.next()
            if not tarinfo:
                tf._loaded = True
                raise StopIteration
        else:
            raise StopIteration
        self.index += 1
        return tarinfo
#--------------------
# exported functions
#--------------------
def is_tarfile(name):
    """Return True if name points to a tar archive that we
    are able to handle, else return False.
    """
    # Opening validates the header; any TarError means "not a tar file".
    try:
        open(name).close()
    except TarError:
        return False
    return True
bltn_open = open  # keep a reference to the builtin open() for internal file I/O
open = TarFile.open  # module-level open() becomes the TarFile constructor
| gpl-2.0 |
ESSolutions/ESSArch_Core | ESSArch_Core/install/install_default_config.py | 1 | 31143 | """
ESSArch is an open source archiving and digital preservation system
ESSArch
Copyright (C) 2005-2019 ES Solutions AB
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <https://www.gnu.org/licenses/>.
Contact information:
Web - http://www.essolutions.se
Email - essarch@essolutions.se
"""
import click
import django
django.setup()
from pydoc import locate # noqa isort:skip
from django.conf import settings # noqa isort:skip
from django.contrib.auth import get_user_model # noqa isort:skip
from django.contrib.auth.models import Permission # noqa isort:skip
from groups_manager.models import GroupType # noqa isort:skip
from elasticsearch.client import IngestClient # noqa isort:skip
from elasticsearch_dsl.connections import get_connection # noqa isort:skip
from ESSArch_Core.search import alias_migration # noqa isort:skip
from ESSArch_Core.auth.models import Group, GroupMemberRole # noqa isort:skip
from ESSArch_Core.configuration.models import EventType, Feature, Parameter, Path, Site, StoragePolicy # noqa isort:skip
from ESSArch_Core.storage.models import ( # noqa isort:skip
DISK,
StorageMedium,
StorageMethod,
StorageMethodTargetRelation,
StorageTarget,
)
# Resolve the active user model so a custom AUTH_USER_MODEL is honoured
# (safe here because django.setup() has already run above).
User = get_user_model()
def installDefaultConfiguration():
    """Run every default-installation step in order; return 0 on success.

    A step with a message prints it first; installDefaultFeatures does
    its own reporting via click, so it carries no message here.
    """
    steps = [
        ("Installing event types...", installDefaultEventTypes),
        (None, installDefaultFeatures),
        ("Installing parameters...", installDefaultParameters),
        ("Installing site...", installDefaultSite),
        ("Installing users, groups and permissions...", installDefaultUsers),
        ("\nInstalling paths...", installDefaultPaths),
        ("\nInstalling storage policies...", installDefaultStoragePolicies),
        ("\nInstalling storage methods...", installDefaultStorageMethods),
        ("\nInstalling storage targets...", installDefaultStorageTargets),
        ("\nInstalling storage method target relations...",
         installDefaultStorageMethodTargetRelations),
        ("\nInstalling Elasticsearch pipelines...", installPipelines),
        ("\nInstalling search indices...", installSearchIndices),
    ]
    for message, install_step in steps:
        if message is not None:
            print(message)
        install_step()
    return 0
def installDefaultFeatures():
    """Create (or keep) the default Feature rows and report their state."""
    click.echo('Installing default features:')
    defaults = [
        ('archival descriptions', True),
        ('receive', True),
        ('transfer', False),
    ]
    for feature_name, enabled in defaults:
        click.secho('- {}... '.format(feature_name), nl=False)
        # get_or_create: an existing row keeps its current enabled flag.
        feature, _ = Feature.objects.get_or_create(
            name=feature_name,
            defaults={
                'enabled': enabled,
                'description': '',
            }
        )
        # Report the state actually stored in the database.
        click.secho('enabled' if feature.enabled else 'disabled',
                    fg='green' if feature.enabled else 'red')
def installDefaultEventTypes():
    """Create or update the default PREMIS-style EventType rows.

    Keys are the human-readable event details; values carry the numeric
    eventType code and the category (information package vs delivery).
    Returns 0 on success.
    """
    ip_cat = EventType.CATEGORY_INFORMATION_PACKAGE
    delivery_cat = EventType.CATEGORY_DELIVERY
    dct = {
        'Prepared IP': {'eventType': '10100', 'category': ip_cat},
        'Created IP root directory': {'eventType': '10200', 'category': ip_cat},
        'Created physical model': {'eventType': '10300', 'category': ip_cat},
        'Created SIP': {'eventType': '10400', 'category': ip_cat},
        'Submitted SIP': {'eventType': '10500', 'category': ip_cat},
        'Delivery received': {'eventType': '20100', 'category': delivery_cat},
        'Delivery checked': {'eventType': '20200', 'category': delivery_cat},
        'Delivery registered': {'eventType': '20300', 'category': delivery_cat},
        'Delivery registered in journal system': {'eventType': '20310', 'category': delivery_cat},
        'Delivery registered in archival information system': {'eventType': '20320', 'category': delivery_cat},
        'Delivery receipt sent': {'eventType': '20400', 'category': delivery_cat},
        'Delivery ready for hand over': {'eventType': '20500', 'category': delivery_cat},
        'Delivery transferred': {'eventType': '20600', 'category': delivery_cat},
        'Delivery approved': {'eventType': '20700', 'category': delivery_cat},
        'Delivery rejected': {'eventType': '20800', 'category': delivery_cat},
        'Received the IP for long-term preservation': {'eventType': '30000', 'category': ip_cat},
        'Verified IP against archive information system': {'eventType': '30100', 'category': ip_cat},
        'Verified IP is approved for long-term preservation': {'eventType': '30110', 'category': ip_cat},
        'Created AIP': {'eventType': '30200', 'category': ip_cat},
        'Preserved AIP': {'eventType': '30300', 'category': ip_cat},
        'Cached AIP': {'eventType': '30310', 'category': ip_cat},
        'Removed the source to the SIP': {'eventType': '30400', 'category': ip_cat},
        'Removed the source to the AIP': {'eventType': '30410', 'category': ip_cat},
        'Ingest order completed': {'eventType': '30500', 'category': ip_cat},
        'Ingest order accepted': {'eventType': '30510', 'category': ip_cat},
        'Ingest order requested': {'eventType': '30520', 'category': ip_cat},
        'Created DIP': {'eventType': '30600', 'category': ip_cat},
        'DIP order requested': {'eventType': '30610', 'category': ip_cat},
        'DIP order accepted': {'eventType': '30620', 'category': ip_cat},
        'DIP order completed': {'eventType': '30630', 'category': ip_cat},
        'Moved to workarea': {'eventType': '30700', 'category': ip_cat},
        'Moved from workarea': {'eventType': '30710', 'category': ip_cat},
        'Moved to gate from workarea': {'eventType': '30720', 'category': ip_cat},
        'Unmounted the tape from drive in robot': {'eventType': '40100', 'category': ip_cat},
        'Mounted the tape in drive in robot': {'eventType': '40200', 'category': ip_cat},
        'Deactivated storage medium': {'eventType': '40300', 'category': ip_cat},
        'Quick media verification order requested': {'eventType': '40400', 'category': ip_cat},
        'Quick media verification order accepted': {'eventType': '40410', 'category': ip_cat},
        'Quick media verification order completed': {'eventType': '40420', 'category': ip_cat},
        'Storage medium delivered': {'eventType': '40500', 'category': ip_cat},
        'Storage medium received': {'eventType': '40510', 'category': ip_cat},
        'Storage medium placed': {'eventType': '40520', 'category': ip_cat},
        'Storage medium collected': {'eventType': '40530', 'category': ip_cat},
        'Storage medium robot': {'eventType': '40540', 'category': ip_cat},
        'Data written to disk storage method': {'eventType': '40600', 'category': ip_cat},
        'Data read from disk storage method': {'eventType': '40610', 'category': ip_cat},
        'Data written to tape storage method': {'eventType': '40700', 'category': ip_cat},
        'Data read from tape storage method': {'eventType': '40710', 'category': ip_cat},
        # NOTE(review): trailing space in this key is preserved because it is
        # stored as eventDetail -- confirm whether it is intentional.
        'Calculated checksum ': {'eventType': '50000', 'category': ip_cat},
        'Identified format': {'eventType': '50100', 'category': ip_cat},
        'Validated file format': {'eventType': '50200', 'category': ip_cat},
        'Validated XML file': {'eventType': '50210', 'category': ip_cat},
        'Validated logical representation against physical representation': {'eventType': '50220', 'category': ip_cat},
        'Validated checksum': {'eventType': '50230', 'category': ip_cat},
        'Compared XML files': {'eventType': '50240', 'category': ip_cat},
        'Virus control done': {'eventType': '50300', 'category': ip_cat},
        'Created TAR': {'eventType': '50400', 'category': ip_cat},
        'Created ZIP': {'eventType': '50410', 'category': ip_cat},
        'Updated IP status': {'eventType': '50500', 'category': ip_cat},
        'Updated IP path': {'eventType': '50510', 'category': ip_cat},
        'Generated XML file': {'eventType': '50600', 'category': ip_cat},
        'Appended events': {'eventType': '50610', 'category': ip_cat},
        'Copied schemas': {'eventType': '50620', 'category': ip_cat},
        'Parsed events file': {'eventType': '50630', 'category': ip_cat},
        'Uploaded file': {'eventType': '50700', 'category': ip_cat},
        'Deleted files': {'eventType': '50710', 'category': ip_cat},
        'Unpacked object': {'eventType': '50720', 'category': ip_cat},
        'Converted RES to PREMIS': {'eventType': '50730', 'category': ip_cat},
        'Deleted IP': {'eventType': '50740', 'category': ip_cat},
        'Conversion': {'eventType': '50750', 'category': ip_cat},
        'Action tool': {'eventType': '50760', 'category': ip_cat},
    }
    for key, val in dct.items():
        print('-> %s: %s' % (key, val['eventType']))
        # update_or_create keyed on the numeric code so re-running the
        # installer refreshes detail/category on existing rows.
        EventType.objects.update_or_create(
            eventType=val['eventType'],
            defaults={
                'eventDetail': key,
                'category': val['category'],
            },
        )
    return 0
def installDefaultParameters():
    """Create the default Parameter rows if they do not exist.

    Existing rows keep their current value (get_or_create). Returns 0.
    """
    site_name = 'Site-X'
    dct = {
        'agent_identifier_type': 'ESS',
        'agent_identifier_value': 'ESS',
        'event_identifier_type': 'ESS',
        'linking_agent_identifier_type': 'ESS',
        'linking_object_identifier_type': 'ESS',
        'object_identifier_type': 'ESS',
        'related_object_identifier_type': 'ESS',
        'site_name': site_name,
        'medium_location': 'Media_%s' % site_name,
    }
    # Iterate items() instead of keys to avoid repeated dict lookups.
    for entity, value in dct.items():
        print('-> %s: %s' % (entity, value))
        Parameter.objects.get_or_create(entity=entity, defaults={'value': value})
    return 0
def installDefaultSite():
    """Ensure a Site row named 'ESSArch' exists (idempotent)."""
    Site.objects.get_or_create(name='ESSArch')
def installDefaultUsers():
    """Create the default organization, roles (user/admin/sysadmin) with
    their permission sets, and four default accounts (superuser, user,
    admin, sysadmin).

    Idempotent via get_or_create; passwords are only set when the user
    row is newly created. Returns 0.
    """
    #####################################
    # Groups and permissions
    organization, _ = GroupType.objects.get_or_create(label="organization")
    default_org, _ = Group.objects.get_or_create(name='Default', group_type=organization)

    # Each permission is a [codename, app_label, model] triple resolved
    # against django.contrib.auth's Permission table below.
    role_user, _ = GroupMemberRole.objects.get_or_create(codename='user')
    permission_list_user = [
        # ---- app: ip ---- model: informationpackage
        ['view_informationpackage', 'ip', 'informationpackage'],  # Can view information packages
        ['add_informationpackage', 'ip', 'informationpackage'],  # Can add Information Package
        ['delete_informationpackage', 'ip', 'informationpackage'],  # Can delete Information Package (Ingest)
        ['can_upload', 'ip', 'informationpackage'],  # Can upload files to IP
        ['set_uploaded', 'ip', 'informationpackage'],  # Can set IP as uploaded
        ['create_sip', 'ip', 'informationpackage'],  # Can create SIP
        ['submit_sip', 'ip', 'informationpackage'],  # Can submit SIP
        ['prepare_ip', 'ip', 'informationpackage'],  # Can prepare IP
        ['receive', 'ip', 'informationpackage'],  # Can receive IP
        ['preserve', 'ip', 'informationpackage'],  # Can preserve IP (Ingest)
        ['preserve_dip', 'ip', 'informationpackage'],  # Can preserve DIP (Access)
        ['get_from_storage', 'ip', 'informationpackage'],  # Can get extracted IP from storage (Access)
        ['get_tar_from_storage', 'ip', 'informationpackage'],  # Can get packaged IP from storage (Access)
        ['add_to_ingest_workarea', 'ip', 'informationpackage'],  # Can add IP to ingest workarea "readonly" (Ing)
        ['diff-check', 'ip', 'informationpackage'],  # Can diff-check IP (?)
        # ---- app: ip ---- model: workarea
        ['move_from_ingest_workarea', 'ip', 'workarea'],  # Can move IP from ingest workarea (Ingest)
        ['move_from_access_workarea', 'ip', 'workarea'],  # Can move IP from access workarea (Access)
        ['preserve_from_ingest_workarea', 'ip', 'workarea'],  # Can preserve IP from ingest workarea (Ingest)
        ['preserve_from_access_workarea', 'ip', 'workarea'],  # Can preserve IP from access workarea (Access)
        # ---- app: ip ---- model: order
        ['prepare_order', 'ip', 'order'],  # Can prepare order (Access)
        # ---- app: tags ---- model: Tag
        ['search', 'tags', 'tag'],  # Can search
        ['transfer_sip', 'ip', 'informationpackage'],  # Can transfer SIP
        # ---- app: WorkflowEngine ---- model: processtask
        # ['can_retry','WorkflowEngine','processtask'], # Can retry tasks (other)
    ]
    for p in permission_list_user:
        p_obj = Permission.objects.get(
            codename=p[0], content_type__app_label=p[1],
            content_type__model=p[2],
        )
        role_user.permissions.add(p_obj)

    role_admin, _ = GroupMemberRole.objects.get_or_create(codename='admin')
    permission_list_admin = [
        # ---- app: profiles ---- model: submissionagreement
        ['add_submissionagreement', 'profiles', 'submissionagreement'],  # Can add Submission Agreement (Import)
        ['change_submissionagreement', 'profiles', 'submissionagreement'],  # Can change Submission Agreement
        # ---- app: profiles ---- model: profile
        ['add_profile', 'profiles', 'profile'],  # Can add Profile (Import/Administration)
        ['change_profile', 'profiles', 'profile'],  # Can change Profile
        # ---- app: WorkflowEngine ---- model: processtask
        # ['can_retry','WorkflowEngine','processtask'], # Can retry tasks (other)
        # ---- app: ip ---- model: informationpackage
        ['get_from_storage_as_new', 'ip', 'informationpackage'],  # Can get IP "as new" from storage (Access)
        ['add_to_ingest_workarea_as_new', 'ip', 'informationpackage'],
        # Can add IP as new generation to ingest workarea (Ingest)
        # ---- app: ip ---- model: order
        ['prepare_order', 'ip', 'order'],  # Can prepare order (Access)
        # ---- app: storage ---- model: storageobject
        ['storage_migration', 'storage', 'storageobject'],  # Storage migration (Administration)
        ['storage_maintenance', 'storage', 'storageobject'],  # Storage maintenance (Administration)
        ['storage_management', 'storage', 'storageobject'],  # Storage management (Administration)
        # ---- app: maintenance ---- model: AppraisalTemplate
        ['add_appraisaltemplate', 'maintenance', 'appraisaltemplate'],  # Can add appraisal template (Admin)
        ['change_appraisaltemplate', 'maintenance', 'appraisaltemplate'],  # Can change appraisal template (Admin)
        # ---- app: maintenance ---- model: ConversionRule
        ['add_conversiontemplate', 'maintenance', 'conversiontemplate'],  # Can add conversion template (Admin)
        ['change_conversiontemplate', 'maintenance', 'conversiontemplate'],  # Can change conversion template (Admin)
        # ---- app: tags ---- model: Tag
        ['create_archive', 'tags', 'tag'],  # Can create archives
    ]
    for p in permission_list_admin:
        p_obj = Permission.objects.get(
            codename=p[0], content_type__app_label=p[1],
            content_type__model=p[2],
        )
        role_admin.permissions.add(p_obj)

    role_sysadmin, _ = GroupMemberRole.objects.get_or_create(codename='sysadmin')
    permission_list_sysadmin = [
        # ---- app: auth ---- model: group
        ['add_group', 'auth', 'group'],  # Can add group
        ['change_group', 'auth', 'group'],  # Can change group
        ['delete_group', 'auth', 'group'],  # Can delete group
        ['view_group', 'auth', 'group'],  # Can view group
        # ---- app: auth ---- model: user
        ['add_user', 'auth', 'user'],  # Can add user
        ['change_user', 'auth', 'user'],  # Can change user
        ['delete_user', 'auth', 'user'],  # Can delete user
        ['view_user', 'auth', 'user'],  # Can view user
        # ---- app: essauth ---- model: grouomemberrole
        ['add_groupmemberrole', 'essauth', 'groupmemberrole'],  # Can add role
        ['change_groupmemberrole', 'essauth', 'groupmemberrole'],  # Can change role
        ['delete_groupmemberrole', 'essauth', 'groupmemberrole'],  # Can delete role
        ['assign_groupmemberrole', 'essauth', 'groupmemberrole'],  # Can assign role
        # ---- app: configuration ---- model: parameter
        ['add_parameter', 'configuration', 'parameter'],  # Can add parameter
        ['change_parameter', 'configuration', 'parameter'],  # Can change parameter
        ['delete_parameter', 'configuration', 'parameter'],  # Can delete parameter
        # ---- app: configuration ---- model: path
        ['add_path', 'configuration', 'path'],  # Can add path
        ['change_path', 'configuration', 'path'],  # Can change path
        ['delete_path', 'configuration', 'path'],  # Can delete path
        # ---- app: configuration ---- model: eventtype
        ['add_eventtype', 'configuration', 'eventtype'],  # Can add eventtype
        ['change_eventtype', 'configuration', 'eventtype'],  # Can change eventtype
        ['delete_eventtype', 'configuration', 'eventtype'],  # Can delete eventtype
        # ---- app: profiles ---- model: profile
        ['add_profile', 'profiles', 'profile'],  # Can add profile
        ['change_profile', 'profiles', 'profile'],  # Can change profile
        ['delete_profile', 'profiles', 'profile'],  # Can delete profile
        # ---- app: profiles ---- model: submissionagreement
        ['add_submissionagreement', 'profiles', 'submissionagreement'],  # Can add submissionagreement
        ['change_submissionagreement', 'profiles', 'submissionagreement'],  # Can change submissionagreement
        ['delete_submissionagreement', 'profiles', 'submissionagreement'],  # Can delete submissionagreement
        # ---- app: groups_manager ---- model: grouptype
        ['add_grouptype', 'groups_manager', 'grouptype'],  # Can add grouptype
        ['change_grouptype', 'groups_manager', 'grouptype'],  # Can change grouptype
        ['delete_grouptype', 'groups_manager', 'grouptype'],  # Can delete grouptype
        # ---- app: configuration ---- model: storagepolicy
        ['add_storagepolicy', 'configuration', 'storagepolicy'],  # Can add storagepolicy
        ['change_storagepolicy', 'configuration', 'storagepolicy'],  # Can change storagepolicy
        ['delete_storagepolicy', 'configuration', 'storagepolicy'],  # Can delete storagepolicy
        # ---- app: storage ---- model: storagemethod
        ['add_storagemethod', 'storage', 'storagemethod'],  # Can add storagemethod
        ['change_storagemethod', 'storage', 'storagemethod'],  # Can change storagemethod
        ['delete_storagemethod', 'storage', 'storagemethod'],  # Can delete storagemethod
        # ---- app: storage ---- model: storagetarget
        ['add_storagetarget', 'storage', 'storagetarget'],  # Can add storagetarget
        ['change_storagetarget', 'storage', 'storagetarget'],  # Can change storagetarget
        ['delete_storagetarget', 'storage', 'storagetarget'],  # Can delete storagetarget
        # ---- app: storage ---- model: storagemethodtargetrelation
        [
            'add_storagemethodtargetrelation', 'storage',
            'storagemethodtargetrelation'
        ],  # Can add storagemethodtargetrelation
        [
            'change_storagemethodtargetrelation', 'storage',
            'storagemethodtargetrelation'
        ],  # Can change storagemethodtargetrelation
        [
            'delete_storagemethodtargetrelation', 'storage',
            'storagemethodtargetrelation'
        ],  # Can delete storagemethodtargetrelation
        # ---- app: storage ---- model: storageobject
        ['storage_migration', 'storage', 'storageobject'],  # Storage migration (Administration)
        ['storage_maintenance', 'storage', 'storageobject'],  # Storage maintenance (Administration)
        ['storage_management', 'storage', 'storageobject'],  # Storage management (Administration)
        # ---- app: storage ---- model: ioqueue
        ['change_ioqueue', 'storage', 'ioqueue'],  # Can change ioqueue
        ['delete_ioqueue', 'storage', 'ioqueue'],  # Can delete ioqueue
        # ---- app: storage ---- model: robot
        ['add_robot', 'storage', 'robot'],  # Can add robot
        ['change_robot', 'storage', 'robot'],  # Can change robot
        ['delete_robot', 'storage', 'robot'],  # Can delete robot
        # ---- app: storage ---- model: robotqueue
        ['change_robotqueue', 'storage', 'robotqueue'],  # Can change robotqueue
        ['delete_robotqueue', 'storage', 'robotqueue'],  # Can delete robotqueue
        # ---- app: storage ---- model: tapedrive
        ['add_tapedrive', 'storage', 'tapedrive'],  # Can add tapedrive
        ['change_tapedrive', 'storage', 'tapedrive'],  # Can change tapedrive
        ['delete_tapedrive', 'storage', 'tapedrive'],  # Can delete tapedrive
    ]
    for p in permission_list_sysadmin:
        p_obj = Permission.objects.get(
            codename=p[0], content_type__app_label=p[1],
            content_type__model=p[2],
        )
        role_sysadmin.permissions.add(p_obj)

    #####################################
    # Users
    # NOTE(review): default passwords equal the usernames -- these are
    # bootstrap accounts and should be changed in production.
    user_superuser, created = User.objects.get_or_create(
        first_name='superuser', last_name='Lastname',
        username='superuser', email='superuser@essolutions.se',
    )
    if created:
        user_superuser.set_password('superuser')
        user_superuser.is_staff = True
        user_superuser.is_superuser = True
        user_superuser.save()
        default_org.add_member(user_superuser.essauth_member)

    user_user, created = User.objects.get_or_create(
        first_name='user', last_name='Lastname',
        username='user', email='user@essolutions.se'
    )
    if created:
        user_user.set_password('user')
        user_user.save()
        default_org.add_member(user_user.essauth_member, roles=[role_user])

    user_admin, created = User.objects.get_or_create(
        first_name='admin', last_name='Lastname',
        username='admin', email='admin@essolutions.se',
    )
    if created:
        user_admin.set_password('admin')
        user_admin.is_staff = True
        user_admin.save()
        default_org.add_member(user_admin.essauth_member, roles=[role_user, role_admin])

    user_sysadmin, created = User.objects.get_or_create(
        first_name='sysadmin', last_name='Lastname',
        username='sysadmin', email='sysadmin@essolutions.se',
    )
    if created:
        user_sysadmin.set_password('sysadmin')
        user_sysadmin.is_staff = True
        user_sysadmin.save()
        default_org.add_member(user_sysadmin.essauth_member, roles=[role_sysadmin])
    return 0
def installDefaultPaths():
    """Ensure the default ESSArch filesystem path entities exist.

    Each entry maps a ``Path.entity`` key to its default directory/file on
    disk.  ``get_or_create`` only applies ``value`` as a default for rows it
    creates, so existing Path rows keep their configured values and the
    function is safe to re-run.

    Returns:
        int: 0 on success (shell-style status, matching the other installers).
    """
    dct = {
        'mimetypes_definitionfile': '/ESSArch/config/mime.types',
        'preingest': '/ESSArch/data/preingest/packages',
        'preingest_reception': '/ESSArch/data/preingest/reception',
        'ingest': '/ESSArch/data/ingest/packages',
        'ingest_reception': '/ESSArch/data/ingest/reception',
        'ingest_transfer': '/ESSArch/data/ingest/transfer',
        'ingest_unidentified': '/ESSArch/data/ingest/uip',
        'access_workarea': '/ESSArch/data/workspace',
        'ingest_workarea': '/ESSArch/data/workspace',
        'disseminations': '/ESSArch/data/disseminations',
        'orders': '/ESSArch/data/orders',
        'verify': '/ESSArch/data/verify',
        'temp': '/ESSArch/data/temp',
        'appraisal_reports': '/ESSArch/data/reports/appraisal',
        'conversion_reports': '/ESSArch/data/reports/conversion',
        'receipts': '/ESSArch/data/receipts',
    }
    # Iterate items() once instead of iterating keys and indexing the dict
    # twice per loop body.
    for key, value in dct.items():
        print('-> %s: %s' % (key, value))
        Path.objects.get_or_create(entity=key, defaults={'value': value})
    return 0
def installDefaultStoragePolicies():
    """Create the default cache storage method, target, medium and policy.

    Idempotent: every object is fetched-or-created, and the dependent
    cache target/medium/relation records are only created the first time
    the cache storage method itself is created.

    Returns:
        int: 0 on success.
    """
    cache_method, cache_method_created = StorageMethod.objects.get_or_create(
        name='Default Cache Storage Method',
        defaults={
            'enabled': True,
            'type': DISK,
            'containers': False,
        },
    )
    if cache_method_created:
        cache_target, cache_target_created = StorageTarget.objects.get_or_create(
            name='Default Cache Storage Target 1',
            defaults={
                'status': True,
                'type': DISK,
                'target': '/ESSArch/data/store/cache',
            },
        )
        if cache_target_created:
            # A brand-new target gets a matching disk medium record.
            medium_defaults = {
                'storage_target': cache_target,
                'status': 20,
                'location': Parameter.objects.get(entity='medium_location').value,
                'location_status': 50,
                'block_size': cache_target.default_block_size,
                'format': cache_target.default_format,
                'agent': Parameter.objects.get(entity='agent_identifier_value').value,
            }
            StorageMedium.objects.get_or_create(
                medium_id='Default Cache Disk 1',
                defaults=medium_defaults,
            )
        StorageMethodTargetRelation.objects.create(
            name='Default Cache Storage Method Target Relation 1',
            status=True,
            storage_method=cache_method,
            storage_target=cache_target,
        )
    ingest = Path.objects.get(entity='ingest')
    policy, policy_created = StoragePolicy.objects.get_or_create(
        policy_id='1',
        defaults={
            'checksum_algorithm': StoragePolicy.MD5,
            'policy_name': 'default',
            'cache_storage': cache_method,
            'ingest_path': ingest,
            'receive_extract_sip': True,
            'cache_minimum_capacity': 0,
            'cache_maximum_age': 0,
        },
    )
    # Re-link when either side was just created so the M2M stays populated.
    if policy_created or cache_method_created:
        policy.storage_methods.add(cache_method)
    return 0
def installDefaultStorageMethods():
    """Create the default short- and long-term storage methods.

    Both methods are attached to the 'default' storage policy (which must
    already exist, see installDefaultStoragePolicies).

    Returns:
        int: 0 on success.
    """
    # (name, containers) — both methods are enabled DISK methods; only the
    # long-term one packages content into containers.
    method_specs = [
        ('Default Storage Method 1', False),
        ('Default Long-term Storage Method 1', True),
    ]
    methods = []
    for method_name, uses_containers in method_specs:
        method, _ = StorageMethod.objects.get_or_create(
            name=method_name,
            defaults={
                'enabled': True,
                'type': DISK,
                'containers': uses_containers,
            },
        )
        methods.append(method)
    default_policy = StoragePolicy.objects.get(policy_name='default')
    default_policy.storage_methods.add(*methods)
    return 0
def installDefaultStorageTargets():
    """Create the default short- and long-term disk storage targets.

    Each target is fetched-or-created; a matching StorageMedium record is
    attached only when the target is newly created, so re-runs leave
    existing data untouched.

    Returns:
        int: 0 on success.
    """
    _ensure_disk_target_with_medium(
        target_name='Default Storage Target 1',
        target_path='/ESSArch/data/store/disk1',
        medium_id='Default Storage Disk 1',
    )
    _ensure_disk_target_with_medium(
        target_name='Default Long-term Storage Target 1',
        target_path='/ESSArch/data/store/longterm_disk1',
        medium_id='Default Long-term Storage Disk 1',
    )
    return 0


def _ensure_disk_target_with_medium(target_name, target_path, medium_id):
    """Get or create one enabled DISK StorageTarget; attach a StorageMedium
    record for brand-new targets. Returns the target."""
    target, created = StorageTarget.objects.get_or_create(
        name=target_name,
        defaults={
            'status': True,
            'type': DISK,
            'target': target_path,
        },
    )
    if created:
        StorageMedium.objects.get_or_create(
            medium_id=medium_id,
            defaults={
                'storage_target': target,
                'status': 20,
                'location': Parameter.objects.get(entity='medium_location').value,
                'location_status': 50,
                'block_size': target.default_block_size,
                'format': target.default_format,
                'agent': Parameter.objects.get(entity='agent_identifier_value').value,
            },
        )
    return target
def installDefaultStorageMethodTargetRelations():
    """Link the default storage methods to their matching targets.

    Both the methods and the targets must already exist (created by
    installDefaultStorageMethods / installDefaultStorageTargets).

    Returns:
        int: 0 on success.
    """
    # (relation name, storage method name, storage target name)
    relation_specs = [
        ('Default Storage Method Target Relation 1',
         'Default Storage Method 1',
         'Default Storage Target 1'),
        ('Default Long-term Storage Method Target Relation 1',
         'Default Long-term Storage Method 1',
         'Default Long-term Storage Target 1'),
    ]
    for relation_name, method_name, target_name in relation_specs:
        StorageMethodTargetRelation.objects.get_or_create(
            name=relation_name,
            storage_method=StorageMethod.objects.get(name=method_name),
            storage_target=StorageTarget.objects.get(name=target_name),
            defaults={
                'status': True,
            },
        )
    return 0
def installPipelines():
    """Register the Elasticsearch ingest pipelines used when indexing documents.

    Creates two pipelines:
      * ``ingest_attachment`` — extracts text/metadata from the base64
        ``data`` field, then removes the raw field to keep stored documents
        small.
      * ``add_timestamp`` — stamps each document with an ``index_date``.
    """
    conn = get_connection()
    client = IngestClient(conn)
    # Each element of `processors` must be a single-key object (one processor
    # type per entry per the Elasticsearch ingest-pipeline API); the original
    # code packed "attachment" and "remove" into one object, which the
    # pipeline parser rejects. They run in list order: extract, then remove.
    client.put_pipeline(id='ingest_attachment', body={
        'description': "Extract attachment information",
        'processors': [
            {
                "attachment": {
                    "field": "data",
                    "indexed_chars": "-1"
                }
            },
            {
                "remove": {
                    "field": "data"
                }
            }
        ]
    })
    client.put_pipeline(id='add_timestamp', body={
        'description': "Adds an index_date timestamp",
        'processors': [
            {
                "set": {
                    "field": "index_date",
                    "value": "{{_ingest.timestamp}}",
                },
            },
        ]
    })
def installSearchIndices():
    """Set up an Elasticsearch index for every configured doctype.

    Reads settings.ELASTICSEARCH_INDEXES['default'] (index name -> dotted
    class path), resolves each class path with locate() and hands the
    resulting doctype to alias_migration.setup_index().
    """
    for _index_name, index_class in settings.ELASTICSEARCH_INDEXES['default'].items():
        doctype = locate(index_class)
        alias_migration.setup_index(doctype)
    # NOTE(review): original indentation was lost in extraction — this is
    # assumed to print once after all indices are set up; confirm it was not
    # intended per-index (inside the loop).
    print('done')
if __name__ == '__main__':
    # Script entry point: install the full default configuration in one run.
    installDefaultConfiguration()
| gpl-3.0 |
Zord13appdesa/python-for-android | python3-alpha/python3-src/Lib/msilib/schema.py | 48 | 81587 | from . import Table
# MSI database schema: one Table object per standard Windows Installer table,
# plus the module-level `tables` list consumed by msilib.
#
# Data-driven form of the original generated one-call-per-field code: each
# spec is (table_name, [(column_name, column_type_bits), ...]).  Columns are
# registered with 1-based field numbers in list order, exactly matching the
# original add_field() call sequence, and every table is bound to a
# module-level variable of the same name so the public interface of this
# module is unchanged.
_TABLE_SPECS = [
    ('_Validation', [
        ('Table', 11552), ('Column', 11552), ('Nullable', 3332),
        ('MinValue', 4356), ('MaxValue', 4356), ('KeyTable', 7679),
        ('KeyColumn', 5378), ('Category', 7456), ('Set', 7679),
        ('Description', 7679)]),
    ('ActionText', [
        ('Action', 11592), ('Description', 7936), ('Template', 7936)]),
    ('AdminExecuteSequence', [
        ('Action', 11592), ('Condition', 7679), ('Sequence', 5378)]),
    ('Condition', [
        ('Feature_', 11558), ('Level', 9474), ('Condition', 7679)]),
    ('AdminUISequence', [
        ('Action', 11592), ('Condition', 7679), ('Sequence', 5378)]),
    ('AdvtExecuteSequence', [
        ('Action', 11592), ('Condition', 7679), ('Sequence', 5378)]),
    ('AdvtUISequence', [
        ('Action', 11592), ('Condition', 7679), ('Sequence', 5378)]),
    ('AppId', [
        ('AppId', 11558), ('RemoteServerName', 7679), ('LocalService', 7679),
        ('ServiceParameters', 7679), ('DllSurrogate', 7679),
        ('ActivateAtStorage', 5378), ('RunAsInteractiveUser', 5378)]),
    ('AppSearch', [
        ('Property', 11592), ('Signature_', 11592)]),
    ('Property', [
        ('Property', 11592), ('Value', 3840)]),
    ('BBControl', [
        ('Billboard_', 11570), ('BBControl', 11570), ('Type', 3378),
        ('X', 1282), ('Y', 1282), ('Width', 1282), ('Height', 1282),
        ('Attributes', 4356), ('Text', 7986)]),
    ('Billboard', [
        ('Billboard', 11570), ('Feature_', 3366), ('Action', 7474),
        ('Ordering', 5378)]),
    ('Feature', [
        ('Feature', 11558), ('Feature_Parent', 7462), ('Title', 8000),
        ('Description', 8191), ('Display', 5378), ('Level', 1282),
        ('Directory_', 7496), ('Attributes', 1282)]),
    ('Binary', [
        ('Name', 11592), ('Data', 2304)]),
    ('BindImage', [
        ('File_', 11592), ('Path', 7679)]),
    ('File', [
        ('File', 11592), ('Component_', 3400), ('FileName', 4095),
        ('FileSize', 260), ('Version', 7496), ('Language', 7444),
        ('Attributes', 5378), ('Sequence', 1282)]),
    ('CCPSearch', [
        ('Signature_', 11592)]),
    ('CheckBox', [
        ('Property', 11592), ('Value', 7488)]),
    ('Class', [
        ('CLSID', 11558), ('Context', 11552), ('Component_', 11592),
        ('ProgId_Default', 7679), ('Description', 8191), ('AppId_', 7462),
        ('FileTypeMask', 7679), ('Icon_', 7496), ('IconIndex', 5378),
        ('DefInprocHandler', 7456), ('Argument', 7679), ('Feature_', 3366),
        ('Attributes', 5378)]),
    ('Component', [
        ('Component', 11592), ('ComponentId', 7462), ('Directory_', 3400),
        ('Attributes', 1282), ('Condition', 7679), ('KeyPath', 7496)]),
    ('Icon', [
        ('Name', 11592), ('Data', 2304)]),
    ('ProgId', [
        ('ProgId', 11775), ('ProgId_Parent', 7679), ('Class_', 7462),
        ('Description', 8191), ('Icon_', 7496), ('IconIndex', 5378)]),
    ('ComboBox', [
        ('Property', 11592), ('Order', 9474), ('Value', 3392),
        ('Text', 8000)]),
    ('CompLocator', [
        ('Signature_', 11592), ('ComponentId', 3366), ('Type', 5378)]),
    ('Complus', [
        ('Component_', 11592), ('ExpType', 13570)]),
    ('Directory', [
        ('Directory', 11592), ('Directory_Parent', 7496),
        ('DefaultDir', 4095)]),
    ('Control', [
        ('Dialog_', 11592), ('Control', 11570), ('Type', 3348), ('X', 1282),
        ('Y', 1282), ('Width', 1282), ('Height', 1282), ('Attributes', 4356),
        ('Property', 7474), ('Text', 7936), ('Control_Next', 7474),
        ('Help', 7986)]),
    ('Dialog', [
        ('Dialog', 11592), ('HCentering', 1282), ('VCentering', 1282),
        ('Width', 1282), ('Height', 1282), ('Attributes', 4356),
        ('Title', 8064), ('Control_First', 3378), ('Control_Default', 7474),
        ('Control_Cancel', 7474)]),
    ('ControlCondition', [
        ('Dialog_', 11592), ('Control_', 11570), ('Action', 11570),
        ('Condition', 11775)]),
    ('ControlEvent', [
        ('Dialog_', 11592), ('Control_', 11570), ('Event', 11570),
        ('Argument', 11775), ('Condition', 15871), ('Ordering', 5378)]),
    ('CreateFolder', [
        ('Directory_', 11592), ('Component_', 11592)]),
    ('CustomAction', [
        ('Action', 11592), ('Type', 1282), ('Source', 7496),
        ('Target', 7679)]),
    ('DrLocator', [
        ('Signature_', 11592), ('Parent', 15688), ('Path', 15871),
        ('Depth', 5378)]),
    ('DuplicateFile', [
        ('FileKey', 11592), ('Component_', 3400), ('File_', 3400),
        ('DestName', 8191), ('DestFolder', 7496)]),
    ('Environment', [
        ('Environment', 11592), ('Name', 4095), ('Value', 8191),
        ('Component_', 3400)]),
    ('Error', [
        ('Error', 9474), ('Message', 7936)]),
    ('EventMapping', [
        ('Dialog_', 11592), ('Control_', 11570), ('Event', 11570),
        ('Attribute', 3378)]),
    ('Extension', [
        ('Extension', 11775), ('Component_', 11592), ('ProgId_', 7679),
        ('MIME_', 7488), ('Feature_', 3366)]),
    ('MIME', [
        ('ContentType', 11584), ('Extension_', 3583), ('CLSID', 7462)]),
    ('FeatureComponents', [
        ('Feature_', 11558), ('Component_', 11592)]),
    ('FileSFPCatalog', [
        ('File_', 11592), ('SFPCatalog_', 11775)]),
    ('SFPCatalog', [
        ('SFPCatalog', 11775), ('Catalog', 2304), ('Dependency', 7424)]),
    ('Font', [
        ('File_', 11592), ('FontTitle', 7552)]),
    ('IniFile', [
        ('IniFile', 11592), ('FileName', 4095), ('DirProperty', 7496),
        ('Section', 3936), ('Key', 3968), ('Value', 4095), ('Action', 1282),
        ('Component_', 3400)]),
    ('IniLocator', [
        ('Signature_', 11592), ('FileName', 3583), ('Section', 3424),
        ('Key', 3456), ('Field', 5378), ('Type', 5378)]),
    ('InstallExecuteSequence', [
        ('Action', 11592), ('Condition', 7679), ('Sequence', 5378)]),
    ('InstallUISequence', [
        ('Action', 11592), ('Condition', 7679), ('Sequence', 5378)]),
    ('IsolatedComponent', [
        ('Component_Shared', 11592), ('Component_Application', 11592)]),
    ('LaunchCondition', [
        ('Condition', 11775), ('Description', 4095)]),
    ('ListBox', [
        ('Property', 11592), ('Order', 9474), ('Value', 3392),
        ('Text', 8000)]),
    ('ListView', [
        ('Property', 11592), ('Order', 9474), ('Value', 3392),
        ('Text', 8000), ('Binary_', 7496)]),
    ('LockPermissions', [
        ('LockObject', 11592), ('Table', 11552), ('Domain', 15871),
        ('User', 11775), ('Permission', 4356)]),
    ('Media', [
        ('DiskId', 9474), ('LastSequence', 1282), ('DiskPrompt', 8000),
        ('Cabinet', 7679), ('VolumeLabel', 7456), ('Source', 7496)]),
    ('MoveFile', [
        ('FileKey', 11592), ('Component_', 3400), ('SourceName', 8191),
        ('DestName', 8191), ('SourceFolder', 7496), ('DestFolder', 3400),
        ('Options', 1282)]),
    ('MsiAssembly', [
        ('Component_', 11592), ('Feature_', 3366), ('File_Manifest', 7496),
        ('File_Application', 7496), ('Attributes', 5378)]),
    ('MsiAssemblyName', [
        ('Component_', 11592), ('Name', 11775), ('Value', 3583)]),
    ('MsiDigitalCertificate', [
        ('DigitalCertificate', 11592), ('CertData', 2304)]),
    ('MsiDigitalSignature', [
        ('Table', 11552), ('SignObject', 11592),
        ('DigitalCertificate_', 3400), ('Hash', 6400)]),
    ('MsiFileHash', [
        ('File_', 11592), ('Options', 1282), ('HashPart1', 260),
        ('HashPart2', 260), ('HashPart3', 260), ('HashPart4', 260)]),
    ('MsiPatchHeaders', [
        ('StreamRef', 11558), ('Header', 2304)]),
    ('ODBCAttribute', [
        ('Driver_', 11592), ('Attribute', 11560), ('Value', 8191)]),
    ('ODBCDriver', [
        ('Driver', 11592), ('Component_', 3400), ('Description', 3583),
        ('File_', 3400), ('File_Setup', 7496)]),
    ('ODBCDataSource', [
        ('DataSource', 11592), ('Component_', 3400), ('Description', 3583),
        ('DriverDescription', 3583), ('Registration', 1282)]),
    ('ODBCSourceAttribute', [
        ('DataSource_', 11592), ('Attribute', 11552), ('Value', 8191)]),
    ('ODBCTranslator', [
        ('Translator', 11592), ('Component_', 3400), ('Description', 3583),
        ('File_', 3400), ('File_Setup', 7496)]),
    ('Patch', [
        ('File_', 11592), ('Sequence', 9474), ('PatchSize', 260),
        ('Attributes', 1282), ('Header', 6400), ('StreamRef_', 7462)]),
    ('PatchPackage', [
        ('PatchId', 11558), ('Media_', 1282)]),
    ('PublishComponent', [
        ('ComponentId', 11558), ('Qualifier', 11775), ('Component_', 11592),
        ('AppData', 8191), ('Feature_', 3366)]),
    ('RadioButton', [
        ('Property', 11592), ('Order', 9474), ('Value', 3392), ('X', 1282),
        ('Y', 1282), ('Width', 1282), ('Height', 1282), ('Text', 8000),
        ('Help', 7986)]),
    ('Registry', [
        ('Registry', 11592), ('Root', 1282), ('Key', 4095), ('Name', 8191),
        ('Value', 7936), ('Component_', 3400)]),
    ('RegLocator', [
        ('Signature_', 11592), ('Root', 1282), ('Key', 3583), ('Name', 7679),
        ('Type', 5378)]),
    ('RemoveFile', [
        ('FileKey', 11592), ('Component_', 3400), ('FileName', 8191),
        ('DirProperty', 3400), ('InstallMode', 1282)]),
    ('RemoveIniFile', [
        ('RemoveIniFile', 11592), ('FileName', 4095), ('DirProperty', 7496),
        ('Section', 3936), ('Key', 3968), ('Value', 8191), ('Action', 1282),
        ('Component_', 3400)]),
    ('RemoveRegistry', [
        ('RemoveRegistry', 11592), ('Root', 1282), ('Key', 4095),
        ('Name', 8191), ('Component_', 3400)]),
    ('ReserveCost', [
        ('ReserveKey', 11592), ('Component_', 3400), ('ReserveFolder', 7496),
        ('ReserveLocal', 260), ('ReserveSource', 260)]),
    ('SelfReg', [
        ('File_', 11592), ('Cost', 5378)]),
    ('ServiceControl', [
        ('ServiceControl', 11592), ('Name', 4095), ('Event', 1282),
        ('Arguments', 8191), ('Wait', 5378), ('Component_', 3400)]),
    ('ServiceInstall', [
        ('ServiceInstall', 11592), ('Name', 3583), ('DisplayName', 8191),
        ('ServiceType', 260), ('StartType', 260), ('ErrorControl', 260),
        ('LoadOrderGroup', 7679), ('Dependencies', 7679),
        ('StartName', 7679), ('Password', 7679), ('Arguments', 7679),
        ('Component_', 3400), ('Description', 8191)]),
    ('Shortcut', [
        ('Shortcut', 11592), ('Directory_', 3400), ('Name', 3968),
        ('Component_', 3400), ('Target', 3400), ('Arguments', 7679),
        ('Description', 8191), ('Hotkey', 5378), ('Icon_', 7496),
        ('IconIndex', 5378), ('ShowCmd', 5378), ('WkDir', 7496)]),
    ('Signature', [
        ('Signature', 11592), ('FileName', 3583), ('MinVersion', 7444),
        ('MaxVersion', 7444), ('MinSize', 4356), ('MaxSize', 4356),
        ('MinDate', 4356), ('MaxDate', 4356), ('Languages', 7679)]),
    ('TextStyle', [
        ('TextStyle', 11592), ('FaceName', 3360), ('Size', 1282),
        ('Color', 4356), ('StyleBits', 5378)]),
    ('TypeLib', [
        ('LibID', 11558), ('Language', 9474), ('Component_', 11592),
        ('Version', 4356), ('Description', 8064), ('Directory_', 7496),
        ('Feature_', 3366), ('Cost', 4356)]),
    ('UIText', [
        ('Key', 11592), ('Text', 8191)]),
    ('Upgrade', [
        ('UpgradeCode', 11558), ('VersionMin', 15636), ('VersionMax', 15636),
        ('Language', 15871), ('Attributes', 8452), ('Remove', 7679),
        ('ActionProperty', 3400)]),
    ('Verb', [
        ('Extension_', 11775), ('Verb', 11552), ('Sequence', 5378),
        ('Command', 8191), ('Argument', 8191)]),
]


def _build_table(table_name, columns):
    # Register each column in order; field numbers are 1-based.
    table = Table(table_name)
    for field_number, (column_name, type_bits) in enumerate(columns, 1):
        table.add_field(field_number, column_name, type_bits)
    return table


# Bind every table to a module-level name identical to its table name
# (the two coincide for all entries), preserving e.g. `schema.File`.
for _spec in _TABLE_SPECS:
    globals()[_spec[0]] = _build_table(_spec[0], _spec[1])

# Same contents and ordering as the original literal list.
tables = [globals()[_spec[0]] for _spec in _TABLE_SPECS]
_Validation_records = [
('_Validation','Table','N',None, None, None, None, 'Identifier',None, 'Name of table',),
('_Validation','Column','N',None, None, None, None, 'Identifier',None, 'Name of column',),
('_Validation','Description','Y',None, None, None, None, 'Text',None, 'Description of column',),
('_Validation','Set','Y',None, None, None, None, 'Text',None, 'Set of values that are permitted',),
('_Validation','Category','Y',None, None, None, None, None, 'Text;Formatted;Template;Condition;Guid;Path;Version;Language;Identifier;Binary;UpperCase;LowerCase;Filename;Paths;AnyPath;WildCardFilename;RegPath;KeyFormatted;CustomSource;Property;Cabinet;Shortcut;URL','String category',),
('_Validation','KeyColumn','Y',1,32,None, None, None, None, 'Column to which foreign key connects',),
('_Validation','KeyTable','Y',None, None, None, None, 'Identifier',None, 'For foreign key, Name of table to which data must link',),
('_Validation','MaxValue','Y',-2147483647,2147483647,None, None, None, None, 'Maximum value allowed',),
('_Validation','MinValue','Y',-2147483647,2147483647,None, None, None, None, 'Minimum value allowed',),
('_Validation','Nullable','N',None, None, None, None, None, 'Y;N;@','Whether the column is nullable',),
('ActionText','Description','Y',None, None, None, None, 'Text',None, 'Localized description displayed in progress dialog and log when action is executing.',),
('ActionText','Action','N',None, None, None, None, 'Identifier',None, 'Name of action to be described.',),
('ActionText','Template','Y',None, None, None, None, 'Template',None, 'Optional localized format template used to format action data records for display during action execution.',),
('AdminExecuteSequence','Action','N',None, None, None, None, 'Identifier',None, 'Name of action to invoke, either in the engine or the handler DLL.',),
('AdminExecuteSequence','Condition','Y',None, None, None, None, 'Condition',None, 'Optional expression which skips the action if evaluates to expFalse.If the expression syntax is invalid, the engine will terminate, returning iesBadActionData.',),
('AdminExecuteSequence','Sequence','Y',-4,32767,None, None, None, None, 'Number that determines the sort order in which the actions are to be executed. Leave blank to suppress action.',),
('Condition','Condition','Y',None, None, None, None, 'Condition',None, 'Expression evaluated to determine if Level in the Feature table is to change.',),
('Condition','Feature_','N',None, None, 'Feature',1,'Identifier',None, 'Reference to a Feature entry in Feature table.',),
('Condition','Level','N',0,32767,None, None, None, None, 'New selection Level to set in Feature table if Condition evaluates to TRUE.',),
('AdminUISequence','Action','N',None, None, None, None, 'Identifier',None, 'Name of action to invoke, either in the engine or the handler DLL.',),
('AdminUISequence','Condition','Y',None, None, None, None, 'Condition',None, 'Optional expression which skips the action if evaluates to expFalse.If the expression syntax is invalid, the engine will terminate, returning iesBadActionData.',),
('AdminUISequence','Sequence','Y',-4,32767,None, None, None, None, 'Number that determines the sort order in which the actions are to be executed. Leave blank to suppress action.',),
('AdvtExecuteSequence','Action','N',None, None, None, None, 'Identifier',None, 'Name of action to invoke, either in the engine or the handler DLL.',),
('AdvtExecuteSequence','Condition','Y',None, None, None, None, 'Condition',None, 'Optional expression which skips the action if evaluates to expFalse.If the expression syntax is invalid, the engine will terminate, returning iesBadActionData.',),
('AdvtExecuteSequence','Sequence','Y',-4,32767,None, None, None, None, 'Number that determines the sort order in which the actions are to be executed. Leave blank to suppress action.',),
('AdvtUISequence','Action','N',None, None, None, None, 'Identifier',None, 'Name of action to invoke, either in the engine or the handler DLL.',),
('AdvtUISequence','Condition','Y',None, None, None, None, 'Condition',None, 'Optional expression which skips the action if evaluates to expFalse.If the expression syntax is invalid, the engine will terminate, returning iesBadActionData.',),
('AdvtUISequence','Sequence','Y',-4,32767,None, None, None, None, 'Number that determines the sort order in which the actions are to be executed. Leave blank to suppress action.',),
('AppId','AppId','N',None, None, None, None, 'Guid',None, None, ),
('AppId','ActivateAtStorage','Y',0,1,None, None, None, None, None, ),
('AppId','DllSurrogate','Y',None, None, None, None, 'Text',None, None, ),
('AppId','LocalService','Y',None, None, None, None, 'Text',None, None, ),
('AppId','RemoteServerName','Y',None, None, None, None, 'Formatted',None, None, ),
('AppId','RunAsInteractiveUser','Y',0,1,None, None, None, None, None, ),
('AppId','ServiceParameters','Y',None, None, None, None, 'Text',None, None, ),
('AppSearch','Property','N',None, None, None, None, 'Identifier',None, 'The property associated with a Signature',),
('AppSearch','Signature_','N',None, None, 'Signature;RegLocator;IniLocator;DrLocator;CompLocator',1,'Identifier',None, 'The Signature_ represents a unique file signature and is also the foreign key in the Signature, RegLocator, IniLocator, CompLocator and the DrLocator tables.',),
('Property','Property','N',None, None, None, None, 'Identifier',None, 'Name of property, uppercase if settable by launcher or loader.',),
('Property','Value','N',None, None, None, None, 'Text',None, 'String value for property. Never null or empty.',),
('BBControl','Type','N',None, None, None, None, 'Identifier',None, 'The type of the control.',),
('BBControl','Y','N',0,32767,None, None, None, None, 'Vertical coordinate of the upper left corner of the bounding rectangle of the control.',),
('BBControl','Text','Y',None, None, None, None, 'Text',None, 'A string used to set the initial text contained within a control (if appropriate).',),
('BBControl','BBControl','N',None, None, None, None, 'Identifier',None, 'Name of the control. This name must be unique within a billboard, but can repeat on different billboard.',),
('BBControl','Attributes','Y',0,2147483647,None, None, None, None, 'A 32-bit word that specifies the attribute flags to be applied to this control.',),
('BBControl','Billboard_','N',None, None, 'Billboard',1,'Identifier',None, 'External key to the Billboard table, name of the billboard.',),
('BBControl','Height','N',0,32767,None, None, None, None, 'Height of the bounding rectangle of the control.',),
('BBControl','Width','N',0,32767,None, None, None, None, 'Width of the bounding rectangle of the control.',),
('BBControl','X','N',0,32767,None, None, None, None, 'Horizontal coordinate of the upper left corner of the bounding rectangle of the control.',),
('Billboard','Action','Y',None, None, None, None, 'Identifier',None, 'The name of an action. The billboard is displayed during the progress messages received from this action.',),
('Billboard','Billboard','N',None, None, None, None, 'Identifier',None, 'Name of the billboard.',),
('Billboard','Feature_','N',None, None, 'Feature',1,'Identifier',None, 'An external key to the Feature Table. The billboard is shown only if this feature is being installed.',),
('Billboard','Ordering','Y',0,32767,None, None, None, None, 'A positive integer. If there is more than one billboard corresponding to an action they will be shown in the order defined by this column.',),
('Feature','Description','Y',None, None, None, None, 'Text',None, 'Longer descriptive text describing a visible feature item.',),
('Feature','Attributes','N',None, None, None, None, None, '0;1;2;4;5;6;8;9;10;16;17;18;20;21;22;24;25;26;32;33;34;36;37;38;48;49;50;52;53;54','Feature attributes',),
('Feature','Feature','N',None, None, None, None, 'Identifier',None, 'Primary key used to identify a particular feature record.',),
('Feature','Directory_','Y',None, None, 'Directory',1,'UpperCase',None, 'The name of the Directory that can be configured by the UI. A non-null value will enable the browse button.',),
('Feature','Level','N',0,32767,None, None, None, None, 'The install level at which record will be initially selected. An install level of 0 will disable an item and prevent its display.',),
('Feature','Title','Y',None, None, None, None, 'Text',None, 'Short text identifying a visible feature item.',),
('Feature','Display','Y',0,32767,None, None, None, None, 'Numeric sort order, used to force a specific display ordering.',),
('Feature','Feature_Parent','Y',None, None, 'Feature',1,'Identifier',None, 'Optional key of a parent record in the same table. If the parent is not selected, then the record will not be installed. Null indicates a root item.',),
('Binary','Name','N',None, None, None, None, 'Identifier',None, 'Unique key identifying the binary data.',),
('Binary','Data','N',None, None, None, None, 'Binary',None, 'The unformatted binary data.',),
('BindImage','File_','N',None, None, 'File',1,'Identifier',None, 'The index into the File table. This must be an executable file.',),
('BindImage','Path','Y',None, None, None, None, 'Paths',None, 'A list of ; delimited paths that represent the paths to be searched for the import DLLS. The list is usually a list of properties each enclosed within square brackets [] .',),
('File','Sequence','N',1,32767,None, None, None, None, 'Sequence with respect to the media images; order must track cabinet order.',),
('File','Attributes','Y',0,32767,None, None, None, None, 'Integer containing bit flags representing file attributes (with the decimal value of each bit position in parentheses)',),
('File','File','N',None, None, None, None, 'Identifier',None, 'Primary key, non-localized token, must match identifier in cabinet. For uncompressed files, this field is ignored.',),
('File','Component_','N',None, None, 'Component',1,'Identifier',None, 'Foreign key referencing Component that controls the file.',),
('File','FileName','N',None, None, None, None, 'Filename',None, 'File name used for installation, may be localized. This may contain a "short name|long name" pair.',),
('File','FileSize','N',0,2147483647,None, None, None, None, 'Size of file in bytes (integer).',),
('File','Language','Y',None, None, None, None, 'Language',None, 'List of decimal language Ids, comma-separated if more than one.',),
('File','Version','Y',None, None, 'File',1,'Version',None, 'Version string for versioned files; Blank for unversioned files.',),
('CCPSearch','Signature_','N',None, None, 'Signature;RegLocator;IniLocator;DrLocator;CompLocator',1,'Identifier',None, 'The Signature_ represents a unique file signature and is also the foreign key in the Signature, RegLocator, IniLocator, CompLocator and the DrLocator tables.',),
('CheckBox','Property','N',None, None, None, None, 'Identifier',None, 'A named property to be tied to the item.',),
('CheckBox','Value','Y',None, None, None, None, 'Formatted',None, 'The value string associated with the item.',),
('Class','Description','Y',None, None, None, None, 'Text',None, 'Localized description for the Class.',),
('Class','Attributes','Y',None, 32767,None, None, None, None, 'Class registration attributes.',),
('Class','Feature_','N',None, None, 'Feature',1,'Identifier',None, 'Required foreign key into the Feature Table, specifying the feature to validate or install in order for the CLSID factory to be operational.',),
('Class','AppId_','Y',None, None, 'AppId',1,'Guid',None, 'Optional AppID containing DCOM information for associated application (string GUID).',),
('Class','Argument','Y',None, None, None, None, 'Formatted',None, 'optional argument for LocalServers.',),
('Class','CLSID','N',None, None, None, None, 'Guid',None, 'The CLSID of an OLE factory.',),
('Class','Component_','N',None, None, 'Component',1,'Identifier',None, 'Required foreign key into the Component Table, specifying the component for which to return a path when called through LocateComponent.',),
('Class','Context','N',None, None, None, None, 'Identifier',None, 'The numeric server context for this server. CLSCTX_xxxx',),
('Class','DefInprocHandler','Y',None, None, None, None, 'Filename','1;2;3','Optional default inproc handler. Only optionally provided if Context=CLSCTX_LOCAL_SERVER. Typically "ole32.dll" or "mapi32.dll"',),
('Class','FileTypeMask','Y',None, None, None, None, 'Text',None, 'Optional string containing information for the HKCRthis CLSID) key. If multiple patterns exist, they must be delimited by a semicolon, and numeric subkeys will be generated: 0,1,2...',),
('Class','Icon_','Y',None, None, 'Icon',1,'Identifier',None, 'Optional foreign key into the Icon Table, specifying the icon file associated with this CLSID. Will be written under the DefaultIcon key.',),
('Class','IconIndex','Y',-32767,32767,None, None, None, None, 'Optional icon index.',),
('Class','ProgId_Default','Y',None, None, 'ProgId',1,'Text',None, 'Optional ProgId associated with this CLSID.',),
('Component','Condition','Y',None, None, None, None, 'Condition',None, "A conditional statement that will disable this component if the specified condition evaluates to the 'True' state. If a component is disabled, it will not be installed, regardless of the 'Action' state associated with the component.",),
('Component','Attributes','N',None, None, None, None, None, None, 'Remote execution option, one of irsEnum',),
('Component','Component','N',None, None, None, None, 'Identifier',None, 'Primary key used to identify a particular component record.',),
('Component','ComponentId','Y',None, None, None, None, 'Guid',None, 'A string GUID unique to this component, version, and language.',),
('Component','Directory_','N',None, None, 'Directory',1,'Identifier',None, 'Required key of a Directory table record. This is actually a property name whose value contains the actual path, set either by the AppSearch action or with the default setting obtained from the Directory table.',),
('Component','KeyPath','Y',None, None, 'File;Registry;ODBCDataSource',1,'Identifier',None, 'Either the primary key into the File table, Registry table, or ODBCDataSource table. This extract path is stored when the component is installed, and is used to detect the presence of the component and to return the path to it.',),
('Icon','Name','N',None, None, None, None, 'Identifier',None, 'Primary key. Name of the icon file.',),
('Icon','Data','N',None, None, None, None, 'Binary',None, 'Binary stream. The binary icon data in PE (.DLL or .EXE) or icon (.ICO) format.',),
('ProgId','Description','Y',None, None, None, None, 'Text',None, 'Localized description for the Program identifier.',),
('ProgId','Icon_','Y',None, None, 'Icon',1,'Identifier',None, 'Optional foreign key into the Icon Table, specifying the icon file associated with this ProgId. Will be written under the DefaultIcon key.',),
('ProgId','IconIndex','Y',-32767,32767,None, None, None, None, 'Optional icon index.',),
('ProgId','ProgId','N',None, None, None, None, 'Text',None, 'The Program Identifier. Primary key.',),
('ProgId','Class_','Y',None, None, 'Class',1,'Guid',None, 'The CLSID of an OLE factory corresponding to the ProgId.',),
('ProgId','ProgId_Parent','Y',None, None, 'ProgId',1,'Text',None, 'The Parent Program Identifier. If specified, the ProgId column becomes a version independent prog id.',),
('ComboBox','Text','Y',None, None, None, None, 'Formatted',None, 'The visible text to be assigned to the item. Optional. If this entry or the entire column is missing, the text is the same as the value.',),
('ComboBox','Property','N',None, None, None, None, 'Identifier',None, 'A named property to be tied to this item. All the items tied to the same property become part of the same combobox.',),
('ComboBox','Value','N',None, None, None, None, 'Formatted',None, 'The value string associated with this item. Selecting the line will set the associated property to this value.',),
('ComboBox','Order','N',1,32767,None, None, None, None, 'A positive integer used to determine the ordering of the items within one list.\tThe integers do not have to be consecutive.',),
('CompLocator','Type','Y',0,1,None, None, None, None, 'A boolean value that determines if the registry value is a filename or a directory location.',),
('CompLocator','Signature_','N',None, None, None, None, 'Identifier',None, 'The table key. The Signature_ represents a unique file signature and is also the foreign key in the Signature table.',),
('CompLocator','ComponentId','N',None, None, None, None, 'Guid',None, 'A string GUID unique to this component, version, and language.',),
('Complus','Component_','N',None, None, 'Component',1,'Identifier',None, 'Foreign key referencing Component that controls the ComPlus component.',),
('Complus','ExpType','Y',0,32767,None, None, None, None, 'ComPlus component attributes.',),
('Directory','Directory','N',None, None, None, None, 'Identifier',None, 'Unique identifier for directory entry, primary key. If a property by this name is defined, it contains the full path to the directory.',),
('Directory','DefaultDir','N',None, None, None, None, 'DefaultDir',None, "The default sub-path under parent's path.",),
('Directory','Directory_Parent','Y',None, None, 'Directory',1,'Identifier',None, 'Reference to the entry in this table specifying the default parent directory. A record parented to itself or with a Null parent represents a root of the install tree.',),
('Control','Type','N',None, None, None, None, 'Identifier',None, 'The type of the control.',),
('Control','Y','N',0,32767,None, None, None, None, 'Vertical coordinate of the upper left corner of the bounding rectangle of the control.',),
('Control','Text','Y',None, None, None, None, 'Formatted',None, 'A string used to set the initial text contained within a control (if appropriate).',),
('Control','Property','Y',None, None, None, None, 'Identifier',None, 'The name of a defined property to be linked to this control. ',),
('Control','Attributes','Y',0,2147483647,None, None, None, None, 'A 32-bit word that specifies the attribute flags to be applied to this control.',),
('Control','Height','N',0,32767,None, None, None, None, 'Height of the bounding rectangle of the control.',),
('Control','Width','N',0,32767,None, None, None, None, 'Width of the bounding rectangle of the control.',),
('Control','X','N',0,32767,None, None, None, None, 'Horizontal coordinate of the upper left corner of the bounding rectangle of the control.',),
('Control','Control','N',None, None, None, None, 'Identifier',None, 'Name of the control. This name must be unique within a dialog, but can repeat on different dialogs. ',),
('Control','Control_Next','Y',None, None, 'Control',2,'Identifier',None, 'The name of an other control on the same dialog. This link defines the tab order of the controls. The links have to form one or more cycles!',),
('Control','Dialog_','N',None, None, 'Dialog',1,'Identifier',None, 'External key to the Dialog table, name of the dialog.',),
('Control','Help','Y',None, None, None, None, 'Text',None, 'The help strings used with the button. The text is optional. ',),
('Dialog','Attributes','Y',0,2147483647,None, None, None, None, 'A 32-bit word that specifies the attribute flags to be applied to this dialog.',),
('Dialog','Height','N',0,32767,None, None, None, None, 'Height of the bounding rectangle of the dialog.',),
('Dialog','Width','N',0,32767,None, None, None, None, 'Width of the bounding rectangle of the dialog.',),
('Dialog','Dialog','N',None, None, None, None, 'Identifier',None, 'Name of the dialog.',),
('Dialog','Control_Cancel','Y',None, None, 'Control',2,'Identifier',None, 'Defines the cancel control. Hitting escape or clicking on the close icon on the dialog is equivalent to pushing this button.',),
('Dialog','Control_Default','Y',None, None, 'Control',2,'Identifier',None, 'Defines the default control. Hitting return is equivalent to pushing this button.',),
('Dialog','Control_First','N',None, None, 'Control',2,'Identifier',None, 'Defines the control that has the focus when the dialog is created.',),
('Dialog','HCentering','N',0,100,None, None, None, None, 'Horizontal position of the dialog on a 0-100 scale. 0 means left end, 100 means right end of the screen, 50 center.',),
('Dialog','Title','Y',None, None, None, None, 'Formatted',None, "A text string specifying the title to be displayed in the title bar of the dialog's window.",),
('Dialog','VCentering','N',0,100,None, None, None, None, 'Vertical position of the dialog on a 0-100 scale. 0 means top end, 100 means bottom end of the screen, 50 center.',),
('ControlCondition','Action','N',None, None, None, None, None, 'Default;Disable;Enable;Hide;Show','The desired action to be taken on the specified control.',),
('ControlCondition','Condition','N',None, None, None, None, 'Condition',None, 'A standard conditional statement that specifies under which conditions the action should be triggered.',),
('ControlCondition','Dialog_','N',None, None, 'Dialog',1,'Identifier',None, 'A foreign key to the Dialog table, name of the dialog.',),
('ControlCondition','Control_','N',None, None, 'Control',2,'Identifier',None, 'A foreign key to the Control table, name of the control.',),
('ControlEvent','Condition','Y',None, None, None, None, 'Condition',None, 'A standard conditional statement that specifies under which conditions an event should be triggered.',),
('ControlEvent','Ordering','Y',0,2147483647,None, None, None, None, 'An integer used to order several events tied to the same control. Can be left blank.',),
('ControlEvent','Argument','N',None, None, None, None, 'Formatted',None, 'A value to be used as a modifier when triggering a particular event.',),
('ControlEvent','Dialog_','N',None, None, 'Dialog',1,'Identifier',None, 'A foreign key to the Dialog table, name of the dialog.',),
('ControlEvent','Control_','N',None, None, 'Control',2,'Identifier',None, 'A foreign key to the Control table, name of the control',),
('ControlEvent','Event','N',None, None, None, None, 'Formatted',None, 'An identifier that specifies the type of the event that should take place when the user interacts with control specified by the first two entries.',),
('CreateFolder','Component_','N',None, None, 'Component',1,'Identifier',None, 'Foreign key into the Component table.',),
('CreateFolder','Directory_','N',None, None, 'Directory',1,'Identifier',None, 'Primary key, could be foreign key into the Directory table.',),
('CustomAction','Type','N',1,16383,None, None, None, None, 'The numeric custom action type, consisting of source location, code type, entry, option flags.',),
('CustomAction','Action','N',None, None, None, None, 'Identifier',None, 'Primary key, name of action, normally appears in sequence table unless private use.',),
('CustomAction','Source','Y',None, None, None, None, 'CustomSource',None, 'The table reference of the source of the code.',),
('CustomAction','Target','Y',None, None, None, None, 'Formatted',None, 'Excecution parameter, depends on the type of custom action',),
('DrLocator','Signature_','N',None, None, None, None, 'Identifier',None, 'The Signature_ represents a unique file signature and is also the foreign key in the Signature table.',),
('DrLocator','Path','Y',None, None, None, None, 'AnyPath',None, 'The path on the user system. This is a either a subpath below the value of the Parent or a full path. The path may contain properties enclosed within [ ] that will be expanded.',),
('DrLocator','Depth','Y',0,32767,None, None, None, None, 'The depth below the path to which the Signature_ is recursively searched. If absent, the depth is assumed to be 0.',),
('DrLocator','Parent','Y',None, None, None, None, 'Identifier',None, 'The parent file signature. It is also a foreign key in the Signature table. If null and the Path column does not expand to a full path, then all the fixed drives of the user system are searched using the Path.',),
('DuplicateFile','File_','N',None, None, 'File',1,'Identifier',None, 'Foreign key referencing the source file to be duplicated.',),
('DuplicateFile','Component_','N',None, None, 'Component',1,'Identifier',None, 'Foreign key referencing Component that controls the duplicate file.',),
('DuplicateFile','DestFolder','Y',None, None, None, None, 'Identifier',None, 'Name of a property whose value is assumed to resolve to the full pathname to a destination folder.',),
('DuplicateFile','DestName','Y',None, None, None, None, 'Filename',None, 'Filename to be given to the duplicate file.',),
('DuplicateFile','FileKey','N',None, None, None, None, 'Identifier',None, 'Primary key used to identify a particular file entry',),
('Environment','Name','N',None, None, None, None, 'Text',None, 'The name of the environmental value.',),
('Environment','Value','Y',None, None, None, None, 'Formatted',None, 'The value to set in the environmental settings.',),
('Environment','Component_','N',None, None, 'Component',1,'Identifier',None, 'Foreign key into the Component table referencing component that controls the installing of the environmental value.',),
('Environment','Environment','N',None, None, None, None, 'Identifier',None, 'Unique identifier for the environmental variable setting',),
('Error','Error','N',0,32767,None, None, None, None, 'Integer error number, obtained from header file IError(...) macros.',),
('Error','Message','Y',None, None, None, None, 'Template',None, 'Error formatting template, obtained from user ed. or localizers.',),
('EventMapping','Dialog_','N',None, None, 'Dialog',1,'Identifier',None, 'A foreign key to the Dialog table, name of the Dialog.',),
('EventMapping','Control_','N',None, None, 'Control',2,'Identifier',None, 'A foreign key to the Control table, name of the control.',),
('EventMapping','Event','N',None, None, None, None, 'Identifier',None, 'An identifier that specifies the type of the event that the control subscribes to.',),
('EventMapping','Attribute','N',None, None, None, None, 'Identifier',None, 'The name of the control attribute, that is set when this event is received.',),
('Extension','Feature_','N',None, None, 'Feature',1,'Identifier',None, 'Required foreign key into the Feature Table, specifying the feature to validate or install in order for the CLSID factory to be operational.',),
('Extension','Component_','N',None, None, 'Component',1,'Identifier',None, 'Required foreign key into the Component Table, specifying the component for which to return a path when called through LocateComponent.',),
('Extension','Extension','N',None, None, None, None, 'Text',None, 'The extension associated with the table row.',),
('Extension','MIME_','Y',None, None, 'MIME',1,'Text',None, 'Optional Context identifier, typically "type/format" associated with the extension',),
('Extension','ProgId_','Y',None, None, 'ProgId',1,'Text',None, 'Optional ProgId associated with this extension.',),
('MIME','CLSID','Y',None, None, None, None, 'Guid',None, 'Optional associated CLSID.',),
('MIME','ContentType','N',None, None, None, None, 'Text',None, 'Primary key. Context identifier, typically "type/format".',),
('MIME','Extension_','N',None, None, 'Extension',1,'Text',None, 'Optional associated extension (without dot)',),
('FeatureComponents','Feature_','N',None, None, 'Feature',1,'Identifier',None, 'Foreign key into Feature table.',),
('FeatureComponents','Component_','N',None, None, 'Component',1,'Identifier',None, 'Foreign key into Component table.',),
('FileSFPCatalog','File_','N',None, None, 'File',1,'Identifier',None, 'File associated with the catalog',),
('FileSFPCatalog','SFPCatalog_','N',None, None, 'SFPCatalog',1,'Filename',None, 'Catalog associated with the file',),
('SFPCatalog','SFPCatalog','N',None, None, None, None, 'Filename',None, 'File name for the catalog.',),
('SFPCatalog','Catalog','N',None, None, None, None, 'Binary',None, 'SFP Catalog',),
('SFPCatalog','Dependency','Y',None, None, None, None, 'Formatted',None, 'Parent catalog - only used by SFP',),
('Font','File_','N',None, None, 'File',1,'Identifier',None, 'Primary key, foreign key into File table referencing font file.',),
('Font','FontTitle','Y',None, None, None, None, 'Text',None, 'Font name.',),
('IniFile','Action','N',None, None, None, None, None, '0;1;3','The type of modification to be made, one of iifEnum',),
('IniFile','Value','N',None, None, None, None, 'Formatted',None, 'The value to be written.',),
('IniFile','Component_','N',None, None, 'Component',1,'Identifier',None, 'Foreign key into the Component table referencing component that controls the installing of the .INI value.',),
('IniFile','FileName','N',None, None, None, None, 'Filename',None, 'The .INI file name in which to write the information',),
('IniFile','IniFile','N',None, None, None, None, 'Identifier',None, 'Primary key, non-localized token.',),
('IniFile','DirProperty','Y',None, None, None, None, 'Identifier',None, 'Foreign key into the Directory table denoting the directory where the .INI file is.',),
('IniFile','Key','N',None, None, None, None, 'Formatted',None, 'The .INI file key below Section.',),
('IniFile','Section','N',None, None, None, None, 'Formatted',None, 'The .INI file Section.',),
('IniLocator','Type','Y',0,2,None, None, None, None, 'An integer value that determines if the .INI value read is a filename or a directory location or to be used as is w/o interpretation.',),
('IniLocator','Signature_','N',None, None, None, None, 'Identifier',None, 'The table key. The Signature_ represents a unique file signature and is also the foreign key in the Signature table.',),
('IniLocator','FileName','N',None, None, None, None, 'Filename',None, 'The .INI file name.',),
('IniLocator','Key','N',None, None, None, None, 'Text',None, 'Key value (followed by an equals sign in INI file).',),
('IniLocator','Section','N',None, None, None, None, 'Text',None, 'Section name within in file (within square brackets in INI file).',),
('IniLocator','Field','Y',0,32767,None, None, None, None, 'The field in the .INI line. If Field is null or 0 the entire line is read.',),
('InstallExecuteSequence','Action','N',None, None, None, None, 'Identifier',None, 'Name of action to invoke, either in the engine or the handler DLL.',),
('InstallExecuteSequence','Condition','Y',None, None, None, None, 'Condition',None, 'Optional expression which skips the action if evaluates to expFalse.If the expression syntax is invalid, the engine will terminate, returning iesBadActionData.',),
('InstallExecuteSequence','Sequence','Y',-4,32767,None, None, None, None, 'Number that determines the sort order in which the actions are to be executed. Leave blank to suppress action.',),
('InstallUISequence','Action','N',None, None, None, None, 'Identifier',None, 'Name of action to invoke, either in the engine or the handler DLL.',),
('InstallUISequence','Condition','Y',None, None, None, None, 'Condition',None, 'Optional expression which skips the action if evaluates to expFalse.If the expression syntax is invalid, the engine will terminate, returning iesBadActionData.',),
('InstallUISequence','Sequence','Y',-4,32767,None, None, None, None, 'Number that determines the sort order in which the actions are to be executed. Leave blank to suppress action.',),
('IsolatedComponent','Component_Application','N',None, None, 'Component',1,'Identifier',None, 'Key to Component table item for application',),
('IsolatedComponent','Component_Shared','N',None, None, 'Component',1,'Identifier',None, 'Key to Component table item to be isolated',),
('LaunchCondition','Description','N',None, None, None, None, 'Formatted',None, 'Localizable text to display when condition fails and install must abort.',),
('LaunchCondition','Condition','N',None, None, None, None, 'Condition',None, 'Expression which must evaluate to TRUE in order for install to commence.',),
('ListBox','Text','Y',None, None, None, None, 'Text',None, 'The visible text to be assigned to the item. Optional. If this entry or the entire column is missing, the text is the same as the value.',),
('ListBox','Property','N',None, None, None, None, 'Identifier',None, 'A named property to be tied to this item. All the items tied to the same property become part of the same listbox.',),
('ListBox','Value','N',None, None, None, None, 'Formatted',None, 'The value string associated with this item. Selecting the line will set the associated property to this value.',),
('ListBox','Order','N',1,32767,None, None, None, None, 'A positive integer used to determine the ordering of the items within one list..The integers do not have to be consecutive.',),
('ListView','Text','Y',None, None, None, None, 'Text',None, 'The visible text to be assigned to the item. Optional. If this entry or the entire column is missing, the text is the same as the value.',),
('ListView','Property','N',None, None, None, None, 'Identifier',None, 'A named property to be tied to this item. All the items tied to the same property become part of the same listview.',),
('ListView','Value','N',None, None, None, None, 'Identifier',None, 'The value string associated with this item. Selecting the line will set the associated property to this value.',),
('ListView','Order','N',1,32767,None, None, None, None, 'A positive integer used to determine the ordering of the items within one list..The integers do not have to be consecutive.',),
('ListView','Binary_','Y',None, None, 'Binary',1,'Identifier',None, 'The name of the icon to be displayed with the icon. The binary information is looked up from the Binary Table.',),
('LockPermissions','Table','N',None, None, None, None, 'Identifier','Directory;File;Registry','Reference to another table name',),
('LockPermissions','Domain','Y',None, None, None, None, 'Formatted',None, 'Domain name for user whose permissions are being set. (usually a property)',),
('LockPermissions','LockObject','N',None, None, None, None, 'Identifier',None, 'Foreign key into Registry or File table',),
('LockPermissions','Permission','Y',-2147483647,2147483647,None, None, None, None, 'Permission Access mask. Full Control = 268435456 (GENERIC_ALL = 0x10000000)',),
('LockPermissions','User','N',None, None, None, None, 'Formatted',None, 'User for permissions to be set. (usually a property)',),
('Media','Source','Y',None, None, None, None, 'Property',None, 'The property defining the location of the cabinet file.',),
('Media','Cabinet','Y',None, None, None, None, 'Cabinet',None, 'If some or all of the files stored on the media are compressed in a cabinet, the name of that cabinet.',),
('Media','DiskId','N',1,32767,None, None, None, None, 'Primary key, integer to determine sort order for table.',),
('Media','DiskPrompt','Y',None, None, None, None, 'Text',None, 'Disk name: the visible text actually printed on the disk. This will be used to prompt the user when this disk needs to be inserted.',),
('Media','LastSequence','N',0,32767,None, None, None, None, 'File sequence number for the last file for this media.',),
('Media','VolumeLabel','Y',None, None, None, None, 'Text',None, 'The label attributed to the volume.',),
('ModuleComponents','Component','N',None, None, 'Component',1,'Identifier',None, 'Component contained in the module.',),
('ModuleComponents','Language','N',None, None, 'ModuleSignature',2,None, None, 'Default language ID for module (may be changed by transform).',),
('ModuleComponents','ModuleID','N',None, None, 'ModuleSignature',1,'Identifier',None, 'Module containing the component.',),
('ModuleSignature','Language','N',None, None, None, None, None, None, 'Default decimal language of module.',),
('ModuleSignature','Version','N',None, None, None, None, 'Version',None, 'Version of the module.',),
('ModuleSignature','ModuleID','N',None, None, None, None, 'Identifier',None, 'Module identifier (String.GUID).',),
('ModuleDependency','ModuleID','N',None, None, 'ModuleSignature',1,'Identifier',None, 'Module requiring the dependency.',),
('ModuleDependency','ModuleLanguage','N',None, None, 'ModuleSignature',2,None, None, 'Language of module requiring the dependency.',),
('ModuleDependency','RequiredID','N',None, None, None, None, None, None, 'String.GUID of required module.',),
('ModuleDependency','RequiredLanguage','N',None, None, None, None, None, None, 'LanguageID of the required module.',),
('ModuleDependency','RequiredVersion','Y',None, None, None, None, 'Version',None, 'Version of the required version.',),
('ModuleExclusion','ModuleID','N',None, None, 'ModuleSignature',1,'Identifier',None, 'String.GUID of module with exclusion requirement.',),
('ModuleExclusion','ModuleLanguage','N',None, None, 'ModuleSignature',2,None, None, 'LanguageID of module with exclusion requirement.',),
('ModuleExclusion','ExcludedID','N',None, None, None, None, None, None, 'String.GUID of excluded module.',),
('ModuleExclusion','ExcludedLanguage','N',None, None, None, None, None, None, 'Language of excluded module.',),
('ModuleExclusion','ExcludedMaxVersion','Y',None, None, None, None, 'Version',None, 'Maximum version of excluded module.',),
('ModuleExclusion','ExcludedMinVersion','Y',None, None, None, None, 'Version',None, 'Minimum version of excluded module.',),
('MoveFile','Component_','N',None, None, 'Component',1,'Identifier',None, 'If this component is not "selected" for installation or removal, no action will be taken on the associated MoveFile entry',),
('MoveFile','DestFolder','N',None, None, None, None, 'Identifier',None, 'Name of a property whose value is assumed to resolve to the full path to the destination directory',),
('MoveFile','DestName','Y',None, None, None, None, 'Filename',None, 'Name to be given to the original file after it is moved or copied. If blank, the destination file will be given the same name as the source file',),
('MoveFile','FileKey','N',None, None, None, None, 'Identifier',None, 'Primary key that uniquely identifies a particular MoveFile record',),
('MoveFile','Options','N',0,1,None, None, None, None, 'Integer value specifying the MoveFile operating mode, one of imfoEnum',),
('MoveFile','SourceFolder','Y',None, None, None, None, 'Identifier',None, 'Name of a property whose value is assumed to resolve to the full path to the source directory',),
('MoveFile','SourceName','Y',None, None, None, None, 'Text',None, "Name of the source file(s) to be moved or copied. Can contain the '*' or '?' wildcards.",),
('MsiAssembly','Attributes','Y',None, None, None, None, None, None, 'Assembly attributes',),
('MsiAssembly','Feature_','N',None, None, 'Feature',1,'Identifier',None, 'Foreign key into Feature table.',),
('MsiAssembly','Component_','N',None, None, 'Component',1,'Identifier',None, 'Foreign key into Component table.',),
('MsiAssembly','File_Application','Y',None, None, 'File',1,'Identifier',None, 'Foreign key into File table, denoting the application context for private assemblies. Null for global assemblies.',),
('MsiAssembly','File_Manifest','Y',None, None, 'File',1,'Identifier',None, 'Foreign key into the File table denoting the manifest file for the assembly.',),
('MsiAssemblyName','Name','N',None, None, None, None, 'Text',None, 'The name part of the name-value pairs for the assembly name.',),
('MsiAssemblyName','Value','N',None, None, None, None, 'Text',None, 'The value part of the name-value pairs for the assembly name.',),
('MsiAssemblyName','Component_','N',None, None, 'Component',1,'Identifier',None, 'Foreign key into Component table.',),
('MsiDigitalCertificate','CertData','N',None, None, None, None, 'Binary',None, 'A certificate context blob for a signer certificate',),
('MsiDigitalCertificate','DigitalCertificate','N',None, None, None, None, 'Identifier',None, 'A unique identifier for the row',),
('MsiDigitalSignature','Table','N',None, None, None, None, None, 'Media','Reference to another table name (only Media table is supported)',),
('MsiDigitalSignature','DigitalCertificate_','N',None, None, 'MsiDigitalCertificate',1,'Identifier',None, 'Foreign key to MsiDigitalCertificate table identifying the signer certificate',),
('MsiDigitalSignature','Hash','Y',None, None, None, None, 'Binary',None, 'The encoded hash blob from the digital signature',),
('MsiDigitalSignature','SignObject','N',None, None, None, None, 'Text',None, 'Foreign key to Media table',),
('MsiFileHash','File_','N',None, None, 'File',1,'Identifier',None, 'Primary key, foreign key into File table referencing file with this hash',),
('MsiFileHash','Options','N',0,32767,None, None, None, None, 'Various options and attributes for this hash.',),
('MsiFileHash','HashPart1','N',None, None, None, None, None, None, 'Size of file in bytes (integer).',),
('MsiFileHash','HashPart2','N',None, None, None, None, None, None, 'Size of file in bytes (integer).',),
('MsiFileHash','HashPart3','N',None, None, None, None, None, None, 'Size of file in bytes (integer).',),
('MsiFileHash','HashPart4','N',None, None, None, None, None, None, 'Size of file in bytes (integer).',),
('MsiPatchHeaders','StreamRef','N',None, None, None, None, 'Identifier',None, 'Primary key. A unique identifier for the row.',),
('MsiPatchHeaders','Header','N',None, None, None, None, 'Binary',None, 'Binary stream. The patch header, used for patch validation.',),
('ODBCAttribute','Value','Y',None, None, None, None, 'Text',None, 'Value for ODBC driver attribute',),
('ODBCAttribute','Attribute','N',None, None, None, None, 'Text',None, 'Name of ODBC driver attribute',),
('ODBCAttribute','Driver_','N',None, None, 'ODBCDriver',1,'Identifier',None, 'Reference to ODBC driver in ODBCDriver table',),
('ODBCDriver','Description','N',None, None, None, None, 'Text',None, 'Text used as registered name for driver, non-localized',),
('ODBCDriver','File_','N',None, None, 'File',1,'Identifier',None, 'Reference to key driver file',),
('ODBCDriver','Component_','N',None, None, 'Component',1,'Identifier',None, 'Reference to associated component',),
('ODBCDriver','Driver','N',None, None, None, None, 'Identifier',None, 'Primary key, non-localized.internal token for driver',),
('ODBCDriver','File_Setup','Y',None, None, 'File',1,'Identifier',None, 'Optional reference to key driver setup DLL',),
('ODBCDataSource','Description','N',None, None, None, None, 'Text',None, 'Text used as registered name for data source',),
('ODBCDataSource','Component_','N',None, None, 'Component',1,'Identifier',None, 'Reference to associated component',),
('ODBCDataSource','DataSource','N',None, None, None, None, 'Identifier',None, 'Primary key, non-localized.internal token for data source',),
('ODBCDataSource','DriverDescription','N',None, None, None, None, 'Text',None, 'Reference to driver description, may be existing driver',),
('ODBCDataSource','Registration','N',0,1,None, None, None, None, 'Registration option: 0=machine, 1=user, others t.b.d.',),
('ODBCSourceAttribute','Value','Y',None, None, None, None, 'Text',None, 'Value for ODBC data source attribute',),
('ODBCSourceAttribute','Attribute','N',None, None, None, None, 'Text',None, 'Name of ODBC data source attribute',),
('ODBCSourceAttribute','DataSource_','N',None, None, 'ODBCDataSource',1,'Identifier',None, 'Reference to ODBC data source in ODBCDataSource table',),
('ODBCTranslator','Description','N',None, None, None, None, 'Text',None, 'Text used as registered name for translator',),
('ODBCTranslator','File_','N',None, None, 'File',1,'Identifier',None, 'Reference to key translator file',),
('ODBCTranslator','Component_','N',None, None, 'Component',1,'Identifier',None, 'Reference to associated component',),
('ODBCTranslator','File_Setup','Y',None, None, 'File',1,'Identifier',None, 'Optional reference to key translator setup DLL',),
('ODBCTranslator','Translator','N',None, None, None, None, 'Identifier',None, 'Primary key, non-localized.internal token for translator',),
('Patch','Sequence','N',0,32767,None, None, None, None, 'Primary key, sequence with respect to the media images; order must track cabinet order.',),
('Patch','Attributes','N',0,32767,None, None, None, None, 'Integer containing bit flags representing patch attributes',),
('Patch','File_','N',None, None, None, None, 'Identifier',None, 'Primary key, non-localized token, foreign key to File table, must match identifier in cabinet.',),
('Patch','Header','Y',None, None, None, None, 'Binary',None, 'Binary stream. The patch header, used for patch validation.',),
('Patch','PatchSize','N',0,2147483647,None, None, None, None, 'Size of patch in bytes (integer).',),
('Patch','StreamRef_','Y',None, None, None, None, 'Identifier',None, 'Identifier. Foreign key to the StreamRef column of the MsiPatchHeaders table.',),
('PatchPackage','Media_','N',0,32767,None, None, None, None, 'Foreign key to DiskId column of Media table. Indicates the disk containing the patch package.',),
('PatchPackage','PatchId','N',None, None, None, None, 'Guid',None, 'A unique string GUID representing this patch.',),
('PublishComponent','Feature_','N',None, None, 'Feature',1,'Identifier',None, 'Foreign key into the Feature table.',),
('PublishComponent','Component_','N',None, None, 'Component',1,'Identifier',None, 'Foreign key into the Component table.',),
('PublishComponent','ComponentId','N',None, None, None, None, 'Guid',None, 'A string GUID that represents the component id that will be requested by the alien product.',),
('PublishComponent','AppData','Y',None, None, None, None, 'Text',None, 'This is localisable Application specific data that can be associated with a Qualified Component.',),
('PublishComponent','Qualifier','N',None, None, None, None, 'Text',None, 'This is defined only when the ComponentId column is an Qualified Component Id. This is the Qualifier for ProvideComponentIndirect.',),
('RadioButton','Y','N',0,32767,None, None, None, None, 'The vertical coordinate of the upper left corner of the bounding rectangle of the radio button.',),
('RadioButton','Text','Y',None, None, None, None, 'Text',None, 'The visible title to be assigned to the radio button.',),
('RadioButton','Property','N',None, None, None, None, 'Identifier',None, 'A named property to be tied to this radio button. All the buttons tied to the same property become part of the same group.',),
('RadioButton','Height','N',0,32767,None, None, None, None, 'The height of the button.',),
('RadioButton','Width','N',0,32767,None, None, None, None, 'The width of the button.',),
('RadioButton','X','N',0,32767,None, None, None, None, 'The horizontal coordinate of the upper left corner of the bounding rectangle of the radio button.',),
('RadioButton','Value','N',None, None, None, None, 'Formatted',None, 'The value string associated with this button. Selecting the button will set the associated property to this value.',),
('RadioButton','Order','N',1,32767,None, None, None, None, 'A positive integer used to determine the ordering of the items within one list..The integers do not have to be consecutive.',),
('RadioButton','Help','Y',None, None, None, None, 'Text',None, 'The help strings used with the button. The text is optional.',),
('Registry','Name','Y',None, None, None, None, 'Formatted',None, 'The registry value name.',),
('Registry','Value','Y',None, None, None, None, 'Formatted',None, 'The registry value.',),
('Registry','Component_','N',None, None, 'Component',1,'Identifier',None, 'Foreign key into the Component table referencing component that controls the installing of the registry value.',),
('Registry','Key','N',None, None, None, None, 'RegPath',None, 'The key for the registry value.',),
('Registry','Registry','N',None, None, None, None, 'Identifier',None, 'Primary key, non-localized token.',),
('Registry','Root','N',-1,3,None, None, None, None, 'The predefined root key for the registry value, one of rrkEnum.',),
('RegLocator','Name','Y',None, None, None, None, 'Formatted',None, 'The registry value name.',),
('RegLocator','Type','Y',0,18,None, None, None, None, 'An integer value that determines if the registry value is a filename or a directory location or to be used as is w/o interpretation.',),
('RegLocator','Signature_','N',None, None, None, None, 'Identifier',None, 'The table key. The Signature_ represents a unique file signature and is also the foreign key in the Signature table. If the type is 0, the registry values refers a directory, and _Signature is not a foreign key.',),
('RegLocator','Key','N',None, None, None, None, 'RegPath',None, 'The key for the registry value.',),
('RegLocator','Root','N',0,3,None, None, None, None, 'The predefined root key for the registry value, one of rrkEnum.',),
('RemoveFile','Component_','N',None, None, 'Component',1,'Identifier',None, 'Foreign key referencing Component that controls the file to be removed.',),
('RemoveFile','FileKey','N',None, None, None, None, 'Identifier',None, 'Primary key used to identify a particular file entry',),
('RemoveFile','FileName','Y',None, None, None, None, 'WildCardFilename',None, 'Name of the file to be removed.',),
('RemoveFile','DirProperty','N',None, None, None, None, 'Identifier',None, 'Name of a property whose value is assumed to resolve to the full pathname to the folder of the file to be removed.',),
('RemoveFile','InstallMode','N',None, None, None, None, None, '1;2;3','Installation option, one of iimEnum.',),
('RemoveIniFile','Action','N',None, None, None, None, None, '2;4','The type of modification to be made, one of iifEnum.',),
('RemoveIniFile','Value','Y',None, None, None, None, 'Formatted',None, 'The value to be deleted. The value is required when Action is iifIniRemoveTag',),
('RemoveIniFile','Component_','N',None, None, 'Component',1,'Identifier',None, 'Foreign key into the Component table referencing component that controls the deletion of the .INI value.',),
('RemoveIniFile','FileName','N',None, None, None, None, 'Filename',None, 'The .INI file name in which to delete the information',),
('RemoveIniFile','DirProperty','Y',None, None, None, None, 'Identifier',None, 'Foreign key into the Directory table denoting the directory where the .INI file is.',),
('RemoveIniFile','Key','N',None, None, None, None, 'Formatted',None, 'The .INI file key below Section.',),
('RemoveIniFile','Section','N',None, None, None, None, 'Formatted',None, 'The .INI file Section.',),
('RemoveIniFile','RemoveIniFile','N',None, None, None, None, 'Identifier',None, 'Primary key, non-localized token.',),
('RemoveRegistry','Name','Y',None, None, None, None, 'Formatted',None, 'The registry value name.',),
('RemoveRegistry','Component_','N',None, None, 'Component',1,'Identifier',None, 'Foreign key into the Component table referencing component that controls the deletion of the registry value.',),
('RemoveRegistry','Key','N',None, None, None, None, 'RegPath',None, 'The key for the registry value.',),
('RemoveRegistry','Root','N',-1,3,None, None, None, None, 'The predefined root key for the registry value, one of rrkEnum',),
('RemoveRegistry','RemoveRegistry','N',None, None, None, None, 'Identifier',None, 'Primary key, non-localized token.',),
('ReserveCost','Component_','N',None, None, 'Component',1,'Identifier',None, 'Reserve a specified amount of space if this component is to be installed.',),
('ReserveCost','ReserveFolder','Y',None, None, None, None, 'Identifier',None, 'Name of a property whose value is assumed to resolve to the full path to the destination directory',),
('ReserveCost','ReserveKey','N',None, None, None, None, 'Identifier',None, 'Primary key that uniquely identifies a particular ReserveCost record',),
('ReserveCost','ReserveLocal','N',0,2147483647,None, None, None, None, 'Disk space to reserve if linked component is installed locally.',),
('ReserveCost','ReserveSource','N',0,2147483647,None, None, None, None, 'Disk space to reserve if linked component is installed to run from the source location.',),
('SelfReg','File_','N',None, None, 'File',1,'Identifier',None, 'Foreign key into the File table denoting the module that needs to be registered.',),
('SelfReg','Cost','Y',0,32767,None, None, None, None, 'The cost of registering the module.',),
('ServiceControl','Name','N',None, None, None, None, 'Formatted',None, 'Name of a service. /, \\, comma and space are invalid',),
('ServiceControl','Component_','N',None, None, 'Component',1,'Identifier',None, 'Required foreign key into the Component Table that controls the startup of the service',),
('ServiceControl','Event','N',0,187,None, None, None, None, 'Bit field: Install: 0x1 = Start, 0x2 = Stop, 0x8 = Delete, Uninstall: 0x10 = Start, 0x20 = Stop, 0x80 = Delete',),
('ServiceControl','ServiceControl','N',None, None, None, None, 'Identifier',None, 'Primary key, non-localized token.',),
('ServiceControl','Arguments','Y',None, None, None, None, 'Formatted',None, 'Arguments for the service. Separate by [~].',),
('ServiceControl','Wait','Y',0,1,None, None, None, None, 'Boolean for whether to wait for the service to fully start',),
('ServiceInstall','Name','N',None, None, None, None, 'Formatted',None, 'Internal Name of the Service',),
('ServiceInstall','Description','Y',None, None, None, None, 'Text',None, 'Description of service.',),
('ServiceInstall','Component_','N',None, None, 'Component',1,'Identifier',None, 'Required foreign key into the Component Table that controls the startup of the service',),
('ServiceInstall','Arguments','Y',None, None, None, None, 'Formatted',None, 'Arguments to include in every start of the service, passed to WinMain',),
('ServiceInstall','ServiceInstall','N',None, None, None, None, 'Identifier',None, 'Primary key, non-localized token.',),
('ServiceInstall','Dependencies','Y',None, None, None, None, 'Formatted',None, 'Other services this depends on to start. Separate by [~], and end with [~][~]',),
('ServiceInstall','DisplayName','Y',None, None, None, None, 'Formatted',None, 'External Name of the Service',),
('ServiceInstall','ErrorControl','N',-2147483647,2147483647,None, None, None, None, 'Severity of error if service fails to start',),
('ServiceInstall','LoadOrderGroup','Y',None, None, None, None, 'Formatted',None, 'LoadOrderGroup',),
('ServiceInstall','Password','Y',None, None, None, None, 'Formatted',None, 'password to run service with. (with StartName)',),
('ServiceInstall','ServiceType','N',-2147483647,2147483647,None, None, None, None, 'Type of the service',),
('ServiceInstall','StartName','Y',None, None, None, None, 'Formatted',None, 'User or object name to run service as',),
('ServiceInstall','StartType','N',0,4,None, None, None, None, 'Type of the service',),
('Shortcut','Name','N',None, None, None, None, 'Filename',None, 'The name of the shortcut to be created.',),
('Shortcut','Description','Y',None, None, None, None, 'Text',None, 'The description for the shortcut.',),
('Shortcut','Component_','N',None, None, 'Component',1,'Identifier',None, 'Foreign key into the Component table denoting the component whose selection gates the the shortcut creation/deletion.',),
('Shortcut','Icon_','Y',None, None, 'Icon',1,'Identifier',None, 'Foreign key into the File table denoting the external icon file for the shortcut.',),
('Shortcut','IconIndex','Y',-32767,32767,None, None, None, None, 'The icon index for the shortcut.',),
('Shortcut','Directory_','N',None, None, 'Directory',1,'Identifier',None, 'Foreign key into the Directory table denoting the directory where the shortcut file is created.',),
('Shortcut','Target','N',None, None, None, None, 'Shortcut',None, 'The shortcut target. This is usually a property that is expanded to a file or a folder that the shortcut points to.',),
('Shortcut','Arguments','Y',None, None, None, None, 'Formatted',None, 'The command-line arguments for the shortcut.',),
('Shortcut','Shortcut','N',None, None, None, None, 'Identifier',None, 'Primary key, non-localized token.',),
('Shortcut','Hotkey','Y',0,32767,None, None, None, None, 'The hotkey for the shortcut. It has the virtual-key code for the key in the low-order byte, and the modifier flags in the high-order byte. ',),
('Shortcut','ShowCmd','Y',None, None, None, None, None, '1;3;7','The show command for the application window.The following values may be used.',),
('Shortcut','WkDir','Y',None, None, None, None, 'Identifier',None, 'Name of property defining location of working directory.',),
('Signature','FileName','N',None, None, None, None, 'Filename',None, 'The name of the file. This may contain a "short name|long name" pair.',),
('Signature','Signature','N',None, None, None, None, 'Identifier',None, 'The table key. The Signature represents a unique file signature.',),
('Signature','Languages','Y',None, None, None, None, 'Language',None, 'The languages supported by the file.',),
('Signature','MaxDate','Y',0,2147483647,None, None, None, None, 'The maximum creation date of the file.',),
('Signature','MaxSize','Y',0,2147483647,None, None, None, None, 'The maximum size of the file. ',),
('Signature','MaxVersion','Y',None, None, None, None, 'Text',None, 'The maximum version of the file.',),
('Signature','MinDate','Y',0,2147483647,None, None, None, None, 'The minimum creation date of the file.',),
('Signature','MinSize','Y',0,2147483647,None, None, None, None, 'The minimum size of the file.',),
('Signature','MinVersion','Y',None, None, None, None, 'Text',None, 'The minimum version of the file.',),
('TextStyle','TextStyle','N',None, None, None, None, 'Identifier',None, 'Name of the style. The primary key of this table. This name is embedded in the texts to indicate a style change.',),
('TextStyle','Color','Y',0,16777215,None, None, None, None, 'An integer indicating the color of the string in the RGB format (Red, Green, Blue each 0-255, RGB = R + 256*G + 256^2*B).',),
('TextStyle','FaceName','N',None, None, None, None, 'Text',None, 'A string indicating the name of the font used. Required. The string must be at most 31 characters long.',),
('TextStyle','Size','N',0,32767,None, None, None, None, 'The size of the font used. This size is given in our units (1/12 of the system font height). Assuming that the system font is set to 12 point size, this is equivalent to the point size.',),
('TextStyle','StyleBits','Y',0,15,None, None, None, None, 'A combination of style bits.',),
('TypeLib','Description','Y',None, None, None, None, 'Text',None, None, ),
('TypeLib','Feature_','N',None, None, 'Feature',1,'Identifier',None, 'Required foreign key into the Feature Table, specifying the feature to validate or install in order for the type library to be operational.',),
('TypeLib','Component_','N',None, None, 'Component',1,'Identifier',None, 'Required foreign key into the Component Table, specifying the component for which to return a path when called through LocateComponent.',),
('TypeLib','Directory_','Y',None, None, 'Directory',1,'Identifier',None, 'Optional. The foreign key into the Directory table denoting the path to the help file for the type library.',),
('TypeLib','Language','N',0,32767,None, None, None, None, 'The language of the library.',),
('TypeLib','Version','Y',0,16777215,None, None, None, None, 'The version of the library. The minor version is in the lower 8 bits of the integer. The major version is in the next 16 bits. ',),
('TypeLib','Cost','Y',0,2147483647,None, None, None, None, 'The cost associated with the registration of the typelib. This column is currently optional.',),
('TypeLib','LibID','N',None, None, None, None, 'Guid',None, 'The GUID that represents the library.',),
('UIText','Text','Y',None, None, None, None, 'Text',None, 'The localized version of the string.',),
('UIText','Key','N',None, None, None, None, 'Identifier',None, 'A unique key that identifies the particular string.',),
('Upgrade','Attributes','N',0,2147483647,None, None, None, None, 'The attributes of this product set.',),
('Upgrade','Language','Y',None, None, None, None, 'Language',None, 'A comma-separated list of languages for either products in this set or products not in this set.',),
('Upgrade','ActionProperty','N',None, None, None, None, 'UpperCase',None, 'The property to set when a product in this set is found.',),
('Upgrade','Remove','Y',None, None, None, None, 'Formatted',None, 'The list of features to remove when uninstalling a product from this set. The default is "ALL".',),
('Upgrade','UpgradeCode','N',None, None, None, None, 'Guid',None, 'The UpgradeCode GUID belonging to the products in this set.',),
('Upgrade','VersionMax','Y',None, None, None, None, 'Text',None, 'The maximum ProductVersion of the products in this set. The set may or may not include products with this particular version.',),
('Upgrade','VersionMin','Y',None, None, None, None, 'Text',None, 'The minimum ProductVersion of the products in this set. The set may or may not include products with this particular version.',),
('Verb','Sequence','Y',0,32767,None, None, None, None, 'Order within the verbs for a particular extension. Also used simply to specify the default verb.',),
('Verb','Argument','Y',None, None, None, None, 'Formatted',None, 'Optional value for the command arguments.',),
('Verb','Extension_','N',None, None, 'Extension',1,'Text',None, 'The extension associated with the table row.',),
('Verb','Verb','N',None, None, None, None, 'Text',None, 'The verb for the command.',),
('Verb','Command','Y',None, None, None, None, 'Formatted',None, 'The command text.',),
]
| apache-2.0 |
oVirt/vdsm | lib/vdsm/common/libvirtconnection.py | 2 | 8069 | #
# Copyright 2009-2017 Red Hat, Inc.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
#
# Refer to the README and COPYING files for full details of the license
#
from __future__ import absolute_import
from __future__ import division
import atexit
import threading
import functools
import io
import logging
import os
import signal
import libvirt
from vdsm.common import cache
from vdsm.common import concurrent
from vdsm.common import function
from vdsm.common import pki
from vdsm.common.password import ProtectedPassword
# Module-level logger used by the event loop and connection wrappers below.
log = logging.getLogger()
# SASL identity this module authenticates to libvirtd with (see open_connection).
SASL_USERNAME = "vdsm@ovirt"
# Path of the libvirt SASL password file, under the PKI keys directory.
LIBVIRT_PASSWORD_PATH = os.path.join(pki.PKI_DIR, 'keys', 'libvirt_password')
class _EventLoop:
    """Drives libvirt's default event loop implementation in a thread.

    The public ``run`` flag is True while the dispatching thread is
    expected to keep running; ``start``/``stop`` control its lifecycle.
    """
    def __init__(self):
        self.run = False
        self.__loop_thread = None

    def start(self):
        """Spawn the event-dispatching thread; must not already be running."""
        assert not self.run
        self.__loop_thread = concurrent.thread(
            self.__run, name="libvirt/events", log=log)
        self.run = True
        self.__loop_thread.start()

    def stop(self, wait=True):
        """Ask the loop to terminate; join the thread when wait is True."""
        if not self.run:
            return
        self.run = False
        if wait:
            self.__loop_thread.join()
        self.__loop_thread = None

    def __run(self):
        # Thread body: register libvirt's default event implementation and
        # pump it until stop() clears the run flag. The flag is also cleared
        # on any failure so start() can be called again.
        try:
            libvirt.virEventRegisterDefaultImpl()
            while self.run:
                libvirt.virEventRunDefaultImpl()
        finally:
            self.run = False
# Process-wide singleton; reloading this module would drop registered events,
# so it must be created exactly once.
__event_loop = _EventLoop()


def start_event_loop():
    """Start the process-wide libvirt event loop thread."""
    __event_loop.start()


def stop_event_loop(wait=True):
    """Stop the process-wide libvirt event loop thread.

    When wait is True, block until the dispatching thread has exited.
    """
    __event_loop.stop(wait=wait)
# Cache of wrapped libvirt connections, keyed by id(target) (see get()).
__connections = {}
# Serializes creation and lookup of entries in __connections.
__connectionLock = threading.Lock()
def open_connection(uri=None, username=None, passwd=None):
    """Open and return a fresh, unwrapped libvirt connection.

    Most callers want get() instead, which returns a cached connection
    wrapped with disconnection handling.
    """
    def fill_credentials(credentials, user_data):
        # libvirt calls this back to collect SASL credentials; slot 4 of
        # each credential entry receives the value to use.
        for credential in credentials:
            kind = credential[0]
            if kind == libvirt.VIR_CRED_AUTHNAME:
                credential[4] = username
            elif kind == libvirt.VIR_CRED_PASSPHRASE:
                credential[4] = passwd.value if passwd else None
        return 0

    auth = [
        [libvirt.VIR_CRED_AUTHNAME, libvirt.VIR_CRED_PASSPHRASE],
        fill_credentials,
        None,
    ]
    # libvirtd may still be starting up, so retry the open for a while.
    return function.retry(
        functools.partial(libvirt.openAuth, uri, auth, 0),
        timeout=10, sleep=0.2)
def _clear():
    """
    For clearing connections during the tests.

    Drops every cached connection without closing it; intended for test
    teardown only, never for production code paths.
    """
    with __connectionLock:
        __connections.clear()
def get(target=None, killOnFailure=True):
    """Return current connection to libvirt or open a new one.

    Use target to get/create the connection object linked to that object.
    target must have a callable attribute named 'dispatchLibvirtEvents' which
    will be registered as a callback on libvirt events.

    Wrap methods of connection object so that they catch disconnection, and
    take the current process down (SIGTERM to our own pid) unless
    killOnFailure is False, in which case the error is re-raised instead.
    """
    def wrapMethod(f):
        # Wrap a connection (or domain) method so that libvirt errors that
        # look like a lost connection trigger a liveness probe and, when
        # killOnFailure is set, terminate the calling process.
        @functools.wraps(f)
        def wrapper(*args, **kwargs):
            try:
                ret = f(*args, **kwargs)
                # Domain objects handed out by the connection get the same
                # wrapping, so failures on them are handled identically.
                if isinstance(ret, libvirt.virDomain):
                    for name in dir(ret):
                        method = getattr(ret, name)
                        if callable(method) and name[0] != '_':
                            setattr(ret, name,
                                    wrapMethod(function.weakmethod(method)))
                return ret
            except libvirt.libvirtError as e:
                edom = e.get_error_domain()
                ecode = e.get_error_code()
                # Error domain/code combinations that may indicate a broken
                # connection rather than an ordinary API failure.
                EDOMAINS = (libvirt.VIR_FROM_REMOTE,
                            libvirt.VIR_FROM_RPC)
                ECODES = (libvirt.VIR_ERR_SYSTEM_ERROR,
                          libvirt.VIR_ERR_INTERNAL_ERROR,
                          libvirt.VIR_ERR_NO_CONNECT,
                          libvirt.VIR_ERR_INVALID_CONN)
                if edom in EDOMAINS and ecode in ECODES:
                    try:
                        # Probe the cached connection; pingLibvirt is an
                        # alias for getLibVersion installed below.
                        __connections.get(id(target)).pingLibvirt()
                    except libvirt.libvirtError as e:
                        edom = e.get_error_domain()
                        ecode = e.get_error_code()
                        if edom in EDOMAINS and ecode in ECODES:
                            log.warning('connection to libvirt broken.'
                                        ' ecode: %d edom: %d', ecode, edom)
                            if killOnFailure:
                                log.critical('taking calling process down.')
                                os.kill(os.getpid(), signal.SIGTERM)
                            else:
                                raise
                # Always propagate the original error to the caller.
                raise
        return wrapper

    with __connectionLock:
        conn = __connections.get(id(target))
        if not conn:
            log.debug('trying to connect libvirt')
            password = ProtectedPassword(libvirt_password())
            conn = open_connection('qemu:///system', SASL_USERNAME, password)
            __connections[id(target)] = conn
            # Cheap liveness probe used by the wrappers above.
            setattr(conn, 'pingLibvirt', getattr(conn, 'getLibVersion'))
            # Wrap every public method of the connection object.
            for name in dir(libvirt.virConnect):
                method = getattr(conn, name)
                if callable(method) and name[0] != '_':
                    setattr(conn, name,
                            wrapMethod(function.weakmethod(method)))
            if target is not None:
                for ev in (libvirt.VIR_DOMAIN_EVENT_ID_LIFECYCLE,
                           libvirt.VIR_DOMAIN_EVENT_ID_REBOOT,
                           libvirt.VIR_DOMAIN_EVENT_ID_RTC_CHANGE,
                           libvirt.VIR_DOMAIN_EVENT_ID_IO_ERROR_REASON,
                           libvirt.VIR_DOMAIN_EVENT_ID_GRAPHICS,
                           # Report stable drive name (e.g. vda) in block job
                           # events instead of the drive path which may change
                           # after active commit or block copy. See
                           # virConnectDomainEventBlockJobCallback in libvirt
                           # docs.
                           libvirt.VIR_DOMAIN_EVENT_ID_BLOCK_JOB_2,
                           libvirt.VIR_DOMAIN_EVENT_ID_WATCHDOG,
                           libvirt.VIR_DOMAIN_EVENT_ID_JOB_COMPLETED,
                           libvirt.VIR_DOMAIN_EVENT_ID_DEVICE_REMOVED,
                           libvirt.VIR_DOMAIN_EVENT_ID_BLOCK_THRESHOLD,
                           libvirt.VIR_DOMAIN_EVENT_ID_AGENT_LIFECYCLE):
                    conn.domainEventRegisterAny(None,
                                                ev,
                                                target.dispatchLibvirtEvents,
                                                ev)
            # In case we're running into troubles with keeping the connections
            # alive we should place here:
            # conn.setKeepAlive(interval=5, count=3)
            # However the values need to be considered wisely to not affect
            # hosts which are hosting a lot of virtual machines
        return conn
@cache.memoized
def libvirt_password():
    """Return the SASL password for libvirtd, read from the PKI password file.

    Memoized: the file is read only once per process.
    """
    with io.open(LIBVIRT_PASSWORD_PATH, encoding='utf8') as password_file:
        first_line = password_file.readline()
    return first_line.rstrip("\n")
def __close_connections():
    """atexit hook: close every cached libvirt connection."""
    for connection in __connections.values():
        connection.close()


atexit.register(__close_connections)
| gpl-2.0 |
littlstar/chromium.src | build/vs_toolchain.py | 7 | 7579 | # Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import json
import os
import pipes
import shutil
import subprocess
import sys
# Absolute directory containing this script (the build/ directory).
script_dir = os.path.dirname(os.path.realpath(__file__))
# Chromium source root (parent of build/).
chrome_src = os.path.abspath(os.path.join(script_dir, os.pardir))
SRC_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Make tools/ and the bundled gyp package importable for 'import gyp' below.
sys.path.insert(1, os.path.join(chrome_src, 'tools'))
sys.path.insert(0, os.path.join(chrome_src, 'tools', 'gyp', 'pylib'))
# Cached description of the automatically-downloaded Windows toolchain.
json_data_file = os.path.join(script_dir, 'win_toolchain.json')
import gyp
def SetEnvironmentAndGetRuntimeDllDirs():
  """Sets up os.environ to use the depot_tools VS toolchain with gyp, and
  returns the location of the VS runtime DLLs so they can be copied into
  the output directory after gyp generation.

  Returns None on non-Windows platforms or when the automated toolchain is
  disabled via DEPOT_TOOLS_WIN_TOOLCHAIN=0.
  """
  vs2013_runtime_dll_dirs = None
  # DEPOT_TOOLS_WIN_TOOLCHAIN=0 opts out of the automatically managed
  # toolchain in favor of a locally installed Visual Studio.
  depot_tools_win_toolchain = \
      bool(int(os.environ.get('DEPOT_TOOLS_WIN_TOOLCHAIN', '1')))
  if sys.platform in ('win32', 'cygwin') and depot_tools_win_toolchain:
    if not os.path.exists(json_data_file):
      Update()
    with open(json_data_file, 'r') as tempf:
      toolchain_data = json.load(tempf)

    toolchain = toolchain_data['path']
    version = toolchain_data['version']
    win8sdk = toolchain_data['win8sdk']
    wdk = toolchain_data['wdk']
    # TODO(scottmg): The order unfortunately matters in these. They should be
    # split into separate keys for x86 and x64. (See CopyVsRuntimeDlls call
    # below). http://crbug.com/345992
    vs2013_runtime_dll_dirs = toolchain_data['runtime_dirs']

    os.environ['GYP_MSVS_OVERRIDE_PATH'] = toolchain
    os.environ['GYP_MSVS_VERSION'] = version
    # We need to make sure windows_sdk_path is set to the automated
    # toolchain values in GYP_DEFINES, but don't want to override any
    # other values there.
    gyp_defines_dict = gyp.NameValueListToDict(gyp.ShlexEnv('GYP_DEFINES'))
    gyp_defines_dict['windows_sdk_path'] = win8sdk
    os.environ['GYP_DEFINES'] = ' '.join('%s=%s' % (k, pipes.quote(str(v)))
        for k, v in gyp_defines_dict.iteritems())
    os.environ['WINDOWSSDKDIR'] = win8sdk
    os.environ['WDK_DIR'] = wdk
    # Include the VS runtime in the PATH in case it's not machine-installed.
    runtime_path = ';'.join(vs2013_runtime_dll_dirs)
    os.environ['PATH'] = runtime_path + ';' + os.environ['PATH']
  return vs2013_runtime_dll_dirs
def CopyVsRuntimeDlls(output_dir, runtime_dirs):
  """Copies the VS runtime DLLs from the given |runtime_dirs| to the output
  directory so that even if not system-installed, built binaries are likely to
  be able to run.

  This needs to be run after gyp has been run so that the expected target
  output directories are already created.

  |runtime_dirs| is an (x86_dir, x64_dir) pair; the order matters (see the
  TODO in SetEnvironmentAndGetRuntimeDllDirs).
  """
  # Only meaningful when building on Windows.
  assert sys.platform.startswith(('win32', 'cygwin'))

  def copy_runtime_impl(target, source):
    """Copy |source| to |target| if it doesn't already exist or if it need to be
    updated.
    """
    # Copy only when the destination directory exists (i.e. gyp created it)
    # and the file is missing or stale. shutil.copy2 preserves mtime, so an
    # equal mtime means the target is already up to date.
    if (os.path.isdir(os.path.dirname(target)) and
        (not os.path.isfile(target) or
          os.stat(target).st_mtime != os.stat(source).st_mtime)):
      print 'Copying %s to %s...' % (source, target)
      if os.path.exists(target):
        os.unlink(target)
      shutil.copy2(source, target)

  def copy_runtime(target_dir, source_dir, dll_pattern):
    """Copy both the msvcr and msvcp runtime DLLs, only if the target doesn't
    exist, but the target directory does exist."""
    # dll_pattern contains a single %s filled with 'p' (msvcp) or 'r' (msvcr).
    for which in ('p', 'r'):
      dll = dll_pattern % which
      target = os.path.join(target_dir, dll)
      source = os.path.join(source_dir, dll)
      copy_runtime_impl(target, source)

  x86, x64 = runtime_dirs
  out_debug = os.path.join(output_dir, 'Debug')
  out_debug_nacl64 = os.path.join(output_dir, 'Debug', 'x64')
  out_release = os.path.join(output_dir, 'Release')
  out_release_nacl64 = os.path.join(output_dir, 'Release', 'x64')
  out_debug_x64 = os.path.join(output_dir, 'Debug_x64')
  out_release_x64 = os.path.join(output_dir, 'Release_x64')

  # The x64 subdirectories of the 32-bit output dirs may not have been
  # created by gyp yet; make them if the parent exists.
  if os.path.exists(out_debug) and not os.path.exists(out_debug_nacl64):
    os.makedirs(out_debug_nacl64)
  if os.path.exists(out_release) and not os.path.exists(out_release_nacl64):
    os.makedirs(out_release_nacl64)
  # Debug configurations get the debug CRT (msvc*120d.dll), release ones the
  # non-debug CRT (msvc*120.dll); note x86 DLLs for the 32-bit dirs and x64
  # DLLs for both the _x64 dirs and the nacl64 subdirectories.
  copy_runtime(out_debug, x86, 'msvc%s120d.dll')
  copy_runtime(out_release, x86, 'msvc%s120.dll')
  copy_runtime(out_debug_x64, x64, 'msvc%s120d.dll')
  copy_runtime(out_release_x64, x64, 'msvc%s120.dll')
  copy_runtime(out_debug_nacl64, x64, 'msvc%s120d.dll')
  copy_runtime(out_release_nacl64, x64, 'msvc%s120.dll')

  # Copy the PGO runtime library to the release directories.
  if os.environ.get('GYP_MSVS_OVERRIDE_PATH'):
    pgo_x86_runtime_dir = os.path.join(os.environ.get('GYP_MSVS_OVERRIDE_PATH'),
                                       'VC', 'bin')
    pgo_x64_runtime_dir = os.path.join(pgo_x86_runtime_dir, 'amd64')
    pgo_runtime_dll = 'pgort120.dll'
    source_x86 = os.path.join(pgo_x86_runtime_dir, pgo_runtime_dll)
    if os.path.exists(source_x86):
      copy_runtime_impl(os.path.join(out_release, pgo_runtime_dll), source_x86)
    source_x64 = os.path.join(pgo_x64_runtime_dir, pgo_runtime_dll)
    if os.path.exists(source_x64):
      copy_runtime_impl(os.path.join(out_release_x64, pgo_runtime_dll),
                        source_x64)
def _GetDesiredVsToolchainHashes():
    """Load a list of SHA1s corresponding to the toolchains that we want
    installed to build with.
    """
    hash_file = os.path.join(
        script_dir, '..', 'buildtools', 'toolchain_vs2013.hash')
    # One hash per line; binary mode keeps the bytes exactly as checked in.
    with open(hash_file, 'rb') as stamp:
        contents = stamp.read()
    return contents.strip().splitlines()
def Update():
    """Requests an update of the toolchain to the specific hashes we have at
    this revision. The update outputs a .json of the various configuration
    information required to pass to gyp which we use in |GetToolchainDir()|.
    """
    use_win_toolchain = \
        bool(int(os.environ.get('DEPOT_TOOLS_WIN_TOOLCHAIN', '1')))
    # Only meaningful on Windows hosts, and only when the automatic
    # toolchain hasn't been explicitly disabled via the environment.
    if sys.platform not in ('win32', 'cygwin') or not use_win_toolchain:
        return 0

    import find_depot_tools
    depot_tools_path = find_depot_tools.add_depot_tools_to_path()
    json_data_file = os.path.join(script_dir, 'win_toolchain.json')
    get_toolchain_args = [
        sys.executable,
        os.path.join(depot_tools_path,
                     'win_toolchain',
                     'get_toolchain_if_necessary.py'),
        '--output-json', json_data_file,
    ] + _GetDesiredVsToolchainHashes()
    subprocess.check_call(get_toolchain_args)
    return 0
def GetToolchainDir():
    """Gets location information about the current toolchain (must have been
    previously updated by 'update'). This is used for the GN build."""
    # Side effect: populates GYP_MSVS_* / WINDOWSSDKDIR / WDK_DIR env vars
    # from win_toolchain.json before they are read back below.
    SetEnvironmentAndGetRuntimeDllDirs()
    # NOTE: the continuation lines of the triple-quoted literal are deliberately
    # unindented -- any indentation would leak into the printed GN output.
    print '''vs_path = "%s"
sdk_path = "%s"
vs_version = "%s"
wdk_dir = "%s"
''' % (
        os.environ['GYP_MSVS_OVERRIDE_PATH'],
        os.environ['WINDOWSSDKDIR'],
        os.environ['GYP_MSVS_VERSION'],
        os.environ['WDK_DIR'])
def main():
    # The toolchain helpers only apply on Windows hosts; no-op elsewhere so
    # cross-platform build scripts can invoke this unconditionally.
    if not sys.platform.startswith(('win32', 'cygwin')):
        return 0

    commands = {
        'update': Update,
        'get_toolchain_dir': GetToolchainDir,
        # TODO(scottmg): Add copy_dlls for GN builds (gyp_chromium calls
        # CopyVsRuntimeDlls via import, currently).
    }
    if len(sys.argv) < 2 or sys.argv[1] not in commands:
        # Python 2 "print chevron" syntax writes the usage hint to stderr.
        print >>sys.stderr, 'Expected one of: %s' % ', '.join(commands)
        return 1
    # Dispatch to the selected sub-command and propagate its exit code.
    return commands[sys.argv[1]]()


if __name__ == '__main__':
    sys.exit(main())
| bsd-3-clause |
sujitbehera27/MyRoboticsProjects-Arduino | src/resource/Python/examples/ChessGame.mehtaatur.py | 4 | 2559 | from time import sleep
from org.myrobotlab.service import Speech
from org.myrobotlab.service import Runtime

# Start the required MyRobotLab services (this is a Jython script executed
# inside MRL's Python service).
serial = Runtime.createAndStart("serial","Serial")
chessgame = Runtime.createAndStart("chessgame", "ChessGame")
log = Runtime.createAndStart("log", "Log")
speech = Runtime.create("speech","Speech")

# Configure the Speech service and announce the start of the game.
speech.startService()
speech.setLanguage("en")
speech.speak("Game Begins!")

# Serial-parser state shared with input(): chessMove accumulates the
# 4-character move string, count tracks how many characters were read.
count = 0
chessMove = ""

# Connect the Arduino via serial.
# NOTE(review): the port "COM9" is hard-coded -- confirm against the machine.
if not serial.isConnected():
    serial.connect("COM9")

# Wire service events to the handlers defined below: serial bytes go to
# input(), computer moves go to voice() and the log, and our moves are
# written back out over serial.
serial.addListener("publishByte", python.name, "input")
chessgame.addListener("computerMoved", python.name, "voice")
chessgame.addListener("computerMoved", log.name, "log")
chessgame.addListener("makeMove", serial.name, "write")
# Function taking input from Serial and sending the data to the Chess Game.
# The name shadows the builtin 'input', but it must stay: it is registered by
# name in serial.addListener("publishByte", python.name, "input") above.
def input():
    global chessMove
    global count
    # msg_serial_publishByte is injected by MRL when the event fires.
    code = msg_serial_publishByte.data[0]
    if (code !=10 and code != 13 and count < 4):
        # Accumulate up to four printable characters (10/13 are LF/CR).
        chessMove += chr(code)
        count += 1
    elif (code == 10 and count == 4):
        # A full 4-char move terminated by LF: play it and confirm aloud.
        chessgame.move(chessMove)
        part1 = chessMove[0:2]
        part2 = chessMove[2:4]
        feedback = "You Played " + part1 + " to " + part2
        speech.speak(feedback)
        print feedback
        # Reset the accumulator for the next move.
        count = 0
        chessMove = ""
# Function "voice" which decodes the move played by the computer and gives a voice feedback
def voice():
incoming = msg_chessgame_computerMoved.data[0]
x = y = z = m = False
x = incoming.startswith("B")
y = incoming.startswith("N")
z = incoming.startswith("Q")
m = incoming.startswith("R")
if ( x == True):
part1 = incoming[1:3]
part2 = incoming[4:6]
feedback = "Computer played Bishop from " + part1 + " to " + part2
speech.speak(feedback)
print feedback
if ( y == True):
part1 = incoming[1:3]
part2 = incoming[4:6]
feedback = "Computer played Knight from " + part1 + " to " + part2
speech.speak(feedback)
print feedback
if ( z == True):
part1 = incoming[1:3]
part2 = incoming[4:6]
feedback = "Computer played Queen from " + part1 + " to " + part2
speech.speak(feedback)
print feedback
if ( m == True):
part1 = incoming[1:3]
part2 = incoming[4:6]
feedback = "Computer played Rook from " + part1 + " to " + part2
speech.speak(feedback)
print feedback
if ( (m == False) and (y == False) and (x == False) and (z == False) ):
part1 = incoming[0:2]
part2 = incoming[3:5]
feedback = "Computer played Pawn from " + part1 + " to " + part2
speech.speak(feedback)
print feedback
| apache-2.0 |
BlindHunter/django | django/core/checks/messages.py | 103 | 2396 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.utils.encoding import force_str, python_2_unicode_compatible
# Levels
# Numeric severities mirroring the stdlib logging levels; higher values are
# more severe.  CheckMessage.is_serious() compares self.level against these.
DEBUG = 10
INFO = 20
WARNING = 30
ERROR = 40
CRITICAL = 50
@python_2_unicode_compatible
class CheckMessage(object):
    """A single message produced by the system-check framework.

    Carries a numeric severity ``level``, the message text ``msg``, an
    optional ``hint``, the object the check applies to, and an optional
    message ``id``.
    """

    def __init__(self, level, msg, hint=None, obj=None, id=None):
        assert isinstance(level, int), "The first argument should be level."
        self.level = level
        self.msg = msg
        self.hint = hint
        self.obj = obj
        self.id = id

    def __eq__(self, other):
        # Compare field by field; attributes are fetched lazily so a mismatch
        # short-circuits before touching the remaining fields of `other`.
        for attr in ['level', 'msg', 'hint', 'obj', 'id']:
            if not (getattr(self, attr) == getattr(other, attr)):
                return False
        return True

    def __ne__(self, other):
        return not (self == other)

    def __str__(self):
        from django.db import models

        if self.obj is None:
            obj_label = "?"
        elif isinstance(self.obj, models.base.ModelBase):
            # ModelBase (and Field) have to be hardcoded here because their
            # __str__ doesn't return "applabel.modellabel" and can't change.
            obj_label = self.obj._meta.label
        else:
            obj_label = force_str(self.obj)
        if self.id:
            id_part = "(%s) " % self.id
        else:
            id_part = ""
        if self.hint:
            hint_part = "\n\tHINT: %s" % self.hint
        else:
            hint_part = ''
        return "%s: %s%s%s" % (obj_label, id_part, self.msg, hint_part)

    def __repr__(self):
        details = (self.__class__.__name__, self.level, self.msg,
                   self.hint, self.obj, self.id)
        return "<%s: level=%r, msg=%r, hint=%r, obj=%r, id=%r>" % details

    def is_serious(self, level=ERROR):
        return self.level >= level

    def is_silenced(self):
        from django.conf import settings
        return self.id in settings.SILENCED_SYSTEM_CHECKS
class Debug(CheckMessage):
    """A CheckMessage fixed at the DEBUG level."""
    def __init__(self, *args, **kwargs):
        super(Debug, self).__init__(DEBUG, *args, **kwargs)


class Info(CheckMessage):
    """A CheckMessage fixed at the INFO level."""
    def __init__(self, *args, **kwargs):
        super(Info, self).__init__(INFO, *args, **kwargs)


class Warning(CheckMessage):
    """A CheckMessage fixed at the WARNING level (shadows builtin Warning)."""
    def __init__(self, *args, **kwargs):
        super(Warning, self).__init__(WARNING, *args, **kwargs)


class Error(CheckMessage):
    """A CheckMessage fixed at the ERROR level."""
    def __init__(self, *args, **kwargs):
        super(Error, self).__init__(ERROR, *args, **kwargs)


class Critical(CheckMessage):
    """A CheckMessage fixed at the CRITICAL level."""
    def __init__(self, *args, **kwargs):
        super(Critical, self).__init__(CRITICAL, *args, **kwargs)
| bsd-3-clause |
endlessm/chromium-browser | third_party/catapult/third_party/google-endpoints/future/builtins/newnext.py | 70 | 2014 | '''
This module provides a newnext() function in Python 2 that mimics the
behaviour of ``next()`` in Python 3, falling back to Python 2's behaviour for
compatibility if this fails.
``newnext(iterator)`` calls the iterator's ``__next__()`` method if it exists. If this
doesn't exist, it falls back to calling a ``next()`` method.
For example:
>>> class Odds(object):
... def __init__(self, start=1):
... self.value = start - 2
... def __next__(self): # note the Py3 interface
... self.value += 2
... return self.value
... def __iter__(self):
... return self
...
>>> iterator = Odds()
>>> next(iterator)
1
>>> next(iterator)
3
If you are defining your own custom iterator class as above, it is preferable
to explicitly decorate the class with the @implements_iterator decorator from
``future.utils`` as follows:
>>> @implements_iterator
... class Odds(object):
... # etc
... pass
This next() function is primarily for consuming iterators defined in Python 3
code elsewhere that we would like to run on Python 2 or 3.
'''
# Keep a handle on the builtin in case callers shadow ``next``.
_builtin_next = next

# Private sentinel: lets ``None`` be a legitimate *default* value.
_SENTINEL = object()


def newnext(iterator, default=_SENTINEL):
    """
    next(iterator[, default])

    Return the next item from the iterator. If default is given and the iterator
    is exhausted, it is returned instead of raising StopIteration.
    """
    try:
        # Prefer the Python 3 protocol; fall back to the Python 2 ``next()``
        # method, and finally report that the object is not an iterator at all.
        try:
            return iterator.__next__()
        except AttributeError:
            try:
                return iterator.next()
            except AttributeError:
                raise TypeError("'{0}' object is not an iterator".format(
                    iterator.__class__.__name__))
    except StopIteration as exc:
        if default is not _SENTINEL:
            return default
        raise exc


__all__ = ['newnext']
| bsd-3-clause |
insin/forum | forum/settings.py | 2 | 5741 | # Django settings for using the forum application as a standalone project.
import os
DIRNAME = os.path.dirname(__file__)
DEBUG = True
TEMPLATE_DEBUG = DEBUG
ADMINS = (
# ('Your Name', 'your_email@domain.com'),
)
MANAGERS = ADMINS
INTERNAL_IPS = ('127.0.0.1',)
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3', # Add 'postgresql_psycopg2', 'postgresql', 'mysql', 'sqlite3' or 'oracle'.
'NAME': os.path.join(DIRNAME, 'database.db'), # Or path to database file if using sqlite3.
'USER': '', # Not used with sqlite3.
'PASSWORD': '', # Not used with sqlite3.
'HOST': '', # Set to empty string for localhost. Not used with sqlite3.
'PORT': '', # Set to empty string for default. Not used with sqlite3.
}
}
# Local time zone for this installation. Choices can be found here:
# http://en.wikipedia.org/wiki/List_of_tz_zones_by_name
# although not all choices may be available on all operating systems.
# On Unix systems, a value of None will cause Django to use the same
# timezone as the operating system.
# If running in a Windows environment this must be set to the same as your
# system time zone.
TIME_ZONE = 'Europe/Belfast'
# Language code for this installation. All choices can be found here:
# http://www.i18nguy.com/unicode/language-identifiers.html
LANGUAGE_CODE = 'en-gb'
SITE_ID = 1
# If you set this to False, Django will make some optimizations so as not
# to load the internationalization machinery.
USE_I18N = True
# If you set this to False, Django will not format dates, numbers and
# calendars according to the current locale
USE_L10N = True
# Absolute filesystem path to the directory that will hold user-uploaded files.
# Example: "/home/media/media.lawrence.com/media/"
MEDIA_ROOT = ''
# URL that handles the media served from MEDIA_ROOT. Make sure to use a
# trailing slash.
# Examples: "http://media.lawrence.com/media/", "http://example.com/media/"
MEDIA_URL = ''
# Absolute path to the directory static files should be collected to.
# Don't put anything in this directory yourself; store your static files
# in apps' "static/" subdirectories and in STATICFILES_DIRS.
# Example: "/home/media/media.lawrence.com/static/"
STATIC_ROOT = ''
# URL prefix for static files.
# Example: "http://media.lawrence.com/static/"
STATIC_URL = '/static/'
# URL prefix for admin static files -- CSS, JavaScript and images.
# Make sure to use a trailing slash.
# Examples: "http://foo.com/static/admin/", "/static/admin/".
ADMIN_MEDIA_PREFIX = '/static/admin/'
# Additional locations of static files
STATICFILES_DIRS = (
# Put strings here, like "/home/html/static" or "C:/www/django/static".
# Always use forward slashes, even on Windows.
# Don't forget to use absolute paths, not relative paths.
)
# List of finder classes that know how to find static files in
# various locations.
STATICFILES_FINDERS = (
'django.contrib.staticfiles.finders.FileSystemFinder',
'django.contrib.staticfiles.finders.AppDirectoriesFinder',
# 'django.contrib.staticfiles.finders.DefaultStorageFinder',
)
# Make this unique, and don't share it with anybody.
# SECURITY NOTE(review): this key is committed to the repository; any real
# deployment must override it (e.g. load it from an environment variable).
SECRET_KEY = '4z-(+=l(wkd)1aj+wn)(r%9684uj2589o&uu_w$ids#ww='
# List of callables that know how to import templates from various sources.
TEMPLATE_LOADERS = (
'django.template.loaders.filesystem.Loader',
'django.template.loaders.app_directories.Loader',
# 'django.template.loaders.eggs.Loader',
)
MIDDLEWARE_CLASSES = [
'django.middleware.common.CommonMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
]
ROOT_URLCONF = 'forum.urls'
TEMPLATE_DIRS = (
# Put strings here, like "/home/html/django_templates" or "C:/www/django/templates".
# Always use forward slashes, even on Windows.
# Don't forget to use absolute paths, not relative paths.
os.path.join(DIRNAME, 'templates'),
)
INSTALLED_APPS = [
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.sites',
'django.contrib.messages',
'django.contrib.staticfiles',
'django.contrib.admin',
'django.contrib.humanize',
'registration',
'forum',
]
# A sample logging configuration. The only tangible logging
# performed by this configuration is to send an email to
# the site admins on every HTTP 500 error.
# See http://docs.djangoproject.com/en/dev/topics/logging for
# more details on how to customize your logging configuration.
LOGGING = {
'version': 1,
'disable_existing_loggers': False,
'handlers': {
'mail_admins': {
'level': 'ERROR',
'class': 'django.utils.log.AdminEmailHandler'
}
},
'loggers': {
'django.request': {
'handlers': ['mail_admins'],
'level': 'ERROR',
'propagate': True,
},
}
}
if DEBUG:
    # Enable django-debug-toolbar only when the package is importable;
    # skipping it silently keeps installs without the package working.
    try:
        import debug_toolbar
        MIDDLEWARE_CLASSES.append('debug_toolbar.middleware.DebugToolbarMiddleware')
        INSTALLED_APPS.append('debug_toolbar')
    except ImportError:
        pass
# Auth settings
LOGIN_URL = '/accounts/login/'
LOGIN_REDIRECT_URL = '/'
# Session settings
SESSION_ENGINE = 'forum.sessions.redis_session_backend'
# Registration settings
ACCOUNT_ACTIVATION_DAYS = 10
# Forum settings
FORUM_STANDALONE = True
FORUM_USE_REDIS = True
FORUM_USE_NODEJS = True
FORUM_POST_FORMATTER = 'forum.formatters.BBCodeFormatter'
| mit |
ininex/geofire-python | resource/lib/python2.7/site-packages/gcloud/streaming/test_exceptions.py | 8 | 2973 | import unittest2
class Test_HttpError(unittest2.TestCase):
    """Unit tests for gcloud.streaming.exceptions.HttpError."""

    def _getTargetClass(self):
        from gcloud.streaming.exceptions import HttpError
        return HttpError

    def _makeOne(self, *args, **kw):
        return self._getTargetClass()(*args, **kw)

    def test_ctor(self):
        response = {'status': '404'}
        content = b'CONTENT'
        url = 'http://www.example.com'
        exception = self._makeOne(response, content, url)
        self.assertEqual(exception.response, response)
        self.assertEqual(exception.content, content)
        self.assertEqual(exception.url, url)
        # status_code is parsed out of the response 'status' string.
        self.assertEqual(exception.status_code, 404)
        expected = ("HttpError accessing <http://www.example.com>: "
                    "response: <{'status': '404'}>, content <CONTENT>")
        self.assertEqual(str(exception), expected)

    def test_from_response(self):
        resp_info = {'status': '404'}
        body = b'CONTENT'
        target_url = 'http://www.example.com'

        class _Response(object):
            # Duck-typed stand-in for an httplib2-style response object.
            info = resp_info
            content = body
            request_url = target_url

        klass = self._getTargetClass()
        exception = klass.from_response(_Response())
        self.assertTrue(isinstance(exception, klass))
        self.assertEqual(exception.response, resp_info)
        self.assertEqual(exception.content, body)
        self.assertEqual(exception.url, target_url)
class Test_RetryAfterError(unittest2.TestCase):
    """Unit tests for gcloud.streaming.exceptions.RetryAfterError."""

    def _getTargetClass(self):
        from gcloud.streaming.exceptions import RetryAfterError
        return RetryAfterError

    def _makeOne(self, *args, **kw):
        return self._getTargetClass()(*args, **kw)

    def test_ctor(self):
        response = {'status': '404'}
        content = b'CONTENT'
        url = 'http://www.example.com'
        retry_after = 60
        exception = self._makeOne(response, content, url, retry_after)
        self.assertEqual(exception.response, response)
        self.assertEqual(exception.content, content)
        self.assertEqual(exception.url, url)
        self.assertEqual(exception.retry_after, retry_after)
        # str() output is inherited from HttpError.
        expected = ("HttpError accessing <http://www.example.com>: "
                    "response: <{'status': '404'}>, content <CONTENT>")
        self.assertEqual(str(exception), expected)

    def test_from_response(self):
        resp_info = {'status': '404'}
        body = b'CONTENT'
        target_url = 'http://www.example.com'
        wait_seconds = 60

        class _Response(object):
            # Duck-typed stand-in for an httplib2-style response object.
            info = resp_info
            content = body
            request_url = target_url
            retry_after = wait_seconds

        klass = self._getTargetClass()
        exception = klass.from_response(_Response())
        self.assertTrue(isinstance(exception, klass))
        self.assertEqual(exception.response, resp_info)
        self.assertEqual(exception.content, body)
        self.assertEqual(exception.url, target_url)
        self.assertEqual(exception.retry_after, wait_seconds)
| mit |
ychen820/microblog | y/google-cloud-sdk/platform/google_appengine/lib/django-1.4/django/template/smartif.py | 90 | 6245 | """
Parser and utilities for the smart 'if' tag
"""
# Using a simple top down parser, as described here:
# http://effbot.org/zone/simple-top-down-parsing.htm.
# 'led' = left denotation
# 'nud' = null denotation
# 'bp' = binding power (left = lbp, right = rbp)
class TokenBase(object):
    """
    Base class for operators and literals, mainly for debugging and for
    raising syntax errors.
    """
    id = None  # node/token type name
    value = None  # used by literals
    first = second = None  # used by tree nodes

    def nud(self, parser):
        # Null denotation - called in prefix context
        message = "Not expecting '%s' in this position in if tag." % self.id
        raise parser.error_class(message)

    def led(self, left, parser):
        # Left denotation - called in infix context
        message = "Not expecting '%s' as infix operator in if tag." % self.id
        raise parser.error_class(message)

    def display(self):
        """Return what to display in error messages for this node."""
        return self.id

    def __repr__(self):
        shown = (self.id, self.first, self.second)
        body = " ".join(str(part) for part in shown if part is not None)
        return "(" + body + ")"


def infix(bp, func):
    """
    Create an infix operator, given a binding power and a function that
    evaluates the node.
    """
    class Operator(TokenBase):
        lbp = bp

        def led(self, left, parser):
            self.first = left
            self.second = parser.expression(bp)
            return self

        def eval(self, context):
            try:
                result = func(context, self.first, self.second)
            except Exception:
                # Templates shouldn't throw exceptions when rendering; the
                # common case is {% if foo in bar %} where 'bar' doesn't
                # support 'in', so collapse any failure to False.
                return False
            return result

    return Operator


def prefix(bp, func):
    """
    Create a prefix operator, given a binding power and a function that
    evaluates the node.
    """
    class Operator(TokenBase):
        lbp = bp

        def nud(self, parser):
            self.first = parser.expression(bp)
            self.second = None
            return self

        def eval(self, context):
            try:
                result = func(context, self.first)
            except Exception:
                # Same policy as infix: never raise during rendering.
                return False
            return result

    return Operator
# Operator precedence follows Python.
# NB - we can get slightly more accurate syntax error messages by not using the
# same object for '==' and '='.
# We defer variable evaluation to the lambda to ensure that terms are
# lazily evaluated using Python's boolean parsing logic.
# Binding powers: 'or' (6) < 'and' (7) < 'not' (8) < 'in'/'not in' (9)
# < comparisons (10), matching Python's own precedence ordering.
OPERATORS = {
    'or': infix(6, lambda context, x, y: x.eval(context) or y.eval(context)),
    'and': infix(7, lambda context, x, y: x.eval(context) and y.eval(context)),
    'not': prefix(8, lambda context, x: not x.eval(context)),
    'in': infix(9, lambda context, x, y: x.eval(context) in y.eval(context)),
    'not in': infix(9, lambda context, x, y: x.eval(context) not in y.eval(context)),
    '=': infix(10, lambda context, x, y: x.eval(context) == y.eval(context)),
    '==': infix(10, lambda context, x, y: x.eval(context) == y.eval(context)),
    '!=': infix(10, lambda context, x, y: x.eval(context) != y.eval(context)),
    '>': infix(10, lambda context, x, y: x.eval(context) > y.eval(context)),
    '>=': infix(10, lambda context, x, y: x.eval(context) >= y.eval(context)),
    '<': infix(10, lambda context, x, y: x.eval(context) < y.eval(context)),
    '<=': infix(10, lambda context, x, y: x.eval(context) <= y.eval(context)),
}

# Assign 'id' to each:
for key, op in OPERATORS.items():
    op.id = key
class Literal(TokenBase):
    """
    A basic self-resolvable object similar to a Django template variable.
    """
    # IfParser uses Literal in create_var, but TemplateIfParser overrides
    # create_var so that a proper implementation that actually resolves
    # variables, filters etc is used.
    id = "literal"
    lbp = 0

    def __init__(self, value):
        self.value = value

    def nud(self, parser):
        # A literal is already a complete expression on its own.
        return self

    def eval(self, context):
        # Resolution ignores the context: the value is fixed at parse time.
        return self.value

    def display(self):
        return repr(self.value)

    def __repr__(self):
        return "(%s %r)" % (self.id, self.value)
class EndToken(TokenBase):
    # Sentinel returned when the token stream is exhausted; lbp 0 terminates
    # the precedence-climbing loop in IfParser.expression.
    lbp = 0

    def nud(self, parser):
        # Reaching the end where an operand was expected is a syntax error.
        raise parser.error_class("Unexpected end of expression in if tag.")

# Only one instance is ever needed, so the class name is rebound to it.
EndToken = EndToken()
class IfParser(object):
    """Top-down (Pratt) parser for smart-if expressions.

    Tokens are translated into operator instances via OPERATORS (falling back
    to Literal for variables), then parsed by precedence climbing in
    expression().
    """
    error_class = ValueError

    def __init__(self, tokens):
        # pre-pass necessary to turn 'not','in' into single token
        l = len(tokens)
        mapped_tokens = []
        i = 0
        while i < l:
            token = tokens[i]
            if token == "not" and i + 1 < l and tokens[i+1] == "in":
                token = "not in"
                i += 1  # skip 'in'
            mapped_tokens.append(self.translate_token(token))
            i += 1

        self.tokens = mapped_tokens
        self.pos = 0
        # Prime the one-token lookahead used by expression()/parse().
        self.current_token = self.next()

    def translate_token(self, token):
        # TypeError is caught too because non-hashable tokens (e.g. lists)
        # cannot be looked up in the OPERATORS dict.
        try:
            op = OPERATORS[token]
        except (KeyError, TypeError):
            return self.create_var(token)
        else:
            return op()

    def next(self):
        # Advance the stream, yielding the EndToken sentinel once exhausted.
        # (Named 'next' historically; it is not the iterator protocol here.)
        if self.pos >= len(self.tokens):
            return EndToken
        else:
            retval = self.tokens[self.pos]
            self.pos += 1
            return retval

    def parse(self):
        retval = self.expression()
        # Check that we have exhausted all the tokens
        if self.current_token is not EndToken:
            raise self.error_class("Unused '%s' at end of if expression." %
                                   self.current_token.display())
        return retval

    def expression(self, rbp=0):
        # Standard precedence climbing: consume a prefix expression, then
        # keep folding infix operators while their binding power exceeds rbp.
        t = self.current_token
        self.current_token = self.next()
        left = t.nud(self)
        while rbp < self.current_token.lbp:
            t = self.current_token
            self.current_token = self.next()
            left = t.led(left, self)
        return left

    def create_var(self, value):
        # Overridden by TemplateIfParser to build real template variables.
        return Literal(value)
| bsd-3-clause |
ChandraAddala/blueflood | contrib/enum_metrics/enum_metrics/errant_enums.py | 3 | 7747 | import sys
import argparse
import json
import dbclient as db
import esclient as es
import config as cf
LOCALHOST = 'localhost'
def parse_arguments(args):
    """Parses the supplied arguments"""
    parser = argparse.ArgumentParser(prog="errant_enums.py", description='Script to delete errant enums')
    subparsers = parser.add_subparsers(help='commands')

    # A list command to list all excess enums.
    list_parser = subparsers.add_parser('list', help='List all excess enums')

    # A delete command to delete a given excess enum.
    delete_parser = subparsers.add_parser('delete', help='Delete errant enum')
    delete_parser.add_argument('--dryrun',
                               action='store_true', help='Display errant enums related data for the given metric name.')
    delete_parser.add_argument('-m', '--metricName',
                               required=True, help='metric name to be deleted')
    delete_parser.add_argument('-t', '--tenantId',
                               required=True, help='tenantId corresponding to the metric name to be deleted')

    # Both sub-commands accept an environment selector (defaults to localhost).
    for p in [list_parser, delete_parser]:
        p.add_argument('-e', '--env', choices=cf.Config.get_environments(),
                       default=LOCALHOST, help='Environment we are pointing to')

    args = parser.parse_args(args)
    # Python 2 print statement: echoes the parsed namespace for the operator.
    print args
    return args
def list_excess_enums(db_client):
    """Print every row of the metrics_excess_enums column family."""
    metrics_excess_enums = db_client.get_all_metrics_excess_enums()

    print '\n**** Listing all excess enums ****\n'
    print 'Column family: metrics_excess_enums'
    for x in metrics_excess_enums: print x
def clear_excess_enums(args, db_client, es_client):
    """Delete one errant enum's data from Cassandra, then from Elasticsearch.

    The ES deletion is only performed when Cassandra actually had rows for
    the metric (is_excess_enum), preventing removal of legitimate ES docs.
    """
    total_records_deleted = clear_from_db(db_client=db_client, metric_name=args.metricName,
                                          tenant_id=args.tenantId, dryrun=args.dryrun)

    # bool() replaces the redundant "True if x else False" conditional.
    is_excess_enum = bool(total_records_deleted)
    clear_from_es(es_client=es_client, metric_name=args.metricName, tenant_id=args.tenantId,
                  dryrun=args.dryrun, is_excess_enum=is_excess_enum)
def clear_from_db(db_client, metric_name, tenant_id, dryrun):
    """
    Returns total number of records being deleted from all tables
    """
    print '\n***** Deleting from Cassandra *****\n'
    excess_enum_related_dict = db_client.get_excess_enums_relevant_data(tenant_id=tenant_id, metric_name=metric_name)
    # Always show what would be (or was) deleted, even on a dry run.
    print_excess_enums_relevant_data(excess_enum_related_dict, tenant_id=tenant_id, metric_name=metric_name)

    if not dryrun:
        delete_excess_enums_relevant_data(db_client, excess_enum_related_dict)

    db_client.close()
    # Python 2 dict.itervalues(); each value is the row list of one table.
    return sum(len(x) for x in excess_enum_related_dict.itervalues())
def clear_from_es(es_client, metric_name, tenant_id, dryrun, is_excess_enum):
    """Delete the metric's documents from Elasticsearch.

    Deletion only happens when not a dry run AND Cassandra confirmed the
    metric is an excess enum; otherwise the data is merely displayed.
    """
    print '\n***** Deleting from Elastic Cluster *****\n'
    metric_metadata = es_client.get_metric_metadata(metric_name=metric_name, tenant_id=tenant_id)
    enums_data = es_client.get_enums_data(metric_name=metric_name, tenant_id=tenant_id)

    # Safety net: ES has documents but Cassandra disagrees -- warn and keep.
    if (metric_metadata['found'] or enums_data['found']) and not is_excess_enum:
        print "**** WARNING: Records exists in ES but is not an excess enum as per cassandra. " \
              "Data wont be deleted from ES. ****"

    print_enum_related_data(metric_metadata, enums_data)

    # Delete from ES only if it is an excess_enum as per cassandra
    if (not dryrun) and is_excess_enum:
        if metric_metadata['found']:
            es_client.delete_metric_metadata(metric_name=metric_name, tenant_id=tenant_id)
        else:
            print 'Document NOT FOUND in index metric_metadata for id: [%s] routing: [%s]' % \
                  (metric_metadata['_id'], tenant_id)

        if enums_data['found']:
            es_client.delete_enums_data(metric_name=metric_name, tenant_id=tenant_id)
        else:
            print 'Document NOT FOUND in index enums for id: [%s] routing: [%s]' % \
                  (enums_data['_id'], tenant_id)
def delete_excess_enums_relevant_data(client, excess_enum_related_dict):
    """Delete the excess-enum row plus every preaggregated rollup row for it."""
    # Guard: nothing to delete when the lookup returned no data.
    if excess_enum_related_dict:
        key = excess_enum_related_dict['metrics_excess_enums'][0][0]  # Grabbing the key from the first row

        print 'Deleting metrics data related to excess enum key: [%s] \n' % key
        # The same row key is shared by all rollup granularities.
        client.delete_metrics_excess_enums(key)
        client.delete_metrics_preaggregated_full(key)
        client.delete_metrics_preaggregated_5m(key)
        client.delete_metrics_preaggregated_20m(key)
        client.delete_metrics_preaggregated_60m(key)
        client.delete_metrics_preaggregated_240m(key)
        client.delete_metrics_preaggregated_1440m(key)
        print '\nDeleted successfully metrics data related to excess enum key: [%s] \n' % key
def print_excess_enums_relevant_data(excess_enum_related_dict, tenant_id, metric_name):
print '\nColumn family: metrics_excess_enums'
if not excess_enum_related_dict:
print 'Row NOT FOUND for tenant_id: [%s] metric_name: [%s] ' % (tenant_id, metric_name)
else:
for x in excess_enum_related_dict['metrics_excess_enums']: print x
key = excess_enum_related_dict['metrics_excess_enums'][0][0] # Grabbing the key from the first row
print '\nColumn family: metrics_preaggregated_full'
if not excess_enum_related_dict['metrics_preaggregated_full']:
print 'Row corresponding to excess_enums [%s] NOT FOUND' % key
for x in excess_enum_related_dict['metrics_preaggregated_full']: print x
print '\nColumn family: metrics_preaggregated_5m'
if not excess_enum_related_dict['metrics_preaggregated_5m']:
print 'Row corresponding to excess_enums [%s] NOT FOUND' % key
for x in excess_enum_related_dict['metrics_preaggregated_5m']: print x
print '\nColumn family: metrics_preaggregated_20m'
if not excess_enum_related_dict['metrics_preaggregated_20m']:
print 'Row corresponding to excess_enums [%s] NOT FOUND' % key
for x in excess_enum_related_dict['metrics_preaggregated_20m']: print x
print '\nColumn family: metrics_preaggregated_60m'
if not excess_enum_related_dict['metrics_preaggregated_60m']:
print 'Row corresponding to excess_enums [%s] NOT FOUND' % key
for x in excess_enum_related_dict['metrics_preaggregated_60m']: print x
print '\nColumn family: metrics_preaggregated_240m'
if not excess_enum_related_dict['metrics_preaggregated_240m']:
print 'Row corresponding to excess_enums [%s] NOT FOUND' % key
for x in excess_enum_related_dict['metrics_preaggregated_240m']: print x
print '\nColumn family: metrics_preaggregated_1440m'
if not excess_enum_related_dict['metrics_preaggregated_1440m']:
print 'Row corresponding to excess_enums [%s] NOT FOUND' % key
for x in excess_enum_related_dict['metrics_preaggregated_1440m']: print x
print '\n'
def print_enum_related_data(metric_meta_data, enums_data):
    """Dump the ES metric_metadata and enums documents as indented JSON."""
    # NOTE: the JSON body is printed even in the NOT FOUND case, so the raw
    # ES response stays visible to the operator.
    print '\nmetric_metadata:' if metric_meta_data['found'] else 'metric_metadata NOT FOUND: '
    print json.dumps(metric_meta_data, indent=2)

    print '\nenums:' if enums_data['found'] else 'enums NOT FOUND: '
    print json.dumps(enums_data, indent=2)
def main():
    """Entry point: parse args, build clients, dispatch list/delete."""
    args = parse_arguments(sys.argv[1:])
    config = cf.Config(args.env.lower())

    db_client = db.DBClient()
    db_client.connect(config.get_cassandra_nodes())
    es_client = es.ESClient(config.get_es_nodes())

    # 'delete' command has the namespace: Namespace(dryrun=True, metricName='metric_name', tenantId='tenant_id')
    # so the presence of 'dryrun' distinguishes 'delete' from 'list'.
    if 'dryrun' in args:
        clear_excess_enums(args, db_client, es_client)
    else:
        list_excess_enums(db_client)


if __name__ == "__main__":
    main()
| apache-2.0 |
Garrett-R/scikit-learn | examples/linear_model/plot_polynomial_interpolation.py | 251 | 1895 | #!/usr/bin/env python
"""
========================
Polynomial interpolation
========================
This example demonstrates how to approximate a function with a polynomial of
degree n_degree by using ridge regression. Concretely, from n_samples 1d
points, it suffices to build the Vandermonde matrix, which is n_samples x
n_degree+1 and has the following form:
[[1, x_1, x_1 ** 2, x_1 ** 3, ...],
[1, x_2, x_2 ** 2, x_2 ** 3, ...],
...]
Intuitively, this matrix can be interpreted as a matrix of pseudo features (the
points raised to some power). The matrix is akin to (but different from) the
matrix induced by a polynomial kernel.
This example shows that you can do non-linear regression with a linear model,
using a pipeline to add non-linear features. Kernel methods extend this idea
and can induce very high (even infinite) dimensional feature spaces.
"""
print(__doc__)
# Author: Mathieu Blondel
# Jake Vanderplas
# License: BSD 3 clause
import numpy as np
import matplotlib.pyplot as plt
from sklearn.linear_model import Ridge
from sklearn.preprocessing import PolynomialFeatures
from sklearn.pipeline import make_pipeline
def f(x):
    """Function to approximate by polynomial interpolation."""
    return np.sin(x) * x
# generate points used to plot
x_plot = np.linspace(0, 10, 100)

# generate points and keep a subset of them
x = np.linspace(0, 10, 100)
rng = np.random.RandomState(0)
rng.shuffle(x)
# Keep 20 random sample locations, sorted so line plots render correctly.
x = np.sort(x[:20])
y = f(x)

# create matrix versions of these arrays (2D column vectors, as expected by
# scikit-learn estimators)
X = x[:, np.newaxis]
X_plot = x_plot[:, np.newaxis]

plt.plot(x_plot, f(x_plot), label="ground truth")
plt.scatter(x, y, label="training points")

# Fit a pipeline (polynomial feature expansion followed by ridge regression)
# for several degrees and overlay each fitted curve.
for degree in [3, 4, 5]:
    model = make_pipeline(PolynomialFeatures(degree), Ridge())
    model.fit(X, y)
    y_plot = model.predict(X_plot)
    plt.plot(x_plot, y_plot, label="degree %d" % degree)

plt.legend(loc='lower left')

plt.show()
| bsd-3-clause |
charris/numpy | numpy/ma/tests/test_regression.py | 16 | 3079 | import numpy as np
from numpy.testing import (
assert_, assert_array_equal, assert_allclose, suppress_warnings
)
class TestRegression:
    """Regression tests for historical numpy.ma bug-tracker tickets."""

    def test_masked_array_create(self):
        # Ticket #17
        marr = np.ma.masked_array([0, 1, 2, 3, 0, 4, 5, 6],
                                  mask=[0, 0, 0, 1, 1, 1, 0, 0])
        assert_array_equal(np.ma.nonzero(marr), [[1, 2, 6, 7]])

    def test_masked_array(self):
        # Ticket #61: scalar data with a mask must not crash.
        np.ma.array(1, mask=[1])

    def test_mem_masked_where(self):
        # Ticket #62: arithmetic with masked_where result must not leak/crash.
        from numpy.ma import masked_where, MaskType
        base = np.zeros((1, 1))
        cond = np.zeros(base.shape, MaskType)
        masked = masked_where(cond, base)
        base - masked

    def test_masked_array_multiply(self):
        # Ticket #254: broadcasting a masked column against a 2-D array.
        col = np.ma.zeros((4, 1))
        col[2, 0] = np.ma.masked
        other = np.zeros((4, 2))
        col * other
        other * col

    def test_masked_array_repeat(self):
        # Ticket #271
        np.ma.array([1], mask=False).repeat(10)

    def test_masked_array_repr_unicode(self):
        # Ticket #1256
        repr(np.ma.array(u"Unicode"))

    def test_atleast_2d(self):
        # Ticket #1559: atleast_2d must also promote the mask's rank.
        vec = np.ma.masked_array([0.0, 1.2, 3.5], mask=[False, True, False])
        mat = np.atleast_2d(vec)
        assert_(vec.mask.ndim == 1)
        assert_(mat.mask.ndim == 2)

    def test_set_fill_value_unicode_py3(self):
        # Ticket #2733
        arr = np.ma.masked_array(['a', 'b', 'c'], mask=[1, 0, 0])
        arr.fill_value = 'X'
        assert_(arr.fill_value == 'X')

    def test_var_sets_maskedarray_scalar(self):
        # Issue gh-2757: var(out=...) on a fully masked array.
        arr = np.ma.array(np.arange(5), mask=True)
        mout = np.ma.array(-1, dtype=float)
        arr.var(out=mout)
        assert_(mout._data == 0)

    def test_ddof_corrcoef(self):
        # See gh-3336: ddof must cancel out of the correlation coefficient.
        xm = np.ma.masked_equal([1, 2, 3, 4, 5], 4)
        yv = np.array([2, 2.5, 3.1, 3, 5])
        # this test can be removed after deprecation.
        with suppress_warnings() as sup:
            sup.filter(DeprecationWarning, "bias and ddof have no effect")
            r0 = np.ma.corrcoef(xm, yv, ddof=0)
            r1 = np.ma.corrcoef(xm, yv, ddof=1)
            assert_allclose(r0.data, r1.data)

    def test_mask_not_backmangled(self):
        # See gh-10314.  Test case taken from gh-3140.
        src = np.ma.MaskedArray([1., 2.], mask=[False, False])
        assert_(src.mask.shape == (2,))
        tiled = np.tile(src, (2, 1))
        # np.tile must no longer mutate src's mask shape to (1, 2).
        assert_(src.mask.shape == (2,))
        assert_(tiled.shape == (2, 2))
        assert_(tiled.mask.shape == (2, 2))

    def test_empty_list_on_structured(self):
        # See gh-12464.  Indexing with an empty list gives an empty result.
        ma = np.ma.MaskedArray([(1, 1.), (2, 2.), (3, 3.)], dtype='i4,f4')
        assert_array_equal(ma[[]], ma[:0])

    def test_masked_array_tobytes_fortran(self):
        # Fortran-order serialization equals C-order of the transpose.
        ma = np.ma.arange(4).reshape((2, 2))
        assert_array_equal(ma.tobytes(order='F'), ma.T.tobytes())
| bsd-3-clause |
FluidityStokes/fluidity | tests/mms_sediment/mms_sediment.py | 2 | 9341 |
# This file was *autogenerated* from the file mms_sediment.sage
from sage.all_cmdline import * # import sage library
_sage_const_2 = Integer(2); _sage_const_0 = Integer(0); _sage_const_2p39 = RealNumber('2.39'); _sage_const_0p65 = RealNumber('0.65'); _sage_const_0p66 = RealNumber('0.66'); _sage_const_10p0 = RealNumber('10.0'); _sage_const_0p02 = RealNumber('0.02'); _sage_const_0p01 = RealNumber('0.01'); _sage_const_0p07 = RealNumber('0.07'); _sage_const_1p0 = RealNumber('1.0'); _sage_const_1p1 = RealNumber('1.1'); _sage_const_1p3 = RealNumber('1.3'); _sage_const_1p4 = RealNumber('1.4'); _sage_const_1p7 = RealNumber('1.7'); _sage_const_0p1 = RealNumber('0.1'); _sage_const_0p0 = RealNumber('0.0'); _sage_const_0p3 = RealNumber('0.3'); _sage_const_0p2 = RealNumber('0.2'); _sage_const_0p5 = RealNumber('0.5'); _sage_const_0p4 = RealNumber('0.4'); _sage_const_0p6 = RealNumber('0.6'); _sage_const_0p9 = RealNumber('0.9'); _sage_const_3p0 = RealNumber('3.0'); _sage_const_2p3 = RealNumber('2.3'); _sage_const_2p1 = RealNumber('2.1'); _sage_const_2p0 = RealNumber('2.0'); _sage_const_5p0 = RealNumber('5.0'); _sage_const_0p75 = RealNumber('0.75'); _sage_const_0p288 = RealNumber('0.288'); _sage_const_0p33 = RealNumber('0.33'); _sage_const_1p625 = RealNumber('1.625'); _sage_const_1p3en7 = RealNumber('1.3e-7')
y = var('y')
def function(phi_0, phi_x, phi_y, phi_xy,
             f_sin_x, f_cos_x, f_sin_y, f_cos_y, f_sin_xy, f_cos_xy,
             alpha_x, alpha_y, alpha_xy):
    """Build a 2-D manufactured-solution field.

    Returns the symbolic sum of a constant phi_0 and weighted sin/cos
    terms in x, y and the mixed coordinate x*y (the x*y argument is
    scaled by 1/pi).  The f_* switches select sin and/or cos per term;
    alpha_* are the wavenumbers.  Relies on the sage globals
    sin, cos, pi, x and y.
    """
    term_x = phi_x * (f_sin_x * sin(alpha_x * x) + f_cos_x * cos(alpha_x * x))
    term_y = phi_y * (f_sin_y * sin(alpha_y * y) + f_cos_y * cos(alpha_y * y))
    term_xy = phi_xy * (f_sin_xy * sin(alpha_xy * x * y / pi)
                        + f_cos_xy * cos(alpha_xy * x * y / pi))
    return phi_0 + term_x + term_y + term_xy
u = function(_sage_const_0p5 , _sage_const_1p0 , _sage_const_0p3 , _sage_const_0p0 ,
_sage_const_0p0 , _sage_const_1p0 , _sage_const_0p5 , _sage_const_0p5 , _sage_const_1p0 , _sage_const_0p0 ,
_sage_const_0p9 , _sage_const_0p75 , _sage_const_1p0 )
p = function(-_sage_const_1p0 , _sage_const_1p0 , _sage_const_1p0 , _sage_const_1p0 ,
_sage_const_1p0 , _sage_const_0p0 , _sage_const_0p0 , _sage_const_1p0 , _sage_const_1p0 , _sage_const_0p0 ,
_sage_const_1p0 , _sage_const_1p0 , _sage_const_1p0 )
s1 = function(_sage_const_0p1 , -_sage_const_0p01 , -_sage_const_0p02 , _sage_const_0p07 ,
_sage_const_1p0 , _sage_const_0p0 , _sage_const_0p0 , _sage_const_1p0 , _sage_const_1p0 , _sage_const_0p0 ,
_sage_const_1p7 , _sage_const_2p1 , _sage_const_1p3 )
s2 = function(_sage_const_0p1 , _sage_const_0p01 , -_sage_const_0p01 , -_sage_const_0p1 ,
_sage_const_1p0 , _sage_const_0p0 , _sage_const_0p0 , _sage_const_1p0 , _sage_const_1p0 , _sage_const_0p0 ,
_sage_const_1p4 , _sage_const_3p0 , _sage_const_0p6 )
s1_d = function(_sage_const_10p0 , _sage_const_0p2 , _sage_const_0p4 , -_sage_const_0p2 ,
_sage_const_1p0 , _sage_const_0p0 , _sage_const_0p0 , _sage_const_1p0 , _sage_const_1p0 , _sage_const_0p0 ,
_sage_const_1p7 , _sage_const_1p1 , _sage_const_0p3 )
s2_d = function(_sage_const_5p0 , -_sage_const_0p1 , -_sage_const_0p2 , _sage_const_0p1 ,
_sage_const_1p0 , _sage_const_0p0 , _sage_const_0p0 , _sage_const_1p0 , _sage_const_1p0 , _sage_const_0p0 ,
_sage_const_1p4 , _sage_const_1p3 , _sage_const_2p3 )
v = integral(-diff(u,x),y) # divergence free
nu = _sage_const_1p0 /(_sage_const_1p0 - (s1 + s2)/_sage_const_0p65 )**_sage_const_1p625 # concentration dependent viscosity
# fix nullspace in pressure by imposing zero average on bottom boundary:
p -= integral(p.subs({y:_sage_const_0 }), (x, _sage_const_0 , pi))/pi
s1_R = _sage_const_0p33
s2_R = _sage_const_0p66
rho = s1_R*s1 + s2_R*s2
tau_xx = _sage_const_2 *nu*diff(u,x)
tau_xy = nu*(diff(u,y) + diff(v,x))
tau_yy = _sage_const_2 *nu*diff(v,y)
tau_yx = nu*(diff(u,y) + diff(v,x))
s1_us = _sage_const_0p33 *(_sage_const_1p0 - (s1 + s2))**_sage_const_2p39 # hindered settling
s2_us = _sage_const_0p66 *(_sage_const_1p0 - (s1 + s2))**_sage_const_2p39 # hindered settling
Su = u*diff(u,x) + v*diff(u,y) - diff(tau_xx, x) - diff(tau_xy, y) - rho + diff(p,x)
Sv = u*diff(v,x) + v*diff(v,y) - diff(tau_yx, x) - diff(tau_yy, y) - rho + diff(p,y)
Ss1 = (u + s1_us)*diff(s1,x) + (v + s1_us)*diff(s1,y) - _sage_const_1p0 *(diff(s1, x, x) + diff(s1, y, y))
Ss2 = (u + s2_us)*diff(s2,x) + (v + s2_us)*diff(s2,y) - _sage_const_1p0 *(diff(s2, x, x) + diff(s2, y, y))
# EROSION RATE
s1_D = _sage_const_1p0
s2_D = _sage_const_2p0
R_p1 = ((s1_R*s1_D**_sage_const_3p0 )**_sage_const_0p5 )/nu
R_p2 = ((s2_R*s2_D**_sage_const_3p0 )**_sage_const_0p5 )/nu
mu = (s1_d*s1_D + s2_d*s2_D)/(s1_d + s2_d)
sigma = ((s1_d*(s1_D - mu)**_sage_const_2p0 + s2_d*(s2_D - mu)**_sage_const_2p0 )/(s1_d + s2_d))**_sage_const_0p5
lambda_m = _sage_const_1p0 - _sage_const_0p288 *sigma
tau_b = u**_sage_const_2p0 + v**_sage_const_2p0 + _sage_const_2 **_sage_const_0p5 *u*v # drag shear stress // |cD*|u|*u| // cD = 1.0
d_50 = s1_D
Z_1 = (lambda_m*(tau_b**_sage_const_0p5 )*(R_p1**_sage_const_0p6 )*(s1_D/d_50)**_sage_const_0p2 )/s1_us
Z_2 = (lambda_m*(tau_b**_sage_const_0p5 )*(R_p2**_sage_const_0p6 )*(s2_D/d_50)**_sage_const_0p2 )/s2_us
F_1 = s1_d / (s1_d + s2_d)
F_2 = s2_d / (s1_d + s2_d)
A = _sage_const_1p3en7
E_1 = s1_us*F_1*(A*Z_1**_sage_const_5p0 )/(_sage_const_1p0 + A*(Z_1**_sage_const_5p0 )/_sage_const_0p3 )
E_2 = s2_us*F_2*(A*Z_2**_sage_const_5p0 )/(_sage_const_1p0 + A*(Z_2**_sage_const_5p0 )/_sage_const_0p3 )
# DEPOSIT RATE
D_1_x = (u + s1_us)*s1
D_1_y = (v + s1_us)*s1
D_2_x = (u + s2_us)*s2
D_2_y = (v + s2_us)*s2
print('from math import sin, cos, tanh, pi, e, sqrt')
print('')
print('def u(X):')
print(' return', str(u).replace('^', '**').replace('000000000000', '').replace('x', 'X[0]').replace('y', 'X[1]'))
print('')
print('def v(X):')
print(' return', str(v).replace('^', '**').replace('000000000000', '').replace('x', 'X[0]').replace('y', 'X[1]'))
print('' )
print('def p(X):')
print(' return', str(p).replace('^', '**').replace('000000000000', '').replace('x', 'X[0]').replace('y', 'X[1]'))
print('' )
print('def s1(X):')
print(' return', str(s1).replace('^', '**').replace('000000000000', '').replace('x', 'X[0]').replace('y', 'X[1]'))
print('' )
print('def s2(X):')
print(' return', str(s2).replace('^', '**').replace('000000000000', '').replace('x', 'X[0]').replace('y', 'X[1]'))
print('')
print('def s1_d(X):')
print(' return', str(s1_d).replace('^', '**').replace('000000000000', '').replace('x', 'X[0]').replace('y', 'X[1]'))
print('' )
print('def s2_d(X):')
print(' return', str(s2_d).replace('^', '**').replace('000000000000', '').replace('x', 'X[0]').replace('y', 'X[1]'))
print('')
print('def rho(X):')
print(' return', str(rho).replace('^', '**').replace('000000000000', '').replace('x', 'X[0]').replace('y', 'X[1]'))
print('')
print('def nu(X):')
print(' return', str(nu).replace('^', '**').replace('000000000000', '').replace('x', 'X[0]').replace('y', 'X[1]'))
print('')
print('def mu(X):')
print(' return', str(mu).replace('^', '**').replace('000000000000', '').replace('x', 'X[0]').replace('y', 'X[1]'))
print('')
print('def sigma(X):')
print(' return', str(sigma).replace('^', '**').replace('000000000000', '').replace('x', 'X[0]').replace('y', 'X[1]'))
print('')
print('def tau_b(X):')
print(' return', str(tau_b).replace('^', '**').replace('000000000000', '').replace('x', 'X[0]').replace('y', 'X[1]'))
print('')
print('def E_1(X):')
print(' if X[1] < 1e-6:')
print(' return', str(E_1).replace('^', '**').replace('000000000000', '').replace('x', 'X[0]').replace('y', 'X[1]'))
print(' else:')
print(' return 0.0')
print('')
print('def E_2(X):')
print(' if X[1] < 1e-6:')
print(' return', str(E_2).replace('^', '**').replace('000000000000', '').replace('x', 'X[0]').replace('y', 'X[1]'))
print(' else:')
print(' return 0.0')
print('')
print('def D_1(X):')
print(' if X[1] < 1e-6:')
print(' return', str(-D_1_y).replace('^', '**').replace('000000000000', '').replace('x', 'X[0]').replace('y', 'X[1]'))
print(' else:')
print(' return 0.0')
print('')
print('def D_2(X):')
print(' if X[1] < 1e-6:')
print(' return', str(-D_2_y).replace('^', '**').replace('000000000000', '').replace('x', 'X[0]').replace('y', 'X[1]'))
print(' else:')
print(' return 0.0')
print('')
print('def forcing_u(X):')
print(' return', str(Su).replace('^', '**').replace('000000000000', '').replace('x', 'X[0]').replace('y', 'X[1]'))
print('')
print('def forcing_v(X):')
print(' return', str(Sv).replace('^', '**').replace('000000000000', '').replace('x', 'X[0]').replace('y', 'X[1]'))
print('')
print('def forcing_s1(X):')
print(' return', str(Ss1).replace('^', '**').replace('000000000000', '').replace('x', 'X[0]').replace('y', 'X[1]'))
print('')
print('def forcing_s2(X):')
print(' return', str(Ss2).replace('^', '**').replace('000000000000', '').replace('x', 'X[0]').replace('y', 'X[1]'))
print('')
print('def velocity(X):')
print(' return [u(X), v(X)]')
print('')
print('def forcing_velocity(X):')
print(' return [forcing_u(X), forcing_v(X)]')
| lgpl-2.1 |
vmax-feihu/hue | desktop/core/ext-py/kazoo-2.0/kazoo/tests/test_retry.py | 50 | 2205 | import unittest
from nose.tools import eq_
class TestRetrySleeper(unittest.TestCase):
def _pass(self):
pass
def _fail(self, times=1):
from kazoo.retry import ForceRetryError
scope = dict(times=0)
def inner():
if scope['times'] >= times:
pass
else:
scope['times'] += 1
raise ForceRetryError('Failed!')
return inner
def _makeOne(self, *args, **kwargs):
from kazoo.retry import KazooRetry
return KazooRetry(*args, **kwargs)
def test_reset(self):
retry = self._makeOne(delay=0, max_tries=2)
retry(self._fail())
eq_(retry._attempts, 1)
retry.reset()
eq_(retry._attempts, 0)
def test_too_many_tries(self):
from kazoo.retry import RetryFailedError
retry = self._makeOne(delay=0)
self.assertRaises(RetryFailedError, retry, self._fail(times=999))
eq_(retry._attempts, 1)
def test_maximum_delay(self):
def sleep_func(_time):
pass
retry = self._makeOne(delay=10, max_tries=100, sleep_func=sleep_func)
retry(self._fail(times=10))
self.assertTrue(retry._cur_delay < 4000, retry._cur_delay)
# gevent's sleep function is picky about the type
eq_(type(retry._cur_delay), float)
def test_copy(self):
_sleep = lambda t: None
retry = self._makeOne(sleep_func=_sleep)
rcopy = retry.copy()
self.assertTrue(rcopy.sleep_func is _sleep)
class TestKazooRetry(unittest.TestCase):
def _makeOne(self, **kw):
from kazoo.retry import KazooRetry
return KazooRetry(**kw)
def test_connection_closed(self):
from kazoo.exceptions import ConnectionClosedError
retry = self._makeOne()
def testit():
raise ConnectionClosedError()
self.assertRaises(ConnectionClosedError, retry, testit)
def test_session_expired(self):
from kazoo.exceptions import SessionExpiredError
retry = self._makeOne(max_tries=1)
def testit():
raise SessionExpiredError()
self.assertRaises(Exception, retry, testit)
| apache-2.0 |
chadyred/odoo_addons | smile_module_record/models/models.py | 5 | 1547 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2014 Smile (<http://www.smile.fr>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp import models
# Audit columns maintained automatically by the ORM; never exported.
magic_fields = ['create_uid', 'create_date', 'write_uid', 'write_date']


def get_fields_to_export(self):
    """Return the list of exportable column names for this model.

    Skips the ORM's audit ("magic") columns, non-stored fields and
    one2many relations (which cannot be exported flat).  Relational
    many2one/many2many columns are exported by external id, hence the
    ':id' suffix.  The special 'id' column always comes first.

    :param self: an Odoo/OpenERP model instance (bound as a method below)
    :return: list of column names suitable for ``export_data``
    """
    fields_to_export = ['id']
    # .items() behaves identically to the old .iteritems() on Python 2
    # and also works on Python 3; .iteritems() is Python-2-only.
    for column, field in self._fields.items():
        if column in magic_fields:
            continue
        if field.type == 'one2many' \
                or not field.store:
            continue
        if field.type in ('many2many', 'many2one'):
            column += ':id'
        fields_to_export.append(column)
    return fields_to_export
models.Model.get_fields_to_export = get_fields_to_export
| agpl-3.0 |
michalliu/OpenWrt-Firefly-Libraries | staging_dir/host/lib/python2.7/test/test_funcattrs.py | 117 | 12749 | from test import test_support
import types
import unittest
# NOTE: this is Python-2-only test code (print statements, exec ... in,
# im_func/im_self method attributes); indentation was mangled in transit
# and is preserved as-is.
# Shared fixture: class F with method a(), plain function b().
class FuncAttrsTest(unittest.TestCase):
def setUp(self):
class F:
def a(self):
pass
def b():
return 3
self.f = F
self.fi = F()
self.b = b
def cannot_set_attr(self, obj, name, value, exceptions):
# Helper method for other tests.
try:
setattr(obj, name, value)
except exceptions:
pass
else:
self.fail("shouldn't be able to set %s to %r" % (name, value))
try:
delattr(obj, name)
except exceptions:
pass
else:
self.fail("shouldn't be able to del %s" % name)
# Attributes of plain function objects: __module__, dir(), func_code,
# func_globals, func_closure, __name__, func_defaults.
class FunctionPropertiesTest(FuncAttrsTest):
# Include the external setUp method that is common to all tests
def test_module(self):
self.assertEqual(self.b.__module__, __name__)
def test_dir_includes_correct_attrs(self):
self.b.known_attr = 7
self.assertIn('known_attr', dir(self.b),
"set attributes not in dir listing of method")
# Test on underlying function object of method
self.f.a.im_func.known_attr = 7
self.assertIn('known_attr', dir(self.f.a),
"set attribute on unbound method implementation in "
"class not in dir")
self.assertIn('known_attr', dir(self.fi.a),
"set attribute on unbound method implementations, "
"should show up in next dir")
def test_duplicate_function_equality(self):
# Body of `duplicate' is the exact same as self.b
def duplicate():
'my docstring'
return 3
self.assertNotEqual(self.b, duplicate)
def test_copying_func_code(self):
def test(): pass
self.assertEqual(test(), None)
test.func_code = self.b.func_code
self.assertEqual(test(), 3) # self.b always returns 3, arbitrarily
def test_func_globals(self):
self.assertIs(self.b.func_globals, globals())
self.cannot_set_attr(self.b, 'func_globals', 2, TypeError)
def test_func_closure(self):
a = 12
def f(): print a
c = f.func_closure
self.assertIsInstance(c, tuple)
self.assertEqual(len(c), 1)
# don't have a type object handy
self.assertEqual(c[0].__class__.__name__, "cell")
self.cannot_set_attr(f, "func_closure", c, TypeError)
def test_empty_cell(self):
def f(): print a
try:
f.func_closure[0].cell_contents
except ValueError:
pass
else:
self.fail("shouldn't be able to read an empty cell")
a = 12
def test_func_name(self):
self.assertEqual(self.b.__name__, 'b')
self.assertEqual(self.b.func_name, 'b')
self.b.__name__ = 'c'
self.assertEqual(self.b.__name__, 'c')
self.assertEqual(self.b.func_name, 'c')
self.b.func_name = 'd'
self.assertEqual(self.b.__name__, 'd')
self.assertEqual(self.b.func_name, 'd')
# __name__ and func_name must be a string
self.cannot_set_attr(self.b, '__name__', 7, TypeError)
self.cannot_set_attr(self.b, 'func_name', 7, TypeError)
# __name__ must be available when in restricted mode. Exec will raise
# AttributeError if __name__ is not available on f.
s = """def f(): pass\nf.__name__"""
exec s in {'__builtins__': {}}
# Test on methods, too
self.assertEqual(self.f.a.__name__, 'a')
self.assertEqual(self.fi.a.__name__, 'a')
self.cannot_set_attr(self.f.a, "__name__", 'a', AttributeError)
self.cannot_set_attr(self.fi.a, "__name__", 'a', AttributeError)
def test_func_code(self):
num_one, num_two = 7, 8
def a(): pass
def b(): return 12
def c(): return num_one
def d(): return num_two
def e(): return num_one, num_two
for func in [a, b, c, d, e]:
self.assertEqual(type(func.func_code), types.CodeType)
self.assertEqual(c(), 7)
self.assertEqual(d(), 8)
d.func_code = c.func_code
self.assertEqual(c.func_code, d.func_code)
self.assertEqual(c(), 7)
# self.assertEqual(d(), 7)
try:
b.func_code = c.func_code
except ValueError:
pass
else:
self.fail("func_code with different numbers of free vars should "
"not be possible")
try:
e.func_code = d.func_code
except ValueError:
pass
else:
self.fail("func_code with different numbers of free vars should "
"not be possible")
def test_blank_func_defaults(self):
self.assertEqual(self.b.func_defaults, None)
del self.b.func_defaults
self.assertEqual(self.b.func_defaults, None)
def test_func_default_args(self):
def first_func(a, b):
return a+b
def second_func(a=1, b=2):
return a+b
self.assertEqual(first_func.func_defaults, None)
self.assertEqual(second_func.func_defaults, (1, 2))
first_func.func_defaults = (1, 2)
self.assertEqual(first_func.func_defaults, (1, 2))
self.assertEqual(first_func(), 3)
self.assertEqual(first_func(3), 5)
self.assertEqual(first_func(3, 5), 8)
del second_func.func_defaults
self.assertEqual(second_func.func_defaults, None)
try:
second_func()
except TypeError:
pass
else:
self.fail("func_defaults does not update; deleting it does not "
"remove requirement")
# Bound/unbound method attributes: im_class, im_func, im_self (Py2-only).
class InstancemethodAttrTest(FuncAttrsTest):
def test_im_class(self):
self.assertEqual(self.f.a.im_class, self.f)
self.assertEqual(self.fi.a.im_class, self.f)
self.cannot_set_attr(self.f.a, "im_class", self.f, TypeError)
self.cannot_set_attr(self.fi.a, "im_class", self.f, TypeError)
def test_im_func(self):
self.f.b = self.b
self.assertEqual(self.f.b.im_func, self.b)
self.assertEqual(self.fi.b.im_func, self.b)
self.cannot_set_attr(self.f.b, "im_func", self.b, TypeError)
self.cannot_set_attr(self.fi.b, "im_func", self.b, TypeError)
def test_im_self(self):
self.assertEqual(self.f.a.im_self, None)
self.assertEqual(self.fi.a.im_self, self.fi)
self.cannot_set_attr(self.f.a, "im_self", None, TypeError)
self.cannot_set_attr(self.fi.a, "im_self", self.fi, TypeError)
def test_im_func_non_method(self):
# Behavior should be the same when a method is added via an attr
# assignment
self.f.id = types.MethodType(id, None, self.f)
self.assertEqual(self.fi.id(), id(self.fi))
self.assertNotEqual(self.fi.id(), id(self.f))
# Test usage
try:
self.f.id.unknown_attr
except AttributeError:
pass
else:
self.fail("using unknown attributes should raise AttributeError")
# Test assignment and deletion
self.cannot_set_attr(self.f.id, 'unknown_attr', 2, AttributeError)
self.cannot_set_attr(self.fi.id, 'unknown_attr', 2, AttributeError)
def test_implicit_method_properties(self):
self.f.a.im_func.known_attr = 7
self.assertEqual(self.f.a.known_attr, 7)
self.assertEqual(self.fi.a.known_attr, 7)
# Arbitrary attributes can be set on functions but not on bound methods.
class ArbitraryFunctionAttrTest(FuncAttrsTest):
def test_set_attr(self):
# setting attributes only works on function objects
self.b.known_attr = 7
self.assertEqual(self.b.known_attr, 7)
for func in [self.f.a, self.fi.a]:
try:
func.known_attr = 7
except AttributeError:
pass
else:
self.fail("setting attributes on methods should raise error")
def test_delete_unknown_attr(self):
try:
del self.b.unknown_attr
except AttributeError:
pass
else:
self.fail("deleting unknown attribute should raise TypeError")
def test_setting_attrs_duplicates(self):
try:
self.f.a.klass = self.f
except AttributeError:
pass
else:
self.fail("setting arbitrary attribute in unbound function "
" should raise AttributeError")
self.f.a.im_func.klass = self.f
for method in [self.f.a, self.fi.a, self.fi.a.im_func]:
self.assertEqual(method.klass, self.f)
def test_unset_attr(self):
for func in [self.b, self.f.a, self.fi.a]:
try:
func.non_existent_attr
except AttributeError:
pass
else:
self.fail("using unknown attributes should raise "
"AttributeError")
# __dict__ / func_dict: assignment rules and propagation through methods.
class FunctionDictsTest(FuncAttrsTest):
def test_setting_dict_to_invalid(self):
self.cannot_set_attr(self.b, '__dict__', None, TypeError)
self.cannot_set_attr(self.b, 'func_dict', None, TypeError)
from UserDict import UserDict
d = UserDict({'known_attr': 7})
self.cannot_set_attr(self.f.a.im_func, '__dict__', d, TypeError)
self.cannot_set_attr(self.fi.a.im_func, '__dict__', d, TypeError)
def test_setting_dict_to_valid(self):
d = {'known_attr': 7}
self.b.__dict__ = d
# Setting dict is only possible on the underlying function objects
self.f.a.im_func.__dict__ = d
# Test assignment
self.assertIs(d, self.b.__dict__)
self.assertIs(d, self.b.func_dict)
# ... and on all the different ways of referencing the method's func
self.assertIs(d, self.f.a.im_func.__dict__)
self.assertIs(d, self.f.a.__dict__)
self.assertIs(d, self.fi.a.im_func.__dict__)
self.assertIs(d, self.fi.a.__dict__)
# Test value
self.assertEqual(self.b.known_attr, 7)
self.assertEqual(self.b.__dict__['known_attr'], 7)
self.assertEqual(self.b.func_dict['known_attr'], 7)
# ... and again, on all the different method's names
self.assertEqual(self.f.a.im_func.known_attr, 7)
self.assertEqual(self.f.a.known_attr, 7)
self.assertEqual(self.fi.a.im_func.known_attr, 7)
self.assertEqual(self.fi.a.known_attr, 7)
def test_delete_func_dict(self):
try:
del self.b.__dict__
except TypeError:
pass
else:
self.fail("deleting function dictionary should raise TypeError")
try:
del self.b.func_dict
except TypeError:
pass
else:
self.fail("deleting function dictionary should raise TypeError")
def test_unassigned_dict(self):
self.assertEqual(self.b.__dict__, {})
def test_func_as_dict_key(self):
value = "Some string"
d = {}
d[self.b] = value
self.assertEqual(d[self.b], value)
# __doc__ / func_doc: settable on functions, read-only through methods.
class FunctionDocstringTest(FuncAttrsTest):
def test_set_docstring_attr(self):
self.assertEqual(self.b.__doc__, None)
self.assertEqual(self.b.func_doc, None)
docstr = "A test method that does nothing"
self.b.__doc__ = self.f.a.im_func.__doc__ = docstr
self.assertEqual(self.b.__doc__, docstr)
self.assertEqual(self.b.func_doc, docstr)
self.assertEqual(self.f.a.__doc__, docstr)
self.assertEqual(self.fi.a.__doc__, docstr)
self.cannot_set_attr(self.f.a, "__doc__", docstr, AttributeError)
self.cannot_set_attr(self.fi.a, "__doc__", docstr, AttributeError)
def test_delete_docstring(self):
self.b.__doc__ = "The docstring"
del self.b.__doc__
self.assertEqual(self.b.__doc__, None)
self.assertEqual(self.b.func_doc, None)
self.b.func_doc = "The docstring"
del self.b.func_doc
self.assertEqual(self.b.__doc__, None)
self.assertEqual(self.b.func_doc, None)
# classmethod/staticmethod wrappers expose the raw function as __func__.
class StaticMethodAttrsTest(unittest.TestCase):
def test_func_attribute(self):
def f():
pass
c = classmethod(f)
self.assertTrue(c.__func__ is f)
s = staticmethod(f)
self.assertTrue(s.__func__ is f)
# Test-suite entry point (Python 2 test_support runner).
def test_main():
test_support.run_unittest(FunctionPropertiesTest, InstancemethodAttrTest,
ArbitraryFunctionAttrTest, FunctionDictsTest,
FunctionDocstringTest,
StaticMethodAttrsTest)
if __name__ == "__main__":
test_main()
| gpl-2.0 |
thaim/ansible | lib/ansible/modules/network/panos/_panos_restart.py | 41 | 2976 | #!/usr/bin/python
# -*- coding: utf-8 -*-
#
# Ansible module to manage PaloAltoNetworks Firewall
# (c) 2016, techbizdev <techbizdev@paloaltonetworks.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
DOCUMENTATION = '''
---
module: panos_restart
short_description: restart a device
description:
- Restart a device
author: "Luigi Mori (@jtschichold), Ivan Bojer (@ivanbojer)"
version_added: "2.3"
requirements:
- pan-python
deprecated:
alternative: Use U(https://galaxy.ansible.com/PaloAltoNetworks/paloaltonetworks) instead.
removed_in: "2.12"
why: Consolidating code base.
extends_documentation_fragment: panos
'''
EXAMPLES = '''
- panos_restart:
ip_address: "192.168.1.1"
username: "admin"
password: "admin"
'''
RETURN = '''
status:
description: success status
returned: success
type: str
sample: "okey dokey"
'''
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['deprecated'],
'supported_by': 'community'}
import sys
import traceback
try:
import pan.xapi
HAS_LIB = True
except ImportError:
HAS_LIB = False
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils._text import to_native
def main():
    """Module entry point: validate parameters and restart the PAN-OS device."""
    argument_spec = dict(
        ip_address=dict(),
        password=dict(no_log=True),
        username=dict(default='admin')
    )
    module = AnsibleModule(argument_spec=argument_spec, supports_check_mode=False)
    if not HAS_LIB:
        module.fail_json(msg='pan-python required for this module')

    # Required credentials; fail early with a clear message if missing.
    ip_address = module.params["ip_address"]
    if not ip_address:
        module.fail_json(msg="ip_address should be specified")
    password = module.params["password"]
    if not password:
        module.fail_json(msg="password is required")
    username = module.params['username']

    xapi = pan.xapi.PanXapi(
        hostname=ip_address,
        api_username=username,
        api_password=password
    )

    try:
        xapi.op(cmd="<request><restart><system></system></restart></request>")
    except Exception as e:
        # The device drops the connection while restarting, so pan-python
        # reports success through an exception whose text says 'succeeded'.
        if 'succeeded' in to_native(e):
            module.exit_json(changed=True, msg=to_native(e))
        else:
            module.fail_json(msg=to_native(e), exception=traceback.format_exc())
    module.exit_json(changed=True, msg="okey dokey")
if __name__ == '__main__':
main()
| mit |
byran/Arduino | arduino-core/src/processing/app/i18n/python/requests/packages/charade/compat.py | 190 | 1033 | ######################## BEGIN LICENSE BLOCK ########################
# Contributor(s):
# Ian Cordasco - port to Python
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301 USA
######################### END LICENSE BLOCK #########################
def wrap_ord(a):
    """Normalise *a* to an integer code point.

    Strings (single characters) are converted with ord(); integers are
    returned unchanged; any other type falls through to None.
    """
    if isinstance(a, str):
        return ord(a)
    if isinstance(a, int):
        return a
| lgpl-2.1 |
mmaslowskicc/pytest-dbfixtures | src/pytest_dbfixtures/factories/mysql_client.py | 3 | 3381 | # Copyright (C) 2013 by Clearcode <http://clearcode.cc>
# and associates (see AUTHORS).
# This file is part of pytest-dbfixtures.
# pytest-dbfixtures is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# pytest-dbfixtures is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
# You should have received a copy of the GNU Lesser General Public License
# along with pytest-dbfixtures. If not, see <http://www.gnu.org/licenses/>.
import pytest
from pytest_dbfixtures.utils import get_config, try_import, get_process_fixture
def mysql(process_fixture_name, user=None, passwd=None, db=None,
charset='utf8', collation='utf8_general_ci'):
"""
Factory. Create connection to mysql. If you want you can give a scope,
default is 'session'.
For charset and collation meaning,
see `Database Character Set and Collation
<https://dev.mysql.com/doc/refman/5.5/en/charset-database.html>`_
:param str process_fixture_name: process fixture name
:param str user: mysql server user
:param str passwd: mysql server's password
:param str db: database's name
:param str charset: MySQL characterset to use by default
for *tests* database
:param str collation: MySQL collation to use by default
for *tests* database
:returns: function ``mysql_fixture`` with suit scope
:rtype: func
"""
@pytest.fixture
def mysql_fixture(request):
"""
#. Get config.
#. Try to import MySQLdb package.
#. Connect to mysql server.
#. Create database.
#. Use proper database.
#. Drop database after tests.
:param FixtureRequest request: fixture request object
:rtype: MySQLdb.connections.Connection
:returns: connection to database
"""
proc_fixture = get_process_fixture(request, process_fixture_name)
config = get_config(request)
# connection parameters: explicit arguments win over config defaults
mysql_port = proc_fixture.port
mysql_host = proc_fixture.host
mysql_user = user or config.mysql.user
mysql_passwd = passwd or config.mysql.password
mysql_db = db or config.mysql.db
# the test server listens on a port-specific unix socket, not TCP
unixsocket = '/tmp/mysql.{port}.sock'.format(port=mysql_port)
MySQLdb, config = try_import(
'MySQLdb', request, pypi_package='mysqlclient'
)
mysql_conn = MySQLdb.connect(
host=mysql_host,
unix_socket=unixsocket,
user=mysql_user,
passwd=mysql_passwd,
)
mysql_conn.query(
'''CREATE DATABASE {name}
DEFAULT CHARACTER SET {charset}
DEFAULT COLLATE {collation}'''
.format(
name=mysql_db, charset=charset, collation=collation
)
)
mysql_conn.query('USE %s' % mysql_db)
# teardown: drop the per-test database and close the connection
def drop_database():
mysql_conn.query('DROP DATABASE IF EXISTS %s' % mysql_db)
mysql_conn.close()
request.addfinalizer(drop_database)
return mysql_conn
return mysql_fixture
| lgpl-3.0 |
FranMachio/Plugin.Video.Fran.machio | servers/turbobit.py | 44 | 1401 | # -*- coding: utf-8 -*-
#------------------------------------------------------------
# pelisalacarta - XBMC Plugin
# Conector para turbobit
# http://blog.tvalacarta.info/plugin-xbmc/pelisalacarta/
#------------------------------------------------------------
import urlparse,urllib2,urllib,re
import os
from core import scrapertools
from core import logger
from core import config
def get_video_url( page_url , premium = False , user="" , password="", video_password="" ):
logger.info("[turbobit.py] get_video_url(page_url='%s')" % page_url)
video_urls = []
return video_urls
# Encuentra vídeos del servidor en el texto pasado
def find_videos(data):
encontrados = set()
devuelve = []
# http://turbobit.net/scz8lxrrgllr.html
# http://www.turbobit.net/uzo3gcyfmt4b.html
# http://turbobit.net/eaz9ha3gop65/deadliest.catch.s08e09-killers.mp4.html
patronvideos = '(turbobit.net/[0-9a-z]+)'
logger.info("[turbobit.py] find_videos #"+patronvideos+"#")
matches = re.compile(patronvideos,re.DOTALL).findall(data)
for match in matches:
titulo = "[turbobit]"
url = "http://"+match+".html"
if url not in encontrados:
logger.info(" url="+url)
devuelve.append( [ titulo , url , 'turbobit' ] )
encontrados.add(url)
else:
logger.info(" url duplicada="+url)
return devuelve
| gpl-2.0 |
adhoc-dev/sale-workflow | sale_product_set/wizard/product_set_add.py | 28 | 1585 | # -*- coding: utf-8 -*-
from openerp import models, fields, api, _
import openerp.addons.decimal_precision as dp
class ProductSetAd(models.TransientModel):
_name = 'product.set.add'
_rec_name = 'product_set_id'
_descritpion = "Wizard model to add product set into a quotation"
product_set_id = fields.Many2one(
'product.set', _('Product set'), required=True)
quantity = fields.Float(
string=_('Quantity'),
digits=dp.get_precision('Product Unit of Measure'), required=True,
default=1)
@api.multi
def add_set(self):
""" Add product set, multiplied by quantity in sale order line """
so_id = self._context['active_id']
if not so_id:
return
so = self.env['sale.order'].browse(so_id)
max_sequence = 0
if so.order_line:
max_sequence = max([line.sequence for line in so.order_line])
sale_order_line = self.env['sale.order.line']
for set_line in self.product_set_id.set_line_ids:
sale_order_line.create(
self.prepare_sale_order_line_data(
so_id, self.product_set_id, set_line,
max_sequence=max_sequence))
def prepare_sale_order_line_data(self, sale_order_id, set, set_line,
max_sequence=0):
return {
'order_id': sale_order_id,
'product_id': set_line.product_id.id,
'product_uom_qty': set_line.quantity * self.quantity,
'sequence': max_sequence + set_line.sequence,
}
| agpl-3.0 |
juntatalor/qexx | stock/utils.py | 1 | 5886 | __author__ = 'Сергей'
from django.db import models
from django.db.models import Sum, Q, F, Case, When, Value
from django.db.models.functions import Coalesce
from cart.models import CartProducts
from products.models import Product, PricedProduct
from orders.models import OrderProducts
from stock.models import ComingProducts, StockRecord, StockOrderProducts
from stock.constants import FIFO, LIFO
def create_sr_main(order, method=FIFO):
# здесь Raw SQL гораздо эффективнее Django ORM (LEFT OUTER JOIN по двум полям)
# Запись списаний
query = ('SELECT * FROM "orders_orderproducts" '
'LEFT OUTER JOIN "stock_stockorderproducts" '
'ON "orders_orderproducts"."id" = "stock_stockorderproducts"."order_products_id" '
'AND "orders_orderproducts"."priced_product_id" = "stock_stockorderproducts"."priced_product_id" '
'WHERE "orders_orderproducts"."order_id" = %s '
'AND "orders_orderproducts"."amount" - COALESCE("stock_stockorderproducts"."distributed_amount", 0) != 0')
for order_product in OrderProducts.objects.raw(query, [order.id]):
amount = order_product.amount - (order_product.distributed_amount or 0)
if amount > 0:
create_stock_records(order_product, order_product.priced_product, amount, order_product.amount, method)
else:
return_products(order_product, order_product.priced_product, abs(amount), order_product.amount, method)
def create_sr_additional(order, method=FIFO):
# здесь Raw SQL гораздо эффективнее Django ORM (LEFT OUTER JOIN по двум полям)
# Запись списаний для дополнительных товаров
query = ('SELECT * FROM "orders_orderproducts" '
'INNER JOIN "products_pricedproduct" '
'ON "products_pricedproduct"."id" = "orders_orderproducts"."priced_product_id" '
'INNER JOIN "products_product" '
'ON "products_product"."id" = "products_pricedproduct"."product_id" '
'INNER JOIN "products_packageproduct" '
'ON "products_product"."id" = "products_packageproduct"."product_id" '
'LEFT OUTER JOIN "stock_stockorderproducts" '
'ON "orders_orderproducts"."id" = "stock_stockorderproducts"."order_products_id" '
'AND "products_packageproduct"."additional_product_id" = "stock_stockorderproducts"."priced_product_id" '
'WHERE "orders_orderproducts"."order_id" = %s '
'AND "orders_orderproducts"."amount" - COALESCE("stock_stockorderproducts"."distributed_amount", 0) != 0')
for order_product in OrderProducts.objects.raw(query, [order.id]):
amount = order_product.amount - (order_product.distributed_amount or 0)
if amount > 0:
create_stock_records(order_product, order_product.additional_product, amount, order_product.amount, method)
else:
return_products(order_product, order_product.additional_product, abs(amount), order_product.amount, method)
def create_stock_records(order_product, priced_product, distribute_amount, amount, method):
# Количество позиций, которые осталось раскидать по партиям
amount_left = distribute_amount
if method == FIFO:
ordering = 'coming__date_received'
else:
# LIFO - последний пришел, первый ушел
ordering = '-coming__date_received'
# Остатки по полученным партиям с данным продуктом
records = ComingProducts.objects. \
filter(priced_product=priced_product,
coming__received=True). \
order_by(ordering). \
annotate(used=Coalesce(Sum('stockrecord__amount'), 0)). \
filter(used__lte=F('amount'))
for record in records:
if amount_left == 0:
break
if amount_left > record.amount:
distributed_amount = record.amount
amount_left -= record.amount
else:
distributed_amount = amount_left
amount_left = 0
# Запись в базу
StockRecord.objects.create(priced_product=priced_product,
order_products=order_product,
coming_products=record,
amount=distributed_amount)
StockOrderProducts.objects.update_or_create(order_products=order_product,
priced_product=priced_product,
defaults={'distributed_amount': amount})
def return_products(order_product, priced_product, return_amount, amount, method):
amount_left = return_amount
if method == FIFO:
ordering = 'coming_products__coming__date_received'
else:
# LIFO - последний пришел, первый ушел
ordering = '-coming_products__coming__date_received'
# Остатки по полученным партиям с данным продуктом
records = StockRecord.objects. \
filter(order_products=order_product,
priced_product=priced_product). \
order_by(ordering)
for record in records:
if amount_left == 0:
break
if amount_left >= record.amount:
amount_left -= record.amount
record.delete()
else:
record.amount -= amount_left
record.save()
amount_left = 0
StockOrderProducts.objects.update_or_create(order_products=order_product,
priced_product=priced_product,
defaults={'distributed_amount': amount})
| mit |
jamison904/android_kernel_samsung_trlte | arch/ia64/scripts/unwcheck.py | 13143 | 1714 | #!/usr/bin/python
#
# Usage: unwcheck.py FILE
#
# This script checks the unwind info of each function in file FILE
# and verifies that the sum of the region-lengths matches the total
# length of the function.
#
# Based on a shell/awk script originally written by Harish Patil,
# which was converted to Perl by Matthew Chapman, which was converted
# to Python by David Mosberger.
#
import os
import re
import sys
if len(sys.argv) != 2:
print "Usage: %s FILE" % sys.argv[0]
sys.exit(2)
readelf = os.getenv("READELF", "readelf")
start_pattern = re.compile("<([^>]*)>: \[0x([0-9a-f]+)-0x([0-9a-f]+)\]")
rlen_pattern = re.compile(".*rlen=([0-9]+)")
def check_func (func, slots, rlen_sum):
if slots != rlen_sum:
global num_errors
num_errors += 1
if not func: func = "[%#x-%#x]" % (start, end)
print "ERROR: %s: %lu slots, total region length = %lu" % (func, slots, rlen_sum)
return
num_funcs = 0
num_errors = 0
func = False
slots = 0
rlen_sum = 0
for line in os.popen("%s -u %s" % (readelf, sys.argv[1])):
m = start_pattern.match(line)
if m:
check_func(func, slots, rlen_sum)
func = m.group(1)
start = long(m.group(2), 16)
end = long(m.group(3), 16)
slots = 3 * (end - start) / 16
rlen_sum = 0L
num_funcs += 1
else:
m = rlen_pattern.match(line)
if m:
rlen_sum += long(m.group(1))
check_func(func, slots, rlen_sum)
if num_errors == 0:
print "No errors detected in %u functions." % num_funcs
else:
if num_errors > 1:
err="errors"
else:
err="error"
print "%u %s detected in %u functions." % (num_errors, err, num_funcs)
sys.exit(1)
| gpl-2.0 |
ankitjain87/django-allauth | allauth/socialaccount/south_migrations/0008_client_id.py | 78 | 6323 | # encoding: utf-8
import datetime
from south.db import db
from south.v2 import DataMigration
from django.db import models
class Migration(DataMigration):
def forwards(self, orm):
"Write your forwards methods here."
for app in orm.SocialApp.objects.all():
app.client_id = app.key
app.key = ''
app.save()
def backwards(self, orm):
"Write your backwards methods here."
models = {
'auth.group': {
'Meta': {'object_name': 'Group'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
'auth.permission': {
'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2012, 12, 22, 12, 51, 18, 10544)'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2012, 12, 22, 12, 51, 18, 10426)'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'sites.site': {
'Meta': {'ordering': "('domain',)", 'object_name': 'Site', 'db_table': "'django_site'"},
'domain': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'socialaccount.socialaccount': {
'Meta': {'unique_together': "(('provider', 'uid'),)", 'object_name': 'SocialAccount'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'extra_data': ('allauth.socialaccount.fields.JSONField', [], {'default': "'{}'"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'provider': ('django.db.models.fields.CharField', [], {'max_length': '30'}),
'uid': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"})
},
'socialaccount.socialapp': {
'Meta': {'object_name': 'SocialApp'},
'client_id': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'key': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '40'}),
'provider': ('django.db.models.fields.CharField', [], {'max_length': '30'}),
'secret': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'sites': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['sites.Site']", 'symmetrical': 'False', 'blank': 'True'})
},
'socialaccount.socialtoken': {
'Meta': {'unique_together': "(('app', 'account'),)", 'object_name': 'SocialToken'},
'account': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['socialaccount.SocialAccount']"}),
'app': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['socialaccount.SocialApp']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'token': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'token_secret': ('django.db.models.fields.CharField', [], {'max_length': '200', 'blank': 'True'})
}
}
complete_apps = ['socialaccount']
| mit |
darbik/bitcoin | bitcoin/tests/atm.py | 1 | 2426 | # Test example of how the flow of the atm would work without a kivy frontend
# Has lots of print statements to help with testing and make things clearer while testing
from random import randint
btcPrice = 600.00
atmAmount = 7500.00
btcAmount = 8 # only need about 2/3 of atmamount in btc
buyFee = 1.05
sellFee = 1.05
go = 'y'
feesMade = 0 # max $750 per transaction
volume = 0
while go == 'y':
x = randint(5, 15)
y = randint(5, 15) # to simulate random price movements
btcPrice += x
btcPrice -= y
if atmAmount > 7500:
additionalBuyFee = (((atmAmount / 7500) - 1) * 0.35)
additionalSellFee = (((atmAmount / 7500) - 1) * 0.35)
buyBtcPrice = (buyFee + additionalBuyFee) * btcPrice
sellBtcPrice = (sellFee + additionalSellFee) * btcPrice * 0.905
elif atmAmount < 7500:
additionalBuyFee = (((atmAmount / 7500) - 1) * 0.35)
additionalSellFee = (((atmAmount / 7500) - 1) * 0.35)
buyBtcPrice = (buyFee + additionalBuyFee) * btcPrice
sellBtcPrice = (sellFee + additionalSellFee) * btcPrice * 0.905
else:
buyBtcPrice = buyFee * btcPrice
sellBtcPrice = sellFee * btcPrice * 0.905
print "\nREAL BTC PRICE : ", btcPrice # market price
print "\nBuy BTC price : ", buyBtcPrice # market price + fees
print "Sell BTC price : ", sellBtcPrice
print "ATM amount : ", atmAmount
print "BTC amount : ", btcAmount
print "Fees made in BTC : ", feesMade
print "Volume in fiat : ", volume
buyorsell = raw_input("\nBuy or sell?")
howmuch = int(raw_input("How much?"))
if howmuch > 500 or howmuch <= 0:
while True:
print "Max $750, please try again."
howmuch = int(raw_input("How much?"))
if howmuch <= 500:
break
if buyorsell == 'buy':
print "You just bought ", (howmuch / buyBtcPrice) * 0.95, " BTC"
atmAmount += howmuch
btcAmount -= (howmuch / buyBtcPrice) * 0.95
feesMade += howmuch / buyBtcPrice * 0.05
volume += howmuch
if buyorsell == 'sell':
print "You just sold", (howmuch / sellBtcPrice) * 0.95, " BTC"
atmAmount -= howmuch
btcAmount += (howmuch / sellBtcPrice) * 0.95
feesMade += howmuch / sellBtcPrice * 0.05
volume += howmuch
| mit |
imsparsh/python-for-android | python-build/python-libs/gdata/src/gdata/tlslite/__init__.py | 409 | 1129 | """
TLS Lite is a free python library that implements SSL v3, TLS v1, and
TLS v1.1. TLS Lite supports non-traditional authentication methods
such as SRP, shared keys, and cryptoIDs, in addition to X.509
certificates. TLS Lite is pure python, however it can access OpenSSL,
cryptlib, pycrypto, and GMPY for faster crypto operations. TLS Lite
integrates with httplib, xmlrpclib, poplib, imaplib, smtplib,
SocketServer, asyncore, and Twisted.
To use, do::
from tlslite.api import *
Then use the L{tlslite.TLSConnection.TLSConnection} class with a socket,
or use one of the integration classes in L{tlslite.integration}.
@version: 0.3.8
"""
__version__ = "0.3.8"
__all__ = ["api",
"BaseDB",
"Checker",
"constants",
"errors",
"FileObject",
"HandshakeSettings",
"mathtls",
"messages",
"Session",
"SessionCache",
"SharedKeyDB",
"TLSConnection",
"TLSRecordLayer",
"VerifierDB",
"X509",
"X509CertChain",
"integration",
"utils"]
| apache-2.0 |
Kast0rTr0y/ansible | lib/ansible/modules/network/ios/ios_command.py | 5 | 7247 | #!/usr/bin/python
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
ANSIBLE_METADATA = {
'status': ['preview'],
'supported_by': 'core',
'version': '1.0'
}
DOCUMENTATION = """
---
module: ios_command
version_added: "2.1"
author: "Peter Sprygada (@privateip)"
short_description: Run commands on remote devices running Cisco IOS
description:
- Sends arbitrary commands to an ios node and returns the results
read from the device. This module includes an
argument that will cause the module to wait for a specific condition
before returning or timing out if the condition is not met.
- This module does not support running commands in configuration mode.
Please use M(ios_config) to configure IOS devices.
options:
commands:
description:
- List of commands to send to the remote ios device over the
configured provider. The resulting output from the command
is returned. If the I(wait_for) argument is provided, the
module is not returned until the condition is satisfied or
the number of retries has expired.
required: true
wait_for:
description:
- List of conditions to evaluate against the output of the
command. The task will wait for each condition to be true
before moving forward. If the conditional is not true
within the configured number of retries, the task fails.
See examples.
required: false
default: null
aliases: ['waitfor']
version_added: "2.2"
match:
description:
- The I(match) argument is used in conjunction with the
I(wait_for) argument to specify the match policy. Valid
values are C(all) or C(any). If the value is set to C(all)
then all conditionals in the wait_for must be satisfied. If
the value is set to C(any) then only one of the values must be
satisfied.
required: false
default: all
choices: ['any', 'all']
version_added: "2.2"
retries:
description:
- Specifies the number of retries a command should by tried
before it is considered failed. The command is run on the
target device every retry and evaluated against the
I(wait_for) conditions.
required: false
default: 10
interval:
description:
- Configures the interval in seconds to wait between retries
of the command. If the command does not pass the specified
conditions, the interval indicates how long to wait before
trying the command again.
required: false
default: 1
"""
EXAMPLES = """
tasks:
- name: run show version on remote devices
ios_command:
commands: show version
- name: run show version and check to see if output contains IOS
ios_command:
commands: show version
wait_for: result[0] contains IOS
- name: run multiple commands on remote nodes
ios_command:
commands:
- show version
- show interfaces
- name: run multiple commands and evaluate the output
ios_command:
commands:
- show version
- show interfaces
wait_for:
- result[0] contains IOS
- result[1] contains Loopback0
"""
RETURN = """
stdout:
description: The set of responses from the commands
returned: always
type: list
sample: ['...', '...']
stdout_lines:
description: The value of stdout split into a list
returned: always
type: list
sample: [['...', '...'], ['...'], ['...']]
failed_conditions:
description: The list of conditionals that have failed
returned: failed
type: list
sample: ['...', '...']
"""
import time
from ansible.module_utils.ios import run_commands
from ansible.module_utils.ios import ios_argument_spec, check_args
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.network_common import ComplexList
from ansible.module_utils.netcli import Conditional
from ansible.module_utils.six import string_types
def to_lines(stdout):
for item in stdout:
if isinstance(item, string_types):
item = str(item).split('\n')
yield item
def parse_commands(module, warnings):
command = ComplexList(dict(
command=dict(key=True),
prompt=dict(),
response=dict()
), module)
commands = command(module.params['commands'])
for index, item in enumerate(commands):
if module.check_mode and not item['command'].startswith('show'):
warnings.append(
'only show commands are supported when using check mode, not '
'executing `%s`' % item['command']
)
elif item['command'].startswith('conf'):
module.fail_json(
msg='ios_command does not support running config mode '
'commands. Please use ios_config instead'
)
return commands
def main():
"""main entry point for module execution
"""
argument_spec = dict(
# { command: <str>, prompt: <str>, response: <str> }
commands=dict(type='list', required=True),
wait_for=dict(type='list', aliases=['waitfor']),
match=dict(default='all', choices=['all', 'any']),
retries=dict(default=10, type='int'),
interval=dict(default=1, type='int')
)
argument_spec.update(ios_argument_spec)
module = AnsibleModule(argument_spec=argument_spec,
supports_check_mode=True)
result = {'changed': False}
warnings = list()
check_args(module, warnings)
commands = parse_commands(module, warnings)
result['warnings'] = warnings
wait_for = module.params['wait_for'] or list()
conditionals = [Conditional(c) for c in wait_for]
retries = module.params['retries']
interval = module.params['interval']
match = module.params['match']
while retries > 0:
responses = run_commands(module, commands)
for item in list(conditionals):
if item(responses):
if match == 'any':
conditionals = list()
break
conditionals.remove(item)
if not conditionals:
break
time.sleep(interval)
retries -= 1
if conditionals:
failed_conditions = [item.raw for item in conditionals]
msg = 'One or more conditional statements have not be satisfied'
module.fail_json(msg=msg, failed_conditions=failed_conditions)
result = {
'changed': False,
'stdout': responses,
'stdout_lines': list(to_lines(responses))
}
module.exit_json(**result)
if __name__ == '__main__':
main()
| gpl-3.0 |
nan86150/ImageFusion | lib/python2.7/site-packages/scipy/io/matlab/tests/test_mio_utils.py | 44 | 1792 | """ Testing
"""
from __future__ import division, print_function, absolute_import
import numpy as np
from numpy.testing import assert_array_equal, assert_array_almost_equal, \
run_module_suite, assert_
from scipy.io.matlab.mio_utils import cproduct, squeeze_element, \
chars_to_strings
def test_cproduct():
assert_(cproduct(()) == 1)
assert_(cproduct((1,)) == 1)
assert_(cproduct((1,3)) == 3)
assert_(cproduct([1,3]) == 3)
def test_squeeze_element():
a = np.zeros((1,3))
assert_array_equal(np.squeeze(a), squeeze_element(a))
# 0d output from squeeze gives scalar
sq_int = squeeze_element(np.zeros((1,1), dtype=np.float))
assert_(isinstance(sq_int, float))
# Unless it's a structured array
sq_sa = squeeze_element(np.zeros((1,1),dtype=[('f1', 'f')]))
assert_(isinstance(sq_sa, np.ndarray))
def test_chars_strings():
# chars as strings
strings = ['learn ', 'python', 'fast ', 'here ']
str_arr = np.array(strings, dtype='U6') # shape (4,)
chars = [list(s) for s in strings]
char_arr = np.array(chars, dtype='U1') # shape (4,6)
assert_array_equal(chars_to_strings(char_arr), str_arr)
ca2d = char_arr.reshape((2,2,6))
sa2d = str_arr.reshape((2,2))
assert_array_equal(chars_to_strings(ca2d), sa2d)
ca3d = char_arr.reshape((1,2,2,6))
sa3d = str_arr.reshape((1,2,2))
assert_array_equal(chars_to_strings(ca3d), sa3d)
# Fortran ordered arrays
char_arrf = np.array(chars, dtype='U1', order='F') # shape (4,6)
assert_array_equal(chars_to_strings(char_arrf), str_arr)
# empty array
arr = np.array([['']], dtype='U1')
out_arr = np.array([''], dtype='U1')
assert_array_equal(chars_to_strings(arr), out_arr)
if __name__ == "__main__":
run_module_suite()
| mit |
kenshay/ImageScript | ProgramData/SystemFiles/Python/Lib/test/test_hmac.py | 36 | 17382 | # coding: utf-8
import hmac
import hashlib
import unittest
import warnings
from test import test_support
class TestVectorsTestCase(unittest.TestCase):
def test_md5_vectors(self):
# Test the HMAC module against test vectors from the RFC.
def md5test(key, data, digest):
h = hmac.HMAC(key, data)
self.assertEqual(h.hexdigest().upper(), digest.upper())
md5test(chr(0x0b) * 16,
"Hi There",
"9294727A3638BB1C13F48EF8158BFC9D")
md5test("Jefe",
"what do ya want for nothing?",
"750c783e6ab0b503eaa86e310a5db738")
md5test(chr(0xAA)*16,
chr(0xDD)*50,
"56be34521d144c88dbb8c733f0e8b3f6")
md5test("".join([chr(i) for i in range(1, 26)]),
chr(0xCD) * 50,
"697eaf0aca3a3aea3a75164746ffaa79")
md5test(chr(0x0C) * 16,
"Test With Truncation",
"56461ef2342edc00f9bab995690efd4c")
md5test(chr(0xAA) * 80,
"Test Using Larger Than Block-Size Key - Hash Key First",
"6b1ab7fe4bd7bf8f0b62e6ce61b9d0cd")
md5test(chr(0xAA) * 80,
("Test Using Larger Than Block-Size Key "
"and Larger Than One Block-Size Data"),
"6f630fad67cda0ee1fb1f562db3aa53e")
def test_sha_vectors(self):
def shatest(key, data, digest):
h = hmac.HMAC(key, data, digestmod=hashlib.sha1)
self.assertEqual(h.hexdigest().upper(), digest.upper())
shatest(chr(0x0b) * 20,
"Hi There",
"b617318655057264e28bc0b6fb378c8ef146be00")
shatest("Jefe",
"what do ya want for nothing?",
"effcdf6ae5eb2fa2d27416d5f184df9c259a7c79")
shatest(chr(0xAA)*20,
chr(0xDD)*50,
"125d7342b9ac11cd91a39af48aa17b4f63f175d3")
shatest("".join([chr(i) for i in range(1, 26)]),
chr(0xCD) * 50,
"4c9007f4026250c6bc8414f9bf50c86c2d7235da")
shatest(chr(0x0C) * 20,
"Test With Truncation",
"4c1a03424b55e07fe7f27be1d58bb9324a9a5a04")
shatest(chr(0xAA) * 80,
"Test Using Larger Than Block-Size Key - Hash Key First",
"aa4ae5e15272d00e95705637ce8a3b55ed402112")
shatest(chr(0xAA) * 80,
("Test Using Larger Than Block-Size Key "
"and Larger Than One Block-Size Data"),
"e8e99d0f45237d786d6bbaa7965c7808bbff1a91")
def _rfc4231_test_cases(self, hashfunc):
def hmactest(key, data, hexdigests):
h = hmac.HMAC(key, data, digestmod=hashfunc)
self.assertEqual(h.hexdigest().lower(), hexdigests[hashfunc])
# 4.2. Test Case 1
hmactest(key = '\x0b'*20,
data = 'Hi There',
hexdigests = {
hashlib.sha224: '896fb1128abbdf196832107cd49df33f'
'47b4b1169912ba4f53684b22',
hashlib.sha256: 'b0344c61d8db38535ca8afceaf0bf12b'
'881dc200c9833da726e9376c2e32cff7',
hashlib.sha384: 'afd03944d84895626b0825f4ab46907f'
'15f9dadbe4101ec682aa034c7cebc59c'
'faea9ea9076ede7f4af152e8b2fa9cb6',
hashlib.sha512: '87aa7cdea5ef619d4ff0b4241a1d6cb0'
'2379f4e2ce4ec2787ad0b30545e17cde'
'daa833b7d6b8a702038b274eaea3f4e4'
'be9d914eeb61f1702e696c203a126854',
})
# 4.3. Test Case 2
hmactest(key = 'Jefe',
data = 'what do ya want for nothing?',
hexdigests = {
hashlib.sha224: 'a30e01098bc6dbbf45690f3a7e9e6d0f'
'8bbea2a39e6148008fd05e44',
hashlib.sha256: '5bdcc146bf60754e6a042426089575c7'
'5a003f089d2739839dec58b964ec3843',
hashlib.sha384: 'af45d2e376484031617f78d2b58a6b1b'
'9c7ef464f5a01b47e42ec3736322445e'
'8e2240ca5e69e2c78b3239ecfab21649',
hashlib.sha512: '164b7a7bfcf819e2e395fbe73b56e0a3'
'87bd64222e831fd610270cd7ea250554'
'9758bf75c05a994a6d034f65f8f0e6fd'
'caeab1a34d4a6b4b636e070a38bce737',
})
# 4.4. Test Case 3
hmactest(key = '\xaa'*20,
data = '\xdd'*50,
hexdigests = {
hashlib.sha224: '7fb3cb3588c6c1f6ffa9694d7d6ad264'
'9365b0c1f65d69d1ec8333ea',
hashlib.sha256: '773ea91e36800e46854db8ebd09181a7'
'2959098b3ef8c122d9635514ced565fe',
hashlib.sha384: '88062608d3e6ad8a0aa2ace014c8a86f'
'0aa635d947ac9febe83ef4e55966144b'
'2a5ab39dc13814b94e3ab6e101a34f27',
hashlib.sha512: 'fa73b0089d56a284efb0f0756c890be9'
'b1b5dbdd8ee81a3655f83e33b2279d39'
'bf3e848279a722c806b485a47e67c807'
'b946a337bee8942674278859e13292fb',
})
# 4.5. Test Case 4
hmactest(key = ''.join([chr(x) for x in xrange(0x01, 0x19+1)]),
data = '\xcd'*50,
hexdigests = {
hashlib.sha224: '6c11506874013cac6a2abc1bb382627c'
'ec6a90d86efc012de7afec5a',
hashlib.sha256: '82558a389a443c0ea4cc819899f2083a'
'85f0faa3e578f8077a2e3ff46729665b',
hashlib.sha384: '3e8a69b7783c25851933ab6290af6ca7'
'7a9981480850009cc5577c6e1f573b4e'
'6801dd23c4a7d679ccf8a386c674cffb',
hashlib.sha512: 'b0ba465637458c6990e5a8c5f61d4af7'
'e576d97ff94b872de76f8050361ee3db'
'a91ca5c11aa25eb4d679275cc5788063'
'a5f19741120c4f2de2adebeb10a298dd',
})
# 4.7. Test Case 6
hmactest(key = '\xaa'*131,
data = 'Test Using Larger Than Block-Siz'
'e Key - Hash Key First',
hexdigests = {
hashlib.sha224: '95e9a0db962095adaebe9b2d6f0dbce2'
'd499f112f2d2b7273fa6870e',
hashlib.sha256: '60e431591ee0b67f0d8a26aacbf5b77f'
'8e0bc6213728c5140546040f0ee37f54',
hashlib.sha384: '4ece084485813e9088d2c63a041bc5b4'
'4f9ef1012a2b588f3cd11f05033ac4c6'
'0c2ef6ab4030fe8296248df163f44952',
hashlib.sha512: '80b24263c7c1a3ebb71493c1dd7be8b4'
'9b46d1f41b4aeec1121b013783f8f352'
'6b56d037e05f2598bd0fd2215d6a1e52'
'95e64f73f63f0aec8b915a985d786598',
})
# 4.8. Test Case 7
hmactest(key = '\xaa'*131,
data = 'This is a test using a larger th'
'an block-size key and a larger t'
'han block-size data. The key nee'
'ds to be hashed before being use'
'd by the HMAC algorithm.',
hexdigests = {
hashlib.sha224: '3a854166ac5d9f023f54d517d0b39dbd'
'946770db9c2b95c9f6f565d1',
hashlib.sha256: '9b09ffa71b942fcb27635fbcd5b0e944'
'bfdc63644f0713938a7f51535c3a35e2',
hashlib.sha384: '6617178e941f020d351e2f254e8fd32c'
'602420feb0b8fb9adccebb82461e99c5'
'a678cc31e799176d3860e6110c46523e',
hashlib.sha512: 'e37b6a775dc87dbaa4dfa9f96e5e3ffd'
'debd71f8867289865df5a32d20cdc944'
'b6022cac3c4982b10d5eeb55c3e4de15'
'134676fb6de0446065c97440fa8c6a58',
})
def test_sha224_rfc4231(self):
self._rfc4231_test_cases(hashlib.sha224)
def test_sha256_rfc4231(self):
self._rfc4231_test_cases(hashlib.sha256)
def test_sha384_rfc4231(self):
self._rfc4231_test_cases(hashlib.sha384)
def test_sha512_rfc4231(self):
self._rfc4231_test_cases(hashlib.sha512)
def test_legacy_block_size_warnings(self):
class MockCrazyHash(object):
"""Ain't no block_size attribute here."""
def __init__(self, *args):
self._x = hashlib.sha1(*args)
self.digest_size = self._x.digest_size
def update(self, v):
self._x.update(v)
def digest(self):
return self._x.digest()
with warnings.catch_warnings():
warnings.simplefilter('error', RuntimeWarning)
with self.assertRaises(RuntimeWarning):
hmac.HMAC('a', 'b', digestmod=MockCrazyHash)
self.fail('Expected warning about missing block_size')
MockCrazyHash.block_size = 1
with self.assertRaises(RuntimeWarning):
hmac.HMAC('a', 'b', digestmod=MockCrazyHash)
self.fail('Expected warning about small block_size')
class ConstructorTestCase(unittest.TestCase):
    """Smoke tests: constructing hmac.HMAC must not raise."""

    def test_normal(self):
        # Standard constructor call.
        try:
            hmac.HMAC("key")
        except Exception:  # narrowed from bare except: don't mask SystemExit etc.
            self.fail("Standard constructor call raised exception.")

    def test_withtext(self):
        # Constructor call with text.
        try:
            hmac.HMAC("key", "hash this!")
        except Exception:
            self.fail("Constructor call with text argument raised exception.")

    def test_withmodule(self):
        # Constructor call with text and digest module.
        try:
            hmac.HMAC("key", "", hashlib.sha1)
        except Exception:
            self.fail("Constructor call with hashlib.sha1 raised exception.")
class SanityTestCase(unittest.TestCase):
    """Whitebox sanity checks on the hmac.HMAC implementation."""

    def test_default_is_md5(self):
        # Testing if HMAC defaults to MD5 algorithm.
        # NOTE: this whitebox test depends on the hmac class internals
        h = hmac.HMAC("key")
        # assertEqual reports both operands on failure, unlike assertTrue(==).
        self.assertEqual(h.digest_cons, hashlib.md5)

    def test_exercise_all_methods(self):
        # Exercising all methods once.
        # This must not raise any exceptions
        try:
            h = hmac.HMAC("my secret key")
            h.update("compute the hash of this text!")
            h.digest()
            h.hexdigest()
            h.copy()
        except Exception:  # narrowed from bare except: don't mask SystemExit etc.
            self.fail("Exception raised during normal usage of HMAC class.")
class CopyTestCase(unittest.TestCase):
    """Tests for HMAC.copy()."""

    def test_attributes(self):
        # Testing if attributes are of same type.
        h1 = hmac.HMAC("key")
        h2 = h1.copy()
        # assertEqual/assertIs give informative failure output, unlike
        # assertTrue(x == y) which only reports "False is not true".
        self.assertEqual(h1.digest_cons, h2.digest_cons,
                         "digest constructors don't match.")
        self.assertIs(type(h1.inner), type(h2.inner),
                      "Types of inner don't match.")
        self.assertIs(type(h1.outer), type(h2.outer),
                      "Types of outer don't match.")

    def test_realcopy(self):
        # Testing if the copy method created a real copy.
        h1 = hmac.HMAC("key")
        h2 = h1.copy()
        # Identity checks (not ==) so an overridden __cmp__/__eq__ can't
        # fake a "real copy"; replaces the old id(...) != id(...) idiom.
        self.assertIsNot(h1, h2, "No real copy of the HMAC instance.")
        self.assertIsNot(h1.inner, h2.inner,
                         "No real copy of the attribute 'inner'.")
        self.assertIsNot(h1.outer, h2.outer,
                         "No real copy of the attribute 'outer'.")

    def test_equality(self):
        # Testing if the copy has the same digests.
        h1 = hmac.HMAC("key")
        h1.update("some random text")
        h2 = h1.copy()
        self.assertEqual(h1.digest(), h2.digest(),
                         "Digest of copy doesn't match original digest.")
        self.assertEqual(h1.hexdigest(), h2.hexdigest(),
                         "Hexdigest of copy doesn't match original hexdigest.")
class CompareDigestTestCase(unittest.TestCase):
    """Tests for the constant-time hmac.compare_digest helper.

    NOTE(review): several expectations here (unicode vs bytes raising
    TypeError, str accepting non-ASCII byte strings) are Python-2 specific.
    """

    def test_compare_digest(self):
        # Testing input type exception handling
        a, b = 100, 200
        self.assertRaises(TypeError, hmac.compare_digest, a, b)
        a, b = 100, b"foobar"
        self.assertRaises(TypeError, hmac.compare_digest, a, b)
        a, b = b"foobar", 200
        self.assertRaises(TypeError, hmac.compare_digest, a, b)
        a, b = u"foobar", b"foobar"
        self.assertRaises(TypeError, hmac.compare_digest, a, b)
        a, b = b"foobar", u"foobar"
        self.assertRaises(TypeError, hmac.compare_digest, a, b)

        # Testing bytes of different lengths
        a, b = b"foobar", b"foo"
        self.assertFalse(hmac.compare_digest(a, b))
        a, b = b"\xde\xad\xbe\xef", b"\xde\xad"
        self.assertFalse(hmac.compare_digest(a, b))

        # Testing bytes of same lengths, different values
        a, b = b"foobar", b"foobaz"
        self.assertFalse(hmac.compare_digest(a, b))
        a, b = b"\xde\xad\xbe\xef", b"\xab\xad\x1d\xea"
        self.assertFalse(hmac.compare_digest(a, b))

        # Testing bytes of same lengths, same values
        a, b = b"foobar", b"foobar"
        self.assertTrue(hmac.compare_digest(a, b))
        a, b = b"\xde\xad\xbe\xef", b"\xde\xad\xbe\xef"
        self.assertTrue(hmac.compare_digest(a, b))

        # Testing bytearrays of same lengths, same values
        a, b = bytearray(b"foobar"), bytearray(b"foobar")
        self.assertTrue(hmac.compare_digest(a, b))

        # Testing bytearrays of different lengths
        a, b = bytearray(b"foobar"), bytearray(b"foo")
        self.assertFalse(hmac.compare_digest(a, b))

        # Testing bytearrays of same lengths, different values
        a, b = bytearray(b"foobar"), bytearray(b"foobaz")
        self.assertFalse(hmac.compare_digest(a, b))

        # Testing bytes and bytearray of same lengths, same values
        a, b = bytearray(b"foobar"), b"foobar"
        self.assertTrue(hmac.compare_digest(a, b))
        self.assertTrue(hmac.compare_digest(b, a))

        # Testing bytes and bytearray of different lengths
        a, b = bytearray(b"foobar"), b"foo"
        self.assertFalse(hmac.compare_digest(a, b))
        self.assertFalse(hmac.compare_digest(b, a))

        # Testing bytes and bytearray of same lengths, different values
        a, b = bytearray(b"foobar"), b"foobaz"
        self.assertFalse(hmac.compare_digest(a, b))
        self.assertFalse(hmac.compare_digest(b, a))

        # Testing str of same lengths
        a, b = "foobar", "foobar"
        self.assertTrue(hmac.compare_digest(a, b))

        # Testing str of different lengths
        a, b = "foo", "foobar"
        self.assertFalse(hmac.compare_digest(a, b))

        # Testing bytes of same lengths, different values
        a, b = "foobar", "foobaz"
        self.assertFalse(hmac.compare_digest(a, b))

        # Testing error cases
        a, b = u"foobar", b"foobar"
        self.assertRaises(TypeError, hmac.compare_digest, a, b)
        a, b = b"foobar", u"foobar"
        self.assertRaises(TypeError, hmac.compare_digest, a, b)
        a, b = b"foobar", 1
        self.assertRaises(TypeError, hmac.compare_digest, a, b)
        a, b = 100, 200
        self.assertRaises(TypeError, hmac.compare_digest, a, b)
        a, b = "fooä", "fooä"
        self.assertTrue(hmac.compare_digest(a, b))

        with test_support.check_py3k_warnings():
            # subclasses are supported; overridden __eq__ must be ignored
            class mystr(str):
                def __eq__(self, other):
                    return False

            a, b = mystr("foobar"), mystr("foobar")
            self.assertTrue(hmac.compare_digest(a, b))
            a, b = mystr("foobar"), "foobar"
            self.assertTrue(hmac.compare_digest(a, b))
            a, b = mystr("foobar"), mystr("foobaz")
            self.assertFalse(hmac.compare_digest(a, b))

        with test_support.check_py3k_warnings():
            class mybytes(bytes):
                def __eq__(self, other):
                    return False

            a, b = mybytes(b"foobar"), mybytes(b"foobar")
            self.assertTrue(hmac.compare_digest(a, b))
            a, b = mybytes(b"foobar"), b"foobar"
            self.assertTrue(hmac.compare_digest(a, b))
            a, b = mybytes(b"foobar"), mybytes(b"foobaz")
            self.assertFalse(hmac.compare_digest(a, b))
def test_main():
    # Run every TestCase defined in this module under the Python 2
    # regression-test harness.
    test_support.run_unittest(
        TestVectorsTestCase,
        ConstructorTestCase,
        SanityTestCase,
        CopyTestCase,
        CompareDigestTestCase,
    )


if __name__ == "__main__":
    test_main()
| gpl-3.0 |
SaintGimp/BeagleBoneHardware | gimpbbio/gimpbbio/devices.py | 1 | 1756 | from . import gpio
import time
import datetime
# This wraps a pin and treats it as a mechanical switch that needs
# to be debounced. In order to do proper debouncing we need to watch
# for both rising and falling events, but those events might not
# accurately report the state of the pin. Instead we have to keep
# track of whether the switch is low or high ourselves. We also
# integrate over the changes to filter out bounce noise.
class Switch:
    """Debounced view of a mechanical switch attached to a GPIO pin.

    Edge events on a bouncy switch do not reliably reflect the pin level,
    so the high/low state is tracked here and every reported edge is
    confirmed by integrating polled samples before a change is announced.
    """

    def __init__(self, pin):
        self.pin = pin
        self.ignore_queued_changes_duration = datetime.timedelta(milliseconds=3)

    def watch(self, on_high=None, on_low=None):
        """Begin watching the pin; invoke on_high/on_low on debounced changes."""
        self._on_high = on_high
        self._on_low = on_low
        self._current_state = self.pin.is_high()
        self.pin.watch(gpio.BOTH, self._on_change)

    def _on_change(self, pin):
        # TODO: we sometimes get a stray event immediately after finishing
        # the debounce
        settled = self._debounced_state(3, 30)
        if settled == self._current_state:
            return
        self._current_state = settled
        callback = self._on_high if settled else self._on_low
        if callback:
            callback(self)

    def _debounced_state(self, poll_interval_ms, debounce_time_ms):
        """Poll until the integrator saturates; return the settled level."""
        ceiling = debounce_time_ms / poll_interval_ms
        level = ceiling / 2.0
        while True:
            # Sample, then nudge the integrator toward the observed level.
            level = level + 1 if self.pin.is_high() else level - 1
            if level <= 0:
                return False
            if level >= ceiling:
                return True
            time.sleep(poll_interval_ms / 1000)
| mit |
jmartinm/inspire-next | inspire/modules/harvester/manage.py | 2 | 2990 | # -*- coding: utf-8 -*-
#
# This file is part of INSPIRE.
# Copyright (C) 2014, 2015 CERN.
#
# INSPIRE is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# INSPIRE is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with INSPIRE. If not, see <http://www.gnu.org/licenses/>.
#
# In applying this license, CERN does not waive the privileges and immunities
# granted to it by virtue of its status as an Intergovernmental Organization
# or submit itself to any jurisdiction.
"""Manage harvester module."""
from __future__ import print_function
import sys
from invenio_base.globals import cfg
from invenio_workflows.registry import workflows
from invenio_ext.script import Manager
from .utils import validate_date
from .tasks import run_harvest
manager = Manager(usage=__doc__)
def get_harvesting_workflows():
    """Return the workflows enabled in the harvester module."""
    configured = cfg.get("HARVESTER_WORKFLOWS", list())
    # Keep only the names actually registered in the workflows registry.
    return [name for name in configured if workflows.get(name)]
@manager.option('--workflow', '-w', dest='workflow')
@manager.option('--from', '-f', dest='from_date',
                help='Get records from this date and on.')
@manager.option('--to', '-t', dest='to_date',
                help='Get records until this date.')
@manager.option('--reharvest', '-R', dest='reharvest', action="store_true",
                help='Indicate a full reharvest.')
def run(workflow, from_date, to_date, reharvest=False):
    """Run a harvesting workflow from the command line.

    Usage: inveniomanage harvester run -w workflow_name -f 2014-01-01 -t 2014-12-31
    """
    # Guard clauses: a workflow name is mandatory and must be registered.
    if not workflow:
        print("Missing workflow!", file=sys.stderr)
        print("Usage: inveniomanage harvester run -w workflow_name -f 2014-01-01 -t 2014-12-31")
        list_workflows()
        return
    if workflow not in get_harvesting_workflows():
        print("* Invalid workflow name",
              file=sys.stderr)
        list_workflows()
        return

    # Dates are optional, but must parse when given.
    for candidate in (from_date, to_date):
        if candidate:
            validate_date(candidate)

    args = dict(workflow=workflow,
                from_date=from_date,
                to_date=to_date,
                reharvest=reharvest)
    # Schedule the harvest asynchronously via Celery.
    job = run_harvest.delay(**args)
    print("Scheduled job {0} with args: {1}".format(job.id, args))
@manager.command
def list_workflows():
    """List available workflows."""
    print("Available workflows:")
    for workflow_name in get_harvesting_workflows():
        print(workflow_name)
| gpl-2.0 |
K-ran/Angduino-nodejs | node_modules/socket.io/node_modules/engine.io/node_modules/engine.io-parser/node_modules/utf8/tests/generate-test-data.py | 1788 | 1435 | #!/usr/bin/env python
import re
import json
# https://mathiasbynens.be/notes/javascript-encoding#surrogate-formulae
# http://stackoverflow.com/a/13436167/96656
# https://mathiasbynens.be/notes/javascript-encoding#surrogate-formulae
# http://stackoverflow.com/a/13436167/96656
def unisymbol(codePoint):
	"""Return the unicode symbol for a code point (Python 2, surrogate pair
	for supplementary-plane points); 'Error' when out of range."""
	if 0x0000 <= codePoint <= 0xFFFF:
		# Basic Multilingual Plane: a single code unit.
		return unichr(codePoint)
	if 0x010000 <= codePoint <= 0x10FFFF:
		# Supplementary planes: encode as a UTF-16 surrogate pair.
		offset = codePoint - 0x10000
		highSurrogate = int(offset / 0x400) + 0xD800
		lowSurrogate = int(offset % 0x400) + 0xDC00
		return unichr(highSurrogate) + unichr(lowSurrogate)
	return 'Error'
def hexify(codePoint):
	"""Format a code point as 'U+XXXXXX' (uppercase, zero-padded to 6)."""
	digits = hex(codePoint)[2:].upper().zfill(6)
	return 'U+' + digits
def writeFile(filename, contents):
	# Echo the file name so the generator shows progress
	# (Python 2 print statement).
	print filename
	# Normalize trailing whitespace to exactly one final newline.
	with open(filename, 'w') as f:
		f.write(contents.strip() + '\n')
# Build one record per Unicode scalar value: the code point, its decoded
# symbol, and its UTF-8 byte sequence exposed as latin1 characters.
data = []
for codePoint in range(0x000000, 0x10FFFF + 1):
	# Skip non-scalar values.
	if codePoint >= 0xD800 and codePoint <= 0xDFFF:
		continue
	symbol = unisymbol(codePoint)
	# Decoding the UTF-8 bytes as latin1 maps each byte to the character
	# with the same ordinal, i.e. exposes the raw byte sequence.
	# http://stackoverflow.com/a/17199950/96656
	# NOTE: the name shadows the builtin `bytes`; kept for fidelity.
	bytes = symbol.encode('utf8').decode('latin1')
	data.append({
		'codePoint': codePoint,
		'decoded': symbol,
		'encoded': bytes
	});
jsonData = json.dumps(data, sort_keys=False, indent=2, separators=(',', ': '))
# Use tabs instead of double spaces for indentation
jsonData = jsonData.replace('  ', '\t')
# Escape hexadecimal digits in escape sequences
jsonData = re.sub(
	r'\\u([a-fA-F0-9]{4})',
	lambda match: r'\u{}'.format(match.group(1).upper()),
	jsonData
)
writeFile('data.json', jsonData)
| mit |
shankisg/wye | wye/regions/models.py | 3 | 1160 | from django.contrib.auth.models import User
from django.db import models
from wye.base.models import TimeAuditModel
class State(TimeAuditModel):
    """A state/region, identified by a unique name; top of the
    Location -> State hierarchy."""
    name = models.CharField(max_length=300, unique=True)

    class Meta:
        db_table = 'states'

    def __str__(self):
        return '{}'.format(self.name)
class Location(TimeAuditModel):
    """A named location belonging to a State (unique by name)."""
    name = models.CharField(max_length=300, unique=True)
    state = models.ForeignKey(State)

    class Meta:
        db_table = 'locations'

    def __str__(self):
        # Rendered as "<location>, <state>".
        return '{}, {}'.format(self.name, self.state.name)
class RegionalLead(models.Model):
    """Associates a set of User leads with a Location."""
    location = models.ForeignKey(Location)
    leads = models.ManyToManyField(User)

    class Meta:
        db_table = 'regional_lead'
        verbose_name = 'RegionalLead'
        verbose_name_plural = 'RegionalLeads'

    def __str__(self):
        return '{}'.format(self.location)

    @property
    def get_count_of_leads(self):
        # Number of users registered as leads for this location.
        return self.leads.count()

    @classmethod
    def is_regional_lead(cls, user, location):
        # True if `user` is registered as a lead for `location`.
        return cls.objects.filter(
            leads=user, location=location).exists()
| mit |
joyider/NoJoy_DI | test/svc_test.py | 1 | 2446 | #!/usr/bin/python3.5
# -*- coding: utf-8 -*-
# NoJoy-DI (c) 2016 by Andre Karlsson<andre.karlsson@protractus.se>
#
# This file is part of NoJoy_DI.
#
# NoJoy_DI is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# NoJoy_DI is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Foobar. If not, see <http://www.gnu.org/licenses/>.
#
#
# Filename: svc_test.py by: andrek
# Timesamp: 2016-05-02 :: 14:19
import unittest
from NoJoy_DI.di import DI
from NoJoy_DI.service import Service
from NoJoy_DI.patterns import DefaultPattern, SingletonPattern, BasePattern
import pprint
class VarClass(object):
    """Empty marker class registered in the container and injected below."""
    pass


class MyVariableSvc(object):
    """Service with a mutable class attribute `var`; used below to probe
    whether the container hands out shared or distinct instances."""
    var = "var"
class SuperSvc(object):
    """Service taking three constructor inputs, echoed for inspection.

    Wired below via di.attempt(SuperSvc, True).input(obj__svc=...,
    text__param=..., value=...).
    """

    def __init__(self, obj, text, value):
        # obj: injected service instance; text: container parameter;
        # value: literal supplied at wiring time.
        super(SuperSvc, self).__init__()
        print("service instance: %s" % obj)
        print("container parameter: %s" % text)
        print("provided value: %s" % value)
        print("I am: ", self.__class__)
class AService(object):
    """Service whose constructor and `some_method` each receive a VarClass
    dependency resolved by the container."""
    a = None  # set to the injected VarClass by __init__
    b = None  # set to the injected VarClass by some_method

    def __init__(self, param: VarClass):
        super(AService, self).__init__()
        self.a = param
        print("AService.__init__: %s" % param.__class__.__name__)

    def some_method(self, param: VarClass):
        # Invoked by the container via .call("some_method", True) below.
        self.b = param
        print("AService.some_method: %s" % param.__class__.__name__)
# --- Container wiring exercised by test_answer() below (order matters) ---
di = DI()
di.attempt(VarClass)
di.add_variable('Variable_name', "variable_data")
# SuperSvc receives a service instance, a container parameter and a literal.
di.attempt(SuperSvc, True).input(obj__svc=VarClass, text__param="Variable_name", value="The DATA")
di.set(VarClass)
# AService is constructed from its signature, then some_method is called
# with a freshly resolved argument.
di.attempt(AService).set_signature().call("some_method", True)
# NOTE(review): "Continer" typo is in the runtime string; left as-is.
print("Continer.get: %s" % di.get(AService).__class__.__name__)
id = di.get(SuperSvc)  # NOTE: shadows the builtin id()
id2 = di.get_raw(SuperSvc)
di.set(MyVariableSvc)
myc1 = di.get(MyVariableSvc)
myc2 = di.get(MyVariableSvc)
myc2.var = "new_var"
def test_answer():
    """
    Simple test case to verify that the classes are instantiated as expected.
    :return:
    """
    # get_raw returns the Service wrapper; get returns the built instance.
    assert isinstance(id2, Service)
    assert isinstance(di.get(AService), AService)
    assert isinstance(id, SuperSvc)
    assert myc1 != myc2
    # NOTE(review): with default identity __eq__, myc1 != myc2 plus
    # myc2.var == myc1.var requires the container's sharing semantics for
    # the class attribute — verify against DI.get behavior.
    assert myc2.var == myc1.var
| gpl-3.0 |
ashutrix03/inteygrate_flaskapp-master | google/protobuf/descriptor_pool.py | 18 | 29177 | # Protocol Buffers - Google's data interchange format
# Copyright 2008 Google Inc. All rights reserved.
# https://developers.google.com/protocol-buffers/
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Provides DescriptorPool to use as a container for proto2 descriptors.
The DescriptorPool is used in conjection with a DescriptorDatabase to maintain
a collection of protocol buffer descriptors for use when dynamically creating
message types at runtime.
For most applications protocol buffers should be used via modules generated by
the protocol buffer compiler tool. This should only be used when the type of
protocol buffers used in an application or library cannot be predetermined.
Below is a straightforward example on how to use this class:
pool = DescriptorPool()
file_descriptor_protos = [ ... ]
for file_descriptor_proto in file_descriptor_protos:
pool.Add(file_descriptor_proto)
my_message_descriptor = pool.FindMessageTypeByName('some.package.MessageType')
The message descriptor can be used in conjunction with the message_factory
module in order to create a protocol buffer class that can be encoded and
decoded.
If you want to get a Python class for the specified proto, use the
helper functions inside google.protobuf.message_factory
directly instead of this class.
"""
__author__ = 'matthewtoia@google.com (Matt Toia)'
from google.protobuf import descriptor
from google.protobuf import descriptor_database
from google.protobuf import text_encoding
_USE_C_DESCRIPTORS = descriptor._USE_C_DESCRIPTORS # pylint: disable=protected-access
def _NormalizeFullyQualifiedName(name):
"""Remove leading period from fully-qualified type name.
Due to b/13860351 in descriptor_database.py, types in the root namespace are
generated with a leading period. This function removes that prefix.
Args:
name: A str, the fully-qualified symbol name.
Returns:
A str, the normalized fully-qualified symbol name.
"""
return name.lstrip('.')
def _OptionsOrNone(descriptor_proto):
"""Returns the value of the field `options`, or None if it is not set."""
if descriptor_proto.HasField('options'):
return descriptor_proto.options
else:
return None
class DescriptorPool(object):
"""A collection of protobufs dynamically constructed by descriptor protos."""
  if _USE_C_DESCRIPTORS:

    def __new__(cls, descriptor_db=None):
      # pylint: disable=protected-access
      # When the C++ descriptor implementation is enabled, construct the
      # C-backed pool instead of this pure-Python class.
      return descriptor._message.DescriptorPool(descriptor_db)
  def __init__(self, descriptor_db=None):
    """Initializes a Pool of proto buffs.

    The descriptor_db argument to the constructor is provided to allow
    specialized file descriptor proto lookup code to be triggered on demand. An
    example would be an implementation which will read and compile a file
    specified in a call to FindFileByName() and not require the call to Add()
    at all. Results from this database will be cached internally here as well.

    Args:
      descriptor_db: A secondary source of file descriptors.
    """
    self._internal_db = descriptor_database.DescriptorDatabase()
    self._descriptor_db = descriptor_db
    # Caches of already-built descriptors: messages and enums keyed by
    # fully-qualified name, files keyed by file name.
    self._descriptors = {}
    self._enum_descriptors = {}
    self._file_descriptors = {}
  def Add(self, file_desc_proto):
    """Adds the FileDescriptorProto and its types to this pool.

    Args:
      file_desc_proto: The FileDescriptorProto to add.
    """
    self._internal_db.Add(file_desc_proto)

  def AddSerializedFile(self, serialized_file_desc_proto):
    """Adds the FileDescriptorProto and its types to this pool.

    Args:
      serialized_file_desc_proto: A bytes string, serialization of the
        FileDescriptorProto to add.
    """
    # pylint: disable=g-import-not-at-top
    # Imported locally to avoid a circular import at module load time.
    from google.protobuf import descriptor_pb2
    file_desc_proto = descriptor_pb2.FileDescriptorProto.FromString(
        serialized_file_desc_proto)
    self.Add(file_desc_proto)

  def AddDescriptor(self, desc):
    """Adds a Descriptor to the pool, non-recursively.

    If the Descriptor contains nested messages or enums, the caller must
    explicitly register them. This method also registers the FileDescriptor
    associated with the message.

    Args:
      desc: A Descriptor.
    """
    if not isinstance(desc, descriptor.Descriptor):
      raise TypeError('Expected instance of descriptor.Descriptor.')

    self._descriptors[desc.full_name] = desc
    self.AddFileDescriptor(desc.file)

  def AddEnumDescriptor(self, enum_desc):
    """Adds an EnumDescriptor to the pool.

    This method also registers the FileDescriptor associated with the message.

    Args:
      enum_desc: An EnumDescriptor.
    """
    if not isinstance(enum_desc, descriptor.EnumDescriptor):
      raise TypeError('Expected instance of descriptor.EnumDescriptor.')

    self._enum_descriptors[enum_desc.full_name] = enum_desc
    self.AddFileDescriptor(enum_desc.file)

  def AddFileDescriptor(self, file_desc):
    """Adds a FileDescriptor to the pool, non-recursively.

    If the FileDescriptor contains messages or enums, the caller must explicitly
    register them.

    Args:
      file_desc: A FileDescriptor.
    """
    if not isinstance(file_desc, descriptor.FileDescriptor):
      raise TypeError('Expected instance of descriptor.FileDescriptor.')
    self._file_descriptors[file_desc.name] = file_desc
  def FindFileByName(self, file_name):
    """Gets a FileDescriptor by file name.

    Args:
      file_name: The path to the file to get a descriptor for.

    Returns:
      A FileDescriptor for the named file.

    Raises:
      KeyError: if the file can not be found in the pool.
    """
    # Fast path: the file has already been built and cached.
    try:
      return self._file_descriptors[file_name]
    except KeyError:
      pass

    # Look up the raw proto, falling back to the secondary database
    # (if one was supplied) when the internal database misses.
    try:
      file_proto = self._internal_db.FindFileByName(file_name)
    except KeyError as error:
      if self._descriptor_db:
        file_proto = self._descriptor_db.FindFileByName(file_name)
      else:
        raise error
    if not file_proto:
      raise KeyError('Cannot find a file named %s' % file_name)
    return self._ConvertFileProtoToFileDescriptor(file_proto)
  def FindFileContainingSymbol(self, symbol):
    """Gets the FileDescriptor for the file containing the specified symbol.

    Args:
      symbol: The name of the symbol to search for.

    Returns:
      A FileDescriptor that contains the specified symbol.

    Raises:
      KeyError: if the file can not be found in the pool.
    """
    symbol = _NormalizeFullyQualifiedName(symbol)
    # Fast paths: the symbol is an already-built message or enum.
    try:
      return self._descriptors[symbol].file
    except KeyError:
      pass

    try:
      return self._enum_descriptors[symbol].file
    except KeyError:
      pass

    # Fall back to the databases: internal first, then the optional
    # secondary descriptor_db.
    try:
      file_proto = self._internal_db.FindFileContainingSymbol(symbol)
    except KeyError as error:
      if self._descriptor_db:
        file_proto = self._descriptor_db.FindFileContainingSymbol(symbol)
      else:
        raise error
    if not file_proto:
      raise KeyError('Cannot find a file containing %s' % symbol)
    return self._ConvertFileProtoToFileDescriptor(file_proto)
def FindMessageTypeByName(self, full_name):
"""Loads the named descriptor from the pool.
Args:
full_name: The full name of the descriptor to load.
Returns:
The descriptor for the named type.
"""
full_name = _NormalizeFullyQualifiedName(full_name)
if full_name not in self._descriptors:
self.FindFileContainingSymbol(full_name)
return self._descriptors[full_name]
def FindEnumTypeByName(self, full_name):
"""Loads the named enum descriptor from the pool.
Args:
full_name: The full name of the enum descriptor to load.
Returns:
The enum descriptor for the named type.
"""
full_name = _NormalizeFullyQualifiedName(full_name)
if full_name not in self._enum_descriptors:
self.FindFileContainingSymbol(full_name)
return self._enum_descriptors[full_name]
def FindFieldByName(self, full_name):
"""Loads the named field descriptor from the pool.
Args:
full_name: The full name of the field descriptor to load.
Returns:
The field descriptor for the named field.
"""
full_name = _NormalizeFullyQualifiedName(full_name)
message_name, _, field_name = full_name.rpartition('.')
message_descriptor = self.FindMessageTypeByName(message_name)
return message_descriptor.fields_by_name[field_name]
  def FindExtensionByName(self, full_name):
    """Loads the named extension descriptor from the pool.

    Args:
      full_name: The full name of the extension descriptor to load.

    Returns:
      A FieldDescriptor, describing the named extension.
    """
    full_name = _NormalizeFullyQualifiedName(full_name)
    message_name, _, extension_name = full_name.rpartition('.')
    try:
      # Most extensions are nested inside a message.
      scope = self.FindMessageTypeByName(message_name)
    except KeyError:
      # Some extensions are defined at file scope.
      scope = self.FindFileContainingSymbol(full_name)
    return scope.extensions_by_name[extension_name]
  def _ConvertFileProtoToFileDescriptor(self, file_proto):
    """Creates a FileDescriptor from a proto or returns a cached copy.

    This method also has the side effect of loading all the symbols found in
    the file into the appropriate dictionaries in the pool.

    Args:
      file_proto: The proto to convert.

    Returns:
      A FileDescriptor matching the passed in proto.
    """
    if file_proto.name not in self._file_descriptors:
      # Build dependency files first; their symbols seed the lookup scope.
      built_deps = list(self._GetDeps(file_proto.dependency))
      direct_deps = [self.FindFileByName(n) for n in file_proto.dependency]
      public_deps = [direct_deps[i] for i in file_proto.public_dependency]

      file_descriptor = descriptor.FileDescriptor(
          pool=self,
          name=file_proto.name,
          package=file_proto.package,
          syntax=file_proto.syntax,
          options=_OptionsOrNone(file_proto),
          serialized_pb=file_proto.SerializeToString(),
          dependencies=direct_deps,
          public_dependencies=public_deps)
      scope = {}

      # This loop extracts all the message and enum types from all the
      # dependencies of the file_proto. This is necessary to create the
      # scope of available message types when defining the passed in
      # file proto.
      for dependency in built_deps:
        scope.update(self._ExtractSymbols(
            dependency.message_types_by_name.values()))
        scope.update((_PrefixWithDot(enum.full_name), enum)
                     for enum in dependency.enum_types_by_name.values())

      for message_type in file_proto.message_type:
        message_desc = self._ConvertMessageDescriptor(
            message_type, file_proto.package, file_descriptor, scope,
            file_proto.syntax)
        file_descriptor.message_types_by_name[message_desc.name] = (
            message_desc)

      for enum_type in file_proto.enum_type:
        file_descriptor.enum_types_by_name[enum_type.name] = (
            self._ConvertEnumDescriptor(enum_type, file_proto.package,
                                        file_descriptor, None, scope))

      # File-level extensions: build, attach to their extended type, and
      # resolve their field types.
      for index, extension_proto in enumerate(file_proto.extension):
        extension_desc = self._MakeFieldDescriptor(
            extension_proto, file_proto.package, index, is_extension=True)
        extension_desc.containing_type = self._GetTypeFromScope(
            file_descriptor.package, extension_proto.extendee, scope)
        self._SetFieldType(extension_proto, extension_desc,
                           file_descriptor.package, scope)
        file_descriptor.extensions_by_name[extension_desc.name] = (
            extension_desc)

      # Second pass: field types can only be resolved once every message
      # in the file exists in scope.
      for desc_proto in file_proto.message_type:
        self._SetAllFieldTypes(file_proto.package, desc_proto, scope)

      if file_proto.package:
        desc_proto_prefix = _PrefixWithDot(file_proto.package)
      else:
        desc_proto_prefix = ''

      for desc_proto in file_proto.message_type:
        desc = self._GetTypeFromScope(
            desc_proto_prefix, desc_proto.name, scope)
        file_descriptor.message_types_by_name[desc_proto.name] = desc

      for index, service_proto in enumerate(file_proto.service):
        file_descriptor.services_by_name[service_proto.name] = (
            self._MakeServiceDescriptor(service_proto, index, scope,
                                        file_proto.package, file_descriptor))

      # Cache both the raw proto and the finished descriptor.
      self.Add(file_proto)
      self._file_descriptors[file_proto.name] = file_descriptor

    return self._file_descriptors[file_proto.name]
  def _ConvertMessageDescriptor(self, desc_proto, package=None, file_desc=None,
                                scope=None, syntax=None):
    """Adds the proto to the pool in the specified package.

    Args:
      desc_proto: The descriptor_pb2.DescriptorProto protobuf message.
      package: The package the proto should be located in.
      file_desc: The file containing this message.
      scope: Dict mapping short and full symbols to message and enum types.
      syntax: string indicating syntax of the file ("proto2" or "proto3")

    Returns:
      The added descriptor.
    """
    if package:
      desc_name = '.'.join((package, desc_proto.name))
    else:
      desc_name = desc_proto.name

    if file_desc is None:
      file_name = None
    else:
      file_name = file_desc.name

    if scope is None:
      scope = {}

    # Convert nested messages and enums first (recursively) so they are
    # available in `scope` when this message's fields are created.
    nested = [
        self._ConvertMessageDescriptor(
            nested, desc_name, file_desc, scope, syntax)
        for nested in desc_proto.nested_type]
    enums = [
        self._ConvertEnumDescriptor(enum, desc_name, file_desc, None, scope)
        for enum in desc_proto.enum_type]
    # Field/extension types are left unresolved here; _SetAllFieldTypes
    # fills them in later.
    fields = [self._MakeFieldDescriptor(field, desc_name, index)
              for index, field in enumerate(desc_proto.field)]
    extensions = [
        self._MakeFieldDescriptor(extension, desc_name, index,
                                  is_extension=True)
        for index, extension in enumerate(desc_proto.extension)]
    oneofs = [
        descriptor.OneofDescriptor(desc.name, '.'.join((desc_name, desc.name)),
                                   index, None, [], desc.options)
        for index, desc in enumerate(desc_proto.oneof_decl)]
    extension_ranges = [(r.start, r.end) for r in desc_proto.extension_range]
    if extension_ranges:
      is_extendable = True
    else:
      is_extendable = False
    desc = descriptor.Descriptor(
        name=desc_proto.name,
        full_name=desc_name,
        filename=file_name,
        containing_type=None,
        fields=fields,
        oneofs=oneofs,
        nested_types=nested,
        enum_types=enums,
        extensions=extensions,
        options=_OptionsOrNone(desc_proto),
        is_extendable=is_extendable,
        extension_ranges=extension_ranges,
        file=file_desc,
        serialized_start=None,
        serialized_end=None,
        syntax=syntax)
    for nested in desc.nested_types:
      nested.containing_type = desc
    for enum in desc.enum_types:
      enum.containing_type = desc
    # Link each field that belongs to a oneof with its OneofDescriptor.
    for field_index, field_desc in enumerate(desc_proto.field):
      if field_desc.HasField('oneof_index'):
        oneof_index = field_desc.oneof_index
        oneofs[oneof_index].fields.append(fields[field_index])
        fields[field_index].containing_oneof = oneofs[oneof_index]

    # Register under both the dotted scope key and the pool-wide cache.
    scope[_PrefixWithDot(desc_name)] = desc
    self._descriptors[desc_name] = desc
    return desc
  def _ConvertEnumDescriptor(self, enum_proto, package=None, file_desc=None,
                             containing_type=None, scope=None):
    """Make a protobuf EnumDescriptor given an EnumDescriptorProto protobuf.

    Args:
      enum_proto: The descriptor_pb2.EnumDescriptorProto protobuf message.
      package: Optional package name for the new message EnumDescriptor.
      file_desc: The file containing the enum descriptor.
      containing_type: The type containing this enum.
      scope: Scope containing available types.

    Returns:
      The added descriptor
    """
    if package:
      enum_name = '.'.join((package, enum_proto.name))
    else:
      enum_name = enum_proto.name

    if file_desc is None:
      file_name = None
    else:
      file_name = file_desc.name

    values = [self._MakeEnumValueDescriptor(value, index)
              for index, value in enumerate(enum_proto.value)]
    desc = descriptor.EnumDescriptor(name=enum_proto.name,
                                     full_name=enum_name,
                                     filename=file_name,
                                     file=file_desc,
                                     values=values,
                                     containing_type=containing_type,
                                     options=_OptionsOrNone(enum_proto))
    # Register under the dotted scope key and in the pool-wide cache.
    scope['.%s' % enum_name] = desc
    self._enum_descriptors[enum_name] = desc
    return desc
  def _MakeFieldDescriptor(self, field_proto, message_name, index,
                           is_extension=False):
    """Creates a field descriptor from a FieldDescriptorProto.

    For message and enum type fields, this method will do a look up
    in the pool for the appropriate descriptor for that type. If it
    is unavailable, it will fall back to the _source function to
    create it. If this type is still unavailable, construction will
    fail.

    Args:
      field_proto: The proto describing the field.
      message_name: The name of the containing message.
      index: Index of the field
      is_extension: Indication that this field is for an extension.

    Returns:
      An initialized FieldDescriptor object
    """
    if message_name:
      full_name = '.'.join((message_name, field_proto.name))
    else:
      full_name = field_proto.name

    # cpp_type/message_type/enum_type/containing_type/default_value are
    # intentionally left unresolved here; _SetFieldType fills them in once
    # every type in the file is known.
    return descriptor.FieldDescriptor(
        name=field_proto.name,
        full_name=full_name,
        index=index,
        number=field_proto.number,
        type=field_proto.type,
        cpp_type=None,
        message_type=None,
        enum_type=None,
        containing_type=None,
        label=field_proto.label,
        has_default_value=False,
        default_value=None,
        is_extension=is_extension,
        extension_scope=None,
        options=_OptionsOrNone(field_proto))
def _SetAllFieldTypes(self, package, desc_proto, scope):
    """Sets the types of all of the message descriptor's fields.

    This method also sets the containing types on any extensions, and
    recurses into nested message types.

    Args:
        package: The current package of desc_proto.
        desc_proto: The message descriptor to update.
        scope: Enclosing scope of available types.
    """
    package = _PrefixWithDot(package)
    message_desc = self._GetTypeFromScope(package, desc_proto.name, scope)

    # Nested declarations live under '<package>.<message name>'.
    if package == '.':
        nested_package = _PrefixWithDot(desc_proto.name)
    else:
        nested_package = '.'.join([package, desc_proto.name])

    for field_proto, field_desc in zip(desc_proto.field, message_desc.fields):
        self._SetFieldType(field_proto, field_desc, nested_package, scope)

    for extension_proto, extension_desc in zip(
            desc_proto.extension, message_desc.extensions):
        extension_desc.containing_type = self._GetTypeFromScope(
            nested_package, extension_proto.extendee, scope)
        self._SetFieldType(extension_proto, extension_desc, nested_package,
                           scope)

    for nested_proto in desc_proto.nested_type:
        self._SetAllFieldTypes(nested_package, nested_proto, scope)
def _SetFieldType(self, field_proto, field_desc, package, scope):
    """Sets the field's type, cpp_type, message_type and enum_type.

    Args:
        field_proto: Data about the field in proto format.
        field_desc: The descriptor to modify.
        package: The package the field's container is in.
        scope: Enclosing scope of available types.
    """
    # Resolve the referenced message/enum descriptor, if the field names one.
    if field_proto.type_name:
        desc = self._GetTypeFromScope(package, field_proto.type_name, scope)
    else:
        desc = None

    # If the proto does not carry an explicit 'type', infer message vs. enum
    # from what the type_name resolved to.
    if not field_proto.HasField('type'):
        if isinstance(desc, descriptor.Descriptor):
            field_proto.type = descriptor.FieldDescriptor.TYPE_MESSAGE
        else:
            field_proto.type = descriptor.FieldDescriptor.TYPE_ENUM

    # cpp_type is derived from the (possibly just inferred) proto type.
    field_desc.cpp_type = descriptor.FieldDescriptor.ProtoTypeToCppProtoType(
        field_proto.type)

    if (field_proto.type == descriptor.FieldDescriptor.TYPE_MESSAGE
            or field_proto.type == descriptor.FieldDescriptor.TYPE_GROUP):
        field_desc.message_type = desc

    if field_proto.type == descriptor.FieldDescriptor.TYPE_ENUM:
        field_desc.enum_type = desc

    if field_proto.label == descriptor.FieldDescriptor.LABEL_REPEATED:
        # Repeated fields always default to an empty list.
        field_desc.has_default_value = False
        field_desc.default_value = []
    elif field_proto.HasField('default_value'):
        # The proto stores defaults as strings; convert to the Python value
        # appropriate for the field's type.
        field_desc.has_default_value = True
        if (field_proto.type == descriptor.FieldDescriptor.TYPE_DOUBLE or
                field_proto.type == descriptor.FieldDescriptor.TYPE_FLOAT):
            field_desc.default_value = float(field_proto.default_value)
        elif field_proto.type == descriptor.FieldDescriptor.TYPE_STRING:
            field_desc.default_value = field_proto.default_value
        elif field_proto.type == descriptor.FieldDescriptor.TYPE_BOOL:
            field_desc.default_value = field_proto.default_value.lower() == 'true'
        elif field_proto.type == descriptor.FieldDescriptor.TYPE_ENUM:
            # Enum defaults are stored by value name; translate to the number.
            field_desc.default_value = field_desc.enum_type.values_by_name[
                field_proto.default_value].number
        elif field_proto.type == descriptor.FieldDescriptor.TYPE_BYTES:
            field_desc.default_value = text_encoding.CUnescape(
                field_proto.default_value)
        else:
            # All other types are of the "int" type.
            field_desc.default_value = int(field_proto.default_value)
    else:
        # No explicit default: use the zero value for the field's type.
        field_desc.has_default_value = False
        if (field_proto.type == descriptor.FieldDescriptor.TYPE_DOUBLE or
                field_proto.type == descriptor.FieldDescriptor.TYPE_FLOAT):
            field_desc.default_value = 0.0
        elif field_proto.type == descriptor.FieldDescriptor.TYPE_STRING:
            field_desc.default_value = u''
        elif field_proto.type == descriptor.FieldDescriptor.TYPE_BOOL:
            field_desc.default_value = False
        elif field_proto.type == descriptor.FieldDescriptor.TYPE_ENUM:
            field_desc.default_value = field_desc.enum_type.values[0].number
        elif field_proto.type == descriptor.FieldDescriptor.TYPE_BYTES:
            field_desc.default_value = b''
        else:
            # All other types are of the "int" type.
            field_desc.default_value = 0

    # Copy the (possibly inferred) type onto the descriptor last.
    field_desc.type = field_proto.type
def _MakeEnumValueDescriptor(self, value_proto, index):
    """Builds an EnumValueDescriptor from an enum value proto.

    Args:
        value_proto: The proto describing the enum value.
        index: The index of the enum value.

    Returns:
        An initialized EnumValueDescriptor object.
    """
    options = _OptionsOrNone(value_proto)
    return descriptor.EnumValueDescriptor(
        name=value_proto.name,
        number=value_proto.number,
        index=index,
        options=options,
        type=None)
def _MakeServiceDescriptor(self, service_proto, service_index, scope,
                           package, file_desc):
    """Make a protobuf ServiceDescriptor given a ServiceDescriptorProto.

    Args:
        service_proto: The descriptor_pb2.ServiceDescriptorProto protobuf
            message.
        service_index: The index of the service in the File.
        scope: Dict mapping short and full symbols to message and enum types.
        package: Optional package name for the new service descriptor.
        file_desc: The file containing the service descriptor.

    Returns:
        The added descriptor.
    """
    service_name = ('.'.join((package, service_proto.name)) if package
                    else service_proto.name)

    methods = []
    for method_index, method_proto in enumerate(service_proto.method):
        methods.append(self._MakeMethodDescriptor(
            method_proto, service_name, package, scope, method_index))

    return descriptor.ServiceDescriptor(
        name=service_proto.name,
        full_name=service_name,
        index=service_index,
        methods=methods,
        options=_OptionsOrNone(service_proto),
        file=file_desc)
def _MakeMethodDescriptor(self, method_proto, service_name, package, scope,
                          index):
    """Creates a method descriptor from a MethodDescriptorProto.

    Args:
        method_proto: The proto describing the method.
        service_name: The name of the containing service.
        package: Optional package name to look up for types.
        scope: Scope containing available types.
        index: Index of the method in the service.

    Returns:
        An initialized MethodDescriptor object.
    """
    lookup = self._GetTypeFromScope
    return descriptor.MethodDescriptor(
        name=method_proto.name,
        full_name='.'.join((service_name, method_proto.name)),
        index=index,
        containing_service=None,
        input_type=lookup(package, method_proto.input_type, scope),
        output_type=lookup(package, method_proto.output_type, scope),
        options=_OptionsOrNone(method_proto))
def _ExtractSymbols(self, descriptors):
    """Pulls out all the symbols from descriptor protos.

    Args:
        descriptors: The messages to extract descriptors from.

    Yields:
        A two element tuple of the type name and descriptor object.
    """
    for message_desc in descriptors:
        yield _PrefixWithDot(message_desc.full_name), message_desc
        # Recurse into nested message types...
        for nested_symbol in self._ExtractSymbols(message_desc.nested_types):
            yield nested_symbol
        # ...and emit the enums declared directly on this message.
        for enum_desc in message_desc.enum_types:
            yield _PrefixWithDot(enum_desc.full_name), enum_desc
def _GetDeps(self, dependencies):
    """Finds dependencies for file protos.

    Yields each directly depended-on file descriptor, followed by the
    dependencies recorded on that descriptor itself.

    Args:
        dependencies: The names of the files being depended on.

    Yields:
        Each direct and indirect dependency.
    """
    for dep_name in dependencies:
        dep_file = self.FindFileByName(dep_name)
        yield dep_file
        for transitive_dep in dep_file.dependencies:
            yield transitive_dep
def _GetTypeFromScope(self, package, type_name, scope):
    """Finds a given type name in the current scope.

    Args:
        package: The package the proto should be located in.
        type_name: The name of the type to be found in the scope.
        scope: Dict mapping short and full symbols to message and enum types.

    Returns:
        The descriptor for the requested type.
    """
    if type_name not in scope:
        # Qualify the name with successively shorter package prefixes,
        # innermost package first, and keep the first match found.
        parts = _PrefixWithDot(package).split('.')
        for cut in range(len(parts), 0, -1):
            candidate = '.'.join(parts[:cut] + [type_name])
            if candidate in scope:
                type_name = candidate
                break
    return scope[type_name]
def _PrefixWithDot(name):
return name if name.startswith('.') else '.%s' % name
# Module-wide default pool, returned by Default() below.
if _USE_C_DESCRIPTORS:
    # TODO(amauryfa): This pool could be constructed from Python code, when we
    # support a flag like 'use_cpp_generated_pool=True'.
    # pylint: disable=protected-access
    _DEFAULT = descriptor._message.default_pool
else:
    _DEFAULT = DescriptorPool()


def Default():
    """Returns the module-wide default descriptor pool."""
    return _DEFAULT
| gpl-3.0 |
suneeth51/neutron | neutron/plugins/ml2/models.py | 49 | 5276 | # Copyright (c) 2013 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import sqlalchemy as sa
from sqlalchemy import orm
from neutron.db import model_base
from neutron.db import models_v2
from neutron.extensions import portbindings
# Maximum stored length of the port binding 'profile' columns below.
BINDING_PROFILE_LEN = 4095


class NetworkSegment(model_base.BASEV2, models_v2.HasId):
    """Represent persistent state of a network segment.

    A network segment is a portion of a neutron network with a
    specific physical realization. A neutron network can consist of
    one or more segments.
    """

    __tablename__ = 'ml2_network_segments'

    # Owning network; segment rows are removed with their network (CASCADE).
    network_id = sa.Column(sa.String(36),
                           sa.ForeignKey('networks.id', ondelete="CASCADE"),
                           nullable=False)
    # Network type name of this segment (interpreted by the type drivers).
    network_type = sa.Column(sa.String(32), nullable=False)
    # Physical network name; nullable (not all types require one).
    physical_network = sa.Column(sa.String(64))
    # Type-specific segmentation id; nullable.
    segmentation_id = sa.Column(sa.Integer)
    # True when the segment was allocated dynamically rather than statically.
    is_dynamic = sa.Column(sa.Boolean, default=False, nullable=False,
                           server_default=sa.sql.false())
    # Ordering of this segment within its network; defaults to 0.
    segment_index = sa.Column(sa.Integer, nullable=False, server_default='0')
class PortBinding(model_base.BASEV2):
    """Represent binding-related state of a port.

    A port binding stores the port attributes required for the
    portbindings extension, as well as internal ml2 state such as
    which MechanismDriver and which segment are used by the port
    binding.
    """

    __tablename__ = 'ml2_port_bindings'

    # Bound port; binding rows are removed with their port (CASCADE).
    port_id = sa.Column(sa.String(36),
                        sa.ForeignKey('ports.id', ondelete="CASCADE"),
                        primary_key=True)
    # Host the port is bound to; defaults to the empty string.
    host = sa.Column(sa.String(255), nullable=False, default='',
                     server_default='')
    vnic_type = sa.Column(sa.String(64), nullable=False,
                          default=portbindings.VNIC_NORMAL,
                          server_default=portbindings.VNIC_NORMAL)
    # Serialized binding profile; capped at BINDING_PROFILE_LEN characters.
    profile = sa.Column(sa.String(BINDING_PROFILE_LEN), nullable=False,
                        default='', server_default='')
    vif_type = sa.Column(sa.String(64), nullable=False)
    vif_details = sa.Column(sa.String(4095), nullable=False, default='',
                            server_default='')

    # Add a relationship to the Port model in order to instruct SQLAlchemy to
    # eagerly load port bindings
    port = orm.relationship(
        models_v2.Port,
        backref=orm.backref("port_binding",
                            lazy='joined', uselist=False,
                            cascade='delete'))
class PortBindingLevel(model_base.BASEV2):
    """Represent each level of a port binding.

    Stores information associated with each level of an established
    port binding. Different levels might correspond to the host and
    ToR switch, for instance.
    """

    __tablename__ = 'ml2_port_binding_levels'

    port_id = sa.Column(sa.String(36),
                        sa.ForeignKey('ports.id', ondelete="CASCADE"),
                        primary_key=True)
    # (port_id, host, level) together form the primary key.
    host = sa.Column(sa.String(255), nullable=False, primary_key=True)
    level = sa.Column(sa.Integer, primary_key=True, autoincrement=False)
    # Driver that established this level — presumably the ml2 mechanism
    # driver name (nullable); verify against the binding code.
    driver = sa.Column(sa.String(64))
    # Segment used at this level; set to NULL if the segment is deleted.
    segment_id = sa.Column(sa.String(36),
                           sa.ForeignKey('ml2_network_segments.id',
                                         ondelete="SET NULL"))
class DVRPortBinding(model_base.BASEV2):
    """Represent binding-related state of a DVR port.

    Port binding for all the ports associated to a DVR identified by
    router_id.
    """

    __tablename__ = 'ml2_dvr_port_bindings'

    # Bound port; binding rows are removed with their port (CASCADE).
    port_id = sa.Column(sa.String(36),
                        sa.ForeignKey('ports.id', ondelete="CASCADE"),
                        primary_key=True)
    # A DVR port is bound per host, so host is part of the primary key.
    host = sa.Column(sa.String(255), nullable=False, primary_key=True)
    # Distributed router this binding belongs to; nullable.
    router_id = sa.Column(sa.String(36), nullable=True)
    vif_type = sa.Column(sa.String(64), nullable=False)
    vif_details = sa.Column(sa.String(4095), nullable=False, default='',
                            server_default='')
    vnic_type = sa.Column(sa.String(64), nullable=False,
                          default=portbindings.VNIC_NORMAL,
                          server_default=portbindings.VNIC_NORMAL)
    # Serialized binding profile; capped at BINDING_PROFILE_LEN characters.
    profile = sa.Column(sa.String(BINDING_PROFILE_LEN), nullable=False,
                        default='', server_default='')
    status = sa.Column(sa.String(16), nullable=False)

    # Add a relationship to the Port model in order to instruct SQLAlchemy to
    # eagerly load port bindings
    port = orm.relationship(
        models_v2.Port,
        backref=orm.backref("dvr_port_binding",
                            lazy='joined', uselist=False,
                            cascade='delete'))
| apache-2.0 |
thesoulkiller/thefuck | tests/rules/test_git_push_force.py | 15 | 1908 | import pytest
from thefuck.rules.git_push_force import match, get_new_command
from tests.utils import Command
# Stderr of a push rejected as non-fast-forward — the case the
# git_push_force rule is meant to repair.
git_err = '''
To /tmp/foo
! [rejected] master -> master (non-fast-forward)
error: failed to push some refs to '/tmp/bar'
hint: Updates were rejected because the tip of your current branch is behind
hint: its remote counterpart. Integrate the remote changes (e.g.
hint: 'git pull ...') before pushing again.
hint: See the 'Note about fast-forwards' in 'git push --help' for details.
'''

# Stderr when there is nothing to push.
git_uptodate = 'Everything up-to-date'

# Stderr of a successful push.
git_ok = '''
Counting objects: 3, done.
Delta compression using up to 4 threads.
Compressing objects: 100% (2/2), done.
Writing objects: 100% (3/3), 282 bytes | 0 bytes/s, done.
Total 3 (delta 0), reused 0 (delta 0)
To /tmp/bar
514eed3..f269c79 master -> master
'''
# Every case reproduces a rejected non-fast-forward push; the rule must match.
@pytest.mark.parametrize('command', [
    Command(script='git push', stderr=git_err),
    Command(script='git push nvbn', stderr=git_err),
    Command(script='git push nvbn master', stderr=git_err)])
def test_match(command):
    """`match` fires for any `git push` whose stderr reports a rejection."""
    assert match(command, None)
# The rule must stay silent for successful and up-to-date pushes.  The
# original list duplicated ('git push nvbn', git_ok) and
# ('git push nvbn master', git_uptodate) while never covering the other
# two script/stderr combinations; each pair is now covered exactly once.
@pytest.mark.parametrize('command', [
    Command(script='git push', stderr=git_ok),
    Command(script='git push', stderr=git_uptodate),
    Command(script='git push nvbn', stderr=git_ok),
    Command(script='git push nvbn', stderr=git_uptodate),
    Command(script='git push nvbn master', stderr=git_ok),
    Command(script='git push nvbn master', stderr=git_uptodate)])
def test_not_match(command):
    """`match` must not fire when the push succeeded or was a no-op."""
    assert not match(command, None)
# The fix inserts --force right after `push` and keeps the original
# remote/branch arguments.
@pytest.mark.parametrize('command, output', [
    (Command(script='git push', stderr=git_err), 'git push --force'),
    (Command(script='git push nvbn', stderr=git_err), 'git push --force nvbn'),
    (Command(script='git push nvbn master', stderr=git_err), 'git push --force nvbn master')])
def test_get_new_command(command, output):
    """`get_new_command` rewrites the rejected push as a forced push."""
    assert get_new_command(command, None) == output
| mit |
techdragon/django | django/contrib/gis/maps/google/__init__.py | 50 | 2770 | """
This module houses the GoogleMap object, used for generating
the needed javascript to embed Google Maps in a Web page.
Google(R) is a registered trademark of Google, Inc. of Mountain View, California.
Example:
* In the view:
return render(request, 'template.html', {'google': GoogleMap(key="abcdefg")})
* In the template:
<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN"
"http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd">
{{ google.xhtml }}
<head>
<title>Google Maps via GeoDjango</title>
{{ google.style }}
{{ google.scripts }}
</head>
{{ google.body }}
<div id="{{ google.dom_id }}" style="width:600px;height:400px;"></div>
</body>
</html>
Note: If you want to be more explicit in your templates, the following are
equivalent:
{{ google.body }} => "<body {{ google.onload }} {{ google.onunload }}>"
{{ google.xhtml }} => "<html xmlns="http://www.w3.org/1999/xhtml" {{ google.xmlns }}>"
{{ google.style }} => "<style>{{ google.vml_css }}</style>"
Explanation:
- The `xhtml` property provides the correct XML namespace needed for
Google Maps to operate in IE using XHTML. Google Maps on IE uses
VML to draw polylines. Returns, by default:
<html xmlns="http://www.w3.org/1999/xhtml" xmlns:v="urn:schemas-microsoft-com:vml">
- The `style` property provides the correct style tag for the CSS
properties required by Google Maps on IE:
<style type="text/css">v\:* {behavior:url(#default#VML);}</style>
- The `scripts` property provides the necessary <script> tags for
including the Google Maps javascript, as well as including the
generated javascript.
- The `body` property provides the correct attributes for the
body tag to load the generated javascript. By default, returns:
<body onload="gmap_load()" onunload="GUnload()">
- The `dom_id` property returns the DOM id for the map. Defaults to "map".
The following attributes may be set or customized in your local settings:
* GOOGLE_MAPS_API_KEY: String of your Google Maps API key. These are tied
to a domain. May be obtained from https://developers.google.com/maps/
* GOOGLE_MAPS_API_VERSION (optional): Defaults to using "2.x"
* GOOGLE_MAPS_URL (optional): Must have a substitution ('%s') for the API
version.
"""
from django.contrib.gis.maps.google.gmap import GoogleMap, GoogleMapSet
from django.contrib.gis.maps.google.overlays import (
GEvent, GIcon, GMarker, GPolygon, GPolyline,
)
from django.contrib.gis.maps.google.zoom import GoogleZoom
# Names re-exported from the gmap, overlays and zoom submodules above.
__all__ = [
    'GoogleMap', 'GoogleMapSet', 'GEvent', 'GIcon', 'GMarker', 'GPolygon',
    'GPolyline', 'GoogleZoom',
]
| bsd-3-clause |
xiangshouding/nv | node/node_modules/pygmentize-bundled/vendor/pygments/build-3.3/pygments/lexers/_clbuiltins.py | 57 | 14050 | # -*- coding: utf-8 -*-
"""
pygments.lexers._clbuiltins
~~~~~~~~~~~~~~~~~~~~~~~~~~~
ANSI Common Lisp builtins.
:copyright: Copyright 2006-2014 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
# Names of the ANSI Common Lisp standard functions.  A set literal avoids
# building and discarding an intermediate list at import time (flake8 C405).
BUILTIN_FUNCTIONS = {  # 638 functions
    '<', '<=', '=', '>', '>=', '-', '/', '/=', '*', '+', '1-', '1+',
    'abort', 'abs', 'acons', 'acos', 'acosh', 'add-method', 'adjoin',
    'adjustable-array-p', 'adjust-array', 'allocate-instance',
    'alpha-char-p', 'alphanumericp', 'append', 'apply', 'apropos',
    'apropos-list', 'aref', 'arithmetic-error-operands',
    'arithmetic-error-operation', 'array-dimension', 'array-dimensions',
    'array-displacement', 'array-element-type', 'array-has-fill-pointer-p',
    'array-in-bounds-p', 'arrayp', 'array-rank', 'array-row-major-index',
    'array-total-size', 'ash', 'asin', 'asinh', 'assoc', 'assoc-if',
    'assoc-if-not', 'atan', 'atanh', 'atom', 'bit', 'bit-and', 'bit-andc1',
    'bit-andc2', 'bit-eqv', 'bit-ior', 'bit-nand', 'bit-nor', 'bit-not',
    'bit-orc1', 'bit-orc2', 'bit-vector-p', 'bit-xor', 'boole',
    'both-case-p', 'boundp', 'break', 'broadcast-stream-streams',
    'butlast', 'byte', 'byte-position', 'byte-size', 'caaaar', 'caaadr',
    'caaar', 'caadar', 'caaddr', 'caadr', 'caar', 'cadaar', 'cadadr',
    'cadar', 'caddar', 'cadddr', 'caddr', 'cadr', 'call-next-method', 'car',
    'cdaaar', 'cdaadr', 'cdaar', 'cdadar', 'cdaddr', 'cdadr', 'cdar',
    'cddaar', 'cddadr', 'cddar', 'cdddar', 'cddddr', 'cdddr', 'cddr', 'cdr',
    'ceiling', 'cell-error-name', 'cerror', 'change-class', 'char', 'char<',
    'char<=', 'char=', 'char>', 'char>=', 'char/=', 'character',
    'characterp', 'char-code', 'char-downcase', 'char-equal',
    'char-greaterp', 'char-int', 'char-lessp', 'char-name',
    'char-not-equal', 'char-not-greaterp', 'char-not-lessp', 'char-upcase',
    'cis', 'class-name', 'class-of', 'clear-input', 'clear-output',
    'close', 'clrhash', 'code-char', 'coerce', 'compile',
    'compiled-function-p', 'compile-file', 'compile-file-pathname',
    'compiler-macro-function', 'complement', 'complex', 'complexp',
    'compute-applicable-methods', 'compute-restarts', 'concatenate',
    'concatenated-stream-streams', 'conjugate', 'cons', 'consp',
    'constantly', 'constantp', 'continue', 'copy-alist', 'copy-list',
    'copy-pprint-dispatch', 'copy-readtable', 'copy-seq', 'copy-structure',
    'copy-symbol', 'copy-tree', 'cos', 'cosh', 'count', 'count-if',
    'count-if-not', 'decode-float', 'decode-universal-time', 'delete',
    'delete-duplicates', 'delete-file', 'delete-if', 'delete-if-not',
    'delete-package', 'denominator', 'deposit-field', 'describe',
    'describe-object', 'digit-char', 'digit-char-p', 'directory',
    'directory-namestring', 'disassemble', 'documentation', 'dpb',
    'dribble', 'echo-stream-input-stream', 'echo-stream-output-stream',
    'ed', 'eighth', 'elt', 'encode-universal-time', 'endp',
    'enough-namestring', 'ensure-directories-exist',
    'ensure-generic-function', 'eq', 'eql', 'equal', 'equalp', 'error',
    'eval', 'evenp', 'every', 'exp', 'export', 'expt', 'fboundp',
    'fceiling', 'fdefinition', 'ffloor', 'fifth', 'file-author',
    'file-error-pathname', 'file-length', 'file-namestring',
    'file-position', 'file-string-length', 'file-write-date',
    'fill', 'fill-pointer', 'find', 'find-all-symbols', 'find-class',
    'find-if', 'find-if-not', 'find-method', 'find-package', 'find-restart',
    'find-symbol', 'finish-output', 'first', 'float', 'float-digits',
    'floatp', 'float-precision', 'float-radix', 'float-sign', 'floor',
    'fmakunbound', 'force-output', 'format', 'fourth', 'fresh-line',
    'fround', 'ftruncate', 'funcall', 'function-keywords',
    'function-lambda-expression', 'functionp', 'gcd', 'gensym', 'gentemp',
    'get', 'get-decoded-time', 'get-dispatch-macro-character', 'getf',
    'gethash', 'get-internal-real-time', 'get-internal-run-time',
    'get-macro-character', 'get-output-stream-string', 'get-properties',
    'get-setf-expansion', 'get-universal-time', 'graphic-char-p',
    'hash-table-count', 'hash-table-p', 'hash-table-rehash-size',
    'hash-table-rehash-threshold', 'hash-table-size', 'hash-table-test',
    'host-namestring', 'identity', 'imagpart', 'import',
    'initialize-instance', 'input-stream-p', 'inspect',
    'integer-decode-float', 'integer-length', 'integerp',
    'interactive-stream-p', 'intern', 'intersection',
    'invalid-method-error', 'invoke-debugger', 'invoke-restart',
    'invoke-restart-interactively', 'isqrt', 'keywordp', 'last', 'lcm',
    'ldb', 'ldb-test', 'ldiff', 'length', 'lisp-implementation-type',
    'lisp-implementation-version', 'list', 'list*', 'list-all-packages',
    'listen', 'list-length', 'listp', 'load',
    'load-logical-pathname-translations', 'log', 'logand', 'logandc1',
    'logandc2', 'logbitp', 'logcount', 'logeqv', 'logical-pathname',
    'logical-pathname-translations', 'logior', 'lognand', 'lognor',
    'lognot', 'logorc1', 'logorc2', 'logtest', 'logxor', 'long-site-name',
    'lower-case-p', 'machine-instance', 'machine-type', 'machine-version',
    'macroexpand', 'macroexpand-1', 'macro-function', 'make-array',
    'make-broadcast-stream', 'make-concatenated-stream', 'make-condition',
    'make-dispatch-macro-character', 'make-echo-stream', 'make-hash-table',
    'make-instance', 'make-instances-obsolete', 'make-list',
    'make-load-form', 'make-load-form-saving-slots', 'make-package',
    'make-pathname', 'make-random-state', 'make-sequence', 'make-string',
    'make-string-input-stream', 'make-string-output-stream', 'make-symbol',
    'make-synonym-stream', 'make-two-way-stream', 'makunbound', 'map',
    'mapc', 'mapcan', 'mapcar', 'mapcon', 'maphash', 'map-into', 'mapl',
    'maplist', 'mask-field', 'max', 'member', 'member-if', 'member-if-not',
    'merge', 'merge-pathnames', 'method-combination-error',
    'method-qualifiers', 'min', 'minusp', 'mismatch', 'mod',
    'muffle-warning', 'name-char', 'namestring', 'nbutlast', 'nconc',
    'next-method-p', 'nintersection', 'ninth', 'no-applicable-method',
    'no-next-method', 'not', 'notany', 'notevery', 'nreconc', 'nreverse',
    'nset-difference', 'nset-exclusive-or', 'nstring-capitalize',
    'nstring-downcase', 'nstring-upcase', 'nsublis', 'nsubst', 'nsubst-if',
    'nsubst-if-not', 'nsubstitute', 'nsubstitute-if', 'nsubstitute-if-not',
    'nth', 'nthcdr', 'null', 'numberp', 'numerator', 'nunion', 'oddp',
    'open', 'open-stream-p', 'output-stream-p', 'package-error-package',
    'package-name', 'package-nicknames', 'packagep',
    'package-shadowing-symbols', 'package-used-by-list', 'package-use-list',
    'pairlis', 'parse-integer', 'parse-namestring', 'pathname',
    'pathname-device', 'pathname-directory', 'pathname-host',
    'pathname-match-p', 'pathname-name', 'pathnamep', 'pathname-type',
    'pathname-version', 'peek-char', 'phase', 'plusp', 'position',
    'position-if', 'position-if-not', 'pprint', 'pprint-dispatch',
    'pprint-fill', 'pprint-indent', 'pprint-linear', 'pprint-newline',
    'pprint-tab', 'pprint-tabular', 'prin1', 'prin1-to-string', 'princ',
    'princ-to-string', 'print', 'print-object', 'probe-file', 'proclaim',
    'provide', 'random', 'random-state-p', 'rassoc', 'rassoc-if',
    'rassoc-if-not', 'rational', 'rationalize', 'rationalp', 'read',
    'read-byte', 'read-char', 'read-char-no-hang', 'read-delimited-list',
    'read-from-string', 'read-line', 'read-preserving-whitespace',
    'read-sequence', 'readtable-case', 'readtablep', 'realp', 'realpart',
    'reduce', 'reinitialize-instance', 'rem', 'remhash', 'remove',
    'remove-duplicates', 'remove-if', 'remove-if-not', 'remove-method',
    'remprop', 'rename-file', 'rename-package', 'replace', 'require',
    'rest', 'restart-name', 'revappend', 'reverse', 'room', 'round',
    'row-major-aref', 'rplaca', 'rplacd', 'sbit', 'scale-float', 'schar',
    'search', 'second', 'set', 'set-difference',
    'set-dispatch-macro-character', 'set-exclusive-or',
    'set-macro-character', 'set-pprint-dispatch', 'set-syntax-from-char',
    'seventh', 'shadow', 'shadowing-import', 'shared-initialize',
    'short-site-name', 'signal', 'signum', 'simple-bit-vector-p',
    'simple-condition-format-arguments', 'simple-condition-format-control',
    'simple-string-p', 'simple-vector-p', 'sin', 'sinh', 'sixth', 'sleep',
    'slot-boundp', 'slot-exists-p', 'slot-makunbound', 'slot-missing',
    'slot-unbound', 'slot-value', 'software-type', 'software-version',
    'some', 'sort', 'special-operator-p', 'sqrt', 'stable-sort',
    'standard-char-p', 'store-value', 'stream-element-type',
    'stream-error-stream', 'stream-external-format', 'streamp', 'string',
    'string<', 'string<=', 'string=', 'string>', 'string>=', 'string/=',
    'string-capitalize', 'string-downcase', 'string-equal',
    'string-greaterp', 'string-left-trim', 'string-lessp',
    'string-not-equal', 'string-not-greaterp', 'string-not-lessp',
    'stringp', 'string-right-trim', 'string-trim', 'string-upcase',
    'sublis', 'subseq', 'subsetp', 'subst', 'subst-if', 'subst-if-not',
    'substitute', 'substitute-if', 'substitute-if-not', 'subtypep','svref',
    'sxhash', 'symbol-function', 'symbol-name', 'symbolp', 'symbol-package',
    'symbol-plist', 'symbol-value', 'synonym-stream-symbol', 'syntax:',
    'tailp', 'tan', 'tanh', 'tenth', 'terpri', 'third',
    'translate-logical-pathname', 'translate-pathname', 'tree-equal',
    'truename', 'truncate', 'two-way-stream-input-stream',
    'two-way-stream-output-stream', 'type-error-datum',
    'type-error-expected-type', 'type-of', 'typep', 'unbound-slot-instance',
    'unexport', 'unintern', 'union', 'unread-char', 'unuse-package',
    'update-instance-for-different-class',
    'update-instance-for-redefined-class', 'upgraded-array-element-type',
    'upgraded-complex-part-type', 'upper-case-p', 'use-package',
    'user-homedir-pathname', 'use-value', 'values', 'values-list', 'vector',
    'vectorp', 'vector-pop', 'vector-push', 'vector-push-extend', 'warn',
    'wild-pathname-p', 'write', 'write-byte', 'write-char', 'write-line',
    'write-sequence', 'write-string', 'write-to-string', 'yes-or-no-p',
    'y-or-n-p', 'zerop',
}
# Common Lisp special forms.  Set literal instead of set([...]) (flake8 C405).
SPECIAL_FORMS = {
    'block', 'catch', 'declare', 'eval-when', 'flet', 'function', 'go', 'if',
    'labels', 'lambda', 'let', 'let*', 'load-time-value', 'locally', 'macrolet',
    'multiple-value-call', 'multiple-value-prog1', 'progn', 'progv', 'quote',
    'return-from', 'setq', 'symbol-macrolet', 'tagbody', 'the', 'throw',
    'unwind-protect',
}
# Standard Common Lisp macros.  Set literal instead of set([...]) (flake8 C405).
MACROS = {
    'and', 'assert', 'call-method', 'case', 'ccase', 'check-type', 'cond',
    'ctypecase', 'decf', 'declaim', 'defclass', 'defconstant', 'defgeneric',
    'define-compiler-macro', 'define-condition', 'define-method-combination',
    'define-modify-macro', 'define-setf-expander', 'define-symbol-macro',
    'defmacro', 'defmethod', 'defpackage', 'defparameter', 'defsetf',
    'defstruct', 'deftype', 'defun', 'defvar', 'destructuring-bind', 'do',
    'do*', 'do-all-symbols', 'do-external-symbols', 'dolist', 'do-symbols',
    'dotimes', 'ecase', 'etypecase', 'formatter', 'handler-bind',
    'handler-case', 'ignore-errors', 'incf', 'in-package', 'lambda', 'loop',
    'loop-finish', 'make-method', 'multiple-value-bind', 'multiple-value-list',
    'multiple-value-setq', 'nth-value', 'or', 'pop',
    'pprint-exit-if-list-exhausted', 'pprint-logical-block', 'pprint-pop',
    'print-unreadable-object', 'prog', 'prog*', 'prog1', 'prog2', 'psetf',
    'psetq', 'push', 'pushnew', 'remf', 'restart-bind', 'restart-case',
    'return', 'rotatef', 'setf', 'shiftf', 'step', 'time', 'trace', 'typecase',
    'unless', 'untrace', 'when', 'with-accessors', 'with-compilation-unit',
    'with-condition-restarts', 'with-hash-table-iterator',
    'with-input-from-string', 'with-open-file', 'with-open-stream',
    'with-output-to-string', 'with-package-iterator', 'with-simple-restart',
    'with-slots', 'with-standard-io-syntax',
}
# Lambda-list keywords.  Set literal instead of set([...]) (flake8 C405).
LAMBDA_LIST_KEYWORDS = {
    '&allow-other-keys', '&aux', '&body', '&environment', '&key', '&optional',
    '&rest', '&whole',
}
# Declaration identifiers.  Set literal instead of set([...]) (flake8 C405).
DECLARATIONS = {
    'dynamic-extent', 'ignore', 'optimize', 'ftype', 'inline', 'special',
    'ignorable', 'notinline', 'type',
}
# Built-in type and condition-type names.  Set literal instead of set([...]).
BUILTIN_TYPES = {
    'atom', 'boolean', 'base-char', 'base-string', 'bignum', 'bit',
    'compiled-function', 'extended-char', 'fixnum', 'keyword', 'nil',
    'signed-byte', 'short-float', 'single-float', 'double-float', 'long-float',
    'simple-array', 'simple-base-string', 'simple-bit-vector', 'simple-string',
    'simple-vector', 'standard-char', 'unsigned-byte',
    # Condition Types
    'arithmetic-error', 'cell-error', 'condition', 'control-error',
    'division-by-zero', 'end-of-file', 'error', 'file-error',
    'floating-point-inexact', 'floating-point-overflow',
    'floating-point-underflow', 'floating-point-invalid-operation',
    'parse-error', 'package-error', 'print-not-readable', 'program-error',
    'reader-error', 'serious-condition', 'simple-condition', 'simple-error',
    'simple-type-error', 'simple-warning', 'stream-error', 'storage-condition',
    'style-warning', 'type-error', 'unbound-variable', 'unbound-slot',
    'undefined-function', 'warning',
}
# Built-in class names.  Set literal instead of set([...]) (flake8 C405).
BUILTIN_CLASSES = {
    'array', 'broadcast-stream', 'bit-vector', 'built-in-class', 'character',
    'class', 'complex', 'concatenated-stream', 'cons', 'echo-stream',
    'file-stream', 'float', 'function', 'generic-function', 'hash-table',
    'integer', 'list', 'logical-pathname', 'method-combination', 'method',
    'null', 'number', 'package', 'pathname', 'ratio', 'rational', 'readtable',
    'real', 'random-state', 'restart', 'sequence', 'standard-class',
    'standard-generic-function', 'standard-method', 'standard-object',
    'string-stream', 'stream', 'string', 'structure-class', 'structure-object',
    'symbol', 'synonym-stream', 't', 'two-way-stream', 'vector',
}
| bsd-3-clause |
jobiols/server-tools | base_user_role/models/role.py | 6 | 3154 | # -*- coding: utf-8 -*-
# Copyright 2014 ABF OSIELL <http://osiell.com>
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl).
import datetime
import logging
from openerp import api, fields, models
_logger = logging.getLogger(__name__)
class ResUsersRole(models.Model):
    """User role delegating to res.groups via _inherits.

    Role lines (res.users.role.line) attach users to the role; whenever a
    role is created, written or deleted, the concerned users' groups are
    recomputed through ``set_groups_from_roles``.
    """

    _name = 'res.users.role'
    _inherits = {'res.groups': 'group_id'}
    _description = "User role"

    # Backing group record (delegation target of _inherits).
    group_id = fields.Many2one(
        'res.groups', required=True, ondelete='cascade',
        readonly=True, string=u"Associated group")
    # Lines linking users to this role (optionally date-bounded).
    line_ids = fields.One2many(
        'res.users.role.line', 'role_id', string=u"Users")
    # Convenience view of the users reachable through the lines.
    user_ids = fields.One2many(
        'res.users', string=u"Users", compute='_compute_user_ids')

    _defaults = {  # pylint: disable=attribute-deprecated
        'category_id': api.model(
            lambda cls: cls.env.ref(
                'base_user_role.ir_module_category_role').id),
    }

    @api.multi
    @api.depends('line_ids.user_id')
    def _compute_user_ids(self):
        """Collect the users attached to the role through its lines."""
        for role in self:
            role.user_ids = role.line_ids.mapped('user_id')

    @api.model
    def create(self, vals):
        """Create the role, then refresh the groups of its users."""
        new_record = super(ResUsersRole, self).create(vals)
        new_record.update_users()
        return new_record

    @api.multi
    def write(self, vals):
        """Write the roles, then refresh the groups of their users."""
        res = super(ResUsersRole, self).write(vals)
        self.update_users()
        return res

    @api.multi
    def unlink(self):
        """Delete the roles and force a group refresh on affected users."""
        users = self.mapped('user_ids')
        res = super(ResUsersRole, self).unlink()
        users.set_groups_from_roles(force=True)
        return res

    @api.multi
    def update_users(self):
        """Update all the users concerned by the roles identified by `ids`."""
        users = self.mapped('user_ids')
        users.set_groups_from_roles()
        return True

    @api.model
    def cron_update_users(self):
        """Scheduled-job entry point: refresh the users of every role."""
        # Fix: use the module-level _logger instead of the root logger
        # (the original called logging.info() directly).
        _logger.info(u"Update user roles")
        self.search([]).update_users()
class ResUsersRoleLine(models.Model):
    """Assignment of a role to a user, optionally limited in time."""
    _name = 'res.users.role.line'
    _description = 'Users associated to a role'

    role_id = fields.Many2one(
        'res.users.role', string=u"Role", ondelete='cascade')
    user_id = fields.Many2one(
        'res.users', string=u"User")
    # Optional validity window; an empty bound means "no limit" on that side.
    date_from = fields.Date(u"From")
    date_to = fields.Date(u"To")
    is_enabled = fields.Boolean(u"Enabled", compute='_compute_is_enabled')

    @api.multi
    @api.depends('date_from', 'date_to')
    def _compute_is_enabled(self):
        # A line is enabled when today falls inside [date_from, date_to];
        # both bounds are inclusive and each is optional.
        today = datetime.date.today()
        for role_line in self:
            role_line.is_enabled = True
            if role_line.date_from:
                date_from = fields.Date.from_string(role_line.date_from)
                if date_from > today:
                    role_line.is_enabled = False
            if role_line.date_to:
                date_to = fields.Date.from_string(role_line.date_to)
                if today > date_to:
                    role_line.is_enabled = False

    @api.multi
    def unlink(self):
        # Force a group recomputation for the users that lose the role.
        users = self.mapped('user_id')
        res = super(ResUsersRoleLine, self).unlink()
        users.set_groups_from_roles(force=True)
        return res
| agpl-3.0 |
emanuelfeld/poirot | poirot/utils.py | 1 | 3139 | # -*- coding: utf-8 -*-
from __future__ import print_function
import os
import sys
import subprocess
from .filters import style
def ask(question, options, response=None, formatting=None):
    """Prompt the user until one of the prescribed options is entered.

    Args:
        question: The user prompt
        options (list): The responses to choose from
        response: Optional pre-seeded answer; prompting stops as soon as
            it matches one of `options`.
        formatting: A key from style_codes in clients.style

    Returns:
        response (str): The chosen `options` item.
    """
    # raw_input on legacy Python 2, input on Python 3.
    reader = raw_input if sys.version_info[:2] <= (2, 7) else input
    template = "{} [{}] ".format(question, ", ".join(options))
    while response not in options:
        response = reader(style(template, formatting))
    return response
def merge_dicts(*dicts):
    """Merge any number of dicts into a single new dict.

    Note:
        Later arguments win: a key present in several dicts takes the
        value from the right-most dict that defines it.
    """
    combined = {}
    for mapping in dicts:
        for key, value in mapping.items():
            combined[key] = value
    return combined
def execute_cmd(cmd):
    """
    Execute *cmd* (a list of program arguments) and return ``(out, err)``
    with the captured stdout and stderr.

    Output is first requested as text; if decoding fails, the command is
    re-run in binary mode and the raw bytes are normalized from latin-1
    to UTF-8. If normalization itself fails, an error is printed and an
    empty string is returned for stdout.
    """
    try:
        popen = subprocess.Popen(cmd, stderr=subprocess.PIPE, stdout=subprocess.PIPE,
                                 universal_newlines=True)
        (out, err) = popen.communicate()
    except UnicodeDecodeError:
        popen = subprocess.Popen(cmd, stderr=subprocess.PIPE, stdout=subprocess.PIPE)
        (out, err) = popen.communicate()
    # Bug fix: only normalize when we actually got bytes (the binary
    # fallback path, or any str on Python 2). Previously `out.decode` ran
    # unconditionally and, on Python 3 text output, raised AttributeError
    # that a bare `except:` silently converted into an empty stdout.
    if isinstance(out, bytes):
        try:
            out = out.decode("latin-1")
            out = out.encode("utf-8")
        except Exception:
            error = sys.exc_info()[0]
            print(style("There was a problem executing command: {}\n".format(cmd), "red"), error)
            out = ""
    return (out, err)
def utf8_decode(text):
    """Return *text* decoded from UTF-8, or unchanged if already text."""
    try:
        decode = text.decode
    except AttributeError:
        # Already a (unicode) string: nothing to do.
        return text
    return decode("utf-8")
def is_git_dir(directory):
    """Raise IOError unless *directory* exists on disk.

    Used to check that the command was invoked on a git directory.
    """
    if os.path.exists(directory):
        return
    raise IOError("""Invalid .git directory: {directory}\nSpecify
            the correct local directory with
            --dir""".format(directory=directory))
def clone_pull(git_url, repo_dir):
    """
    Clones a repository from `git_url` or optionally does a
    git pull if the repository already exists at `repo_dir`.
    Runs only if url argument provided to poirot command.
    """
    try:
        cmd = ["git", "clone", git_url, repo_dir]
        subprocess.check_output(cmd, universal_newlines=True)
    except subprocess.CalledProcessError:
        # Clone failed (typically because repo_dir already exists):
        # offer to update the existing checkout instead.
        # Bug fix: "darkblue" was previously passed positionally and landed
        # in ask()'s `response` parameter (where it was silently ignored);
        # it is the prompt color and must be passed as `formatting`.
        response = ask("Do you want to git-pull?", ["y", "n"], formatting="darkblue")
        if response == "y":
            cmd = ["git", "--git-dir=%s/.git" % (repo_dir), "pull"]
            out = subprocess.check_output(cmd, universal_newlines=True)
            print(style("Git says: {}".format(out), "smoke"))
    except:
        # Intentionally broad: report the failure, then re-raise unchanged
        # so the caller still sees the original exception.
        error = sys.exc_info()[0]
        print(style("Problem writing to destination: {}\n".format(repo_dir), "red"), error)
        raise
| mit |
iSECPartners/Scout2 | AWSScout2/configs/base.py | 3 | 6910 | # -*- coding: utf-8 -*-
import copy
from hashlib import sha1
from threading import Event, Thread
# Python2 vs Python3
try:
from Queue import Queue
except ImportError:
from queue import Queue
from opinel.utils.aws import build_region_list, connect_service, handle_truncated_response
from opinel.utils.console import printException, printInfo
from AWSScout2.configs.threads import thread_configs
from AWSScout2.output.console import FetchStatusLogger
from AWSScout2.utils import format_service_name
########################################
# Globals
########################################
status = None
formatted_string = None
class GlobalConfig(object):

    def get_non_aws_id(self, name):
        """Return a stable pseudo-identifier for a resource without an AWS ID.

        Not all AWS resources have an ID, and some services allow "." in
        names, which breaks Scout2's recursion scheme when the name is used
        directly as an ID — so the SHA1 digest of the name is used instead.

        :param name: Name of the resource.
        :return: Hex-encoded SHA1 digest of the name.
        """
        digest = sha1()
        digest.update(name.encode('utf-8'))
        return digest.hexdigest()
class BaseConfig(GlobalConfig):
    """
    Base class for a per-service configuration fetcher.

    Concrete subclasses declare a ``targets`` class attribute describing the
    resource types to enumerate and implement ``parse_<target>`` methods;
    this class provides the threaded list/fetch machinery.
    """

    def __init__(self, thread_config = 4):
        # Service name is derived from the class name, e.g. Ec2Config -> ec2.
        self.service = type(self).__name__.replace('Config', '').lower() # TODO: use regex with EOS instead of plain replace
        self.thread_config = thread_configs[thread_config]

    def fetch_all(self, credentials, regions = [], partition_name = 'aws', targets = None):
        """
        Generic fetching function that iterates through all of the service's targets

        :param credentials: AWS credentials used to connect to the service
        :param regions: Names of regions to fetch data from (read-only; empty means all)
        :param partition_name: AWS partition to connect to
        :param targets: Type of resources to be fetched; defaults to all.
        """
        global status, formatted_string
        # Initialize targets
        if not targets:
            targets = type(self).targets
        printInfo('Fetching %s config...' % format_service_name(self.service))
        formatted_string = None
        api_service = self.service.lower()
        # Connect to the service
        if self.service in [ 's3' ]:  # S3 namespace is global but APIs aren't....
            api_clients = {}
            for region in build_region_list(self.service, regions, partition_name):
                api_clients[region] = connect_service('s3', credentials, region, silent = True)
            api_client = api_clients[list(api_clients.keys())[0]]
        elif self.service == 'route53domains':
            api_client = connect_service(self.service, credentials, 'us-east-1', silent = True)  # TODO: use partition's default region
        else:
            api_client = connect_service(self.service, credentials, silent = True)
        # Threading to fetch & parse resources (queue consumer)
        params = {'api_client': api_client}
        if self.service in ['s3']:
            params['api_clients'] = api_clients
        q = self._init_threading(self.__fetch_target, params, self.thread_config['parse'])
        # Threading to list resources (queue feeder)
        params = {'api_client': api_client, 'q': q}
        if self.service in ['s3']:
            params['api_clients'] = api_clients
        qt = self._init_threading(self.__fetch_service, params, self.thread_config['list'])
        # Init display
        self.fetchstatuslogger = FetchStatusLogger(targets)
        # Go
        for target in targets:
            qt.put(target)
        # Join
        qt.join()
        q.join()
        # Show completion and force newline
        if self.service != 'iam':
            self.fetchstatuslogger.show(True)

    def finalize(self):
        """Copy per-target "fetched" counters onto the object, then drop the logger."""
        for t in self.fetchstatuslogger.counts:
            setattr(self, '%s_count' % t, self.fetchstatuslogger.counts[t]['fetched'])
        self.__delattr__('fetchstatuslogger')

    def _init_threading(self, function, params={}, num_threads=10):
        """
        Start ``num_threads`` daemon workers running ``function(q, params)``.

        :param function: Worker callable consuming items from the queue.
        :param params: Parameter dict shared (read-only) by all workers.
        :param num_threads: Worker count; falsy values fall back to 10.
        :return: The work queue feeding the workers.
        """
        # Init queue and threads
        q = Queue(maxsize=0)  # TODO: find something appropriate
        if not num_threads:
            # Bug fix: this fallback used to reference an undefined name
            # (`targets`), raising NameError whenever a falsy thread count
            # was passed; fall back to the documented default instead.
            num_threads = 10
        for i in range(num_threads):
            worker = Thread(target=function, args=(q, params))
            worker.daemon = True  # modern replacement for deprecated setDaemon(True)
            worker.start()
        return q

    def __fetch_service(self, q, params):
        """Queue feeder: list resources of each target type and enqueue them."""
        api_client = params['api_client']
        try:
            while True:
                try:
                    target_type, response_attribute, list_method_name, list_params, ignore_list_error = q.get()
                    if not list_method_name:
                        continue
                    try:
                        method = getattr(api_client, list_method_name)
                    except Exception as e:
                        printException(e)
                        continue
                    try:
                        # Allow a single params dict as shorthand for a list of one.
                        if type(list_params) != list:
                            list_params = [ list_params ]
                        targets = []
                        for lp in list_params:
                            targets += handle_truncated_response(method, lp, [response_attribute])[response_attribute]
                    except Exception as e:
                        if not ignore_list_error:
                            printException(e)
                        targets = []
                    self.fetchstatuslogger.counts[target_type]['discovered'] += len(targets)
                    for target in targets:
                        params['q'].put((target_type, target),)
                except Exception as e:
                    printException(e)
                finally:
                    # Always mark the item done, even on failure, so join() returns.
                    q.task_done()
        except Exception as e:
            printException(e)
            pass

    def __fetch_target(self, q, params):
        """Queue consumer: parse each discovered resource via parse_<type>()."""
        global status
        try:
            while True:
                try:
                    target_type, target = q.get()
                    # Make a full copy of the target in case we need to re-queue it
                    backup = copy.deepcopy(target)
                    method = getattr(self, 'parse_%s' % target_type)
                    method(target, params)
                    self.fetchstatuslogger.counts[target_type]['fetched'] += 1
                    self.fetchstatuslogger.show()
                except Exception as e:
                    # Throttled API calls are retried; anything else is logged.
                    if hasattr(e, 'response') and 'Error' in e.response and e.response['Error']['Code'] in [ 'Throttling' ]:
                        q.put((target_type, backup),)
                    else:
                        printException(e)
                finally:
                    q.task_done()
        except Exception as e:
            printException(e)
            pass
| gpl-2.0 |
chripell/mytools | gimp/gimp-filters/scripts/file-ora.py | 1 | 9365 | #!/usr/bin/env python
#
# Copyright (C) 2009 by Jon Nordby <jononor@gmail.com>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
# Based on MyPaint source code by Martin Renold
# http://gitorious.org/mypaint/mypaint/blobs/edd84bcc1e091d0d56aa6d26637aa8a925987b6a/lib/document.py
from gimpfu import *
import tempfile, zipfile, os
import xml.etree.ElementTree as ET
layermodes_map = {
"svg:src-over": NORMAL_MODE,
"svg:plus": ADDITION_MODE,
"svg:multiply": MULTIPLY_MODE,
"svg:screen": SCREEN_MODE,
"svg:overlay": OVERLAY_MODE,
"svg:darken": DARKEN_ONLY_MODE,
"svg:lighten": LIGHTEN_ONLY_MODE,
"svg:color-dodge": DODGE_MODE,
"svg:color-burn": BURN_MODE,
"svg:hard-light": HARDLIGHT_MODE,
"svg:soft-light": SOFTLIGHT_MODE,
"svg:difference": DIFFERENCE_MODE,
}
def reverse_map(mapping):
    """Return a new dict with the keys and values of *mapping* swapped.

    Uses items() instead of the Python-2-only iteritems() so the helper
    works on both Python 2 and Python 3.
    """
    return dict((v, k) for k, v in mapping.items())
def get_image_attributes(orafile):
    """Parse stack.xml from an open OpenRaster zip.

    Returns (stack_element, width, height) taken from the root <image>.
    """
    image = ET.fromstring(orafile.read('stack.xml'))
    width = int(image.attrib.get('w', ''))
    height = int(image.attrib.get('h', ''))
    return image.find('stack'), width, height
def get_layer_attributes(layer):
    """Extract position/compositing attributes from a <layer> XML element.

    Returns (path, name, x, y, opacity, visible, gimp_layer_mode), with
    OpenRaster defaults applied for every missing attribute.
    """
    attrs = layer.attrib
    path = attrs.get('src', '')
    name = attrs.get('name', '')
    x = int(attrs.get('x', '0'))
    y = int(attrs.get('y', '0'))
    opac = float(attrs.get('opacity', '1.0'))
    visible = attrs.get('visibility', 'visible') != 'hidden'
    composite_op = attrs.get('composite-op', 'svg:src-over')
    # Unknown composite operators fall back to GIMP's normal mode.
    return (path, name, x, y, opac, visible,
            layermodes_map.get(composite_op, NORMAL_MODE))
def thumbnail_ora(filename, thumb_size):
    """Load the embedded thumbnail from an OpenRaster file.

    Returns (thumbnail_image, full_width, full_height). The requested
    thumb_size is currently ignored (no rescaling is performed).
    """
    # FIXME: Untested. Does not seem to be used at all?
    # Should be run when registered and there is no thumbnail in cache.
    scratch = tempfile.mkdtemp('gimp-plugin-file-ora')
    orafile = zipfile.ZipFile(filename)
    stack, width, height = get_image_attributes(orafile)
    # GIMP can only load from a real file, not a zip member, so spill the
    # embedded PNG to a temporary file first.
    png_path = os.path.join(scratch, 'tmp.png')
    handle = open(png_path, 'wb')
    handle.write(orafile.read('Thumbnails/thumbnail.png'))
    handle.close()
    img = pdb['file-png-load'](png_path)
    # TODO: scaling
    os.remove(png_path)
    os.rmdir(scratch)
    return (img, width, height)
def save_ora(img, drawable, filename, raw_filename):
    """Export *img* as an OpenRaster (.ora) archive at *filename*.

    Writes the mandatory uncompressed ``mimetype`` entry first, one PNG per
    layer under ``data/``, a thumbnail bounded to 256px, and the
    ``stack.xml`` layer index; finally replaces any existing file.
    """
    def write_file_str(zfile, fname, data):
        # work around a permission bug in the zipfile library: http://bugs.python.org/issue3394
        zi = zipfile.ZipInfo(fname)
        zi.external_attr = 0100644 << 16  # -rw-r--r-- in the zip's Unix mode bits
        zfile.writestr(zi, data)

    tempdir = tempfile.mkdtemp('gimp-plugin-file-ora')
    # use .tmpsave extension, so we don't overwrite a valid file if there is an exception
    orafile = zipfile.ZipFile(filename + '.tmpsave', 'w', compression=zipfile.ZIP_STORED)
    write_file_str(orafile, 'mimetype', 'image/openraster') # must be the first file written

    # build image attributes
    image = ET.Element('image')
    stack = ET.SubElement(image, 'stack')
    a = image.attrib
    a['w'] = str(img.width)
    a['h'] = str(img.height)

    def store_layer(img, drawable, path):
        # Spill the drawable to a temporary PNG, then copy it into the zip.
        tmp = os.path.join(tempdir, 'tmp.png')
        interlace, compression = 0, 2
        png_chunks = (1, 1, 0, 1, 1) #write all PNG chunks except oFFs(ets)
        pdb['file-png-save'](img, drawable, tmp, 'tmp.png',
                             interlace, compression, *png_chunks)
        orafile.write(tmp, path)
        os.remove(tmp)

    def add_layer(x, y, opac, gimp_layer, path, visible=True):
        # Store the layer PNG and record its metadata in stack.xml.
        store_layer(img, gimp_layer, path)
        #create layer attributes
        layer = ET.Element('layer')
        stack.append(layer)
        a = layer.attrib
        a['src'] = path
        a['name'] = gimp_layer.name
        a['x'] = str(x)
        a['y'] = str(y)
        a['opacity'] = str(opac)
        a['visibility'] = 'visible' if visible else 'hidden'
        a['composite-op'] = reverse_map(layermodes_map).get(gimp_layer.mode, 'svg:src-over')
        return layer

    #save layers
    for lay in img.layers:
        x, y = lay.offsets
        opac = lay.opacity / 100.0 # needs to be between 0.0 and 1.0
        add_layer(x, y, opac, lay, 'data/%s.png' % lay.name.decode('utf-8'), lay.visible)

    #save thumbnail
    w, h = img.width, img.height
    #should be at most 256x256, without changing aspect ratio
    if w > h:
        w, h = 256, max(h*256/w, 1)
    else:
        w, h = max(w*256/h, 1), 256
    thumb = pdb['gimp-image-duplicate'](img)
    thumb_layer = thumb.flatten()
    thumb_layer.scale(w, h)
    store_layer(thumb, thumb_layer, 'Thumbnails/thumbnail.png')
    gimp.delete(thumb)

    #write stack.xml
    xml = ET.tostring(image, encoding='UTF-8')
    write_file_str(orafile, 'stack.xml', xml)

    #finish up
    orafile.close()
    os.rmdir(tempdir)
    if os.path.exists(filename):
        os.remove(filename) # win32 needs that
    os.rename(filename + '.tmpsave', filename)
def load_ora(filename, raw_filename):
    """Load an OpenRaster (.ora) archive as a GIMP image with layers."""
    tempdir = tempfile.mkdtemp('gimp-plugin-file-ora')
    orafile = zipfile.ZipFile(filename)
    stack, w, h = get_image_attributes(orafile)

    img = gimp.Image(w, h, RGB)
    img.filename = filename

    def get_layers(root):
        """returns a flattened list of all layers under root"""
        res = []
        for item in root:
            if item.tag == 'layer':
                res.append(item)
            elif item.tag == 'stack':
                # Nested stacks are flattened recursively.
                res += get_layers(item)
        return res

    for layer_no, layer in enumerate(get_layers(stack)):
        path, name, x, y, opac, visible, layer_mode = get_layer_attributes(layer)
        if not path.lower().endswith('.png'):
            continue
        if not name:
            #use the filename without extention as name
            n = os.path.basename(path)
            name = os.path.splitext(n)[0]
        #create temp file. Needed because gimp cannot load files from inside a zip file
        tmp = os.path.join(tempdir, 'tmp.png')
        f = open(tmp, 'wb')
        try:
            data = orafile.read(path)
        except KeyError:
            # support for bad zip files (saved by old versions of this plugin)
            data = orafile.read(path.encode('utf-8'))
            print 'WARNING: bad OpenRaster ZIP file. There is an utf-8 encoded filename that does not have the utf-8 flag set:', repr(path)
        f.write(data)
        f.close()
        #import layer, set attributes and add to image
        gimp_layer = pdb['gimp-file-load-layer'](img, tmp)
        gimp_layer.name = name
        gimp_layer.mode = layer_mode
        gimp_layer.set_offsets(x, y)  #move to correct position
        gimp_layer.opacity = opac * 100  #a float between 0 and 100
        gimp_layer.visible = visible
        img.add_layer(gimp_layer, layer_no)
        os.remove(tmp)
    os.rmdir(tempdir)
    return img
def register_load_handlers():
    # Register this plugin as GIMP's loader (and thumbnailer) for .ora
    # files with the image/openraster MIME type.
    gimp.register_load_handler('file-ora-load', 'ora', '')
    pdb['gimp-register-file-handler-mime']('file-ora-load', 'image/openraster')
    pdb['gimp-register-thumbnail-loader']('file-ora-load', 'file-ora-load-thumb')
def register_save_handlers():
    # Register this plugin as GIMP's exporter for the .ora extension.
    gimp.register_save_handler('file-ora-save', 'ora', '')
register(
'file-ora-load-thumb', #name
'loads a thumbnail from an OpenRaster (.ora) file', #description
'loads a thumbnail from an OpenRaster (.ora) file',
'Jon Nordby', #author
'Jon Nordby', #copyright
'2009', #year
None,
None, #image type
[ #input args. Format (type, name, description, default [, extra])
(PF_STRING, 'filename', 'The name of the file to load', None),
(PF_INT, 'thumb-size', 'Preferred thumbnail size', None),
],
[ #results. Format (type, name, description)
(PF_IMAGE, 'image', 'Thumbnail image'),
(PF_INT, 'image-width', 'Width of full-sized image'),
(PF_INT, 'image-height', 'Height of full-sized image')
],
thumbnail_ora, #callback
)
register(
'file-ora-save', #name
'save an OpenRaster (.ora) file', #description
'save an OpenRaster (.ora) file',
'Jon Nordby', #author
'Jon Nordby', #copyright
'2009', #year
'OpenRaster',
'*',
[ #input args. Format (type, name, description, default [, extra])
(PF_IMAGE, "image", "Input image", None),
(PF_DRAWABLE, "drawable", "Input drawable", None),
(PF_STRING, "filename", "The name of the file", None),
(PF_STRING, "raw-filename", "The name of the file", None),
],
[], #results. Format (type, name, description)
save_ora, #callback
on_query = register_save_handlers,
menu = '<Save>'
)
register(
'file-ora-load', #name
'load an OpenRaster (.ora) file', #description
'load an OpenRaster (.ora) file',
'Jon Nordby', #author
'Jon Nordby', #copyright
'2009', #year
'OpenRaster',
None, #image type
[ #input args. Format (type, name, description, default [, extra])
(PF_STRING, 'filename', 'The name of the file to load', None),
(PF_STRING, 'raw-filename', 'The name entered', None),
],
[(PF_IMAGE, 'image', 'Output image')], #results. Format (type, name, description)
load_ora, #callback
on_query = register_load_handlers,
menu = "<Load>",
)
main()
| apache-2.0 |
mozilla/onyx | tests/api/test_v1.py | 3 | 3023 | import json
from flask import url_for
from nose.tools import (
assert_equals,
assert_is_none
)
from tests.base import BaseTestCase
class TestNewtabServing(BaseTestCase):
    """HTTP-level tests for the v1 links-fetch endpoint."""

    def test_missing_payload(self):
        """
        A call without a payload errors
        """
        response = self.client.post(url_for('v1_links.fetch'),
                                    content_type='application/json',
                                    headers=[("User-Agent", "TestClient")])
        # Bad request, no session cookie, empty body.
        assert_equals(response.status_code, 400)
        assert_is_none(response.headers.get('Set-Cookie'))
        assert_equals(response.content_length, 0)

    def test_missing_dircount(self):
        """
        A call without directoryCount errors
        """
        response = self.client.post(url_for('v1_links.fetch'),
                                    content_type='application/json',
                                    headers=[("User-Agent", "TestClient")],
                                    data=json.dumps({'locale': 'en-US'}))
        assert_equals(response.status_code, 400)
        assert_is_none(response.headers.get('Set-Cookie'))
        assert_equals(response.content_length, 0)

    def test_unknown_locale(self):
        """
        A call with an unknown locale yields an HTTP 204 response
        """
        response = self.client.post(url_for('v1_links.fetch'),
                                    content_type='application/json',
                                    headers=[("User-Agent", "TestClient")],
                                    data=json.dumps({'locale': 'zh-CN', 'directoryCount': {"organic": 1}}))
        # No content for locales the service has no payload for.
        assert_equals(response.status_code, 204)
        assert_equals(response.content_length, 0)

    def test_channel_failure(self):
        """
        A channel configuration problem will throw a 500 error
        """
        # Empty the localization table to simulate a broken channel config.
        self.env.config.LINKS_LOCALIZATIONS = {
        }
        response = self.client.post(url_for('v1_links.fetch'),
                                    content_type='application/json',
                                    headers=[("User-Agent", "TestClient")],
                                    environ_base={"REMOTE_ADDR": "173.194.43.105"},
                                    data=json.dumps({'locale': 'en-US', 'directoryCount': {'organic': 1}}))
        assert_equals(response.status_code, 500)
        assert_equals(response.content_length, 0)

    def test_success(self):
        """
        A call with an known geo/locale pair redirects
        """
        response = self.client.post(url_for('v1_links.fetch'),
                                    content_type='application/json',
                                    headers=[("User-Agent", "TestClient")],
                                    environ_base={"REMOTE_ADDR": "173.194.43.105"},
                                    data=json.dumps({'locale': 'en-US', 'directoryCount': {'organic': 1}}))
        # 303 "See Other" redirect to the payload, with an empty body.
        assert_equals(response.status_code, 303)
        assert_equals(response.content_length, 0)
| mpl-2.0 |
ianya/openyoudao | openyoudao.py | 1 | 12888 | #!/usr/bin/python
#-*- coding: utf-8 -*-
# RECORD extension
# Not very much unlike the xmacrorec2 program in the xmacro package.
import popen2
import goslate
from time import sleep
import thread
import webshot
import sys
import fusionyoudao
import gl
import os
import webkit, gtk
# Change path so we find Xlib
sys.path.insert(1, os.path.join(sys.path[0], '..'))
from Xlib import X, XK, display
from Xlib.ext import record
from Xlib.protocol import rq
record_dpy = display.Display()
def record_callback(reply):
    """X RECORD callback: on every mouse-button release, read the current
    X selection via xclip and either execute a control command
    (%index%, %lock%, %history%, ...) or look the selected text up with
    the configured dictionary, then refresh the popup window.
    """
    if reply.category != record.FromServer:
        return
    if reply.client_swapped:
        print "* received swapped protocol data, cowardly ignored"
        return
    if not len(reply.data) or ord(reply.data[0]) < 2:
        # not an event
        return

    data = reply.data
    while len(data):
        event, data = rq.EventField(None).parse_binary_value(data, record_dpy.display, None, None)
        # deal with the event type
        if event.type == X.ButtonRelease:
            # get text
            global Alive
            pipe = os.popen("xclip -o")
            text = pipe.readline()
            pipe.readlines() # drain whatever is left in the pipe
            pipe.close()
            print "您选取的是: ", text
            text = text.strip('\r\n\x00').lower().strip()
            # Only react to a fresh, non-empty selection while unlocked;
            # %lock% must always get through so the user can unlock again.
            if(gl.pre_text != text and text!=""and gl.lock=="0" or text=="%lock%"):
                url = ""
                gl.pre_text = text
                # Make sure the cache directory and its files exist.
                if(False==os.path.exists(gl.cachedir)):
                    os.system("mkdir \'" + gl.cachedir + "\'")
                if(False==os.path.exists(gl.origindir)):
                    os.system("touch \'" + gl.origindir + "\'")
                if(False==os.path.exists(gl.resultdir)):
                    os.system("touch \'" + gl.resultdir + "\'")
                if(False==os.path.exists(gl.historydir)):
                    os.system("touch \'" + gl.historydir + "\'")
                if(False==os.path.exists(gl.keyworddir)):
                    os.system("touch \'" + gl.keyworddir + "\'")
                #youdao
                if "%zh2enlj%" in text:
                    gl.homeurl="file:///usr/share/openyoudao/zh2enlj.html"
                    gl.searchurl=gl.zh2enlj
                    url = ""
                    gl.func="lj"
                    gl.Dict="youdao"
                    gl.title="汉英例句"
                elif "%zh2japlj%" in text:
                    gl.homeurl="file:///usr/share/openyoudao/zh2japlj.html"
                    gl.searchurl=gl.zh2japlj
                    url = ""
                    gl.func="lj"
                    gl.Dict="youdao"
                    gl.title="汉日例句"
                elif "%zh2kolj%" in text:
                    gl.homeurl="file:///usr/share/openyoudao/zh2kolj.html"
                    gl.searchurl=gl.zh2kolj
                    url = ""
                    gl.func="lj"
                    gl.Dict="youdao"
                    gl.title="汉韩例句"
                elif "%zh2frlj%" in text:
                    gl.homeurl="file:///usr/share/openyoudao/zh2frlj.html"
                    gl.searchurl=gl.zh2frlj
                    url = ""
                    gl.func="lj"
                    gl.Dict="youdao"
                    gl.title="汉法例句"
                elif "%zh2en%" in text:
                    # NOTE(review): gl.func is not reset here — after an
                    # example-sentence (lj) lookup it stays "lj"; confirm
                    # this is intended.
                    gl.homeurl="file:///usr/share/openyoudao/zh2en.html"
                    gl.searchurl=gl.zh2en
                    url = ""
                    gl.Dict="youdao"
                    gl.title="汉英互译"
                elif "%zh2jap%" in text:
                    gl.homeurl="file:///usr/share/openyoudao/zh2jap.html"
                    gl.searchurl=gl.zh2jap
                    url = ""
                    gl.Dict="youdao"
                    gl.title="汉日互译"
                elif "%zh2ko%" in text:
                    gl.homeurl="file:///usr/share/openyoudao/zh2ko.html"
                    gl.searchurl=gl.zh2ko
                    url = ""
                    gl.Dict="youdao"
                    gl.title="汉韩互译"
                elif "%zh2fr%" in text:
                    gl.homeurl="file:///usr/share/openyoudao/zh2fr.html"
                    gl.searchurl=gl.zh2fr
                    url = ""
                    gl.Dict="youdao"
                    gl.title="汉法互译"
                #config
                elif "%index%" in text:
                    gl.homeurl="file:///usr/share/openyoudao/config.html"
                    url = ""
                    gl.title="有道首页"
                elif "%helps%" in text:
                    gl.homeurl="file:///usr/share/openyoudao/help.html"
                    url = ""
                    gl.title="使用说明"
                elif "%goslate%" in text:
                    gl.homeurl="file:///usr/share/openyoudao/goslate.html"
                    url = ""
                    gl.Dict="google"
                    gl.title="谷歌翻译"
                elif "%donate%" in text:
                    gl.homeurl="file:///usr/share/openyoudao/donate.html"
                    url = ""
                    gl.title="捐赠页面"
                elif "%expand%" in text:
                    gl.homeurl="file:///usr/share/openyoudao/expand.html"
                    url = ""
                    gl.title="展开选项"
                elif "%history%" in text:
                    gl.homeurl= "file://" + gl.historydir
                    if Alive==1:
                        # Rebuild the history page from the keyword log.
                        his_tar=open(gl.historydir,'w')
                        print >> his_tar,"<!DOCTYPE html><html><head><meta charset=\"utf-8\"><title>History</title></head><body><p>%s %s</p>"%("%index%","%expand%")
                        keyword=open(gl.keyworddir,'r')
                        print >> his_tar,keyword.read()
                        print >> his_tar,"</body></html>"
                        his_tar.close()
                        keyword.close()
                    url = ""
                    gl.title="取词历史"
                elif "%lock%" in text:
                    # Toggle selection capture on/off.
                    if gl.lock=="0":
                        gl.lock="1"
                        gl.homeurl="file:///usr/share/openyoudao/lock.html"
                        gl.title="锁定取词"
                    else:
                        gl.lock="0"
                        gl.homeurl="file:///usr/share/openyoudao/unlock.html"
                        gl.title="取词解锁"
                    url = ""
                elif "%exits%" in text:
                    Alive=0
                else:
                    # Not a control command: treat the selection as a query.
                    url= gl.searchurl + text
                if url !="":
                    if Alive==1:
                        # Append the looked-up word to the keyword history log.
                        k_tar=open(gl.keyworddir,'a')
                        print >> k_tar,"<p>%s</p>" % text
                        k_tar.close()
                    #fp = file(gl.keyworddir)
                    #lines = []
                    #for line in fp: # built-in iterator, very efficient
                    #    lines.append(line)
                    #fp.close()
                    #lines.insert(0, "<p>%s</p>" % text) # insert at the second line
                    #s = '\n'.join(lines)
                    #fp = file(gl.keyworddir, 'w')
                    #fp.write(s)
                    #fp.close()
                    #[google youdao]
                    if gl.Dict=="google":
                        # Detect the source language, translate to the "other
                        # side" (zh-CN <-> en) and render a small HTML page
                        # into the google cache file.
                        gs = goslate.Goslate()
                        gl.lang=gs.detect(text)
                        g_tar=open(gl.googledir,'w+')
                        if gl.lang=='zh-CN':
                            basehtml="<!DOCTYPE html><html><head><meta charset=\"utf-8\"><title>Google Translate</title></head><body><p>Source Language: %s</p><p>Target Language : %s</p><p>Selected Text : %s</p><p>Target Text : %s</p><p>%s %s</p></body></html>"%(gl.lang,'en',text,gs.translate(text, 'en'),"%index%","%expand%")
                        else:
                            basehtml="<!DOCTYPE html><html><head><meta charset=\"utf-8\"><title>Google Translate</title></head><body><p>Source Language: %s</p><p>Target Language : %s</p><p>Selected Text : %s</p><p>Target Text : %s</p><p>%s %s</p></body></html>"%(gl.lang,'zh-CN',text,gs.translate(text, 'zh-CN'),"%index%","%expand%")
                        print >> g_tar,basehtml
                        g_tar.close()
                        gl.homeurl= "file://" + gl.googledir
                    if gl.Dict=="youdao":
                        os.system("curl -s -w %{http_code}:%{time_connect}:%{time_starttransfer}:%{time_total}:%{speed_download} -o \'" + gl.origindir +"\' \'" + url+ "\'") # fetch the page (no proxy)
                        fusionyoudao.reconstruct(gl.func)
                        gl.homeurl="file://" + gl.resultdir # compose the final cached page URL
                if Alive==1:
                    window.settitle(gl.title)
                    window.load(gl.homeurl)
                    window.show()
if not record_dpy.has_extension("RECORD"):
print "RECORD extension not found"
sys.exit(1)
r = record_dpy.record_get_version(0, 0)
print "RECORD extension version %d.%d" % (r.major_version, r.minor_version)
# Create a recording context; we only want key and mouse events
ctx = record_dpy.record_create_context(
0,
[record.AllClients],
[{
'core_requests': (0, 0),
'core_replies': (0, 0),
'ext_requests': (0, 0, 0, 0),
'ext_replies': (0, 0, 0, 0),
'delivered_events': (0, 0),
'device_events': (X.KeyPress, X.MotionNotify),
'errors': (0, 0),
'client_started': False,
'client_died': False,
}])
def webshow():
    """Run the popup webkit window and the GTK main loop; on exit, tear
    down the RECORD context and kill the whole process tree."""
    global window
    global Alive
    window = webshot.Window()
    window.load(gl.homeurl)
    window.show()
    gtk.main()
    # gtk.main() returned: the window was closed. Clean up and stop.
    record_dpy.record_free_context(ctx)
    os.system("ps aux | grep openyoudao.py |awk '{print $2}' |xargs kill -9 >/dev/null")
    Alive=0
def gettext():
    """Start capturing selections: clear the clipboard, then block in the
    X RECORD event loop dispatching to record_callback."""
    os.system("xclip -f /dev/null") # clear the clipboard
    record_dpy.record_enable_context(ctx,record_callback)
    record_dpy.record_free_context(ctx)
def main():
    """Entry point: spawn the UI and capture threads, then poll until
    shutdown, reaping stray xclip processes along the way."""
    global Alive
    Alive=1
    thread.start_new_thread(webshow,())
    sleep(0.5)
    thread.start_new_thread(gettext,())
    while Alive:
        sleep(0.2)
        # Count leftover xclip processes and kill them if any pile up.
        clip_id=os.popen("ps aux | grep xclip | grep -v grep |awk '{print $2}'| grep -v ^$ |wc -l")
        pid = clip_id.readline().strip('\r\n\x00')
        if int(pid)>=1:
            os.system("ps aux | grep xclip |awk '{print $2}' |xargs kill -9 >/dev/null")

if __name__ == '__main__':
    main()
| mit |
Itxaka/st2 | st2reactor/st2reactor/sensor/base.py | 4 | 2638 | import abc
import six
import eventlet
__all__ = [
'Sensor',
'PollingSensor'
]
@six.add_metaclass(abc.ABCMeta)
class BaseSensor(object):
    """
    Base Sensor class - not to be instantiated directly.
    """

    def __init__(self, sensor_service, config=None):
        """
        :param sensor_service: Sensor Service instance.
        :type sensor_service: :class:``st2reactor.container.sensor_wrapper.SensorService``

        :keyword config: Sensor config.
        :type config: ``dict`` or None
        """
        self._sensor_service = sensor_service
        # Fall back to an empty dict so subclasses can index into the
        # config without None checks.
        self._config = config or {}

    @abc.abstractmethod
    def setup(self):
        """
        Run the sensor initialization / setup code (if any).
        """
        pass

    @abc.abstractmethod
    def run(self):
        """
        Run the sensor.
        """
        pass

    @abc.abstractmethod
    def cleanup(self):
        """
        Run the sensor cleanup code (if any).
        """
        pass

    @abc.abstractmethod
    def add_trigger(self, trigger):
        """
        Runs when trigger is created
        """
        pass

    @abc.abstractmethod
    def update_trigger(self, trigger):
        """
        Runs when trigger is updated
        """
        pass

    @abc.abstractmethod
    def remove_trigger(self, trigger):
        """
        Runs when trigger is deleted
        """
        pass
class Sensor(BaseSensor):
    """
    Base class to be inherited from by the passive sensors.
    """

    @abc.abstractmethod
    def run(self):
        # Passive sensors block in run() and dispatch triggers as the
        # external events they listen for arrive.
        pass
class PollingSensor(BaseSensor):
    """
    Base class to be inherited from by the active sensors.

    Active sensors periodically poll a 3rd party system for new information.
    """

    def __init__(self, sensor_service, config=None, poll_interval=5):
        """
        :param poll_interval: Seconds to sleep between poll() calls.
        :type poll_interval: ``float``
        """
        super(PollingSensor, self).__init__(sensor_service=sensor_service, config=config)
        self._poll_interval = poll_interval

    @abc.abstractmethod
    def poll(self):
        """
        Poll 3rd party system for new information.
        """
        pass

    def run(self):
        # Poll forever; eventlet.sleep yields control to other greenlets
        # between polls.
        while True:
            self.poll()
            eventlet.sleep(self._poll_interval)

    def get_poll_interval(self):
        """
        Retrieve current poll interval.

        :return: Current poll interval.
        :rtype: ``float``
        """
        return self._poll_interval

    def set_poll_interval(self, poll_interval):
        """
        Set the poll interval.

        :param poll_interval: Poll interval to use.
        :type poll_interval: ``float``
        """
        self._poll_interval = poll_interval
hronoses/vispy | examples/basics/scene/volume.py | 4 | 4376 | # -*- coding: utf-8 -*-
# -----------------------------------------------------------------------------
# Copyright (c) 2015, Vispy Development Team. All Rights Reserved.
# Distributed under the (new) BSD License. See LICENSE.txt for more info.
# -----------------------------------------------------------------------------
# vispy: gallery 2
"""
Example volume rendering
Controls:
* 1 - toggle camera between first person (fly), regular 3D (turntable) and
arcball
* 2 - toggle between volume rendering methods
* 3 - toggle between stent-CT / brain-MRI image
* 4 - toggle between colormaps
* 0 - reset cameras
* [] - decrease/increase isosurface threshold
With fly camera:
* WASD or arrow keys - move around
* SPACE - brake
* FC - move up-down
* IJKL or mouse - look around
"""
from itertools import cycle
import numpy as np
from vispy import app, scene, io
from vispy.color import get_colormaps, BaseColormap
# Read volume
vol1 = np.load(io.load_data_file('volume/stent.npz'))['arr_0']
vol2 = np.load(io.load_data_file('brain/mri.npz'))['data']
vol2 = np.flipud(np.rollaxis(vol2, 1))
# Prepare canvas
canvas = scene.SceneCanvas(keys='interactive', size=(800, 600), show=True)
canvas.measure_fps()
# Set up a viewbox to display the image with interactive pan/zoom
view = canvas.central_widget.add_view()
# Set whether we are emulating a 3D texture
emulate_texture = False
# Create the volume visuals, only one is visible
volume1 = scene.visuals.Volume(vol1, parent=view.scene, threshold=0.225,
emulate_texture=emulate_texture)
volume1.transform = scene.STTransform(translate=(64, 64, 0))
volume2 = scene.visuals.Volume(vol2, parent=view.scene, threshold=0.2,
emulate_texture=emulate_texture)
volume2.visible = False
# Create two cameras (1 for firstperson, 3 for 3d person)
fov = 60.
cam1 = scene.cameras.FlyCamera(parent=view.scene, fov=fov, name='Fly')
cam2 = scene.cameras.TurntableCamera(parent=view.scene, fov=fov,
name='Turntable')
cam3 = scene.cameras.ArcballCamera(parent=view.scene, fov=fov, name='Arcball')
view.camera = cam2 # Select turntable at first
# create colormaps that work well for translucent and additive volume rendering
class TransFire(BaseColormap):
    # GLSL transfer function: maps a normalized intensity t to a warm
    # "fire" ramp whose alpha grows with t (fully transparent for small t).
    glsl_map = """
    vec4 translucent_fire(float t) {
        return vec4(pow(t, 0.5), t, t*t, max(0, t*1.05 - 0.05));
    }
    """
class TransGrays(BaseColormap):
    # GLSL transfer function: neutral gray ramp with a low alpha scale
    # (t * 0.05), suited to translucent/additive volume rendering.
    glsl_map = """
    vec4 translucent_grays(float t) {
        return vec4(t, t, t, t*0.05);
    }
    """
# Setup colormap iterators
opaque_cmaps = cycle(get_colormaps())
translucent_cmaps = cycle([TransFire(), TransGrays()])
opaque_cmap = next(opaque_cmaps)
translucent_cmap = next(translucent_cmaps)
# Implement key presses
@canvas.events.key_press.connect
def on_key_press(event):
    """Keyboard interaction: toggle cameras, render method, visible
    volume and colormap; adjust the isosurface threshold."""
    global opaque_cmap, translucent_cmap
    key = event.text
    if key == '1':
        # Cycle fly -> turntable -> arcball -> fly.
        next_cam = {cam1: cam2, cam2: cam3, cam3: cam1}
        view.camera = next_cam.get(view.camera, cam2)
        print(view.camera.name + ' camera')
    elif key == '2':
        methods = ['mip', 'translucent', 'iso', 'additive']
        method = methods[(methods.index(volume1.method) + 1) % len(methods)]
        print("Volume render method: %s" % method)
        if method in ['mip', 'iso']:
            cmap = opaque_cmap
        else:
            cmap = translucent_cmap
        for vol in (volume1, volume2):
            vol.method = method
            vol.cmap = cmap
    elif key == '3':
        # Swap which of the two volumes is shown.
        volume1.visible = not volume1.visible
        volume2.visible = not volume1.visible
    elif key == '4':
        if volume1.method in ['mip', 'iso']:
            cmap = opaque_cmap = next(opaque_cmaps)
        else:
            cmap = translucent_cmap = next(translucent_cmaps)
        volume1.cmap = cmap
        volume2.cmap = cmap
    elif key == '0':
        cam1.set_range()
        cam3.set_range()
    elif key != '' and key in '[]':
        # '[' lowers and ']' raises the isosurface threshold.
        step = -0.025 if key == '[' else 0.025
        volume1.threshold += step
        volume2.threshold += step
        active = volume1.threshold if volume1.visible else volume2.threshold
        print("Isosurface threshold: %0.3f" % active)
# for testing performance
#@canvas.connect
#def on_draw(ev):
#canvas.update()
if __name__ == '__main__':
print(__doc__)
app.run()
| bsd-3-clause |
cesarmarinhorj/phantomjs | src/qt/qtwebkit/Tools/Scripts/webkitpy/tool/bot/sheriff_unittest.py | 122 | 3783 | # Copyright (C) 2010 Google Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import unittest2 as unittest
from webkitpy.common.net.buildbot import Builder
from webkitpy.common.system.outputcapture import OutputCapture
from webkitpy.thirdparty.mock import Mock
from webkitpy.tool.bot.sheriff import Sheriff
from webkitpy.tool.mocktool import MockTool
class MockSheriffBot(object):
    """Minimal stand-in for the sheriff IRC bot used by SheriffTest."""

    name = "mock-sheriff-bot"
    watchers = ["watcher@example.com"]

    def run_webkit_patch(self, args):
        # Pretend webkit-patch always files the same bug, whatever the args.
        return "Created bug https://bugs.webkit.org/show_bug.cgi?id=36936\n"
class SheriffTest(unittest.TestCase):
    def test_post_blame_comment_on_bug(self):
        """Verify the bug comments Sheriff posts for broken builders,
        with and without an associated bug id and failing tests."""
        def run():
            sheriff = Sheriff(MockTool(), MockSheriffBot())
            builders = [
                Builder("Foo", None),
                Builder("Bar", None),
            ]
            commit_info = Mock()
            commit_info.bug_id = lambda: None
            commit_info.revision = lambda: 4321
            # Should do nothing with no bug_id
            sheriff.post_blame_comment_on_bug(commit_info, builders, [])
            sheriff.post_blame_comment_on_bug(commit_info, builders, ["mock-test-1", "mock-test-2"])
            # Should try to post a comment to the bug, but MockTool.bugs does nothing.
            commit_info.bug_id = lambda: 1234
            sheriff.post_blame_comment_on_bug(commit_info, builders, [])
            sheriff.post_blame_comment_on_bug(commit_info, builders, ["mock-test-1"])
            sheriff.post_blame_comment_on_bug(commit_info, builders, ["mock-test-1", "mock-test-2"])
        # One expected comment per call made with a real bug_id above; the
        # failing-test list is appended when non-empty.
        expected_logs = u"""MOCK bug comment: bug_id=1234, cc=['watcher@example.com']
--- Begin comment ---
http://trac.webkit.org/changeset/4321 might have broken Foo and Bar
--- End comment ---
MOCK bug comment: bug_id=1234, cc=['watcher@example.com']
--- Begin comment ---
http://trac.webkit.org/changeset/4321 might have broken Foo and Bar
The following tests are not passing:
mock-test-1
--- End comment ---
MOCK bug comment: bug_id=1234, cc=['watcher@example.com']
--- Begin comment ---
http://trac.webkit.org/changeset/4321 might have broken Foo and Bar
The following tests are not passing:
mock-test-1
mock-test-2
--- End comment ---
"""
        OutputCapture().assert_outputs(self, run, expected_logs=expected_logs)
| bsd-3-clause |
lokirius/python-for-android | python3-alpha/python3-src/Lib/xml/sax/__init__.py | 237 | 3503 | """Simple API for XML (SAX) implementation for Python.
This module provides an implementation of the SAX 2 interface;
information about the Java version of the interface can be found at
http://www.megginson.com/SAX/. The Python version of the interface is
documented at <...>.
This package contains the following modules:
handler -- Base classes and constants which define the SAX 2 API for
the 'client-side' of SAX for Python.
saxutils -- Implementation of the convenience classes commonly used to
work with SAX.
xmlreader -- Base classes and constants which define the SAX 2 API for
the parsers used with SAX for Python.
expatreader -- Driver that allows use of the Expat parser with SAX.
"""
from .xmlreader import InputSource
from .handler import ContentHandler, ErrorHandler
from ._exceptions import SAXException, SAXNotRecognizedException, \
SAXParseException, SAXNotSupportedException, \
SAXReaderNotAvailable
def parse(source, handler, errorHandler=ErrorHandler()):
    """Parse an XML document from a system identifier or file-like object.

    Creates a parser from the default parser list, installs *handler* as
    content handler and *errorHandler* as error handler, then parses
    *source*.  Passing ``None`` as *errorHandler* now selects a fresh
    :class:`ErrorHandler`, mirroring :func:`parseString`.
    """
    if errorHandler is None:
        errorHandler = ErrorHandler()
    parser = make_parser()
    parser.setContentHandler(handler)
    parser.setErrorHandler(errorHandler)
    parser.parse(source)
def parseString(string, handler, errorHandler=ErrorHandler()):
    """Parse an XML document from a string.

    *string* may be ``bytes`` or ``str``; previously only bytes worked
    because the input was unconditionally wrapped in a ``BytesIO``
    (``str`` raised ``TypeError``).  Text input is now fed to the parser
    as a character stream, matching the behaviour later adopted by
    CPython (bpo-34941).
    """
    from io import BytesIO, StringIO
    if errorHandler is None:
        errorHandler = ErrorHandler()
    parser = make_parser()
    parser.setContentHandler(handler)
    parser.setErrorHandler(errorHandler)

    inpsrc = InputSource()
    if isinstance(string, str):
        # Text: hand the parser a character stream so no encoding guess
        # is needed.
        inpsrc.setCharacterStream(StringIO(string))
    else:
        inpsrc.setByteStream(BytesIO(string))
    parser.parse(inpsrc)
# this is the parser list used by the make_parser function if no
# alternatives are given as parameters to the function
default_parser_list = ["xml.sax.expatreader"]
# tell modulefinder that importing sax potentially imports expatreader
_false = 0
if _false:
    import xml.sax.expatreader
import os, sys
# The PY_SAX_PARSER environment variable overrides the default parser
# list with a comma-separated list of module names.
if "PY_SAX_PARSER" in os.environ:
    default_parser_list = os.environ["PY_SAX_PARSER"].split(",")
del os
# On Jython the parser list can also come from the Java registry.
_key = "python.xml.sax.parser"
if sys.platform[:4] == "java" and sys.registry.containsKey(_key):
    default_parser_list = sys.registry.getProperty(_key).split(",")
def make_parser(parser_list=()):
    """Creates and returns a SAX parser.

    Creates the first parser it is able to instantiate of the ones
    given in the iterable created by chaining parser_list and
    default_parser_list.  The iterables must contain the names of Python
    modules containing both a SAX parser and a create_parser function.

    The default was changed from a mutable ``[]`` to an immutable ``()``
    (as CPython later did); callers may still pass any iterable of names.
    """
    for parser_name in list(parser_list) + default_parser_list:
        try:
            return _create_parser(parser_name)
        except ImportError:
            import sys
            if parser_name in sys.modules:
                # The parser module was found, but importing it
                # failed unexpectedly, pass this exception through
                raise
        except SAXReaderNotAvailable:
            # The parser module detected that it won't work properly,
            # so try the next one
            pass
    raise SAXReaderNotAvailable("No parsers found", None)
# --- Internal utility methods used by make_parser
if sys.platform[ : 4] == "java":
    # Jython: use the org.python import machinery to load the driver.
    def _create_parser(parser_name):
        from org.python.core import imp
        drv_module = imp.importName(parser_name, 0, globals())
        return drv_module.create_parser()
else:
    # CPython: a plain __import__ with a fromlist returns the leaf module.
    def _create_parser(parser_name):
        drv_module = __import__(parser_name,{},{},['create_parser'])
        return drv_module.create_parser()
# sys was only needed for the platform check above.
del sys
| apache-2.0 |
wfxiang08/django190 | tests/model_inheritance_regress/models.py | 150 | 5750 | from __future__ import unicode_literals
import datetime
from django.db import models
from django.utils.encoding import python_2_unicode_compatible
@python_2_unicode_compatible
class Place(models.Model):
    # Root of the multi-table inheritance chain used throughout this file.
    name = models.CharField(max_length=50)
    address = models.CharField(max_length=80)
    class Meta:
        ordering = ('name',)
    def __str__(self):
        return "%s the place" % self.name
@python_2_unicode_compatible
class Restaurant(Place):
    # One level of multi-table inheritance below Place.
    serves_hot_dogs = models.BooleanField(default=False)
    serves_pizza = models.BooleanField(default=False)
    def __str__(self):
        return "%s the restaurant" % self.name
@python_2_unicode_compatible
class ItalianRestaurant(Restaurant):
    # Two levels of multi-table inheritance below Place.
    serves_gnocchi = models.BooleanField(default=False)
    def __str__(self):
        return "%s the italian restaurant" % self.name
@python_2_unicode_compatible
class ParkingLot(Place):
    # An explicit link to the parent (we can control the attribute name).
    parent = models.OneToOneField(Place, primary_key=True, parent_link=True)
    capacity = models.IntegerField()
    def __str__(self):
        return "%s the parking lot" % self.name
class ParkingLot2(Place):
    # In lieu of any other connector, an existing OneToOneField will be
    # promoted to the primary key.
    parent = models.OneToOneField(Place)
class ParkingLot3(Place):
    # The parent_link connector need not be the pk on the model.
    primary_key = models.AutoField(primary_key=True)
    parent = models.OneToOneField(Place, parent_link=True)
class ParkingLot4(models.Model):
    # Test parent_link connector can be discovered in abstract classes.
    parent = models.OneToOneField(Place, parent_link=True)
    class Meta:
        abstract = True
# The two concrete subclasses differ only in base (MRO) order.
class ParkingLot4A(ParkingLot4, Place):
    pass
class ParkingLot4B(Place, ParkingLot4):
    pass
@python_2_unicode_compatible
class Supplier(models.Model):
    name = models.CharField(max_length=50)
    restaurant = models.ForeignKey(Restaurant)
    def __str__(self):
        return self.name
class Wholesaler(Supplier):
    # Child with a foreign key back to its own concrete parent model.
    retailer = models.ForeignKey(Supplier, related_name='wholesale_supplier')
class Parent(models.Model):
    # Callable default (evaluated per save).
    created = models.DateTimeField(default=datetime.datetime.now)
class Child(Parent):
    name = models.CharField(max_length=10)
class SelfRefParent(models.Model):
    parent_data = models.IntegerField()
    # Self-referential FK on the parent of an inheritance pair.
    self_data = models.ForeignKey('self', null=True)
class SelfRefChild(SelfRefParent):
    child_data = models.IntegerField()
@python_2_unicode_compatible
class Article(models.Model):
    headline = models.CharField(max_length=100)
    pub_date = models.DateTimeField()
    class Meta:
        ordering = ('-pub_date', 'headline')
    def __str__(self):
        return self.headline
class ArticleWithAuthor(Article):
    author = models.CharField(max_length=100)
# M2M declared on a concrete base, inherited by M2MChild.
class M2MBase(models.Model):
    articles = models.ManyToManyField(Article)
class M2MChild(M2MBase):
    name = models.CharField(max_length=50)
# Abstract model inserted between two concrete models.
class Evaluation(Article):
    quality = models.IntegerField()
    class Meta:
        abstract = True
class QualityControl(Evaluation):
    assignee = models.CharField(max_length=50)
@python_2_unicode_compatible
class BaseM(models.Model):
    base_name = models.CharField(max_length=100)
    def __str__(self):
        return self.base_name
@python_2_unicode_compatible
class DerivedM(BaseM):
    # Child that declares its own explicit primary key.
    customPK = models.IntegerField(primary_key=True)
    derived_name = models.CharField(max_length=100)
    def __str__(self):
        return "PK = %d, base_name = %s, derived_name = %s" % (
            self.customPK, self.base_name, self.derived_name)
class AuditBase(models.Model):
    planned_date = models.DateField()
    class Meta:
        abstract = True
        verbose_name_plural = 'Audits'
# Abstract child that inherits (and keeps) the parent's Meta.
class CertificationAudit(AuditBase):
    class Meta(AuditBase.Meta):
        abstract = True
class InternalCertificationAudit(CertificationAudit):
    auditing_dept = models.CharField(max_length=20)
# Check that abstract classes don't get m2m tables autocreated.
@python_2_unicode_compatible
class Person(models.Model):
    name = models.CharField(max_length=100)
    class Meta:
        ordering = ('name',)
    def __str__(self):
        return self.name
@python_2_unicode_compatible
class AbstractEvent(models.Model):
    name = models.CharField(max_length=100)
    # %(class)s gives each concrete subclass its own reverse accessor.
    attendees = models.ManyToManyField(Person, related_name="%(class)s_set")
    class Meta:
        abstract = True
        ordering = ('name',)
    def __str__(self):
        return self.name
class BirthdayParty(AbstractEvent):
    pass
class BachelorParty(AbstractEvent):
    pass
class MessyBachelorParty(BachelorParty):
    pass
# Check concrete -> abstract -> concrete inheritance
class SearchableLocation(models.Model):
    keywords = models.CharField(max_length=256)
class Station(SearchableLocation):
    name = models.CharField(max_length=128)
    class Meta:
        abstract = True
class BusStation(Station):
    bus_routes = models.CommaSeparatedIntegerField(max_length=128)
    inbound = models.BooleanField(default=False)
class TrainStation(Station):
    zone = models.IntegerField()
class User(models.Model):
    username = models.CharField(max_length=30, unique=True)
class Profile(User):
    # Child with its own AutoField pk on top of the implicit parent link.
    profile_id = models.AutoField(primary_key=True)
    extra = models.CharField(max_length=30, blank=True)
# Check concrete + concrete -> concrete -> concrete
class Politician(models.Model):
    politician_id = models.AutoField(primary_key=True)
    title = models.CharField(max_length=50)
class Congressman(Person, Politician):
    state = models.CharField(max_length=2)
class Senator(Congressman):
    pass
| bsd-3-clause |
piquadrat/django | tests/validation/test_picklable.py | 576 | 2010 | import pickle
from unittest import TestCase
from django.core.exceptions import ValidationError
class PickableValidationErrorTestCase(TestCase):
    def test_validationerror_is_picklable(self):
        """ValidationError survives a pickle round trip in every form it
        can be constructed: single message, wrapped error, message list,
        nested errors, and message dict."""
        # Single message: the unpickled error must be its own
        # error_list head and keep message and code.
        original = ValidationError('a', code='something')
        unpickled = pickle.loads(pickle.dumps(original))
        self.assertIs(unpickled, unpickled.error_list[0])
        self.assertEqual(original.message, unpickled.message)
        self.assertEqual(original.code, unpickled.code)
        # Same, but wrapping an existing ValidationError.
        original = ValidationError('a', code='something')
        unpickled = pickle.loads(pickle.dumps(ValidationError(original)))
        self.assertIs(unpickled, unpickled.error_list[0])
        self.assertEqual(original.message, unpickled.message)
        self.assertEqual(original.code, unpickled.code)
        # A plain list of messages.
        original = ValidationError(['a', 'b'])
        unpickled = pickle.loads(pickle.dumps(original))
        self.assertEqual(original.error_list[0].message, unpickled.error_list[0].message)
        self.assertEqual(original.error_list[1].message, unpickled.error_list[1].message)
        # A list of messages, wrapped again before pickling.
        original = ValidationError(['a', 'b'])
        unpickled = pickle.loads(pickle.dumps(ValidationError(original)))
        self.assertEqual(original.error_list[0].message, unpickled.error_list[0].message)
        self.assertEqual(original.error_list[1].message, unpickled.error_list[1].message)
        # A list of ValidationError instances; args and error_list must
        # reference the same objects after unpickling.
        original = ValidationError([ValidationError('a'), ValidationError('b')])
        unpickled = pickle.loads(pickle.dumps(original))
        self.assertIs(unpickled.args[0][0], unpickled.error_list[0])
        self.assertEqual(original.error_list[0].message, unpickled.error_list[0].message)
        self.assertEqual(original.error_list[1].message, unpickled.error_list[1].message)
        # A dict mapping field names to message lists.
        message_dict = {'field1': ['a', 'b'], 'field2': ['c', 'd']}
        original = ValidationError(message_dict)
        unpickled = pickle.loads(pickle.dumps(original))
        self.assertEqual(unpickled.message_dict, message_dict)
| bsd-3-clause |
beblount/Steer-Clear-Backend-Web | env/Lib/site-packages/jinja2/sandbox.py | 637 | 13445 | # -*- coding: utf-8 -*-
"""
jinja2.sandbox
~~~~~~~~~~~~~~
Adds a sandbox layer to Jinja as it was the default behavior in the old
Jinja 1 releases. This sandbox is slightly different from Jinja 1 as the
default behavior is easier to use.
The behavior can be changed by subclassing the environment.
:copyright: (c) 2010 by the Jinja Team.
:license: BSD.
"""
import operator
from jinja2.environment import Environment
from jinja2.exceptions import SecurityError
from jinja2._compat import string_types, function_type, method_type, \
traceback_type, code_type, frame_type, generator_type, PY2
#: maximum number of items a range may produce
MAX_RANGE = 100000

#: attributes of function objects that are considered unsafe.
UNSAFE_FUNCTION_ATTRIBUTES = set(['func_closure', 'func_code', 'func_dict',
                                  'func_defaults', 'func_globals'])

#: unsafe method attributes.  function attributes are unsafe for methods too
UNSAFE_METHOD_ATTRIBUTES = set(['im_class', 'im_func', 'im_self'])

#: unsafe generator attributes.
UNSAFE_GENERATOR_ATTRIBUTES = set(['gi_frame', 'gi_code'])

# On versions > python 2 the special attributes on functions are gone,
# but they remain on methods and generators for whatever reason.
if not PY2:
    UNSAFE_FUNCTION_ATTRIBUTES = set()

import warnings

# make sure we don't warn in python 2.6 about stuff we don't care about
warnings.filterwarnings('ignore', 'the sets module', DeprecationWarning,
                        module='jinja2.sandbox')

from collections import deque

_mutable_set_types = (set,)
_mutable_mapping_types = (dict,)
_mutable_sequence_types = (list,)

# on python 2.x we can register the user collection types
try:
    from UserDict import UserDict, DictMixin
    from UserList import UserList
    _mutable_mapping_types += (UserDict, DictMixin)
    # Fix: UserList is a mutable *sequence*, not a set.  Registering it
    # under the set types (as before) checked it against the set mutators,
    # so its append/insert/sort/... went undetected by
    # modifies_known_mutable.
    _mutable_sequence_types += (UserList,)
except ImportError:
    pass

# if sets is still available, register the mutable set from there as well
try:
    from sets import Set
    _mutable_set_types += (Set,)
except ImportError:
    pass

#: register Python 2.6 abstract base classes
try:
    from collections import MutableSet, MutableMapping, MutableSequence
    _mutable_set_types += (MutableSet,)
    _mutable_mapping_types += (MutableMapping,)
    _mutable_sequence_types += (MutableSequence,)
except ImportError:
    pass

#: (types, mutating-method-names) pairs consulted by
#: modifies_known_mutable.
_mutable_spec = (
    (_mutable_set_types, frozenset([
        'add', 'clear', 'difference_update', 'discard', 'pop', 'remove',
        'symmetric_difference_update', 'update'
    ])),
    (_mutable_mapping_types, frozenset([
        'clear', 'pop', 'popitem', 'setdefault', 'update'
    ])),
    (_mutable_sequence_types, frozenset([
        'append', 'reverse', 'insert', 'sort', 'extend', 'remove'
    ])),
    (deque, frozenset([
        'append', 'appendleft', 'clear', 'extend', 'extendleft', 'pop',
        'popleft', 'remove', 'rotate'
    ]))
)
def safe_range(*args):
    """Like :func:`range`, but refuses to produce a range longer than
    MAX_RANGE items so templates cannot exhaust memory.
    """
    result = range(*args)
    if len(result) <= MAX_RANGE:
        return result
    raise OverflowError('range too big, maximum size for range is %d' %
                        MAX_RANGE)
def unsafe(f):
    """Decorator that flags a function or method as unsafe so the
    sandbox refuses to call it.

    ::

        @unsafe
        def delete(self):
            pass
    """
    setattr(f, 'unsafe_callable', True)
    return f
def is_internal_attribute(obj, attr):
    """Test if the attribute given is an internal python attribute.  For
    example this function returns `True` for the `func_code` attribute of
    python objects.  This is useful if the environment method
    :meth:`~SandboxedEnvironment.is_safe_attribute` is overridden.
    >>> from jinja2.sandbox import is_internal_attribute
    >>> is_internal_attribute(lambda: None, "func_code")
    True
    >>> is_internal_attribute((lambda x:x).func_code, 'co_code')
    True
    >>> is_internal_attribute(str, "upper")
    False
    """
    # NOTE(review): the doctests above are Python 2 specific (func_code);
    # on Python 3 UNSAFE_FUNCTION_ATTRIBUTES is emptied at import time, so
    # for plain functions only dunder names are rejected.
    if isinstance(obj, function_type):
        if attr in UNSAFE_FUNCTION_ATTRIBUTES:
            return True
    elif isinstance(obj, method_type):
        # Methods expose both the function attrs and method-only attrs.
        if attr in UNSAFE_FUNCTION_ATTRIBUTES or \
           attr in UNSAFE_METHOD_ATTRIBUTES:
            return True
    elif isinstance(obj, type):
        # type.mro() would expose the class hierarchy.
        if attr == 'mro':
            return True
    elif isinstance(obj, (code_type, traceback_type, frame_type)):
        # Code/traceback/frame objects are entirely off limits.
        return True
    elif isinstance(obj, generator_type):
        if attr in UNSAFE_GENERATOR_ATTRIBUTES:
            return True
    # Everything dunder is considered internal regardless of the type.
    return attr.startswith('__')
def modifies_known_mutable(obj, attr):
    """This function checks if an attribute on a builtin mutable object
    (list, dict, set or deque) would modify it if called.  It also supports
    the "user"-versions of the objects (`sets.Set`, `UserDict.*` etc.) and
    with Python 2.6 onwards the abstract base classes `MutableSet`,
    `MutableMapping`, and `MutableSequence`.
    >>> modifies_known_mutable({}, "clear")
    True
    >>> modifies_known_mutable({}, "keys")
    False
    >>> modifies_known_mutable([], "append")
    True
    >>> modifies_known_mutable([], "index")
    False
    If called with an unsupported object (such as unicode) `False` is
    returned.
    >>> modifies_known_mutable("foo", "upper")
    False
    """
    # Find the first registered type family the object belongs to and
    # check the attribute against that family's mutating methods.
    for mutable_types, mutating_methods in _mutable_spec:
        if isinstance(obj, mutable_types):
            return attr in mutating_methods
    # Unknown object type: nothing to report.
    return False
class SandboxedEnvironment(Environment):
    """The sandboxed environment.  It works like the regular environment but
    tells the compiler to generate sandboxed code.  Additionally subclasses of
    this environment may override the methods that tell the runtime what
    attributes or functions are safe to access.
    If the template tries to access insecure code a :exc:`SecurityError` is
    raised.  However also other exceptions may occur during the rendering so
    the caller has to ensure that all exceptions are caught.
    """
    # Flag read by the compiler to emit sandboxed lookups/calls.
    sandboxed = True
    #: default callback table for the binary operators.  A copy of this is
    #: available on each instance of a sandboxed environment as
    #: :attr:`binop_table`
    default_binop_table = {
        '+':        operator.add,
        '-':        operator.sub,
        '*':        operator.mul,
        '/':        operator.truediv,
        '//':       operator.floordiv,
        '**':       operator.pow,
        '%':        operator.mod
    }
    #: default callback table for the unary operators.  A copy of this is
    #: available on each instance of a sandboxed environment as
    #: :attr:`unop_table`
    default_unop_table = {
        '+':        operator.pos,
        '-':        operator.neg
    }
    #: a set of binary operators that should be intercepted.  Each operator
    #: that is added to this set (empty by default) is delegated to the
    #: :meth:`call_binop` method that will perform the operator.  The default
    #: operator callback is specified by :attr:`binop_table`.
    #:
    #: The following binary operators are interceptable:
    #: ``//``, ``%``, ``+``, ``*``, ``-``, ``/``, and ``**``
    #:
    #: The default operation form the operator table corresponds to the
    #: builtin function.  Intercepted calls are always slower than the native
    #: operator call, so make sure only to intercept the ones you are
    #: interested in.
    #:
    #: .. versionadded:: 2.6
    intercepted_binops = frozenset()
    #: a set of unary operators that should be intercepted.  Each operator
    #: that is added to this set (empty by default) is delegated to the
    #: :meth:`call_unop` method that will perform the operator.  The default
    #: operator callback is specified by :attr:`unop_table`.
    #:
    #: The following unary operators are interceptable: ``+``, ``-``
    #:
    #: The default operation form the operator table corresponds to the
    #: builtin function.  Intercepted calls are always slower than the native
    #: operator call, so make sure only to intercept the ones you are
    #: interested in.
    #:
    #: .. versionadded:: 2.6
    intercepted_unops = frozenset()
    def intercept_unop(self, operator):
        """Called during template compilation with the name of a unary
        operator to check if it should be intercepted at runtime.  If this
        method returns `True`, :meth:`call_unop` is excuted for this unary
        operator.  The default implementation of :meth:`call_unop` will use
        the :attr:`unop_table` dictionary to perform the operator with the
        same logic as the builtin one.
        The following unary operators are interceptable: ``+`` and ``-``
        Intercepted calls are always slower than the native operator call,
        so make sure only to intercept the ones you are interested in.
        .. versionadded:: 2.6
        """
        return False
    def __init__(self, *args, **kwargs):
        Environment.__init__(self, *args, **kwargs)
        # Replace the builtin range with the bounded variant and give each
        # instance its own mutable copy of the operator tables.
        self.globals['range'] = safe_range
        self.binop_table = self.default_binop_table.copy()
        self.unop_table = self.default_unop_table.copy()
    def is_safe_attribute(self, obj, attr, value):
        """The sandboxed environment will call this method to check if the
        attribute of an object is safe to access.  Per default all attributes
        starting with an underscore are considered private as well as the
        special attributes of internal python objects as returned by the
        :func:`is_internal_attribute` function.
        """
        return not (attr.startswith('_') or is_internal_attribute(obj, attr))
    def is_safe_callable(self, obj):
        """Check if an object is safely callable.  Per default a function is
        considered safe unless the `unsafe_callable` attribute exists and is
        True.  Override this method to alter the behavior, but this won't
        affect the `unsafe` decorator from this module.
        """
        # NOTE(review): this version predates the str.format hardening that
        # later jinja2 releases added (CVE-2016-10745); upgrading jinja2 is
        # the real fix for that escape.
        return not (getattr(obj, 'unsafe_callable', False) or
                    getattr(obj, 'alters_data', False))
    def call_binop(self, context, operator, left, right):
        """For intercepted binary operator calls (:meth:`intercepted_binops`)
        this function is executed instead of the builtin operator.  This can
        be used to fine tune the behavior of certain operators.
        .. versionadded:: 2.6
        """
        return self.binop_table[operator](left, right)
    def call_unop(self, context, operator, arg):
        """For intercepted unary operator calls (:meth:`intercepted_unops`)
        this function is executed instead of the builtin operator.  This can
        be used to fine tune the behavior of certain operators.
        .. versionadded:: 2.6
        """
        return self.unop_table[operator](arg)
    def getitem(self, obj, argument):
        """Subscribe an object from sandboxed code."""
        # Try subscription first; for string keys fall back to (safe)
        # attribute access, mirroring template ``foo['bar']`` semantics.
        try:
            return obj[argument]
        except (TypeError, LookupError):
            if isinstance(argument, string_types):
                try:
                    attr = str(argument)
                except Exception:
                    pass
                else:
                    try:
                        value = getattr(obj, attr)
                    except AttributeError:
                        pass
                    else:
                        if self.is_safe_attribute(obj, argument, value):
                            return value
                        # Attribute exists but is off limits: undefined
                        # object that raises SecurityError on use.
                        return self.unsafe_undefined(obj, argument)
        return self.undefined(obj=obj, name=argument)
    def getattr(self, obj, attribute):
        """Subscribe an object from sandboxed code and prefer the
        attribute.  The attribute passed *must* be a bytestring.
        """
        # Inverse lookup order of getitem: attribute first, subscription
        # as the fallback (template ``foo.bar`` semantics).
        try:
            value = getattr(obj, attribute)
        except AttributeError:
            try:
                return obj[attribute]
            except (TypeError, LookupError):
                pass
        else:
            if self.is_safe_attribute(obj, attribute, value):
                return value
            return self.unsafe_undefined(obj, attribute)
        return self.undefined(obj=obj, name=attribute)
    def unsafe_undefined(self, obj, attribute):
        """Return an undefined object for unsafe attributes."""
        return self.undefined('access to attribute %r of %r '
                              'object is unsafe.' % (
            attribute,
            obj.__class__.__name__
        ), name=attribute, obj=obj, exc=SecurityError)
    def call(__self, __context, __obj, *args, **kwargs):
        """Call an object from sandboxed code."""
        # the double prefixes are to avoid double keyword argument
        # errors when proxying the call.
        if not __self.is_safe_callable(__obj):
            raise SecurityError('%r is not safely callable' % (__obj,))
        return __context.call(__obj, *args, **kwargs)
class ImmutableSandboxedEnvironment(SandboxedEnvironment):
    """A `SandboxedEnvironment` subclass that additionally rejects any
    attribute access that would mutate the builtin mutable containers
    (`list`, `set`, `dict` and their "user" counterparts), as detected
    by :func:`modifies_known_mutable`.
    """
    def is_safe_attribute(self, obj, attr, value):
        # Safe only if the base sandbox allows the attribute *and* it is
        # not a known mutating method of a builtin container.
        base_ok = SandboxedEnvironment.is_safe_attribute(self, obj, attr,
                                                         value)
        return base_ok and not modifies_known_mutable(obj, attr)
| mit |
Juniper/python-neutronclient | neutronclient/neutron/v2_0/vpn/ipsecpolicy.py | 4 | 4566 | # (c) Copyright 2013 Hewlett-Packard Development Company, L.P.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
import argparse
from neutronclient._i18n import _
from neutronclient.common import utils
from neutronclient.neutron import v2_0 as neutronv20
from neutronclient.neutron.v2_0.vpn import utils as vpn_utils
def add_common_args(parser, is_create=True):
    """Register the IPsec-policy options shared by create and update.

    For update commands (``is_create=False``) the defaults are replaced by
    ``argparse.SUPPRESS`` so options the user did not pass are omitted
    from the request body.
    """
    parser.add_argument(
        '--auth-algorithm',
        default='sha1' if is_create else argparse.SUPPRESS,
        type=utils.convert_to_lowercase,
        choices=['sha1', 'sha256', 'sha384', 'sha512'],
        help=_('Authentication algorithm for IPsec policy, default:sha1.'))
    parser.add_argument(
        '--description',
        help=_('Description of the IPsec policy.'))
    parser.add_argument(
        '--encapsulation-mode',
        default='tunnel' if is_create else argparse.SUPPRESS,
        choices=['tunnel', 'transport'],
        type=utils.convert_to_lowercase,
        help=_('Encapsulation mode for IPsec policy, default:tunnel.'))
    parser.add_argument(
        '--encryption-algorithm',
        default='aes-128' if is_create else argparse.SUPPRESS,
        type=utils.convert_to_lowercase,
        help=_('Encryption algorithm for IPsec policy, default:aes-128.'))
    parser.add_argument(
        '--lifetime',
        metavar="units=UNITS,value=VALUE",
        type=utils.str2dict_type(optional_keys=['units', 'value']),
        help=vpn_utils.lifetime_help("IPsec"))
    parser.add_argument(
        '--pfs',
        default='group5' if is_create else argparse.SUPPRESS,
        type=utils.convert_to_lowercase,
        help=_('Perfect Forward Secrecy for IPsec policy, default:group5.'))
    parser.add_argument(
        '--transform-protocol',
        default='esp' if is_create else argparse.SUPPRESS,
        type=utils.convert_to_lowercase,
        choices=['esp', 'ah', 'ah-esp'],
        help=_('Transform protocol for IPsec policy, default:esp.'))
def parse_common_args2body(parsed_args, body):
    """Copy the shared IPsec-policy options from *parsed_args* into *body*.

    Mutates and returns *body*.  The lifetime dict is validated before it
    is added.
    """
    neutronv20.update_dict(parsed_args, body,
                           ['auth_algorithm', 'encryption_algorithm',
                            'encapsulation_mode', 'transform_protocol',
                            'pfs', 'name', 'description', 'tenant_id'])
    if parsed_args.lifetime:
        vpn_utils.validate_lifetime_dict(parsed_args.lifetime)
        body['lifetime'] = parsed_args.lifetime
    return body
class ListIPsecPolicy(neutronv20.ListCommand):
    """List IPsec policies that belong to a given tenant connection."""
    resource = 'ipsecpolicy'
    # Columns shown in the list output.
    list_columns = ['id', 'name', 'auth_algorithm',
                    'encryption_algorithm', 'pfs']
    _formatters = {}
    pagination_support = True
    sorting_support = True
class ShowIPsecPolicy(neutronv20.ShowCommand):
    """Show information of a given IPsec policy."""
    resource = 'ipsecpolicy'
    # Human-readable name used in help/error text.
    help_resource = 'IPsec policy'
class CreateIPsecPolicy(neutronv20.CreateCommand):
    """Create an IPsec policy."""
    resource = 'ipsecpolicy'
    help_resource = 'IPsec policy'
    def add_known_arguments(self, parser):
        # NAME is positional and required on create; the rest are the
        # shared options with their create-time defaults.
        parser.add_argument(
            'name', metavar='NAME',
            help=_('Name of the IPsec policy.'))
        add_common_args(parser)
    def args2body(self, parsed_args):
        return {'ipsecpolicy': parse_common_args2body(parsed_args, body={})}
class UpdateIPsecPolicy(neutronv20.UpdateCommand):
    """Update a given IPsec policy."""
    resource = 'ipsecpolicy'
    help_resource = 'IPsec policy'
    def add_known_arguments(self, parser):
        # --name is optional on update; is_create=False suppresses the
        # shared options' defaults so only user-supplied values are sent.
        parser.add_argument(
            '--name',
            help=_('Updated name of the IPsec policy.'))
        add_common_args(parser, is_create=False)
    def args2body(self, parsed_args):
        return {'ipsecpolicy': parse_common_args2body(parsed_args, body={})}
class DeleteIPsecPolicy(neutronv20.DeleteCommand):
    """Delete a given IPsec policy."""
    resource = 'ipsecpolicy'
    help_resource = 'IPsec policy'
| apache-2.0 |
azumimuo/family-xbmc-addon | script.module.youtube.dl/lib/youtube_dl/extractor/firstpost.py | 60 | 1787 | from __future__ import unicode_literals
from .common import InfoExtractor
class FirstpostIE(InfoExtractor):
    """Extractor for firstpost.com article videos.

    Metadata comes from the article page's twitter meta tags; the format
    list comes from a per-video XML playlist endpoint.
    """
    _VALID_URL = r'https?://(?:www\.)?firstpost\.com/[^/]+/.*-(?P<id>[0-9]+)\.html'
    _TEST = {
        'url': 'http://www.firstpost.com/india/india-to-launch-indigenous-aircraft-carrier-monday-1025403.html',
        'md5': 'ee9114957692f01fb1263ed87039112a',
        'info_dict': {
            'id': '1025403',
            'ext': 'mp4',
            'title': 'India to launch indigenous aircraft carrier INS Vikrant today',
            'description': 'md5:feef3041cb09724e0bdc02843348f5f4',
        }
    }
    def _real_extract(self, url):
        video_id = self._match_id(url)
        page = self._download_webpage(url, video_id)
        title = self._html_search_meta('twitter:title', page, 'title', fatal=True)
        # Fix: display name was 'title' (copy-paste), which made log and
        # error messages refer to the wrong field.
        description = self._html_search_meta('twitter:description', page, 'description')
        data = self._download_xml(
            'http://www.firstpost.com/getvideoxml-%s.xml' % video_id, video_id,
            'Downloading video XML')
        item = data.find('./playlist/item')
        thumbnail = item.find('./image').text
        # One format per <file_details> node that actually carries a URL.
        formats = [
            {
                'url': details.find('./file').text,
                'format_id': details.find('./label').text.strip(),
                'width': int(details.find('./width').text.strip()),
                'height': int(details.find('./height').text.strip()),
            } for details in item.findall('./source/file_details') if details.find('./file').text
        ]
        self._sort_formats(formats)
        return {
            'id': video_id,
            'title': title,
            'description': description,
            'thumbnail': thumbnail,
            'formats': formats,
        }
| gpl-2.0 |
lilleswing/deepchem | deepchem/models/tests/test_atomic_conv.py | 1 | 5041 | """
Tests for Atomic Convolutions.
"""
import os
import pytest
import deepchem
import numpy as np
import tensorflow as tf
import unittest
import numpy as np
from deepchem.models import atomic_conv
from deepchem.data import NumpyDataset
from deepchem.feat import ComplexNeighborListFragmentAtomicCoordinates
class TestAtomicConv(unittest.TestCase):
  @pytest.mark.slow
  def test_atomic_conv(self):
    """A simple test that initializes and fits an AtomicConvModel."""
    # For simplicity, let's assume both molecules have same number of
    # atoms.
    N_atoms = 5
    batch_size = 1
    atomic_convnet = atomic_conv.AtomicConvModel(
        batch_size=batch_size,
        frag1_num_atoms=5,
        frag2_num_atoms=5,
        complex_num_atoms=10,
        learning_rate=0.003)
    # Creates a set of dummy features that contain the coordinate and
    # neighbor-list features required by the AtomicConvModel.
    # Neighbor lists are all empty: one list per atom index.
    features = []
    frag1_coords = np.random.rand(N_atoms, 3)
    frag1_nbr_list = {0: [], 1: [], 2: [], 3: [], 4: []}
    frag1_z = np.random.randint(10, size=(N_atoms))
    frag2_coords = np.random.rand(N_atoms, 3)
    frag2_nbr_list = {0: [], 1: [], 2: [], 3: [], 4: []}
    #frag2_z = np.random.rand(N_atoms, 3)
    frag2_z = np.random.randint(10, size=(N_atoms))
    # The "system" is the complex of both fragments (2 * N_atoms atoms).
    system_coords = np.random.rand(2 * N_atoms, 3)
    system_nbr_list = {
        0: [],
        1: [],
        2: [],
        3: [],
        4: [],
        5: [],
        6: [],
        7: [],
        8: [],
        9: []
    }
    system_z = np.random.randint(10, size=(2 * N_atoms))
    features.append(
        (frag1_coords, frag1_nbr_list, frag1_z, frag2_coords, frag2_nbr_list,
         frag2_z, system_coords, system_nbr_list, system_z))
    features = np.asarray(features)
    labels = np.random.rand(batch_size)
    train = NumpyDataset(features, labels)
    # Overfit the single random sample for many epochs and check the
    # model memorizes its label.
    atomic_convnet.fit(train, nb_epoch=300)
    assert np.allclose(labels, atomic_convnet.predict(train), atol=0.01)
@pytest.mark.slow
def test_atomic_conv_variable(self):
"""A simple test that initializes and fits an AtomicConvModel on variable input size."""
# For simplicity, let's assume both molecules have same number of
# atoms.
frag1_num_atoms = 1000
frag2_num_atoms = 1200
complex_num_atoms = frag1_num_atoms + frag2_num_atoms
batch_size = 1
atomic_convnet = atomic_conv.AtomicConvModel(
batch_size=batch_size,
frag1_num_atoms=frag1_num_atoms,
frag2_num_atoms=frag2_num_atoms,
complex_num_atoms=complex_num_atoms)
# Creates a set of dummy features that contain the coordinate and
# neighbor-list features required by the AtomicConvModel.
features = []
frag1_coords = np.random.rand(frag1_num_atoms, 3)
frag1_nbr_list = {i: [] for i in range(frag1_num_atoms)}
frag1_z = np.random.randint(10, size=(frag1_num_atoms))
frag2_coords = np.random.rand(frag2_num_atoms, 3)
frag2_nbr_list = {i: [] for i in range(frag2_num_atoms)}
frag2_z = np.random.randint(10, size=(frag2_num_atoms))
system_coords = np.random.rand(complex_num_atoms, 3)
system_nbr_list = {i: [] for i in range(complex_num_atoms)}
system_z = np.random.randint(10, size=(complex_num_atoms))
features.append(
(frag1_coords, frag1_nbr_list, frag1_z, frag2_coords, frag2_nbr_list,
frag2_z, system_coords, system_nbr_list, system_z))
features = np.asarray(features)
labels = np.zeros(batch_size)
train = NumpyDataset(features, labels)
atomic_convnet.fit(train, nb_epoch=1)
@pytest.mark.slow
def test_atomic_conv_with_feat(self):
"""A simple test for running an atomic convolution on featurized data."""
dir_path = os.path.dirname(os.path.realpath(__file__))
ligand_file = os.path.join(dir_path,
"../../feat/tests/data/3zso_ligand_hyd.pdb")
protein_file = os.path.join(dir_path,
"../../feat/tests/data/3zso_protein.pdb")
# Pulled from PDB files. For larger datasets with more PDBs, would use
# max num atoms instead of exact.
frag1_num_atoms = 44 # for ligand atoms
frag2_num_atoms = 2336 # for protein atoms
complex_num_atoms = 2380 # in total
max_num_neighbors = 4
# Cutoff in angstroms
neighbor_cutoff = 4
complex_featurizer = ComplexNeighborListFragmentAtomicCoordinates(
frag1_num_atoms, frag2_num_atoms, complex_num_atoms, max_num_neighbors,
neighbor_cutoff)
# arbitrary label
labels = np.array([0])
features, _ = complex_featurizer.featurize([ligand_file], [protein_file])
dataset = deepchem.data.DiskDataset.from_numpy(features, labels)
batch_size = 1
print("Constructing Atomic Conv model")
atomic_convnet = atomic_conv.AtomicConvModel(
batch_size=batch_size,
frag1_num_atoms=frag1_num_atoms,
frag2_num_atoms=frag2_num_atoms,
complex_num_atoms=complex_num_atoms)
print("About to call fit")
# Run a fitting operation
atomic_convnet.fit(dataset)
| mit |
atumanov/ray | python/ray/rllib/evaluation/worker_set.py | 1 | 8357 | from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import logging
from types import FunctionType
from ray.rllib.utils.annotations import DeveloperAPI
from ray.rllib.evaluation.rollout_worker import RolloutWorker, \
_validate_multiagent_config
from ray.rllib.offline import NoopOutput, JsonReader, MixedInput, JsonWriter, \
ShuffledInput
from ray.rllib.utils import merge_dicts, try_import_tf
from ray.rllib.utils.memory import ray_get_and_free
tf = try_import_tf()
logger = logging.getLogger(__name__)
@DeveloperAPI
class WorkerSet(object):
    """Represents a set of RolloutWorkers.
    There must be one local worker copy, and zero or more remote workers.
    """
    def __init__(self,
                 env_creator,
                 policy,
                 trainer_config=None,
                 num_workers=0,
                 logdir=None,
                 _setup=True):
        """Create a new WorkerSet and initialize its workers.
        Arguments:
            env_creator (func): Function that returns env given env config.
            policy (cls): rllib.policy.Policy class.
            trainer_config (dict): Optional dict that extends the common
                config of the Trainer class.
            num_workers (int): Number of remote rollout workers to create.
            logdir (str): Optional logging directory for workers.
            _setup (bool): Whether to setup workers. This is only for testing.
        """
        if not trainer_config:
            # Imported lazily to avoid a circular import with trainer.py.
            from ray.rllib.agents.trainer import COMMON_CONFIG
            trainer_config = COMMON_CONFIG
        self._env_creator = env_creator
        self._policy = policy
        self._remote_config = trainer_config
        self._num_workers = num_workers
        self._logdir = logdir
        if _setup:
            # The local (driver) worker uses the local tf session args
            # instead of the ones used by remote workers.
            self._local_config = merge_dicts(
                trainer_config,
                {"tf_session_args": trainer_config["local_tf_session_args"]})
            # Always create a local worker
            self._local_worker = self._make_worker(
                RolloutWorker, env_creator, policy, 0, self._local_config)
            # Create a number of remote workers
            self._remote_workers = []
            self.add_workers(num_workers)
    def local_worker(self):
        """Return the local rollout worker."""
        return self._local_worker
    def remote_workers(self):
        """Return a list of remote rollout workers."""
        return self._remote_workers
    def add_workers(self, num_workers):
        """Create and add a number of remote workers to this worker set.

        Remote worker indices start at 1; index 0 is the local worker.
        """
        # Per-actor Ray resource requirements come from the trainer config.
        remote_args = {
            "num_cpus": self._remote_config["num_cpus_per_worker"],
            "num_gpus": self._remote_config["num_gpus_per_worker"],
            "resources": self._remote_config["custom_resources_per_worker"],
        }
        cls = RolloutWorker.as_remote(**remote_args).remote
        self._remote_workers.extend([
            self._make_worker(cls, self._env_creator, self._policy, i + 1,
                              self._remote_config) for i in range(num_workers)
        ])
    def reset(self, new_remote_workers):
        """Called to change the set of remote workers."""
        self._remote_workers = new_remote_workers
    def stop(self):
        """Stop all rollout workers."""
        self.local_worker().stop()
        for w in self.remote_workers():
            w.stop.remote()
            # Also terminate the remote actor process itself.
            w.__ray_terminate__.remote()
    @DeveloperAPI
    def foreach_worker(self, func):
        """Apply the given function to each worker instance.

        Blocks until all remote results are ready; the local worker's
        result comes first, followed by the remote workers' results.
        """
        local_result = [func(self.local_worker())]
        remote_results = ray_get_and_free(
            [w.apply.remote(func) for w in self.remote_workers()])
        return local_result + remote_results
    @DeveloperAPI
    def foreach_worker_with_index(self, func):
        """Apply the given function to each worker instance.
        The index will be passed as the second arg to the given function.
        """
        local_result = [func(self.local_worker(), 0)]
        remote_results = ray_get_and_free([
            w.apply.remote(func, i + 1)
            for i, w in enumerate(self.remote_workers())
        ])
        return local_result + remote_results
    @staticmethod
    def _from_existing(local_worker, remote_workers=None):
        # Build a shell WorkerSet (skips setup) around pre-existing workers.
        workers = WorkerSet(None, None, {}, _setup=False)
        workers._local_worker = local_worker
        workers._remote_workers = remote_workers or []
        return workers
    def _make_worker(self, cls, env_creator, policy, worker_index, config):
        """Construct a single worker (local or remote) from the config."""
        def session_creator():
            logger.debug("Creating TF session {}".format(
                config["tf_session_args"]))
            return tf.Session(
                config=tf.ConfigProto(**config["tf_session_args"]))
        # Resolve the experience input source: custom callable, the live
        # environment sampler, a weighted mix of sources, or JSON files --
        # the latter two are wrapped in a shuffle buffer.
        if isinstance(config["input"], FunctionType):
            input_creator = config["input"]
        elif config["input"] == "sampler":
            input_creator = (lambda ioctx: ioctx.default_sampler_input())
        elif isinstance(config["input"], dict):
            input_creator = (lambda ioctx: ShuffledInput(
                MixedInput(config["input"], ioctx), config[
                    "shuffle_buffer_size"]))
        else:
            input_creator = (lambda ioctx: ShuffledInput(
                JsonReader(config["input"], ioctx), config[
                    "shuffle_buffer_size"]))
        # Resolve the experience output sink: custom callable, discard,
        # the worker's log dir, or an explicit output path.
        if isinstance(config["output"], FunctionType):
            output_creator = config["output"]
        elif config["output"] is None:
            output_creator = (lambda ioctx: NoopOutput())
        elif config["output"] == "logdir":
            output_creator = (lambda ioctx: JsonWriter(
                ioctx.log_dir,
                ioctx,
                max_file_size=config["output_max_file_size"],
                compress_columns=config["output_compress_columns"]))
        else:
            output_creator = (lambda ioctx: JsonWriter(
                config["output"],
                ioctx,
                max_file_size=config["output_max_file_size"],
                compress_columns=config["output_compress_columns"]))
        # Off-policy estimation is only meaningful for offline input.
        if config["input"] == "sampler":
            input_evaluation = []
        else:
            input_evaluation = config["input_evaluation"]
        # Fill in the default policy if 'None' is specified in multiagent
        if config["multiagent"]["policies"]:
            tmp = config["multiagent"]["policies"]
            _validate_multiagent_config(tmp, allow_none_graph=True)
            for k, v in tmp.items():
                if v[0] is None:
                    # (policy_cls, obs_space, action_space, config) 4-tuple.
                    tmp[k] = (policy, v[1], v[2], v[3])
            policy = tmp
        return cls(
            env_creator,
            policy,
            policy_mapping_fn=config["multiagent"]["policy_mapping_fn"],
            policies_to_train=config["multiagent"]["policies_to_train"],
            tf_session_creator=(session_creator
                                if config["tf_session_args"] else None),
            batch_steps=config["sample_batch_size"],
            batch_mode=config["batch_mode"],
            episode_horizon=config["horizon"],
            preprocessor_pref=config["preprocessor_pref"],
            sample_async=config["sample_async"],
            compress_observations=config["compress_observations"],
            num_envs=config["num_envs_per_worker"],
            observation_filter=config["observation_filter"],
            clip_rewards=config["clip_rewards"],
            clip_actions=config["clip_actions"],
            env_config=config["env_config"],
            model_config=config["model"],
            policy_config=config,
            worker_index=worker_index,
            monitor_path=self._logdir if config["monitor"] else None,
            log_dir=self._logdir,
            log_level=config["log_level"],
            callbacks=config["callbacks"],
            input_creator=input_creator,
            input_evaluation=input_evaluation,
            output_creator=output_creator,
            remote_worker_envs=config["remote_worker_envs"],
            remote_env_batch_wait_ms=config["remote_env_batch_wait_ms"],
            soft_horizon=config["soft_horizon"],
            _fake_sampler=config.get("_fake_sampler", False))
| apache-2.0 |
kyleknap/boto | tests/integration/sqs/test_bigmessage.py | 114 | 2688 | # Copyright (c) 2006-2010 Mitch Garnaat http://garnaat.org/
# Copyright (c) 2010, Eucalyptus Systems, Inc.
# All rights reserved.
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish, dis-
# tribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the fol-
# lowing conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
"""
Some unit tests for the SQSConnection
"""
import time
from threading import Timer
from tests.unit import unittest
import boto
from boto.compat import StringIO
from boto.sqs.bigmessage import BigMessage
from boto.exception import SQSError
class TestBigMessage(unittest.TestCase):
    """Integration test for SQS BigMessage (payload stored in S3)."""
    sqs = True
    def test_1_basic(self):
        sqs_conn = boto.connect_sqs()
        # Create a throwaway queue; its name doubles as the bucket name.
        queue_name = 'test%d' % int(time.time())
        timeout = 60
        queue = sqs_conn.create_queue(queue_name, timeout)
        self.addCleanup(sqs_conn.delete_queue, queue, True)
        queue.set_message_class(BigMessage)
        # Matching S3 bucket that will hold the actual message body.
        s3_conn = boto.connect_s3()
        bucket = s3_conn.create_bucket(queue_name)
        self.addCleanup(s3_conn.delete_bucket, queue_name)
        time.sleep(30)
        # Write a message whose body is redirected to S3.
        msg_body = 'This is a test of the big message'
        fp = StringIO(msg_body)
        s3_url = 's3://%s' % queue_name
        message = queue.new_message(fp, s3_url=s3_url)
        queue.write(message)
        time.sleep(30)
        s3_object_name = message.s3_url.split('/')[-1]
        # The payload must have landed in the bucket.
        self.assertTrue(bucket.lookup(s3_object_name))
        m = queue.read()
        self.assertEqual(m.get_body().decode('utf-8'), msg_body)
        m.delete()
        time.sleep(30)
        # Deleting the message must also remove the backing S3 object.
        self.assertIsNone(bucket.lookup(s3_object_name))
| mit |
Tatsh-ansible/ansible | lib/ansible/modules/network/ios/ios_config.py | 16 | 19899 | #!/usr/bin/python
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
ANSIBLE_METADATA = {'metadata_version': '1.0',
'status': ['preview'],
'supported_by': 'core'}
DOCUMENTATION = """
---
module: ios_config
version_added: "2.1"
author: "Peter Sprygada (@privateip)"
short_description: Manage Cisco IOS configuration sections
description:
- Cisco IOS configurations use a simple block indent file syntax
for segmenting configuration into sections. This module provides
an implementation for working with IOS configuration sections in
a deterministic way.
extends_documentation_fragment: ios
options:
lines:
description:
- The ordered set of commands that should be configured in the
section. The commands must be the exact same commands as found
in the device running-config. Be sure to note the configuration
command syntax as some commands are automatically modified by the
device config parser.
required: false
default: null
aliases: ['commands']
parents:
description:
- The ordered set of parents that uniquely identify the section
the commands should be checked against. If the parents argument
is omitted, the commands are checked against the set of top
level or global commands.
required: false
default: null
src:
description:
- Specifies the source path to the file that contains the configuration
or configuration template to load. The path to the source file can
either be the full path on the Ansible control host or a relative
path from the playbook or role root directory. This argument is mutually
exclusive with I(lines).
required: false
default: null
version_added: "2.2"
before:
description:
- The ordered set of commands to push on to the command stack if
a change needs to be made. This allows the playbook designer
the opportunity to perform configuration commands prior to pushing
any changes without affecting how the set of commands are matched
against the system.
required: false
default: null
after:
description:
- The ordered set of commands to append to the end of the command
stack if a change needs to be made. Just like with I(before) this
allows the playbook designer to append a set of commands to be
executed after the command set.
required: false
default: null
match:
description:
- Instructs the module on the way to perform the matching of
the set of commands against the current device config. If
match is set to I(line), commands are matched line by line. If
match is set to I(strict), command lines are matched with respect
to position. If match is set to I(exact), command lines
must be an equal match. Finally, if match is set to I(none), the
module will not attempt to compare the source configuration with
the running configuration on the remote device.
required: false
default: line
choices: ['line', 'strict', 'exact', 'none']
replace:
description:
- Instructs the module on the way to perform the configuration
on the device. If the replace argument is set to I(line) then
the modified lines are pushed to the device in configuration
mode. If the replace argument is set to I(block) then the entire
command block is pushed to the device in configuration mode if any
line is not correct.
required: false
default: line
choices: ['line', 'block']
multiline_delimiter:
description:
- This argument is used when pushing a multiline configuration
element to the IOS device. It specifies the character to use
as the delimiting character. This only applies to the
configuration action.
required: false
default: "@"
version_added: "2.3"
force:
description:
- The force argument instructs the module to not consider the
current devices running-config. When set to true, this will
cause the module to push the contents of I(src) into the device
without first checking if already configured.
- Note this argument should be considered deprecated. To achieve
the equivalent, set the C(match=none) which is idempotent. This argument
will be removed in a future release.
required: false
default: false
type: bool
backup:
description:
- This argument will cause the module to create a full backup of
the current C(running-config) from the remote device before any
changes are made. The backup file is written to the C(backup)
folder in the playbook root directory. If the directory does not
exist, it is created.
required: false
default: no
type: bool
version_added: "2.2"
running_config:
description:
- The module, by default, will connect to the remote device and
retrieve the current running-config to use as a base for comparing
against the contents of source. There are times when it is not
desirable to have the task get the current running-config for
every task in a playbook. The I(running_config) argument allows the
implementer to pass in the configuration to use as the base
config for comparison.
required: false
default: null
aliases: ['config']
version_added: "2.4"
defaults:
description:
- This argument specifies whether or not to collect all defaults
when getting the remote device running config. When enabled,
the module will get the current config by issuing the command
C(show running-config all).
required: false
default: no
type: bool
version_added: "2.2"
save:
description:
- The C(save) argument instructs the module to save the running-
config to the startup-config at the conclusion of the module
running. If check mode is specified, this argument is ignored.
- This option is deprecated as of Ansible 2.4, use C(save_when)
required: false
default: false
type: bool
version_added: "2.2"
save_when:
description:
- When changes are made to the device running-configuration, the
changes are not copied to non-volatile storage by default. Using
        this argument will change that behavior. If the argument is set to
I(always), then the running-config will always be copied to the
startup-config and the I(modified) flag will always be set to
True. If the argument is set to I(modified), then the running-config
will only be copied to the startup-config if it has changed since
the last save to startup-config. If the argument is set to
        I(never), the running-config will never be copied to the
startup-config
required: false
default: never
choices: ['always', 'never', 'modified']
version_added: "2.4"
diff_against:
description:
- When using the C(ansible-playbook --diff) command line argument
the module can generate diffs against different sources.
- When this option is configure as I(startup), the module will return
the diff of the running-config against the startup-config.
- When this option is configured as I(intended), the module will
return the diff of the running-config against the configuration
provided in the C(intended_config) argument.
- When this option is configured as I(running), the module will
return the before and after diff of the running-config with respect
to any changes made to the device configuration.
required: false
choices: ['running', 'startup', 'intended']
version_added: "2.4"
diff_ignore_lines:
description:
- Use this argument to specify one or more lines that should be
ignored during the diff. This is used for lines in the configuration
that are automatically updated by the system. This argument takes
a list of regular expressions or exact line matches.
required: false
version_added: "2.4"
intended_config:
description:
- The C(intended_config) provides the master configuration that
the node should conform to and is used to check the final
running-config against. This argument will not modify any settings
on the remote device and is strictly used to check the compliance
of the current device's configuration against. When specifying this
argument, the task should also modify the C(diff_against) value and
set it to I(intended).
required: false
version_added: "2.4"
"""
# NOTE: the intended-config example previously used a non-existent
# `diff_config` option; the module option is `diff_against`.
EXAMPLES = """
- name: configure top level configuration
  ios_config:
    lines: hostname {{ inventory_hostname }}
- name: configure interface settings
  ios_config:
    lines:
      - description test interface
      - ip address 172.31.1.1 255.255.255.0
    parents: interface Ethernet1
- name: load new acl into device
  ios_config:
    lines:
      - 10 permit ip host 1.1.1.1 any log
      - 20 permit ip host 2.2.2.2 any log
      - 30 permit ip host 3.3.3.3 any log
      - 40 permit ip host 4.4.4.4 any log
      - 50 permit ip host 5.5.5.5 any log
    parents: ip access-list extended test
    before: no ip access-list extended test
    match: exact
- name: check the running-config against master config
  ios_config:
    diff_against: intended
    intended_config: "{{ lookup('file', 'master.cfg') }}"
- name: check the startup-config against the running-config
  ios_config:
    diff_against: startup
    diff_ignore_lines:
      - ntp clock .*
- name: save running to startup when modified
  ios_config:
    save_when: modified
"""
RETURN = """
updates:
description: The set of commands that will be pushed to the remote device
returned: always
type: list
sample: ['hostname foo', 'router ospf 1', 'router-id 1.1.1.1']
commands:
description: The set of commands that will be pushed to the remote device
returned: always
type: list
sample: ['hostname foo', 'router ospf 1', 'router-id 1.1.1.1']
backup_path:
description: The full path to the backup file
returned: when backup is yes
type: string
sample: /playbooks/ansible/backup/ios_config.2016-07-16@22:28:34
"""
import re
import time
from ansible.module_utils.ios import run_commands, get_config, load_config
from ansible.module_utils.ios import get_defaults_flag
from ansible.module_utils.ios import ios_argument_spec
from ansible.module_utils.ios import check_args as ios_check_args
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.netcli import Conditional
from ansible.module_utils.netcfg import NetworkConfig, dumps
from ansible.module_utils.six import iteritems
def check_args(module, warnings):
    """Run the shared ios argument checks plus module-specific validation."""
    ios_check_args(module, warnings)
    delimiter = module.params['multiline_delimiter']
    # The delimiter terminates multiline banner input on the device CLI,
    # so it must be exactly one character.
    if delimiter and len(delimiter) != 1:
        module.fail_json(
            msg='multiline_delimiter value can only be a single character')
def extract_banners(config):
    """Split banner definitions out of an IOS configuration blob.

    Returns a (config, banners) tuple where ``banners`` maps a
    'banner <type>' key to its stripped body text, and ``config`` is the
    input with each banner body blanked out and the now-empty banner
    statement replaced by a '!! banner removed' marker.
    """
    banners = {}
    banner_cmds = re.findall(r'^banner (\w+)', config, re.M)
    # First pass: capture each banner body (text between ^C delimiters;
    # re.S lets '.' span multiple lines).
    for cmd in banner_cmds:
        found = re.search(r'banner %s \^C(.+?)(?=\^C)' % cmd, config, re.S)
        if found:
            banners['banner %s' % cmd] = found.group(1).strip()
    # Second pass: blank the banner bodies out of the working config.
    for cmd in banner_cmds:
        found = re.search(r'banner %s \^C(.+?)(?=\^C)' % cmd, config, re.S)
        if found:
            config = config.replace(str(found.group(1)), '')
    # Leave a marker where each emptied banner statement used to be.
    config = re.sub(r'banner \w+ \^C\^C', '!! banner removed', config)
    return (config, banners)
def diff_banners(want, have):
    """Return the entries of ``want`` whose values differ from ``have``.

    Both arguments map 'banner <type>' keys to banner text; the result is
    the subset of ``want`` that still needs to be pushed to the device.
    """
    # dict.items() works on both py2 and py3, so the six.iteritems
    # indirection is unnecessary here.
    return {key: value
            for key, value in want.items() if value != have.get(key)}
def load_banners(module, banners):
    """Push banner configuration to the device.

    Each banner is a multiline config element, so it is sent line by line
    inside a config-mode session and terminated with the configured
    delimiter character.
    """
    delimiter = module.params['multiline_delimiter']
    for key, value in iteritems(banners):
        key += ' %s' % delimiter
        for cmd in ['config terminal', key, value, delimiter, 'end']:
            # Send each line individually and pause briefly so the device
            # CLI can keep up with the multiline input.
            # (Removed a dead `obj = {'command': ..., 'sendonly': True}`
            # dict that was built here but never used.)
            run_commands(module, [cmd])
            time.sleep(0.1)
        run_commands(module, ['\n'])
def get_running_config(module, current_config=None):
    """Return the device running-config and its extracted banners.

    Uses the user-supplied C(running_config) parameter when given,
    otherwise reuses ``current_config`` (a NetworkConfig fetched earlier)
    or pulls the config from the device (with defaults if requested).

    Returns a (NetworkConfig, banners dict) tuple.
    """
    contents = module.params['running_config']
    if not contents:
        if not module.params['defaults'] and current_config:
            contents = current_config.config_text
        else:
            flags = get_defaults_flag(module) if module.params['defaults'] else []
            contents = get_config(module, flags=flags)
    # Always split banners out of whichever config source was used.
    # Previously a user-supplied running_config skipped this step, leaving
    # `banners` unbound and raising UnboundLocalError at return time.
    contents, banners = extract_banners(contents)
    return NetworkConfig(indent=1, contents=contents), banners
def get_candidate(module):
    """Build the candidate config from the `src` or `lines` parameters.

    Returns a (NetworkConfig, banners dict) tuple; banners only come from
    `src`, since `lines` cannot express multiline banner bodies.
    """
    banners = {}
    candidate = NetworkConfig(indent=1)
    if module.params['src']:
        # Banners are handled separately from the line-based config diff.
        src, banners = extract_banners(module.params['src'])
        candidate.load(src)
    elif module.params['lines']:
        candidate.add(module.params['lines'],
                      parents=module.params['parents'] or [])
    return candidate, banners
def main():
    """ main entry point for module execution
    """
    argument_spec = dict(
        src=dict(type='path'),
        lines=dict(aliases=['commands'], type='list'),
        parents=dict(type='list'),
        before=dict(type='list'),
        after=dict(type='list'),
        match=dict(default='line', choices=['line', 'strict', 'exact', 'none']),
        replace=dict(default='line', choices=['line', 'block']),
        multiline_delimiter=dict(default='@'),
        running_config=dict(aliases=['config']),
        intended_config=dict(),
        defaults=dict(type='bool', default=False),
        backup=dict(type='bool', default=False),
        save_when=dict(choices=['always', 'never', 'modified'], default='never'),
        diff_against=dict(choices=['startup', 'intended', 'running']),
        diff_ignore_lines=dict(type='list'),
        # save is deprecated as of ans2.4, use save_when instead
        save=dict(default=False, type='bool', removed_in_version='2.4'),
        # force argument deprecated in ans2.2
        force=dict(default=False, type='bool', removed_in_version='2.2')
    )
    argument_spec.update(ios_argument_spec)
    mutually_exclusive = [('lines', 'src'),
                          ('save', 'save_when')]
    required_if = [('match', 'strict', ['lines']),
                   ('match', 'exact', ['lines']),
                   ('replace', 'block', ['lines']),
                   ('diff_against', 'intended', ['intended_config'])]
    module = AnsibleModule(argument_spec=argument_spec,
                           mutually_exclusive=mutually_exclusive,
                           required_if=required_if,
                           supports_check_mode=True)
    result = {'changed': False}
    warnings = list()
    check_args(module, warnings)
    result['warnings'] = warnings
    config = None
    # Fetch the current running-config up front when it is needed either
    # for a backup copy or for a later diff_against=running comparison.
    if module.params['backup'] or (module._diff and module.params['diff_against'] == 'running'):
        contents = get_config(module)
        config = NetworkConfig(indent=1, contents=contents)
        if module.params['backup']:
            result['__backup__'] = contents
    if any((module.params['lines'], module.params['src'])):
        match = module.params['match']
        replace = module.params['replace']
        path = module.params['parents']
        # Candidate config and banners the user wants on the device.
        candidate, want_banners = get_candidate(module)
        if match != 'none':
            config, have_banners = get_running_config(module, config)
            path = module.params['parents']
            configobjs = candidate.difference(config, path=path, match=match, replace=replace)
        else:
            # match=none skips the diff entirely and pushes everything.
            configobjs = candidate.items
            have_banners = {}
        banners = diff_banners(want_banners, have_banners)
        if configobjs or banners:
            commands = dumps(configobjs, 'commands').split('\n')
            if module.params['before']:
                commands[:0] = module.params['before']
            if module.params['after']:
                commands.extend(module.params['after'])
            result['commands'] = commands
            result['updates'] = commands
            result['banners'] = banners
            # send the configuration commands to the device and merge
            # them with the current running config
            if not module.check_mode:
                if commands:
                    load_config(module, commands)
                if banners:
                    load_banners(module, banners)
            result['changed'] = True
    running_config = None
    startup_config = None
    diff_ignore_lines = module.params['diff_ignore_lines']
    if module.params['save_when'] != 'never':
        # Copy run->start only when it actually changed (save_when=modified)
        # or unconditionally for save_when=always.
        output = run_commands(module, ['show running-config', 'show startup-config'])
        running_config = NetworkConfig(indent=1, contents=output[0], ignore_lines=diff_ignore_lines)
        startup_config = NetworkConfig(indent=1, contents=output[1], ignore_lines=diff_ignore_lines)
        if running_config.sha1 != startup_config.sha1 or module.params['save_when'] == 'always':
            result['changed'] = True
            if not module.check_mode:
                run_commands(module, 'copy running-config startup-config')
            else:
                module.warn('Skipping command `copy running-config startup-config` '
                            'due to check_mode. Configuration not copied to '
                            'non-volatile storage')
    if module._diff:
        if not running_config:
            output = run_commands(module, 'show running-config')
            contents = output[0]
        else:
            contents = running_config.config_text
        # recreate the object in order to process diff_ignore_lines
        running_config = NetworkConfig(indent=1, contents=contents, ignore_lines=diff_ignore_lines)
        # Pick the baseline for the diff according to diff_against.
        if module.params['diff_against'] == 'running':
            if module.check_mode:
                module.warn("unable to perform diff against running-config due to check mode")
                contents = None
            else:
                contents = config.config_text
        elif module.params['diff_against'] == 'startup':
            if not startup_config:
                output = run_commands(module, 'show startup-config')
                contents = output[0]
            else:
                contents = startup_config.config_text
        elif module.params['diff_against'] == 'intended':
            contents = module.params['intended_config']
        if contents is not None:
            base_config = NetworkConfig(indent=1, contents=contents, ignore_lines=diff_ignore_lines)
            if running_config.sha1 != base_config.sha1:
                result.update({
                    'changed': True,
                    'diff': {'before': str(base_config), 'after': str(running_config)}
                })
    module.exit_json(**result)
if __name__ == '__main__':
    main()
| gpl-3.0 |
josthkko/ggrc-core | src/ggrc/migrations/versions/20160113150536_4003827b3d48_drop_unused_tables.py | 7 | 12994 | # Copyright (C) 2016 Google Inc.
# Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file>
"""Drop unused tables
Revision ID: 4003827b3d48
Revises: 5410607088f9
Create Date: 2016-01-13 15:05:36.008456
"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import mysql
# revision identifiers, used by Alembic.
revision = '4003827b3d48'
down_revision = '5410607088f9'
def upgrade():
  """Drop association tables that are no longer used by the application."""
  obsolete_tables = (
      "object_sections",
      "control_sections",
      "objective_controls",
      "program_controls",
      "directive_controls",
      "control_controls",
      "calendar_entries",
      "object_objectives",
      "object_controls",
      "section_objectives",
      "program_directives",
      "directive_sections",
  )
  for table_name in obsolete_tables:
    try:
      op.drop_table(table_name)
    except sa.exc.OperationalError as operr:
      # MySQL error 1051 ("unknown table") means the table is already
      # gone, which is fine; re-raise anything else.
      error_code, _ = operr.orig.args  # error_code, message
      if error_code != 1051:
        raise operr
def downgrade():
    """Recreate the association tables removed by :func:`upgrade`.

    Column definitions, column order and table options are identical to
    the originally dropped tables; only the construction of the DDL is
    factored through small helpers because most tables share the same
    column shapes.
    """
    table_kwargs = dict(mysql_default_charset=u'utf8',
                        mysql_engine=u'InnoDB')

    def int_column(name, **kwargs):
        # Plain INT(11) column with caller-supplied options.
        return sa.Column(name, mysql.INTEGER(display_width=11), **kwargs)

    def pk_column():
        # Surrogate primary key (autoincrement left at its default).
        return int_column('id', nullable=False)

    def ref_column(name):
        # Mandatory foreign-key-style integer column.
        return int_column(name, autoincrement=False, nullable=False)

    def nullable_ref_column(name):
        # Optional foreign-key-style integer column.
        return int_column(name, autoincrement=False, nullable=True)

    def varchar_column(name, nullable=True):
        return sa.Column(name, mysql.VARCHAR(length=250), nullable=nullable)

    def datetime_column(name):
        return sa.Column(name, mysql.DATETIME(), nullable=True)

    def audit_columns():
        # Common bookkeeping prefix shared by most association tables.
        return [nullable_ref_column('modified_by_id'),
                datetime_column('created_at'),
                datetime_column('updated_at')]

    def create_join_table(name, left, right, context_before_refs=False):
        # Simple many-to-many table:
        #   id, modified_by_id, created_at, updated_at,
        #   <left>, <right>, context_id, status
        # (program_controls places context_id before the two refs).
        columns = [pk_column()] + audit_columns()
        refs = [ref_column(left), ref_column(right)]
        if context_before_refs:
            columns += [nullable_ref_column('context_id')] + refs
        else:
            columns += refs + [nullable_ref_column('context_id')]
        columns.append(varchar_column('status'))
        columns.append(sa.PrimaryKeyConstraint('id'))
        op.create_table(name, *columns, **table_kwargs)

    def create_polymorphic_table(name, ref, target_prefix):
        # Generic "object_*" table with date range, role/notes and a
        # polymorphic (<prefix>_id, <prefix>_type) target reference.
        columns = [pk_column()] + audit_columns() + [
            sa.Column('start_date', sa.DATE(), nullable=True),
            sa.Column('end_date', sa.DATE(), nullable=True),
            varchar_column('role'),
            sa.Column('notes', mysql.TEXT(), nullable=True),
            ref_column(ref),
            ref_column(target_prefix + '_id'),
            varchar_column(target_prefix + '_type', nullable=False),
            nullable_ref_column('context_id'),
            varchar_column('status'),
            sa.PrimaryKeyConstraint('id'),
        ]
        op.create_table(name, *columns, **table_kwargs)

    # directive_sections uses its own, non-standard column order.
    op.create_table(
        'directive_sections',
        pk_column(),
        ref_column('directive_id'),
        ref_column('section_id'),
        varchar_column('status'),
        datetime_column('created_at'),
        nullable_ref_column('modified_by_id'),
        datetime_column('updated_at'),
        nullable_ref_column('context_id'),
        sa.PrimaryKeyConstraint('id'),
        **table_kwargs
    )
    create_join_table('program_directives', 'program_id', 'directive_id')
    create_join_table('section_objectives', 'section_id', 'objective_id')
    create_polymorphic_table('object_controls', 'control_id', 'controllable')
    create_polymorphic_table('object_objectives', 'objective_id',
                             'objectiveable')
    # calendar_entries is the only table with foreign key constraints.
    op.create_table(
        'calendar_entries',
        pk_column(),
        varchar_column('name'),
        varchar_column('calendar_id'),
        datetime_column('created_at'),
        nullable_ref_column('modified_by_id'),
        datetime_column('updated_at'),
        nullable_ref_column('context_id'),
        nullable_ref_column('owner_id'),
        sa.ForeignKeyConstraint(['context_id'], [u'contexts.id'],
                                name=u'calendar_entries_ibfk_1'),
        sa.ForeignKeyConstraint(['owner_id'], [u'people.id'],
                                name=u'calendar_entries_ibfk_2'),
        sa.PrimaryKeyConstraint('id'),
        **table_kwargs
    )
    create_join_table('control_controls', 'control_id',
                      'implemented_control_id')
    create_join_table('directive_controls', 'directive_id', 'control_id')
    create_join_table('program_controls', 'program_id', 'control_id',
                      context_before_refs=True)
    create_join_table('objective_controls', 'objective_id', 'control_id')
    create_join_table('control_sections', 'control_id', 'section_id')
    create_polymorphic_table('object_sections', 'section_id', 'sectionable')
| apache-2.0 |
Jannes123/django-oscar | src/oscar/apps/address/abstract_models.py | 7 | 20495 | import re
import zlib
from django.conf import settings
from django.core import exceptions
from django.db import models
from django.utils.encoding import python_2_unicode_compatible
from django.utils.six.moves import filter
from django.utils.translation import ugettext_lazy as _
from django.utils.translation import pgettext_lazy
from oscar.core.compat import AUTH_USER_MODEL
from oscar.models.fields import PhoneNumberField, UppercaseCharField
@python_2_unicode_compatible
class AbstractAddress(models.Model):
    """
    Superclass address object

    This is subclassed and extended to provide models for
    user, shipping and billing addresses.
    """
    # Customer titles, used in the salutation.
    MR, MISS, MRS, MS, DR = ('Mr', 'Miss', 'Mrs', 'Ms', 'Dr')
    TITLE_CHOICES = (
        (MR, _("Mr")),
        (MISS, _("Miss")),
        (MRS, _("Mrs")),
        (MS, _("Ms")),
        (DR, _("Dr")),
    )

    # Whether the project's settings declare the postcode as mandatory.
    POSTCODE_REQUIRED = 'postcode' in settings.OSCAR_REQUIRED_ADDRESS_FIELDS

    # Regex for each country. Not listed countries don't use postcodes
    # Based on http://en.wikipedia.org/wiki/List_of_postal_codes
    POSTCODES_REGEX = {
        'AC': r'^[A-Z]{4}[0-9][A-Z]$',
        'AD': r'^AD[0-9]{3}$',
        'AF': r'^[0-9]{4}$',
        'AI': r'^AI-2640$',
        'AL': r'^[0-9]{4}$',
        'AM': r'^[0-9]{4}$',
        'AR': r'^([0-9]{4}|[A-Z][0-9]{4}[A-Z]{3})$',
        'AS': r'^[0-9]{5}(-[0-9]{4}|-[0-9]{6})?$',
        'AT': r'^[0-9]{4}$',
        'AU': r'^[0-9]{4}$',
        'AX': r'^[0-9]{5}$',
        'AZ': r'^AZ[0-9]{4}$',
        'BA': r'^[0-9]{5}$',
        'BB': r'^BB[0-9]{5}$',
        'BD': r'^[0-9]{4}$',
        'BE': r'^[0-9]{4}$',
        'BG': r'^[0-9]{4}$',
        'BH': r'^[0-9]{3,4}$',
        'BL': r'^[0-9]{5}$',
        # Anchored with '$' so trailing characters are rejected.
        'BM': r'^[A-Z]{2}([0-9]{2}|[A-Z]{2})$',
        # Fixed: the brackets were previously transposed
        # (r'^[A-Z}{2}[0-9]]{4}$'), which never matched a real postcode.
        'BN': r'^[A-Z]{2}[0-9]{4}$',
        'BO': r'^[0-9]{4}$',
        'BR': r'^[0-9]{5}(-[0-9]{3})?$',
        'BT': r'^[0-9]{3}$',
        'BY': r'^[0-9]{6}$',
        'CA': r'^[A-Z][0-9][A-Z][0-9][A-Z][0-9]$',
        'CC': r'^[0-9]{4}$',
        'CH': r'^[0-9]{4}$',
        'CL': r'^([0-9]{7}|[0-9]{3}-[0-9]{4})$',
        'CN': r'^[0-9]{6}$',
        'CO': r'^[0-9]{6}$',
        'CR': r'^[0-9]{4,5}$',
        'CU': r'^[0-9]{5}$',
        'CV': r'^[0-9]{4}$',
        'CX': r'^[0-9]{4}$',
        'CY': r'^[0-9]{4}$',
        'CZ': r'^[0-9]{5}$',
        'DE': r'^[0-9]{5}$',
        'DK': r'^[0-9]{4}$',
        'DO': r'^[0-9]{5}$',
        'DZ': r'^[0-9]{5}$',
        'EC': r'^EC[0-9]{6}$',
        'EE': r'^[0-9]{5}$',
        'EG': r'^[0-9]{5}$',
        'ES': r'^[0-9]{5}$',
        'ET': r'^[0-9]{4}$',
        'FI': r'^[0-9]{5}$',
        'FK': r'^[A-Z]{4}[0-9][A-Z]{2}$',
        'FM': r'^[0-9]{5}(-[0-9]{4})?$',
        'FO': r'^[0-9]{3}$',
        'FR': r'^[0-9]{5}$',
        'GA': r'^[0-9]{2}.*[0-9]{2}$',
        'GB': r'^[A-Z][A-Z0-9]{1,3}[0-9][A-Z]{2}$',
        'GE': r'^[0-9]{4}$',
        'GF': r'^[0-9]{5}$',
        'GG': r'^([A-Z]{2}[0-9]{2,3}[A-Z]{2})$',
        'GI': r'^GX111AA$',
        'GL': r'^[0-9]{4}$',
        'GP': r'^[0-9]{5}$',
        'GR': r'^[0-9]{5}$',
        'GS': r'^SIQQ1ZZ$',
        'GT': r'^[0-9]{5}$',
        'GU': r'^[0-9]{5}$',
        'GW': r'^[0-9]{4}$',
        'HM': r'^[0-9]{4}$',
        'HN': r'^[0-9]{5}$',
        'HR': r'^[0-9]{5}$',
        'HT': r'^[0-9]{4}$',
        'HU': r'^[0-9]{4}$',
        'ID': r'^[0-9]{5}$',
        'IL': r'^[0-9]{7}$',
        # Fixed: the pattern previously ended with a doubled '$$'.
        'IM': r'^IM[0-9]{2,3}[A-Z]{2}$',
        'IN': r'^[0-9]{6}$',
        'IO': r'^[A-Z]{4}[0-9][A-Z]{2}$',
        'IQ': r'^[0-9]{5}$',
        'IR': r'^[0-9]{5}-[0-9]{5}$',
        'IS': r'^[0-9]{3}$',
        'IT': r'^[0-9]{5}$',
        'JE': r'^JE[0-9]{2}[A-Z]{2}$',
        'JM': r'^JM[A-Z]{3}[0-9]{2}$',
        'JO': r'^[0-9]{5}$',
        'JP': r'^[0-9]{3}-?[0-9]{4}$',
        'KE': r'^[0-9]{5}$',
        'KG': r'^[0-9]{6}$',
        'KH': r'^[0-9]{5}$',
        'KR': r'^[0-9]{3}-?[0-9]{3}$',
        'KY': r'^KY[0-9]-[0-9]{4}$',
        'KZ': r'^[0-9]{6}$',
        'LA': r'^[0-9]{5}$',
        'LB': r'^[0-9]{8}$',
        'LI': r'^[0-9]{4}$',
        'LK': r'^[0-9]{5}$',
        'LR': r'^[0-9]{4}$',
        'LS': r'^[0-9]{3}$',
        'LT': r'^(LT-)?[0-9]{5}$',
        'LU': r'^[0-9]{4}$',
        'LV': r'^LV-[0-9]{4}$',
        'LY': r'^[0-9]{5}$',
        'MA': r'^[0-9]{5}$',
        'MC': r'^980[0-9]{2}$',
        'MD': r'^MD-?[0-9]{4}$',
        'ME': r'^[0-9]{5}$',
        'MF': r'^[0-9]{5}$',
        'MG': r'^[0-9]{3}$',
        'MH': r'^[0-9]{5}$',
        'MK': r'^[0-9]{4}$',
        'MM': r'^[0-9]{5}$',
        'MN': r'^[0-9]{5}$',
        'MP': r'^[0-9]{5}$',
        'MQ': r'^[0-9]{5}$',
        'MT': r'^[A-Z]{3}[0-9]{4}$',
        'MV': r'^[0-9]{4,5}$',
        'MX': r'^[0-9]{5}$',
        'MY': r'^[0-9]{5}$',
        'MZ': r'^[0-9]{4}$',
        'NA': r'^[0-9]{5}$',
        'NC': r'^[0-9]{5}$',
        'NE': r'^[0-9]{4}$',
        'NF': r'^[0-9]{4}$',
        'NG': r'^[0-9]{6}$',
        'NI': r'^[0-9]{3}-[0-9]{3}-[0-9]$',
        'NL': r'^[0-9]{4}[A-Z]{2}$',
        'NO': r'^[0-9]{4}$',
        'NP': r'^[0-9]{5}$',
        'NZ': r'^[0-9]{4}$',
        'OM': r'^[0-9]{3}$',
        'PA': r'^[0-9]{6}$',
        'PE': r'^[0-9]{5}$',
        'PF': r'^[0-9]{5}$',
        'PG': r'^[0-9]{3}$',
        'PH': r'^[0-9]{4}$',
        'PK': r'^[0-9]{5}$',
        'PL': r'^[0-9]{2}-?[0-9]{3}$',
        'PM': r'^[0-9]{5}$',
        'PN': r'^[A-Z]{4}[0-9][A-Z]{2}$',
        'PR': r'^[0-9]{5}$',
        'PT': r'^[0-9]{4}(-?[0-9]{3})?$',
        'PW': r'^[0-9]{5}$',
        'PY': r'^[0-9]{4}$',
        'RE': r'^[0-9]{5}$',
        'RO': r'^[0-9]{6}$',
        'RS': r'^[0-9]{5}$',
        'RU': r'^[0-9]{6}$',
        'SA': r'^[0-9]{5}$',
        'SD': r'^[0-9]{5}$',
        'SE': r'^[0-9]{5}$',
        'SG': r'^([0-9]{2}|[0-9]{4}|[0-9]{6})$',
        'SH': r'^(STHL1ZZ|TDCU1ZZ)$',
        'SI': r'^(SI-)?[0-9]{4}$',
        'SK': r'^[0-9]{5}$',
        'SM': r'^[0-9]{5}$',
        'SN': r'^[0-9]{5}$',
        'SV': r'^01101$',
        'SZ': r'^[A-Z][0-9]{3}$',
        'TC': r'^TKCA1ZZ$',
        'TD': r'^[0-9]{5}$',
        'TH': r'^[0-9]{5}$',
        'TJ': r'^[0-9]{6}$',
        'TM': r'^[0-9]{6}$',
        'TN': r'^[0-9]{4}$',
        'TR': r'^[0-9]{5}$',
        'TT': r'^[0-9]{6}$',
        'TW': r'^[0-9]{5}$',
        'UA': r'^[0-9]{5}$',
        'US': r'^[0-9]{5}(-[0-9]{4}|-[0-9]{6})?$',
        'UY': r'^[0-9]{5}$',
        'UZ': r'^[0-9]{6}$',
        'VA': r'^00120$',
        # Anchored with '$' so trailing characters are rejected.
        'VC': r'^VC[0-9]{4}$',
        'VE': r'^[0-9]{4}[A-Z]?$',
        'VG': r'^VG[0-9]{4}$',
        'VI': r'^[0-9]{5}$',
        'VN': r'^[0-9]{6}$',
        'WF': r'^[0-9]{5}$',
        'XK': r'^[0-9]{5}$',
        'YT': r'^[0-9]{5}$',
        'ZA': r'^[0-9]{4}$',
        'ZM': r'^[0-9]{5}$',
    }

    title = models.CharField(
        pgettext_lazy(u"Treatment Pronouns for the customer", u"Title"),
        max_length=64, choices=TITLE_CHOICES, blank=True)
    first_name = models.CharField(_("First name"), max_length=255, blank=True)
    last_name = models.CharField(_("Last name"), max_length=255, blank=True)

    # We use quite a few lines of an address as they are often quite long and
    # it's easier to just hide the unnecessary ones than add extra ones.
    line1 = models.CharField(_("First line of address"), max_length=255)
    line2 = models.CharField(
        _("Second line of address"), max_length=255, blank=True)
    line3 = models.CharField(
        _("Third line of address"), max_length=255, blank=True)
    line4 = models.CharField(_("City"), max_length=255, blank=True)
    state = models.CharField(_("State/County"), max_length=255, blank=True)
    postcode = UppercaseCharField(
        _("Post/Zip-code"), max_length=64, blank=True)
    country = models.ForeignKey('address.Country', verbose_name=_("Country"))

    #: A field only used for searching addresses - this contains all the
    #: relevant fields. This is effectively a poor man's Solr text field.
    search_text = models.TextField(
        _("Search text - used only for searching addresses"), editable=False)

    def __str__(self):
        return self.summary

    class Meta:
        abstract = True
        verbose_name = _('Address')
        verbose_name_plural = _('Addresses')

    # Saving

    def save(self, *args, **kwargs):
        # Keep the denormalised search text in sync on every save.
        self._update_search_text()
        super(AbstractAddress, self).save(*args, **kwargs)

    def clean(self):
        # Strip all whitespace
        for field in ['first_name', 'last_name', 'line1', 'line2', 'line3',
                      'line4', 'state', 'postcode']:
            if self.__dict__[field]:
                self.__dict__[field] = self.__dict__[field].strip()

        # Ensure postcodes are valid for country
        self.ensure_postcode_is_valid_for_country()

    def ensure_postcode_is_valid_for_country(self):
        """
        Validate postcode given the country

        Raises ``ValidationError`` when the postcode is missing but
        required, or present but does not match the country's pattern.
        Countries without an entry in ``POSTCODES_REGEX`` are not checked.
        """
        if not self.postcode and self.POSTCODE_REQUIRED and self.country_id:
            country_code = self.country.iso_3166_1_a2
            regex = self.POSTCODES_REGEX.get(country_code, None)
            if regex:
                msg = _("Addresses in %(country)s require a valid postcode") \
                    % {'country': self.country}
                # NOTE: raised as a non-field error, unlike the field-keyed
                # error below - kept for backwards compatibility.
                raise exceptions.ValidationError(msg)

        if self.postcode and self.country_id:
            # Ensure postcodes are always uppercase
            postcode = self.postcode.upper().replace(' ', '')
            country_code = self.country.iso_3166_1_a2
            regex = self.POSTCODES_REGEX.get(country_code, None)

            # Validate postcode against regex for the country if available
            if regex and not re.match(regex, postcode):
                msg = _("The postcode '%(postcode)s' is not valid "
                        "for %(country)s") \
                    % {'postcode': self.postcode,
                       'country': self.country}
                raise exceptions.ValidationError(
                    {'postcode': [msg]})

    def _update_search_text(self):
        # Concatenate all non-empty address components into one field.
        search_fields = filter(
            bool, [self.first_name, self.last_name,
                   self.line1, self.line2, self.line3, self.line4,
                   self.state, self.postcode, self.country.name])
        self.search_text = ' '.join(search_fields)

    # Properties

    @property
    def city(self):
        # Common alias
        return self.line4

    @property
    def summary(self):
        """
        Returns a single string summary of the address,
        separating fields using commas.
        """
        return u", ".join(self.active_address_fields())

    @property
    def salutation(self):
        """
        Name (including title)
        """
        return self.join_fields(
            ('title', 'first_name', 'last_name'),
            separator=u" ")

    @property
    def name(self):
        return self.join_fields(('first_name', 'last_name'), separator=u" ")

    # Helpers

    def generate_hash(self):
        """
        Returns a hash of the address summary
        """
        # We use an upper-case version of the summary
        return zlib.crc32(self.summary.strip().upper().encode('UTF8'))

    def join_fields(self, fields, separator=u", "):
        """
        Join a sequence of fields using the specified separator
        """
        field_values = []
        for field in fields:
            # Title is special case
            if field == 'title':
                value = self.get_title_display()
            else:
                value = getattr(self, field)
            field_values.append(value)
        return separator.join(filter(bool, field_values))

    def populate_alternative_model(self, address_model):
        """
        For populating an address model using the matching fields
        from this one.

        This is used to convert a user address to a shipping address
        as part of the checkout process.
        """
        destination_field_names = [
            field.name for field in address_model._meta.fields]
        for field_name in [field.name for field in self._meta.fields]:
            if field_name in destination_field_names and field_name != 'id':
                setattr(address_model, field_name, getattr(self, field_name))

    def active_address_fields(self, include_salutation=True):
        """
        Return the non-empty components of the address, but merging the
        title, first_name and last_name into a single line.
        """
        fields = [self.line1, self.line2, self.line3,
                  self.line4, self.state, self.postcode]
        if include_salutation:
            fields = [self.salutation] + fields
        fields = [f.strip() for f in fields if f]
        try:
            fields.append(self.country.name)
        except exceptions.ObjectDoesNotExist:
            # Country may not be set yet on an unsaved address.
            pass
        return fields
@python_2_unicode_compatible
class AbstractCountry(models.Model):
    """
    International Organization for Standardization (ISO) 3166-1 Country list.

    The field names are a bit awkward, but kept for backwards compatibility.
    pycountry's syntax of alpha2, alpha3, name and official_name seems sane.
    """
    iso_3166_1_a2 = models.CharField(
        _('ISO 3166-1 alpha-2'), max_length=2, primary_key=True)
    iso_3166_1_a3 = models.CharField(
        _('ISO 3166-1 alpha-3'), max_length=3, blank=True)
    iso_3166_1_numeric = models.CharField(
        _('ISO 3166-1 numeric'), blank=True, max_length=3)

    #: The commonly used name; e.g. 'United Kingdom'
    printable_name = models.CharField(_('Country name'), max_length=128)
    #: The full official name of a country
    #: e.g. 'United Kingdom of Great Britain and Northern Ireland'
    name = models.CharField(_('Official name'), max_length=128)

    display_order = models.PositiveSmallIntegerField(
        _("Display order"), default=0, db_index=True,
        help_text=_('Higher the number, higher the country in the list.'))

    is_shipping_country = models.BooleanField(
        _("Is shipping country"), default=False, db_index=True)

    class Meta:
        abstract = True
        app_label = 'address'
        verbose_name = _('Country')
        verbose_name_plural = _('Countries')
        ordering = ('-display_order', 'printable_name',)

    def __str__(self):
        return self.printable_name or self.name

    @property
    def code(self):
        """
        Shorthand for the ISO 3166 Alpha-2 code
        """
        return self.iso_3166_1_a2

    @property
    def numeric_code(self):
        """
        Shorthand for the ISO 3166 numeric code.

        iso_3166_1_numeric used to wrongly be a integer field, but has to be
        padded with leading zeroes. It's since been converted to a char field,
        but the database might still contain non-padded strings. That's why
        the padding is kept.
        """
        if not self.iso_3166_1_numeric:
            # The field allows blank values; int('') would raise ValueError.
            return u""
        return u"%.03d" % int(self.iso_3166_1_numeric)
class AbstractShippingAddress(AbstractAddress):
    """
    A shipping address.

    A shipping address should not be edited once the order has been placed -
    it should be read-only after that.

    NOTE:
    ShippingAddress is a model of the order app. But moving it there is tricky
    due to circular import issues that are amplified by get_model/get_class
    calls pre-Django 1.7 to register receivers. So...
    TODO: Once Django 1.6 support is dropped, move AbstractBillingAddress and
          AbstractShippingAddress to the order app, and move
          PartnerAddress to the partner app.
    """
    # Optional contact number, e.g. for delivery queries.
    phone_number = PhoneNumberField(
        _("Phone number"), blank=True,
        help_text=_("In case we need to call you about your order"))
    # Free-text delivery instructions from the customer.
    notes = models.TextField(
        blank=True, verbose_name=_('Instructions'),
        help_text=_("Tell us anything we should know when delivering "
                    "your order."))

    class Meta:
        abstract = True
        # ShippingAddress is registered in order/models.py
        app_label = 'order'
        verbose_name = _("Shipping address")
        verbose_name_plural = _("Shipping addresses")

    @property
    def order(self):
        """
        Return the order linked to this shipping address
        """
        # A shipping address belongs to at most one order; return None when
        # the address is not (yet) attached to any order.
        try:
            return self.order_set.all()[0]
        except IndexError:
            return None
class AbstractUserAddress(AbstractShippingAddress):
    """
    A user's address.  A user can have many of these and together they form an
    'address book' of sorts for the user.

    We use a separate model for shipping and billing (even though there will
    be some data duplication) because we don't want shipping/billing addresses
    changed or deleted once an order has been placed.  By having a separate
    model, we allow users the ability to add/edit/delete from their address
    book without affecting orders already placed.
    """
    user = models.ForeignKey(
        AUTH_USER_MODEL, related_name='addresses', verbose_name=_("User"))

    #: Whether this address is the default for shipping
    is_default_for_shipping = models.BooleanField(
        _("Default shipping address?"), default=False)

    #: Whether this address should be the default for billing.
    is_default_for_billing = models.BooleanField(
        _("Default billing address?"), default=False)

    #: We keep track of the number of times an address has been used
    #: as a shipping address so we can show the most popular ones
    #: first at the checkout.
    num_orders = models.PositiveIntegerField(_("Number of Orders"), default=0)

    #: A hash is kept to try and avoid duplicate addresses being added
    #: to the address book.
    hash = models.CharField(_("Address Hash"), max_length=255, db_index=True,
                            editable=False)
    date_created = models.DateTimeField(_("Date Created"), auto_now_add=True)

    def save(self, *args, **kwargs):
        """
        Save a hash of the address fields
        """
        # Save a hash of the address fields so we can check whether two
        # addresses are the same to avoid saving duplicates
        self.hash = self.generate_hash()

        # Ensure that each user only has one default shipping address
        # and billing address
        self._ensure_defaults_integrity()
        super(AbstractUserAddress, self).save(*args, **kwargs)

    def _ensure_defaults_integrity(self):
        # Clear the default flag on the user's other addresses before this
        # one is saved with the flag set.
        if self.is_default_for_shipping:
            self.__class__._default_manager\
                .filter(user=self.user, is_default_for_shipping=True)\
                .update(is_default_for_shipping=False)
        if self.is_default_for_billing:
            self.__class__._default_manager\
                .filter(user=self.user, is_default_for_billing=True)\
                .update(is_default_for_billing=False)

    class Meta:
        abstract = True
        app_label = 'address'
        verbose_name = _("User address")
        verbose_name_plural = _("User addresses")
        ordering = ['-num_orders']
        unique_together = ('user', 'hash')

    def validate_unique(self, exclude=None):
        # Fixed: previously called super(AbstractAddress, self), which
        # skipped part of the MRO; start the lookup from this class.
        super(AbstractUserAddress, self).validate_unique(exclude)
        qs = self.__class__.objects.filter(
            user=self.user,
            hash=self.generate_hash())
        if self.id:
            qs = qs.exclude(id=self.id)
        if qs.exists():
            raise exceptions.ValidationError({
                '__all__': [_("This address is already in your address"
                              " book")]})
class AbstractBillingAddress(AbstractAddress):
    """
    A billing address linked to an order.

    Like the shipping address, it should be treated as read-only once the
    order has been placed.
    """
    class Meta:
        abstract = True
        # BillingAddress is registered in order/models.py
        app_label = 'order'
        verbose_name = _("Billing address")
        verbose_name_plural = _("Billing addresses")

    @property
    def order(self):
        """
        Return the order linked to this billing address
        """
        # A billing address belongs to at most one order; return None when
        # the address is not (yet) attached to any order.
        try:
            return self.order_set.all()[0]
        except IndexError:
            return None
class AbstractPartnerAddress(AbstractAddress):
    """
    A partner can have one or more addresses. This can be useful e.g. when
    determining US tax which depends on the origin of the shipment.
    """
    # The fulfilment partner this address belongs to.
    partner = models.ForeignKey('partner.Partner', related_name='addresses',
                                verbose_name=_('Partner'))

    class Meta:
        abstract = True
        app_label = 'partner'
        verbose_name = _("Partner address")
        verbose_name_plural = _("Partner addresses")
| bsd-3-clause |
avinashkunuje/phantomjs | src/qt/qtwebkit/Tools/Scripts/webkitpy/port/qt.py | 113 | 7883 | # Copyright (C) 2010 Google Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the Google name nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""QtWebKit implementation of the Port interface."""
import glob
import logging
import re
import sys
import os
import platform
from webkitpy.common.memoized import memoized
from webkitpy.layout_tests.models.test_configuration import TestConfiguration
from webkitpy.port.base import Port
from webkitpy.port.xvfbdriver import XvfbDriver
_log = logging.getLogger(__name__)
class QtPort(Port):
ALL_VERSIONS = ['linux', 'win', 'mac']
port_name = "qt"
def _wk2_port_name(self):
return "qt-wk2"
def _port_flag_for_scripts(self):
return "--qt"
@classmethod
def determine_full_port_name(cls, host, options, port_name):
if port_name and port_name != cls.port_name:
return port_name
return port_name + '-' + host.platform.os_name
# sys_platform exists only for unit testing.
def __init__(self, host, port_name, **kwargs):
super(QtPort, self).__init__(host, port_name, **kwargs)
self._operating_system = port_name.replace('qt-', '')
# FIXME: Why is this being set at all?
self._version = self.operating_system()
def _generate_all_test_configurations(self):
configurations = []
for version in self.ALL_VERSIONS:
for build_type in self.ALL_BUILD_TYPES:
configurations.append(TestConfiguration(version=version, architecture='x86', build_type=build_type))
return configurations
def _build_driver(self):
# The Qt port builds DRT as part of the main build step
return True
def supports_per_test_timeout(self):
return True
def _path_to_driver(self):
return self._build_path('bin/%s' % self.driver_name())
def _path_to_image_diff(self):
return self._build_path('bin/ImageDiff')
def _path_to_webcore_library(self):
if self.operating_system() == 'mac':
return self._build_path('lib/QtWebKitWidgets.framework/QtWebKitWidgets')
else:
return self._build_path('lib/libQt5WebKitWidgets.so')
def _modules_to_search_for_symbols(self):
# We search in every library to be reliable in the case of building with CONFIG+=force_static_libs_as_shared.
if self.operating_system() == 'mac':
frameworks = glob.glob(os.path.join(self._build_path('lib'), '*.framework'))
return [os.path.join(framework, os.path.splitext(os.path.basename(framework))[0]) for framework in frameworks]
else:
suffix = 'dll' if self.operating_system() == 'win' else 'so'
return glob.glob(os.path.join(self._build_path('lib'), 'lib*.' + suffix))
@memoized
def qt_version(self):
version = ''
try:
for line in self._executive.run_command(['qmake', '-v']).split('\n'):
match = re.search('Qt\sversion\s(?P<version>\d\.\d)', line)
if match:
version = match.group('version')
break
except OSError:
version = '5.0'
return version
def _search_paths(self):
# qt-mac-wk2
# /
# qt-wk1 qt-wk2
# \/
# qt-5.x
# \
# (qt-linux|qt-mac|qt-win)
# |
# qt
search_paths = []
if self.get_option('webkit_test_runner'):
if self.operating_system() == 'mac':
search_paths.append('qt-mac-wk2')
search_paths.append('qt-wk2')
else:
search_paths.append('qt-wk1')
search_paths.append('qt-' + self.qt_version())
search_paths.append(self.port_name + '-' + self.operating_system())
search_paths.append(self.port_name)
return search_paths
def default_baseline_search_path(self):
return map(self._webkit_baseline_path, self._search_paths())
def _port_specific_expectations_files(self):
paths = self._search_paths()
if self.get_option('webkit_test_runner'):
paths.append('wk2')
# expectations_files() uses the directories listed in _search_paths reversed.
# e.g. qt -> qt-linux -> qt-5.x -> qt-wk1
return list(reversed([self._filesystem.join(self._webkit_baseline_path(p), 'TestExpectations') for p in paths]))
    def setup_environ_for_server(self, server_name=None):
        """Build the environment for a test server process.

        Starts from the superclass's cleaned environment, points Qt at the
        built plugins, and forwards selected Qt/WebKit debugging variables
        from the caller's environment when they are set.
        """
        clean_env = super(QtPort, self).setup_environ_for_server(server_name)
        clean_env['QTWEBKIT_PLUGIN_PATH'] = self._build_path('lib/plugins')
        self._copy_value_from_environ_if_set(clean_env, 'QT_DRT_WEBVIEW_MODE')
        self._copy_value_from_environ_if_set(clean_env, 'DYLD_IMAGE_SUFFIX')
        self._copy_value_from_environ_if_set(clean_env, 'QT_WEBKIT_LOG')
        self._copy_value_from_environ_if_set(clean_env, 'DISABLE_NI_WARNING')
        self._copy_value_from_environ_if_set(clean_env, 'QT_WEBKIT_PAUSE_UI_PROCESS')
        self._copy_value_from_environ_if_set(clean_env, 'QT_QPA_PLATFORM_PLUGIN_PATH')
        self._copy_value_from_environ_if_set(clean_env, 'QT_WEBKIT_DISABLE_UIPROCESS_DUMPPIXELS')
        return clean_env
    # FIXME: We should find a way to share this implementation with Gtk,
    # or teach run-launcher how to call run-safari and move this down to Port.
    def show_results_html_file(self, results_filename):
        """Open the generated results page in a browser via run-launcher."""
        run_launcher_args = []
        if self.get_option('webkit_test_runner'):
            # '-2' asks run-launcher for the WebKit2 (WebKitTestRunner) build.
            run_launcher_args.append('-2')
        run_launcher_args.append("file://%s" % results_filename)
        self._run_script("run-launcher", run_launcher_args)
    def operating_system(self):
        """Return the operating-system name this port was configured with."""
        return self._operating_system
def check_sys_deps(self, needs_http):
result = super(QtPort, self).check_sys_deps(needs_http)
if not 'WEBKIT_TESTFONTS' in os.environ:
_log.error('\nThe WEBKIT_TESTFONTS environment variable is not defined or not set properly.')
_log.error('You must set it before running the tests.')
_log.error('Use git to grab the actual fonts from http://gitorious.org/qtwebkit/testfonts')
return False
return result
    # Qt port is not ready for parallel testing, see https://bugs.webkit.org/show_bug.cgi?id=77730 for details.
    def default_child_processes(self):
        """Force serial test execution (see the bug link above)."""
        return 1
| bsd-3-clause |
italomaia/django-allauth | allauth/socialaccount/providers/twitter/views.py | 64 | 1493 | import json
from allauth.socialaccount.providers.oauth.client import OAuth
from allauth.socialaccount.providers.oauth.views import (OAuthAdapter,
OAuthLoginView,
OAuthCallbackView)
from .provider import TwitterProvider
class TwitterAPI(OAuth):
    """
    OAuth client used to verify Twitter credentials and fetch the
    authenticated user's profile.
    """
    url = 'https://api.twitter.com/1.1/account/verify_credentials.json'
    def get_user_info(self):
        """Return the verified account's profile data as a dict."""
        raw_response = self.query(self.url)
        return json.loads(raw_response)
class TwitterOAuthAdapter(OAuthAdapter):
    # Wires Twitter's OAuth1 endpoints into allauth's generic OAuth login flow.
    provider_id = TwitterProvider.id
    request_token_url = 'https://api.twitter.com/oauth/request_token'
    access_token_url = 'https://api.twitter.com/oauth/access_token'
    # Issue #42 -- this one authenticates over and over again...
    # authorize_url = 'https://api.twitter.com/oauth/authorize'
    authorize_url = 'https://api.twitter.com/oauth/authenticate'
    def complete_login(self, request, app, token, response):
        """Fetch the Twitter profile and build a SocialLogin from it."""
        client = TwitterAPI(request, app.client_id, app.secret,
                            self.request_token_url)
        extra_data = client.get_user_info()
        return self.get_provider().sociallogin_from_response(request,
                                                             extra_data)
# Django view callables for the OAuth login and callback endpoints.
oauth_login = OAuthLoginView.adapter_view(TwitterOAuthAdapter)
oauth_callback = OAuthCallbackView.adapter_view(TwitterOAuthAdapter)
| mit |
niketanpansare/incubator-systemml | src/main/python/systemml/converters.py | 5 | 14040 | # -------------------------------------------------------------
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
# -------------------------------------------------------------
__all__ = [
'getNumCols',
'convertToMatrixBlock',
'convert_caffemodel',
'convert_lmdb_to_jpeg',
'convertToNumPyArr',
'convertToPandasDF',
'SUPPORTED_TYPES',
'convertToLabeledDF',
'convertImageToNumPyArr',
'getDatasetMean']
import numpy as np
import pandas as pd
import os
import math
from pyspark.context import SparkContext
from scipy.sparse import coo_matrix, spmatrix, csr_matrix
from .classloader import *
SUPPORTED_TYPES = (np.ndarray, pd.DataFrame, spmatrix)
DATASET_MEAN = {'VGG_ILSVRC_19_2014': [103.939, 116.779, 123.68]}
def getNumCols(numPyArr):
    """Return the number of columns of a numpy array (1 for 1-D arrays)."""
    return 1 if numPyArr.ndim == 1 else numPyArr.shape[1]
def get_pretty_str(key, value):
    """Format one tab-indented '"key": value,' line for a SystemML .mtd file."""
    return '\t"{0}": {1},\n'.format(key, value)
def save_tensor_csv(tensor, file_path, shouldTranspose):
    """Flatten ``tensor`` to 2-D, optionally transpose it, and write it as a
    CSV file plus a SystemML metadata (.mtd) JSON file.

    Parameters
    ----------
    tensor: numpy array of rank >= 2; trailing dimensions are flattened
    file_path: destination CSV path; metadata goes to ``file_path + '.mtd'``
    shouldTranspose: transpose after flattening (used for InnerProduct weights)
    """
    # Bug fix: the original read ``w = w.reshape(...)`` where ``w`` was never
    # assigned (the parameter is ``tensor``), so every call raised
    # UnboundLocalError.
    w = tensor.reshape(tensor.shape[0], -1)
    if shouldTranspose:
        w = w.T
    np.savetxt(file_path, w, delimiter=',')
    with open(file_path + '.mtd', 'w') as file:
        file.write('{\n\t"data_type": "matrix",\n\t"value_type": "double",\n')
        file.write(get_pretty_str('rows', w.shape[0]))
        file.write(get_pretty_str('cols', w.shape[1]))
        file.write(get_pretty_str('nnz', np.count_nonzero(w)))
        file.write(
            '\t"format": "csv",\n\t"description": {\n\t\t"author": "SystemML"\n\t}\n}\n')
def convert_caffemodel(sc, deploy_file, caffemodel_file,
                       output_dir, format="binary", is_caffe_installed=False):
    """
    Saves the weights and bias in the caffemodel file to output_dir in the specified format.
    This method does not requires caffe to be installed.

    Parameters
    ----------
    sc: SparkContext
        SparkContext

    deploy_file: string
        Path to the input network file

    caffemodel_file: string
        Path to the input caffemodel file

    output_dir: string
        Path to the output directory

    format: string
        Format of the weights and bias (can be binary, csv or text)

    is_caffe_installed: bool
        True if caffe is installed
    """
    if is_caffe_installed:
        # Python-side path: walk the caffe net and dump each layer's blobs.
        if format != 'csv':
            raise ValueError(
                'The format ' +
                str(format) +
                ' is not supported when caffe is installed. Hint: Please specify format=csv')
        import caffe
        net = caffe.Net(deploy_file, caffemodel_file, caffe.TEST)
        for layerName in net.params.keys():
            num_parameters = len(net.params[layerName])
            if num_parameters == 0:
                continue
            elif num_parameters == 2:
                # Weights and Biases
                # InnerProduct weights are stored transposed relative to
                # SystemML's layout, hence shouldTranspose.
                layerType = net.layers[list(
                    net._layer_names).index(layerName)].type
                shouldTranspose = True if layerType == 'InnerProduct' else False
                save_tensor_csv(
                    net.params[layerName][0].data,
                    os.path.join(
                        output_dir,
                        layerName +
                        '_weight.mtx'),
                    shouldTranspose)
                save_tensor_csv(
                    net.params[layerName][1].data,
                    os.path.join(
                        output_dir,
                        layerName +
                        '_bias.mtx'),
                    shouldTranspose)
            elif num_parameters == 1:
                # Only Weight
                layerType = net.layers[list(
                    net._layer_names).index(layerName)].type
                shouldTranspose = True if layerType == 'InnerProduct' else False
                save_tensor_csv(
                    net.params[layerName][0].data,
                    os.path.join(
                        output_dir,
                        layerName +
                        '_weight.mtx'),
                    shouldTranspose)
            else:
                raise ValueError(
                    'Unsupported number of parameters:' +
                    str(num_parameters))
    else:
        # JVM-side path: delegate the caffemodel parsing to SystemML's
        # protobuf-based utility (no caffe dependency needed).
        createJavaObject(sc, 'dummy')
        utilObj = sc._jvm.org.apache.sysml.api.dl.Utils()
        utilObj.saveCaffeModelFile(
            sc._jsc,
            deploy_file,
            caffemodel_file,
            output_dir,
            format)
def convert_lmdb_to_jpeg(lmdb_img_file, output_dir):
    """
    Saves the images in the lmdb file as jpeg in the output_dir. This method requires caffe to be installed along with lmdb and cv2 package.
    To install cv2 package, do `pip install opencv-python`.

    Parameters
    ----------
    lmdb_img_file: string
        Path to the input lmdb file

    output_dir: string
        Output directory for images (local filesystem)
    """
    import lmdb
    import caffe
    import cv2
    # Bug fix: the original opened the undefined name ``lmdb_file`` instead
    # of the ``lmdb_img_file`` parameter, raising NameError on every call.
    lmdb_cursor = lmdb.open(lmdb_img_file, readonly=True).begin().cursor()
    datum = caffe.proto.caffe_pb2.Datum()
    i = 1
    for _, value in lmdb_cursor:
        datum.ParseFromString(value)
        data = caffe.io.datum_to_array(datum)
        output_file_path = os.path.join(output_dir, 'file_' + str(i) + '.jpg')
        image = np.transpose(data, (1, 2, 0))  # CxHxW to HxWxC in cv2
        cv2.imwrite(output_file_path, image)
        i = i + 1
def convertToLabeledDF(sparkSession, X, y=None):
    """Convert feature matrix X (and optional label vector y) into a Spark
    DataFrame with a vector-assembled 'features' column (plus 'label' if y
    is given)."""
    from pyspark.ml.feature import VectorAssembler
    if y is not None:
        pd1 = pd.DataFrame(X)
        pd2 = pd.DataFrame(y, columns=['label'])
        pdf = pd.concat([pd1, pd2], axis=1)
        inputColumns = ['C' + str(i) for i in pd1.columns]
        outputColumns = inputColumns + ['label']
    else:
        pdf = pd.DataFrame(X)
        inputColumns = ['C' + str(i) for i in pdf.columns]
        outputColumns = inputColumns
    # Collapse the C0..Cn feature columns into a single ML vector column.
    assembler = VectorAssembler(inputCols=inputColumns, outputCol='features')
    out = assembler.transform(sparkSession.createDataFrame(pdf, outputColumns))
    if y is not None:
        return out.select('features', 'label')
    else:
        return out.select('features')
def _convertSPMatrixToMB(sc, src):
    """Transfer a scipy sparse matrix to the JVM as a SystemML MatrixBlock.

    The matrix is converted to COO form and its data/row/col arrays are
    shipped as raw byte buffers over Py4J.
    """
    src = coo_matrix(src, dtype=np.float64)
    numRows = src.shape[0]
    numCols = src.shape[1]
    data = src.data
    row = src.row.astype(np.int32)
    col = src.col.astype(np.int32)
    nnz = len(src.col)
    # Fix: tobytes() instead of tostring() -- tostring() is a deprecated
    # alias that has been removed in recent NumPy releases.
    buf1 = bytearray(data.tobytes())
    buf2 = bytearray(row.tobytes())
    buf3 = bytearray(col.tobytes())
    createJavaObject(sc, 'dummy')
    return sc._jvm.org.apache.sysml.runtime.instructions.spark.utils.RDDConverterUtilsExt.convertSciPyCOOToMB(
        buf1, buf2, buf3, numRows, numCols, nnz)
def _convertDenseMatrixToMB(sc, src):
    """Transfer a dense numpy matrix to the JVM as a SystemML MatrixBlock.

    The flattened array is shipped as one raw byte buffer over Py4J together
    with a data-type tag understood by the JVM side.
    """
    numCols = getNumCols(src)
    numRows = src.shape[0]
    src = np.asarray(src, dtype=np.float64) if not isinstance(src, np.ndarray) else src
    # data_type: 0: int, 1: float and 2: double
    # Fix: compare dtypes with '==' (documented comparison) rather than 'is',
    # which relies on dtype-singleton identity.
    if src.dtype == np.dtype(np.int32):
        arr = src.ravel().astype(np.int32)
        dataType = 0
    elif src.dtype == np.dtype(np.float32):
        arr = src.ravel().astype(np.float32)
        dataType = 1
    else:
        arr = src.ravel().astype(np.float64)
        dataType = 2
    # Fix: tobytes() instead of the deprecated/removed tostring() alias.
    buf = bytearray(arr.tobytes())
    createJavaObject(sc, 'dummy')
    return sc._jvm.org.apache.sysml.runtime.instructions.spark.utils.RDDConverterUtilsExt.convertPy4JArrayToMB(
        buf, numRows, numCols, dataType)
def _copyRowBlock(i, sc, ret, src, numRowsPerBlock, rlen, clen):
    """Copy one horizontal slice of ``src`` (rows i .. i+numRowsPerBlock) into
    the JVM-side matrix ``ret`` at block index i/numRowsPerBlock.

    Returns ``i`` so the caller's list comprehension has a value to collect.
    """
    rowIndex = int(i / numRowsPerBlock)
    tmp = src[i:min(i + numRowsPerBlock, rlen), ]
    # Route through the sparse or dense converter depending on the input type.
    mb = _convertSPMatrixToMB(
        sc,
        tmp) if isinstance(
        src,
        spmatrix) else _convertDenseMatrixToMB(
        sc,
        tmp)
    sc._jvm.org.apache.sysml.runtime.instructions.spark.utils.RDDConverterUtilsExt.copyRowBlocks(
        mb, rowIndex, ret, numRowsPerBlock, rlen, clen)
    return i
def convertToMatrixBlock(sc, src, maxSizeBlockInMB=128):
    """Convert a 2-D numpy array or scipy sparse matrix into a JVM-side
    SystemML MatrixBlock.

    Inputs whose worst-case serialized size exceeds ``maxSizeBlockInMB`` are
    transferred in row blocks to bound the size of a single Py4J call.
    Raises TypeError for a non-SparkContext ``sc`` or non-2-D input.
    """
    if not isinstance(sc, SparkContext):
        raise TypeError('sc needs to be of type SparkContext')
    isSparse = True if isinstance(src, spmatrix) else False
    src = np.asarray(src, dtype=np.float64) if not isinstance(src, np.ndarray) else src
    if len(src.shape) != 2:
        src_type = str(type(src).__name__)
        raise TypeError('Expected 2-dimensional ' +
                        src_type +
                        ', instead passed ' +
                        str(len(src.shape)) +
                        '-dimensional ' +
                        src_type)
    # Worst-case bytes: 8 per value; COO needs ~3 arrays (data/row/col).
    worstCaseSizeInMB = (8*(src.getnnz()*3 if isSparse else src.shape[0]*src.shape[1])) / 1000000
    # Ignoring sparsity for computing numRowsPerBlock for now
    numRowsPerBlock = int(
        math.ceil((maxSizeBlockInMB * 1000000) / (src.shape[1] * 8)))
    if worstCaseSizeInMB <= maxSizeBlockInMB:
        return _convertSPMatrixToMB(
            sc, src) if isSparse else _convertDenseMatrixToMB(sc, src)
    else:
        # Since coo_matrix does not have range indexing
        src = csr_matrix(src) if isSparse else src
        rlen = int(src.shape[0])
        clen = int(src.shape[1])
        ret = sc._jvm.org.apache.sysml.runtime.instructions.spark.utils.RDDConverterUtilsExt.allocateDenseOrSparse(
            rlen, clen, isSparse)
        [_copyRowBlock(i, sc, ret, src, numRowsPerBlock, rlen, clen)
         for i in range(0, src.shape[0], numRowsPerBlock)]
        sc._jvm.org.apache.sysml.runtime.instructions.spark.utils.RDDConverterUtilsExt.postProcessAfterCopying(
            ret)
        return ret
def convertToNumPyArr(sc, mb):
    """Copy a JVM-side MatrixBlock ``mb`` back into a dense float64 numpy
    array of shape (numRows, numCols). Raises TypeError if ``sc`` is not a
    SparkContext."""
    if isinstance(sc, SparkContext):
        numRows = mb.getNumRows()
        numCols = mb.getNumColumns()
        createJavaObject(sc, 'dummy')
        buf = sc._jvm.org.apache.sysml.runtime.instructions.spark.utils.RDDConverterUtilsExt.convertMBtoPy4JDenseArr(
            mb)
        return np.frombuffer(buf, count=numRows * numCols,
                             dtype=np.float64).reshape((numRows, numCols))
    else:
        # TODO: We can generalize this by creating py4j gateway ourselves
        raise TypeError('sc needs to be of type SparkContext')
# Returns the mean of a model if defined otherwise None
def getDatasetMean(dataset_name):
    """
    Parameters
    ----------
    dataset_name: Name of the dataset used to train model. This name is artificial name based on dataset used to train the model.

    Returns
    -------
    mean: Mean value of model if its defined in the list DATASET_MEAN else None.
    """
    try:
        mean = DATASET_MEAN[dataset_name.upper()]
    # Fix: catch only the expected failures (unknown key, or a non-string
    # argument without .upper()) instead of the overly broad BaseException,
    # which also swallowed KeyboardInterrupt/SystemExit.
    except (KeyError, AttributeError):
        mean = None
    return mean
# Example usage: convertImageToNumPyArr(im, img_shape=(3, 224, 224), add_rotated_images=True, add_mirrored_images=True)
# The above call returns a numpy array of shape (6, 50176) in NCHW format
def convertImageToNumPyArr(im, img_shape=None, add_rotated_images=False, add_mirrored_images=False,
                           color_mode='RGB', mean=None):
    """Convert a PIL image into a 2-D numpy array of flattened pixels in
    NCHW order, optionally appending rotated / mirrored variants as extra rows.

    # Input Parameters

    # color_mode: In case of VGG models which expect image data in BGR format instead of RGB for other most models,
    # color_mode parameter is used to process image data in BGR format.

    # mean: mean value is used to subtract from input data from every pixel
    # value. By default value is None, so mean value not subtracted.
    """
    if img_shape is not None:
        num_channels = img_shape[0]
        size = (img_shape[1], img_shape[2])
    else:
        num_channels = 1 if im.mode == 'L' else 3
        size = None
    if num_channels != 1 and num_channels != 3:
        raise ValueError('Expected the number of channels to be either 1 or 3')
    from PIL import Image
    if size is not None:
        im = im.resize(size, Image.LANCZOS)
    expected_mode = 'L' if num_channels == 1 else 'RGB'
    # Fix: compare strings with '!=' -- 'is not' tests object identity and is
    # unreliable for strings.
    if expected_mode != im.mode:
        im = im.convert(expected_mode)

    def _im2NumPy(im):
        if expected_mode == 'L':
            return np.asarray(im.getdata()).reshape((1, -1))
        else:
            # Fix: np.float64 -- the bare np.float alias was removed in
            # NumPy 1.24.
            im = (np.array(im).astype(np.float64))
            # (H,W,C) -> (C,H,W)
            im = im.transpose(2, 0, 1)
            # RGB -> BGR. Fix: after the transpose the channel axis is axis 0;
            # the original reversed the last (width) axis instead.
            if color_mode == 'BGR':
                im = im[::-1, :, :]
            # Subtract per-channel mean. Fix: index the channel axis (axis 0),
            # not the width axis as the original did.
            if mean is not None:
                for c in range(3):
                    im[c, :, :] = im[c, :, :] - mean[c]
            # (C,H,W) --> (1, C*H*W)
            return im.reshape((1, -1))
    ret = _im2NumPy(im)
    if add_rotated_images:
        ret = np.vstack(
            (ret, _im2NumPy(
                im.rotate(90)), _im2NumPy(
                im.rotate(180)), _im2NumPy(
                im.rotate(270))))
    if add_mirrored_images:
        ret = np.vstack(
            (ret, _im2NumPy(
                im.transpose(
                    Image.FLIP_LEFT_RIGHT)), _im2NumPy(
                im.transpose(
                    Image.FLIP_TOP_BOTTOM))))
    return ret
def convertToPandasDF(X):
    """Return X as a pandas DataFrame with columns named C0..C{n-1};
    a DataFrame input is returned unchanged."""
    if isinstance(X, pd.DataFrame):
        return X
    column_names = ['C' + str(i) for i in range(getNumCols(X))]
    return pd.DataFrame(X, columns=column_names)
| apache-2.0 |
rohitwaghchaure/alec_frappe5_erpnext | erpnext/projects/report/project_wise_stock_tracking/project_wise_stock_tracking.py | 46 | 3173 | # Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors
# License: GNU General Public License v3. See license.txt
from __future__ import unicode_literals
import frappe
from frappe import _
def execute(filters=None):
    """Build the Project-wise Stock Tracking report.

    Returns (columns, rows) where each row carries the project's purchased,
    issued and delivered item costs alongside its master-data fields.
    """
    columns = get_columns()
    purchased = get_purchased_items_cost()
    issued = get_issued_items_cost()
    delivered = get_delivered_items_cost()
    rows = []
    for proj in get_project_details():
        rows.append([
            proj.name,
            purchased.get(proj.name, 0),
            issued.get(proj.name, 0),
            delivered.get(proj.name, 0),
            proj.project_name,
            proj.status,
            proj.company,
            proj.customer,
            proj.estimated_costing,
            proj.expected_start_date,
            proj.expected_end_date,
        ])
    return columns, rows
def get_columns():
    """Return the report column definitions ('Label:Type:Width' strings)."""
    return [_("Project Id") + ":Link/Project:140", _("Cost of Purchased Items") + ":Currency:160",
        _("Cost of Issued Items") + ":Currency:160", _("Cost of Delivered Items") + ":Currency:160",
        _("Project Name") + "::120", _("Project Status") + "::120", _("Company") + ":Link/Company:100",
        _("Customer") + ":Link/Customer:140", _("Project Value") + ":Currency:120",
        _("Project Start Date") + ":Date:120", _("Completion Date") + ":Date:120"]
def get_project_details():
    """Return all non-cancelled (docstatus < 2) projects as dicts."""
    return frappe.db.sql(""" select name, project_name, status, company, customer, estimated_costing,
        expected_start_date, expected_end_date from tabProject where docstatus < 2""", as_dict=1)
def get_purchased_items_cost():
    """Return {project name: total base net amount} of submitted Purchase Receipt Items."""
    pr_items = frappe.db.sql("""select project_name, sum(base_net_amount) as amount
        from `tabPurchase Receipt Item` where ifnull(project_name, '') != ''
        and docstatus = 1 group by project_name""", as_dict=1)
    cost_by_project = {}
    # First occurrence wins (same semantics as dict.setdefault).
    for row in pr_items:
        if row.project_name not in cost_by_project:
            cost_by_project[row.project_name] = row.amount
    return cost_by_project
def get_issued_items_cost():
    """Return {project name: total value} of items consumed via submitted Stock Entries
    (rows without a target warehouse)."""
    se_items = frappe.db.sql("""select se.project_name, sum(se_item.amount) as amount
        from `tabStock Entry` se, `tabStock Entry Detail` se_item
        where se.name = se_item.parent and se.docstatus = 1 and ifnull(se_item.t_warehouse, '') = ''
        and ifnull(se.project_name, '') != '' group by se.project_name""", as_dict=1)
    issued_by_project = {}
    # First occurrence wins (same semantics as dict.setdefault).
    for row in se_items:
        if row.project_name not in issued_by_project:
            issued_by_project[row.project_name] = row.amount
    return issued_by_project
def get_delivered_items_cost():
    """Return {project name: total base net amount} of delivered items,
    combining Delivery Notes with stock-updating POS Sales Invoices.

    Delivery Note totals take precedence: setdefault() does not overwrite a
    project already populated from dn_items.
    """
    dn_items = frappe.db.sql("""select dn.project_name, sum(dn_item.base_net_amount) as amount
        from `tabDelivery Note` dn, `tabDelivery Note Item` dn_item
        where dn.name = dn_item.parent and dn.docstatus = 1 and ifnull(dn.project_name, '') != ''
        group by dn.project_name""", as_dict=1)
    si_items = frappe.db.sql("""select si.project_name, sum(si_item.base_net_amount) as amount
        from `tabSales Invoice` si, `tabSales Invoice Item` si_item
        where si.name = si_item.parent and si.docstatus = 1 and ifnull(si.update_stock, 0) = 1
        and ifnull(si.is_pos, 0) = 1 and ifnull(si.project_name, '') != ''
        group by si.project_name""", as_dict=1)
    dn_item_map = {}
    for item in dn_items:
        dn_item_map.setdefault(item.project_name, item.amount)
    for item in si_items:
        dn_item_map.setdefault(item.project_name, item.amount)
    return dn_item_map
| agpl-3.0 |
837468220/python-for-android | python-modules/twisted/twisted/trial/test/test_tests.py | 59 | 35891 | # Copyright (c) 2001-2009 Twisted Matrix Laboratories.
# See LICENSE for details.
"""
Tests for the behaviour of unit tests.
"""
import gc, StringIO, sys, weakref
from twisted.internet import defer, reactor
from twisted.trial import unittest, runner, reporter, util
from twisted.trial.test import erroneous, suppression
from twisted.trial.test.test_reporter import LoggingReporter
class ResultsTestMixin:
    """
    Mixin giving test cases a loaded suite plus a TestResult reporter, and
    shared assertions about the reporter's initial state and test counts.
    """
    def loadSuite(self, suite):
        # Load every test method of the given TestCase class into self.suite.
        self.loader = runner.TestLoader()
        self.suite = self.loader.loadClass(suite)
        self.reporter = reporter.TestResult()
    def test_setUp(self):
        # A freshly created reporter must be empty and successful.
        self.failUnless(self.reporter.wasSuccessful())
        self.failUnlessEqual(self.reporter.errors, [])
        self.failUnlessEqual(self.reporter.failures, [])
        self.failUnlessEqual(self.reporter.skips, [])
    def assertCount(self, numTests):
        # countTestCases() must agree with the number of tests actually run.
        self.failUnlessEqual(self.suite.countTestCases(), numTests)
        self.suite(self.reporter)
        self.failUnlessEqual(self.reporter.testsRun, numTests)
class TestSuccess(unittest.TestCase):
    """
    Test that successful tests are reported as such.
    """
    def setUp(self):
        self.result = reporter.TestResult()
    def test_successful(self):
        """
        A successful test, used by other tests.
        """
    def assertSuccessful(self, test, result):
        # Exactly one success and no entries in any other result category.
        self.assertEqual(result.successes, 1)
        self.assertEqual(result.failures, [])
        self.assertEqual(result.errors, [])
        self.assertEqual(result.expectedFailures, [])
        self.assertEqual(result.unexpectedSuccesses, [])
        self.assertEqual(result.skips, [])
    def test_successfulIsReported(self):
        """
        Test that when a successful test is run, it is reported as a success,
        and not as any other kind of result.
        """
        test = TestSuccess('test_successful')
        test.run(self.result)
        self.assertSuccessful(test, self.result)
    def test_defaultIsSuccessful(self):
        """
        Test that L{unittest.TestCase} itself can be instantiated, run, and
        reported as being successful.
        """
        test = unittest.TestCase()
        test.run(self.result)
        self.assertSuccessful(test, self.result)
    def test_noReference(self):
        """
        Test that no reference is kept on a successful test.
        """
        test = TestSuccess('test_successful')
        ref = weakref.ref(test)
        test.run(self.result)
        self.assertSuccessful(test, self.result)
        del test
        gc.collect()
        # The weakref must now be dead: nothing may still hold the test.
        self.assertIdentical(ref(), None)
class TestSkipMethods(unittest.TestCase, ResultsTestMixin):
    """
    Tests for reporting of skipped test methods, whether skipped by raising
    SkipTest, by a 'skip' attribute on the method, or from setUp.
    """
    class SkippingTests(unittest.TestCase):
        # Fixture: three tests, each skipped by a different mechanism.
        def test_skip1(self):
            raise unittest.SkipTest('skip1')
        def test_skip2(self):
            raise RuntimeError("I should not get raised")
        test_skip2.skip = 'skip2'
        def test_skip3(self):
            self.fail('I should not fail')
        test_skip3.skip = 'skip3'
    class SkippingSetUp(unittest.TestCase):
        # Fixture: SkipTest raised from setUp skips every test method.
        def setUp(self):
            raise unittest.SkipTest('skipSetUp')
        def test_1(self):
            pass
        def test_2(self):
            pass
    def setUp(self):
        self.loadSuite(TestSkipMethods.SkippingTests)
    def test_counting(self):
        self.assertCount(3)
    def test_results(self):
        # Skips are neither errors nor failures; the run stays successful.
        self.suite(self.reporter)
        self.failUnless(self.reporter.wasSuccessful())
        self.failUnlessEqual(self.reporter.errors, [])
        self.failUnlessEqual(self.reporter.failures, [])
        self.failUnlessEqual(len(self.reporter.skips), 3)
    def test_setUp(self):
        self.loadSuite(TestSkipMethods.SkippingSetUp)
        self.suite(self.reporter)
        self.failUnless(self.reporter.wasSuccessful())
        self.failUnlessEqual(self.reporter.errors, [])
        self.failUnlessEqual(self.reporter.failures, [])
        self.failUnlessEqual(len(self.reporter.skips), 2)
    def test_reasons(self):
        # Each skip reason matches its test name ('skipN' from 'test_skipN').
        self.suite(self.reporter)
        prefix = 'test_'
        # whiteboxing reporter
        for test, reason in self.reporter.skips:
            self.failUnlessEqual(test.shortDescription()[len(prefix):],
                                 str(reason))
class TestSkipClasses(unittest.TestCase, ResultsTestMixin):
    """
    Tests for a 'skip' attribute set on a whole TestCase class: every method
    is skipped (method-level skips take precedence for their reason) and
    setUp is never called.
    """
    class SkippedClass(unittest.TestCase):
        skip = 'class'
        def setUp(self):
            # Records that setUp ran; the tests assert it never does.
            self.__class__._setUpRan = True
        def test_skip1(self):
            raise unittest.SkipTest('skip1')
        def test_skip2(self):
            raise RuntimeError("Ought to skip me")
        test_skip2.skip = 'skip2'
        def test_skip3(self):
            pass
        def test_skip4(self):
            raise RuntimeError("Skip me too")
    def setUp(self):
        self.loadSuite(TestSkipClasses.SkippedClass)
        TestSkipClasses.SkippedClass._setUpRan = False
    def test_counting(self):
        """
        Skipped test methods still contribute to the total test count.
        """
        self.assertCount(4)
    def test_setUpRan(self):
        """
        The C{setUp} method is not called if the class is set to skip.
        """
        self.suite(self.reporter)
        self.assertFalse(TestSkipClasses.SkippedClass._setUpRan)
    def test_results(self):
        """
        Skipped test methods don't cause C{wasSuccessful} to return C{False},
        nor do they contribute to the C{errors} or C{failures} of the reporter.
        They do, however, add elements to the reporter's C{skips} list.
        """
        self.suite(self.reporter)
        self.failUnless(self.reporter.wasSuccessful())
        self.failUnlessEqual(self.reporter.errors, [])
        self.failUnlessEqual(self.reporter.failures, [])
        self.failUnlessEqual(len(self.reporter.skips), 4)
    def test_reasons(self):
        """
        Test methods which raise L{unittest.SkipTest} or have their C{skip}
        attribute set to something are skipped.
        """
        self.suite(self.reporter)
        # test_skip2's own reason overrides the class-level 'class' reason.
        expectedReasons = ['class', 'skip2', 'class', 'class']
        # whitebox reporter
        reasonsGiven = [reason for test, reason in self.reporter.skips]
        self.assertEquals(expectedReasons, reasonsGiven)
class TestTodo(unittest.TestCase, ResultsTestMixin):
    """
    Tests for the 'todo' marker on test methods: failures and errors become
    expectedFailures, while a passing todo test becomes an unexpectedSuccess.
    """
    class TodoTests(unittest.TestCase):
        def test_todo1(self):
            self.fail("deliberate failure")
        test_todo1.todo = "todo1"
        def test_todo2(self):
            raise RuntimeError("deliberate error")
        test_todo2.todo = "todo2"
        def test_todo3(self):
            """unexpected success"""
        test_todo3.todo = 'todo3'
    def setUp(self):
        self.loadSuite(TestTodo.TodoTests)
    def test_counting(self):
        self.assertCount(3)
    def test_results(self):
        # Todo results never count as errors, failures, or skips.
        self.suite(self.reporter)
        self.failUnless(self.reporter.wasSuccessful())
        self.failUnlessEqual(self.reporter.errors, [])
        self.failUnlessEqual(self.reporter.failures, [])
        self.failUnlessEqual(self.reporter.skips, [])
        self.failUnlessEqual(len(self.reporter.expectedFailures), 2)
        self.failUnlessEqual(len(self.reporter.unexpectedSuccesses), 1)
    def test_expectedFailures(self):
        self.suite(self.reporter)
        expectedReasons = ['todo1', 'todo2']
        reasonsGiven = [ r.reason
                         for t, e, r in self.reporter.expectedFailures ]
        self.failUnlessEqual(expectedReasons, reasonsGiven)
    def test_unexpectedSuccesses(self):
        self.suite(self.reporter)
        expectedReasons = ['todo3']
        reasonsGiven = [ r.reason
                         for t, r in self.reporter.unexpectedSuccesses ]
        self.failUnlessEqual(expectedReasons, reasonsGiven)
class TestTodoClass(unittest.TestCase, ResultsTestMixin):
    """
    Tests for a 'todo' attribute set on a whole TestCase class: it applies to
    every method, with method-level todos taking precedence for their reason.
    """
    class TodoClass(unittest.TestCase):
        def test_todo1(self):
            pass
        test_todo1.todo = "method"
        def test_todo2(self):
            pass
        def test_todo3(self):
            self.fail("Deliberate Failure")
        test_todo3.todo = "method"
        def test_todo4(self):
            self.fail("Deliberate Failure")
    TodoClass.todo = "class"
    def setUp(self):
        self.loadSuite(TestTodoClass.TodoClass)
    def test_counting(self):
        self.assertCount(4)
    def test_results(self):
        self.suite(self.reporter)
        self.failUnless(self.reporter.wasSuccessful())
        self.failUnlessEqual(self.reporter.errors, [])
        self.failUnlessEqual(self.reporter.failures, [])
        self.failUnlessEqual(self.reporter.skips, [])
        self.failUnlessEqual(len(self.reporter.expectedFailures), 2)
        self.failUnlessEqual(len(self.reporter.unexpectedSuccesses), 2)
    def test_expectedFailures(self):
        # Failing todo tests: one marked on the method, one via the class.
        self.suite(self.reporter)
        expectedReasons = ['method', 'class']
        reasonsGiven = [ r.reason
                         for t, e, r in self.reporter.expectedFailures ]
        self.failUnlessEqual(expectedReasons, reasonsGiven)
    def test_unexpectedSuccesses(self):
        # Passing todo tests: one marked on the method, one via the class.
        self.suite(self.reporter)
        expectedReasons = ['method', 'class']
        reasonsGiven = [ r.reason
                         for t, r in self.reporter.unexpectedSuccesses ]
        self.failUnlessEqual(expectedReasons, reasonsGiven)
class TestStrictTodo(unittest.TestCase, ResultsTestMixin):
    """
    Tests for tuple-form todos, C{(ExceptionType(s), reason)}: only the named
    exception types count as expected failures; anything else is a real
    error or failure.
    """
    class Todos(unittest.TestCase):
        def test_todo1(self):
            raise RuntimeError, "expected failure"
        test_todo1.todo = (RuntimeError, "todo1")
        def test_todo2(self):
            raise RuntimeError, "expected failure"
        test_todo2.todo = ((RuntimeError, OSError), "todo2")
        def test_todo3(self):
            raise RuntimeError, "we had no idea!"
        test_todo3.todo = (OSError, "todo3")
        def test_todo4(self):
            raise RuntimeError, "we had no idea!"
        test_todo4.todo = ((OSError, SyntaxError), "todo4")
        def test_todo5(self):
            self.fail("deliberate failure")
        test_todo5.todo = (unittest.FailTest, "todo5")
        def test_todo6(self):
            self.fail("deliberate failure")
        test_todo6.todo = (RuntimeError, "todo6")
        def test_todo7(self):
            pass
        test_todo7.todo = (RuntimeError, "todo7")
    def setUp(self):
        self.loadSuite(TestStrictTodo.Todos)
    def test_counting(self):
        self.assertCount(7)
    def test_results(self):
        # todo3/todo4 raise the wrong type -> errors; todo6 fails with the
        # wrong type -> failure; so the run is not successful.
        self.suite(self.reporter)
        self.failIf(self.reporter.wasSuccessful())
        self.failUnlessEqual(len(self.reporter.errors), 2)
        self.failUnlessEqual(len(self.reporter.failures), 1)
        self.failUnlessEqual(len(self.reporter.expectedFailures), 3)
        self.failUnlessEqual(len(self.reporter.unexpectedSuccesses), 1)
        self.failUnlessEqual(self.reporter.skips, [])
    def test_expectedFailures(self):
        self.suite(self.reporter)
        expectedReasons = ['todo1', 'todo2', 'todo5']
        reasonsGotten = [ r.reason
                          for t, e, r in self.reporter.expectedFailures ]
        self.failUnlessEqual(expectedReasons, reasonsGotten)
    def test_unexpectedSuccesses(self):
        self.suite(self.reporter)
        expectedReasons = [([RuntimeError], 'todo7')]
        reasonsGotten = [ (r.errors, r.reason)
                          for t, r in self.reporter.unexpectedSuccesses ]
        self.failUnlessEqual(expectedReasons, reasonsGotten)
class TestCleanup(unittest.TestCase):
    """
    Tests that trial reports a dirty reactor (leftover sockets or pending
    delayed calls) as a L{util.DirtyReactorAggregateError}.
    """
    def setUp(self):
        self.result = reporter.Reporter(StringIO.StringIO())
        self.loader = runner.TestLoader()
    def testLeftoverSockets(self):
        """
        Trial reports a L{util.DirtyReactorAggregateError} if a test leaves
        sockets behind.
        """
        suite = self.loader.loadMethod(
            erroneous.SocketOpenTest.test_socketsLeftOpen)
        suite.run(self.result)
        self.failIf(self.result.wasSuccessful())
        # socket cleanup happens at end of class's tests.
        # all the tests in the class are successful, even if the suite
        # fails
        self.assertEqual(self.result.successes, 1)
        failure = self.result.errors[0][1]
        self.failUnless(failure.check(util.DirtyReactorAggregateError))
    def testLeftoverPendingCalls(self):
        """
        Trial reports a L{util.DirtyReactorAggregateError} and fails the test
        if a test leaves a L{DelayedCall} hanging.
        """
        suite = erroneous.ReactorCleanupTests('test_leftoverPendingCalls')
        suite.run(self.result)
        self.failIf(self.result.wasSuccessful())
        failure = self.result.errors[0][1]
        self.assertEqual(self.result.successes, 0)
        self.failUnless(failure.check(util.DirtyReactorAggregateError))
class FixtureTest(unittest.TestCase):
    """
    Tests for broken fixture helper methods (e.g. setUp, tearDown).
    """
    def setUp(self):
        self.reporter = reporter.Reporter()
        self.loader = runner.TestLoader()
    def testBrokenSetUp(self):
        """
        When setUp fails, the error is recorded in the result object.
        """
        self.loader.loadClass(erroneous.TestFailureInSetUp).run(self.reporter)
        self.assert_(len(self.reporter.errors) > 0)
        # The recorded error is the FoolishError raised by the fixture.
        self.assert_(isinstance(self.reporter.errors[0][1].value,
                                erroneous.FoolishError))
    def testBrokenTearDown(self):
        """
        When tearDown fails, the error is recorded in the result object.
        """
        suite = self.loader.loadClass(erroneous.TestFailureInTearDown)
        suite.run(self.reporter)
        errors = self.reporter.errors
        self.assert_(len(errors) > 0)
        self.assert_(isinstance(errors[0][1].value, erroneous.FoolishError))
class SuppressionTest(unittest.TestCase):
    """
    Tests for warning suppressions declared at method, class, and module
    scope, and for the precedence between those scopes.
    """
    def runTests(self, suite):
        suite.run(reporter.TestResult())
    def setUp(self):
        self.loader = runner.TestLoader()
    def test_suppressMethod(self):
        """
        A suppression set on a test method prevents warnings emitted by that
        test method which the suppression matches from being emitted.
        """
        self.runTests(self.loader.loadMethod(
            suppression.TestSuppression.testSuppressMethod))
        warningsShown = self.flushWarnings([
            suppression.TestSuppression._emit])
        self.assertEqual(
            warningsShown[0]['message'], suppression.CLASS_WARNING_MSG)
        self.assertEqual(
            warningsShown[1]['message'], suppression.MODULE_WARNING_MSG)
        self.assertEqual(len(warningsShown), 2)
    def test_suppressClass(self):
        """
        A suppression set on a L{TestCase} subclass prevents warnings emitted
        by any test methods defined on that class which match the suppression
        from being emitted.
        """
        self.runTests(self.loader.loadMethod(
            suppression.TestSuppression.testSuppressClass))
        warningsShown = self.flushWarnings([
            suppression.TestSuppression._emit])
        self.assertEqual(
            warningsShown[0]['message'], suppression.METHOD_WARNING_MSG)
        self.assertEqual(
            warningsShown[1]['message'], suppression.MODULE_WARNING_MSG)
        self.assertEqual(len(warningsShown), 2)
    def test_suppressModule(self):
        """
        A suppression set on a module prevents warnings emitted by any test
        mewthods defined in that module which match the suppression from being
        emitted.
        """
        self.runTests(self.loader.loadMethod(
            suppression.TestSuppression2.testSuppressModule))
        warningsShown = self.flushWarnings([
            suppression.TestSuppression._emit])
        self.assertEqual(
            warningsShown[0]['message'], suppression.METHOD_WARNING_MSG)
        self.assertEqual(
            warningsShown[1]['message'], suppression.CLASS_WARNING_MSG)
        self.assertEqual(len(warningsShown), 2)
    def test_overrideSuppressClass(self):
        """
        The suppression set on a test method completely overrides a suppression
        with wider scope; if it does not match a warning emitted by that test
        method, the warning is emitted, even if a wider suppression matches.
        """
        case = self.loader.loadMethod(
            suppression.TestSuppression.testOverrideSuppressClass)
        self.runTests(case)
        warningsShown = self.flushWarnings([
            suppression.TestSuppression._emit])
        self.assertEqual(
            warningsShown[0]['message'], suppression.METHOD_WARNING_MSG)
        self.assertEqual(
            warningsShown[1]['message'], suppression.CLASS_WARNING_MSG)
        self.assertEqual(
            warningsShown[2]['message'], suppression.MODULE_WARNING_MSG)
        self.assertEqual(len(warningsShown), 3)
class GCMixin:
    """
    I provide a few mock tests that log setUp, tearDown, test execution and
    garbage collection. I'm used to test whether gc.collect gets called.
    """
    class BasicTest(unittest.TestCase):
        def setUp(self):
            self._log('setUp')
        def test_foo(self):
            self._log('test')
        def tearDown(self):
            self._log('tearDown')
    class ClassTest(unittest.TestCase):
        def test_1(self):
            self._log('test1')
        def test_2(self):
            self._log('test2')
    def _log(self, msg):
        # Shared event log; also bound onto the fixture classes in setUp.
        self._collectCalled.append(msg)
    def collect(self):
        """Fake gc.collect"""
        self._log('collect')
    def setUp(self):
        self._collectCalled = []
        self.BasicTest._log = self.ClassTest._log = self._log
        # Patch gc.collect with the fake so collections are observable.
        self._oldCollect = gc.collect
        gc.collect = self.collect
    def tearDown(self):
        # Restore the real gc.collect.
        gc.collect = self._oldCollect
class TestGarbageCollectionDefault(GCMixin, unittest.TestCase):
    """
    Tests for the default (non-forced) garbage collection behaviour.
    """
    def test_collectNotDefault(self):
        """
        By default, tests should not force garbage collection.
        """
        test = self.BasicTest('test_foo')
        result = reporter.TestResult()
        test.run(result)
        # gc.collect (patched by GCMixin.setUp) must never appear in the log.
        # assertEqual replaces the deprecated failUnlessEqual alias.
        self.assertEqual(self._collectCalled, ['setUp', 'test', 'tearDown'])
class TestGarbageCollection(GCMixin, unittest.TestCase):
    """
    Tests for forced garbage collection around each test.
    """
    def test_collectCalled(self):
        """
        test gc.collect is called before and after each test.
        """
        # Use self.BasicTest (inherited from GCMixin) for consistency with
        # TestGarbageCollectionDefault rather than naming the class directly.
        test = self.BasicTest('test_foo')
        test = unittest._ForceGarbageCollectionDecorator(test)
        result = reporter.TestResult()
        test.run(result)
        # assertEqual replaces the deprecated failUnlessEqual alias.
        self.assertEqual(
            self._collectCalled,
            ['collect', 'setUp', 'test', 'tearDown', 'collect'])
class TestUnhandledDeferred(unittest.TestCase):
    # Verifies that _ForceGarbageCollectionDecorator surfaces unhandled
    # Deferred failures, using the deliberately-leaky weird.TestBleeding.
    def setUp(self):
        from twisted.trial.test import weird
        # test_unhandledDeferred creates a cycle. we need explicit control of gc
        gc.disable()
        self.test1 = unittest._ForceGarbageCollectionDecorator(
            weird.TestBleeding('test_unhandledDeferred'))

    def test_isReported(self):
        """
        Forcing garbage collection should cause unhandled Deferreds to be
        reported as errors.
        """
        result = reporter.TestResult()
        self.test1(result)
        self.assertEqual(len(result.errors), 1,
                         'Unhandled deferred passed without notice')

    def test_doesntBleed(self):
        """
        Forcing garbage collection in the test should mean that there are
        no unreachable cycles immediately after the test completes.
        """
        result = reporter.TestResult()
        self.test1(result)
        self.flushLoggedErrors()  # test1 logs errors that get caught by us.
        # test1 created unreachable cycle.
        # it & all others should have been collected by now.
        n = gc.collect()
        self.assertEqual(n, 0, 'unreachable cycle still existed')
        # check that last gc.collect didn't log more errors
        x = self.flushLoggedErrors()
        self.assertEqual(len(x), 0, 'Errors logged after gc.collect')

    def tearDown(self):
        # Undo setUp's gc.disable() and drop any remaining logged errors so
        # they do not fail subsequent tests.
        gc.collect()
        gc.enable()
        self.flushLoggedErrors()
class TestAddCleanup(unittest.TestCase):
    """
    Test the addCleanup method of TestCase.
    """
    class MockTest(unittest.TestCase):
        # Instrumented TestCase that records its lifecycle events in self.log.
        def setUp(self):
            self.log = ['setUp']

        def brokenSetUp(self):
            # setUp replacement that errors out after initialising the log.
            self.log = ['setUp']
            raise RuntimeError("Deliberate failure")

        def skippingSetUp(self):
            # setUp replacement that skips after initialising the log.
            self.log = ['setUp']
            raise unittest.SkipTest("Don't do this")

        def append(self, thing):
            self.log.append(thing)

        def tearDown(self):
            self.log.append('tearDown')

        def runTest(self):
            self.log.append('runTest')

    def setUp(self):
        unittest.TestCase.setUp(self)
        self.result = reporter.TestResult()
        self.test = TestAddCleanup.MockTest()

    def test_addCleanupCalledIfSetUpFails(self):
        """
        Callables added with C{addCleanup} are run even if setUp fails.
        """
        self.test.setUp = self.test.brokenSetUp
        self.test.addCleanup(self.test.append, 'foo')
        self.test.run(self.result)
        self.assertEqual(['setUp', 'foo'], self.test.log)

    def test_addCleanupCalledIfSetUpSkips(self):
        """
        Callables added with C{addCleanup} are run even if setUp raises
        L{SkipTest}. This allows test authors to reliably provide clean up
        code using C{addCleanup}.
        """
        self.test.setUp = self.test.skippingSetUp
        self.test.addCleanup(self.test.append, 'foo')
        self.test.run(self.result)
        self.assertEqual(['setUp', 'foo'], self.test.log)

    def test_addCleanupCalledInReverseOrder(self):
        """
        Callables added with C{addCleanup} should be called before C{tearDown}
        in reverse order of addition.
        """
        self.test.addCleanup(self.test.append, "foo")
        self.test.addCleanup(self.test.append, 'bar')
        self.test.run(self.result)
        self.assertEqual(['setUp', 'runTest', 'bar', 'foo', 'tearDown'],
                         self.test.log)

    def test_addCleanupWaitsForDeferreds(self):
        """
        If an added callable returns a L{Deferred}, then the test should wait
        until that L{Deferred} has fired before running the next cleanup
        method.
        """
        def cleanup(message):
            d = defer.Deferred()
            reactor.callLater(0, d.callback, message)
            return d.addCallback(self.test.append)
        self.test.addCleanup(self.test.append, 'foo')
        self.test.addCleanup(cleanup, 'bar')
        self.test.run(self.result)
        self.assertEqual(['setUp', 'runTest', 'bar', 'foo', 'tearDown'],
                         self.test.log)

    def test_errorInCleanupIsCaptured(self):
        """
        Errors raised in cleanup functions should be treated like errors in
        C{tearDown}. They should be added as errors and fail the test. Skips,
        todos and failures are all treated as errors.
        """
        self.test.addCleanup(self.test.fail, 'foo')
        self.test.run(self.result)
        # assertFalse replaces the deprecated failIf alias.
        self.assertFalse(self.result.wasSuccessful())
        self.assertEqual(1, len(self.result.errors))
        [(test, error)] = self.result.errors
        self.assertEqual(test, self.test)
        self.assertEqual(error.getErrorMessage(), 'foo')

    def test_cleanupsContinueRunningAfterError(self):
        """
        If a cleanup raises an error then that does not stop the other
        cleanups from being run.
        """
        self.test.addCleanup(self.test.append, 'foo')
        self.test.addCleanup(self.test.fail, 'bar')
        self.test.run(self.result)
        self.assertEqual(['setUp', 'runTest', 'foo', 'tearDown'],
                         self.test.log)
        self.assertEqual(1, len(self.result.errors))
        [(test, error)] = self.result.errors
        self.assertEqual(test, self.test)
        self.assertEqual(error.getErrorMessage(), 'bar')

    def test_multipleErrorsReported(self):
        """
        If more than one cleanup fails, then the test should fail with more
        than one error.
        """
        self.test.addCleanup(self.test.fail, 'foo')
        self.test.addCleanup(self.test.fail, 'bar')
        self.test.run(self.result)
        self.assertEqual(['setUp', 'runTest', 'tearDown'],
                         self.test.log)
        self.assertEqual(2, len(self.result.errors))
        [(test1, error1), (test2, error2)] = self.result.errors
        self.assertEqual(test1, self.test)
        self.assertEqual(test2, self.test)
        # Cleanups run in reverse order of addition, so 'bar' fails first.
        self.assertEqual(error1.getErrorMessage(), 'bar')
        self.assertEqual(error2.getErrorMessage(), 'foo')
class TestSuiteClearing(unittest.TestCase):
    """
    Tests for our extension that allows us to clear out a L{TestSuite}.
    """

    def _assertSuiteClearsCorrectly(self, suite):
        # Shared body: populate the suite, sanity-check it, clear, verify.
        suite.addTest(unittest.TestCase())
        # Double check that the test suite actually has something in it.
        self.assertEqual(1, suite.countTestCases())
        unittest._clearSuite(suite)
        self.assertEqual(0, suite.countTestCases())

    def test_clearSuite(self):
        """
        Calling L{unittest._clearSuite} on a populated L{TestSuite} removes
        all tests.
        """
        self._assertSuiteClearsCorrectly(unittest.TestSuite())

    def test_clearPyunitSuite(self):
        """
        Calling L{unittest._clearSuite} on a populated standard library
        L{TestSuite} removes all tests.

        This test is important since C{_clearSuite} operates by mutating
        internal variables.
        """
        # The module-level "unittest" is Trial's; pull in the stdlib one.
        import unittest as pyunit
        self._assertSuiteClearsCorrectly(pyunit.TestSuite())
class TestTestDecorator(unittest.TestCase):
    """
    Tests for our test decoration features.
    """
    def assertTestsEqual(self, observed, expected):
        """
        Assert that the given decorated tests are equal.
        """
        self.assertEqual(observed.__class__, expected.__class__,
                         "Different class")
        observedOriginal = getattr(observed, '_originalTest', None)
        expectedOriginal = getattr(expected, '_originalTest', None)
        self.assertIdentical(observedOriginal, expectedOriginal)
        if observedOriginal is expectedOriginal is None:
            # Undecorated tests must be the very same object.
            self.assertIdentical(observed, expected)

    def assertSuitesEqual(self, observed, expected):
        """
        Assert that the given test suites with decorated tests are equal.
        """
        self.assertEqual(observed.__class__, expected.__class__,
                         "Different class")
        self.assertEqual(len(observed._tests), len(expected._tests),
                         "Different number of tests.")
        for observedTest, expectedTest in zip(observed._tests,
                                              expected._tests):
            if getattr(observedTest, '_tests', None) is not None:
                # Nested suite: recurse.
                self.assertSuitesEqual(observedTest, expectedTest)
            else:
                self.assertTestsEqual(observedTest, expectedTest)

    def test_usesAdaptedReporterWithRun(self):
        """
        For decorated tests, C{run} uses a result adapter that preserves the
        test decoration for calls to C{addError}, C{startTest} and the like.

        See L{reporter._AdaptedReporter}.
        """
        test = unittest.TestCase()
        decoratedTest = unittest.TestDecorator(test)
        result = LoggingReporter()
        decoratedTest.run(result)
        self.assertTestsEqual(result.test, decoratedTest)

    def test_usesAdaptedReporterWithCall(self):
        """
        For decorated tests, C{__call__} uses a result adapter that preserves
        the test decoration for calls to C{addError}, C{startTest} and the
        like.

        See L{reporter._AdaptedReporter}.
        """
        test = unittest.TestCase()
        decoratedTest = unittest.TestDecorator(test)
        result = LoggingReporter()
        decoratedTest(result)
        self.assertTestsEqual(result.test, decoratedTest)

    def test_decorateSingleTest(self):
        """
        Calling L{decorate} on a single test case returns the test case
        decorated with the provided decorator.
        """
        test = unittest.TestCase()
        decoratedTest = unittest.decorate(test, unittest.TestDecorator)
        self.assertTestsEqual(unittest.TestDecorator(test), decoratedTest)

    def test_decorateTestSuite(self):
        """
        Calling L{decorate} on a test suite will return a test suite with
        each test decorated with the provided decorator.
        """
        test = unittest.TestCase()
        suite = unittest.TestSuite([test])
        decoratedTest = unittest.decorate(suite, unittest.TestDecorator)
        self.assertSuitesEqual(
            decoratedTest, unittest.TestSuite([unittest.TestDecorator(test)]))

    def test_decorateInPlaceMutatesOriginal(self):
        """
        Calling L{decorate} on a test suite will mutate the original suite.
        """
        test = unittest.TestCase()
        suite = unittest.TestSuite([test])
        decoratedTest = unittest.decorate(
            suite, unittest.TestDecorator)
        self.assertSuitesEqual(
            decoratedTest, unittest.TestSuite([unittest.TestDecorator(test)]))
        # The original suite itself now holds the decorated test.
        self.assertSuitesEqual(
            suite, unittest.TestSuite([unittest.TestDecorator(test)]))

    def test_decorateTestSuiteReferences(self):
        """
        When decorating a test suite in-place, the number of references to the
        test objects in that test suite should stay the same.

        Previously, L{unittest.decorate} recreated a test suite, so the
        original suite kept references to the test objects. This test is here
        to ensure the problem doesn't reappear again.
        """
        getrefcount = getattr(sys, 'getrefcount', None)
        if getrefcount is None:
            raise unittest.SkipTest(
                "getrefcount not supported on this platform")
        test = unittest.TestCase()
        suite = unittest.TestSuite([test])
        count1 = getrefcount(test)
        decoratedTest = unittest.decorate(suite, unittest.TestDecorator)
        count2 = getrefcount(test)
        # assertEqual replaces the deprecated assertEquals alias.
        self.assertEqual(count1, count2)

    def test_decorateNestedTestSuite(self):
        """
        Calling L{decorate} on a test suite with nested suites will return a
        test suite that maintains the same structure, but with all tests
        decorated.
        """
        test = unittest.TestCase()
        suite = unittest.TestSuite([unittest.TestSuite([test])])
        decoratedTest = unittest.decorate(suite, unittest.TestDecorator)
        expected = unittest.TestSuite(
            [unittest.TestSuite([unittest.TestDecorator(test)])])
        self.assertSuitesEqual(decoratedTest, expected)

    def test_decorateDecoratedSuite(self):
        """
        Calling L{decorate} on a test suite with already-decorated tests
        decorates all of the tests in the suite again.
        """
        test = unittest.TestCase()
        decoratedTest = unittest.decorate(test, unittest.TestDecorator)
        redecoratedTest = unittest.decorate(decoratedTest,
                                            unittest.TestDecorator)
        self.assertTestsEqual(redecoratedTest,
                              unittest.TestDecorator(decoratedTest))

    def test_decoratePreservesSuite(self):
        """
        Tests can be in non-standard suites. L{decorate} preserves the
        non-standard suites when it decorates the tests.
        """
        test = unittest.TestCase()
        suite = runner.DestructiveTestSuite([test])
        decorated = unittest.decorate(suite, unittest.TestDecorator)
        self.assertSuitesEqual(
            decorated,
            runner.DestructiveTestSuite([unittest.TestDecorator(test)]))
class TestMonkeyPatchSupport(unittest.TestCase):
    """
    Tests for the patch() helper method in L{unittest.TestCase}.
    """

    def setUp(self):
        # An attribute on this instance whose value the tests patch/restore.
        self.originalValue = 'original'
        self.patchedValue = 'patched'
        self.objectToPatch = self.originalValue
        self.test = unittest.TestCase()

    def test_patch(self):
        """
        Calling C{patch()} on a test monkey patches the specified object and
        attribute.
        """
        self.test.patch(self, 'objectToPatch', self.patchedValue)
        self.assertEqual(self.objectToPatch, self.patchedValue)

    def test_patchRestoredAfterRun(self):
        """
        Any monkey patches introduced by a test using C{patch()} are reverted
        after the test has run.
        """
        self.test.patch(self, 'objectToPatch', self.patchedValue)
        self.test.run(reporter.Reporter())
        self.assertEqual(self.objectToPatch, self.originalValue)

    def test_revertDuringTest(self):
        """
        C{patch()} returns a L{monkey.MonkeyPatcher} object that can be used
        to restore the original values before the end of the test.
        """
        patch = self.test.patch(self, 'objectToPatch', self.patchedValue)
        patch.restore()
        self.assertEqual(self.objectToPatch, self.originalValue)

    def test_revertAndRepatch(self):
        """
        The returned L{monkey.MonkeyPatcher} object can re-apply the patch
        during the test run.
        """
        patch = self.test.patch(self, 'objectToPatch', self.patchedValue)
        patch.restore()
        patch.patch()
        self.assertEqual(self.objectToPatch, self.patchedValue)

    def test_successivePatches(self):
        """
        Successive patches are applied and reverted just like a single patch.
        """
        self.test.patch(self, 'objectToPatch', self.patchedValue)
        self.assertEqual(self.objectToPatch, self.patchedValue)
        self.test.patch(self, 'objectToPatch', 'second value')
        self.assertEqual(self.objectToPatch, 'second value')
        self.test.run(reporter.Reporter())
        # Running the test unwinds *all* patches back to the original.
        self.assertEqual(self.objectToPatch, self.originalValue)
class TestIterateTests(unittest.TestCase):
    """
    L{_iterateTests} returns a list of all test cases in a test suite or test
    case.
    """

    def test_iterateTestCase(self):
        """
        L{_iterateTests} on a single test case returns a list containing that
        test case.
        """
        test = unittest.TestCase()
        self.assertEqual([test], list(unittest._iterateTests(test)))

    def test_iterateSingletonTestSuite(self):
        """
        L{_iterateTests} on a test suite that contains a single test case
        returns a list containing that test case.
        """
        test = unittest.TestCase()
        self.assertEqual(
            [test], list(unittest._iterateTests(runner.TestSuite([test]))))

    def test_iterateNestedTestSuite(self):
        """
        L{_iterateTests} returns tests that are in nested test suites.
        """
        test = unittest.TestCase()
        nested = runner.TestSuite([runner.TestSuite([test])])
        self.assertEqual([test], list(unittest._iterateTests(nested)))

    def test_iterateIsLeftToRightDepthFirst(self):
        """
        L{_iterateTests} returns tests in left-to-right, depth-first order.
        """
        test = unittest.TestCase()
        # The nested suite comes first, so its leaf precedes self.
        suite = runner.TestSuite([runner.TestSuite([test]), self])
        self.assertEqual([test, self], list(unittest._iterateTests(suite)))
| apache-2.0 |
archf/ansible | test/units/modules/network/radware/test_vdirect_file.py | 6 | 8284 | # -*- coding: utf-8 -*-
#
# Copyright 2017 Radware LTD.
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
import os
from mock import patch, MagicMock
from ansible.compat.tests import unittest
from ansible.compat.tests.mock import patch
# Indexes into the 4-element responses returned by the vDirect REST client:
# (status, reason, string body, parsed data).
RESP_STATUS = 0
RESP_REASON = 1
RESP_STR = 2
RESP_DATA = 3

# Full module-parameter dict with every vdirect_* option unset; tests copy
# this and tweak or delete individual keys.
NONE_PARAMS = {'vdirect_ip': None, 'vdirect_user': None, 'vdirect_password': None,
               'vdirect_wait': None, 'vdirect_secondary_ip': None,
               'vdirect_https_port': None, 'vdirect_http_port': None,
               'vdirect_timeout': None, 'vdirect_use_ssl': None, 'vdirect_validate_certs': None}
@patch('vdirect_client.rest_client.RestClient')
class RestClient():
    """Minimal stand-in for vdirect_client.rest_client.RestClient.

    Accepts (and ignores) the same constructor arguments as the real REST
    client so the module under test can instantiate it freely.
    """

    def __init__(self, vdirect_ip=None, vdirect_user=None, vdirect_password=None, wait=None,
                 secondary_vdirect_ip=None, https_port=None, http_port=None,
                 timeout=None, https=None, strict_http_results=None,
                 verify=None):
        # Deliberately a no-op: the tests only need the object to exist.
        pass
@patch('vdirect_client.rest_client.Template')
class Template():
    """Stand-in for vdirect_client.rest_client.Template.

    Canned results for the two API calls are stored on the class so each
    test can configure them before the module under test runs.
    """

    create_from_source_result = None
    upload_source_result = None

    def __init__(self, client):
        self.client = client

    @classmethod
    def set_create_from_source_result(cls, result):
        Template.create_from_source_result = result

    @classmethod
    def set_upload_source_result(cls, result):
        Template.upload_source_result = result

    def create_from_source(self, data, name=None, tenant=None, fail_if_invalid=False):
        # All arguments are ignored; the configured result is returned as-is.
        return Template.create_from_source_result

    def upload_source(self, data, name=None, tenant=None, fail_if_invalid=False):
        return Template.upload_source_result
@patch('vdirect_client.rest_client.WorkflowTemplate')
class WorkflowTemplate():
    """Stand-in for vdirect_client.rest_client.WorkflowTemplate.

    Canned results for the two API calls are stored on the class so each
    test can configure them before the module under test runs.
    """

    create_template_from_archive_result = None
    update_archive_result = None

    def __init__(self, client):
        self.client = client

    @classmethod
    def set_create_template_from_archive_result(cls, result):
        WorkflowTemplate.create_template_from_archive_result = result

    @classmethod
    def set_update_archive_result(cls, result):
        WorkflowTemplate.update_archive_result = result

    def create_template_from_archive(self, data, validate=False, fail_if_invalid=False, tenant=None):
        # All arguments are ignored; the configured result is returned as-is.
        return WorkflowTemplate.create_template_from_archive_result

    def update_archive(self, data, workflow_template_name):
        return WorkflowTemplate.update_archive_result
class TestManager(unittest.TestCase):
    # Tests for ansible.modules.network.radware.vdirect_file.
    #
    # The optional ``vdirect_client`` dependency is replaced with a MagicMock
    # in sys.modules (via patch.dict) before the module under test is
    # imported, and the mock classes above stand in for the real REST client
    # classes where needed.
    def setUp(self):
        pass

    def test_missing_parameter(self, *args):
        module_mock = MagicMock()
        with patch.dict('sys.modules', **{
            'vdirect_client': module_mock,
            'vdirect_client.rest_client': module_mock,
        }):
            from ansible.modules.network.radware import vdirect_file

            try:
                # Deleting a required key must make the constructor raise.
                params = NONE_PARAMS.copy()
                del params['vdirect_ip']
                vdirect_file.VdirectFile(params)
                self.assertFalse("KeyError was not thrown for missing parameter")
            except KeyError:
                assert True

    def test_wrong_file_extension(self, *args):
        module_mock = MagicMock()
        with patch.dict('sys.modules', **{
            'vdirect_client': module_mock,
            'vdirect_client.rest_client': module_mock,
        }):
            from ansible.modules.network.radware import vdirect_file

            module_mock.RESP_STATUS = 0
            file = vdirect_file.VdirectFile(NONE_PARAMS)
            # Only .vm (template) and .zip (workflow) files are supported.
            result = file.upload("file.??")
            assert result == vdirect_file.WRONG_EXTENSION_ERROR

    def test_missing_file(self, *args):
        module_mock = MagicMock()
        with patch.dict('sys.modules', **{
            'vdirect_client': module_mock,
            'vdirect_client.rest_client': module_mock,
        }):
            from ansible.modules.network.radware import vdirect_file

            file = vdirect_file.VdirectFile(NONE_PARAMS)
            try:
                file.upload("missing_file.vm")
                self.assertFalse("IOException was not thrown for missing file")
            except IOError:
                assert True

    def test_template_upload_create_success(self, *args):
        module_mock = MagicMock()
        with patch.dict('sys.modules', **{
            'vdirect_client': module_mock,
            'vdirect_client.rest_client': module_mock,
        }):
            from ansible.modules.network.radware import vdirect_file

            vdirect_file.rest_client.RESP_STATUS = 0
            vdirect_file.rest_client.Template = Template
            # 400 here is the mocked "created" status checked by the module.
            Template.set_create_from_source_result([400])
            file = vdirect_file.VdirectFile(NONE_PARAMS)
            path = os.path.dirname(os.path.abspath(__file__))
            result = file.upload(os.path.join(path, "ct.vm"))
            self.assertEqual(result, vdirect_file.CONFIGURATION_TEMPLATE_CREATED_SUCCESS,
                             'Unexpected result received:' + repr(result))

    def test_template_upload_update_success(self, *args):
        module_mock = MagicMock()
        with patch.dict('sys.modules', **{
            'vdirect_client': module_mock,
            'vdirect_client.rest_client': module_mock,
        }):
            from ansible.modules.network.radware import vdirect_file

            vdirect_file.rest_client.RESP_STATUS = 0
            vdirect_file.rest_client.Template = Template
            # 409 (conflict) on create forces the fall-back to upload/update.
            Template.set_create_from_source_result([409])
            Template.set_upload_source_result([400])
            file = vdirect_file.VdirectFile(NONE_PARAMS)
            path = os.path.dirname(os.path.abspath(__file__))
            result = file.upload(os.path.join(path, "ct.vm"))
            self.assertEqual(result, vdirect_file.CONFIGURATION_TEMPLATE_UPDATED_SUCCESS,
                             'Unexpected result received:' + repr(result))

    def test_workflow_upload_create_success(self, *args):
        module_mock = MagicMock()
        with patch.dict('sys.modules', **{
            'vdirect_client': module_mock,
            'vdirect_client.rest_client': module_mock,
        }):
            from ansible.modules.network.radware import vdirect_file

            vdirect_file.rest_client.RESP_STATUS = 0
            vdirect_file.rest_client.WorkflowTemplate = WorkflowTemplate
            WorkflowTemplate.set_create_template_from_archive_result([400])
            file = vdirect_file.VdirectFile(NONE_PARAMS)
            path = os.path.dirname(os.path.abspath(__file__))
            result = file.upload(os.path.join(path, "wt.zip"))
            self.assertEqual(result, vdirect_file.WORKFLOW_TEMPLATE_CREATED_SUCCESS,
                             'Unexpected result received:' + repr(result))

    def test_workflow_upload_update_success(self, *args):
        module_mock = MagicMock()
        with patch.dict('sys.modules', **{
            'vdirect_client': module_mock,
            'vdirect_client.rest_client': module_mock,
        }):
            from ansible.modules.network.radware import vdirect_file

            vdirect_file.rest_client.RESP_STATUS = 0
            vdirect_file.rest_client.WorkflowTemplate = WorkflowTemplate
            # 409 (conflict) on create forces the fall-back to update_archive.
            WorkflowTemplate.set_create_template_from_archive_result([409])
            WorkflowTemplate.set_update_archive_result([400])
            file = vdirect_file.VdirectFile(NONE_PARAMS)
            path = os.path.dirname(os.path.abspath(__file__))
            result = file.upload(os.path.join(path, "wt.zip"))
            self.assertEqual(result, vdirect_file.WORKFLOW_TEMPLATE_UPDATED_SUCCESS,
                             'Unexpected result received:' + repr(result))
| gpl-3.0 |
bmihelac/django-cookie-consent | cookie_consent/templatetags/cookie_consent_tags.py | 1 | 4045 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django import template
try:
from django.urls import reverse
except ImportError:
from django.core.urlresolvers import reverse
from cookie_consent.util import (
get_accepted_cookies,
get_cookie_string,
get_cookie_value_from_request,
get_cookie_dict_from_request,
are_all_cookies_accepted,
get_not_accepted_or_declined_cookie_groups,
is_cookie_consent_enabled,
)
from cookie_consent.conf import settings
register = template.Library()
@register.filter
def cookie_group_accepted(request, arg):
    """
    Template filter: has the given cookie group been accepted?

    ``arg`` is either a group varname or ``"varname=cookie"``.

    Examples:
    ::

        {{ request|cookie_group_accepted:"analytics" }}
        {{ request|cookie_group_accepted:"analytics=*:.google.com" }}
    """
    varname_and_cookie = arg.split("=")
    return get_cookie_value_from_request(request, *varname_and_cookie) is True
@register.filter
def cookie_group_declined(request, arg):
    """
    Template filter: has the given cookie group been declined?
    """
    varname_and_cookie = arg.split("=")
    return get_cookie_value_from_request(request, *varname_and_cookie) is False
@register.filter
def all_cookies_accepted(request):
    """
    Template filter: True when every cookie group has been accepted for
    this request.
    """
    return are_all_cookies_accepted(request)
@register.simple_tag
def not_accepted_or_declined_cookie_groups(request):
    """
    Template tag returning the cookie groups for which the user has not yet
    given or declined consent.
    """
    return get_not_accepted_or_declined_cookie_groups(request)
@register.filter
def cookie_consent_enabled(request):
    """
    Template filter: is cookie consent handling enabled for this request?
    """
    return is_cookie_consent_enabled(request)
@register.simple_tag
def cookie_consent_accept_url(cookie_groups):
    """
    Template tag returning the URL that accepts the given cookie groups.
    """
    varnames = ",".join(group.varname for group in cookie_groups)
    return reverse("cookie_consent_accept", kwargs={"varname": varnames})
@register.simple_tag
def cookie_consent_decline_url(cookie_groups):
    """
    Template tag returning the URL that declines the given cookie groups.
    """
    varnames = ",".join(group.varname for group in cookie_groups)
    return reverse("cookie_consent_decline", kwargs={"varname": varnames})
@register.simple_tag
def get_accept_cookie_groups_cookie_string(request, cookie_groups):
    """
    Template tag returning an "accept" cookie string suitable for use in
    javascript: each group's varname is mapped to its current version.
    """
    cookie_dic = get_cookie_dict_from_request(request)
    cookie_dic.update(
        (group.varname, group.get_version()) for group in cookie_groups)
    return get_cookie_string(cookie_dic)
@register.simple_tag
def get_decline_cookie_groups_cookie_string(request, cookie_groups):
    """
    Template tag returning a "decline" cookie string suitable for use in
    javascript: each group's varname is mapped to the decline marker.
    """
    cookie_dic = get_cookie_dict_from_request(request)
    decline_marker = settings.COOKIE_CONSENT_DECLINE
    cookie_dic.update(
        (group.varname, decline_marker) for group in cookie_groups)
    return get_cookie_string(cookie_dic)
@register.simple_tag
def js_type_for_cookie_consent(request, varname, cookie=None):
    """
    Tag returning "x/cookie_consent" (so the browser does not execute the
    script) when the script would create a cookie and consent does not
    exist yet; otherwise "text/javascript".

    Example::

        <script type="{% js_type_for_cookie_consent request "social" %}"
                data-varname="social">
            alert("Social cookie accepted");
        </script>
    """
    if not is_cookie_consent_enabled(request):
        # Consent handling disabled: always let the script run.
        allowed = True
    else:
        value = get_cookie_value_from_request(request, varname, cookie)
        # No recorded decision falls back to the opt-out setting.
        allowed = settings.COOKIE_CONSENT_OPT_OUT if value is None else value
    return "text/javascript" if allowed else "x/cookie_consent"
@register.filter
def accepted_cookies(request):
    """
    Template filter returning the varnames of all accepted cookies.

    ::

        {{ request|accepted_cookies }}
    """
    return [cookie.varname for cookie in get_accepted_cookies(request)]
| bsd-2-clause |
gangadhar-kadam/mic-erpnext | patches/february_2013/p09_timesheets.py | 5 | 1776 | import webnotes
def execute():
    """Patch: convert legacy Timesheet Detail rows into Time Log documents.

    Migrates any Custom Fields defined on Timesheet / Timesheet Detail to
    the new Time Log doctype, then creates one draft Time Log per
    Timesheet Detail row, carrying over times, links, notes and metadata.
    (Python 2 / webnotes-era code.)
    """
    # convert timesheet details to time logs
    webnotes.reload_doc("projects", "doctype", "time_log")

    # copy custom fields
    custom_map = {"Timesheet":[], "Timesheet Detail":[]}
    for custom_field in webnotes.conn.sql("""select * from `tabCustom Field` where
        dt in ('Timesheet', 'Timesheet Detail')""", as_dict=True):
        # Remember which doctype each custom field came from so values can
        # be copied onto the Time Log below, then re-target the field.
        custom_map[custom_field.dt].append(custom_field.fieldname)
        custom_field.doctype = "Custom Field"
        custom_field.dt = "Time Log"
        custom_field.insert_after = None
        try:
            cf = webnotes.bean(custom_field).insert()
        except Exception, e:
            # duplicate custom field
            pass

    for name in webnotes.conn.sql_list("""select name from tabTimesheet"""):
        ts = webnotes.bean("Timesheet", name)
        for tsd in ts.doclist.get({"doctype":"Timesheet Detail"}):
            # Null out dangling links so the new Time Log validates.
            if not webnotes.conn.exists("Project", tsd.project_name):
                tsd.project_name = None
            if not webnotes.conn.exists("Task", tsd.task_id):
                tsd.task_id = None
            tl = webnotes.bean({
                "doctype": "Time Log",
                "status": "Draft",
                "from_time": ts.doc.timesheet_date + " " + tsd.act_start_time,
                "to_time": ts.doc.timesheet_date + " " + tsd.act_end_time,
                "activity_type": tsd.activity_type,
                "task": tsd.task_id,
                "project": tsd.project_name,
                "note": ts.doc.notes,
                "file_list": ts.doc.file_list,
                "_user_tags": ts.doc._user_tags,
                "owner": ts.doc.owner,
                "creation": ts.doc.creation,
                "modified_by": ts.doc.modified_by
            })
            # Carry over custom field values from both source doctypes.
            for key in custom_map["Timesheet"]:
                tl.doc.fields[key] = ts.doc.fields.get(key)
            for key in custom_map["Timesheet Detail"]:
                tl.doc.fields[key] = tsd.fields.get(key)
            tl.make_obj()
            tl.controller.set_status()
            tl.controller.calculate_total_hours()
            tl.doc.insert()
| agpl-3.0 |
mousepawgames/diamondquest | src/diamondquest/common/constants.py | 1 | 2259 | """
Constants [DiamondQuest]

Constants used elsewhere in the code.
"""
# LICENSE (BSD-3-Clause)
# Copyright (c) 2020 MousePaw Media.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# 1. Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
#
# 2. Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# 3. Neither the name of the copyright holder nor the names of its contributors
# may be used to endorse or promote products derived from this software without
# specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
# THE POSSIBILITY OF SUCH DAMAGE.
#
# CONTRIBUTING
# See https://www.mousepawmedia.com/developers for information
# on how to contribute to our projects.
# Framerate
FPS = 16
# Key repeat rate in milliseconds
KEY_REPEAT_RATE = 250
# The number of pixels in a single "block" in the source textures.
TEXTURE_RES = 16
TEXTURE_RES_SPRITE = 20 # TODO issue with sprite sheet? this should probably be the same as TEXTURE_RES?
# The number of blocks from top to bottom of screen
BLOCK_COUNT = 8
# Maximum number of blocks in a single map view
MAP_DEPTH = BLOCK_COUNT * 8
# The maximum power level
MAX_POWER_LEVEL = 8
# The game title
TITLE = "DiamondQuest 1.0"
| gpl-3.0 |
mrquim/repository.mrquim | plugin.program.indigo/maintool.py | 4 | 18344 | import datetime
import os
import shutil
import sys
import xbmc
import xbmcgui
from libs import kodi
from libs import viewsetter
# Addon identity and artwork locations.
addon_id = kodi.addon_id
addon = (addon_id, sys.argv)
AddonName = kodi.addon.getAddonInfo('name') + " for Kodi"
artwork = xbmc.translatePath(os.path.join('special://home', 'addons', addon_id, 'art/'))
fanart = artwork + 'fanart.jpg'
messages = xbmc.translatePath(os.path.join('special://home', 'addons', addon_id, 'resources', 'messages/'))
# Shorthands for frequently used Kodi APIs.
execute = xbmc.executebuiltin
dp = xbmcgui.DialogProgress()
dialog = xbmcgui.Dialog()
# Resolved filesystem locations the maintenance tools operate on.
userdata_path = xbmc.translatePath('special://userdata/')
database_path = xbmc.translatePath('special://userdata/Database')
addon_data = xbmc.translatePath('special://userdata/addon_data')
thumbnail_path = xbmc.translatePath('special://userdata/Thumbnails')
cache_path = os.path.join(xbmc.translatePath('special://home'), 'cache')
temp_path = os.path.join(xbmc.translatePath('special://home'), 'temp')
addons_path = os.path.join(xbmc.translatePath('special://home'), 'addons')
packages_path = os.path.join(xbmc.translatePath('special://home/addons'), 'packages')
def tool_menu():
    """Build the Maintenance Tool directory listing in the Kodi UI."""
    # Prefer the temp folder as "the cache" when it exists; create it if
    # neither cache location is present.
    menu_cache_path = cache_path
    if not os.path.exists(temp_path) and not os.path.exists(cache_path):
        os.makedirs(temp_path)
    if os.path.exists(temp_path):
        menu_cache_path = temp_path
    if not os.path.exists(packages_path):
        os.makedirs(packages_path)
    # Per-folder size labels appended to the menu entries (optional).
    cache_size = ''
    thumb_size = ''
    packages_size = ''
    paths = {menu_cache_path: cache_size, thumbnail_path: thumb_size, packages_path: packages_size}
    if kodi.get_setting("maint_check_folders") == "true":
        for path in paths:
            try:
                paths[path] = ' - [COLOR blue]' + convert_size(get_size(path)) + '[/COLOR]'
            except:
                # NOTE(review): the error label says "thumbnails" even for the
                # cache/packages folders -- probably copy-paste; confirm intent.
                paths[path] = ' - [COLOR red]Error reading thumbnails[/COLOR]'
    # Toggle label/art for "auto maintenance on startup".
    startup_clean = kodi.get_setting("acstartup")
    if startup_clean == "false":
        startup_onoff = "Enable"
        su_art = 'enable_am_startup.png'
    else:
        startup_onoff = "Disable"
        su_art = 'disable_am_startup.png'
    su_desc = startup_onoff + " maintenance on Kodi launch!"
    # Toggle label/art for "weekly auto maintenance" (day 7 means "off").
    weekly_clean = kodi.get_setting("clearday")
    if weekly_clean == "7":
        weekly_onoff = "Enable"
        acw_art = 'enable_am_week.png'
        acw_desc = "Set your device to perform maintenance on a given day each week!"
    else:
        weekly_onoff = "Disable"
        acw_art = 'disable_am_week.png'
        acw_desc = weekly_onoff + " weekly maintenance on Kodi launch!"
    # Toggle label/art for the malicious-script blocker.
    if kodi.get_setting('scriptblock') == 'false':
        scb_onoff = 'Enable'
        # NOTE(review): scb_mode is assigned in both branches but never used;
        # 'toggleblocker' is hard-coded in the addItem call below.
        scb_mode = 'toggleblocker'
        scb_art = 'enable_MSB.png'
    else:
        scb_onoff = 'Disable'
        scb_mode = 'toggleblocker'
        # NOTE(review): likely should be 'disable_MSB.png' here (compare the
        # debug/startup branches) -- confirm that art file exists first.
        scb_art = 'enable_MSB.png'
    scb_desc = scb_onoff + " protection against malicious scripts!"
    # Toggle label/art for Kodi debug logging.
    if not _is_debugging():
        debug_onoff = 'Enable'
        debug_art = 'enabledebug.png'
    else:
        debug_onoff = 'Disable'
        debug_art = 'disabledebug.png'
    debug_desc = debug_onoff + " Debugging!"

    # Maintenance Tool Menu
    kodi.addItem("Clear Cache " + str(paths[menu_cache_path]), '', 'clear_cache', artwork + 'currentcache.png',
                 description="Clear your device cache!")
    kodi.addItem("Delete Thumbnails " + str(paths[thumbnail_path]), '', 'clear_thumbs', artwork + 'currentthumbs.png',
                 description="Delete your Thumbnail cache!")
    kodi.addItem("Delete Packages " + str(paths[packages_path]), '', 'purge_packages', artwork + 'currentpackages.png',
                 description="Delete your addon installation files!")
    kodi.addItem("Delete Crash Logs", '', 'crashlogs', artwork + 'clearcrash.png',
                 description="Clear all crash logs from your device!")
    kodi.addItem("Delete Textures13.db", '', 'deletetextures', artwork + 'currentthumbs.png',
                 description="This will delete the Textures13 database")
    kodi.addDir("Wipe Addons", '', 'wipe_addons', artwork + 'wipe_addons.png',
                description="Erase all your Kodi addons in one shot!")
    kodi.addItem("Run Auto Maintenance", '', 'autoclean', artwork + 'run_am.png',
                 description="Clear your cache, thumbnails and delete addon packages in one click!")
    kodi.addItem(startup_onoff + ' Auto Maintenance on Startup', '', 'autocleanstartup', artwork + su_art,
                 description=su_desc)
    kodi.addItem(weekly_onoff + ' Weekly Auto Maintenance', '', 'autocleanweekly', artwork + acw_art,
                 description=acw_desc)
    kodi.addItem(debug_onoff + " Debugging Mode", '', 'debug_onoff', artwork + debug_art,
                 description=debug_desc)
    kodi.addItem(scb_onoff + " Malicious Scripts Blocker", '', 'toggleblocker', artwork + scb_art,
                 description=scb_desc)
    kodi.addItem("Force Update Addons", '', 'updateaddons', artwork + 'forceupdateaddons.png',
                 description="Force a reload of all Kodi addons and repositories!")
    kodi.addDir("Install Custom Keymaps", '', 'customkeys', artwork + 'custom_keymaps.png',
                description="Get the best experience out of your device-specific remote control!")
    kodi.addItem("Reload Current Skin", '', 'reloadskin', artwork + 'reloadskin.png',
                 description="Reload the skin!")
    viewsetter.set_view("sets")
def delete_cache(auto_clear=False):
    """Delete Kodi cache/temp files while preserving databases and logs.

    auto_clear: when True, skip the confirmation dialog and the
    completion popup (used by the automated maintenance run).
    """
    if not auto_clear:
        if not xbmcgui.Dialog().yesno("Please Confirm",
                                      " Please confirm that you wish to clear",
                                      " your Kodi application cache!",
                                      " ", "Cancel", "Clear"):
            return
    cache_paths = [cache_path, temp_path]
    if xbmc.getCondVisibility('system.platform.ATV2'):
        # Apple TV 2 keeps additional video caches outside the Kodi tree.
        cache_paths.extend([os.path.join('/private/var/mobile/Library/Caches/AppleTV/Video/', 'Other'),
                            os.path.join('/private/var/mobile/Library/Caches/AppleTV/Video/', 'LocalAndRental')])
    # File extensions that must survive a cache clear.
    file_types = ['log', 'db', 'dat', 'socket']
    # Sub-directories that must survive a cache clear.
    directories = ('temp', 'archive_cache')
    for directory in cache_paths:
        if os.path.exists(directory):
            for root, dirs, files in os.walk(directory):
                for f in files:
                    # os.path.splitext is safe for names without a dot;
                    # the original f.split('.')[1] raised IndexError for
                    # extensionless files, which escaped the OSError handler.
                    ext = os.path.splitext(f)[1].lstrip('.')
                    try:
                        if ext not in file_types:
                            os.unlink(os.path.join(root, f))
                    except OSError:
                        pass
                for d in dirs:
                    try:
                        if d not in directories:
                            shutil.rmtree(os.path.join(root, d))
                    except OSError:
                        pass
    if not auto_clear:
        xbmcgui.Dialog().ok(AddonName, "Done Clearing Cache files")
        xbmc.executebuiltin("Container.Refresh")
def delete_thumbnails(auto_clear=False):
    """Delete cached thumbnail image files (keeps the thumbnail databases).

    auto_clear: when True, skip the confirmation dialog and the
    completion popup (used by the automated maintenance run).
    """
    if not auto_clear:
        if not xbmcgui.Dialog().yesno("Delete Thumbnails", "This option deletes all thumbnails",
                                      "Are you sure you want to do this?"):
            return
    status = 'have been'
    if os.path.exists(thumbnail_path):
        # Extensions that must be preserved (thumbnail databases etc.).
        file_types = ('db', 'dat', 'socket')
        for root, dirs, files in os.walk(thumbnail_path):
            for f in files:
                # os.path.splitext is safe for dotless names; the original
                # f.split('.')[1] raised an uncaught IndexError for them.
                if os.path.splitext(f)[1].lstrip('.') not in file_types:
                    try:
                        os.unlink(os.path.join(root, f))
                    except OSError:
                        status = 'could not all be'
    if not auto_clear:
        xbmcgui.Dialog().ok(AddonName, 'Thumbnails %s deleted.' % status)
        xbmc.executebuiltin("Container.Refresh")
def delete_packages(auto_clear=False):
    """Remove downloaded addon installation packages.

    auto_clear: when True, run silently with no dialogs (used by the
    automated maintenance routine).
    """
    if not auto_clear:
        if not xbmcgui.Dialog().yesno('Delete Packages', "Delete Package Cache Files?"):
            return
    packages_dir = xbmc.translatePath('special://home/addons/packages')
    for root, dirs, files in os.walk(packages_dir):
        # Best-effort: a locked or vanished entry aborts only this level.
        try:
            for name in files:
                os.unlink(os.path.join(root, name))
            for name in dirs:
                shutil.rmtree(os.path.join(root, name))
        except OSError:
            pass
    if not auto_clear:
        xbmcgui.Dialog().ok(AddonName, "Deleting Packages all done")
        xbmc.executebuiltin("Container.Refresh")
def delete_crash_logs(auto_clear=False):
    """Delete Kodi crash dumps (*.dmp) and crash log text files (*.txt).

    auto_clear: when True, skip the confirmation and completion dialogs.
    """
    if not auto_clear:
        if not xbmcgui.Dialog().yesno(AddonName, 'Delete Crash Logs', "Do you want to delete old crash logs?"):
            return
    import glob  # hoisted out of the per-directory loop
    cache_directories = (xbmc.translatePath('special://home'),
                         os.path.join(xbmc.translatePath('special://home'), 'cache'),
                         xbmc.translatePath('special://temp'))
    file_types = ('*.dmp', '*.txt')
    for cache_directory in cache_directories:
        if os.path.exists(cache_directory):
            for file_type in file_types:
                # os.path.join fixes the original's bare concatenation
                # (dir + pattern without a separator), which globbed the
                # parent directory with a mangled pattern instead.
                for infile in glob.glob(os.path.join(cache_directory, file_type)):
                    try:
                        os.remove(infile)
                    except OSError:
                        # A locked/vanished log should not abort the sweep.
                        pass
    if not auto_clear:
        xbmcgui.Dialog().ok(AddonName, "Crash logs deleted")
def delete_textures():
    """Delete the Textures13.db database, then shut Kodi down so it rebuilds."""
    if not xbmcgui.Dialog().yesno(AddonName, 'Delete Textures13 Database', "Do you want to delete the Database?"):
        return
    outcome = "has been"
    db_file = os.path.join(database_path, "Textures13.db")
    try:
        os.unlink(db_file)
    except OSError:
        outcome = 'could not be'
    xbmcgui.Dialog().ok(AddonName, 'Textures13.db %s deleted.' % outcome,
                        'Kodi will now shut down for database to rebuild')
    xbmc.executebuiltin('ShutDown')
def wipe_addons():
    """Uninstall every Kodi addon except this tool and a few core helpers.

    Walks the addons directory bottom-up and removes every folder that is
    not on the keep list, then shuts Kodi down so the wipe takes effect.
    """
    if not xbmcgui.Dialog().yesno("Please Confirm",
                                  " Please confirm that you wish to uninstall",
                                  " all addons from your device!",
                                  " ", nolabel='Cancel', yeslabel='Uninstall'):
        return
    # Addons that must survive the wipe (this addon plus core scrapers
    # and the version-check service, matching the original nested checks).
    keep = (addon_id, 'metadata.album.universal', 'metadata.artists.universal',
            'service.xbmc.versioncheck', 'metadata.common.musicbrainz.org')
    try:
        for root, dirs, files in os.walk(addons_path, topdown=False):
            # Flattened from five nested ifs: remove unless whitelisted.
            if root != addons_path and all(marker not in root for marker in keep):
                shutil.rmtree(root)
        xbmcgui.Dialog().ok(AddonName, "Addons Wiped Successfully!",
                            "Click OK to exit Kodi and then restart to complete.")
        xbmc.executebuiltin('ShutDown')
    except Exception:
        # Was a bare except; Exception keeps the best-effort recovery
        # without swallowing SystemExit/KeyboardInterrupt.
        xbmcgui.Dialog().ok(AddonName, "Error Wiping Addons please visit TVADDONS.CO forums")
def debug_toggle():
    """Flip Kodi's debug-logging mode, refresh the view, and notify the user."""
    xbmc.executebuiltin("ToggleDebug")
    xbmc.executebuiltin("Container.Refresh")
    dialog = xbmcgui.Dialog()
    dialog.notification('Debugging', 'Setting Changed!', sound=False)
def toggle_setting(setting_title, setting, restart=False, silent=False):
    """Flip a boolean addon setting between 'true' and 'false'.

    setting_title -- human-readable name shown in dialogs.
    setting       -- addon setting id to toggle.
    restart       -- shut Kodi down afterwards so the change applies.
    silent        -- skip every dialog and notification.
    """
    if not silent:
        # Announce the state we are about to switch TO.
        target_state = 'OFF' if kodi.get_setting(setting) == "true" else 'ON'
        confirmed = xbmcgui.Dialog().yesno(
            setting_title,
            'Please confirm that you wish to TURN %s %s' % (target_state, setting_title),
            '', '', 'Cancel', 'Confirm')
        if not confirmed:
            return
    new_value = 'false' if kodi.get_setting(setting) == 'true' else 'true'
    kodi.set_setting(setting, new_value)
    kodi.log('Toggled setting for ' + setting_title)
    if not silent and not restart:
        xbmcgui.Dialog().notification('', 'Setting Changed!', sound=False)
        xbmc.executebuiltin("Container.Refresh")
    if restart:
        xbmcgui.Dialog().notification('', 'Kodi is shutting down for changes to take effect', sound=False)
        xbmc.executebuiltin('ShutDown')
def auto_weekly_clean_on_off():
    """Toggle the weekly automated maintenance schedule.

    'clearday' == '7' means disabled; any other value is the weekday
    (0-6) maintenance runs on. When enabling, verify the configured size
    caps fit inside the available disk space before accepting them.
    """
    if kodi.get_setting("clearday") == '7':
        if xbmcgui.Dialog().yesno(AddonName, 'Please confirm that you wish to enable weekly automated maintenance.'):
            # Store as a string: this setting is read back and compared
            # against the string '7' above (the original stored an int).
            kodi.set_setting("clearday", str(datetime.datetime.today().weekday()))
            kodi.openSettings(addon_id, id1=5, id2=3)
            available_space, total_space = get_free_space_mb(xbmc.translatePath('special://home'))
            mb_settings = (0, 25, 50, 75, 100)
            while True:
                allotted_space = 0
                for value in ('cachemb', 'thumbsmb', 'packagesmb'):
                    allotted_space += mb_settings[int(kodi.get_setting(value))] * 10 ** 6
                # get_setting returns the strings 'true'/'false'; the original
                # tested the raw string, which is always truthy, so the "Auto"
                # escape hatch could never trigger.
                automb = kodi.get_setting("automb") == 'true'
                if allotted_space >= available_space and not automb:
                    xbmcgui.Dialog().ok("Your settings sizes for Kodi to use are larger than the available drive space",
                                        'Please try lower settings, uninstall uneeded apps and addons,',
                                        'or set kodi size to "Auto" to use the automated settings based on free space')
                    kodi.openSettings(addon_id, id1=5, id2=3)
                else:
                    break
    else:
        if xbmcgui.Dialog().yesno(AddonName, 'Please confirm that you wish to disable weekly automated maintenance.'):
            kodi.set_setting("clearday", '7')
    xbmc.executebuiltin("Container.Refresh")
def auto_clean(auto_clear=False):
    """Run cache/thumbnail/package/crash-log maintenance per user settings.

    Each area is cleared when its configured size cap is exceeded, when
    its "always clear" flag is set (cap == 0), or -- in auto mode -- when
    the cap exceeds a fixed share of the free disk space.

    auto_clear: when True, run silently (no confirmation or summary dialog).
    """
    if not auto_clear:
        if not xbmcgui.Dialog().yesno(AddonName, 'Selecting Yes runs maintenance based on your settings.',
                                      'Do you wish to continue?', yeslabel='Yes', nolabel='No'):
            return
    available_space, total_space = get_free_space_mb(xbmc.translatePath('special://home'))
    mb_settings = (0, 25, 50, 75, 100)
    # Space already allotted to the caches is treated as reclaimable.
    for value in ('cachemb', 'thumbsmb', 'packagesmb'):
        available_space += mb_settings[int(kodi.get_setting(value))] * 10 ** 6
    # get_setting returns the strings 'true'/'false'; the original used the
    # raw (always truthy) string as a boolean, so auto mode was always on.
    automb = kodi.get_setting("automb") == 'true'
    cachemb = float((mb_settings[int(kodi.get_setting("cachemb"))]) * 10 ** 6)  # 35% share in auto mode
    for path in (cache_path, temp_path):
        if os.path.exists(path):
            try:
                if (automb and (cachemb >= float(available_space) * .35)) or \
                        ((cachemb == 0 and kodi.get_setting("accache") == 'true')
                         or (cachemb != 0 and (get_size(cache_path) >= int(cachemb)))):
                    delete_cache(auto_clear=True)
            except Exception:  # best-effort: never abort the maintenance run
                pass
    thumbsmb = float((mb_settings[int(kodi.get_setting("thumbsmb"))]) * 10 ** 6)  # 35% share
    try:
        if (automb and (thumbsmb >= int(available_space) * .35)) or \
                ((thumbsmb == 0 and kodi.get_setting("acthumbs") == 'true')
                 or (thumbsmb != 0 and (get_size(thumbnail_path) >= int(thumbsmb)))):
            delete_thumbnails(auto_clear=True)
    except Exception:
        pass
    packagesmb = float((mb_settings[int(kodi.get_setting("packagesmb"))]) * 10 ** 6)  # 10% share
    try:
        if (automb and (packagesmb >= int(available_space) * .10)) or \
                ((packagesmb == 0 and kodi.get_setting("acpackages") == 'true')
                 or (packagesmb != 0 and (get_size(packages_path) >= int(packagesmb)))):
            delete_packages(auto_clear=True)
    except Exception:
        pass
    if kodi.get_setting("accrash") == 'true':
        delete_crash_logs(auto_clear=True)
    if not auto_clear:
        xbmc.executebuiltin("Container.Refresh")
        xbmcgui.Dialog().ok(AddonName, 'Auto Maintenance has been run successfully')
def get_free_space_mb(dirname):
    """Return (free, total) space for the filesystem holding *dirname*.

    NOTE(review): despite the name, both values are in bytes, not MB --
    callers compare them against byte counts.
    """
    import ctypes
    if xbmc.getCondVisibility('system.platform.windows'):
        free_bytes = ctypes.c_ulonglong(0)
        total_bytes = ctypes.c_int64()
        # GetDiskFreeSpaceExW(dir, lpFreeBytesAvailable, lpTotalNumberOfBytes,
        #                     lpTotalNumberOfFreeBytes)
        ctypes.windll.kernel32.GetDiskFreeSpaceExW(ctypes.c_wchar_p(dirname),
                                                   None,
                                                   ctypes.pointer(total_bytes),
                                                   ctypes.pointer(free_bytes))
        return free_bytes.value, total_bytes.value
    stats = os.statvfs(dirname)
    return stats.f_bavail * stats.f_frsize, stats.f_frsize * stats.f_blocks
def _is_debugging():
    """Return Kodi's 'debug.showloginfo' setting via JSON-RPC (False if absent)."""
    query = {'jsonrpc': '2.0', 'id': 1, 'method': 'Settings.getSettings',
             'params': {'filter': {'section': 'system', 'category': 'logging'}}}
    response = kodi.execute_jsonrpc(query)
    for setting in response.get('result', {}).get('settings', {}):
        if setting['id'] == 'debug.showloginfo':
            return setting['value']
    return False
def convert_size(size):
    """Format *size* (bytes) as a human-readable decimal string, e.g. '1.02 KB'.

    Uses decimal (1000-based) units, matching the original implementation.
    """
    import math
    if size == 0:
        return '0 B'
    labels = ("B", "KB", "MB", "GB", "TB", "PB", "EB", "ZB", "YB")
    magnitude = int(math.floor(math.log(size, 1000)))
    scaled = round(size / math.pow(1000, magnitude), 2)
    return '%s %s' % (scaled, labels[magnitude])
def get_size(start_path):
    """Return the total size in bytes of all files under *start_path* (recursive)."""
    return sum(
        os.path.getsize(os.path.join(dirpath, fname))
        for dirpath, dirnames, filenames in os.walk(start_path)
        for fname in filenames)
def source_change():
    """Point stale TVADDONS fusion URLs in sources.xml at the current host.

    Best-effort: all errors are silently ignored (the file may not exist).
    """
    sources_file = userdata_path + "/sources.xml"
    try:
        with open(sources_file) as handle:
            contents = handle.read()
        if 'fusion.tvaddons.ag' in contents:
            updated = contents.replace('http://www.fusion.tvaddons.ag', 'http://fusion.tvaddons.co')
        elif 'https://code.sourcecode.ag' in contents:
            updated = contents.replace('https://code.sourcecode.ag', 'http://fusion.tvaddons.co')
        else:
            return
        with open(sources_file, "w") as handle:
            handle.write(str(updated))
    except:  # deliberate broad swallow, preserved from the original
        pass
def feed_change():
    """Rename the stale 'TVADDONS' RSS feed label to 'TVADDONSCO' in RssFeeds.xml.

    Best-effort: all errors are silently ignored (the file may not exist).
    """
    feeds_file = userdata_path + "/RssFeeds.xml"
    try:
        with open(feeds_file) as handle:
            contents = handle.read()
        if 'TVADDONS' not in contents:
            return
        updated = contents.replace('TVADDONS', 'TVADDONSCO')
        with open(feeds_file, "w") as handle:
            handle.write(str(updated))
    except:  # deliberate broad swallow, preserved from the original
        pass
| gpl-2.0 |
dsgouda/autorest | src/generator/AutoRest.Python.Tests/AcceptanceTests/integer_tests.py | 8 | 3064 | # --------------------------------------------------------------------------
#
# Copyright (c) Microsoft Corporation. All rights reserved.
#
# The MIT License (MIT)
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the ""Software""), to
# deal in the Software without restriction, including without limitation the
# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
# sell copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
#
# --------------------------------------------------------------------------
import unittest
import subprocess
import sys
import isodate
import tempfile
import json
from datetime import date, datetime, timedelta, tzinfo
import os
from os.path import dirname, pardir, join, realpath
# Directory containing this test module.
cwd = dirname(realpath(__file__))
# Client log verbosity; defaults to 30 (logging.WARNING).
log_level = int(os.environ.get('PythonLogLevel', 30))
# Generated code lives under Expected/AcceptanceTests; make the
# BodyInteger client package importable for the imports below.
tests = realpath(join(cwd, pardir, "Expected", "AcceptanceTests"))
sys.path.append(join(tests, "BodyInteger"))
from msrest.serialization import Deserializer
from msrest.exceptions import DeserializationError
from auto_rest_integer_test_service import AutoRestIntegerTestService
class IntegerTests(unittest.TestCase):
    """Acceptance tests for the generated BodyInteger client.

    Requires the AutoRest test server to be listening on localhost:3000.
    """

    def test_integer(self):
        client = AutoRestIntegerTestService(base_url="http://localhost:3000")

        # Round-trip 32/64-bit boundary values.
        client.int_model.put_max32(2147483647) # sys.maxint
        client.int_model.put_min32(-2147483648)
        client.int_model.put_max64(9223372036854776000) # sys.maxsize
        client.int_model.put_min64(-9223372036854776000)
        client.int_model.get_null()

        with self.assertRaises(DeserializationError):
            client.int_model.get_invalid()

        # These wont fail in Python
        #client.int_model.get_overflow_int32()
        #client.int_model.get_overflow_int64()
        #client.int_model.get_underflow_int32()
        #client.int_model.get_underflow_int64()

        # Unix-timestamp (seconds since epoch) wire-format round-trip.
        unix_date = datetime(year=2016, month=4, day=13)
        client.int_model.put_unix_time_date(unix_date)
        self.assertEqual(unix_date.utctimetuple(), client.int_model.get_unix_time().utctimetuple())
        self.assertIsNone(client.int_model.get_null_unix_time())

        with self.assertRaises(DeserializationError):
            client.int_model.get_invalid_unix_time()
# Allow running this test module directly (outside a test runner).
if __name__ == '__main__':
    unittest.main()
| mit |
Krossom/python-for-android | python3-alpha/python3-src/Lib/email/test/test_email_codecs.py | 50 | 3394 | # Copyright (C) 2002-2006 Python Software Foundation
# Contact: email-sig@python.org
# email package unit tests for (optional) Asian codecs
import unittest
from test.support import run_unittest
from email.test.test_email import TestEmailBase
from email.charset import Charset
from email.header import Header, decode_header
from email.message import Message
# We're compatible with Python 2.3, but it doesn't have the built-in Asian
# codecs, so we have to skip all these tests.
# Probe for the optional Asian codecs; skip the whole module when absent.
try:
    str(b'foo', 'euc-jp')
except LookupError:
    raise unittest.SkipTest
class TestEmailAsianCodecs(TestEmailBase):
    """Header and payload encoding tests using the optional Asian codecs."""

    def test_japanese_codecs(self):
        # Mixed euc-jp / iso-8859-1 header chunks must fold and decode
        # back to the original byte sequences.
        eq = self.ndiffAssertEqual
        jcode = "euc-jp"
        gcode = "iso-8859-1"
        j = Charset(jcode)
        g = Charset(gcode)
        h = Header("Hello World!")
        jhello = str(b'\xa5\xcf\xa5\xed\xa1\xbc\xa5\xef\xa1\xbc'
                     b'\xa5\xeb\xa5\xc9\xa1\xaa', jcode)
        ghello = str(b'Gr\xfc\xdf Gott!', gcode)
        h.append(jhello, j)
        h.append(ghello, g)
        # BAW: This used to -- and maybe should -- fold the two iso-8859-1
        # chunks into a single encoded word.  However it doesn't violate the
        # standard to have them as two encoded chunks and maybe it's
        # reasonable <wink> for each .append() call to result in a separate
        # encoded word.
        eq(h.encode(), """\
Hello World! =?iso-2022-jp?b?GyRCJU8lbSE8JW8hPCVrJUkhKhsoQg==?=
 =?iso-8859-1?q?Gr=FC=DF_Gott!?=""")
        eq(decode_header(h.encode()),
           [(b'Hello World!', None),
            (b'\x1b$B%O%m!<%o!<%k%I!*\x1b(B', 'iso-2022-jp'),
            (b'Gr\xfc\xdf Gott!', gcode)])
        subject_bytes = (b'test-ja \xa4\xd8\xc5\xea\xb9\xc6\xa4\xb5'
            b'\xa4\xec\xa4\xbf\xa5\xe1\xa1\xbc\xa5\xeb\xa4\xcf\xbb\xca\xb2'
            b'\xf1\xbc\xd4\xa4\xce\xbe\xb5\xc7\xa7\xa4\xf2\xc2\xd4\xa4\xc3'
            b'\xa4\xc6\xa4\xa4\xa4\xde\xa4\xb9')
        subject = str(subject_bytes, jcode)
        h = Header(subject, j, header_name="Subject")
        # test a very long header
        enc = h.encode()
        # TK: splitting point may differ by codec design and/or Header encoding
        eq(enc , """\
=?iso-2022-jp?b?dGVzdC1qYSAbJEIkWEVqOUYkNSRsJD8lYSE8JWskTztKGyhC?=
 =?iso-2022-jp?b?GyRCMnE8VCROPjVHJyRyQlQkQyRGJCQkXiQ5GyhC?=""")
        # TK: full decode comparison
        eq(str(h).encode(jcode), subject_bytes)

    def test_payload_encoding_utf8(self):
        # A euc-jp string payload stored as utf-8 must round-trip.
        jhello = str(b'\xa5\xcf\xa5\xed\xa1\xbc\xa5\xef\xa1\xbc'
                     b'\xa5\xeb\xa5\xc9\xa1\xaa', 'euc-jp')
        msg = Message()
        msg.set_payload(jhello, 'utf-8')
        ustr = msg.get_payload(decode=True).decode(msg.get_content_charset())
        self.assertEqual(jhello, ustr)

    def test_payload_encoding(self):
        # Same round-trip, keeping the payload in its native euc-jp charset.
        jcode = 'euc-jp'
        jhello = str(b'\xa5\xcf\xa5\xed\xa1\xbc\xa5\xef\xa1\xbc'
                     b'\xa5\xeb\xa5\xc9\xa1\xaa', jcode)
        msg = Message()
        msg.set_payload(jhello, jcode)
        ustr = msg.get_payload(decode=True).decode(msg.get_content_charset())
        self.assertEqual(jhello, ustr)
def suite():
    """Build a TestSuite containing the Asian-codec email tests."""
    # Local name renamed: the original shadowed this function's own name.
    collected = unittest.TestSuite()
    collected.addTest(unittest.makeSuite(TestEmailAsianCodecs))
    return collected
def test_main():
    """Entry point used by the regrtest driver."""
    run_unittest(TestEmailAsianCodecs)
# Running directly uses the suite() entry point defined above.
if __name__ == '__main__':
    unittest.main(defaultTest='suite')
| apache-2.0 |
PokemonGoF/PokemonGo-Bot-Desktop | build/pywin/Lib/rfc822.py | 30 | 33542 | """RFC 2822 message manipulation.
Note: This is only a very rough sketch of a full RFC-822 parser; in particular
the tokenizing of addresses does not adhere to all the quoting rules.
Note: RFC 2822 is a long awaited update to RFC 822. This module should
conform to RFC 2822, and is thus mis-named (it's not worth renaming it). Some
effort at RFC 2822 updates have been made, but a thorough audit has not been
performed. Consider any RFC 2822 non-conformance to be a bug.
RFC 2822: http://www.faqs.org/rfcs/rfc2822.html
RFC 822 : http://www.faqs.org/rfcs/rfc822.html (obsolete)
Directions for use:
To create a Message object: first open a file, e.g.:
fp = open(file, 'r')
You can use any other legal way of getting an open file object, e.g. use
sys.stdin or call os.popen(). Then pass the open file object to the Message()
constructor:
m = Message(fp)
This class can work with any input object that supports a readline method. If
the input object has seek and tell capability, the rewindbody method will
work; also illegal lines will be pushed back onto the input stream. If the
input object lacks seek but has an `unread' method that can push back a line
of input, Message will use that to push back illegal lines. Thus this class
can be used to parse messages coming from a buffered stream.
The optional `seekable' argument is provided as a workaround for certain stdio
libraries in which tell() discards buffered data before discovering that the
lseek() system call doesn't work. For maximum portability, you should set the
seekable argument to zero to prevent that initial \code{tell} when passing in
an unseekable object such as a file object created from a socket object. If
it is 1 on entry -- which it is by default -- the tell() method of the open
file object is called once; if this raises an exception, seekable is reset to
0. For other nonzero values of seekable, this test is not made.
To get the text of a particular header there are several methods:
str = m.getheader(name)
str = m.getrawheader(name)
where name is the name of the header, e.g. 'Subject'. The difference is that
getheader() strips the leading and trailing whitespace, while getrawheader()
doesn't. Both functions retain embedded whitespace (including newlines)
exactly as they are specified in the header, and leave the case of the text
unchanged.
For addresses and address lists there are functions
realname, mailaddress = m.getaddr(name)
list = m.getaddrlist(name)
where the latter returns a list of (realname, mailaddr) tuples.
There is also a method
time = m.getdate(name)
which parses a Date-like field and returns a time-compatible tuple,
i.e. a tuple such as returned by time.localtime() or accepted by
time.mktime().
See the class definition for lower level access methods.
There are also some utility functions here.
"""
# Cleanup and extensions by Eric S. Raymond <esr@thyrsus.com>
import time
from warnings import warnpy3k
warnpy3k("in 3.x, rfc822 has been removed in favor of the email package",
stacklevel=2)
__all__ = ["Message","AddressList","parsedate","parsedate_tz","mktime_tz"]
_blanklines = ('\r\n', '\n') # Optimization for islast()
class Message:
    """Represents a single RFC 2822-compliant message.

    NOTE: this is the legacy Python 2 ``rfc822`` parser (deprecated in
    favor of the ``email`` package); headers are parsed eagerly in
    __init__ and exposed both as raw lines (self.headers) and as a
    lowercase-keyed dict keeping the *last* value per header (self.dict).
    """

    def __init__(self, fp, seekable = 1):
        """Initialize the class instance and read the headers."""
        if seekable == 1:
            # Exercise tell() to make sure it works
            # (and then assume seek() works, too)
            try:
                fp.tell()
            except (AttributeError, IOError):
                seekable = 0
        self.fp = fp
        self.seekable = seekable
        self.startofheaders = None
        self.startofbody = None
        #
        if self.seekable:
            try:
                self.startofheaders = self.fp.tell()
            except IOError:
                self.seekable = 0
        #
        self.readheaders()
        #
        if self.seekable:
            try:
                self.startofbody = self.fp.tell()
            except IOError:
                self.seekable = 0

    def rewindbody(self):
        """Rewind the file to the start of the body (if seekable)."""
        if not self.seekable:
            raise IOError, "unseekable file"
        self.fp.seek(self.startofbody)

    def readheaders(self):
        """Read header lines.

        Read header lines up to the entirely blank line that terminates them.
        The (normally blank) line that ends the headers is skipped, but not
        included in the returned list.  If a non-header line ends the headers,
        (which is an error), an attempt is made to backspace over it; it is
        never included in the returned list.

        The variable self.status is set to the empty string if all went well,
        otherwise it is an error message.  The variable self.headers is a
        completely uninterpreted list of lines contained in the header (so
        printing them will reproduce the header exactly as it appears in the
        file).
        """
        self.dict = {}
        self.unixfrom = ''
        self.headers = lst = []
        self.status = ''
        headerseen = ""
        firstline = 1
        startofline = unread = tell = None
        # Prefer a pushback ('unread') hook if the stream has one; fall
        # back to seek/tell for undoing an over-read.
        if hasattr(self.fp, 'unread'):
            unread = self.fp.unread
        elif self.seekable:
            tell = self.fp.tell
        while 1:
            if tell:
                try:
                    startofline = tell()
                except IOError:
                    startofline = tell = None
                    self.seekable = 0
            line = self.fp.readline()
            if not line:
                self.status = 'EOF in headers'
                break
            # Skip unix From name time lines
            if firstline and line.startswith('From '):
                self.unixfrom = self.unixfrom + line
                continue
            firstline = 0
            if headerseen and line[0] in ' \t':
                # It's a continuation line.
                lst.append(line)
                x = (self.dict[headerseen] + "\n " + line.strip())
                self.dict[headerseen] = x.strip()
                continue
            elif self.iscomment(line):
                # It's a comment.  Ignore it.
                continue
            elif self.islast(line):
                # Note!  No pushback here!  The delimiter line gets eaten.
                break
            headerseen = self.isheader(line)
            if headerseen:
                # It's a legal header line, save it.
                lst.append(line)
                self.dict[headerseen] = line[len(headerseen)+1:].strip()
                continue
            elif headerseen is not None:
                # An empty header name. These aren't allowed in HTTP, but it's
                # probably a benign mistake. Don't add the header, just keep
                # going.
                continue
            else:
                # It's not a header line; throw it back and stop here.
                if not self.dict:
                    self.status = 'No headers'
                else:
                    self.status = 'Non-header line where header expected'
                # Try to undo the read.
                if unread:
                    unread(line)
                elif tell:
                    self.fp.seek(startofline)
                else:
                    self.status = self.status + '; bad seek'
                break

    def isheader(self, line):
        """Determine whether a given line is a legal header.

        This method should return the header name, suitably canonicalized.
        You may override this method in order to use Message parsing on tagged
        data in RFC 2822-like formats with special header formats.
        """
        i = line.find(':')
        if i > -1:
            return line[:i].lower()
        return None

    def islast(self, line):
        """Determine whether a line is a legal end of RFC 2822 headers.

        You may override this method if your application wants to bend the
        rules, e.g. to strip trailing whitespace, or to recognize MH template
        separators ('--------').  For convenience (e.g. for code reading from
        sockets) a line consisting of \\r\\n also matches.
        """
        return line in _blanklines

    def iscomment(self, line):
        """Determine whether a line should be skipped entirely.

        You may override this method in order to use Message parsing on tagged
        data in RFC 2822-like formats that support embedded comments or
        free-text data.
        """
        return False

    def getallmatchingheaders(self, name):
        """Find all header lines matching a given header name.

        Look through the list of headers and find all lines matching a given
        header name (and their continuation lines).  A list of the lines is
        returned, without interpretation.  If the header does not occur, an
        empty list is returned.  If the header occurs multiple times, all
        occurrences are returned.  Case is not important in the header name.
        """
        name = name.lower() + ':'
        n = len(name)
        lst = []
        hit = 0
        for line in self.headers:
            if line[:n].lower() == name:
                hit = 1
            elif not line[:1].isspace():
                # A non-continuation line ends the current match run.
                hit = 0
            if hit:
                lst.append(line)
        return lst

    def getfirstmatchingheader(self, name):
        """Get the first header line matching name.

        This is similar to getallmatchingheaders, but it returns only the
        first matching header (and its continuation lines).
        """
        name = name.lower() + ':'
        n = len(name)
        lst = []
        hit = 0
        for line in self.headers:
            if hit:
                if not line[:1].isspace():
                    break
            elif line[:n].lower() == name:
                hit = 1
            if hit:
                lst.append(line)
        return lst

    def getrawheader(self, name):
        """A higher-level interface to getfirstmatchingheader().

        Return a string containing the literal text of the header but with the
        keyword stripped.  All leading, trailing and embedded whitespace is
        kept in the string, however.  Return None if the header does not
        occur.
        """
        lst = self.getfirstmatchingheader(name)
        if not lst:
            return None
        lst[0] = lst[0][len(name) + 1:]
        return ''.join(lst)

    def getheader(self, name, default=None):
        """Get the header value for a name.

        This is the normal interface: it returns a stripped version of the
        header value for a given header name, or None if it doesn't exist.
        This uses the dictionary version which finds the *last* such header.
        """
        return self.dict.get(name.lower(), default)
    get = getheader

    def getheaders(self, name):
        """Get all values for a header.

        This returns a list of values for headers given more than once; each
        value in the result list is stripped in the same way as the result of
        getheader().  If the header is not given, return an empty list.
        """
        result = []
        current = ''
        have_header = 0
        for s in self.getallmatchingheaders(name):
            if s[0].isspace():
                # Continuation line: append to the value in progress.
                if current:
                    current = "%s\n %s" % (current, s.strip())
                else:
                    current = s.strip()
            else:
                if have_header:
                    result.append(current)
                current = s[s.find(":") + 1:].strip()
                have_header = 1
        if have_header:
            result.append(current)
        return result

    def getaddr(self, name):
        """Get a single address from a header, as a tuple.

        An example return value:
        ('Guido van Rossum', 'guido@cwi.nl')
        """
        # New, by Ben Escoto
        alist = self.getaddrlist(name)
        if alist:
            return alist[0]
        else:
            return (None, None)

    def getaddrlist(self, name):
        """Get a list of addresses from a header.

        Retrieves a list of addresses from a header, where each address is a
        tuple as returned by getaddr().  Scans all named headers, so it works
        properly with multiple To: or Cc: headers for example.
        """
        raw = []
        for h in self.getallmatchingheaders(name):
            if h[0] in ' \t':
                raw.append(h)
            else:
                if raw:
                    raw.append(', ')
                i = h.find(':')
                if i > 0:
                    addr = h[i+1:]
                raw.append(addr)
        alladdrs = ''.join(raw)
        a = AddressList(alladdrs)
        return a.addresslist

    def getdate(self, name):
        """Retrieve a date field from a header.

        Retrieves a date field from the named header, returning a tuple
        compatible with time.mktime().
        """
        try:
            data = self[name]
        except KeyError:
            return None
        return parsedate(data)

    def getdate_tz(self, name):
        """Retrieve a date field from a header as a 10-tuple.

        The first 9 elements make up a tuple compatible with time.mktime(),
        and the 10th is the offset of the poster's time zone from GMT/UTC.
        """
        try:
            data = self[name]
        except KeyError:
            return None
        return parsedate_tz(data)

    # Access as a dictionary (only finds *last* header of each type):

    def __len__(self):
        """Get the number of headers in a message."""
        return len(self.dict)

    def __getitem__(self, name):
        """Get a specific header, as from a dictionary."""
        return self.dict[name.lower()]

    def __setitem__(self, name, value):
        """Set the value of a header.

        Note: This is not a perfect inversion of __getitem__, because any
        changed headers get stuck at the end of the raw-headers list rather
        than where the altered header was.
        """
        del self[name] # Won't fail if it doesn't exist
        self.dict[name.lower()] = value
        text = name + ": " + value
        for line in text.split("\n"):
            self.headers.append(line + "\n")

    def __delitem__(self, name):
        """Delete all occurrences of a specific header, if it is present."""
        name = name.lower()
        if not name in self.dict:
            return
        del self.dict[name]
        name = name + ':'
        n = len(name)
        lst = []
        hit = 0
        for i in range(len(self.headers)):
            line = self.headers[i]
            if line[:n].lower() == name:
                hit = 1
            elif not line[:1].isspace():
                hit = 0
            if hit:
                lst.append(i)
        # Delete from the end so earlier indices stay valid.
        for i in reversed(lst):
            del self.headers[i]

    def setdefault(self, name, default=""):
        lowername = name.lower()
        if lowername in self.dict:
            return self.dict[lowername]
        else:
            text = name + ": " + default
            for line in text.split("\n"):
                self.headers.append(line + "\n")
            self.dict[lowername] = default
            return default

    def has_key(self, name):
        """Determine whether a message contains the named header."""
        return name.lower() in self.dict

    def __contains__(self, name):
        """Determine whether a message contains the named header."""
        return name.lower() in self.dict

    def __iter__(self):
        return iter(self.dict)

    def keys(self):
        """Get all of a message's header field names."""
        return self.dict.keys()

    def values(self):
        """Get all of a message's header field values."""
        return self.dict.values()

    def items(self):
        """Get all of a message's headers.

        Returns a list of name, value tuples.
        """
        return self.dict.items()

    def __str__(self):
        return ''.join(self.headers)
# Utility functions
# -----------------
# XXX Should fix unquote() and quote() to be really conformant.
# XXX The inverses of the parse functions may also be useful.
def unquote(s):
    """Strip one level of surrounding double quotes or angle brackets from *s*.

    Inside double quotes, RFC 822 quoted-pair escapes (\\\\ and \\") are undone.
    Strings not wrapped in either delimiter are returned unchanged.
    """
    if len(s) > 1:
        quoted = s.startswith('"') and s.endswith('"')
        if quoted:
            inner = s[1:-1]
            return inner.replace('\\\\', '\\').replace('\\"', '"')
        if s.startswith('<') and s.endswith('>'):
            return s[1:-1]
    return s
def quote(s):
    """Escape backslashes and double quotes so *s* can live in a quoted string."""
    escaped = s.replace('\\', '\\\\')
    return escaped.replace('"', '\\"')
def parseaddr(address):
    """Parse an address into a (realname, mailaddr) tuple.

    Returns (None, None) when no address can be extracted.
    """
    parsed = AddressList(address).addresslist
    if parsed:
        return parsed[0]
    return (None, None)
class AddrlistClass:
"""Address parser class by Ben Escoto.
To understand what this class does, it helps to have a copy of
RFC 2822 in front of you.
http://www.faqs.org/rfcs/rfc2822.html
Note: this class interface is deprecated and may be removed in the future.
Use rfc822.AddressList instead.
"""
def __init__(self, field):
"""Initialize a new instance.
`field' is an unparsed address header field, containing one or more
addresses.
"""
self.specials = '()<>@,:;.\"[]'
self.pos = 0
self.LWS = ' \t'
self.CR = '\r\n'
self.atomends = self.specials + self.LWS + self.CR
# Note that RFC 2822 now specifies `.' as obs-phrase, meaning that it
# is obsolete syntax. RFC 2822 requires that we recognize obsolete
# syntax, so allow dots in phrases.
self.phraseends = self.atomends.replace('.', '')
self.field = field
self.commentlist = []
def gotonext(self):
"""Parse up to the start of the next address."""
while self.pos < len(self.field):
if self.field[self.pos] in self.LWS + '\n\r':
self.pos = self.pos + 1
elif self.field[self.pos] == '(':
self.commentlist.append(self.getcomment())
else: break
def getaddrlist(self):
"""Parse all addresses.
Returns a list containing all of the addresses.
"""
result = []
ad = self.getaddress()
while ad:
result += ad
ad = self.getaddress()
return result
def getaddress(self):
"""Parse the next address."""
self.commentlist = []
self.gotonext()
oldpos = self.pos
oldcl = self.commentlist
plist = self.getphraselist()
self.gotonext()
returnlist = []
if self.pos >= len(self.field):
# Bad email address technically, no domain.
if plist:
returnlist = [(' '.join(self.commentlist), plist[0])]
elif self.field[self.pos] in '.@':
# email address is just an addrspec
# this isn't very efficient since we start over
self.pos = oldpos
self.commentlist = oldcl
addrspec = self.getaddrspec()
returnlist = [(' '.join(self.commentlist), addrspec)]
elif self.field[self.pos] == ':':
# address is a group
returnlist = []
fieldlen = len(self.field)
self.pos += 1
while self.pos < len(self.field):
self.gotonext()
if self.pos < fieldlen and self.field[self.pos] == ';':
self.pos += 1
break
returnlist = returnlist + self.getaddress()
elif self.field[self.pos] == '<':
# Address is a phrase then a route addr
routeaddr = self.getrouteaddr()
if self.commentlist:
returnlist = [(' '.join(plist) + ' (' + \
' '.join(self.commentlist) + ')', routeaddr)]
else: returnlist = [(' '.join(plist), routeaddr)]
else:
if plist:
returnlist = [(' '.join(self.commentlist), plist[0])]
elif self.field[self.pos] in self.specials:
self.pos += 1
self.gotonext()
if self.pos < len(self.field) and self.field[self.pos] == ',':
self.pos += 1
return returnlist
    def getrouteaddr(self):
        """Parse a route address (Return-path value).
        This method just skips all the route stuff and returns the addrspec.
        """
        if self.field[self.pos] != '<':
            # Not an angle-bracketed address; implicitly returns None.
            return
        expectroute = 0
        self.pos += 1
        self.gotonext()
        adlist = ""
        while self.pos < len(self.field):
            if expectroute:
                # After '@' in a route, a domain follows; discard it.
                self.getdomain()
                expectroute = 0
            elif self.field[self.pos] == '>':
                self.pos += 1
                break
            elif self.field[self.pos] == '@':
                self.pos += 1
                expectroute = 1
            elif self.field[self.pos] == ':':
                # ':' terminates the route part; the addrspec follows.
                self.pos += 1
            else:
                adlist = self.getaddrspec()
                self.pos += 1
                break
            self.gotonext()
        return adlist
    def getaddrspec(self):
        """Parse an RFC 2822 addr-spec.

        Collects the local part (atoms, dots and quoted strings), then, if
        an '@' follows, appends the domain.  Returns the assembled string.
        """
        aslist = []
        self.gotonext()
        while self.pos < len(self.field):
            if self.field[self.pos] == '.':
                aslist.append('.')
                self.pos += 1
            elif self.field[self.pos] == '"':
                # Quoted local part; re-quote it verbatim.
                aslist.append('"%s"' % self.getquote())
            elif self.field[self.pos] in self.atomends:
                break
            else: aslist.append(self.getatom())
            self.gotonext()
        if self.pos >= len(self.field) or self.field[self.pos] != '@':
            # No domain part -- return just the local part.
            return ''.join(aslist)
        aslist.append('@')
        self.pos += 1
        self.gotonext()
        return ''.join(aslist) + self.getdomain()
    def getdomain(self):
        """Get the complete domain name from an address."""
        sdlist = []
        while self.pos < len(self.field):
            if self.field[self.pos] in self.LWS:
                self.pos += 1
            elif self.field[self.pos] == '(':
                # Comments are collected but are not part of the domain.
                self.commentlist.append(self.getcomment())
            elif self.field[self.pos] == '[':
                # Domain literal, e.g. [127.0.0.1].
                sdlist.append(self.getdomainliteral())
            elif self.field[self.pos] == '.':
                self.pos += 1
                sdlist.append('.')
            elif self.field[self.pos] in self.atomends:
                break
            else: sdlist.append(self.getatom())
        return ''.join(sdlist)
    def getdelimited(self, beginchar, endchars, allowcomments = 1):
        """Parse a header fragment delimited by special characters.
        `beginchar' is the start character for the fragment.  If self is not
        looking at an instance of `beginchar' then getdelimited returns the
        empty string.
        `endchars' is a sequence of allowable end-delimiting characters.
        Parsing stops when one of these is encountered.
        If `allowcomments' is non-zero, embedded RFC 2822 comments are allowed
        within the parsed fragment.

        The delimiters themselves are not included in the returned string.
        """
        if self.field[self.pos] != beginchar:
            return ''
        slist = ['']
        # quote == 1 means the previous character was a backslash, so the
        # next character is taken literally.
        quote = 0
        self.pos += 1
        while self.pos < len(self.field):
            if quote == 1:
                slist.append(self.field[self.pos])
                quote = 0
            elif self.field[self.pos] in endchars:
                self.pos += 1
                break
            elif allowcomments and self.field[self.pos] == '(':
                slist.append(self.getcomment())
                continue        # have already advanced pos from getcomment
            elif self.field[self.pos] == '\\':
                quote = 1
            else:
                slist.append(self.field[self.pos])
            self.pos += 1
        return ''.join(slist)
    def getquote(self):
        """Get a quote-delimited fragment from self's field."""
        # Stops at '"' or a bare CR; comments are not recognized inside quotes.
        return self.getdelimited('"', '"\r', 0)
    def getcomment(self):
        """Get a parenthesis-delimited fragment from self's field."""
        # allowcomments=1 so nested comments are flattened into the result.
        return self.getdelimited('(', ')\r', 1)
    def getdomainliteral(self):
        """Parse an RFC 2822 domain-literal."""
        # Re-wrap the parsed content in brackets, e.g. "[127.0.0.1]".
        return '[%s]' % self.getdelimited('[', ']\r', 0)
def getatom(self, atomends=None):
"""Parse an RFC 2822 atom.
Optional atomends specifies a different set of end token delimiters
(the default is to use self.atomends). This is used e.g. in
getphraselist() since phrase endings must not include the `.' (which
is legal in phrases)."""
atomlist = ['']
if atomends is None:
atomends = self.atomends
while self.pos < len(self.field):
if self.field[self.pos] in atomends:
break
else: atomlist.append(self.field[self.pos])
self.pos += 1
return ''.join(atomlist)
    def getphraselist(self):
        """Parse a sequence of RFC 2822 phrases.
        A phrase is a sequence of words, which are in turn either RFC 2822
        atoms or quoted-strings.  Phrases are canonicalized by squeezing all
        runs of continuous whitespace into one space.

        Returns the list of words; whitespace squeezing happens because the
        words are later re-joined with single spaces by the caller.
        """
        plist = []
        while self.pos < len(self.field):
            if self.field[self.pos] in self.LWS:
                self.pos += 1
            elif self.field[self.pos] == '"':
                plist.append(self.getquote())
            elif self.field[self.pos] == '(':
                # Comments are side-collected, not part of the phrase.
                self.commentlist.append(self.getcomment())
            elif self.field[self.pos] in self.phraseends:
                break
            else:
                # Note: '.' is allowed here because phraseends excludes it.
                plist.append(self.getatom(self.phraseends))
        return plist
class AddressList(AddrlistClass):
    """An AddressList encapsulates a list of parsed RFC 2822 addresses.

    The parsed (name, address) pairs live in ``self.addresslist``.  The
    class supports len(), str(), indexing/slicing/'in', and set-like
    arithmetic (+, +=, -, -=) over that list.
    """
    def __init__(self, field):
        """Parse header value `field`; an empty/None field parses to []."""
        AddrlistClass.__init__(self, field)
        if field:
            self.addresslist = self.getaddrlist()
        else:
            self.addresslist = []
    def __len__(self):
        return len(self.addresslist)
    def __str__(self):
        return ", ".join(map(dump_address_pair, self.addresslist))
    def __add__(self, other):
        # Set union
        newaddr = AddressList(None)
        newaddr.addresslist = self.addresslist[:]
        for x in other.addresslist:
            if x not in self.addresslist:
                newaddr.addresslist.append(x)
        return newaddr
    def __iadd__(self, other):
        # Set union, in-place
        for x in other.addresslist:
            if x not in self.addresslist:
                self.addresslist.append(x)
        return self
    def __sub__(self, other):
        # Set difference
        newaddr = AddressList(None)
        for x in self.addresslist:
            if x not in other.addresslist:
                newaddr.addresslist.append(x)
        return newaddr
    def __isub__(self, other):
        # Set difference, in-place
        for x in other.addresslist:
            if x in self.addresslist:
                self.addresslist.remove(x)
        return self
    def __getitem__(self, index):
        # Make indexing, slices, and 'in' work
        return self.addresslist[index]
def dump_address_pair(pair):
    """Dump a (name, address) pair in a canonicalized form.

    With a non-empty name: '"Name" <addr>'; otherwise just the address.
    """
    name, address = pair[0], pair[1]
    if not name:
        return address
    return '"' + name + '" <' + address + '>'
# Parse a date field
# Both abbreviated and full month names are accepted; parsedate_tz maps
# an index > 12 back into 1..12.
_monthnames = ['jan', 'feb', 'mar', 'apr', 'may', 'jun', 'jul',
               'aug', 'sep', 'oct', 'nov', 'dec',
               'january', 'february', 'march', 'april', 'may', 'june', 'july',
               'august', 'september', 'october', 'november', 'december']
_daynames = ['mon', 'tue', 'wed', 'thu', 'fri', 'sat', 'sun']
# The timezone table does not include the military time zones defined
# in RFC822, other than Z.  According to RFC1123, the description in
# RFC822 gets the signs wrong, so we can't rely on any such time
# zones.  RFC1123 recommends that numeric timezone indicators be used
# instead of timezone names.
# NOTE: values are +/-HHMM offsets (hours*100), not seconds;
# parsedate_tz converts them to seconds.
_timezones = {'UT':0, 'UTC':0, 'GMT':0, 'Z':0,
              'AST': -400, 'ADT': -300,  # Atlantic (used in Canada)
              'EST': -500, 'EDT': -400,  # Eastern
              'CST': -600, 'CDT': -500,  # Central
              'MST': -700, 'MDT': -600,  # Mountain
              'PST': -800, 'PDT': -700   # Pacific
              }
def parsedate_tz(data):
    """Convert a date string to a time tuple.
    Accounts for military timezones.

    Returns a 10-tuple (year, month, day, hour, minute, second, 0, 1, 0,
    tz-offset-in-seconds-or-None), or None if the string cannot be parsed.
    """
    if not data:
        return None
    data = data.split()
    if data[0][-1] in (',', '.') or data[0].lower() in _daynames:
        # There's a dayname here. Skip it
        del data[0]
    else:
        # no space after the "weekday,"?
        i = data[0].rfind(',')
        if i >= 0:
            data[0] = data[0][i+1:]
    if len(data) == 3: # RFC 850 date, deprecated
        # e.g. "04-Mar-02 05:06:07 GMT" -- split the dashed date apart.
        stuff = data[0].split('-')
        if len(stuff) == 3:
            data = stuff + data[1:]
    if len(data) == 4:
        s = data[3]
        i = s.find('+')
        if i > 0:
            # Time and zone fused like "05:06:07+0100"; split them.
            data[3:] = [s[:i], s[i+1:]]
        else:
            data.append('') # Dummy tz
    if len(data) < 5:
        return None
    data = data[:5]
    [dd, mm, yy, tm, tz] = data
    mm = mm.lower()
    if not mm in _monthnames:
        # Day and month may be swapped ("Mar 4" vs "4 Mar").
        dd, mm = mm, dd.lower()
        if not mm in _monthnames:
            return None
    mm = _monthnames.index(mm)+1
    # Full month names occupy indices 13..24; fold back into 1..12.
    if mm > 12: mm = mm - 12
    if dd[-1] == ',':
        dd = dd[:-1]
    i = yy.find(':')
    if i > 0:
        # Year and time were swapped; put them back.
        yy, tm = tm, yy
    if yy[-1] == ',':
        yy = yy[:-1]
    if not yy[0].isdigit():
        # Year and timezone were swapped; put them back.
        yy, tz = tz, yy
    if tm[-1] == ',':
        tm = tm[:-1]
    tm = tm.split(':')
    if len(tm) == 2:
        [thh, tmm] = tm
        tss = '0'
    elif len(tm) == 3:
        [thh, tmm, tss] = tm
    else:
        return None
    try:
        yy = int(yy)
        dd = int(dd)
        thh = int(thh)
        tmm = int(tmm)
        tss = int(tss)
    except ValueError:
        return None
    tzoffset = None
    tz = tz.upper()
    if tz in _timezones:
        tzoffset = _timezones[tz]
    else:
        try:
            tzoffset = int(tz)
        except ValueError:
            pass
    # Convert a timezone offset into seconds ; -0500 -> -18000
    # (a zero offset, e.g. GMT, needs no conversion, so falsy is fine here)
    if tzoffset:
        if tzoffset < 0:
            tzsign = -1
            tzoffset = -tzoffset
        else:
            tzsign = 1
        tzoffset = tzsign * ( (tzoffset//100)*3600 + (tzoffset % 100)*60)
    return (yy, mm, dd, thh, tmm, tss, 0, 1, 0, tzoffset)
def parsedate(data):
    """Convert a time string to a time tuple.

    Like parsedate_tz() but drops the timezone element, yielding a
    9-tuple usable with time.mktime(); returns None on parse failure.
    """
    parsed = parsedate_tz(data)
    return None if parsed is None else parsed[:9]
def mktime_tz(data):
    """Turn a 10-tuple as returned by parsedate_tz() into a UTC timestamp."""
    zone = data[9]
    if zone is None:
        # No zone info, so localtime is better assumption than GMT
        return time.mktime(data[:8] + (-1,))
    # Interpret the tuple as local time, then shift by the parsed zone
    # offset and the local zone offset to land on UTC.
    stamp = time.mktime(data[:8] + (0,))
    return stamp - zone - time.timezone
def formatdate(timeval=None):
    """Returns time format preferred for Internet standards.
    Sun, 06 Nov 1994 08:49:37 GMT  ; RFC 822, updated by RFC 1123
    According to RFC 1123, day and month names must always be in
    English.  If not for that, this code could use strftime().  It
    can't because strftime() honors the locale and could generate
    non-English names.
    """
    if timeval is None:
        timeval = time.time()
    parts = time.gmtime(timeval)
    days = ("Mon", "Tue", "Wed", "Thu", "Fri", "Sat", "Sun")
    months = ("Jan", "Feb", "Mar", "Apr", "May", "Jun",
              "Jul", "Aug", "Sep", "Oct", "Nov", "Dec")
    # struct_time fields: [0]=year [1]=month [2]=day [3..5]=h:m:s [6]=weekday
    return "%s, %02d %s %04d %02d:%02d:%02d GMT" % (
        days[parts[6]], parts[2], months[parts[1] - 1],
        parts[0], parts[3], parts[4], parts[5])
# When used as script, run a small test program.
# The first command line argument must be a filename containing one
# message in RFC-822 format.
# NOTE(review): this demo block uses Python 2 print statements -- the module
# is from the Python 2 stdlib era and this section will not run under Python 3.
if __name__ == '__main__':
    import sys, os
    # Default to the first message of the user's MH-style inbox.
    file = os.path.join(os.environ['HOME'], 'Mail/inbox/1')
    if sys.argv[1:]: file = sys.argv[1]
    f = open(file, 'r')
    m = Message(f)
    print 'From:', m.getaddr('from')
    print 'To:', m.getaddrlist('to')
    print 'Subject:', m.getheader('subject')
    print 'Date:', m.getheader('date')
    date = m.getdate_tz('date')
    tz = date[-1]
    date = time.localtime(mktime_tz(date))
    if date:
        print 'ParsedDate:', time.asctime(date),
        # Re-render the numeric timezone offset as +HHMM[.SS].
        hhmmss = tz
        hhmm, ss = divmod(hhmmss, 60)
        hh, mm = divmod(hhmm, 60)
        print "%+03d%02d" % (hh, mm),
        if ss: print ".%02d" % ss,
        print
    else:
        print 'ParsedDate:', None
    m.rewindbody()
    # Count the body lines left after the headers.
    n = 0
    while f.readline():
        n += 1
    print 'Lines:', n
    print '-'*70
    print 'len =', len(m)
    if 'Date' in m: print 'Date =', m['Date']
    if 'X-Nonsense' in m: pass
    print 'keys =', m.keys()
    print 'values =', m.values()
    print 'items =', m.items()
| mit |
spatialdev/onadata | onadata/libs/mixins/xform_id_string_lookup.py | 11 | 1087 | from django.core.exceptions import ImproperlyConfigured
from django.shortcuts import get_object_or_404
class XFormIdStringLookupMixin(object):
    """View mixin that resolves non-numeric URL lookups through the
    ``id_string`` field instead of the primary key."""

    lookup_id_string = 'id_string'

    def get_object(self, queryset=None):
        """Return the object for this view, falling back to an id_string
        lookup when the URL keyword argument is not an integer."""
        if queryset is None:
            queryset = self.filter_queryset(self.get_queryset())
        lookup_field = self.lookup_field
        lookup = self.kwargs.get(lookup_field)
        if lookup is None:
            raise ImproperlyConfigured(
                'Expected view %s to be called with a URL keyword argument '
                'named "%s". Fix your URL conf, or set the `.lookup_field` '
                'attribute on the view correctly.' %
                (self.__class__.__name__, self.lookup_field)
            )
        try:
            int(lookup)
        except ValueError:
            # Not numeric -- look it up by id_string instead of pk.
            lookup_field = self.lookup_id_string
        obj = get_object_or_404(queryset, **{lookup_field: lookup})
        self.check_object_permissions(self.request, obj)
        return obj
| bsd-2-clause |
lociii/googleads-python-lib | examples/adspygoogle/dfp/v201308/activity_group_service/create_activity_groups.py | 2 | 2563 | #!/usr/bin/python
#
# Copyright 2013 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""This code example creates new activity groups.
To determine which activity groups exist, run get_all_activity_groups.py.
Tags: ActivityGroupService.createActivityGroups
"""
__author__ = 'Vincent Tsao'
# Locate the client library. If module was installed via "setup.py" script, then
# the following two lines are not needed.
import os
import sys
sys.path.insert(0, os.path.join('..', '..', '..', '..', '..'))
# Import appropriate classes from the client library.
from adspygoogle import DfpClient
from adspygoogle.common import Utils
# Set the ID of the advertiser company this activity group is associated with.
ADVERTISER_COMPANY_ID = 'INSERT_ADVERTISER_COMPANY_ID_HERE'
def main(client, advertiser_company_id):
  """Create one short-term and one long-term activity group and print them."""
  # Initialize appropriate service.
  activity_group_service = client.GetService('ActivityGroupService',
                                             version='v201308')

  def _make_group(name_template, lookback):
    # Both lookback windows (clicks and impressions) use the same value.
    return {
        'name': name_template % Utils.GetUniqueName(),
        'companyIds': [advertiser_company_id],
        'clicksLookback': lookback,
        'impressionsLookback': lookback
    }

  # Create the activity groups on the server.
  activity_groups = activity_group_service.CreateActivityGroups([
      _make_group('Short-term activity group #%s', '1'),
      _make_group('Long-term activity group #%s', '30')])

  # Display results.
  for activity_group in activity_groups:
    print ('Activity group with ID \'%s\' and name \'%s\' was created.'
           % (activity_group['id'], activity_group['name']))
if __name__ == '__main__':
# Initialize client object.
dfp_client = DfpClient(path=os.path.join('..', '..', '..', '..', '..'))
main(dfp_client, ADVERTISER_COMPANY_ID)
| apache-2.0 |
gacarrillor/QGIS | python/plugins/processing/script/DeleteScriptAction.py | 45 | 2468 | # -*- coding: utf-8 -*-
"""
***************************************************************************
DeleteScriptAction.py
---------------------
Date : August 2012
Copyright : (C) 2012 by Victor Olaya
Email : volayaf at gmail dot com
***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************
"""
__author__ = 'Victor Olaya'
__date__ = 'August 2012'
__copyright__ = '(C) 2012, Victor Olaya'
import os
from qgis.PyQt.QtCore import QCoreApplication
from qgis.PyQt.QtWidgets import QMessageBox
from qgis.core import QgsApplication, QgsProcessingAlgorithm
from processing.gui.ContextAction import ContextAction
from processing.script import ScriptUtils
class DeleteScriptAction(ContextAction):
    """Context-menu action that deletes a user script from disk."""

    def __init__(self):
        super().__init__()
        self.name = QCoreApplication.translate("DeleteScriptAction", "Delete Script…")

    def isEnabled(self):
        # Only algorithms owned by the 'script' provider can be deleted.
        return isinstance(self.itemData, QgsProcessingAlgorithm) and self.itemData.provider().id() == "script"

    def execute(self):
        """Ask for confirmation, delete the script file, refresh the provider."""
        reply = QMessageBox.question(None,
                                     self.tr("Delete Script"),
                                     self.tr("Are you sure you want to delete this script?"),
                                     QMessageBox.Yes | QMessageBox.No,
                                     QMessageBox.No)
        if reply != QMessageBox.Yes:
            return
        filePath = ScriptUtils.findAlgorithmSource(self.itemData.name())
        if filePath is None:
            QMessageBox.warning(None,
                                self.tr("Delete Script"),
                                self.tr("Can not find corresponding script file.")
                                )
            return
        os.remove(filePath)
        QgsApplication.processingRegistry().providerById("script").refreshAlgorithms()
ccellis/WHACK2016 | flask/lib/python2.7/site-packages/sqlalchemy/testing/suite/test_update_delete.py | 203 | 1582 | from .. import fixtures, config
from ..assertions import eq_
from sqlalchemy import Integer, String
from ..schema import Table, Column
class SimpleUpdateDeleteTest(fixtures.TablesTest):
    """Backend round-trip tests for basic UPDATE and DELETE statements."""

    # Tests mutate the table, so re-run the insert fixture for each test.
    run_deletes = 'each'
    __backend__ = True

    @classmethod
    def define_tables(cls, metadata):
        Table('plain_pk', metadata,
              Column('id', Integer, primary_key=True),
              Column('data', String(50))
              )

    @classmethod
    def insert_data(cls):
        config.db.execute(
            cls.tables.plain_pk.insert(),
            [
                {"id": 1, "data": "d1"},
                {"id": 2, "data": "d2"},
                {"id": 3, "data": "d3"},
            ]
        )

    def test_update(self):
        t = self.tables.plain_pk
        r = config.db.execute(
            t.update().where(t.c.id == 2),
            data="d2_new"
        )
        # An UPDATE result reports neither an insert nor returned rows.
        assert not r.is_insert
        assert not r.returns_rows

        eq_(
            config.db.execute(t.select().order_by(t.c.id)).fetchall(),
            [
                (1, "d1"),
                (2, "d2_new"),
                (3, "d3")
            ]
        )

    def test_delete(self):
        t = self.tables.plain_pk
        r = config.db.execute(
            t.delete().where(t.c.id == 2)
        )
        # A DELETE result likewise returns no rows.
        assert not r.is_insert
        assert not r.returns_rows
        eq_(
            config.db.execute(t.select().order_by(t.c.id)).fetchall(),
            [
                (1, "d1"),
                (3, "d3")
            ]
        )
__all__ = ('SimpleUpdateDeleteTest', )
| bsd-3-clause |
Silmathoron/NNGT | nngt/io/saving_helpers.py | 1 | 4298 | #!/usr/bin/env python
#-*- coding:utf-8 -*-
#
# This file is part of the NNGT project to generate and analyze
# neuronal networks and their activity.
# Copyright (C) 2015-2019 Tanguy Fardet
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
""" IO tools for NNGT """
def _neighbour_list(graph, separator, secondary, attributes):
'''
Generate a string containing the neighbour list of the graph as well as a
dict containing the notifiers as key and the associated values.
@todo: speed this up!
'''
lst_neighbours = list(graph.adjacency_matrix(mformat="lil").rows)
for v1 in range(graph.node_nb()):
for i, v2 in enumerate(lst_neighbours[v1]):
str_edge = str(v2)
eattr = graph.get_edge_attributes((v1, v2))
for attr in attributes:
str_edge += "{}{}".format(secondary, eattr[attr])
lst_neighbours[v1][i] = str_edge
lst_neighbours[v1] = "{}{}{}".format(
v1, separator, separator.join(lst_neighbours[v1]))
str_neighbours = "\n".join(lst_neighbours)
return str_neighbours
def _edge_list(graph, separator, secondary, attributes):
''' Generate a string containing the edge list and their properties. '''
edges = graph.edges_array
di_attributes = {
k: v for k, v in graph.edge_attributes.items()
if k != 'bweight'
}
end_strings = (len(attributes) - 1)*[secondary]
end_strings.append('')
lst_edges = []
for i, e in enumerate(edges):
str_edge = "{}{}{}".format(e[0], separator, e[1])
if attributes:
str_edge += separator
for end, attr in zip(end_strings, attributes):
str_edge += "{}{}".format(di_attributes[attr][i], end)
lst_edges.append(str_edge)
str_edges = "\n".join(lst_edges)
return str_edges
def _dot(graph, attributes, **kwargs):
    # Placeholder: DOT export is not implemented yet; returns None.
    pass
def _gml(graph, *args, **kwargs):
''' Generate a string containing the edge list and their properties. '''
node_str = " node\n [\n id {id}{attr}\n ]"
edge_str = " edge\n [\n source {s}\n target {t}\n{attr}\n ]"
attr_str = " {key} {val}"
indent = " "
# set nodes
lst_elements = []
for i in range(graph.node_nb()):
lst_attr = []
for k, v in graph.node_attributes.items():
lst_attr.append(attr_str.format(key=k, val=v[i]))
nattr = "\n" + "\n".join(lst_attr)
lst_elements.append(node_str.format(id=i, attr=nattr))
# set edges
edges = graph.edges_array
for i, e in enumerate(edges):
lst_attr = []
for k, v in graph.edge_attributes.items():
lst_attr.append(attr_str.format(key=k, val=v[i]))
eattr = "\n".join(lst_attr)
lst_elements.append(edge_str.format(s=e[0], t=e[1], attr=eattr))
str_gml = "\n".join(lst_elements)
str_gml += "\n]"
return str_gml
def _xml(graph, attributes, **kwargs):
    # Placeholder: GraphML/XML export is not implemented yet; returns None.
    pass
def _gt(graph, attributes, **kwargs):
    # Placeholder: graph-tool binary export is not implemented yet; returns None.
    pass
def _custom_info(graph_info, notifier, *args, **kwargs):
''' Format the graph information for custom formats '''
info_str = ""
for key, val in iter(graph_info.items()):
info_str += "{}{}={}\n".format(notifier, key, val)
return info_str
def _gml_info(graph_info, *args, **kwargs):
''' Format the graph information for the GML format '''
info_str = "graph\n[\n"
for key, val in iter(graph_info.items()):
if not key.startswith("na_"):
val = 1 if val is True else (0 if val is False else val)
info_str += " {} {}\n".format(key, val)
return info_str
def _str_bytes_len(s):
return len(s.encode('utf-8'))
| gpl-3.0 |
hpk42/pluggy | testing/test_details.py | 2 | 3565 | import warnings
import pytest
from pluggy import PluginManager, HookimplMarker, HookspecMarker
hookspec = HookspecMarker("example")
hookimpl = HookimplMarker("example")
def test_parse_hookimpl_override():
    """A PluginManager subclass can override parse_hookimpl_opts to pick up
    implementations that carry no hookimpl marker (here: names starting
    with "x1"), without disturbing explicitly marked impls."""
    class MyPluginManager(PluginManager):
        def parse_hookimpl_opts(self, module_or_class, name):
            opts = PluginManager.parse_hookimpl_opts(self, module_or_class, name)
            if opts is None:
                if name.startswith("x1"):
                    opts = {}
            return opts

    class Plugin:
        def x1meth(self):
            pass

        @hookimpl(hookwrapper=True, tryfirst=True)
        def x1meth2(self):
            yield  # pragma: no cover

    class Spec:
        @hookspec
        def x1meth(self):
            pass

        @hookspec
        def x1meth2(self):
            pass

    pm = MyPluginManager(hookspec.project_name)
    pm.register(Plugin())
    pm.add_hookspecs(Spec)
    # the unmarked impl was registered with all-default options ...
    assert not pm.hook.x1meth._nonwrappers[0].hookwrapper
    assert not pm.hook.x1meth._nonwrappers[0].tryfirst
    assert not pm.hook.x1meth._nonwrappers[0].trylast
    assert not pm.hook.x1meth._nonwrappers[0].optionalhook
    # ... while the marked one kept its explicit marker options
    assert pm.hook.x1meth2._wrappers[0].tryfirst
    assert pm.hook.x1meth2._wrappers[0].hookwrapper
def test_warn_when_deprecated_specified(recwarn):
    """Registering an impl for a spec marked warn_on_impl emits exactly that
    warning, attributed to the plugin's source location."""
    warning = DeprecationWarning("foo is deprecated")

    class Spec:
        @hookspec(warn_on_impl=warning)
        def foo(self):
            pass

    class Plugin:
        @hookimpl
        def foo(self):
            pass

    pm = PluginManager(hookspec.project_name)
    pm.add_hookspecs(Spec)
    with pytest.warns(DeprecationWarning) as records:
        pm.register(Plugin())
    (record,) = records
    # the very warning object passed to the spec is re-emitted ...
    assert record.message is warning
    # ... pointing at the implementing method, not at pluggy internals
    assert record.filename == Plugin.foo.__code__.co_filename
    assert record.lineno == Plugin.foo.__code__.co_firstlineno
def test_plugin_getattr_raises_errors():
    """Pluggy must be able to handle plugins which raise weird exceptions
    when getattr() gets called (#11).
    """
    class DontTouchMe:
        def __getattr__(self, x):
            raise Exception("cant touch me")

    class Module:
        pass

    # a module-like object whose attribute access explodes
    module = Module()
    module.x = DontTouchMe()

    pm = PluginManager(hookspec.project_name)
    # register() would raise an error
    pm.register(module, "donttouch")
    assert pm.get_plugin("donttouch") is module
def test_warning_on_call_vs_hookspec_arg_mismatch():
    """Verify that is a hook is called with less arguments then defined in the
    spec that a warning is emitted.
    """
    class Spec:
        @hookspec
        def myhook(self, arg1, arg2):
            pass

    class Plugin:
        @hookimpl
        def myhook(self, arg1):
            pass

    pm = PluginManager(hookspec.project_name)
    pm.register(Plugin())
    pm.add_hookspecs(Spec())
    with warnings.catch_warnings(record=True) as warns:
        warnings.simplefilter("always")
        # calling should trigger a warning: arg2 from the spec is missing
        pm.hook.myhook(arg1=1)
        assert len(warns) == 1
        warning = warns[-1]
        assert issubclass(warning.category, Warning)
        assert "Argument(s) ('arg2',)" in str(warning.message)
def test_repr():
    """HookImpl repr shows the registered plugin name and plugin object."""
    class Plugin:
        @hookimpl
        def myhook(self):
            raise NotImplementedError()

    pm = PluginManager(hookspec.project_name)

    plugin = Plugin()
    pname = pm.register(plugin)
    assert repr(pm.hook.myhook._nonwrappers[0]) == (
        "<HookImpl plugin_name=%r, plugin=%r>" % (pname, plugin)
    )
| mit |
vmiklos/vmexam | pdfium/find_old_experimental.py | 1 | 1574 | #!/usr/bin/env python3
#
# Copyright 2019 Miklos Vajna. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
#
"""Finds my old + experimental APIs."""
import subprocess
import time
def main() -> None:
    """Commandline interface to this module.

    Lists "Experimental API" markers under public/ whose blamed author is
    Miklos and whose last change is older than three years, oldest first.
    """
    apis_bytes = subprocess.check_output(["git", "grep", "-n", "Experimental API", "public/"])
    apis = apis_bytes.decode("utf-8").strip().split("\n")
    author_date_loc = []
    for api in apis:
        # git grep output is "<path>:<line>:<match>".
        tokens = api.split(":")
        path = tokens[0]
        line_num = tokens[1]
        blame_bytes = subprocess.check_output(["git", "blame", "--porcelain", "-L", line_num + "," + line_num, path])
        blame_lines = blame_bytes.decode("utf-8").strip().split("\n")
        date = 0
        author = ""
        for line in blame_lines:
            if line.startswith("author-time"):
                date = int(line.split(" ")[1])
            elif line.startswith("author "):
                author = line.split(" ")[1]
        author_date_loc.append((author, date, path + ":" + line_num))
    author_date_loc.sort(key=lambda entry: entry[1])
    today = time.time()
    # 3 years, using 365-day years in seconds.
    cutoff = today - 3 * 31536000
    for author, date, loc in author_date_loc:
        if author != "Miklos":
            continue
        if date >= cutoff:
            # Recent enough; not a candidate for de-experimentalization.
            continue
        parsed_date = time.localtime(date)
        date_string = time.strftime("%Y-%m-%d", parsed_date)
        print(f"date: '{date_string}', loc: {loc}")
main()
# vim:set shiftwidth=4 softtabstop=4 expandtab:
| mit |
ai-se/x-effort | Models/isbsg10.py | 2 | 2354 | """
# https://code.google.com/p/promisedata/source/browse/#svn%2Ftrunk%2Feffort%2Falbrecht
Standard header:
"""
from __future__ import division,print_function
import sys
sys.dont_write_bytecode = True
from lib import *
"""
Data
"""
def isbsg10(weighFeature = False,
            split = "median"):
    # Build the ISBSG10 effort-estimation dataset.
    # Rows: 11 independent features (see `indep`) followed by 'Effort'.
    # Feature values are categorical codes; shorthand below maps the usual
    # very-low..extra-high scale, with _ meaning "not applicable".
    vl=1;l=2;n=3;h=4;vh=5;xh=6;_=0
    return data(indep= [
        # 0..10
        'Data_Quality','UFP','IS','DP','LT','PPL','CA','FS','RS','Recording_Method','FPS'],
        less = ['Effort'],
        _rows=[
            [1,1,1,1,1,1,1,225,1,1,1,1856],
            [1,1,1,1,1,2,1,599,2,1,2,10083],
            [1,1,1,2,1,2,1,333,2,1,3,5208],
            [1,1,2,3,2,3,1,748,2,2,3,1518],
            [1,1,1,1,1,4,1,158,1,1,4,3376],
            [1,1,1,1,1,2,1,427,2,1,3,5170],
            [2,2,3,4,3,5,1,461,2,3,4,12149],
            [1,1,4,3,2,3,1,257,1,2,3,452],
            [1,1,1,2,3,6,1,115,1,1,4,441],
            [1,1,5,3,2,3,1,116,1,4,4,112],
            [1,1,1,2,1,7,1,323,2,1,3,1530],
            [1,1,1,2,1,1,1,134,1,1,3,1807],
            [1,1,1,2,1,14,1,292,1,1,3,1087],
            [2,2,4,4,1,8,1,399,2,3,3,7037],
            [1,1,1,1,1,2,1,44,3,1,4,784],
            [1,1,1,2,1,9,1,298,1,1,4,3268],
            [1,1,1,2,1,2,1,66,3,1,3,800],
            [1,1,6,3,2,3,1,243,1,2,4,257],
            [1,1,1,4,1,10,1,1105,4,1,5,14453],
            [1,1,4,3,2,3,1,679,2,4,4,326],
            [2,2,7,5,1,4,1,303,2,3,4,8490],
            [1,1,1,2,1,1,1,147,1,1,3,672],
            [1,1,7,3,2,3,1,143,1,2,3,98],
            [1,1,1,2,1,11,1,614,2,1,4,3280],
            [2,2,7,4,3,5,1,183,1,3,4,7327],
            [1,1,8,3,2,3,1,138,1,2,4,87],
            [1,1,1,2,3,12,1,129,1,1,3,1261],
            [1,1,1,2,1,2,1,205,1,1,3,3272],
            [1,1,1,2,1,1,1,471,2,1,3,1464],
            [1,1,1,5,1,4,1,97,3,1,3,1273],
            [1,1,3,3,2,3,1,1371,4,2,3,2274],
            [1,1,1,4,1,2,1,291,1,1,4,1772],
            [1,1,9,3,2,3,1,995,2,2,4,614],
            [1,2,4,2,3,6,2,211,1,3,4,1021],
            [2,2,10,2,3,13,2,192,1,3,4,1806],
            [2,2,10,2,3,13,2,98,3,3,4,921],
            [2,2,7,4,1,14,1,112,1,3,4,2134]
        ],
        # COCOMO-style scale-factor table (vlow..xhigh); note _isCocomo is
        # False, so these tunings are presumably unused here -- TODO confirm.
        _tunings =[[
            #         vlow  low   nom   high  vhigh xhigh
            #scale factors:
            'Prec',   6.20, 4.96, 3.72, 2.48, 1.24, _ ],[
            'Flex',   5.07, 4.05, 3.04, 2.03, 1.01, _ ],[
            'Resl',   7.07, 5.65, 4.24, 2.83, 1.41, _ ],[
            'Pmat',   7.80, 6.24, 4.68, 3.12, 1.56, _ ],[
            'Team',   5.48, 4.38, 3.29, 2.19, 1.01, _ ]],
        weighFeature = weighFeature,
        _split = split,
        _isCocomo = False
    )
def _isbsg10(): print(isbsg10()) | mit |
MOSAIC-UA/802.11ah-ns3 | ns-3/.waf-1.8.12-f00e5b53f6bbeab1384a38c9cc5d51f7/waflib/Tools/suncc.py | 10 | 1186 | #! /usr/bin/env python
# encoding: utf-8
# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
from waflib import Utils
from waflib.Tools import ccroot,ar
from waflib.Configure import conf
@conf
def find_scc(conf):
	# Locate the Sun Studio C compiler and verify it by probing '-flags',
	# which only the Sun compiler accepts.
	v=conf.env
	cc=conf.find_program('cc',var='CC')
	try:
		conf.cmd_and_log(cc+['-flags'])
	except Exception:
		conf.fatal('%r is not a Sun compiler'%cc)
	v.CC_NAME='sun'
	conf.get_suncc_version(cc)
@conf
def scc_common_flags(conf):
	# Populate the standard waf flag/pattern variables for the Sun C
	# compiler: command-line templates, library naming patterns, and the
	# shared-vs-static link markers.
	v=conf.env
	v['CC_SRC_F']=[]
	v['CC_TGT_F']=['-c','-o']
	if not v['LINK_CC']:v['LINK_CC']=v['CC']
	v['CCLNK_SRC_F']=''
	v['CCLNK_TGT_F']=['-o']
	v['CPPPATH_ST']='-I%s'
	v['DEFINES_ST']='-D%s'
	v['LIB_ST']='-l%s'
	v['LIBPATH_ST']='-L%s'
	v['STLIB_ST']='-l%s'
	v['STLIBPATH_ST']='-L%s'
	v['SONAME_ST']='-Wl,-h,%s'
	v['SHLIB_MARKER']='-Bdynamic'
	v['STLIB_MARKER']='-Bstatic'
	v['cprogram_PATTERN']='%s'
	v['CFLAGS_cshlib']=['-Kpic','-DPIC']
	v['LINKFLAGS_cshlib']=['-G']
	v['cshlib_PATTERN']='lib%s.so'
	v['LINKFLAGS_cstlib']=['-Bstatic']
	v['cstlib_PATTERN']='lib%s.a'
def configure(conf):
	# Standard waf tool entry point: detect the compiler, then load the
	# generic C support and flag handling.
	conf.find_scc()
	conf.find_ar()
	conf.scc_common_flags()
	conf.cc_load_tools()
	conf.cc_add_flags()
	conf.link_add_flags()
| gpl-2.0 |
auduny/home-assistant | homeassistant/components/smartthings/climate.py | 5 | 15443 | """Support for climate devices through the SmartThings cloud API."""
import asyncio
import logging
from typing import Iterable, Optional, Sequence
from homeassistant.components.climate import (
DOMAIN as CLIMATE_DOMAIN, ClimateDevice)
from homeassistant.components.climate.const import (
ATTR_OPERATION_MODE, ATTR_TARGET_TEMP_HIGH, ATTR_TARGET_TEMP_LOW,
STATE_AUTO, STATE_COOL, STATE_DRY, STATE_ECO, STATE_FAN_ONLY, STATE_HEAT,
SUPPORT_FAN_MODE, SUPPORT_ON_OFF, SUPPORT_OPERATION_MODE,
SUPPORT_TARGET_TEMPERATURE, SUPPORT_TARGET_TEMPERATURE_HIGH,
SUPPORT_TARGET_TEMPERATURE_LOW)
from homeassistant.const import (
ATTR_TEMPERATURE, STATE_OFF, TEMP_CELSIUS, TEMP_FAHRENHEIT)
from . import SmartThingsEntity
from .const import DATA_BROKERS, DOMAIN
ATTR_OPERATION_STATE = 'operation_state'

# SmartThings thermostat mode -> Home Assistant operation state.
# Several device modes collapse onto one HA state (eco/rush hour,
# heat/emergency heat).
MODE_TO_STATE = {
    'auto': STATE_AUTO,
    'cool': STATE_COOL,
    'eco': STATE_ECO,
    'rush hour': STATE_ECO,
    'emergency heat': STATE_HEAT,
    'heat': STATE_HEAT,
    'off': STATE_OFF
}
STATE_TO_MODE = {
    STATE_AUTO: 'auto',
    STATE_COOL: 'cool',
    STATE_ECO: 'eco',
    STATE_HEAT: 'heat',
    STATE_OFF: 'off'
}

# A/C devices use a different mode vocabulary than thermostats.
AC_MODE_TO_STATE = {
    'auto': STATE_AUTO,
    'cool': STATE_COOL,
    'dry': STATE_DRY,
    'heat': STATE_HEAT,
    'fanOnly': STATE_FAN_ONLY
}
STATE_TO_AC_MODE = {v: k for k, v in AC_MODE_TO_STATE.items()}

# Numeric fan speed reported by the device -> HA fan-mode name.
SPEED_TO_FAN_MODE = {
    0: 'auto',
    1: 'low',
    2: 'medium',
    3: 'high',
    4: 'turbo'
}
FAN_MODE_TO_SPEED = {v: k for k, v in SPEED_TO_FAN_MODE.items()}

UNIT_MAP = {
    'C': TEMP_CELSIUS,
    'F': TEMP_FAHRENHEIT
}

_LOGGER = logging.getLogger(__name__)
async def async_setup_platform(
        hass, config, async_add_entities, discovery_info=None):
    """Platform uses config entry setup (see async_setup_entry); no-op here."""
    # Intentionally empty: the redundant `pass` after the docstring was removed.
async def async_setup_entry(hass, config_entry, async_add_entities):
    """Add climate entities for a config entry."""
    from pysmartthings import Capability
    # A device exposing this exact capability set is treated as an air
    # conditioner; anything else assigned to the climate platform becomes
    # a thermostat entity.
    ac_capabilities = [
        Capability.air_conditioner_mode,
        Capability.fan_speed,
        Capability.switch,
        Capability.temperature_measurement,
        Capability.thermostat_cooling_setpoint]

    broker = hass.data[DOMAIN][DATA_BROKERS][config_entry.entry_id]
    entities = []
    for device in broker.devices.values():
        if not broker.any_assigned(device.device_id, CLIMATE_DOMAIN):
            continue
        if all(capability in device.capabilities
               for capability in ac_capabilities):
            entities.append(SmartThingsAirConditioner(device))
        else:
            entities.append(SmartThingsThermostat(device))
    # True: request an initial state update for the new entities.
    async_add_entities(entities, True)
def get_capabilities(capabilities: Sequence[str]) -> Optional[Sequence[str]]:
    """Return all capabilities supported if minimum required are present.

    The full `supported` list is claimed whenever the device satisfies any
    one of three profiles: the legacy 'thermostat' capability, the modern
    thermostat capability set, or the A/C capability set.  Returns None if
    none of the profiles match.
    """
    from pysmartthings import Capability
    supported = [
        Capability.air_conditioner_mode,
        Capability.demand_response_load_control,
        Capability.fan_speed,
        Capability.power_consumption_report,
        Capability.relative_humidity_measurement,
        Capability.switch,
        Capability.temperature_measurement,
        Capability.thermostat,
        Capability.thermostat_cooling_setpoint,
        Capability.thermostat_fan_mode,
        Capability.thermostat_heating_setpoint,
        Capability.thermostat_mode,
        Capability.thermostat_operating_state]
    # Can have this legacy/deprecated capability
    if Capability.thermostat in capabilities:
        return supported
    # Or must have all of these thermostat capabilities
    thermostat_capabilities = [
        Capability.temperature_measurement,
        Capability.thermostat_cooling_setpoint,
        Capability.thermostat_heating_setpoint,
        Capability.thermostat_mode]
    if all(capability in capabilities
           for capability in thermostat_capabilities):
        return supported
    # Or must have all of these A/C capabilities
    ac_capabilities = [
        Capability.air_conditioner_mode,
        Capability.fan_speed,
        Capability.switch,
        Capability.temperature_measurement,
        Capability.thermostat_cooling_setpoint]
    if all(capability in capabilities
           for capability in ac_capabilities):
        return supported
    return None
class SmartThingsThermostat(SmartThingsEntity, ClimateDevice):
    """Define a SmartThings climate entity."""

    def __init__(self, device):
        """Init the class."""
        super().__init__(device)
        # Feature flags depend only on the device's capabilities, which do
        # not change at runtime, so compute them once.
        self._supported_features = self._determine_features()
        self._current_operation = None
        self._operations = None

    def _determine_features(self):
        """Return the climate feature flags supported by the device."""
        from pysmartthings import Capability
        flags = SUPPORT_OPERATION_MODE \
            | SUPPORT_TARGET_TEMPERATURE \
            | SUPPORT_TARGET_TEMPERATURE_LOW \
            | SUPPORT_TARGET_TEMPERATURE_HIGH
        if self._device.get_capability(
                Capability.thermostat_fan_mode, Capability.thermostat):
            flags |= SUPPORT_FAN_MODE
        return flags

    async def async_set_fan_mode(self, fan_mode):
        """Set new target fan mode."""
        await self._device.set_thermostat_fan_mode(fan_mode, set_status=True)
        # State is set optimistically in the command above, therefore update
        # the entity state ahead of receiving the confirming push updates
        self.async_schedule_update_ha_state(True)

    async def async_set_operation_mode(self, operation_mode):
        """Set new target operation mode."""
        mode = STATE_TO_MODE[operation_mode]
        await self._device.set_thermostat_mode(mode, set_status=True)
        # State is set optimistically in the command above, therefore update
        # the entity state ahead of receiving the confirming push updates
        self.async_schedule_update_ha_state(True)

    async def async_set_temperature(self, **kwargs):
        """Set new operation mode and target temperatures."""
        # Operation state
        operation_state = kwargs.get(ATTR_OPERATION_MODE)
        if operation_state:
            mode = STATE_TO_MODE[operation_state]
            await self._device.set_thermostat_mode(mode, set_status=True)
            # Refresh now so the setpoint branch below sees the new mode.
            await self.async_update()
        # Heat/cool setpoint
        heating_setpoint = None
        cooling_setpoint = None
        if self.current_operation == STATE_HEAT:
            heating_setpoint = kwargs.get(ATTR_TEMPERATURE)
        elif self.current_operation == STATE_COOL:
            cooling_setpoint = kwargs.get(ATTR_TEMPERATURE)
        else:
            # Auto (or unknown) mode: accept a low/high range instead.
            heating_setpoint = kwargs.get(ATTR_TARGET_TEMP_LOW)
            cooling_setpoint = kwargs.get(ATTR_TARGET_TEMP_HIGH)
        tasks = []
        if heating_setpoint is not None:
            tasks.append(self._device.set_heating_setpoint(
                round(heating_setpoint, 3), set_status=True))
        if cooling_setpoint is not None:
            tasks.append(self._device.set_cooling_setpoint(
                round(cooling_setpoint, 3), set_status=True))
        await asyncio.gather(*tasks)
        # State is set optimistically in the commands above, therefore update
        # the entity state ahead of receiving the confirming push updates
        self.async_schedule_update_ha_state(True)

    async def async_update(self):
        """Update the attributes of the climate device."""
        thermostat_mode = self._device.status.thermostat_mode
        self._current_operation = MODE_TO_STATE.get(thermostat_mode)
        if self._current_operation is None:
            # Fixed: the two string fragments previously concatenated to
            # "an invalidthermostat mode" (missing separator space).
            _LOGGER.debug('Device %s (%s) returned an invalid '
                          'thermostat mode: %s', self._device.label,
                          self._device.device_id, thermostat_mode)
        supported_modes = self._device.status.supported_thermostat_modes
        if isinstance(supported_modes, Iterable):
            operations = set()
            for mode in supported_modes:
                state = MODE_TO_STATE.get(mode)
                if state is not None:
                    operations.add(state)
                else:
                    _LOGGER.debug('Device %s (%s) returned an invalid '
                                  'supported thermostat mode: %s',
                                  self._device.label, self._device.device_id,
                                  mode)
            self._operations = operations
        else:
            _LOGGER.debug('Device %s (%s) returned invalid supported '
                          'thermostat modes: %s', self._device.label,
                          self._device.device_id, supported_modes)

    @property
    def current_fan_mode(self):
        """Return the fan setting."""
        return self._device.status.thermostat_fan_mode

    @property
    def current_humidity(self):
        """Return the current humidity."""
        return self._device.status.humidity

    @property
    def current_operation(self):
        """Return current operation ie. heat, cool, idle."""
        return self._current_operation

    @property
    def current_temperature(self):
        """Return the current temperature."""
        return self._device.status.temperature

    @property
    def device_state_attributes(self):
        """Return device specific state attributes."""
        return {
            ATTR_OPERATION_STATE:
                self._device.status.thermostat_operating_state
        }

    @property
    def fan_list(self):
        """Return the list of available fan modes."""
        return self._device.status.supported_thermostat_fan_modes

    @property
    def operation_list(self):
        """Return the list of available operation modes."""
        return self._operations

    @property
    def supported_features(self):
        """Return the supported features."""
        return self._supported_features

    @property
    def target_temperature(self):
        """Return the temperature we try to reach."""
        if self.current_operation == STATE_COOL:
            return self._device.status.cooling_setpoint
        if self.current_operation == STATE_HEAT:
            return self._device.status.heating_setpoint
        return None

    @property
    def target_temperature_high(self):
        """Return the highbound target temperature we try to reach."""
        if self.current_operation == STATE_AUTO:
            return self._device.status.cooling_setpoint
        return None

    @property
    def target_temperature_low(self):
        """Return the lowbound target temperature we try to reach."""
        if self.current_operation == STATE_AUTO:
            return self._device.status.heating_setpoint
        return None

    @property
    def temperature_unit(self):
        """Return the unit of measurement."""
        from pysmartthings import Attribute
        return UNIT_MAP.get(
            self._device.status.attributes[Attribute.temperature].unit)
class SmartThingsAirConditioner(SmartThingsEntity, ClimateDevice):
    """Define a SmartThings Air Conditioner."""
    async def async_set_fan_mode(self, fan_mode):
        """Set new target fan mode."""
        # Translate the HA fan-mode name into the SmartThings fan speed value.
        await self._device.set_fan_speed(
            FAN_MODE_TO_SPEED[fan_mode], set_status=True)
        # State is set optimistically in the command above, therefore update
        # the entity state ahead of receiving the confirming push updates
        self.async_schedule_update_ha_state()
    async def async_set_operation_mode(self, operation_mode):
        """Set new target operation mode."""
        await self._device.set_air_conditioner_mode(
            STATE_TO_AC_MODE[operation_mode], set_status=True)
        # State is set optimistically in the command above, therefore update
        # the entity state ahead of receiving the confirming push updates
        self.async_schedule_update_ha_state()
    async def async_set_temperature(self, **kwargs):
        """Set new target temperature (and, optionally, the operation mode)."""
        tasks = []
        # operation mode
        operation_mode = kwargs.get(ATTR_OPERATION_MODE)
        if operation_mode:
            tasks.append(self.async_set_operation_mode(operation_mode))
        # temperature
        tasks.append(self._device.set_cooling_setpoint(
            kwargs[ATTR_TEMPERATURE], set_status=True))
        # Issue the mode change and the setpoint update concurrently.
        await asyncio.gather(*tasks)
        # State is set optimistically in the command above, therefore update
        # the entity state ahead of receiving the confirming push updates
        self.async_schedule_update_ha_state()
    async def async_turn_on(self):
        """Turn device on."""
        await self._device.switch_on(set_status=True)
        # State is set optimistically in the command above, therefore update
        # the entity state ahead of receiving the confirming push updates
        self.async_schedule_update_ha_state()
    async def async_turn_off(self):
        """Turn device off."""
        await self._device.switch_off(set_status=True)
        # State is set optimistically in the command above, therefore update
        # the entity state ahead of receiving the confirming push updates
        self.async_schedule_update_ha_state()
    @property
    def current_fan_mode(self):
        """Return the fan setting."""
        # Reverse mapping of FAN_MODE_TO_SPEED; None if the speed is unknown.
        return SPEED_TO_FAN_MODE.get(self._device.status.fan_speed)
    @property
    def current_operation(self):
        """Return current operation ie. heat, cool, idle."""
        return AC_MODE_TO_STATE.get(self._device.status.air_conditioner_mode)
    @property
    def current_temperature(self):
        """Return the current temperature."""
        return self._device.status.temperature
    @property
    def device_state_attributes(self):
        """
        Return device specific state attributes.

        Include attributes from the Demand Response Load Control (drlc)
        and Power Consumption capabilities.
        """
        attributes = [
            'drlc_status_duration',
            'drlc_status_level',
            'drlc_status_start',
            'drlc_status_override',
            'power_consumption_start',
            'power_consumption_power',
            'power_consumption_energy',
            'power_consumption_end'
        ]
        state_attributes = {}
        # Only expose the attributes the device actually reports.
        for attribute in attributes:
            value = getattr(self._device.status, attribute)
            if value is not None:
                state_attributes[attribute] = value
        return state_attributes
    @property
    def fan_list(self):
        """Return the list of available fan modes."""
        return list(FAN_MODE_TO_SPEED)
    @property
    def is_on(self):
        """Return true if on."""
        return self._device.status.switch
    @property
    def operation_list(self):
        """Return the list of available operation modes."""
        return list(STATE_TO_AC_MODE)
    @property
    def supported_features(self):
        """Return the supported features."""
        return SUPPORT_OPERATION_MODE | SUPPORT_TARGET_TEMPERATURE \
            | SUPPORT_FAN_MODE | SUPPORT_ON_OFF
    @property
    def target_temperature(self):
        """Return the temperature we try to reach."""
        return self._device.status.cooling_setpoint
    @property
    def temperature_unit(self):
        """Return the unit of measurement."""
        from pysmartthings import Attribute
        # Convert the SmartThings unit string into the HA temperature unit.
        return UNIT_MAP.get(
            self._device.status.attributes[Attribute.temperature].unit)
| apache-2.0 |
romain-dartigues/ansible | lib/ansible/modules/cloud/cloudstack/cs_iso.py | 5 | 13550 | #!/usr/bin/python
# -*- coding: utf-8 -*-
#
# (c) 2015, René Moser <mail@renemoser.net>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['stableinterface'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: cs_iso
short_description: Manages ISO images on Apache CloudStack based clouds.
description:
- Register and remove ISO images.
version_added: '2.0'
author: "René Moser (@resmo)"
options:
name:
description:
- Name of the ISO.
required: true
display_text:
description:
- Display text of the ISO.
- If not specified, C(name) will be used.
version_added: "2.4"
url:
description:
- URL where the ISO can be downloaded from. Required if C(state) is present.
os_type:
description:
- Name of the OS that best represents the OS of this ISO. If the iso is bootable this parameter needs to be passed. Required if C(state) is present.
is_ready:
description:
- This flag is used for searching existing ISOs. If set to C(yes), it will only list ISO ready for deployment e.g.
successfully downloaded and installed. Recommended to set it to C(no).
type: bool
default: no
is_public:
description:
- Register the ISO to be publicly available to all users. Only used if C(state) is present.
is_featured:
description:
- Register the ISO to be featured. Only used if C(state) is present.
type: bool
is_dynamically_scalable:
description:
      - Register the ISO having XS/VMWare tools installed in order to support dynamic scaling of VM cpu/memory. Only used if C(state) is present.
type: bool
checksum:
description:
- The MD5 checksum value of this ISO. If set, we search by checksum instead of name.
bootable:
description:
- Register the ISO to be bootable. Only used if C(state) is present.
type: bool
domain:
description:
- Domain the ISO is related to.
account:
description:
- Account the ISO is related to.
project:
description:
- Name of the project the ISO to be registered in.
zone:
description:
- Name of the zone you wish the ISO to be registered or deleted from.
- If not specified, first zone found will be used.
cross_zones:
description:
- Whether the ISO should be synced or removed across zones.
- Mutually exclusive with C(zone).
type: bool
default: 'no'
version_added: "2.4"
iso_filter:
description:
- Name of the filter used to search for the ISO.
default: 'self'
choices: [ 'featured', 'self', 'selfexecutable','sharedexecutable','executable', 'community' ]
state:
description:
- State of the ISO.
default: 'present'
choices: [ 'present', 'absent' ]
poll_async:
description:
- Poll async jobs until job has finished.
type: bool
default: 'yes'
version_added: "2.3"
tags:
description:
- List of tags. Tags are a list of dictionaries having keys C(key) and C(value).
      - "To delete all tags, set an empty list e.g. C(tags: [])."
aliases: [ 'tag' ]
version_added: "2.4"
extends_documentation_fragment: cloudstack
'''
EXAMPLES = '''
# Register an ISO if ISO name does not already exist.
- local_action:
module: cs_iso
name: Debian 7 64-bit
url: http://mirror.switch.ch/ftp/mirror/debian-cd/current/amd64/iso-cd/debian-7.7.0-amd64-netinst.iso
os_type: Debian GNU/Linux 7(64-bit)
# Register an ISO with given name if ISO md5 checksum does not already exist.
- local_action:
module: cs_iso
name: Debian 7 64-bit
url: http://mirror.switch.ch/ftp/mirror/debian-cd/current/amd64/iso-cd/debian-7.7.0-amd64-netinst.iso
os_type: Debian GNU/Linux 7(64-bit)
checksum: 0b31bccccb048d20b551f70830bb7ad0
# Remove an ISO by name
- local_action:
module: cs_iso
name: Debian 7 64-bit
state: absent
# Remove an ISO by checksum
- local_action:
module: cs_iso
name: Debian 7 64-bit
checksum: 0b31bccccb048d20b551f70830bb7ad0
state: absent
'''
RETURN = '''
---
id:
description: UUID of the ISO.
returned: success
type: string
sample: a6f7a5fc-43f8-11e5-a151-feff819cdc9f
name:
description: Name of the ISO.
returned: success
type: string
sample: Debian 7 64-bit
display_text:
description: Text to be displayed of the ISO.
returned: success
type: string
sample: Debian 7.7 64-bit minimal 2015-03-19
zone:
description: Name of zone the ISO is registered in.
returned: success
type: string
sample: zuerich
status:
description: Status of the ISO.
returned: success
type: string
sample: Successfully Installed
is_ready:
description: True if the ISO is ready to be deployed from.
returned: success
type: boolean
sample: true
is_public:
description: True if the ISO is public.
returned: success
type: boolean
sample: true
version_added: "2.4"
bootable:
description: True if the ISO is bootable.
returned: success
type: boolean
sample: true
version_added: "2.4"
is_featured:
description: True if the ISO is featured.
returned: success
type: boolean
sample: true
version_added: "2.4"
format:
description: Format of the ISO.
returned: success
type: string
sample: ISO
version_added: "2.4"
os_type:
  description: Type of the OS.
returned: success
type: string
sample: CentOS 6.5 (64-bit)
version_added: "2.4"
checksum:
description: MD5 checksum of the ISO.
returned: success
type: string
sample: 0b31bccccb048d20b551f70830bb7ad0
created:
description: Date of registering.
returned: success
type: string
sample: 2015-03-29T14:57:06+0200
cross_zones:
description: true if the ISO is managed across all zones, false otherwise.
returned: success
type: boolean
sample: false
version_added: "2.4"
domain:
description: Domain the ISO is related to.
returned: success
type: string
sample: example domain
account:
description: Account the ISO is related to.
returned: success
type: string
sample: example account
project:
description: Project the ISO is related to.
returned: success
type: string
sample: example project
tags:
description: List of resource tags associated with the ISO.
returned: success
type: dict
sample: '[ { "key": "foo", "value": "bar" } ]'
version_added: "2.4"
'''
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.cloudstack import (
AnsibleCloudStack,
cs_argument_spec,
cs_required_together
)
class AnsibleCloudStackIso(AnsibleCloudStack):
    """Manage ISO images on CloudStack: register, update and delete."""
    def __init__(self, module):
        """Initialize the handler with result-key mapping and ISO cache."""
        super(AnsibleCloudStackIso, self).__init__(module)
        # Mapping of CloudStack API response keys to the keys used in the
        # module's result dict (see the RETURN documentation above).
        self.returns = {
            'checksum': 'checksum',
            'status': 'status',
            'isready': 'is_ready',
            'crossZones': 'cross_zones',
            'format': 'format',
            'ostypename': 'os_type',
            'isfeatured': 'is_featured',
            'bootable': 'bootable',
            'ispublic': 'is_public',
        }
        # Cached API representation of the ISO; filled lazily by get_iso().
        self.iso = None
    def _get_common_args(self):
        """Return the API arguments shared by registerIso and updateIso."""
        return {
            'name': self.module.params.get('name'),
            'displaytext': self.get_or_fallback('display_text', 'name'),
            'isdynamicallyscalable': self.module.params.get('is_dynamically_scalable'),
            'ostypeid': self.get_os_type('id'),
            'bootable': self.module.params.get('bootable'),
        }
    def register_iso(self):
        """Register a new ISO; return its API dict (None in check mode)."""
        args = self._get_common_args()
        args.update({
            'domainid': self.get_domain('id'),
            'account': self.get_account('name'),
            'projectid': self.get_project('id'),
            'checksum': self.module.params.get('checksum'),
            'isfeatured': self.module.params.get('is_featured'),
            'ispublic': self.module.params.get('is_public'),
        })
        if not self.module.params.get('cross_zones'):
            args['zoneid'] = self.get_zone(key='id')
        else:
            # zoneid=-1 asks the API to register the ISO across all zones.
            args['zoneid'] = -1
        # Validate required parameters before issuing the API call.
        if args['bootable'] and not args['ostypeid']:
            self.module.fail_json(msg="OS type 'os_type' is required if 'bootable=true'.")
        args['url'] = self.module.params.get('url')
        if not args['url']:
            self.module.fail_json(msg="URL is required.")
        self.result['changed'] = True
        if not self.module.check_mode:
            res = self.query_api('registerIso', **args)
            self.iso = res['iso'][0]
        return self.iso
    def present_iso(self):
        """Ensure the ISO exists and is up to date; return its API dict."""
        iso = self.get_iso()
        if not iso:
            iso = self.register_iso()
        else:
            iso = self.update_iso(iso)
        if iso:
            # Reconcile resource tags after create/update.
            iso = self.ensure_tags(resource=iso, resource_type='ISO')
            self.iso = iso
        return iso
    def update_iso(self, iso):
        """Update mutable ISO properties if they differ; return the ISO dict."""
        args = self._get_common_args()
        args.update({
            'id': iso['id'],
        })
        if self.has_changed(args, iso):
            self.result['changed'] = True
            if not self.module.params.get('cross_zones'):
                args['zoneid'] = self.get_zone(key='id')
            else:
                # Workaround API does not return cross_zones=true
                self.result['cross_zones'] = True
                args['zoneid'] = -1
            if not self.module.check_mode:
                res = self.query_api('updateIso', **args)
                self.iso = res['iso']
        return self.iso
    def get_iso(self):
        """Find the ISO (by checksum if set, else by name); cache the result."""
        if not self.iso:
            args = {
                'isready': self.module.params.get('is_ready'),
                'isofilter': self.module.params.get('iso_filter'),
                'domainid': self.get_domain('id'),
                'account': self.get_account('name'),
                'projectid': self.get_project('id'),
            }
            if not self.module.params.get('cross_zones'):
                args['zoneid'] = self.get_zone(key='id')
            # if checksum is set, we only look on that.
            checksum = self.module.params.get('checksum')
            if not checksum:
                args['name'] = self.module.params.get('name')
            isos = self.query_api('listIsos', **args)
            if isos:
                if not checksum:
                    # Name lookup: take the first match returned by the API.
                    self.iso = isos['iso'][0]
                else:
                    # Checksum lookup: scan for an exact checksum match.
                    for i in isos['iso']:
                        if i['checksum'] == checksum:
                            self.iso = i
                            break
        return self.iso
    def absent_iso(self):
        """Delete the ISO if it exists; return the previously found dict."""
        iso = self.get_iso()
        if iso:
            self.result['changed'] = True
            args = {
                'id': iso['id'],
                'projectid': self.get_project('id'),
            }
            if not self.module.params.get('cross_zones'):
                args['zoneid'] = self.get_zone(key='id')
            if not self.module.check_mode:
                res = self.query_api('deleteIso', **args)
                poll_async = self.module.params.get('poll_async')
                if poll_async:
                    # Wait for the async delete job to finish.
                    self.poll_job(res, 'iso')
        return iso
    def get_result(self, iso):
        """Build the module result dict from the ISO API representation."""
        super(AnsibleCloudStackIso, self).get_result(iso)
        # Workaround API does not return cross_zones=true
        if self.module.params.get('cross_zones'):
            self.result['cross_zones'] = True
            if 'zone' in self.result:
                del self.result['zone']
        return self.result
def main():
    """Build the Ansible module, dispatch on state, and emit the result."""
    spec = cs_argument_spec()
    spec.update(
        name=dict(required=True),
        display_text=dict(),
        url=dict(),
        os_type=dict(),
        zone=dict(),
        cross_zones=dict(type='bool', default=False),
        iso_filter=dict(default='self', choices=['featured', 'self', 'selfexecutable', 'sharedexecutable', 'executable', 'community']),
        domain=dict(),
        account=dict(),
        project=dict(),
        checksum=dict(),
        is_ready=dict(type='bool', default=False),
        bootable=dict(type='bool'),
        is_featured=dict(type='bool'),
        is_dynamically_scalable=dict(type='bool'),
        state=dict(choices=['present', 'absent'], default='present'),
        poll_async=dict(type='bool', default=True),
        tags=dict(type='list', aliases=['tag']),
    )
    module = AnsibleModule(
        argument_spec=spec,
        required_together=cs_required_together(),
        mutually_exclusive=(
            ['zone', 'cross_zones'],
        ),
        supports_check_mode=True
    )
    handler = AnsibleCloudStackIso(module)
    # Dispatch on the requested state.
    if module.params.get('state') == 'absent':
        iso = handler.absent_iso()
    else:
        iso = handler.present_iso()
    module.exit_json(**handler.get_result(iso))


if __name__ == '__main__':
    main()
| gpl-3.0 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.