repo_name stringlengths 5 100 | path stringlengths 4 375 | copies stringclasses 991
values | size stringlengths 4 7 | content stringlengths 666 1M | license stringclasses 15
values |
|---|---|---|---|---|---|
openstack/watcher | watcher/tests/api/v1/test_utils.py | 2 | 2219 | # Copyright 2013 Red Hat, Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import wsme
from oslo_config import cfg
from watcher.api.controllers.v1 import utils
from watcher.tests import base
CONF = cfg.CONF
class TestApiUtils(base.TestCase):
    """Tests for the helpers in watcher.api.controllers.v1.utils."""

    def test_validate_limit(self):
        # A value below the configured maximum is returned unchanged.
        limit = utils.validate_limit(10)
        # BUG FIX: the original asserted `assertEqual(10, 10)`, a tautology
        # that never checked the function's return value.
        self.assertEqual(10, limit)
        # Values above the maximum are clamped to CONF.api.max_limit.
        limit = utils.validate_limit(999999999)
        self.assertEqual(CONF.api.max_limit, limit)
        # A negative limit is rejected.
        self.assertRaises(wsme.exc.ClientSideError, utils.validate_limit, -1)
        # A zero limit is rejected as well.
        self.assertRaises(wsme.exc.ClientSideError, utils.validate_limit, 0)

    def test_validate_sort_dir(self):
        # If sort_dir is valid, nothing should happen.
        try:
            utils.validate_sort_dir('asc')
        except Exception as exc:
            self.fail(exc)
        # An invalid sort_dir parameter must raise a client-side error.
        self.assertRaises(wsme.exc.ClientSideError,
                          utils.validate_sort_dir,
                          'fake-sort')

    def test_validate_search_filters(self):
        # Filters whose keys are all allowed pass validation silently.
        allowed_fields = ["allowed", "authorized"]
        test_filters = {"allowed": 1, "authorized": 2}
        try:
            utils.validate_search_filters(test_filters, allowed_fields)
        except Exception as exc:
            self.fail(exc)

    def test_validate_search_filters_with_invalid_key(self):
        # A filter key outside the allowed list raises a client-side error.
        allowed_fields = ["allowed", "authorized"]
        test_filters = {"allowed": 1, "unauthorized": 2}
        self.assertRaises(
            wsme.exc.ClientSideError, utils.validate_search_filters,
            test_filters, allowed_fields)
| apache-2.0 |
da1z/intellij-community | python/lib/Lib/sets.py | 132 | 19624 | """Classes to represent arbitrary sets (including sets of sets).
This module implements sets using dictionaries whose values are
ignored. The usual operations (union, intersection, deletion, etc.)
are provided as both methods and operators.
Important: sets are not sequences! While they support 'x in s',
'len(s)', and 'for x in s', none of those operations are unique for
sequences; for example, mappings support all three as well. The
characteristic operation for sequences is subscripting with small
integers: s[i], for i in range(len(s)). Sets don't support
subscripting at all. Also, sequences allow multiple occurrences and
their elements have a definite order; sets on the other hand don't
record multiple occurrences and don't remember the order of element
insertion (which is why they don't support s[i]).
The following classes are provided:
BaseSet -- All the operations common to both mutable and immutable
sets. This is an abstract class, not meant to be directly
instantiated.
Set -- Mutable sets, subclass of BaseSet; not hashable.
ImmutableSet -- Immutable sets, subclass of BaseSet; hashable.
An iterable argument is mandatory to create an ImmutableSet.
_TemporarilyImmutableSet -- A wrapper around a Set, hashable,
giving the same hash value as the immutable set equivalent
would have. Do not use this class directly.
Only hashable objects can be added to a Set. In particular, you cannot
really add a Set as an element to another Set; if you try, what is
actually added is an ImmutableSet built from it (it compares equal to
the one you tried adding).
When you ask if `x in y' where x is a Set and y is a Set or
ImmutableSet, x is wrapped into a _TemporarilyImmutableSet z, and
what's tested is actually `z in y'.
"""
# Code history:
#
# - Greg V. Wilson wrote the first version, using a different approach
# to the mutable/immutable problem, and inheriting from dict.
#
# - Alex Martelli modified Greg's version to implement the current
# Set/ImmutableSet approach, and make the data an attribute.
#
# - Guido van Rossum rewrote much of the code, made some API changes,
# and cleaned up the docstrings.
#
# - Raymond Hettinger added a number of speedups and other
# improvements.
from __future__ import generators
try:
    from itertools import ifilter, ifilterfalse
except ImportError:
    # Pure-Python fallbacks so the module still runs under Python 2.2,
    # where the itertools module is not available.
    def ifilter(predicate, iterable):
        # Yield the items of *iterable* for which predicate(item) is true.
        # A predicate of None selects the items that are themselves true.
        if predicate is None:
            def predicate(x):
                return x
        for item in iterable:
            if predicate(item):
                yield item
    def ifilterfalse(predicate, iterable):
        # Complement of ifilter(): yield the items the predicate rejects.
        if predicate is None:
            def predicate(x):
                return x
        for item in iterable:
            if not predicate(item):
                yield item
# Pre-2.3 compatibility: define the boolean constants if the running
# interpreter does not yet provide True/False as built-ins.
try:
    True, False
except NameError:
    True, False = (0==0, 0!=0)

# Public names exported by ``from sets import *``.
__all__ = ['BaseSet', 'Set', 'ImmutableSet']
class BaseSet(object):
    """Common base class for mutable and immutable sets."""

    # The set elements are the keys of this dict; the values are ignored
    # (they are always True).  Concrete subclasses create the dict.
    __slots__ = ['_data']

    # Constructor

    def __init__(self):
        """This is an abstract class."""
        # Don't call this from a concrete subclass!
        if self.__class__ is BaseSet:
            raise TypeError, ("BaseSet is an abstract class. "
                              "Use Set or ImmutableSet.")

    # Standard protocols: __len__, __repr__, __str__, __iter__

    def __len__(self):
        """Return the number of elements of a set."""
        return len(self._data)

    def __repr__(self):
        """Return string representation of a set.

        This looks like 'Set([<list of elements>])'.
        """
        return self._repr()

    # __str__ is the same as __repr__
    __str__ = __repr__

    def _repr(self, sorted=False):
        # Helper for __repr__; with sorted=True the elements are sorted
        # first, giving deterministic output (useful for doctests).
        elements = self._data.keys()
        if sorted:
            elements.sort()
        return '%s(%r)' % (self.__class__.__name__, elements)

    def __iter__(self):
        """Return an iterator over the elements of a set.

        This is the keys iterator for the underlying dict.
        """
        return self._data.iterkeys()

    # Three-way comparison is not supported.  However, because __eq__ is
    # tried before __cmp__, if Set x == Set y, x.__eq__(y) returns True and
    # then cmp(x, y) returns 0 (Python doesn't actually call __cmp__ in this
    # case).

    def __cmp__(self, other):
        raise TypeError, "can't compare sets using cmp()"

    # Equality comparisons using the underlying dicts.  Mixed-type comparisons
    # are allowed here, where Set == z for non-Set z always returns False,
    # and Set != z always True.  This allows expressions like "x in y" to
    # give the expected result when y is a sequence of mixed types, not
    # raising a pointless TypeError just because y contains a Set, or x is
    # a Set and y contains a non-set ("in" invokes only __eq__).
    # Subtle:  it would be nicer if __eq__ and __ne__ could return
    # NotImplemented instead of True or False.  Then the other comparand
    # would get a chance to determine the result, and if the other comparand
    # also returned NotImplemented then it would fall back to object address
    # comparison (which would always return False for __eq__ and always
    # True for __ne__).  However, that doesn't work, because this type
    # *also* implements __cmp__:  if, e.g., __eq__ returns NotImplemented,
    # Python tries __cmp__ next, and the __cmp__ here then raises TypeError.

    def __eq__(self, other):
        if isinstance(other, BaseSet):
            return self._data == other._data
        else:
            return False

    def __ne__(self, other):
        if isinstance(other, BaseSet):
            return self._data != other._data
        else:
            return True

    # Copying operations

    def copy(self):
        """Return a shallow copy of a set."""
        result = self.__class__()
        result._data.update(self._data)
        return result

    __copy__ = copy # For the copy module

    def __deepcopy__(self, memo):
        """Return a deep copy of a set; used by copy module."""
        # This pre-creates the result and inserts it in the memo
        # early, in case the deep copy recurses into another reference
        # to this same set.  A set can't be an element of itself, but
        # it can certainly contain an object that has a reference to
        # itself.
        from copy import deepcopy
        result = self.__class__()
        memo[id(self)] = result
        data = result._data
        value = True
        for elt in self:
            data[deepcopy(elt, memo)] = value
        return result

    # Standard set operations: union, intersection, both differences.
    # Each has an operator version (e.g. __or__, invoked with |) and a
    # method version (e.g. union).
    # Subtle:  Each pair requires distinct code so that the outcome is
    # correct when the type of other isn't suitable.  For example, if
    # we did "union = __or__" instead, then Set().union(3) would return
    # NotImplemented instead of raising TypeError (albeit that *why* it
    # raises TypeError as-is is also a bit subtle).

    def __or__(self, other):
        """Return the union of two sets as a new set.

        (I.e. all elements that are in either set.)
        """
        if not isinstance(other, BaseSet):
            return NotImplemented
        return self.union(other)

    def union(self, other):
        """Return the union of two sets as a new set.

        (I.e. all elements that are in either set.)
        """
        result = self.__class__(self)
        result._update(other)
        return result

    def __and__(self, other):
        """Return the intersection of two sets as a new set.

        (I.e. all elements that are in both sets.)
        """
        if not isinstance(other, BaseSet):
            return NotImplemented
        return self.intersection(other)

    def intersection(self, other):
        """Return the intersection of two sets as a new set.

        (I.e. all elements that are in both sets.)
        """
        if not isinstance(other, BaseSet):
            other = Set(other)
        # Iterate over the smaller operand and probe the larger one's
        # dict, keeping the loop count to min(len(self), len(other)).
        if len(self) <= len(other):
            little, big = self, other
        else:
            little, big = other, self
        common = ifilter(big._data.has_key, little)
        return self.__class__(common)

    def __xor__(self, other):
        """Return the symmetric difference of two sets as a new set.

        (I.e. all elements that are in exactly one of the sets.)
        """
        if not isinstance(other, BaseSet):
            return NotImplemented
        return self.symmetric_difference(other)

    def symmetric_difference(self, other):
        """Return the symmetric difference of two sets as a new set.

        (I.e. all elements that are in exactly one of the sets.)
        """
        result = self.__class__()
        data = result._data
        value = True
        selfdata = self._data
        try:
            otherdata = other._data
        except AttributeError:
            # `other` is an arbitrary iterable; wrap it to get a dict.
            otherdata = Set(other)._data
        for elt in ifilterfalse(otherdata.has_key, selfdata):
            data[elt] = value
        for elt in ifilterfalse(selfdata.has_key, otherdata):
            data[elt] = value
        return result

    def __sub__(self, other):
        """Return the difference of two sets as a new Set.

        (I.e. all elements that are in this set and not in the other.)
        """
        if not isinstance(other, BaseSet):
            return NotImplemented
        return self.difference(other)

    def difference(self, other):
        """Return the difference of two sets as a new Set.

        (I.e. all elements that are in this set and not in the other.)
        """
        result = self.__class__()
        data = result._data
        try:
            otherdata = other._data
        except AttributeError:
            otherdata = Set(other)._data
        value = True
        for elt in ifilterfalse(otherdata.has_key, self):
            data[elt] = value
        return result

    # Membership test

    def __contains__(self, element):
        """Report whether an element is a member of a set.

        (Called in response to the expression `element in self'.)
        """
        try:
            return element in self._data
        except TypeError:
            # Unhashable element: a mutable Set is looked up through its
            # temporarily-immutable wrapper, if it provides one.
            transform = getattr(element, "__as_temporarily_immutable__", None)
            if transform is None:
                raise # re-raise the TypeError exception we caught
            return transform() in self._data

    # Subset and superset test

    def issubset(self, other):
        """Report whether another set contains this set."""
        self._binary_sanity_check(other)
        if len(self) > len(other): # Fast check for obvious cases
            return False
        # Any element of self missing from other disproves the subset claim.
        for elt in ifilterfalse(other._data.has_key, self):
            return False
        return True

    def issuperset(self, other):
        """Report whether this set contains another set."""
        self._binary_sanity_check(other)
        if len(self) < len(other): # Fast check for obvious cases
            return False
        for elt in ifilterfalse(self._data.has_key, other):
            return False
        return True

    # Inequality comparisons using the is-subset relation.
    __le__ = issubset
    __ge__ = issuperset

    def __lt__(self, other):
        # Proper subset: strictly smaller AND contained.
        self._binary_sanity_check(other)
        return len(self) < len(other) and self.issubset(other)

    def __gt__(self, other):
        # Proper superset: strictly larger AND containing.
        self._binary_sanity_check(other)
        return len(self) > len(other) and self.issuperset(other)

    # Assorted helpers

    def _binary_sanity_check(self, other):
        # Check that the other argument to a binary operation is also
        # a set, raising a TypeError otherwise.
        if not isinstance(other, BaseSet):
            raise TypeError, "Binary operation only permitted between sets"

    def _compute_hash(self):
        # Calculate hash code for a set by xor'ing the hash codes of
        # the elements.  This ensures that the hash code does not depend
        # on the order in which elements are added to the set.  This is
        # not called __hash__ because a BaseSet should not be hashable;
        # only an ImmutableSet is hashable.
        result = 0
        for elt in self:
            result ^= hash(elt)
        return result

    def _update(self, iterable):
        # The main loop for update() and the subclass __init__() methods.
        data = self._data
        # Use the fast update() method when a dictionary is available.
        if isinstance(iterable, BaseSet):
            data.update(iterable._data)
            return
        value = True
        if type(iterable) in (list, tuple, xrange):
            # Optimized: we know that __iter__() and next() can't
            # raise TypeError, so we can move 'try:' out of the loop.
            it = iter(iterable)
            while True:
                try:
                    for element in it:
                        data[element] = value
                    return
                except TypeError:
                    # Unhashable element: substitute its immutable form
                    # (a mutable Set becomes an ImmutableSet) and resume.
                    transform = getattr(element, "__as_immutable__", None)
                    if transform is None:
                        raise # re-raise the TypeError exception we caught
                    data[transform()] = value
        else:
            # Safe: only catch TypeError where intended
            for element in iterable:
                try:
                    data[element] = value
                except TypeError:
                    transform = getattr(element, "__as_immutable__", None)
                    if transform is None:
                        raise # re-raise the TypeError exception we caught
                    data[transform()] = value
class ImmutableSet(BaseSet):
    """Immutable (and therefore hashable) variant of BaseSet."""

    # Cache slot for the lazily computed hash value.
    __slots__ = ['_hashcode']

    def __init__(self, iterable=None):
        """Construct an immutable set from an optional iterable."""
        # The hash is computed on demand by __hash__ and memoized here.
        self._hashcode = None
        self._data = {}
        if iterable is not None:
            self._update(iterable)

    def __hash__(self):
        # Lazily compute and cache the hash; the set never mutates, so
        # the cached value stays valid for the object's lifetime.
        hashcode = self._hashcode
        if hashcode is None:
            hashcode = self._hashcode = self._compute_hash()
        return hashcode

    def __getstate__(self):
        # Pickle both the element dict and the cached hash value.
        return self._data, self._hashcode

    def __setstate__(self, state):
        self._data, self._hashcode = state
class Set(BaseSet):
    """ Mutable set class."""

    __slots__ = []

    # BaseSet + operations requiring mutability; no hashing

    def __init__(self, iterable=None):
        """Construct a set from an optional iterable."""
        self._data = {}
        if iterable is not None:
            self._update(iterable)

    def __getstate__(self):
        # Support pickling/copying: the element dict is the whole state.
        return self._data,

    def __setstate__(self, data):
        self._data, = data

    def __hash__(self):
        """A Set cannot be hashed."""
        # We inherit object.__hash__, so we must deny this explicitly
        raise TypeError, "Can't hash a Set, only an ImmutableSet."

    # In-place union, intersection, differences.
    # Subtle:  The xyz_update() functions deliberately return None,
    # as do all mutating operations on built-in container types.
    # The __xyz__ spellings have to return self, though.

    def __ior__(self, other):
        """Update a set with the union of itself and another."""
        self._binary_sanity_check(other)
        self._data.update(other._data)
        return self

    def union_update(self, other):
        """Update a set with the union of itself and another."""
        self._update(other)

    def __iand__(self, other):
        """Update a set with the intersection of itself and another."""
        self._binary_sanity_check(other)
        self._data = (self & other)._data
        return self

    def intersection_update(self, other):
        """Update a set with the intersection of itself and another."""
        if isinstance(other, BaseSet):
            self &= other
        else:
            self._data = (self.intersection(other))._data

    def __ixor__(self, other):
        """Update a set with the symmetric difference of itself and another."""
        self._binary_sanity_check(other)
        self.symmetric_difference_update(other)
        return self

    def symmetric_difference_update(self, other):
        """Update a set with the symmetric difference of itself and another."""
        data = self._data
        value = True
        if not isinstance(other, BaseSet):
            other = Set(other)
        if self is other:
            # x ^= x is empty; clearing first also keeps the loop below
            # from mutating the dict it is iterating over.
            self.clear()
        for elt in other:
            if elt in data:
                del data[elt]
            else:
                data[elt] = value

    def __isub__(self, other):
        """Remove all elements of another set from this set."""
        self._binary_sanity_check(other)
        self.difference_update(other)
        return self

    def difference_update(self, other):
        """Remove all elements of another set from this set."""
        data = self._data
        if not isinstance(other, BaseSet):
            other = Set(other)
        if self is other:
            # x -= x leaves the set empty; also avoids deleting from the
            # dict while iterating over it.
            self.clear()
        for elt in ifilter(data.has_key, other):
            del data[elt]

    # Python dict-like mass mutations: update, clear

    def update(self, iterable):
        """Add all values from an iterable (such as a list or file)."""
        self._update(iterable)

    def clear(self):
        """Remove all elements from this set."""
        self._data.clear()

    # Single-element mutations: add, remove, discard

    def add(self, element):
        """Add an element to a set.

        This has no effect if the element is already present.
        """
        try:
            self._data[element] = True
        except TypeError:
            # Unhashable element: fall back to its immutable form, if any.
            transform = getattr(element, "__as_immutable__", None)
            if transform is None:
                raise # re-raise the TypeError exception we caught
            self._data[transform()] = True

    def remove(self, element):
        """Remove an element from a set; it must be a member.

        If the element is not a member, raise a KeyError.
        """
        try:
            del self._data[element]
        except TypeError:
            # Unhashable element: look it up via its hashable wrapper.
            transform = getattr(element, "__as_temporarily_immutable__", None)
            if transform is None:
                raise # re-raise the TypeError exception we caught
            del self._data[transform()]

    def discard(self, element):
        """Remove an element from a set if it is a member.

        If the element is not a member, do nothing.
        """
        try:
            self.remove(element)
        except KeyError:
            pass

    def pop(self):
        """Remove and return an arbitrary set element."""
        return self._data.popitem()[0]

    def __as_immutable__(self):
        # Return a copy of self as an immutable set
        return ImmutableSet(self)

    def __as_temporarily_immutable__(self):
        # Return self wrapped in a temporarily immutable set
        return _TemporarilyImmutableSet(self)
class _TemporarilyImmutableSet(BaseSet):
    """Hashable view presenting a mutable Set as immutable.

    Only hashing and equality comparisons are supported; used when a
    Set has to be looked up inside another set (`x in y`).
    """

    def __init__(self, set):
        self._set = set
        # Share the wrapped set's element dict so that the equality
        # comparisons inherited from BaseSet keep working.
        self._data = set._data

    def __hash__(self):
        # Recomputed on every call: the wrapped set may have mutated
        # since the previous lookup.
        return self._set._compute_hash()
| apache-2.0 |
robobrobro/ballin-octo-shame | lib/Python-3.4.3/Lib/encodings/cp1140.py | 272 | 13105 | """ Python Character Mapping Codec cp1140 generated from 'python-mappings/CP1140.TXT' with gencodec.py.
"""#"
import codecs
### Codec APIs
class Codec(codecs.Codec):
    """Stateless cp1140 codec built on the codecs charmap helpers."""

    def encode(self, input, errors='strict'):
        """Encode *input* to cp1140 bytes via the module encoding table."""
        return codecs.charmap_encode(input, errors, encoding_table)

    def decode(self, input, errors='strict'):
        """Decode cp1140 bytes in *input* via the module decoding table."""
        return codecs.charmap_decode(input, errors, decoding_table)
class IncrementalEncoder(codecs.IncrementalEncoder):
    """Incremental cp1140 encoder.

    Charmap encoding keeps no state between calls, so each chunk is
    encoded independently and *final* is ignored.
    """

    def encode(self, input, final=False):
        encoded, _consumed = codecs.charmap_encode(
            input, self.errors, encoding_table)
        return encoded
class IncrementalDecoder(codecs.IncrementalDecoder):
    """Incremental cp1140 decoder.

    Charmap decoding keeps no state between calls, so each chunk is
    decoded independently and *final* is ignored.
    """

    def decode(self, input, final=False):
        decoded, _consumed = codecs.charmap_decode(
            input, self.errors, decoding_table)
        return decoded
class StreamWriter(Codec,codecs.StreamWriter):
    # Stream-oriented writer: inherits the cp1140 encode() from Codec.
    pass
class StreamReader(Codec,codecs.StreamReader):
    # Stream-oriented reader: inherits the cp1140 decode() from Codec.
    pass
### encodings module API
def getregentry():
    """Return the CodecInfo entry registered for 'cp1140'."""
    # A single stateless Codec instance supplies both bound methods.
    codec = Codec()
    return codecs.CodecInfo(
        name='cp1140',
        encode=codec.encode,
        decode=codec.decode,
        incrementalencoder=IncrementalEncoder,
        incrementaldecoder=IncrementalDecoder,
        streamreader=StreamReader,
        streamwriter=StreamWriter,
    )
### Decoding Table
decoding_table = (
'\x00' # 0x00 -> NULL
'\x01' # 0x01 -> START OF HEADING
'\x02' # 0x02 -> START OF TEXT
'\x03' # 0x03 -> END OF TEXT
'\x9c' # 0x04 -> CONTROL
'\t' # 0x05 -> HORIZONTAL TABULATION
'\x86' # 0x06 -> CONTROL
'\x7f' # 0x07 -> DELETE
'\x97' # 0x08 -> CONTROL
'\x8d' # 0x09 -> CONTROL
'\x8e' # 0x0A -> CONTROL
'\x0b' # 0x0B -> VERTICAL TABULATION
'\x0c' # 0x0C -> FORM FEED
'\r' # 0x0D -> CARRIAGE RETURN
'\x0e' # 0x0E -> SHIFT OUT
'\x0f' # 0x0F -> SHIFT IN
'\x10' # 0x10 -> DATA LINK ESCAPE
'\x11' # 0x11 -> DEVICE CONTROL ONE
'\x12' # 0x12 -> DEVICE CONTROL TWO
'\x13' # 0x13 -> DEVICE CONTROL THREE
'\x9d' # 0x14 -> CONTROL
'\x85' # 0x15 -> CONTROL
'\x08' # 0x16 -> BACKSPACE
'\x87' # 0x17 -> CONTROL
'\x18' # 0x18 -> CANCEL
'\x19' # 0x19 -> END OF MEDIUM
'\x92' # 0x1A -> CONTROL
'\x8f' # 0x1B -> CONTROL
'\x1c' # 0x1C -> FILE SEPARATOR
'\x1d' # 0x1D -> GROUP SEPARATOR
'\x1e' # 0x1E -> RECORD SEPARATOR
'\x1f' # 0x1F -> UNIT SEPARATOR
'\x80' # 0x20 -> CONTROL
'\x81' # 0x21 -> CONTROL
'\x82' # 0x22 -> CONTROL
'\x83' # 0x23 -> CONTROL
'\x84' # 0x24 -> CONTROL
'\n' # 0x25 -> LINE FEED
'\x17' # 0x26 -> END OF TRANSMISSION BLOCK
'\x1b' # 0x27 -> ESCAPE
'\x88' # 0x28 -> CONTROL
'\x89' # 0x29 -> CONTROL
'\x8a' # 0x2A -> CONTROL
'\x8b' # 0x2B -> CONTROL
'\x8c' # 0x2C -> CONTROL
'\x05' # 0x2D -> ENQUIRY
'\x06' # 0x2E -> ACKNOWLEDGE
'\x07' # 0x2F -> BELL
'\x90' # 0x30 -> CONTROL
'\x91' # 0x31 -> CONTROL
'\x16' # 0x32 -> SYNCHRONOUS IDLE
'\x93' # 0x33 -> CONTROL
'\x94' # 0x34 -> CONTROL
'\x95' # 0x35 -> CONTROL
'\x96' # 0x36 -> CONTROL
'\x04' # 0x37 -> END OF TRANSMISSION
'\x98' # 0x38 -> CONTROL
'\x99' # 0x39 -> CONTROL
'\x9a' # 0x3A -> CONTROL
'\x9b' # 0x3B -> CONTROL
'\x14' # 0x3C -> DEVICE CONTROL FOUR
'\x15' # 0x3D -> NEGATIVE ACKNOWLEDGE
'\x9e' # 0x3E -> CONTROL
'\x1a' # 0x3F -> SUBSTITUTE
' ' # 0x40 -> SPACE
'\xa0' # 0x41 -> NO-BREAK SPACE
'\xe2' # 0x42 -> LATIN SMALL LETTER A WITH CIRCUMFLEX
'\xe4' # 0x43 -> LATIN SMALL LETTER A WITH DIAERESIS
'\xe0' # 0x44 -> LATIN SMALL LETTER A WITH GRAVE
'\xe1' # 0x45 -> LATIN SMALL LETTER A WITH ACUTE
'\xe3' # 0x46 -> LATIN SMALL LETTER A WITH TILDE
'\xe5' # 0x47 -> LATIN SMALL LETTER A WITH RING ABOVE
'\xe7' # 0x48 -> LATIN SMALL LETTER C WITH CEDILLA
'\xf1' # 0x49 -> LATIN SMALL LETTER N WITH TILDE
'\xa2' # 0x4A -> CENT SIGN
'.' # 0x4B -> FULL STOP
'<' # 0x4C -> LESS-THAN SIGN
'(' # 0x4D -> LEFT PARENTHESIS
'+' # 0x4E -> PLUS SIGN
'|' # 0x4F -> VERTICAL LINE
'&' # 0x50 -> AMPERSAND
'\xe9' # 0x51 -> LATIN SMALL LETTER E WITH ACUTE
'\xea' # 0x52 -> LATIN SMALL LETTER E WITH CIRCUMFLEX
'\xeb' # 0x53 -> LATIN SMALL LETTER E WITH DIAERESIS
'\xe8' # 0x54 -> LATIN SMALL LETTER E WITH GRAVE
'\xed' # 0x55 -> LATIN SMALL LETTER I WITH ACUTE
'\xee' # 0x56 -> LATIN SMALL LETTER I WITH CIRCUMFLEX
'\xef' # 0x57 -> LATIN SMALL LETTER I WITH DIAERESIS
'\xec' # 0x58 -> LATIN SMALL LETTER I WITH GRAVE
'\xdf' # 0x59 -> LATIN SMALL LETTER SHARP S (GERMAN)
'!' # 0x5A -> EXCLAMATION MARK
'$' # 0x5B -> DOLLAR SIGN
'*' # 0x5C -> ASTERISK
')' # 0x5D -> RIGHT PARENTHESIS
';' # 0x5E -> SEMICOLON
'\xac' # 0x5F -> NOT SIGN
'-' # 0x60 -> HYPHEN-MINUS
'/' # 0x61 -> SOLIDUS
'\xc2' # 0x62 -> LATIN CAPITAL LETTER A WITH CIRCUMFLEX
'\xc4' # 0x63 -> LATIN CAPITAL LETTER A WITH DIAERESIS
'\xc0' # 0x64 -> LATIN CAPITAL LETTER A WITH GRAVE
'\xc1' # 0x65 -> LATIN CAPITAL LETTER A WITH ACUTE
'\xc3' # 0x66 -> LATIN CAPITAL LETTER A WITH TILDE
'\xc5' # 0x67 -> LATIN CAPITAL LETTER A WITH RING ABOVE
'\xc7' # 0x68 -> LATIN CAPITAL LETTER C WITH CEDILLA
'\xd1' # 0x69 -> LATIN CAPITAL LETTER N WITH TILDE
'\xa6' # 0x6A -> BROKEN BAR
',' # 0x6B -> COMMA
'%' # 0x6C -> PERCENT SIGN
'_' # 0x6D -> LOW LINE
'>' # 0x6E -> GREATER-THAN SIGN
'?' # 0x6F -> QUESTION MARK
'\xf8' # 0x70 -> LATIN SMALL LETTER O WITH STROKE
'\xc9' # 0x71 -> LATIN CAPITAL LETTER E WITH ACUTE
'\xca' # 0x72 -> LATIN CAPITAL LETTER E WITH CIRCUMFLEX
'\xcb' # 0x73 -> LATIN CAPITAL LETTER E WITH DIAERESIS
'\xc8' # 0x74 -> LATIN CAPITAL LETTER E WITH GRAVE
'\xcd' # 0x75 -> LATIN CAPITAL LETTER I WITH ACUTE
'\xce' # 0x76 -> LATIN CAPITAL LETTER I WITH CIRCUMFLEX
'\xcf' # 0x77 -> LATIN CAPITAL LETTER I WITH DIAERESIS
'\xcc' # 0x78 -> LATIN CAPITAL LETTER I WITH GRAVE
'`' # 0x79 -> GRAVE ACCENT
':' # 0x7A -> COLON
'#' # 0x7B -> NUMBER SIGN
'@' # 0x7C -> COMMERCIAL AT
"'" # 0x7D -> APOSTROPHE
'=' # 0x7E -> EQUALS SIGN
'"' # 0x7F -> QUOTATION MARK
'\xd8' # 0x80 -> LATIN CAPITAL LETTER O WITH STROKE
'a' # 0x81 -> LATIN SMALL LETTER A
'b' # 0x82 -> LATIN SMALL LETTER B
'c' # 0x83 -> LATIN SMALL LETTER C
'd' # 0x84 -> LATIN SMALL LETTER D
'e' # 0x85 -> LATIN SMALL LETTER E
'f' # 0x86 -> LATIN SMALL LETTER F
'g' # 0x87 -> LATIN SMALL LETTER G
'h' # 0x88 -> LATIN SMALL LETTER H
'i' # 0x89 -> LATIN SMALL LETTER I
'\xab' # 0x8A -> LEFT-POINTING DOUBLE ANGLE QUOTATION MARK
'\xbb' # 0x8B -> RIGHT-POINTING DOUBLE ANGLE QUOTATION MARK
'\xf0' # 0x8C -> LATIN SMALL LETTER ETH (ICELANDIC)
'\xfd' # 0x8D -> LATIN SMALL LETTER Y WITH ACUTE
'\xfe' # 0x8E -> LATIN SMALL LETTER THORN (ICELANDIC)
'\xb1' # 0x8F -> PLUS-MINUS SIGN
'\xb0' # 0x90 -> DEGREE SIGN
'j' # 0x91 -> LATIN SMALL LETTER J
'k' # 0x92 -> LATIN SMALL LETTER K
'l' # 0x93 -> LATIN SMALL LETTER L
'm' # 0x94 -> LATIN SMALL LETTER M
'n' # 0x95 -> LATIN SMALL LETTER N
'o' # 0x96 -> LATIN SMALL LETTER O
'p' # 0x97 -> LATIN SMALL LETTER P
'q' # 0x98 -> LATIN SMALL LETTER Q
'r' # 0x99 -> LATIN SMALL LETTER R
'\xaa' # 0x9A -> FEMININE ORDINAL INDICATOR
'\xba' # 0x9B -> MASCULINE ORDINAL INDICATOR
'\xe6' # 0x9C -> LATIN SMALL LIGATURE AE
'\xb8' # 0x9D -> CEDILLA
'\xc6' # 0x9E -> LATIN CAPITAL LIGATURE AE
'\u20ac' # 0x9F -> EURO SIGN
'\xb5' # 0xA0 -> MICRO SIGN
'~' # 0xA1 -> TILDE
's' # 0xA2 -> LATIN SMALL LETTER S
't' # 0xA3 -> LATIN SMALL LETTER T
'u' # 0xA4 -> LATIN SMALL LETTER U
'v' # 0xA5 -> LATIN SMALL LETTER V
'w' # 0xA6 -> LATIN SMALL LETTER W
'x' # 0xA7 -> LATIN SMALL LETTER X
'y' # 0xA8 -> LATIN SMALL LETTER Y
'z' # 0xA9 -> LATIN SMALL LETTER Z
'\xa1' # 0xAA -> INVERTED EXCLAMATION MARK
'\xbf' # 0xAB -> INVERTED QUESTION MARK
'\xd0' # 0xAC -> LATIN CAPITAL LETTER ETH (ICELANDIC)
'\xdd' # 0xAD -> LATIN CAPITAL LETTER Y WITH ACUTE
'\xde' # 0xAE -> LATIN CAPITAL LETTER THORN (ICELANDIC)
'\xae' # 0xAF -> REGISTERED SIGN
'^' # 0xB0 -> CIRCUMFLEX ACCENT
'\xa3' # 0xB1 -> POUND SIGN
'\xa5' # 0xB2 -> YEN SIGN
'\xb7' # 0xB3 -> MIDDLE DOT
'\xa9' # 0xB4 -> COPYRIGHT SIGN
'\xa7' # 0xB5 -> SECTION SIGN
'\xb6' # 0xB6 -> PILCROW SIGN
'\xbc' # 0xB7 -> VULGAR FRACTION ONE QUARTER
'\xbd' # 0xB8 -> VULGAR FRACTION ONE HALF
'\xbe' # 0xB9 -> VULGAR FRACTION THREE QUARTERS
'[' # 0xBA -> LEFT SQUARE BRACKET
']' # 0xBB -> RIGHT SQUARE BRACKET
'\xaf' # 0xBC -> MACRON
'\xa8' # 0xBD -> DIAERESIS
'\xb4' # 0xBE -> ACUTE ACCENT
'\xd7' # 0xBF -> MULTIPLICATION SIGN
'{' # 0xC0 -> LEFT CURLY BRACKET
'A' # 0xC1 -> LATIN CAPITAL LETTER A
'B' # 0xC2 -> LATIN CAPITAL LETTER B
'C' # 0xC3 -> LATIN CAPITAL LETTER C
'D' # 0xC4 -> LATIN CAPITAL LETTER D
'E' # 0xC5 -> LATIN CAPITAL LETTER E
'F' # 0xC6 -> LATIN CAPITAL LETTER F
'G' # 0xC7 -> LATIN CAPITAL LETTER G
'H' # 0xC8 -> LATIN CAPITAL LETTER H
'I' # 0xC9 -> LATIN CAPITAL LETTER I
'\xad' # 0xCA -> SOFT HYPHEN
'\xf4' # 0xCB -> LATIN SMALL LETTER O WITH CIRCUMFLEX
'\xf6' # 0xCC -> LATIN SMALL LETTER O WITH DIAERESIS
'\xf2' # 0xCD -> LATIN SMALL LETTER O WITH GRAVE
'\xf3' # 0xCE -> LATIN SMALL LETTER O WITH ACUTE
'\xf5' # 0xCF -> LATIN SMALL LETTER O WITH TILDE
'}' # 0xD0 -> RIGHT CURLY BRACKET
'J' # 0xD1 -> LATIN CAPITAL LETTER J
'K' # 0xD2 -> LATIN CAPITAL LETTER K
'L' # 0xD3 -> LATIN CAPITAL LETTER L
'M' # 0xD4 -> LATIN CAPITAL LETTER M
'N' # 0xD5 -> LATIN CAPITAL LETTER N
'O' # 0xD6 -> LATIN CAPITAL LETTER O
'P' # 0xD7 -> LATIN CAPITAL LETTER P
'Q' # 0xD8 -> LATIN CAPITAL LETTER Q
'R' # 0xD9 -> LATIN CAPITAL LETTER R
'\xb9' # 0xDA -> SUPERSCRIPT ONE
'\xfb' # 0xDB -> LATIN SMALL LETTER U WITH CIRCUMFLEX
'\xfc' # 0xDC -> LATIN SMALL LETTER U WITH DIAERESIS
'\xf9' # 0xDD -> LATIN SMALL LETTER U WITH GRAVE
'\xfa' # 0xDE -> LATIN SMALL LETTER U WITH ACUTE
'\xff' # 0xDF -> LATIN SMALL LETTER Y WITH DIAERESIS
'\\' # 0xE0 -> REVERSE SOLIDUS
'\xf7' # 0xE1 -> DIVISION SIGN
'S' # 0xE2 -> LATIN CAPITAL LETTER S
'T' # 0xE3 -> LATIN CAPITAL LETTER T
'U' # 0xE4 -> LATIN CAPITAL LETTER U
'V' # 0xE5 -> LATIN CAPITAL LETTER V
'W' # 0xE6 -> LATIN CAPITAL LETTER W
'X' # 0xE7 -> LATIN CAPITAL LETTER X
'Y' # 0xE8 -> LATIN CAPITAL LETTER Y
'Z' # 0xE9 -> LATIN CAPITAL LETTER Z
'\xb2' # 0xEA -> SUPERSCRIPT TWO
'\xd4' # 0xEB -> LATIN CAPITAL LETTER O WITH CIRCUMFLEX
'\xd6' # 0xEC -> LATIN CAPITAL LETTER O WITH DIAERESIS
'\xd2' # 0xED -> LATIN CAPITAL LETTER O WITH GRAVE
'\xd3' # 0xEE -> LATIN CAPITAL LETTER O WITH ACUTE
'\xd5' # 0xEF -> LATIN CAPITAL LETTER O WITH TILDE
'0' # 0xF0 -> DIGIT ZERO
'1' # 0xF1 -> DIGIT ONE
'2' # 0xF2 -> DIGIT TWO
'3' # 0xF3 -> DIGIT THREE
'4' # 0xF4 -> DIGIT FOUR
'5' # 0xF5 -> DIGIT FIVE
'6' # 0xF6 -> DIGIT SIX
'7' # 0xF7 -> DIGIT SEVEN
'8' # 0xF8 -> DIGIT EIGHT
'9' # 0xF9 -> DIGIT NINE
'\xb3' # 0xFA -> SUPERSCRIPT THREE
'\xdb' # 0xFB -> LATIN CAPITAL LETTER U WITH CIRCUMFLEX
'\xdc' # 0xFC -> LATIN CAPITAL LETTER U WITH DIAERESIS
'\xd9' # 0xFD -> LATIN CAPITAL LETTER U WITH GRAVE
'\xda' # 0xFE -> LATIN CAPITAL LETTER U WITH ACUTE
'\x9f' # 0xFF -> CONTROL
)
### Encoding table
encoding_table=codecs.charmap_build(decoding_table)
| mit |
AleksNeStu/ggrc-core | src/ggrc/models/event.py | 5 | 1025 | # Copyright (C) 2017 Google Inc.
# Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file>
from ggrc import db
from ggrc.models.mixins import Base
class Event(Base, db.Model):
  """Audit record for one REST action performed on a resource.

  Each Event groups the Revision rows produced by a single request.
  """

  __tablename__ = 'events'

  # The request verb that produced this event.
  action = db.Column(
      db.Enum(u'POST', u'PUT', u'DELETE', u'BULK', u'GET'),
      nullable=False,
  )
  # Polymorphic reference to the affected resource (id/type pair).
  resource_id = db.Column(db.Integer)
  resource_type = db.Column(db.String)
  # Revisions created by this event; removed together with the event.
  revisions = db.relationship(
      'Revision',
      backref='event',
      cascade='all, delete-orphan',
  )

  # NOTE(review): these lists are presumably consumed by the Base mixin's
  # serialization machinery — confirm against ggrc.models.mixins.
  _publish_attrs = [
      'action',
      'resource_id',
      'resource_type',
      'revisions',
  ]
  _include_links = [
      'revisions',
  ]

  @staticmethod
  def _extra_table_args(class_):
    # Secondary index to speed up "events modified by user X" queries.
    return (
        db.Index('events_modified_by', 'modified_by_id'),
    )

  @classmethod
  def eager_query(cls):
    """Return the base eager query with revisions preloaded."""
    from sqlalchemy import orm
    query = super(Event, cls).eager_query()
    # Subquery-load revisions (with their full column group) to avoid
    # N+1 fetches when events are listed with their revisions.
    return query.options(
        orm.subqueryload('revisions').undefer_group('Revision_complete'),
    )
| apache-2.0 |
40223208/2015cdb_g4_0420 | static/Brython3.1.1-20150328-091302/Lib/unittest/result.py | 727 | 6397 | """Test result object"""
import io
import sys
import traceback
from . import util
from functools import wraps
# Marker read by unittest's traceback filtering so frames from this
# module are hidden from reported tracebacks.
__unittest = True
def failfast(method):
    """Decorator for TestResult recording methods.

    When the result object's ``failfast`` attribute is true, the test
    run is stopped (via ``self.stop()``) before *method* records the
    failure/error; the wrapped method's return value is passed through.
    """
    @wraps(method)
    def wrapper(self, *args, **kwargs):
        if getattr(self, 'failfast', False):
            self.stop()
        return method(self, *args, **kwargs)
    return wrapper
# Templates used when mirroring buffered stdout/stderr into a failure
# description (see _restoreStdout).
STDOUT_LINE = '\nStdout:\n%s'
STDERR_LINE = '\nStderr:\n%s'
class TestResult(object):
    """Holder for test result information.
    Test results are automatically managed by the TestCase and TestSuite
    classes, and do not need to be explicitly manipulated by writers of tests.
    Each instance holds the total number of tests run, and collections of
    failures and errors that occurred among those test runs. The collections
    contain tuples of (testcase, exceptioninfo), where exceptioninfo is the
    formatted traceback of the error that occurred.
    """
    # Class-level bookkeeping used by TestSuite to manage class/module fixtures.
    _previousTestClass = None
    _testRunEntered = False
    _moduleSetUpFailed = False

    def __init__(self, stream=None, descriptions=None, verbosity=None):
        # stream/descriptions/verbosity are accepted for interface
        # compatibility with TextTestResult but unused here.
        self.failfast = False
        self.failures = []
        self.errors = []
        self.testsRun = 0
        self.skipped = []
        self.expectedFailures = []
        self.unexpectedSuccesses = []
        self.shouldStop = False
        # When True, stdout/stderr are captured per-test (see _setupStdout).
        self.buffer = False
        self._stdout_buffer = None
        self._stderr_buffer = None
        # Originals kept so _restoreStdout can undo the capture.
        self._original_stdout = sys.stdout
        self._original_stderr = sys.stderr
        # Set when a test errors/fails so captured output is echoed back.
        self._mirrorOutput = False

    def printErrors(self):
        "Called by TestRunner after test run"
        # NOTE(review): intentionally a no-op in this Brython port; subclasses
        # such as TextTestResult normally print the error list here.
        pass

    def startTest(self, test):
        "Called when the given test is about to be run"
        self.testsRun += 1
        self._mirrorOutput = False
        self._setupStdout()

    def _setupStdout(self):
        # Redirect stdout/stderr into StringIO buffers when buffering is on.
        # Buffers are created lazily once and reused (truncated) between tests.
        if self.buffer:
            if self._stderr_buffer is None:
                self._stderr_buffer = io.StringIO()
                self._stdout_buffer = io.StringIO()
            sys.stdout = self._stdout_buffer
            sys.stderr = self._stderr_buffer

    def startTestRun(self):
        """Called once before any tests are executed.
        See startTest for a method called before each test.
        """

    def stopTest(self, test):
        """Called when the given test has been run"""
        self._restoreStdout()
        self._mirrorOutput = False

    def _restoreStdout(self):
        # Undo _setupStdout: optionally echo captured output (only when the
        # test errored/failed), then restore the real streams and reset the
        # buffers for reuse by the next test.
        if self.buffer:
            if self._mirrorOutput:
                output = sys.stdout.getvalue()
                error = sys.stderr.getvalue()
                if output:
                    if not output.endswith('\n'):
                        output += '\n'
                    self._original_stdout.write(STDOUT_LINE % output)
                if error:
                    if not error.endswith('\n'):
                        error += '\n'
                    self._original_stderr.write(STDERR_LINE % error)
            sys.stdout = self._original_stdout
            sys.stderr = self._original_stderr
            self._stdout_buffer.seek(0)
            self._stdout_buffer.truncate()
            self._stderr_buffer.seek(0)
            self._stderr_buffer.truncate()

    def stopTestRun(self):
        """Called once after all tests are executed.
        See stopTest for a method called after each test.
        """

    @failfast
    def addError(self, test, err):
        """Called when an error has occurred. 'err' is a tuple of values as
        returned by sys.exc_info().
        """
        self.errors.append((test, self._exc_info_to_string(err, test)))
        self._mirrorOutput = True

    @failfast
    def addFailure(self, test, err):
        """Called when an error has occurred. 'err' is a tuple of values as
        returned by sys.exc_info()."""
        self.failures.append((test, self._exc_info_to_string(err, test)))
        self._mirrorOutput = True

    def addSuccess(self, test):
        "Called when a test has completed successfully"
        pass

    def addSkip(self, test, reason):
        """Called when a test is skipped."""
        self.skipped.append((test, reason))

    def addExpectedFailure(self, test, err):
        """Called when an expected failure/error occured."""
        self.expectedFailures.append(
            (test, self._exc_info_to_string(err, test)))

    @failfast
    def addUnexpectedSuccess(self, test):
        """Called when a test was expected to fail, but succeed."""
        self.unexpectedSuccesses.append(test)

    def wasSuccessful(self):
        "Tells whether or not this result was a success"
        # NOTE: unexpected successes do not count against success here.
        return len(self.failures) == len(self.errors) == 0

    def stop(self):
        "Indicates that the tests should be aborted"
        self.shouldStop = True

    def _exc_info_to_string(self, err, test):
        """Converts a sys.exc_info()-style tuple of values into a string."""
        exctype, value, tb = err
        # Skip test runner traceback levels
        while tb and self._is_relevant_tb_level(tb):
            tb = tb.tb_next
        if exctype is test.failureException:
            # Skip assert*() traceback levels
            length = self._count_relevant_tb_levels(tb)
            msgLines = traceback.format_exception(exctype, value, tb, length)
        else:
            msgLines = traceback.format_exception(exctype, value, tb)
        if self.buffer:
            # Append any captured stdout/stderr so it shows up with the error.
            output = sys.stdout.getvalue()
            error = sys.stderr.getvalue()
            if output:
                if not output.endswith('\n'):
                    output += '\n'
                msgLines.append(STDOUT_LINE % output)
            if error:
                if not error.endswith('\n'):
                    error += '\n'
                msgLines.append(STDERR_LINE % error)
        return ''.join(msgLines)

    def _is_relevant_tb_level(self, tb):
        # Brython workaround: tb_frame.f_globals is not available, so every
        # level is treated as "relevant". Net effect: the while-loop in
        # _exc_info_to_string walks tb to None (traceback frames are dropped)
        # and _count_relevant_tb_levels returns 0.
        # Upstream CPython: return '__unittest' in tb.tb_frame.f_globals
        return True
    def _count_relevant_tb_levels(self, tb):
        length = 0
        while tb and not self._is_relevant_tb_level(tb):
            length += 1
            tb = tb.tb_next
        return length

    def __repr__(self):
        return ("<%s run=%i errors=%i failures=%i>" %
               (util.strclass(self.__class__), self.testsRun, len(self.errors),
                len(self.failures)))
| gpl-3.0 |
iivic/BoiseStateX | common/djangoapps/student/tests/test_roles.py | 147 | 7798 | """
Tests of student.roles
"""
import ddt
from django.test import TestCase
from courseware.tests.factories import UserFactory, StaffFactory, InstructorFactory
from student.tests.factories import AnonymousUserFactory
from student.roles import (
GlobalStaff, CourseRole, CourseStaffRole, CourseInstructorRole,
OrgStaffRole, OrgInstructorRole, RoleCache, CourseBetaTesterRole
)
from opaque_keys.edx.locations import SlashSeparatedCourseKey
class RolesTestCase(TestCase):
    """
    Tests of student.roles
    """

    def setUp(self):
        # Build one course key plus users covering each access level.
        super(RolesTestCase, self).setUp()
        self.course_key = SlashSeparatedCourseKey('edX', 'toy', '2012_Fall')
        self.course_loc = self.course_key.make_usage_key('course', '2012_Fall')
        self.anonymous_user = AnonymousUserFactory()
        self.student = UserFactory()
        self.global_staff = UserFactory(is_staff=True)
        self.course_staff = StaffFactory(course_key=self.course_key)
        self.course_instructor = InstructorFactory(course_key=self.course_key)

    def test_global_staff(self):
        # Only the is_staff user is global staff; course roles don't imply it.
        self.assertFalse(GlobalStaff().has_user(self.student))
        self.assertFalse(GlobalStaff().has_user(self.course_staff))
        self.assertFalse(GlobalStaff().has_user(self.course_instructor))
        self.assertTrue(GlobalStaff().has_user(self.global_staff))

    def test_group_name_case_sensitive(self):
        # Role membership must be case-sensitive with respect to the course id.
        uppercase_course_id = "ORG/COURSE/NAME"
        lowercase_course_id = uppercase_course_id.lower()
        uppercase_course_key = SlashSeparatedCourseKey.from_deprecated_string(uppercase_course_id)
        lowercase_course_key = SlashSeparatedCourseKey.from_deprecated_string(lowercase_course_id)
        role = "role"
        lowercase_user = UserFactory()
        CourseRole(role, lowercase_course_key).add_users(lowercase_user)
        uppercase_user = UserFactory()
        CourseRole(role, uppercase_course_key).add_users(uppercase_user)
        self.assertTrue(CourseRole(role, lowercase_course_key).has_user(lowercase_user))
        self.assertFalse(CourseRole(role, uppercase_course_key).has_user(lowercase_user))
        self.assertFalse(CourseRole(role, lowercase_course_key).has_user(uppercase_user))
        self.assertTrue(CourseRole(role, uppercase_course_key).has_user(uppercase_user))

    def test_course_role(self):
        """
        Test that giving a user a course role enables access appropriately
        """
        self.assertFalse(
            CourseStaffRole(self.course_key).has_user(self.student),
            "Student has premature access to {}".format(self.course_key)
        )
        CourseStaffRole(self.course_key).add_users(self.student)
        self.assertTrue(
            CourseStaffRole(self.course_key).has_user(self.student),
            "Student doesn't have access to {}".format(unicode(self.course_key))
        )
        # remove access and confirm
        CourseStaffRole(self.course_key).remove_users(self.student)
        self.assertFalse(
            CourseStaffRole(self.course_key).has_user(self.student),
            "Student still has access to {}".format(self.course_key)
        )

    def test_org_role(self):
        """
        Test that giving a user an org role enables access appropriately
        """
        self.assertFalse(
            OrgStaffRole(self.course_key.org).has_user(self.student),
            "Student has premature access to {}".format(self.course_key.org)
        )
        OrgStaffRole(self.course_key.org).add_users(self.student)
        self.assertTrue(
            OrgStaffRole(self.course_key.org).has_user(self.student),
            "Student doesn't have access to {}".format(unicode(self.course_key.org))
        )
        # remove access and confirm
        OrgStaffRole(self.course_key.org).remove_users(self.student)
        # Drop the per-user role cache so has_user re-queries the database.
        if hasattr(self.student, '_roles'):
            del self.student._roles
        self.assertFalse(
            OrgStaffRole(self.course_key.org).has_user(self.student),
            "Student still has access to {}".format(self.course_key.org)
        )

    def test_org_and_course_roles(self):
        """
        Test that Org and course roles don't interfere with course roles or vice versa
        """
        OrgInstructorRole(self.course_key.org).add_users(self.student)
        CourseInstructorRole(self.course_key).add_users(self.student)
        self.assertTrue(
            OrgInstructorRole(self.course_key.org).has_user(self.student),
            "Student doesn't have access to {}".format(unicode(self.course_key.org))
        )
        self.assertTrue(
            CourseInstructorRole(self.course_key).has_user(self.student),
            "Student doesn't have access to {}".format(unicode(self.course_key))
        )
        # remove access and confirm
        OrgInstructorRole(self.course_key.org).remove_users(self.student)
        self.assertFalse(
            OrgInstructorRole(self.course_key.org).has_user(self.student),
            "Student still has access to {}".format(self.course_key.org)
        )
        self.assertTrue(
            CourseInstructorRole(self.course_key).has_user(self.student),
            "Student doesn't have access to {}".format(unicode(self.course_key))
        )
        # ok now keep org role and get rid of course one
        OrgInstructorRole(self.course_key.org).add_users(self.student)
        CourseInstructorRole(self.course_key).remove_users(self.student)
        self.assertTrue(
            OrgInstructorRole(self.course_key.org).has_user(self.student),
            "Student lost has access to {}".format(self.course_key.org)
        )
        self.assertFalse(
            CourseInstructorRole(self.course_key).has_user(self.student),
            "Student doesn't have access to {}".format(unicode(self.course_key))
        )

    def test_get_user_for_role(self):
        """
        test users_for_role
        """
        role = CourseStaffRole(self.course_key)
        role.add_users(self.student)
        self.assertGreater(len(role.users_with_role()), 0)

    def test_add_users_doesnt_add_duplicate_entry(self):
        """
        Tests that calling add_users multiple times before a single call
        to remove_users does not result in the user remaining in the group.
        """
        role = CourseStaffRole(self.course_key)
        role.add_users(self.student)
        self.assertTrue(role.has_user(self.student))
        # Call add_users a second time, then remove just once.
        role.add_users(self.student)
        role.remove_users(self.student)
        self.assertFalse(role.has_user(self.student))
@ddt.ddt
class RoleCacheTestCase(TestCase):
    # Tests for RoleCache: per-user cache of (role, course_key, org) triples.
    IN_KEY = SlashSeparatedCourseKey('edX', 'toy', '2012_Fall')
    NOT_IN_KEY = SlashSeparatedCourseKey('edX', 'toy', '2013_Fall')

    # (role object to grant, (role_name, course_key_or_None, org)) pairs used
    # as ddt data; org-level roles carry no course key.
    ROLES = (
        (CourseStaffRole(IN_KEY), ('staff', IN_KEY, 'edX')),
        (CourseInstructorRole(IN_KEY), ('instructor', IN_KEY, 'edX')),
        (OrgStaffRole(IN_KEY.org), ('staff', None, 'edX')),
        (OrgInstructorRole(IN_KEY.org), ('instructor', None, 'edX')),
        (CourseBetaTesterRole(IN_KEY), ('beta_testers', IN_KEY, 'edX')),
    )

    def setUp(self):
        super(RoleCacheTestCase, self).setUp()
        self.user = UserFactory()

    @ddt.data(*ROLES)
    @ddt.unpack
    def test_only_in_role(self, role, target):
        # Granting exactly one role must not make any other role visible.
        role.add_users(self.user)
        cache = RoleCache(self.user)
        self.assertTrue(cache.has_role(*target))
        for other_role, other_target in self.ROLES:
            if other_role == role:
                continue
            self.assertFalse(cache.has_role(*other_target))

    @ddt.data(*ROLES)
    @ddt.unpack
    def test_empty_cache(self, role, target):
        # With no roles granted, the cache reports nothing.
        cache = RoleCache(self.user)
        self.assertFalse(cache.has_role(*target))
| agpl-3.0 |
matteopantano/youbot-thesis | states/move_base_state.py | 1 | 2886 | #!/usr/bin/env python
from flexbe_core import EventState, Logger
from flexbe_core.proxy import ProxyActionClient
from actionlib_msgs.msg import GoalStatus
from move_base_msgs.msg import *
from geometry_msgs.msg import Pose, Point, Quaternion, Pose2D
from tf import transformations
"""
Created on 11/19/2015
@author: Spyros Maniatopoulos
"""
class MoveBaseState(EventState):
    """
    Navigates a robot to a desired position and orientation using move_base.
    ># waypoint Pose2D Target waypoint for navigation.
    <= arrived Navigation to target pose succeeded.
    <= failed Navigation to target pose failed.
    """

    def __init__(self):
        """Constructor: declare outcomes/inputs and set up the action client."""
        super(MoveBaseState, self).__init__(outcomes = ['arrived', 'failed'],
                                            input_keys = ['waypoint'])

        self._action_topic = "/move_base"

        self._client = ProxyActionClient({self._action_topic: MoveBaseAction})

        # Outcome latches so execute() keeps returning the same result once
        # the goal has finished.
        self._arrived = False
        self._failed = False

    def execute(self, userdata):
        """Wait for action result and return outcome accordingly"""
        # Returns None (implicitly) while the goal is still in progress, which
        # keeps the state active in the FlexBE state machine.
        if self._arrived:
            return 'arrived'
        if self._failed:
            return 'failed'

        if self._client.has_result(self._action_topic):
            status = self._client.get_state(self._action_topic)
            if status == GoalStatus.SUCCEEDED:
                self._arrived = True
                return 'arrived'
            elif status in [GoalStatus.PREEMPTED, GoalStatus.REJECTED,
                            GoalStatus.RECALLED, GoalStatus.ABORTED]:
                Logger.logwarn('Navigation failed: %s' % str(status))
                self._failed = True
                return 'failed'

    def on_enter(self, userdata):
        """Create and send action goal"""
        self._arrived = False
        self._failed = False

        # Create and populate action goal
        goal = MoveBaseGoal()
        pt = Point(x = userdata.waypoint.x, y = userdata.waypoint.y)
        # Waypoint is a planar Pose2D: yaw (theta) becomes the quaternion.
        qt = transformations.quaternion_from_euler(0, 0, userdata.waypoint.theta)
        goal.target_pose.pose = Pose(position = pt,
                                     orientation = Quaternion(*qt))
        goal.target_pose.header.frame_id = "map"
        # goal.target_pose.header.stamp.secs = 5.0

        # Send the action goal for execution
        try:
            self._client.send_goal(self._action_topic, goal)
        except Exception as e:
            Logger.logwarn("Unable to send navigation action goal:\n%s" % str(e))
            self._failed = True

    def on_exit(self, userdata):
        # Cancel any goal that is still active so the robot does not keep
        # driving after the state machine has moved on.
        Logger.loginfo('Exiting Move Base State')
        if not self._client.has_result(self._action_topic):
            self._client.cancel(self._action_topic)
            Logger.loginfo('Cancelled active action goal.')
| mit |
ksrajkumar/openerp-6.1 | openerp/addons/base_synchro/wizard/base_synchro.py | 8 | 11046 | ## -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import osv
from datetime import date
import time
import pooler
import xmlrpclib
import re
import tools
import threading
from osv import osv, fields
class RPCProxyOne(object):
    """XML-RPC proxy bound to a single OpenERP model on a remote server.

    Logs in once at construction time; any attribute access then returns a
    callable that forwards the call to the remote model via ``execute``.
    (Python 2 / OpenERP 6 code: uses ``xmlrpclib``.)
    """
    def __init__(self, server, ressource):
        self.server = server
        # Authenticate against the common endpoint to obtain a uid.
        local_url = 'http://%s:%d/xmlrpc/common'%(server.server_url,server.server_port)
        rpc = xmlrpclib.ServerProxy(local_url)
        self.uid = rpc.login(server.server_db, server.login, server.password)
        # All model calls go through the object endpoint.
        local_url = 'http://%s:%d/xmlrpc/object'%(server.server_url,server.server_port)
        self.rpc = xmlrpclib.ServerProxy(local_url)
        self.ressource = ressource
    def __getattr__(self, name):
        # NOTE(review): **kwargs are accepted but silently dropped — only
        # positional args are forwarded over XML-RPC. cr/uid mirror the local
        # ORM signature but are ignored; the remote uid/password are used.
        return lambda cr, uid, *args, **kwargs: self.rpc.execute(self.server.server_db, self.uid, self.server.password, self.ressource, name, *args)
class RPCProxy(object):
    """Factory mirroring the local pool API: ``get(model)`` returns a remote
    per-model proxy (RPCProxyOne) for the configured server."""
    def __init__(self, server):
        self.server = server
    def get(self, ressource):
        return RPCProxyOne(self.server, ressource)
class base_synchro(osv.osv_memory):
    """Base Synchronization """
    # Transient wizard (osv_memory) driving two-way record synchronization
    # between this database and a remote OpenERP server over XML-RPC.
    # Python 2 / OpenERP 6 code (print statements, iteritems).
    _name = 'base.synchro'
    _columns = {
        'server_url': fields.many2one('base.synchro.server', "Server URL", required=True),
        'user_id': fields.many2one('res.users', "Send Result To",),
    }
    _defaults = {
        'user_id': lambda self,cr,uid,context: uid,
    }
    # NOTE(review): class-level mutable state — shared across all wizard
    # instances and concurrent runs; counters accumulate between runs.
    start_date = time.strftime('%Y-%m-%d, %Hh %Mm %Ss')
    report = []
    report_total = 0
    report_create = 0
    report_write = 0

    def synchronize(self, cr, uid, server, object, context=None):
        """Synchronize one configured object (model) with the remote server.

        Collects ids changed since the last sync in the download ('d'),
        upload ('u') or both ('b') direction, then replays each change on
        the destination side (write if already mapped, create otherwise).
        """
        pool = pooler.get_pool(cr.dbname)
        self.meta = {}
        ids = []
        pool1 = RPCProxy(server)   # remote side
        pool2 = pool               # local side
        #try:
        # HACK/SECURITY: object.domain is a stored string evaluated with
        # eval(); acceptable only because it is admin-configured, never
        # end-user input.
        if object.action in ('d','b'):
            ids = pool1.get('base.synchro.obj').get_ids(cr, uid,
                object.model_id.model,
                object.synchronize_date,
                eval(object.domain),
                {'action':'d'}
            )
        if object.action in ('u','b'):
            ids += pool2.get('base.synchro.obj').get_ids(cr, uid,
                object.model_id.model,
                object.synchronize_date,
                eval(object.domain),
                {'action':'u'}
            )
        # Tuples are (date, id, action): sorting replays changes in
        # chronological order.
        ids.sort()
        iii = 0
        for dt, id, action in ids:
            print 'Process', dt, id, action
            iii +=1
            # 'u' = upload: local is the source; otherwise remote is.
            if action=='u':
                pool_src = pool2
                pool_dest = pool1
            else:
                pool_src = pool1
                pool_dest = pool2
            print 'Read', object.model_id.model, id
            fields = False
            if object.model_id.model=='crm.case.history':
                fields = ['email','description','log_id']
            value = pool_src.get(object.model_id.model).read(cr, uid, [id], fields)[0]
            if 'create_date' in value:
                del value['create_date']
            # many2one fields read back as (id, name) tuples: keep the id.
            for key ,val in value.iteritems():
                if type(val)==tuple:
                    value.update({key:val[0]})
            value = self.data_transform(cr, uid, pool_src, pool_dest, object.model_id.model, value, action, context=context)
            id2 = self.get_id(cr, uid, object.id, id, action, context)
            #
            # Transform value
            #
            #tid=pool_dest.get(object.model_id.model).name_search(cr, uid, value['name'],[],'=',)
            if not (iii%50):
                print 'Record', iii
            # Filter fields to not sync
            for field in object.avoid_ids:
                if field.name in value:
                    del value[field.name]
            if id2:
                # Already mapped: update the destination record in place.
                #try:
                pool_dest.get(object.model_id.model).write(cr, uid, [id2], value)
                #except Exception, e:
                    #self.report.append('ERROR: Unable to update record ['+str(id2)+']:'+str(value.get('name', '?')))
                self.report_total+=1
                self.report_write+=1
            else:
                # Not mapped yet: create it and record the local<->remote
                # id mapping for future runs.
                print value
                idnew = pool_dest.get(object.model_id.model).create(cr, uid, value)
                synid = self.pool.get('base.synchro.obj.line').create(cr, uid, {
                    'obj_id': object.id,
                    'local_id': (action=='u') and id or idnew,
                    'remote_id': (action=='d') and id or idnew
                })
                self.report_total+=1
                self.report_create+=1
        self.meta = {}
        return True

    def get_id(self, cr, uid, object_id, id, action, context=None):
        """Look up the counterpart id of a record from the mapping table.

        Direction depends on action: 'u' maps local->remote, 'd' the reverse.
        Returns False when the record has never been synchronized.
        """
        pool = pooler.get_pool(cr.dbname)
        line_pool = pool.get('base.synchro.obj.line')
        field_src = (action=='u') and 'local_id' or 'remote_id'
        field_dest = (action=='d') and 'local_id' or 'remote_id'
        rid = line_pool.search(cr, uid, [('obj_id','=',object_id), (field_src,'=',id)], context=context)
        result = False
        if rid:
            result = line_pool.read(cr, uid, rid, [field_dest], context=context)[0][field_dest]
        return result

    def relation_transform(self, cr, uid, pool_src, pool_dest, object, id, action, context=None):
        """Translate a relational id (many2one/many2many target) to the
        destination database, via the sync mapping when the related model is
        itself synchronized, else by name_get/name_search matching."""
        if not id:
            return False
        pool = pooler.get_pool(cr.dbname)
        cr.execute('''select o.id from base_synchro_obj o left join ir_model m on (o.model_id =m.id) where
                m.model=%s and
                o.active''', (object,))
        obj = cr.fetchone()
        result = False
        if obj:
            #
            # If the object is synchronised and found, set it
            #
            result = self.get_id(cr, uid, obj[0], id, action, context)
        else:
            #
            # If not synchronized, try to find it with name_get/name_search
            #
            names = pool_src.get(object).name_get(cr, uid, [id])[0][1]
            res = pool_dest.get(object).name_search(cr, uid, names, [], 'like')
            if res:
                result = res[0][0]
            else:
                # LOG this in the report, better message.
                # NOTE(review): list.append returns None, so this prints None.
                print self.report.append('WARNING: Record "%s" on relation %s not found, set to null.' % (names,object))
        return result

    #
    # IN: object and ID
    # OUT: ID of the remote object computed:
    #      If object is synchronised, read the sync database
    #      Otherwise, use the name_search method
    #
    def data_transform(self, cr, uid, pool_src, pool_dest, object, data, action='u', context=None):
        """Rewrite a record dict so it can be written on the destination:
        drops non-copyable field types and remaps all relational ids."""
        # Cache fields_get per (source pool, model) for the run.
        self.meta.setdefault(pool_src, {})
        if not object in self.meta[pool_src]:
            self.meta[pool_src][object] = pool_src.get(object).fields_get(cr, uid)
        fields = self.meta[pool_src][object]
        for f in fields:
            if f not in data:
                continue
            ftype = fields[f]['type']
            if ftype in ('function', 'one2many', 'one2one'):
                # Computed / inverse-side fields are never pushed directly.
                del data[f]
            elif ftype == 'many2one':
                if (isinstance(data[f], list)) and data[f]:
                    fdata = data[f][0]
                else:
                    fdata = data[f]
                df = self.relation_transform(cr, uid, pool_src, pool_dest, fields[f]['relation'], fdata, action, context=context)
                data[f] = df
                if not data[f]:
                    del data[f]
            elif ftype == 'many2many':
                res = map(lambda x: self.relation_transform(cr, uid, pool_src, pool_dest, fields[f]['relation'], x, action, context), data[f])
                data[f] = [(6, 0, res)]
        del data['id']
        return data

    #
    # Find all objects that are created or modified after the synchronize_date
    # Synchronize these obejcts
    #
    def upload_download(self, cr, uid, ids, context=None):
        """Run synchronization for every object configured on the selected
        server, then (optionally) send a summary report as a res.request."""
        start_date = time.strftime('%Y-%m-%d, %Hh %Mm %Ss')
        syn_obj = self.browse(cr, uid, ids, context=context)[0]
        pool = pooler.get_pool(cr.dbname)
        server = pool.get('base.synchro.server').browse(cr, uid, syn_obj.server_url.id, context=context)
        for object in server.obj_ids:
            dt = time.strftime('%Y-%m-%d %H:%M:%S')
            self.synchronize(cr, uid, server, object, context=context)
            if object.action=='b':
                # Both directions: nudge the timestamp past the just-written
                # records so they are not picked up again next run.
                time.sleep(1)
                dt = time.strftime('%Y-%m-%d %H:%M:%S')
            self.pool.get('base.synchro.obj').write(cr, uid, [object.id], {'synchronize_date': dt})
        end_date = time.strftime('%Y-%m-%d, %Hh %Mm %Ss')
        if syn_obj.user_id:
            request = pooler.get_pool(cr.dbname).get('res.request')
            if not self.report:
                self.report.append('No exception.')
            summary = '''Here is the synchronization report:

     Synchronization started: %s
     Synchronization finnished: %s

     Synchronized records: %d
     Records updated: %d
     Records created: %d

     Exceptions:
        '''% (start_date,end_date,self.report_total, self.report_write,self.report_create)
            summary += '\n'.join(self.report)
            request.create(cr, uid, {
                'name' : "Synchronization report",
                'act_from' : uid,
                'act_to' : syn_obj.user_id.id,
                'body': summary,
            })
        return True

    def upload_download_multi_thread(self, cr, uid, data, context=None):
        """Wizard button handler: run upload_download and open the 'finished'
        view.  NOTE(review): Thread.run() executes synchronously in the
        current thread — use start() for real multi-threading."""
        threaded_synchronization = threading.Thread(target=self.upload_download, args=(cr, uid, data, context))
        threaded_synchronization.run()
        data_obj = self.pool.get('ir.model.data')
        id2 = data_obj._get_id(cr, uid, 'base_synchro', 'view_base_synchro_finish')
        if id2:
            id2 = data_obj.browse(cr, uid, id2, context=context).res_id
        return {
            'view_type': 'form',
            'view_mode': 'form',
            'res_model': 'base.synchro',
            'views': [(id2, 'form')],
            'view_id': False,
            'type': 'ir.actions.act_window',
            'target': 'new',
        }
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
taohungyang/cloud-custodian | tests/test_eks.py | 1 | 2017 | # Copyright 2018 Capital One Services, LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import, division, print_function, unicode_literals
from .common import BaseTest
class EKS(BaseTest):
    # Cloud Custodian policy tests for the AWS EKS resource, driven by
    # recorded API flight data (no live AWS calls).

    def test_query_with_subnet_sg_filter(self):
        # Clusters can be filtered via related subnet and security-group tags.
        factory = self.replay_flight_data("test_eks_query")
        p = self.load_policy(
            {
                "name": "eks",
                "resource": "eks",
                "filters": [
                    {'type': 'subnet',
                     'key': 'tag:kubernetes.io/cluster/dev',
                     'value': 'shared'},
                    {'type': 'security-group',
                     'key': 'tag:App',
                     'value': 'eks'}]
            },
            session_factory=factory,
        )
        resources = p.run()
        self.assertEqual(len(resources), 1)
        self.assertEqual(resources[0]['name'], 'dev')

    def test_delete_eks(self):
        # The delete action should put the matched cluster into DELETING state.
        factory = self.replay_flight_data("test_eks_delete")
        p = self.load_policy(
            {
                "name": "eksdelete",
                "resource": "eks",
                "filters": [{"name": "dev"}],
                "actions": ["delete"],
            },
            session_factory=factory,
        )
        resources = p.run()
        self.assertEqual(len(resources), 1)
        client = factory().client("eks")
        cluster = client.describe_cluster(name='dev').get('cluster')
        self.assertEqual(cluster['status'], 'DELETING')
| apache-2.0 |
edensparkles/FIRSTAID | FIRST_AID/venv/Lib/site-packages/pip/_vendor/requests/api.py | 150 | 5795 | # -*- coding: utf-8 -*-
"""
requests.api
~~~~~~~~~~~~
This module implements the Requests API.
:copyright: (c) 2012 by Kenneth Reitz.
:license: Apache2, see LICENSE for more details.
"""
from . import sessions
def request(method, url, **kwargs):
    """Constructs and sends a :class:`Request <Request>`.

    :param method: method for the new :class:`Request` object.
    :param url: URL for the new :class:`Request` object.
    :param params: (optional) Dictionary or bytes to be sent in the query string for the :class:`Request`.
    :param data: (optional) Dictionary, bytes, or file-like object to send in the body of the :class:`Request`.
    :param json: (optional) json data to send in the body of the :class:`Request`.
    :param headers: (optional) Dictionary of HTTP Headers to send with the :class:`Request`.
    :param cookies: (optional) Dict or CookieJar object to send with the :class:`Request`.
    :param files: (optional) Dictionary of ``'name': file-like-objects`` (or ``{'name': file-tuple}``) for multipart encoding upload.
        ``file-tuple`` can be a 2-tuple ``('filename', fileobj)``, 3-tuple ``('filename', fileobj, 'content_type')``
        or a 4-tuple ``('filename', fileobj, 'content_type', custom_headers)``, where ``'content-type'`` is a string
        defining the content type of the given file and ``custom_headers`` a dict-like object containing additional headers
        to add for the file.
    :param auth: (optional) Auth tuple to enable Basic/Digest/Custom HTTP Auth.
    :param timeout: (optional) How long to wait for the server to send data
        before giving up, as a float, or a :ref:`(connect timeout, read
        timeout) <timeouts>` tuple.
    :type timeout: float or tuple
    :param allow_redirects: (optional) Boolean. Set to True if POST/PUT/DELETE redirect following is allowed.
    :type allow_redirects: bool
    :param proxies: (optional) Dictionary mapping protocol to the URL of the proxy.
    :param verify: (optional) whether the SSL cert will be verified. A CA_BUNDLE path can also be provided. Defaults to ``True``.
    :param stream: (optional) if ``False``, the response content will be immediately downloaded.
    :param cert: (optional) if String, path to ssl client cert file (.pem). If Tuple, ('cert', 'key') pair.
    :return: :class:`Response <Response>` object
    :rtype: requests.Response

    Usage::

      >>> import requests
      >>> req = requests.request('GET', 'http://httpbin.org/get')
      <Response [200]>
    """
    # By using the 'with' statement we are sure the session is closed, thus we
    # avoid leaving sockets open which can trigger a ResourceWarning in some
    # cases, and look like a memory leak in others.
    with sessions.Session() as session:
        return session.request(method=method, url=url, **kwargs)
def get(url, params=None, **kwargs):
    """Send a GET request.

    Redirect following is enabled unless the caller explicitly passes
    ``allow_redirects``.

    :param url: URL for the new :class:`Request` object.
    :param params: (optional) Dictionary or bytes to be sent in the query string for the :class:`Request`.
    :param \*\*kwargs: Optional arguments that ``request`` takes.
    :return: :class:`Response <Response>` object
    :rtype: requests.Response
    """
    if 'allow_redirects' not in kwargs:
        kwargs['allow_redirects'] = True
    return request('get', url, params=params, **kwargs)
def options(url, **kwargs):
    """Sends an OPTIONS request.

    Redirects are followed by default (``allow_redirects`` defaults to True).

    :param url: URL for the new :class:`Request` object.
    :param \*\*kwargs: Optional arguments that ``request`` takes.
    :return: :class:`Response <Response>` object
    :rtype: requests.Response
    """
    kwargs.setdefault('allow_redirects', True)
    return request('options', url, **kwargs)
def head(url, **kwargs):
    """Send a HEAD request.

    Unlike :func:`get`, redirects are *not* followed by default, matching
    common HEAD semantics; pass ``allow_redirects=True`` to override.

    :param url: URL for the new :class:`Request` object.
    :param \*\*kwargs: Optional arguments that ``request`` takes.
    :return: :class:`Response <Response>` object
    :rtype: requests.Response
    """
    if 'allow_redirects' not in kwargs:
        kwargs['allow_redirects'] = False
    return request('head', url, **kwargs)
def post(url, data=None, json=None, **kwargs):
    """Sends a POST request.

    :param url: URL for the new :class:`Request` object.
    :param data: (optional) Dictionary, bytes, or file-like object to send in the body of the :class:`Request`.
    :param json: (optional) json data to send in the body of the :class:`Request`.
    :param \*\*kwargs: Optional arguments that ``request`` takes.
    :return: :class:`Response <Response>` object
    :rtype: requests.Response
    """
    return request('post', url, data=data, json=json, **kwargs)
def put(url, data=None, **kwargs):
    """Sends a PUT request.

    :param url: URL for the new :class:`Request` object.
    :param data: (optional) Dictionary, bytes, or file-like object to send in the body of the :class:`Request`.
    :param \*\*kwargs: Optional arguments that ``request`` takes.
    :return: :class:`Response <Response>` object
    :rtype: requests.Response
    """
    return request('put', url, data=data, **kwargs)
def patch(url, data=None, **kwargs):
    """Sends a PATCH request.

    :param url: URL for the new :class:`Request` object.
    :param data: (optional) Dictionary, bytes, or file-like object to send in the body of the :class:`Request`.
    :param \*\*kwargs: Optional arguments that ``request`` takes.
    :return: :class:`Response <Response>` object
    :rtype: requests.Response
    """
    return request('patch', url, data=data, **kwargs)
def delete(url, **kwargs):
    """Sends a DELETE request.

    :param url: URL for the new :class:`Request` object.
    :param \*\*kwargs: Optional arguments that ``request`` takes.
    :return: :class:`Response <Response>` object
    :rtype: requests.Response
    """
    return request('delete', url, **kwargs)
| mit |
Blitzen/oauthlib | oauthlib/oauth1/rfc5849/request_validator.py | 42 | 30446 | # -*- coding: utf-8 -*-
"""
oauthlib.oauth1.rfc5849
~~~~~~~~~~~~~~
This module is an implementation of various logic needed
for signing and checking OAuth 1.0 RFC 5849 requests.
"""
from __future__ import absolute_import, unicode_literals
from . import SIGNATURE_METHODS, utils
class RequestValidator(object):
"""A validator/datastore interaction base class for OAuth 1 providers.
OAuth providers should inherit from RequestValidator and implement the
methods and properties outlined below. Further details are provided in the
documentation for each method and property.
Methods used to check the format of input parameters. Common tests include
length, character set, membership, range or pattern. These tests are
referred to as `whitelisting or blacklisting`_. Whitelisting is better
but blacklisting can be usefull to spot malicious activity.
The following have methods a default implementation:
- check_client_key
- check_request_token
- check_access_token
- check_nonce
- check_verifier
- check_realms
The methods above default to whitelist input parameters, checking that they
are alphanumerical and between a minimum and maximum length. Rather than
overloading the methods a few properties can be used to configure these
methods.
* @safe_characters -> (character set)
* @client_key_length -> (min, max)
* @request_token_length -> (min, max)
* @access_token_length -> (min, max)
* @nonce_length -> (min, max)
* @verifier_length -> (min, max)
* @realms -> [list, of, realms]
Methods used to validate/invalidate input parameters. These checks usually
hit either persistent or temporary storage such as databases or the
filesystem. See each methods documentation for detailed usage.
The following methods must be implemented:
- validate_client_key
- validate_request_token
- validate_access_token
- validate_timestamp_and_nonce
- validate_redirect_uri
- validate_requested_realms
- validate_realms
- validate_verifier
- invalidate_request_token
Methods used to retrieve sensitive information from storage.
The following methods must be implemented:
- get_client_secret
- get_request_token_secret
- get_access_token_secret
- get_rsa_key
- get_realms
- get_default_realms
- get_redirect_uri
Methods used to save credentials.
The following methods must be implemented:
- save_request_token
- save_verifier
- save_access_token
Methods used to verify input parameters. This methods are used during
authorizing request token by user (AuthorizationEndpoint), to check if
parameters are valid. During token authorization request is not signed,
thus 'validation' methods can not be used. The following methods must be
implemented:
- verify_realms
- verify_request_token
To prevent timing attacks it is necessary to not exit early even if the
client key or resource owner key is invalid. Instead dummy values should
be used during the remaining verification process. It is very important
that the dummy client and token are valid input parameters to the methods
get_client_secret, get_rsa_key and get_(access/request)_token_secret and
that the running time of those methods when given a dummy value remain
equivalent to the running time when given a valid client/resource owner.
The following properties must be implemented:
* @dummy_client
* @dummy_request_token
* @dummy_access_token
Example implementations have been provided, note that the database used is
a simple dictionary and serves only an illustrative purpose. Use whichever
database suits your project and how to access it is entirely up to you.
The methods are introduced in an order which should make understanding
their use more straightforward and as such it could be worth reading what
follows in chronological order.
.. _`whitelisting or blacklisting`: http://www.schneier.com/blog/archives/2011/01/whitelisting_vs.html
"""
def __init__(self):
pass
@property
def allowed_signature_methods(self):
return SIGNATURE_METHODS
@property
def safe_characters(self):
return set(utils.UNICODE_ASCII_CHARACTER_SET)
@property
def client_key_length(self):
return 20, 30
@property
def request_token_length(self):
return 20, 30
@property
def access_token_length(self):
    """Inclusive (min, max) length bounds for access tokens."""
    return (20, 30)
@property
def timestamp_lifetime(self):
    # Maximum age, in seconds, accepted for the oauth_timestamp parameter.
    return 600
@property
def nonce_length(self):
    """Inclusive (min, max) length bounds for nonces."""
    return (20, 30)
@property
def verifier_length(self):
    """Inclusive (min, max) length bounds for verifiers."""
    return (20, 30)
@property
def realms(self):
    # Realms this provider supports; empty by default (no realm support).
    return []
@property
def enforce_ssl(self):
    # Whether requests must arrive over HTTPS; secure by default.
    return True
def check_client_key(self, client_key):
    """Return True when ``client_key`` has an allowed length and only
    contains characters from :attr:`safe_characters`.
    """
    min_len, max_len = self.client_key_length
    if not min_len <= len(client_key) <= max_len:
        return False
    return set(client_key) <= self.safe_characters
def check_request_token(self, request_token):
    """Return True when the request token has an allowed length and only
    contains characters from :attr:`safe_characters`.
    """
    min_len, max_len = self.request_token_length
    if not min_len <= len(request_token) <= max_len:
        return False
    return set(request_token) <= self.safe_characters
def check_access_token(self, request_token):
    """Return True when the access token has an allowed length and only
    contains characters from :attr:`safe_characters`.

    NOTE(review): the parameter is named ``request_token`` although it
    carries an access token; kept for interface compatibility.
    """
    min_len, max_len = self.access_token_length
    if not min_len <= len(request_token) <= max_len:
        return False
    return set(request_token) <= self.safe_characters
def check_nonce(self, nonce):
    """Return True when the nonce has an allowed length and only contains
    characters from :attr:`safe_characters`.
    """
    min_len, max_len = self.nonce_length
    if not min_len <= len(nonce) <= max_len:
        return False
    return set(nonce) <= self.safe_characters
def check_verifier(self, verifier):
    """Return True when the verifier has an allowed length and only
    contains characters from :attr:`safe_characters`.
    """
    min_len, max_len = self.verifier_length
    if not min_len <= len(verifier) <= max_len:
        return False
    return set(verifier) <= self.safe_characters
def check_realms(self, realms):
    """Return True when every requested realm is one of :attr:`realms`."""
    allowed = self.realms
    return all(r in allowed for r in realms)
@property
def dummy_client(self):
    """A client key to substitute when an invalid client is supplied.

    :returns: The dummy client key string.

    The dummy client must map to a valid secret and/or RSA key via
    ``get_client_secret(dummy_client)`` / ``get_rsa_key(dummy_client)``
    so that verifying a bogus request takes as long as verifying a
    genuine one (timing-attack mitigation).

    Used by AccessTokenEndpoint, RequestTokenEndpoint, ResourceEndpoint
    and SignatureOnlyEndpoint.
    """
    raise NotImplementedError("Subclasses must implement this function.")
@property
def dummy_request_token(self):
    """A request token to substitute when an invalid token is supplied.

    :returns: The dummy request token string.

    ``get_request_token_secret(.., dummy_request_token)`` must return a
    valid secret for it so timing does not reveal token validity.

    Used by AccessTokenEndpoint.
    """
    raise NotImplementedError("Subclasses must implement this function.")
@property
def dummy_access_token(self):
    """An access token to substitute when an invalid token is supplied.

    :returns: The dummy access token string.

    ``get_access_token_secret(.., dummy_access_token)`` must return a
    valid secret for it so timing does not reveal token validity.

    Used by ResourceEndpoint.
    """
    raise NotImplementedError("Subclasses must implement this function.")
def get_client_secret(self, client_key, request):
    """Return the plaintext secret associated with ``client_key``.

    :param client_key: The client/consumer key.
    :param request: An oauthlib.common.Request object.
    :returns: The client secret as a string.

    Implementations must also accept the dummy client key and answer for
    it in roughly the same time as for a real client, e.g. by always
    performing a single datastore lookup with a fallback value::

        from your_datastore import ClientSecret
        return ClientSecret.get(client_key, 'dummy')

    rather than branching on existence first, so that invalid clients
    cannot be detected through timing.

    Used by AccessTokenEndpoint, RequestTokenEndpoint, ResourceEndpoint
    and SignatureOnlyEndpoint.
    """
    raise NotImplementedError("Subclasses must implement this function.")
def get_request_token_secret(self, client_key, token, request):
    """Retrieves the shared secret associated with the request token.

    :param client_key: The client/consumer key.
    :param token: The request token string.
    :param request: An oauthlib.common.Request object.
    :returns: The token secret as a string.

    This method must allow the use of dummy values and the running time
    must be roughly equivalent to that of the running time of valid values::

        # Unlikely to be near constant time as it uses two database
        # lookups for a valid client, and only one for an invalid.
        from your_datastore import RequestTokenSecret
        if RequestTokenSecret.has(client_key):
            return RequestTokenSecret.get((client_key, token))
        else:
            return 'dummy'

        # Aim to mimic number of latency inducing operations no matter
        # whether the client is valid or not.
        from your_datastore import RequestTokenSecret
        return RequestTokenSecret.get((client_key, token), 'dummy')

    Note that the returned key must be in plaintext.

    This method is used by

    * AccessTokenEndpoint
    """
    raise NotImplementedError("Subclasses must implement this function.")
def get_access_token_secret(self, client_key, token, request):
    """Retrieves the shared secret associated with the access token.

    :param client_key: The client/consumer key.
    :param token: The access token string.
    :param request: An oauthlib.common.Request object.
    :returns: The token secret as a string.

    This method must allow the use of dummy values and the running time
    must be roughly equivalent to that of the running time of valid values::

        # Unlikely to be near constant time as it uses two database
        # lookups for a valid client, and only one for an invalid.
        from your_datastore import AccessTokenSecret
        if AccessTokenSecret.has(client_key):
            return AccessTokenSecret.get((client_key, token))
        else:
            return 'dummy'

        # Aim to mimic number of latency inducing operations no matter
        # whether the client is valid or not.
        from your_datastore import AccessTokenSecret
        return AccessTokenSecret.get((client_key, token), 'dummy')

    Note that the returned key must be in plaintext.

    This method is used by

    * ResourceEndpoint
    """
    raise NotImplementedError("Subclasses must implement this function.")
def get_default_realms(self, client_key, request):
    """Return the default realms for a client.

    :param client_key: The client/consumer key.
    :param request: An oauthlib.common.Request object.
    :returns: The list of default realms associated with the client.

    Default realms are established at client registration, outside the
    scope of OAuthLib.

    Used by RequestTokenEndpoint.
    """
    raise NotImplementedError("Subclasses must implement this function.")
def get_realms(self, token, request):
    """Return the realms associated with a request token.

    :param token: The request token string.
    :param request: An oauthlib.common.Request object.
    :returns: The list of realms associated with the request token.

    Used by AuthorizationEndpoint and AccessTokenEndpoint.
    """
    raise NotImplementedError("Subclasses must implement this function.")
def get_redirect_uri(self, token, request):
    """Return the redirect URI associated with a request token.

    :param token: The request token string.
    :param request: An oauthlib.common.Request object.
    :returns: The redirect URI associated with the request token.

    When the redirect is set to "oob" a custom URI may be returned; the
    user is then redirected there and the verifier can be displayed at
    that endpoint.

    Used by AuthorizationEndpoint.
    """
    raise NotImplementedError("Subclasses must implement this function.")
def get_rsa_key(self, client_key, request):
    """Return the client's previously stored RSA public key, in plaintext.

    :param client_key: The client/consumer key.
    :param request: An oauthlib.common.Request object.
    :returns: The rsa public key as a string.

    The dummy client key must also be accepted; fetching its key must
    take the same time as for a valid client and the dummy key must have
    the same bit length as real client keys (timing-attack mitigation).

    Used by AccessTokenEndpoint, RequestTokenEndpoint, ResourceEndpoint
    and SignatureOnlyEndpoint.
    """
    raise NotImplementedError("Subclasses must implement this function.")
def invalidate_request_token(self, client_key, request_token, request):
    """Invalidates a used request token.

    :param client_key: The client/consumer key.
    :param request_token: The request token string.
    :param request: An oauthlib.common.Request object.
    :returns: None

    Per `Section 2.3`_ of the spec:

    "The server MUST (...) ensure that the temporary
    credentials have not expired or been used before."

    .. _`Section 2.3`: http://tools.ietf.org/html/rfc5849#section-2.3

    This method should ensure that provided token won't validate anymore.
    It can be simply removing RequestToken from storage or setting
    specific flag that makes it invalid (note that such flag should be
    also validated during request token validation).

    This method is used by

    * AccessTokenEndpoint
    """
    raise NotImplementedError("Subclasses must implement this function.")
def validate_client_key(self, client_key, request):
    """Validate that ``client_key`` belongs to a registered, valid client.

    :param client_key: The client/consumer key.
    :param request: An oauthlib.common.Request object.
    :returns: True or False

    The dummy client must validate in (nearly) the same amount of time
    as a real one; mimic latency-inducing work rather than branching on
    the dummy value early, e.g.::

        from your_datastore import Client
        try:
            return Client.exists(client_key, access_token)
        except DoesNotExist:
            return False

    Used by AccessTokenEndpoint, RequestTokenEndpoint, ResourceEndpoint
    and SignatureOnlyEndpoint.
    """
    raise NotImplementedError("Subclasses must implement this function.")
def validate_request_token(self, client_key, token, request):
    """Validate that the supplied request token is registered and valid.

    :param client_key: The client/consumer key.
    :param token: The request token string.
    :param request: An oauthlib.common.Request object.
    :returns: True or False

    The dummy request token must validate in nearly the same amount of
    time as a valid one; mimic latency-inducing work rather than
    branching on the dummy value early, e.g.::

        from your_datastore import RequestToken
        try:
            return RequestToken.exists(client_key, access_token)
        except DoesNotExist:
            return False

    Used by AccessTokenEndpoint.
    """
    raise NotImplementedError("Subclasses must implement this function.")
def validate_access_token(self, client_key, token, request):
    """Validate that the supplied access token is registered and valid.

    :param client_key: The client/consumer key.
    :param token: The access token string.
    :param request: An oauthlib.common.Request object.
    :returns: True or False

    The dummy access token must validate in (nearly) the same amount of
    time as a valid one; mimic latency-inducing work rather than
    branching on the dummy value early, e.g.::

        from your_datastore import AccessToken
        try:
            return AccessToken.exists(client_key, access_token)
        except DoesNotExist:
            return False

    Used by ResourceEndpoint.
    """
    raise NotImplementedError("Subclasses must implement this function.")
def validate_timestamp_and_nonce(self, client_key, timestamp, nonce,
                                 request, request_token=None, access_token=None):
    """Validate that this timestamp/nonce combination is fresh.

    :param client_key: The client/consumer key.
    :param timestamp: The ``oauth_timestamp`` parameter.
    :param nonce: The ``oauth_nonce`` parameter.
    :param request: An oauthlib.common.Request object.
    :param request_token: Request token string, if any.
    :param access_token: Access token string, if any.
    :returns: True or False

    Per `Section 3.3`_ of the spec the nonce MUST be unique across all
    requests with the same timestamp, client credentials and token
    combination.  A repeated nonce/timestamp pair may indicate a replay
    attack and must be rejected; never allow reuse.  This check runs
    before client and token validity are checked.

    .. _`Section 3.3`: http://tools.ietf.org/html/rfc5849#section-3.3

    Used by AccessTokenEndpoint, RequestTokenEndpoint, ResourceEndpoint
    and SignatureOnlyEndpoint.
    """
    raise NotImplementedError("Subclasses must implement this function.")
def validate_redirect_uri(self, client_key, redirect_uri, request):
    """Validate the client supplied redirection URI.

    :param client_key: The client/consumer key.
    :param redirect_uri: The URI the client wishes to redirect back to
        after successful authorization.
    :param request: An oauthlib.common.Request object.
    :returns: True or False

    Providers should require clients to pre-register all redirection
    URIs as absolute URIs (see `CWE-601`_ on open redirection), which
    makes validation a straightforward membership check.  Alternatively,
    per `Section 2.1`_ of the spec, the value MUST be the literal "oob"
    (case sensitive) for out-of-band configurations.

    .. _`CWE-601`: http://cwe.mitre.org/top25/index.html#CWE-601
    .. _`Section 2.1`: https://tools.ietf.org/html/rfc5849#section-2.1

    Used by RequestTokenEndpoint.
    """
    raise NotImplementedError("Subclasses must implement this function.")
def validate_requested_realms(self, client_key, realms, request):
    """Validate that the client may request access to the given realms.

    :param client_key: The client/consumer key.
    :param realms: The list of realms the client requests access to.
    :param request: An oauthlib.common.Request object.
    :returns: True or False

    Invoked when obtaining a request token; the realm should be tied to
    that token and, after user authorization, carried over to the access
    token.

    Used by RequestTokenEndpoint.
    """
    raise NotImplementedError("Subclasses must implement this function.")
def validate_realms(self, client_key, token, request, uri=None,
                    realms=None):
    """Validate access to the requested realm.

    :param client_key: The client/consumer key.
    :param token: A request token string.
    :param request: An oauthlib.common.Request object.
    :param uri: The URI the realms is protecting.
    :param realms: A list of realms that must have been granted to the
        access token; a convenience for per-view pre-defined realm lists.
    :returns: True or False

    Realm usage is outside the OAuth specification but commonly
    restricts access to a subset of protected resources (e.g. "photos").
    Can be as simple as::

        from your_datastore import RequestToken
        request_token = RequestToken.get(token, None)
        if not request_token:
            return False
        return set(request_token.realms).issuperset(set(realms))

    Used by ResourceEndpoint.
    """
    raise NotImplementedError("Subclasses must implement this function.")
def validate_verifier(self, client_key, token, verifier, request):
    """Validate an authorization verification code.

    :param client_key: The client/consumer key.
    :param token: A request token string.
    :param verifier: The authorization verifier string.
    :param request: An oauthlib.common.Request object.
    :returns: True or False

    The verifier is issued after resource-owner authorization and later
    exchanged for token credentials, so it must be checked against both
    the client and the resource owner.  Compare in near constant time to
    avoid verifier enumeration, using
    ``oauthlib.common.safe_string_equals``::

        from your_datastore import Verifier
        correct_verifier = Verifier.get(client_key, request_token)
        from oauthlib.common import safe_string_equals
        return safe_string_equals(verifier, correct_verifier)

    Used by AccessTokenEndpoint.
    """
    raise NotImplementedError("Subclasses must implement this function.")
def verify_request_token(self, token, request):
    """Verify that the given OAuth1 request token is valid.

    :param token: A request token string.
    :param request: An oauthlib.common.Request object.
    :returns: True or False

    Only used by AuthorizationEndpoint, to check the ``oauth_token``
    from the authorization URL.  That request is not signed, so the
    similar ``validate_request_token`` method cannot be used here.
    """
    raise NotImplementedError("Subclasses must implement this function.")
def verify_realms(self, token, realms, request):
    """Verify that the authorized realms match those granted to ``token``.

    :param token: A token string.
    :param realms: A list of realms the client attempts to access.
    :param request: An oauthlib.common.Request object.
    :returns: True or False

    Prevents the realm list sent during the authorization step from
    being widened beyond what was bound to the request token.  Can be as
    simple as::

        valid_realms = self.get_realms(token)
        return all((r in valid_realms for r in realms))

    Used by AuthorizationEndpoint.
    """
    raise NotImplementedError("Subclasses must implement this function.")
def save_access_token(self, token, request):
    """Persist an OAuth1 access token.

    :param token: A dict with token credentials, at minimum containing
        ``oauth_token`` (the access token string), ``oauth_token_secret``
        (the token-specific signing secret) and
        ``oauth_authorized_realms`` (a space separated list of realms).
    :param request: An oauthlib.common.Request object; the client key is
        available as ``request.client_key`` and the realm list (not the
        joined string) as ``request.realm``.

    Used by AccessTokenEndpoint.
    """
    raise NotImplementedError("Subclasses must implement this function.")
def save_request_token(self, token, request):
    """Persist an OAuth1 request token.

    :param token: A dict with token credentials, at minimum containing
        ``oauth_token`` (the request token string), ``oauth_token_secret``
        (the token-specific signing secret) and
        ``oauth_callback_confirmed`` (the string ``true``).
    :param request: An oauthlib.common.Request object; the client key is
        available as ``request.client_key``.

    Used by RequestTokenEndpoint.
    """
    raise NotImplementedError("Subclasses must implement this function.")
def save_verifier(self, token, verifier, request):
    """Associate an authorization verifier with a request token.

    :param token: A request token string -- unlike the ``save_*_token``
        methods this is the ``oauth_token`` string of the previously
        saved request token, not a dict.
    :param verifier: A dictionary containing the oauth_verifier and
        oauth_token.
    :param request: An oauthlib.common.Request object.

    Verifiers must be linked to tokens so they can be validated during
    the access token request.

    Used by AuthorizationEndpoint.
    """
    raise NotImplementedError("Subclasses must implement this function.")
| bsd-3-clause |
pylayers/pylayers | pylayers/antprop/examples/ex_antenna2.py | 3 | 1396 | from pylayers.antprop.antenna import *
from pylayers.antprop.antvsh import *
import matplotlib.pylab as plt
from numpy import *
import pdb
"""
This test :
1 : loads a measured antenna
2 : applies an electrical delay obtained from data with getdelay method
3 : evaluate the antenna vsh coefficient with a downsampling factor of 2
4 : display the 16 first
"""
# --- 1: load a measured antenna ---------------------------------------------
filename = 'S1R1.mat'
A = Antenna(filename, directory='ant/UWBAN/Matfile')

# --- 2: compensate the electrical delay -------------------------------------
# Reshape the frequency axis so it broadcasts against Ftheta/Fphi
# (axes: frequency, theta, phi).
freq = A.fa.reshape(104, 1, 1)
delayCandidates = arange(-10, 10, 0.001)
electricalDelay = A.getdelay(freq, delayCandidates)
# The original called disp(), which only existed via numpy's star import
# and has been deprecated/removed in NumPy >= 2.0; print is equivalent here.
print('Electrical Delay = ' + str(electricalDelay) + ' ns')
A.Ftheta = A.Ftheta * exp(2 * 1j * pi * freq * electricalDelay)
A.Fphi = A.Fphi * exp(2 * 1j * pi * freq * electricalDelay)

# --- 3: vector spherical harmonics with downsampling factor 2 ----------------
dsf = 2
A = vsh(A, dsf)

# --- 4: display the 16 first coefficients ------------------------------------
v = np.abs(A.C.Br.s1)
u = np.nonzero(v == v.max())
plt.figure(figsize=(15, 15))
for l in range(16):
    plt.subplot(4, 4, l + 1)
    plt.plot(np.real(A.C.Br.s1[:, l, 0]), np.imag(A.C.Br.s1[:, l, 0]), 'k')
    plt.plot(np.real(A.C.Br.s1[:, l, 1]), np.imag(A.C.Br.s1[:, l, 1]), 'b')
    plt.plot(np.real(A.C.Br.s1[:, l, 2]), np.imag(A.C.Br.s1[:, l, 2]), 'r')
    # NOTE(review): column 2 is plotted twice (red and green); presumably the
    # green trace was meant to show column 3 -- confirm before changing.
    plt.plot(np.real(A.C.Br.s1[:, l, 2]), np.imag(A.C.Br.s1[:, l, 2]), 'g')
    plt.axis([-0.6, 0.6, -0.6, 0.6])
    plt.title('l=' + str(l))
plt.show()
| mit |
mrnamingo/enigma2-test | lib/python/Screens/ButtonSetup.py | 1 | 29638 | from GlobalActions import globalActionMap
from Components.ActionMap import ActionMap, HelpableActionMap
from Components.Button import Button
from Components.ChoiceList import ChoiceList, ChoiceEntryComponent
from Components.SystemInfo import SystemInfo
from Components.config import config, ConfigSubsection, ConfigText, ConfigYesNo
from Components.PluginComponent import plugins
from Screens.ChoiceBox import ChoiceBox
from Screens.Screen import Screen
from Screens.MessageBox import MessageBox
from Plugins.Plugin import PluginDescriptor
from Tools.BoundFunction import boundFunction
from ServiceReference import ServiceReference
from enigma import eServiceReference, eActionMap
from Components.Label import Label
import os
def getButtonSetupKeys():
    """Return the list of remote-control keys that can be reassigned.

    Each entry is a tuple ``(label, config_name, default_action)``:
    ``label`` is the translated text shown in the ButtonSetup screen,
    ``config_name`` is the attribute created under
    ``config.misc.ButtonSetup`` and ``default_action`` is the initial
    action string (empty when the key has no default assignment).
    Rebuilt on every call so labels follow the active language.
    """
    return [(_("Red"), "red", ""),
        (_("Red long"), "red_long", ""),
        (_("Green"), "green", ""),
        (_("Green long"), "green_long", ""),
        (_("Yellow"), "yellow", ""),
        (_("Yellow long"), "yellow_long", ""),
        (_("Blue"), "blue", ""),
        (_("Blue long"), "blue_long", ""),
        (_("Info (EPG)"), "info", "Infobar/InfoPressed/1"),
        (_("Info (EPG) Long"), "info_long", "Infobar/showEventInfoPlugins/1"),
        (_("Epg/Guide"), "epg", "Infobar/EPGPressed/1"),
        (_("Epg/Guide long"), "epg_long", "Infobar/showEventGuidePlugins/1"),
        (_("Left"), "cross_left", ""),
        (_("Right"), "cross_right", ""),
        (_("Up"), "cross_up", ""),
        (_("Down"), "cross_down", ""),
        (_("PageUp"), "pageup", ""),
        (_("PageUp long"), "pageup_long", ""),
        (_("PageDown"), "pagedown", ""),
        (_("PageDown long"), "pagedown_long", ""),
        (_("Channel up"), "channelup", ""),
        (_("Channel down"), "channeldown", ""),
        (_("TV"), "showTv", ""),
        (_("Radio"), "radio", ""),
        (_("Radio long"), "radio_long", ""),
        (_("Rec"), "rec", ""),
        (_("Rec long"), "rec_long", ""),
        (_("Teletext"), "text", ""),
        (_("Help"), "displayHelp", ""),
        (_("Help long"), "displayHelp_long", ""),
        (_("Subtitle"), "subtitle", ""),
        (_("Subtitle long"), "subtitle_long", ""),
        (_("Menu"), "mainMenu", ""),
        (_("List/Fav"), "list", ""),
        (_("List/Fav long"), "list_long", ""),
        (_("PVR"), "pvr", ""),
        (_("PVR long"), "pvr_long", ""),
        (_("Favorites"), "favorites", ""),
        (_("Favorites long"), "favorites_long", ""),
        (_("File"), "file", ""),
        (_("File long"), "file_long", ""),
        (_("OK long"), "ok_long", ""),
        (_("Media"), "media", ""),
        (_("Media long"), "media_long", ""),
        (_("Open"), "open", ""),
        (_("Open long"), "open_long", ""),
        (_("Www"), "www", ""),
        (_("Www long"), "www_long", ""),
        (_("Directory"), "directory", ""),
        (_("Directory long"), "directory_long", ""),
        (_("Back/Recall"), "back", ""),
        (_("Back/Recall") + " " + _("long"), "back_long", ""),
        (_("Home"), "home", ""),
        (_("End"), "end", ""),
        (_("Next"), "next", ""),
        (_("Previous"), "previous", ""),
        (_("Audio"), "audio", ""),
        (_("Play"), "play", ""),
        (_("Playpause"), "playpause", ""),
        (_("Stop"), "stop", ""),
        (_("Pause"), "pause", ""),
        (_("Rewind"), "rewind", ""),
        (_("Fastforward"), "fastforward", ""),
        (_("Skip back"), "skip_back", ""),
        (_("Skip forward"), "skip_forward", ""),
        (_("activatePiP"), "activatePiP", ""),
        (_("Timer"), "timer", ""),
        (_("Playlist"), "playlist", ""),
        (_("Playlist long"), "playlist_long", ""),
        (_("Timeshift"), "timeshift", ""),
        (_("Homepage"), "homep", ""),
        (_("Homepage long"), "homep_long", ""),
        (_("Search/WEB"), "search", ""),
        (_("Search/WEB long"), "search_long", ""),
        (_("Slow"), "slow", ""),
        (_("Mark/Portal/Playlist"), "mark", ""),
        (_("Sleep"), "sleep", ""),
        (_("Sleep long"), "sleep_long", ""),
        (_("Power"), "power", ""),
        (_("Power long"), "power_long", ""),
        (_("HDMIin"), "HDMIin", "Infobar/HDMIIn"),
        # HDMIin long falls back to no action when LCD LiveTV is unavailable.
        (_("HDMIin") + " " + _("long"), "HDMIin_long", (SystemInfo["LcdLiveTV"] and "Infobar/ToggleLCDLiveTV") or ""),
        (_("Context"), "contextMenu", "Infobar/showExtensionSelection"),
        (_("Context long"), "context_long", ""),
        (_("SAT"), "sat", "Infobar/openSatellites"),
        (_("SAT long"), "sat_long", ""),
        (_("Prov"), "prov", ""),
        (_("Prov long"), "prov_long", ""),
        (_("F1/LAN"), "f1", ""),
        (_("F1/LAN long"), "f1_long", ""),
        (_("F2"), "f2", ""),
        (_("F2 long"), "f2_long", ""),
        (_("F3"), "f3", ""),
        (_("F3 long"), "f3_long", ""),
        (_("F4"), "f4", ""),
        (_("F4 long"), "f4_long", ""),]
# Build the config.misc.ButtonSetup subsection: one ConfigText per
# assignable key, initialised with that key's default action string.
config.misc.ButtonSetup = ConfigSubsection()
config.misc.ButtonSetup.additional_keys = ConfigYesNo(default=True)
for x in getButtonSetupKeys():
    # setattr instead of building source text for exec: same effect, no
    # code generation, and safe even if a default ever contains a quote.
    setattr(config.misc.ButtonSetup, x[1], ConfigText(default=x[2]))
def getButtonSetupFunctions():
ButtonSetupFunctions = []
twinPlugins = []
twinPaths = {}
pluginlist = plugins.getPlugins(PluginDescriptor.WHERE_EVENTINFO)
pluginlist.sort(key=lambda p: p.name)
for plugin in pluginlist:
if plugin.name not in twinPlugins and plugin.path and 'selectedevent' not in plugin.__call__.func_code.co_varnames:
if twinPaths.has_key(plugin.path[24:]):
twinPaths[plugin.path[24:]] += 1
else:
twinPaths[plugin.path[24:]] = 1
ButtonSetupFunctions.append((plugin.name, plugin.path[24:] + "/" + str(twinPaths[plugin.path[24:]]) , "EPG"))
twinPlugins.append(plugin.name)
pluginlist = plugins.getPlugins([PluginDescriptor.WHERE_PLUGINMENU, PluginDescriptor.WHERE_EXTENSIONSMENU, PluginDescriptor.WHERE_EVENTINFO])
pluginlist.sort(key=lambda p: p.name)
for plugin in pluginlist:
if plugin.name not in twinPlugins and plugin.path:
if twinPaths.has_key(plugin.path[24:]):
twinPaths[plugin.path[24:]] += 1
else:
twinPaths[plugin.path[24:]] = 1
ButtonSetupFunctions.append((plugin.name, plugin.path[24:] + "/" + str(twinPaths[plugin.path[24:]]) , "Plugins"))
twinPlugins.append(plugin.name)
ButtonSetupFunctions.append((_("Show graphical multi EPG"), "Infobar/openGraphEPG", "EPG"))
ButtonSetupFunctions.append((_("Main menu"), "Infobar/mainMenu", "InfoBar"))
ButtonSetupFunctions.append((_("Show help"), "Infobar/showHelp", "InfoBar"))
ButtonSetupFunctions.append((_("Show extension selection"), "Infobar/showExtensionSelection", "InfoBar"))
ButtonSetupFunctions.append((_("Zap down"), "Infobar/zapDown", "InfoBar"))
ButtonSetupFunctions.append((_("Zap up"), "Infobar/zapUp", "InfoBar"))
ButtonSetupFunctions.append((_("Volume down"), "Infobar/volumeDown", "InfoBar"))
ButtonSetupFunctions.append((_("Volume up"), "Infobar/volumeUp", "InfoBar"))
ButtonSetupFunctions.append((_("Show Infobar"), "Infobar/toggleShow", "InfoBar"))
ButtonSetupFunctions.append((_("Show service list"), "Infobar/openServiceList", "InfoBar"))
ButtonSetupFunctions.append((_("Show favourites list"), "Infobar/openBouquets", "InfoBar"))
ButtonSetupFunctions.append((_("Show satellites list"), "Infobar/openSatellites", "InfoBar"))
ButtonSetupFunctions.append((_("History back"), "Infobar/historyBack", "InfoBar"))
ButtonSetupFunctions.append((_("History next"), "Infobar/historyNext", "InfoBar"))
ButtonSetupFunctions.append((_("Show eventinfo plugins"), "Infobar/showEventInfoPlugins", "EPG"))
ButtonSetupFunctions.append((_("Show event details"), "Infobar/openEventView", "EPG"))
ButtonSetupFunctions.append((_("Show single service EPG"), "Infobar/openSingleServiceEPG", "EPG"))
ButtonSetupFunctions.append((_("Show multi channel EPG"), "Infobar/openMultiServiceEPG", "EPG"))
ButtonSetupFunctions.append((_("Show Audioselection"), "Infobar/audioSelection", "InfoBar"))
ButtonSetupFunctions.append((_("Enable digital downmix"), "Infobar/audioDownmixOn", "InfoBar"))
ButtonSetupFunctions.append((_("Disable digital downmix"), "Infobar/audioDownmixOff", "InfoBar"))
ButtonSetupFunctions.append((_("Switch to radio mode"), "Infobar/showRadio", "InfoBar"))
ButtonSetupFunctions.append((_("Switch to TV mode"), "Infobar/showTv", "InfoBar"))
ButtonSetupFunctions.append((_("Show servicelist or movies"), "Infobar/showServiceListOrMovies", "InfoBar"))
ButtonSetupFunctions.append((_("Show movies"), "Infobar/showMovies", "InfoBar"))
ButtonSetupFunctions.append((_("Instant record"), "Infobar/instantRecord", "InfoBar"))
ButtonSetupFunctions.append((_("Start instant recording"), "Infobar/startInstantRecording", "InfoBar"))
ButtonSetupFunctions.append((_("Activate timeshift End"), "Infobar/activateTimeshiftEnd", "InfoBar"))
ButtonSetupFunctions.append((_("Activate timeshift end and pause"), "Infobar/activateTimeshiftEndAndPause", "InfoBar"))
ButtonSetupFunctions.append((_("Start timeshift"), "Infobar/startTimeshift", "InfoBar"))
ButtonSetupFunctions.append((_("Stop timeshift"), "Infobar/stopTimeshift", "InfoBar"))
ButtonSetupFunctions.append((_("Start teletext"), "Infobar/startTeletext", "InfoBar"))
ButtonSetupFunctions.append((_("Show subservice selection"), "Infobar/subserviceSelection", "InfoBar"))
ButtonSetupFunctions.append((_("Show subtitle selection"), "Infobar/subtitleSelection", "InfoBar"))
ButtonSetupFunctions.append((_("Show subtitle quick menu"), "Infobar/subtitleQuickMenu", "InfoBar"))
ButtonSetupFunctions.append((_("Letterbox zoom"), "Infobar/vmodeSelection", "InfoBar"))
if SystemInfo["PIPAvailable"]:
ButtonSetupFunctions.append((_("Show PIP"), "Infobar/showPiP", "InfoBar"))
ButtonSetupFunctions.append((_("Swap PIP"), "Infobar/swapPiP", "InfoBar"))
ButtonSetupFunctions.append((_("Move PIP"), "Infobar/movePiP", "InfoBar"))
ButtonSetupFunctions.append((_("Toggle PIPzap"), "Infobar/togglePipzap", "InfoBar"))
ButtonSetupFunctions.append((_("Activate HbbTV (Redbutton)"), "Infobar/activateRedButton", "InfoBar"))
ButtonSetupFunctions.append((_("Toggle HDMI-In full screen"), "Infobar/HDMIInFull", "InfoBar"))
ButtonSetupFunctions.append((_("Toggle HDMI-In PiP"), "Infobar/HDMIInPiP", "InfoBar"))
if SystemInfo["LcdLiveTV"]:
ButtonSetupFunctions.append((_("Toggle LCD LiveTV"), "Infobar/ToggleLCDLiveTV", "InfoBar"))
ButtonSetupFunctions.append((_("Hotkey Setup"), "Module/Screens.ButtonSetup/ButtonSetup", "Setup"))
ButtonSetupFunctions.append((_("Software update"), "Module/Screens.SoftwareUpdate/UpdatePlugin", "Setup"))
ButtonSetupFunctions.append((_("CI (Common Interface) Setup"), "Module/Screens.Ci/CiSelection", "Setup"))
ButtonSetupFunctions.append((_("Tuner Configuration"), "Module/Screens.Satconfig/NimSelection", "Scanning"))
ButtonSetupFunctions.append((_("Manual Scan"), "Module/Screens.ScanSetup/ScanSetup", "Scanning"))
ButtonSetupFunctions.append((_("Automatic Scan"), "Module/Screens.ScanSetup/ScanSimple", "Scanning"))
for plugin in plugins.getPluginsForMenu("scan"):
ButtonSetupFunctions.append((plugin[0], "MenuPlugin/scan/" + plugin[2], "Scanning"))
ButtonSetupFunctions.append((_("Network setup"), "Module/Screens.NetworkSetup/NetworkAdapterSelection", "Setup"))
ButtonSetupFunctions.append((_("Network menu"), "Infobar/showNetworkMounts", "Setup"))
ButtonSetupFunctions.append((_("Plugin Browser"), "Module/Screens.PluginBrowser/PluginBrowser", "Setup"))
ButtonSetupFunctions.append((_("Channel Info"), "Module/Screens.ServiceInfo/ServiceInfo", "Setup"))
ButtonSetupFunctions.append((_("SkinSelector"), "Module/Screens.SkinSelector/SkinSelector", "Setup"))
ButtonSetupFunctions.append((_("LCD SkinSelector"), "Module/Screens.SkinSelector/LcdSkinSelector", "Setup"))
ButtonSetupFunctions.append((_("Timer"), "Module/Screens.TimerEdit/TimerEditList", "Setup"))
ButtonSetupFunctions.append((_("Open AutoTimer"), "Infobar/showAutoTimerList", "Setup"))
for plugin in plugins.getPluginsForMenu("system"):
if plugin[2]:
ButtonSetupFunctions.append((plugin[0], "MenuPlugin/system/" + plugin[2], "Setup"))
ButtonSetupFunctions.append((_("Standby"), "Module/Screens.Standby/Standby", "Power"))
ButtonSetupFunctions.append((_("Restart"), "Module/Screens.Standby/TryQuitMainloop/2", "Power"))
ButtonSetupFunctions.append((_("Restart enigma"), "Module/Screens.Standby/TryQuitMainloop/3", "Power"))
ButtonSetupFunctions.append((_("Deep standby"), "Module/Screens.Standby/TryQuitMainloop/1", "Power"))
ButtonSetupFunctions.append((_("SleepTimer"), "Module/Screens.SleepTimerEdit/SleepTimerEdit", "Power"))
ButtonSetupFunctions.append((_("PowerTimer"), "Module/Screens.PowerTimerEdit/PowerTimerEditList", "Power"))
ButtonSetupFunctions.append((_("Usage Setup"), "Setup/usage", "Setup"))
ButtonSetupFunctions.append((_("User interface settings"), "Setup/userinterface", "Setup"))
ButtonSetupFunctions.append((_("Recording Setup"), "Setup/recording", "Setup"))
ButtonSetupFunctions.append((_("Harddisk Setup"), "Setup/harddisk", "Setup"))
ButtonSetupFunctions.append((_("Subtitles Settings"), "Setup/subtitlesetup", "Setup"))
ButtonSetupFunctions.append((_("Language"), "Module/Screens.LanguageSelection/LanguageSelection", "Setup"))
ButtonSetupFunctions.append((_("OscamInfo Mainmenu"), "Module/Screens.OScamInfo/OscamInfoMenu", "Plugins"))
ButtonSetupFunctions.append((_("CCcamInfo Mainmenu"), "Module/Screens.CCcamInfo/CCcamInfoMain", "Plugins"))
if os.path.isdir("/etc/ppanels"):
for x in [x for x in os.listdir("/etc/ppanels") if x.endswith(".xml")]:
x = x[:-4]
ButtonSetupFunctions.append((_("PPanel") + " " + x, "PPanel/" + x, "PPanels"))
if os.path.isdir("/usr/script"):
for x in [x for x in os.listdir("/usr/script") if x.endswith(".sh")]:
x = x[:-3]
ButtonSetupFunctions.append((_("Shellscript") + " " + x, "Shellscript/" + x, "Shellscripts"))
if os.path.isfile("/usr/lib/enigma2/python/Plugins/Extensions/Infopanel/ScriptRunner.pyo"):
ButtonSetupFunctions.append((_("ScriptRunner"), "ScriptRunner/", "Plugins"))
if os.path.isfile("/usr/lib/enigma2/python/Plugins/Extensions/Infopanel/QuickMenu.pyo"):
ButtonSetupFunctions.append((_("QuickMenu"), "QuickMenu/", "Plugins"))
if os.path.isfile("/usr/lib/enigma2/python/Plugins/Extensions/Kodi/plugin.pyo"):
ButtonSetupFunctions.append((_("Kodi MediaCenter"), "Kodi/", "Plugins"))
return ButtonSetupFunctions
class ButtonSetup(Screen):
    """Hotkey overview screen.

    Lists every remappable remote button.  Pressing a button on the remote
    jumps to its entry and opens ButtonSetupSelect so the user can
    (re)assign functions to that key.
    """

    def __init__(self, session, args=None):
        Screen.__init__(self, session)
        self['description'] = Label(_('Click on your remote on the button you want to change'))
        self.session = session
        self.setTitle(_("Hotkey Setup"))
        self["key_red"] = Button(_("Exit"))
        self.list = []
        self.ButtonSetupKeys = getButtonSetupKeys()
        self.ButtonSetupFunctions = getButtonSetupFunctions()
        for x in self.ButtonSetupKeys:
            self.list.append(ChoiceEntryComponent('', (_(x[0]), x[1])))
        # Only the first 10 keys are listed unless "additional keys" is enabled.
        self["list"] = ChoiceList(list=self.list[:config.misc.ButtonSetup.additional_keys.value and len(self.ButtonSetupKeys) or 10], selection=0)
        self["choosen"] = ChoiceList(list=[])
        self.getFunctions()
        self["actions"] = ActionMap(["OkCancelActions"],
            {
                "cancel": self.close,
            }, -1)
        # Route every configurable key press to ButtonSetupGlobal.
        self["ButtonSetupButtonActions"] = ButtonSetupActionMap(["ButtonSetupActions"], dict((x[1], self.ButtonSetupGlobal) for x in self.ButtonSetupKeys))
        self.longkeyPressed = False
        self.onLayoutFinish.append(self.__layoutFinished)
        self.onExecBegin.append(self.getFunctions)
        self.onShown.append(self.disableKeyMap)
        self.onClose.append(self.enableKeyMap)

    def __layoutFinished(self):
        self["choosen"].selectionEnabled(0)

    def disableKeyMap(self):
        """Suspend the global keymap so raw button presses reach this screen."""
        globalActionMap.setEnabled(False)
        eActionMap.getInstance().unbindNativeKey("ListboxActions", 0)
        eActionMap.getInstance().unbindNativeKey("ListboxActions", 1)
        eActionMap.getInstance().unbindNativeKey("ListboxActions", 4)
        eActionMap.getInstance().unbindNativeKey("ListboxActions", 5)

    def enableKeyMap(self):
        """Restore the global keymap and the default listbox navigation keys."""
        globalActionMap.setEnabled(True)
        eActionMap.getInstance().bindKey("keymap.xml", "generic", 103, 5, "ListboxActions", "moveUp")
        eActionMap.getInstance().bindKey("keymap.xml", "generic", 108, 5, "ListboxActions", "moveDown")
        eActionMap.getInstance().bindKey("keymap.xml", "generic", 105, 5, "ListboxActions", "pageUp")
        eActionMap.getInstance().bindKey("keymap.xml", "generic", 106, 5, "ListboxActions", "pageDown")

    def ButtonSetupGlobal(self, key):
        """Jump to the pressed key in the list and open the assignment dialog."""
        if self.longkeyPressed:
            # A long press already opened the dialog; swallow the trailing
            # short-press event for the same key.
            self.longkeyPressed = False
        else:
            index = 0
            for x in self.list[:config.misc.ButtonSetup.additional_keys.value and len(self.ButtonSetupKeys) or 10]:
                if key == x[0][1]:
                    self["list"].moveToIndex(index)
                    if key.endswith("_long"):
                        self.longkeyPressed = True
                    break
                index += 1
            self.getFunctions()
            self.session.open(ButtonSetupSelect, self["list"].l.getCurrentSelection())

    def getFunctions(self):
        """Refresh the right-hand list with the functions assigned to the selected key."""
        key = self["list"].l.getCurrentSelection()[0][1]
        if key:
            selected = []
            # Assignments are stored as a comma-separated list of function ids.
            # getattr() replaces the original eval() for a safe attribute lookup.
            for x in getattr(config.misc.ButtonSetup, key).value.split(','):
                function = list(function for function in self.ButtonSetupFunctions if function[1] == x)
                if function:
                    selected.append(ChoiceEntryComponent('', ((function[0][0]), function[0][1])))
            self["choosen"].setList(selected)
class ButtonSetupSelect(Screen):
    """Assignment dialog for a single remote key.

    Shows two lists: all available functions grouped in expandable
    categories, and the functions already assigned to the key.  CH+/-
    toggles between the lists, OK (de)assigns an entry, and
    Next/Previous reorders the assigned functions.
    """

    def __init__(self, session, key, args=None):
        Screen.__init__(self, session)
        self.skinName = "ButtonSetupSelect"
        self['description'] = Label(_('Select the desired function and click on "OK" to assign it. Use "CH+/-" to toggle between the lists. Select an assigned function and click on "OK" to de-assign it. Use "Next/Previous" to change the order of the assigned functions.'))
        self.session = session
        self.key = key
        self.setTitle(_("Hotkey Setup for") + ": " + key[0][0])
        self["key_red"] = Button(_("Cancel"))
        self["key_green"] = Button(_("Save"))
        self.mode = "list"
        self.ButtonSetupFunctions = getButtonSetupFunctions()
        # getattr() replaces the original eval() to resolve the config element.
        self.config = getattr(config.misc.ButtonSetup, key[0][1])
        self.expanded = []
        self.selected = []
        for x in self.config.value.split(','):
            function = list(function for function in self.ButtonSetupFunctions if function[1] == x)
            if function:
                self.selected.append(ChoiceEntryComponent('', ((function[0][0]), function[0][1])))
        # Snapshot of the assignments so cancel() can detect unsaved changes.
        self.prevselected = self.selected[:]
        self["choosen"] = ChoiceList(list=self.selected, selection=0)
        self["list"] = ChoiceList(list=self.getFunctionList(), selection=0)
        self["actions"] = ActionMap(["OkCancelActions", "ColorActions", "DirectionActions", "KeyboardInputActions"],
            {
                "ok": self.keyOk,
                "cancel": self.cancel,
                "red": self.cancel,
                "green": self.save,
                "up": self.keyUp,
                "down": self.keyDown,
                "left": self.keyLeft,
                "right": self.keyRight,
                "pageUp": self.toggleMode,
                "pageDown": self.toggleMode,
                "shiftUp": self.moveUp,
                "shiftDown": self.moveDown,
            }, -1)
        # Opposite of ButtonSetup: while this dialog is shown the normal
        # keymap is active again, and it is released when the dialog closes.
        self.onShown.append(self.enableKeyMap)
        self.onClose.append(self.disableKeyMap)
        self.onLayoutFinish.append(self.__layoutFinished)

    def __layoutFinished(self):
        self["choosen"].selectionEnabled(0)

    def disableKeyMap(self):
        """Suspend the global keymap and the default listbox navigation keys."""
        globalActionMap.setEnabled(False)
        eActionMap.getInstance().unbindNativeKey("ListboxActions", 0)
        eActionMap.getInstance().unbindNativeKey("ListboxActions", 1)
        eActionMap.getInstance().unbindNativeKey("ListboxActions", 4)
        eActionMap.getInstance().unbindNativeKey("ListboxActions", 5)

    def enableKeyMap(self):
        """Restore the global keymap and the default listbox navigation keys."""
        globalActionMap.setEnabled(True)
        eActionMap.getInstance().bindKey("keymap.xml", "generic", 103, 5, "ListboxActions", "moveUp")
        eActionMap.getInstance().bindKey("keymap.xml", "generic", 108, 5, "ListboxActions", "moveDown")
        eActionMap.getInstance().bindKey("keymap.xml", "generic", 105, 5, "ListboxActions", "pageUp")
        eActionMap.getInstance().bindKey("keymap.xml", "generic", 106, 5, "ListboxActions", "pageDown")

    def getFunctionList(self):
        """Build the choice list: category headers plus, when expanded, their functions."""
        functionslist = []
        catagories = {}
        for function in self.ButtonSetupFunctions:
            # 'in' instead of the Python-2-only dict.has_key().
            if function[2] not in catagories:
                catagories[function[2]] = []
            catagories[function[2]].append(function)
        for catagorie in sorted(list(catagories)):
            if catagorie in self.expanded:
                functionslist.append(ChoiceEntryComponent('expanded', ((catagorie), "Expander")))
                for function in catagories[catagorie]:
                    functionslist.append(ChoiceEntryComponent('verticalline', ((function[0]), function[1])))
            else:
                functionslist.append(ChoiceEntryComponent('expandable', ((catagorie), "Expander")))
        return functionslist

    def toggleMode(self):
        """Switch the focus between the function list and the assigned list."""
        if self.mode == "list" and self.selected:
            self.mode = "choosen"
            self["choosen"].selectionEnabled(1)
            self["list"].selectionEnabled(0)
        elif self.mode == "choosen":
            self.mode = "list"
            self["choosen"].selectionEnabled(0)
            self["list"].selectionEnabled(1)

    def keyOk(self):
        """Expand/collapse a category, or (de)assign the selected function."""
        if self.mode == "list":
            currentSelected = self["list"].l.getCurrentSelection()
            if currentSelected[0][1] == "Expander":
                if currentSelected[0][0] in self.expanded:
                    self.expanded.remove(currentSelected[0][0])
                else:
                    self.expanded.append(currentSelected[0][0])
                self["list"].setList(self.getFunctionList())
            else:
                if currentSelected[:2] in self.selected:
                    self.selected.remove(currentSelected[:2])
                else:
                    self.selected.append(currentSelected[:2])
        elif self.selected:
            self.selected.remove(self["choosen"].l.getCurrentSelection())
            if not self.selected:
                self.toggleMode()
        self["choosen"].setList(self.selected)

    def keyLeft(self):
        self[self.mode].instance.moveSelection(self[self.mode].instance.pageUp)

    def keyRight(self):
        self[self.mode].instance.moveSelection(self[self.mode].instance.pageDown)

    def keyUp(self):
        self[self.mode].instance.moveSelection(self[self.mode].instance.moveUp)

    def keyDown(self):
        self[self.mode].instance.moveSelection(self[self.mode].instance.moveDown)

    def moveUp(self):
        self.moveChoosen(self.keyUp)

    def moveDown(self):
        self.moveChoosen(self.keyDown)

    def moveChoosen(self, direction):
        """Swap the selected assigned entry with its neighbour in *direction*."""
        if self.mode == "choosen":
            currentIndex = self["choosen"].getSelectionIndex()
            # Wraps around at either end of the list.
            swapIndex = (currentIndex + (direction == self.keyDown and 1 or -1)) % len(self["choosen"].list)
            self["choosen"].list[currentIndex], self["choosen"].list[swapIndex] = self["choosen"].list[swapIndex], self["choosen"].list[currentIndex]
            self["choosen"].setList(self["choosen"].list)
            direction()
        else:
            return 0

    def save(self):
        """Persist the assigned functions as a comma-separated id list."""
        configValue = []
        for x in self.selected:
            configValue.append(x[0][1])
        self.config.value = ",".join(configValue)
        self.config.save()
        self.close()

    def cancel(self):
        """Close, asking for confirmation if there are unsaved changes."""
        if self.selected != self.prevselected:
            self.session.openWithCallback(self.cancelCallback, MessageBox, _("Are you sure to cancel all changes"), default=False)
        else:
            self.close()

    def cancelCallback(self, answer):
        answer and self.close()
class ButtonSetupActionMap(ActionMap):
    """ActionMap that routes configured hotkeys to their bound handler.

    Any action corresponding to one of the ButtonSetup keys is dispatched
    to the handler registered for it; everything else falls through to the
    normal ActionMap processing.
    """
    def action(self, contexts, action):
        # 'in self.actions' instead of the Python-2-only dict.has_key().
        if action in tuple(x[1] for x in getButtonSetupKeys()) and action in self.actions:
            res = self.actions[action](action)
            if res is not None:
                return res
            return 1
        else:
            return ActionMap.action(self, contexts, action)
class helpableButtonSetupActionMap(HelpableActionMap):
    """HelpableActionMap variant of ButtonSetupActionMap.

    Dispatches configured hotkeys to their bound handler; other actions
    fall through to standard processing.
    """
    def action(self, contexts, action):
        # 'in self.actions' instead of the Python-2-only dict.has_key().
        if action in tuple(x[1] for x in getButtonSetupKeys()) and action in self.actions:
            res = self.actions[action](action)
            if res is not None:
                return res
            return 1
        else:
            # NOTE(review): falls back to ActionMap.action, not
            # HelpableActionMap.action — presumably intentional; confirm.
            return ActionMap.action(self, contexts, action)
class InfoBarButtonSetup():
    """Mixin for InfoBar screens: binds the configured hotkeys and executes
    the function(s) assigned to a pressed key."""

    def __init__(self):
        self.ButtonSetupKeys = getButtonSetupKeys()
        # Bind every configurable key to ButtonSetupGlobal, with a per-key
        # help text provider.
        self["ButtonSetupButtonActions"] = helpableButtonSetupActionMap(self, "ButtonSetupActions",
            dict((x[1],(self.ButtonSetupGlobal, boundFunction(self.getHelpText, x[1]))) for x in self.ButtonSetupKeys), -10)
        self.longkeyPressed = False
        self.onExecEnd.append(self.clearLongkeyPressed)

    def clearLongkeyPressed(self):
        self.longkeyPressed = False

    def getKeyFunctions(self, key):
        """Return the list of (title, id) functions assigned to *key*, or
        False when the key is reserved (movie player / active timeshift)."""
        if key in ("play", "playpause", "Stop", "stop", "pause", "rewind", "next", "previous", "fastforward", "skip_back", "skip_forward") and (self.__class__.__name__ == "MoviePlayer" or hasattr(self, "timeshiftActivated") and self.timeshiftActivated()):
            return False
        # NOTE(review): eval() on a config-derived string; a getattr-based
        # lookup would be safer — confirm before changing.
        selection = eval("config.misc.ButtonSetup." + key + ".value.split(',')")
        selected = []
        for x in selection:
            # Zap/ZapPanic entries embed a service reference after the first '/'.
            if x.startswith("ZapPanic"):
                selected.append(((_("Panic to") + " " + ServiceReference(eServiceReference(x.split("/", 1)[1]).toString()).getServiceName()), x))
            elif x.startswith("Zap"):
                selected.append(((_("Zap to") + " " + ServiceReference(eServiceReference(x.split("/", 1)[1]).toString()).getServiceName()), x))
            else:
                function = list(function for function in getButtonSetupFunctions() if function[1] == x )
                if function:
                    selected.append(function[0])
        return selected

    def getHelpText(self, key):
        """Help text for *key*: the single function name, or a generic label."""
        selected = self.getKeyFunctions(key)
        if not selected:
            return
        if len(selected) == 1:
            return selected[0][0]
        else:
            return _("ButtonSetup") + " " + tuple(x[0] for x in self.ButtonSetupKeys if x[1] == key)[0]

    def ButtonSetupGlobal(self, key):
        """Execute the function assigned to *key*, or offer a ChoiceBox when
        several functions are assigned."""
        if self.longkeyPressed:
            # A long press was already handled; ignore the trailing short press.
            self.longkeyPressed = False
        else:
            selected = self.getKeyFunctions(key)
            if not selected:
                return 0
            elif len(selected) == 1:
                if key.endswith("_long"):
                    self.longkeyPressed = True
                return self.execButtonSetup(selected[0])
            else:
                key = tuple(x[0] for x in self.ButtonSetupKeys if x[1] == key)[0]
                self.session.openWithCallback(self.execButtonSetup, ChoiceBox, (_("Hotkey")) + " " + key, selected)

    def execButtonSetup(self, selected):
        """Dispatch one assigned function id of the form "<type>/<args...>"."""
        if selected:
            selected = selected[1].split("/")
            if selected[0] == "Plugins":
                # Match the plugin by its path suffix plus a duplicate counter,
                # first among event-info plugins, then menu/extension plugins.
                twinPlugins = []
                twinPaths = {}
                pluginlist = plugins.getPlugins(PluginDescriptor.WHERE_EVENTINFO)
                pluginlist.sort(key=lambda p: p.name)
                for plugin in pluginlist:
                    if plugin.name not in twinPlugins and plugin.path and 'selectedevent' not in plugin.__call__.func_code.co_varnames:
                        if twinPaths.has_key(plugin.path[24:]):
                            twinPaths[plugin.path[24:]] += 1
                        else:
                            twinPaths[plugin.path[24:]] = 1
                        if plugin.path[24:] + "/" + str(twinPaths[plugin.path[24:]]) == "/".join(selected):
                            self.runPlugin(plugin)
                            return
                        twinPlugins.append(plugin.name)
                pluginlist = plugins.getPlugins([PluginDescriptor.WHERE_PLUGINMENU, PluginDescriptor.WHERE_EXTENSIONSMENU])
                pluginlist.sort(key=lambda p: p.name)
                for plugin in pluginlist:
                    if plugin.name not in twinPlugins and plugin.path:
                        if twinPaths.has_key(plugin.path[24:]):
                            twinPaths[plugin.path[24:]] += 1
                        else:
                            twinPaths[plugin.path[24:]] = 1
                        if plugin.path[24:] + "/" + str(twinPaths[plugin.path[24:]]) == "/".join(selected):
                            self.runPlugin(plugin)
                            return
                        twinPlugins.append(plugin.name)
            elif selected[0] == "MenuPlugin":
                for plugin in plugins.getPluginsForMenu(selected[1]):
                    if plugin[2] == selected[2]:
                        self.runPlugin(plugin[1])
                        return
            elif selected[0] == "Infobar":
                # NOTE(review): exec on a config-derived string; the hasattr
                # guard limits it to existing InfoBar methods.
                if hasattr(self, selected[1]):
                    exec "self." + ".".join(selected[1:]) + "()"
                else:
                    return 0
            elif selected[0] == "Module":
                try:
                    exec "from " + selected[1] + " import *"
                    exec "self.session.open(" + ",".join(selected[2:]) + ")"
                except:
                    print "[ButtonSetup] error during executing module %s, screen %s" % (selected[1], selected[2])
            elif selected[0] == "Setup":
                exec "from Screens.Setup import *"
                exec "self.session.open(Setup, \"" + selected[1] + "\")"
            elif selected[0].startswith("Zap"):
                if selected[0] == "ZapPanic":
                    # Panic zap: clear history and close a running PiP first.
                    self.servicelist.history = []
                    self.pipShown() and self.showPiP()
                self.servicelist.servicelist.setCurrent(eServiceReference("/".join(selected[1:])))
                self.servicelist.zap(enable_pipzap = True)
                if hasattr(self, "lastservice"):
                    self.lastservice = eServiceReference("/".join(selected[1:]))
                    self.close()
                else:
                    self.show()
                from Screens.MovieSelection import defaultMoviePath
                moviepath = defaultMoviePath()
                if moviepath:
                    config.movielist.last_videodir.value = moviepath
            elif selected[0] == "PPanel":
                ppanelFileName = '/etc/ppanels/' + selected[1] + ".xml"
                if os.path.isfile(ppanelFileName) and os.path.isdir('/usr/lib/enigma2/python/Plugins/Extensions/PPanel'):
                    from Plugins.Extensions.PPanel.ppanel import PPanel
                    self.session.open(PPanel, name=selected[1] + ' PPanel', node=None, filename=ppanelFileName, deletenode=None)
            elif selected[0] == "Shellscript":
                command = '/usr/script/' + selected[1] + ".sh"
                if os.path.isfile(command) and os.path.isdir('/usr/lib/enigma2/python/Plugins/Extensions/PPanel'):
                    from Plugins.Extensions.PPanel.ppanel import Execute
                    self.session.open(Execute, selected[1] + " shellscript", None, command)
                else:
                    from Screens.Console import Console
                    exec "self.session.open(Console,_(selected[1]),[command])"
            elif selected[0] == "EMC":
                try:
                    from Plugins.Extensions.EnhancedMovieCenter.plugin import showMoviesNew
                    from Screens.InfoBar import InfoBar
                    # NOTE(review): calls the builtin open() on the result of
                    # showMoviesNew — looks wrong; confirm intended behavior.
                    open(showMoviesNew(InfoBar.instance))
                except Exception as e:
                    print('[EMCPlayer] showMovies exception:\n' + str(e))
            elif selected[0] == "ScriptRunner":
                if os.path.isfile("/usr/lib/enigma2/python/Plugins/Extensions/Infopanel/ScriptRunner.pyo"):
                    from Plugins.Extensions.Infopanel.ScriptRunner import ScriptRunner
                    self.session.open (ScriptRunner)
            elif selected[0] == "QuickMenu":
                if os.path.isfile("/usr/lib/enigma2/python/Plugins/Extensions/Infopanel/QuickMenu.pyo"):
                    from Plugins.Extensions.Infopanel.QuickMenu import QuickMenu
                    self.session.open (QuickMenu)
            elif selected[0] == "Kodi":
                if os.path.isfile("/usr/lib/enigma2/python/Plugins/Extensions/Kodi/plugin.pyo"):
                    from Plugins.Extensions.Kodi.plugin import KodiMainScreen
                    self.session.open(KodiMainScreen)

    def showServiceListOrMovies(self):
        """Open the service list if available, otherwise the movie list."""
        if hasattr(self, "openServiceList"):
            self.openServiceList()
        elif hasattr(self, "showMovies"):
            self.showMovies()

    def ToggleLCDLiveTV(self):
        """Toggle live TV display on the front LCD."""
        config.lcd.showTv.value = not config.lcd.showTv.value
| gpl-2.0 |
yonglehou/scikit-learn | benchmarks/bench_multilabel_metrics.py | 276 | 7138 | #!/usr/bin/env python
"""
A comparison of multilabel target formats and metrics over them
"""
from __future__ import division
from __future__ import print_function
from timeit import timeit
from functools import partial
import itertools
import argparse
import sys
import matplotlib.pyplot as plt
import scipy.sparse as sp
import numpy as np
from sklearn.datasets import make_multilabel_classification
from sklearn.metrics import (f1_score, accuracy_score, hamming_loss,
jaccard_similarity_score)
from sklearn.utils.testing import ignore_warnings
# Metric callables to benchmark; the averaged f1 variants are pre-bound
# with functools.partial.
METRICS = {
    'f1': partial(f1_score, average='micro'),
    'f1-by-sample': partial(f1_score, average='samples'),
    'accuracy': accuracy_score,
    'hamming': hamming_loss,
    'jaccard': jaccard_similarity_score,
}
# Converters from a dense indicator matrix to each multilabel representation.
FORMATS = {
    'sequences': lambda y: [list(np.flatnonzero(s)) for s in y],
    'dense': lambda y: y,
    'csr': lambda y: sp.csr_matrix(y),
    'csc': lambda y: sp.csc_matrix(y),
}
@ignore_warnings
def benchmark(metrics=tuple(v for k, v in sorted(METRICS.items())),
              formats=tuple(v for k, v in sorted(FORMATS.items())),
              samples=1000, classes=4, density=.2,
              n_times=5):
    """Time every metric on every format over each input configuration.

    Parameters
    ----------
    metrics : array-like of callables (1d or 0d)
        The metric functions to time.
    formats : array-like of callables (1d or 0d)
        Converters from a dense indicator matrix to a multilabel
        representation.
    samples : array-like of ints (1d or 0d)
        Numbers of samples to generate as input.
    classes : array-like of ints (1d or 0d)
        Numbers of classes in the input.
    density : array-like of ints (1d or 0d)
        Densities of positive labels in the input.
    n_times : int
        Each metric call is repeated this many times per timing.

    Returns
    -------
    array of floats shaped (metrics, formats, samples, classes, density)
        Elapsed time in seconds.
    """
    metrics = np.atleast_1d(metrics)
    samples = np.atleast_1d(samples)
    classes = np.atleast_1d(classes)
    density = np.atleast_1d(density)
    formats = np.atleast_1d(formats)

    out = np.zeros((len(metrics), len(formats), len(samples), len(classes),
                    len(density)), dtype=float)

    configurations = itertools.product(samples, classes, density)
    for i, (n_samples, n_classes, dens) in enumerate(configurations):
        # Fixed seeds make y_true / y_pred reproducible but distinct.
        _, y_true = make_multilabel_classification(
            n_samples=n_samples, n_features=1, n_classes=n_classes,
            n_labels=dens * n_classes, random_state=42)
        _, y_pred = make_multilabel_classification(
            n_samples=n_samples, n_features=1, n_classes=n_classes,
            n_labels=dens * n_classes, random_state=84)
        for j, to_format in enumerate(formats):
            f_true = to_format(y_true)
            f_pred = to_format(y_pred)
            for k, metric in enumerate(metrics):
                elapsed = timeit(partial(metric, f_true, f_pred),
                                 number=n_times)
                out[k, j].flat[i] = elapsed
    return out
def _tabulate(results, metrics, formats):
"""Prints results by metric and format
Uses the last ([-1]) value of other fields
"""
column_width = max(max(len(k) for k in formats) + 1, 8)
first_width = max(len(k) for k in metrics)
head_fmt = ('{:<{fw}s}' + '{:>{cw}s}' * len(formats))
row_fmt = ('{:<{fw}s}' + '{:>{cw}.3f}' * len(formats))
print(head_fmt.format('Metric', *formats,
cw=column_width, fw=first_width))
for metric, row in zip(metrics, results[:, :, -1, -1, -1]):
print(row_fmt.format(metric, *row,
cw=column_width, fw=first_width))
def _plot(results, metrics, formats, title, x_ticks, x_label,
          format_markers=('x', '|', 'o', '+'),
          metric_colors=('c', 'm', 'y', 'k', 'g', 'r', 'b')):
    """Plot timing results against *x_label* for every metric/format pair.

    Each metric gets a color (cycled) and each format a marker.
    """
    fig = plt.figure('scikit-learn multilabel metrics benchmarks')
    plt.title(title)
    ax = fig.add_subplot(111)
    n_colors = len(metric_colors)
    for i, metric in enumerate(metrics):
        color = metric_colors[i % n_colors]
        for j, fmt in enumerate(formats):
            ax.plot(x_ticks, results[i, j].flat,
                    label='{}, {}'.format(metric, fmt),
                    marker=format_markers[j],
                    color=color)
    ax.set_xlabel(x_label)
    ax.set_ylabel('Time (s)')
    ax.legend()
    plt.show()
if __name__ == "__main__":
    ap = argparse.ArgumentParser()
    ap.add_argument('metrics', nargs='*', default=sorted(METRICS),
                    help='Specifies metrics to benchmark, defaults to all. '
                    'Choices are: {}'.format(sorted(METRICS)))
    ap.add_argument('--formats', nargs='+', choices=sorted(FORMATS),
                    help='Specifies multilabel formats to benchmark '
                    '(defaults to all).')
    ap.add_argument('--samples', type=int, default=1000,
                    help='The number of samples to generate')
    ap.add_argument('--classes', type=int, default=10,
                    help='The number of classes')
    ap.add_argument('--density', type=float, default=.2,
                    help='The average density of labels per sample')
    ap.add_argument('--plot', choices=['classes', 'density', 'samples'],
                    default=None,
                    help='Plot time with respect to this parameter varying '
                    'up to the specified value')
    ap.add_argument('--n-steps', default=10, type=int,
                    help='Plot this many points for each metric')
    ap.add_argument('--n-times',
                    default=5, type=int,
                    help="Time performance over n_times trials")
    args = ap.parse_args()
    # When plotting, replace the chosen scalar parameter with a range of
    # n_steps values so benchmark() produces one timing per step.
    if args.plot is not None:
        max_val = getattr(args, args.plot)
        if args.plot in ('classes', 'samples'):
            min_val = 2
        else:
            min_val = 0
        steps = np.linspace(min_val, max_val, num=args.n_steps + 1)[1:]
        if args.plot in ('classes', 'samples'):
            # Counts must be unique integers.
            steps = np.unique(np.round(steps).astype(int))
        setattr(args, args.plot, steps)
    if args.metrics is None:
        args.metrics = sorted(METRICS)
    if args.formats is None:
        args.formats = sorted(FORMATS)
    results = benchmark([METRICS[k] for k in args.metrics],
                        [FORMATS[k] for k in args.formats],
                        args.samples, args.classes, args.density,
                        args.n_times)
    _tabulate(results, args.metrics, args.formats)
    if args.plot is not None:
        print('Displaying plot', file=sys.stderr)
        title = ('Multilabel metrics with %s' %
                 ', '.join('{0}={1}'.format(field, getattr(args, field))
                           for field in ['samples', 'classes', 'density']
                           if args.plot != field))
        _plot(results, args.metrics, args.formats, title, steps, args.plot)
| bsd-3-clause |
UNR-AERIAL/scikit-learn | examples/semi_supervised/plot_label_propagation_digits.py | 268 | 2723 | """
===================================================
Label Propagation digits: Demonstrating performance
===================================================
This example demonstrates the power of semisupervised learning by
training a Label Spreading model to classify handwritten digits
with sets of very few labels.
The handwritten digit dataset has 1797 total points. The model will
be trained using all points, but only 30 will be labeled. Results
in the form of a confusion matrix and a series of metrics over each
class will be very good.
At the end, the top 10 most uncertain predictions will be shown.
"""
print(__doc__)
# Authors: Clay Woolam <clay@woolam.org>
# Licence: BSD
import numpy as np
import matplotlib.pyplot as plt
from scipy import stats
from sklearn import datasets
from sklearn.semi_supervised import label_propagation
from sklearn.metrics import confusion_matrix, classification_report
digits = datasets.load_digits()
rng = np.random.RandomState(0)
indices = np.arange(len(digits.data))
rng.shuffle(indices)
# Use only the first 330 shuffled points.
X = digits.data[indices[:330]]
y = digits.target[indices[:330]]
images = digits.images[indices[:330]]
n_total_samples = len(y)
n_labeled_points = 30
indices = np.arange(n_total_samples)
unlabeled_set = indices[n_labeled_points:]
# shuffle everything around
y_train = np.copy(y)
y_train[unlabeled_set] = -1  # -1 marks a point as unlabeled for the model
###############################################################################
# Learn with LabelSpreading
lp_model = label_propagation.LabelSpreading(gamma=0.25, max_iter=5)
lp_model.fit(X, y_train)
predicted_labels = lp_model.transduction_[unlabeled_set]
true_labels = y[unlabeled_set]
cm = confusion_matrix(true_labels, predicted_labels, labels=lp_model.classes_)
print("Label Spreading model: %d labeled & %d unlabeled points (%d total)" %
      (n_labeled_points, n_total_samples - n_labeled_points, n_total_samples))
print(classification_report(true_labels, predicted_labels))
print("Confusion matrix")
print(cm)
# calculate uncertainty values for each transduced distribution
pred_entropies = stats.distributions.entropy(lp_model.label_distributions_.T)
# pick the top 10 most uncertain labels
uncertainty_index = np.argsort(pred_entropies)[-10:]
###############################################################################
# plot
f = plt.figure(figsize=(7, 5))
for index, image_index in enumerate(uncertainty_index):
    image = images[image_index]
    sub = f.add_subplot(2, 5, index + 1)
    sub.imshow(image, cmap=plt.cm.gray_r)
    plt.xticks([])
    plt.yticks([])
    sub.set_title('predict: %i\ntrue: %i' % (
        lp_model.transduction_[image_index], y[image_index]))
f.suptitle('Learning with small amount of labeled data')
plt.show()
| bsd-3-clause |
afandria/mojo | third_party/libevent/event_rpcgen.py | 296 | 45502 | #!/usr/bin/env python
#
# Copyright (c) 2005 Niels Provos <provos@citi.umich.edu>
# All rights reserved.
#
# Generates marshaling code based on libevent.
import sys
import re
#
_NAME = "event_rpcgen.py"
_VERSION = "0.1"
# Pattern a struct name must match: lowercase identifier.
_STRUCT_RE = '[a-z][a-z_0-9]*'
# Globals
# Current input line number, used in error messages.
line_count = 0
white = re.compile(r'^\s+')
cppcomment = re.compile(r'\/\/.*$')
# Verbatim passthrough lines destined for the header / C file.
headerdirect = []
cppdirect = []
# Holds everything that makes a struct
class Struct:
    def __init__(self, name):
        """Create an empty struct definition named *name*."""
        self._name = name
        self._entries = []
        # Maps tag number -> entry name; used to reject duplicate tags.
        self._tags = {}
        print >>sys.stderr, ' Created struct: %s' % name
    def AddEntry(self, entry):
        """Append *entry*; aborts the program on a duplicate tag number."""
        if self._tags.has_key(entry.Tag()):
            print >>sys.stderr, ( 'Entry "%s" duplicates tag number '
                                  '%d from "%s" around line %d' ) % (
                entry.Name(), entry.Tag(),
                self._tags[entry.Tag()], line_count)
            sys.exit(1)
        self._entries.append(entry)
        self._tags[entry.Tag()] = entry.Name()
        print >>sys.stderr, ' Added entry: %s' % entry.Name()
    def Name(self):
        """Return the struct's name."""
        return self._name
    def EntryTagName(self, entry):
        """Creates the name inside an enumeration for distinguishing data
        types."""
        # e.g. struct "msg" + entry "count" -> "MSG_COUNT".
        name = "%s_%s" % (self._name, entry.Name())
        return name.upper()
    def PrintIdented(self, file, ident, code):
        """Takes an array, add indentation to each entry and prints it."""
        # NOTE(review): "Idented" (sic) — renaming would break callers.
        for entry in code:
            print >>file, '%s%s' % (ident, entry)
    def PrintTags(self, file):
        """Prints the tag definitions for a structure."""
        print >>file, '/* Tag definition for %s */' % self._name
        print >>file, 'enum %s_ {' % self._name.lower()
        for entry in self._entries:
            print >>file, ' %s=%d,' % (self.EntryTagName(entry),
                                       entry.Tag())
        # Sentinel value: one past the highest used tag.
        print >>file, ' %s_MAX_TAGS' % (self._name.upper())
        print >>file, '};\n'
    def PrintForwardDeclaration(self, file):
        """Emit a C forward declaration for the struct."""
        print >>file, 'struct %s;' % self._name
    def PrintDeclaration(self, file):
        """Emit the C accessor vtable, the struct itself and the function
        prototypes for this struct into *file* (a header)."""
        print >>file, '/* Structure declaration for %s */' % self._name
        # Accessor table: one assign/get (and add, for arrays) pointer per entry.
        print >>file, 'struct %s_access_ {' % self._name
        for entry in self._entries:
            dcl = entry.AssignDeclaration('(*%s_assign)' % entry.Name())
            dcl.extend(
                entry.GetDeclaration('(*%s_get)' % entry.Name()))
            if entry.Array():
                dcl.extend(
                    entry.AddDeclaration('(*%s_add)' % entry.Name()))
            self.PrintIdented(file, ' ', dcl)
        print >>file, '};\n'
        # The struct: base vtable pointer, the fields, then a _set flag per field.
        print >>file, 'struct %s {' % self._name
        print >>file, ' struct %s_access_ *base;\n' % self._name
        for entry in self._entries:
            dcl = entry.Declaration()
            self.PrintIdented(file, ' ', dcl)
        print >>file, ''
        for entry in self._entries:
            print >>file, ' ev_uint8_t %s_set;' % entry.Name()
        print >>file, '};\n'
        # Lifecycle and (un)marshaling prototypes.
        print >>file, \
"""struct %(name)s *%(name)s_new(void);
void %(name)s_free(struct %(name)s *);
void %(name)s_clear(struct %(name)s *);
void %(name)s_marshal(struct evbuffer *, const struct %(name)s *);
int %(name)s_unmarshal(struct %(name)s *, struct evbuffer *);
int %(name)s_complete(struct %(name)s *);
void evtag_marshal_%(name)s(struct evbuffer *, ev_uint32_t,
const struct %(name)s *);
int evtag_unmarshal_%(name)s(struct evbuffer *, ev_uint32_t,
struct %(name)s *);""" % { 'name' : self._name }
        # Write a setting function of every variable
        for entry in self._entries:
            self.PrintIdented(file, '', entry.AssignDeclaration(
                entry.AssignFuncName()))
            self.PrintIdented(file, '', entry.GetDeclaration(
                entry.GetFuncName()))
            if entry.Array():
                self.PrintIdented(file, '', entry.AddDeclaration(
                    entry.AddFuncName()))
        print >>file, '/* --- %s done --- */\n' % self._name
    def PrintCode(self, file):
        """Emit the full C implementation for this struct into the .gen.c
        file: the accessor vtable instance, constructor, per-field
        add/assign/get functions, clear/free, marshal/unmarshal,
        completeness check, and the tagged evtag_* wrappers."""
        print >>file, ('/*\n'
                       ' * Implementation of %s\n'
                       ' */\n') % self._name

        # Static vtable instance wired to the generated accessors.
        print >>file, \
              'static struct %(name)s_access_ __%(name)s_base = {' % \
              { 'name' : self._name }
        for entry in self._entries:
            self.PrintIdented(file, '  ', entry.CodeBase())
        print >>file, '};\n'

        # Creation
        print >>file, (
            'struct %(name)s *\n'
            '%(name)s_new(void)\n'
            '{\n'
            '  struct %(name)s *tmp;\n'
            '  if ((tmp = malloc(sizeof(struct %(name)s))) == NULL) {\n'
            '    event_warn("%%s: malloc", __func__);\n'
            '    return (NULL);\n'
            '  }\n'
            '  tmp->base = &__%(name)s_base;\n') % { 'name' : self._name }

        # Initialize every field and mark it unset.
        for entry in self._entries:
            self.PrintIdented(file, '  ', entry.CodeNew('tmp'))
            print >>file, '  tmp->%s_set = 0;\n' % entry.Name()

        print >>file, (
            '  return (tmp);\n'
            '}\n')

        # Adding
        for entry in self._entries:
            if entry.Array():
                self.PrintIdented(file, '', entry.CodeAdd())
            print >>file, ''

        # Assigning
        for entry in self._entries:
            self.PrintIdented(file, '', entry.CodeAssign())
            print >>file, ''

        # Getting
        for entry in self._entries:
            self.PrintIdented(file, '', entry.CodeGet())
            print >>file, ''

        # Clearing
        print >>file, ( 'void\n'
                        '%(name)s_clear(struct %(name)s *tmp)\n'
                        '{'
                        ) % { 'name' : self._name }
        for entry in self._entries:
            self.PrintIdented(file, '  ', entry.CodeClear('tmp'))
        print >>file, '}\n'

        # Freeing
        print >>file, ( 'void\n'
                        '%(name)s_free(struct %(name)s *tmp)\n'
                        '{'
                        ) % { 'name' : self._name }
        for entry in self._entries:
            self.PrintIdented(file, '  ', entry.CodeFree('tmp'))
        print >>file, ('  free(tmp);\n'
                       '}\n')

        # Marshaling
        print >>file, ('void\n'
                       '%(name)s_marshal(struct evbuffer *evbuf, '
                       'const struct %(name)s *tmp)'
                       '{') % { 'name' : self._name }
        for entry in self._entries:
            indent = '  '
            # Optional entries do not have to be set
            if entry.Optional():
                indent += '  '
                print >>file, '  if (tmp->%s_set) {' % entry.Name()
            self.PrintIdented(
                file, indent,
                entry.CodeMarshal('evbuf', self.EntryTagName(entry), 'tmp'))
            if entry.Optional():
                print >>file, '  }'
        print >>file, '}\n'

        # Unmarshaling: loop over tags, dispatch per field, reject
        # duplicate non-array fields and unknown tags.
        print >>file, ('int\n'
                       '%(name)s_unmarshal(struct %(name)s *tmp, '
                       ' struct evbuffer *evbuf)\n'
                       '{\n'
                       '  ev_uint32_t tag;\n'
                       '  while (EVBUFFER_LENGTH(evbuf) > 0) {\n'
                       '    if (evtag_peek(evbuf, &tag) == -1)\n'
                       '      return (-1);\n'
                       '    switch (tag) {\n'
                       ) % { 'name' : self._name }
        for entry in self._entries:
            print >>file, '      case %s:\n' % self.EntryTagName(entry)
            if not entry.Array():
                print >>file, (
                    '        if (tmp->%s_set)\n'
                    '          return (-1);'
                    ) % (entry.Name())
            self.PrintIdented(
                file, '        ',
                entry.CodeUnmarshal('evbuf',
                                    self.EntryTagName(entry), 'tmp'))
            print >>file, ( '        tmp->%s_set = 1;\n' % entry.Name() +
                            '        break;\n' )
        print >>file, ( '      default:\n'
                        '        return -1;\n'
                        '    }\n'
                        '  }\n' )
        # Check if it was decoded completely
        print >>file, ( '  if (%(name)s_complete(tmp) == -1)\n'
                        '    return (-1);'
                        ) % { 'name' : self._name }

        # Successfully decoded
        print >>file, ( '  return (0);\n'
                        '}\n')

        # Checking if a structure has all the required data
        print >>file, (
            'int\n'
            '%(name)s_complete(struct %(name)s *msg)\n'
            '{' ) % { 'name' : self._name }
        for entry in self._entries:
            self.PrintIdented(
                file, '  ',
                entry.CodeComplete('msg'))
        print >>file, (
            '  return (0);\n'
            '}\n' )

        # Complete message unmarshaling
        print >>file, (
            'int\n'
            'evtag_unmarshal_%(name)s(struct evbuffer *evbuf, '
            'ev_uint32_t need_tag, struct %(name)s *msg)\n'
            '{\n'
            '  ev_uint32_t tag;\n'
            '  int res = -1;\n'
            '\n'
            '  struct evbuffer *tmp = evbuffer_new();\n'
            '\n'
            '  if (evtag_unmarshal(evbuf, &tag, tmp) == -1'
            ' || tag != need_tag)\n'
            '    goto error;\n'
            '\n'
            '  if (%(name)s_unmarshal(msg, tmp) == -1)\n'
            '    goto error;\n'
            '\n'
            '  res = 0;\n'
            '\n'
            ' error:\n'
            '  evbuffer_free(tmp);\n'
            '  return (res);\n'
            '}\n' ) % { 'name' : self._name }

        # Complete message marshaling
        print >>file, (
            'void\n'
            'evtag_marshal_%(name)s(struct evbuffer *evbuf, ev_uint32_t tag, '
            'const struct %(name)s *msg)\n'
            '{\n'
            '  struct evbuffer *_buf = evbuffer_new();\n'
            '  assert(_buf != NULL);\n'
            '  evbuffer_drain(_buf, -1);\n'
            '  %(name)s_marshal(_buf, msg);\n'
            '  evtag_marshal(evbuf, tag, EVBUFFER_DATA(_buf), '
            'EVBUFFER_LENGTH(_buf));\n'
            '  evbuffer_free(_buf);\n'
            '}\n' ) % { 'name' : self._name }
class Entry:
    """Base class for one field of an RPC struct.

    Holds the field's parsed attributes (type, name, wire tag,
    optional/array flags) and provides the generic C code generation;
    subclasses override the type-specific pieces (declaration,
    marshal/unmarshal, cleanup).
    """

    def __init__(self, type, name, tag):
        self._type = type            # type keyword from the .rpc file
        self._name = name            # field name
        self._tag = int(tag)         # numeric wire tag
        self._ctype = type           # C type used in generated code
        self._optional = 0           # 1 if field may be left unset
        self._can_be_array = 0       # subclasses set 1 if 'array' is legal
        self._array = 0              # 1 if declared as an array
        self._line_count = -1        # source line, for error messages
        self._struct = None          # owning Struct, set via SetStruct()
        self._refname = None         # referenced struct name, if any

    def GetTranslation(self):
        """Return the substitution dict used by the %-templates."""
        return { "parent_name" : self._struct.Name(),
                 "name" : self._name,
                 "ctype" : self._ctype,
                 "refname" : self._refname
                 }

    def SetStruct(self, struct):
        self._struct = struct

    def LineCount(self):
        # SetLineCount() must have been called first.
        assert self._line_count != -1
        return self._line_count

    def SetLineCount(self, number):
        self._line_count = number

    def Array(self):
        return self._array

    def Optional(self):
        return self._optional

    def Tag(self):
        return self._tag

    def Name(self):
        return self._name

    def Type(self):
        return self._type

    def MakeArray(self, yes=1):
        self._array = yes

    def MakeOptional(self):
        self._optional = 1

    def GetFuncName(self):
        return '%s_%s_get' % (self._struct.Name(), self._name)

    def GetDeclaration(self, funcname):
        """Prototype for the getter function named *funcname*."""
        code = [ 'int %s(struct %s *, %s *);' % (
            funcname, self._struct.Name(), self._ctype ) ]
        return code

    def CodeGet(self):
        """Generic getter: returns -1 if the field was never set."""
        code = (
            'int',
            '%(parent_name)s_%(name)s_get(struct %(parent_name)s *msg, '
            '%(ctype)s *value)',
            '{',
            '  if (msg->%(name)s_set != 1)',
            '    return (-1);',
            '  *value = msg->%(name)s_data;',
            '  return (0);',
            '}' )
        code = '\n'.join(code)
        code = code % self.GetTranslation()
        return code.split('\n')

    def AssignFuncName(self):
        return '%s_%s_assign' % (self._struct.Name(), self._name)

    def AddFuncName(self):
        return '%s_%s_add' % (self._struct.Name(), self._name)

    def AssignDeclaration(self, funcname):
        """Prototype for the setter function named *funcname*."""
        code = [ 'int %s(struct %s *, const %s);' % (
            funcname, self._struct.Name(), self._ctype ) ]
        return code

    def CodeAssign(self):
        """Generic setter: copies the value and marks the field set."""
        code = [ 'int',
                 '%(parent_name)s_%(name)s_assign(struct %(parent_name)s *msg,'
                 ' const %(ctype)s value)',
                 '{',
                 '  msg->%(name)s_set = 1;',
                 '  msg->%(name)s_data = value;',
                 '  return (0);',
                 '}' ]
        code = '\n'.join(code)
        code = code % self.GetTranslation()
        return code.split('\n')

    def CodeClear(self, structname):
        """Reset the field's set flag (subclasses also free/zero data)."""
        code = [ '%s->%s_set = 0;' % (structname, self.Name()) ]
        return code

    def CodeComplete(self, structname):
        """Check that a required (non-optional) field was set."""
        if self.Optional():
            return []
        code = [ 'if (!%s->%s_set)' % (structname, self.Name()),
                 '  return (-1);' ]
        return code

    def CodeFree(self, name):
        # Plain value fields own no heap memory.
        return []

    def CodeBase(self):
        """Vtable initializer entries for this field's accessors."""
        code = [
            '%(parent_name)s_%(name)s_assign,',
            '%(parent_name)s_%(name)s_get,'
            ]
        if self.Array():
            code.append('%(parent_name)s_%(name)s_add,')
        code = '\n'.join(code)
        code = code % self.GetTranslation()
        return code.split('\n')

    def Verify(self):
        """Validate flag combinations; exits with a message on error."""
        if self.Array() and not self._can_be_array:
            print >>sys.stderr, (
                'Entry "%s" cannot be created as an array '
                'around line %d' ) % (self._name, self.LineCount())
            sys.exit(1)
        if not self._struct:
            print >>sys.stderr, (
                'Entry "%s" does not know which struct it belongs to '
                'around line %d' ) % (self._name, self.LineCount())
            sys.exit(1)
        if self._optional and self._array:
            print >>sys.stderr, ( 'Entry "%s" has illegal combination of '
                                  'optional and array around line %d' ) % (
                self._name, self.LineCount() )
            sys.exit(1)
class EntryBytes(Entry):
    """Fixed-length byte-array field: 'bytes name[N]' in the .rpc file."""

    def __init__(self, type, name, tag, length):
        # Init base class
        Entry.__init__(self, type, name, tag)

        self._length = length        # fixed buffer length (string or macro)
        self._ctype = 'ev_uint8_t'

    def GetDeclaration(self, funcname):
        # Getter hands back a pointer into the embedded buffer.
        code = [ 'int %s(struct %s *, %s **);' % (
            funcname, self._struct.Name(), self._ctype ) ]
        return code

    def AssignDeclaration(self, funcname):
        code = [ 'int %s(struct %s *, const %s *);' % (
            funcname, self._struct.Name(), self._ctype ) ]
        return code

    def Declaration(self):
        dcl = ['ev_uint8_t %s_data[%s];' % (self._name, self._length)]

        return dcl

    def CodeGet(self):
        """Getter: exposes the embedded buffer by pointer."""
        name = self._name
        code = [ 'int',
                 '%s_%s_get(struct %s *msg, %s **value)' % (
            self._struct.Name(), name,
            self._struct.Name(), self._ctype),
                 '{',
                 '  if (msg->%s_set != 1)' % name,
                 '    return (-1);',
                 '  *value = msg->%s_data;' % name,
                 '  return (0);',
                 '}' ]
        return code

    def CodeAssign(self):
        """Setter: memcpy the caller's buffer into the fixed array."""
        name = self._name
        code = [ 'int',
                 '%s_%s_assign(struct %s *msg, const %s *value)' % (
            self._struct.Name(), name,
            self._struct.Name(), self._ctype),
                 '{',
                 '  msg->%s_set = 1;' % name,
                 '  memcpy(msg->%s_data, value, %s);' % (
            name, self._length),
                 '  return (0);',
                 '}' ]
        return code

    def CodeUnmarshal(self, buf, tag_name, var_name):
        # Fixed-size payload: decode directly into the embedded buffer.
        code = [ 'if (evtag_unmarshal_fixed(%s, %s, ' % (buf, tag_name) +
                 '%s->%s_data, ' % (var_name, self._name) +
                 'sizeof(%s->%s_data)) == -1) {' % (
            var_name, self._name),
                 '  event_warnx("%%s: failed to unmarshal %s", __func__);' % (
            self._name ),
                 '  return (-1);',
                 '}'
                 ]
        return code

    def CodeMarshal(self, buf, tag_name, var_name):
        code = ['evtag_marshal(%s, %s, %s->%s_data, sizeof(%s->%s_data));' % (
            buf, tag_name, var_name, self._name, var_name, self._name )]
        return code

    def CodeClear(self, structname):
        # Also zero the buffer so stale data cannot leak out.
        code = [ '%s->%s_set = 0;' % (structname, self.Name()),
                 'memset(%s->%s_data, 0, sizeof(%s->%s_data));' % (
            structname, self._name, structname, self._name)]
        return code

    def CodeNew(self, name):
        code  = ['memset(%s->%s_data, 0, sizeof(%s->%s_data));' % (
            name, self._name, name, self._name)]
        return code

    def Verify(self):
        """A fixed byte array must carry an explicit length."""
        if not self._length:
            print >>sys.stderr, 'Entry "%s" needs a length around line %d' % (
                self._name, self.LineCount() )
            sys.exit(1)

        Entry.Verify(self)
class EntryInt(Entry):
    """Field holding a single 32-bit unsigned integer ('int' type)."""

    def __init__(self, type, name, tag):
        Entry.__init__(self, type, name, tag)
        self._ctype = 'ev_uint32_t'

    def CodeUnmarshal(self, buf, tag_name, var_name):
        """C snippet that decodes this integer from the buffer."""
        lines = []
        lines.append('if (evtag_unmarshal_int(%s, %s, &%s->%s_data) == -1) {' % (
            buf, tag_name, var_name, self._name))
        lines.append('  event_warnx("%%s: failed to unmarshal %s", __func__);' % (
            self._name))
        lines.append('  return (-1);')
        lines.append('}')
        return lines

    def CodeMarshal(self, buf, tag_name, var_name):
        """C snippet that encodes this integer into the buffer."""
        return ['evtag_marshal_int(%s, %s, %s->%s_data);' % (
            buf, tag_name, var_name, self._name)]

    def Declaration(self):
        return ['ev_uint32_t %s_data;' % self._name]

    def CodeNew(self, name):
        # Integers start out as zero in a freshly created struct.
        return ['%s->%s_data = 0;' % (name, self._name)]
class EntryString(Entry):
    """NUL-terminated string field; owns a strdup'ed heap buffer."""

    def __init__(self, type, name, tag):
        # Init base class
        Entry.__init__(self, type, name, tag)

        self._ctype = 'char *'

    def CodeAssign(self):
        """Setter: frees any previous value and strdup's the new one."""
        name = self._name
        code = """int
%(parent_name)s_%(name)s_assign(struct %(parent_name)s *msg,
    const %(ctype)s value)
{
  if (msg->%(name)s_data != NULL)
    free(msg->%(name)s_data);
  if ((msg->%(name)s_data = strdup(value)) == NULL)
    return (-1);
  msg->%(name)s_set = 1;
  return (0);
}""" % self.GetTranslation()

        return code.split('\n')

    def CodeUnmarshal(self, buf, tag_name, var_name):
        # evtag_unmarshal_string allocates the buffer for us.
        code = ['if (evtag_unmarshal_string(%s, %s, &%s->%s_data) == -1) {' % (
            buf, tag_name, var_name, self._name),
                '  event_warnx("%%s: failed to unmarshal %s", __func__);' % (
            self._name ),
                '  return (-1);',
                '}'
                ]
        return code

    def CodeMarshal(self, buf, tag_name, var_name):
        code = ['evtag_marshal_string(%s, %s, %s->%s_data);' % (
            buf, tag_name, var_name, self._name)]
        return code

    def CodeClear(self, structname):
        # Free the owned buffer before clearing the set flag.
        code = [ 'if (%s->%s_set == 1) {' % (structname, self.Name()),
                 '  free (%s->%s_data);' % (structname, self.Name()),
                 '  %s->%s_data = NULL;' % (structname, self.Name()),
                 '  %s->%s_set = 0;' % (structname, self.Name()),
                 '}'
                 ]
        return code

    def CodeNew(self, name):
        code  = ['%s->%s_data = NULL;' % (name, self._name)]
        return code

    def CodeFree(self, name):
        code  = ['if (%s->%s_data != NULL)' % (name, self._name),
                 '    free (%s->%s_data); ' % (name, self._name)]

        return code

    def Declaration(self):
        dcl  = ['char *%s_data;' % self._name]

        return dcl
class EntryStruct(Entry):
    """Field that embeds another generated struct ('struct[Name]').

    The embedded struct is heap-allocated and owned by the parent;
    assignment performs a deep copy via marshal/unmarshal round-trip.
    """

    def __init__(self, type, name, tag, refname):
        # Init base class
        Entry.__init__(self, type, name, tag)

        self._can_be_array = 1       # struct fields may be arrays
        self._refname = refname      # name of the referenced struct
        self._ctype = 'struct %s*' % refname

    def CodeGet(self):
        """Getter: lazily creates the child struct on first access."""
        name = self._name
        code = [ 'int',
                 '%s_%s_get(struct %s *msg, %s *value)' % (
            self._struct.Name(), name,
            self._struct.Name(), self._ctype),
                 '{',
                 '  if (msg->%s_set != 1) {' % name,
                 '    msg->%s_data = %s_new();' % (name, self._refname),
                 '    if (msg->%s_data == NULL)' % name,
                 '      return (-1);',
                 '    msg->%s_set = 1;' % name,
                 '  }',
                 '  *value = msg->%s_data;' % name,
                 '  return (0);',
                 '}' ]
        return code

    def CodeAssign(self):
        """Setter: deep-copies *value* by marshaling it into a temporary
        evbuffer and unmarshaling into our own instance."""
        name = self._name
        code = """int
%(parent_name)s_%(name)s_assign(struct %(parent_name)s *msg,
    const %(ctype)s value)
{
   struct evbuffer *tmp = NULL;
   if (msg->%(name)s_set) {
     %(refname)s_clear(msg->%(name)s_data);
     msg->%(name)s_set = 0;
   } else {
     msg->%(name)s_data = %(refname)s_new();
     if (msg->%(name)s_data == NULL) {
       event_warn("%%s: %(refname)s_new()", __func__);
       goto error;
     }
   }
   if ((tmp = evbuffer_new()) == NULL) {
     event_warn("%%s: evbuffer_new()", __func__);
     goto error;
   }
   %(refname)s_marshal(tmp, value);
   if (%(refname)s_unmarshal(msg->%(name)s_data, tmp) == -1) {
     event_warnx("%%s: %(refname)s_unmarshal", __func__);
     goto error;
   }
   msg->%(name)s_set = 1;
   evbuffer_free(tmp);
   return (0);
 error:
   if (tmp != NULL)
     evbuffer_free(tmp);
   if (msg->%(name)s_data != NULL) {
     %(refname)s_free(msg->%(name)s_data);
     msg->%(name)s_data = NULL;
   }
   return (-1);
}""" % self.GetTranslation()
        return code.split('\n')

    def CodeComplete(self, structname):
        """Recurse into the child struct's completeness check."""
        if self.Optional():
            code = [ 'if (%s->%s_set && %s_complete(%s->%s_data) == -1)' % (
                structname, self.Name(),
                self._refname, structname, self.Name()),
                     '  return (-1);' ]
        else:
            code = [ 'if (%s_complete(%s->%s_data) == -1)' % (
                self._refname, structname, self.Name()),
                     '  return (-1);' ]
        return code

    def CodeUnmarshal(self, buf, tag_name, var_name):
        # Allocate a fresh child and delegate to its tagged unmarshaler.
        code = ['%s->%s_data = %s_new();' % (
            var_name, self._name, self._refname),
                'if (%s->%s_data == NULL)' % (var_name, self._name),
                '  return (-1);',
                'if (evtag_unmarshal_%s(%s, %s, %s->%s_data) == -1) {' % (
            self._refname, buf, tag_name, var_name, self._name),
                '  event_warnx("%%s: failed to unmarshal %s", __func__);' % (
            self._name ),
                '  return (-1);',
                '}'
                ]
        return code

    def CodeMarshal(self, buf, tag_name, var_name):
        code = ['evtag_marshal_%s(%s, %s, %s->%s_data);' % (
            self._refname, buf, tag_name, var_name, self._name)]
        return code

    def CodeClear(self, structname):
        # Free the owned child struct before clearing the set flag.
        code = [ 'if (%s->%s_set == 1) {' % (structname, self.Name()),
                 '  %s_free(%s->%s_data);' % (
            self._refname, structname, self.Name()),
                 '  %s->%s_data = NULL;' % (structname, self.Name()),
                 '  %s->%s_set = 0;' % (structname, self.Name()),
                 '}'
                 ]
        return code

    def CodeNew(self, name):
        code  = ['%s->%s_data = NULL;' % (name, self._name)]
        return code

    def CodeFree(self, name):
        code  = ['if (%s->%s_data != NULL)' % (name, self._name),
                 '    %s_free(%s->%s_data); ' % (
            self._refname, name, self._name)]

        return code

    def Declaration(self):
        dcl  = ['%s %s_data;' % (self._ctype, self._name)]

        return dcl
class EntryVarBytes(Entry):
    """Variable-length byte-array field ('bytes' with no fixed length).

    The generated struct keeps both a malloc'ed data pointer and a
    separate <name>_length member.
    """

    def __init__(self, type, name, tag):
        # Init base class
        Entry.__init__(self, type, name, tag)

        self._ctype = 'ev_uint8_t *'

    def GetDeclaration(self, funcname):
        # Getter returns both the pointer and the length.
        code = [ 'int %s(struct %s *, %s *, ev_uint32_t *);' % (
            funcname, self._struct.Name(), self._ctype ) ]
        return code

    def AssignDeclaration(self, funcname):
        code = [ 'int %s(struct %s *, const %s, ev_uint32_t);' % (
            funcname, self._struct.Name(), self._ctype ) ]
        return code

    def CodeAssign(self):
        """Setter: reallocates the owned buffer and copies *len* bytes."""
        name = self._name
        code = [ 'int',
                 '%s_%s_assign(struct %s *msg, '
                 'const %s value, ev_uint32_t len)' % (
            self._struct.Name(), name,
            self._struct.Name(), self._ctype),
                 '{',
                 '  if (msg->%s_data != NULL)' % name,
                 '    free (msg->%s_data);' % name,
                 '  msg->%s_data = malloc(len);' % name,
                 '  if (msg->%s_data == NULL)' % name,
                 '    return (-1);',
                 '  msg->%s_set = 1;' % name,
                 '  msg->%s_length = len;' % name,
                 '  memcpy(msg->%s_data, value, len);' % name,
                 '  return (0);',
                 '}' ]
        return code

    def CodeGet(self):
        name = self._name
        code = [ 'int',
                 '%s_%s_get(struct %s *msg, %s *value, ev_uint32_t *plen)' % (
            self._struct.Name(), name,
            self._struct.Name(), self._ctype),
                 '{',
                 '  if (msg->%s_set != 1)' % name,
                 '    return (-1);',
                 '  *value = msg->%s_data;' % name,
                 '  *plen = msg->%s_length;' % name,
                 '  return (0);',
                 '}' ]
        return code

    def CodeUnmarshal(self, buf, tag_name, var_name):
        code = ['if (evtag_payload_length(%s, &%s->%s_length) == -1)' % (
            buf, var_name, self._name),
                '  return (-1);',
                # We do not want DoS opportunities
                'if (%s->%s_length > EVBUFFER_LENGTH(%s))' % (
            var_name, self._name, buf),
                '  return (-1);',
                'if ((%s->%s_data = malloc(%s->%s_length)) == NULL)' % (
            var_name, self._name, var_name, self._name),
                '  return (-1);',
                'if (evtag_unmarshal_fixed(%s, %s, %s->%s_data, '
                '%s->%s_length) == -1) {' % (
            buf, tag_name, var_name, self._name, var_name, self._name),
                '  event_warnx("%%s: failed to unmarshal %s", __func__);' % (
            self._name ),
                '  return (-1);',
                '}'
                ]
        return code

    def CodeMarshal(self, buf, tag_name, var_name):
        code = ['evtag_marshal(%s, %s, %s->%s_data, %s->%s_length);' % (
            buf, tag_name, var_name, self._name, var_name, self._name)]
        return code

    def CodeClear(self, structname):
        # Free the owned buffer and reset both pointer and length.
        code = [ 'if (%s->%s_set == 1) {' % (structname, self.Name()),
                 '  free (%s->%s_data);' % (structname, self.Name()),
                 '  %s->%s_data = NULL;' % (structname, self.Name()),
                 '  %s->%s_length = 0;' % (structname, self.Name()),
                 '  %s->%s_set = 0;' % (structname, self.Name()),
                 '}'
                 ]
        return code

    def CodeNew(self, name):
        code  = ['%s->%s_data = NULL;' % (name, self._name),
                 '%s->%s_length = 0;' % (name, self._name) ]
        return code

    def CodeFree(self, name):
        code  = ['if (%s->%s_data != NULL)' % (name, self._name),
                 '    free (%s->%s_data); ' % (name, self._name)]

        return code

    def Declaration(self):
        dcl  = ['ev_uint8_t *%s_data;' % self._name,
                'ev_uint32_t %s_length;' % self._name]

        return dcl
class EntryArray(Entry):
    """Wrapper that turns a struct-typed entry into a growable array.

    The generated struct keeps a pointer array (<name>_data), the used
    length (<name>_length) and the allocated capacity
    (<name>_num_allocated); the _add accessor grows the array by
    doubling.
    """

    def __init__(self, entry):
        # Init base class with the wrapped entry's identity.
        Entry.__init__(self, entry._type, entry._name, entry._tag)

        self._entry = entry          # the wrapped per-element entry
        self._refname = entry._refname
        self._ctype = 'struct %s *' % self._refname

    def GetDeclaration(self, funcname):
        """Allows direct access to elements of the array."""
        translate = self.GetTranslation()
        translate["funcname"] = funcname
        code = [
            'int %(funcname)s(struct %(parent_name)s *, int, %(ctype)s *);' %
            translate ]
        return code

    def AssignDeclaration(self, funcname):
        code = [ 'int %s(struct %s *, int, const %s);' % (
            funcname, self._struct.Name(), self._ctype ) ]
        return code

    def AddDeclaration(self, funcname):
        # _add returns the freshly appended element (or NULL).
        code = [ '%s %s(struct %s *);' % (
            self._ctype, funcname, self._struct.Name() ) ]
        return code

    def CodeGet(self):
        """Getter: bounds-checked access to one array element."""
        code = """int
%(parent_name)s_%(name)s_get(struct %(parent_name)s *msg, int offset,
    %(ctype)s *value)
{
  if (!msg->%(name)s_set || offset < 0 || offset >= msg->%(name)s_length)
    return (-1);
  *value = msg->%(name)s_data[offset];
  return (0);
}""" % self.GetTranslation()

        return code.split('\n')

    def CodeAssign(self):
        """Setter: deep-copies *value* into an existing slot via a
        marshal/unmarshal round-trip through a temporary evbuffer."""
        code = """int
%(parent_name)s_%(name)s_assign(struct %(parent_name)s *msg, int off,
    const %(ctype)s value)
{
  struct evbuffer *tmp = NULL;
  if (!msg->%(name)s_set || off < 0 || off >= msg->%(name)s_length)
    return (-1);
  %(refname)s_clear(msg->%(name)s_data[off]);
  if ((tmp = evbuffer_new()) == NULL) {
    event_warn("%%s: evbuffer_new()", __func__);
    goto error;
  }
  %(refname)s_marshal(tmp, value);
  if (%(refname)s_unmarshal(msg->%(name)s_data[off], tmp) == -1) {
    event_warnx("%%s: %(refname)s_unmarshal", __func__);
    goto error;
  }
  evbuffer_free(tmp);
  return (0);
error:
  if (tmp != NULL)
    evbuffer_free(tmp);
  %(refname)s_clear(msg->%(name)s_data[off]);
  return (-1);
}""" % self.GetTranslation()

        return code.split('\n')

    def CodeAdd(self):
        """Append a new element, doubling the capacity when needed."""
        code = \
"""%(ctype)s
%(parent_name)s_%(name)s_add(struct %(parent_name)s *msg)
{
  if (++msg->%(name)s_length >= msg->%(name)s_num_allocated) {
    int tobe_allocated = msg->%(name)s_num_allocated;
    %(ctype)s* new_data = NULL;
    tobe_allocated = !tobe_allocated ? 1 : tobe_allocated << 1;
    new_data = (%(ctype)s*) realloc(msg->%(name)s_data,
        tobe_allocated * sizeof(%(ctype)s));
    if (new_data == NULL)
      goto error;
    msg->%(name)s_data = new_data;
    msg->%(name)s_num_allocated = tobe_allocated;
  }
  msg->%(name)s_data[msg->%(name)s_length - 1] = %(refname)s_new();
  if (msg->%(name)s_data[msg->%(name)s_length - 1] == NULL)
    goto error;
  msg->%(name)s_set = 1;
  return (msg->%(name)s_data[msg->%(name)s_length - 1]);
error:
  --msg->%(name)s_length;
  return (NULL);
}
""" % self.GetTranslation()

        return code.split('\n')

    def CodeComplete(self, structname):
        """Completeness check: recurse into every element."""
        code = []
        translate = self.GetTranslation()
        # BUG FIX: 'structname' must be added to the translation dict
        # *before* it is used; previously the optional guard below
        # raised KeyError on '%(structname)s'.  (Unreachable in normal
        # operation because Verify() rejects optional arrays, but wrong
        # nonetheless.)
        translate["structname"] = structname
        if self.Optional():
            code.append( 'if (%(structname)s->%(name)s_set)' % translate)

        tmp = """{
  int i;
  for (i = 0; i < %(structname)s->%(name)s_length; ++i) {
    if (%(refname)s_complete(%(structname)s->%(name)s_data[i]) == -1)
      return (-1);
  }
}""" % translate

        code.extend(tmp.split('\n'))

        return code

    def CodeUnmarshal(self, buf, tag_name, var_name):
        """Decode one element: append a slot, then unmarshal into it."""
        translate = self.GetTranslation()
        translate["var_name"] = var_name
        translate["buf"] = buf
        translate["tag_name"] = tag_name
        code = """if (%(parent_name)s_%(name)s_add(%(var_name)s) == NULL)
  return (-1);
if (evtag_unmarshal_%(refname)s(%(buf)s, %(tag_name)s,
  %(var_name)s->%(name)s_data[%(var_name)s->%(name)s_length - 1]) == -1) {
  --%(var_name)s->%(name)s_length;
  event_warnx("%%s: failed to unmarshal %(name)s", __func__);
  return (-1);
}""" % translate

        return code.split('\n')

    def CodeMarshal(self, buf, tag_name, var_name):
        # Marshal every element under the same tag.
        code = ['{',
                '  int i;',
                '  for (i = 0; i < %s->%s_length; ++i) {' % (
            var_name, self._name),
                '    evtag_marshal_%s(%s, %s, %s->%s_data[i]);' % (
            self._refname, buf, tag_name, var_name, self._name),
                '  }',
                '}'
                ]
        return code

    def CodeClear(self, structname):
        # Free every element, then the pointer array itself.
        code = [ 'if (%s->%s_set == 1) {' % (structname, self.Name()),
                 '  int i;',
                 '  for (i = 0; i < %s->%s_length; ++i) {' % (
            structname, self.Name()),
                 '    %s_free(%s->%s_data[i]);' % (
            self._refname, structname, self.Name()),
                 '  }',
                 '  free(%s->%s_data);' % (structname, self.Name()),
                 '  %s->%s_data = NULL;' % (structname, self.Name()),
                 '  %s->%s_set = 0;' % (structname, self.Name()),
                 '  %s->%s_length = 0;' % (structname, self.Name()),
                 '  %s->%s_num_allocated = 0;' % (structname, self.Name()),
                 '}'
                 ]
        return code

    def CodeNew(self, name):
        code = ['%s->%s_data = NULL;' % (name, self._name),
                '%s->%s_length = 0;' % (name, self._name),
                '%s->%s_num_allocated = 0;' % (name, self._name)]
        return code

    def CodeFree(self, name):
        code = ['if (%s->%s_data != NULL) {' % (name, self._name),
                '  int i;',
                '  for (i = 0; i < %s->%s_length; ++i) {' % (
            name, self._name),
                '    %s_free(%s->%s_data[i]); ' % (
            self._refname, name, self._name),
                '    %s->%s_data[i] = NULL;' % (name, self._name),
                '  }',
                '  free(%s->%s_data);' % (name, self._name),
                '  %s->%s_data = NULL;' % (name, self._name),
                '  %s->%s_length = 0;' % (name, self._name),
                '  %s->%s_num_allocated = 0;' % (name, self._name),
                '}'
                ]
        return code

    def Declaration(self):
        dcl  = ['struct %s **%s_data;' % (self._refname, self._name),
                'int %s_length;' % self._name,
                'int %s_num_allocated;' % self._name ]

        return dcl
def NormalizeLine(line):
    """Strip C++-style '//' comments from *line*, trim surrounding
    whitespace, and collapse internal whitespace runs to single spaces."""
    global white
    global cppcomment

    stripped = cppcomment.sub('', line).strip()
    return white.sub(' ', stripped)
def ProcessOneEntry(newstruct, entry):
    """Parse one normalized field declaration ('[optional|array] type
    name[len] = tag') and add the resulting Entry to *newstruct*.

    Returns a list of additional structs created while processing the
    entry (currently always empty).  Exits with a message on any parse
    error.
    """
    optional = 0
    array = 0
    entry_type = ''
    name = ''
    tag = ''
    tag_set = None
    separator = ''
    fixed_length = ''

    # Consume tokens left to right; each 'if not X' clause fills the
    # next still-empty slot of the grammar.
    tokens = entry.split(' ')
    while tokens:
        token = tokens[0]
        tokens = tokens[1:]

        if not entry_type:
            if not optional and token == 'optional':
                optional = 1
                continue

            if not array and token == 'array':
                array = 1
                continue

        if not entry_type:
            entry_type = token
            continue

        if not name:
            # Name may carry an optional fixed length: 'name[16]'.
            res = re.match(r'^([^\[\]]+)(\[.*\])?$', token)
            if not res:
                print >>sys.stderr, 'Cannot parse name: \"%s\" around %d' % (
                    entry, line_count)
                sys.exit(1)
            name = res.group(1)
            fixed_length = res.group(2)
            if fixed_length:
                fixed_length = fixed_length[1:-1]
            continue

        if not separator:
            separator = token
            if separator != '=':
                print >>sys.stderr, 'Expected "=" after name \"%s\" got %s' % (
                    name, token)
                sys.exit(1)
            continue

        if not tag_set:
            tag_set = 1
            if not re.match(r'^(0x)?[0-9]+$', token):
                print >>sys.stderr, 'Expected tag number: \"%s\"' % entry
                sys.exit(1)
            tag = int(token, 0)
            continue

        print >>sys.stderr, 'Cannot parse \"%s\"' % entry
        sys.exit(1)

    if not tag_set:
        print >>sys.stderr, 'Need tag number: \"%s\"' % entry
        sys.exit(1)

    # Create the right entry
    if entry_type == 'bytes':
        if fixed_length:
            newentry = EntryBytes(entry_type, name, tag, fixed_length)
        else:
            newentry = EntryVarBytes(entry_type, name, tag)
    elif entry_type == 'int' and not fixed_length:
        newentry = EntryInt(entry_type, name, tag)
    elif entry_type == 'string' and not fixed_length:
        newentry = EntryString(entry_type, name, tag)
    else:
        res = re.match(r'^struct\[(%s)\]$' % _STRUCT_RE,
                       entry_type, re.IGNORECASE)
        if res:
            # References another struct defined in our file
            newentry = EntryStruct(entry_type, name, tag, res.group(1))
        else:
            print >>sys.stderr, 'Bad type: "%s" in "%s"' % (entry_type, entry)
            sys.exit(1)

    structs = []

    if optional:
        newentry.MakeOptional()
    if array:
        newentry.MakeArray()

    newentry.SetStruct(newstruct)
    newentry.SetLineCount(line_count)
    newentry.Verify()

    if array:
        # We need to encapsulate this entry into an array wrapper.
        # (Removed an unused 'newname' local that was computed here but
        # never referenced.)
        newentry = EntryArray(newentry)
        newentry.SetStruct(newstruct)
        newentry.SetLineCount(line_count)
        newentry.MakeArray()

    newstruct.AddEntry(newentry)

    return structs
def ProcessStruct(data):
    """Turn one normalized 'struct Name { ... }' blob into Struct
    objects and return them (the struct itself plus any structs created
    while processing its entries)."""
    tokens = data.split(' ')

    # First three tokens are: 'struct' 'name' '{'
    newstruct = Struct(tokens[1])
    inside = ' '.join(tokens[3:-1])

    structs = []
    for raw_entry in inside.split(';'):
        cleaned = NormalizeLine(raw_entry)
        if not cleaned:
            continue
        # It's possible that new structs get defined in here
        structs.extend(ProcessOneEntry(newstruct, cleaned))

    structs.append(newstruct)
    return structs
def GetNextStruct(file):
    """Read from *file* until one whole 'struct X { ... }' definition has
    been collected, and return it as a single normalized string ('' at
    EOF).  C comments are stripped; preprocessor lines seen between
    structs are accumulated into the cppdirect/headerdirect globals."""
    global line_count
    global cppdirect

    got_struct = 0
    processed_lines = []
    have_c_comment = 0
    data = ''

    while 1:
        line = file.readline()
        if not line:
            break

        line_count += 1
        line = line[:-1]          # drop the trailing newline

        # Strip C comments, including ones spanning multiple lines.
        if not have_c_comment and re.search(r'/\*', line):
            if re.search(r'/\*.*\*/', line):
                line = re.sub(r'/\*.*\*/', '', line)
            else:
                line = re.sub(r'/\*.*$', '', line)
                have_c_comment = 1

        if have_c_comment:
            if not re.search(r'\*/', line):
                continue
            have_c_comment = 0
            line = re.sub(r'^.*\*/', '', line)

        line = NormalizeLine(line)

        if not line:
            continue

        if not got_struct:
            # Outside a struct: collect preprocessor directives for the
            # generated files, or match the opening 'struct X {' line.
            if re.match(r'#include ["<].*[>"]', line):
                cppdirect.append(line)
                continue

            if re.match(r'^#(if( |def)|endif)', line):
                cppdirect.append(line)
                continue

            if re.match(r'^#define', line):
                headerdirect.append(line)
                continue

            if not re.match(r'^struct %s {$' % _STRUCT_RE,
                            line, re.IGNORECASE):
                print >>sys.stderr, 'Missing struct on line %d: %s' % (
                    line_count, line)
                sys.exit(1)
            else:
                got_struct = 1
                data += line
            continue

        # We are inside the struct
        tokens = line.split('}')
        if len(tokens) == 1:
            data += ' ' + line
            continue

        if len(tokens[1]):
            print >>sys.stderr, 'Trailing garbage after struct on line %d' % (
                line_count )
            sys.exit(1)

        # We found the end of the struct
        data += ' %s}' % tokens[0]
        break

    # Remove any comments, that might be in there
    data = re.sub(r'/\*.*\*/', '', data)

    return data
def Parse(file):
    """Read every struct definition from *file* and return the list of
    parsed Struct entities, in order of appearance."""
    entities = []

    # GetNextStruct returns one normalized struct at a time, '' at EOF.
    data = GetNextStruct(file)
    while data:
        entities.extend(ProcessStruct(data))
        data = GetNextStruct(file)

    return entities
def GuardName(name):
    """Derive the C include-guard macro for *name*: dots and slashes
    become underscores, uppercased, wrapped in underscores
    (e.g. 'a/b.rpc' -> '_A_B_RPC_')."""
    sanitized = name.replace('.', '_').replace('/', '_')
    return '_%s_' % sanitized.upper()
def HeaderPreamble(name):
    """Build the boilerplate that opens the generated .gen.h file:
    include guard, stdint include, collected #define directives, and
    the EVTAG_* accessor macros."""
    guard = GuardName(name)
    parts = []

    parts.append(
        '/*\n'
        ' * Automatically generated from %s\n'
        ' */\n\n'
        '#ifndef %s\n'
        '#define %s\n\n' % (name, guard, guard))

    # insert stdint.h - let's hope everyone has it
    parts.append(
        '#include <event-config.h>\n'
        '#ifdef _EVENT_HAVE_STDINT_H\n'
        '#include <stdint.h>\n'
        '#endif\n')

    # Forward any #define directives collected from the .rpc file.
    for statement in headerdirect:
        parts.append('%s\n' % statement)
    if headerdirect:
        parts.append('\n')

    parts.append(
        '#define EVTAG_HAS(msg, member) ((msg)->member##_set == 1)\n'
        '#ifdef __GNUC__\n'
        '#define EVTAG_ASSIGN(msg, member, args...) '
        '(*(msg)->base->member##_assign)(msg, ## args)\n'
        '#define EVTAG_GET(msg, member, args...) '
        '(*(msg)->base->member##_get)(msg, ## args)\n'
        '#else\n'
        '#define EVTAG_ASSIGN(msg, member, ...) '
        '(*(msg)->base->member##_assign)(msg, ## __VA_ARGS__)\n'
        '#define EVTAG_GET(msg, member, ...) '
        '(*(msg)->base->member##_get)(msg, ## __VA_ARGS__)\n'
        '#endif\n'
        '#define EVTAG_ADD(msg, member) (*(msg)->base->member##_add)(msg)\n'
        '#define EVTAG_LEN(msg, member) ((msg)->member##_length)\n')

    return ''.join(parts)
def HeaderPostamble(name):
    """Return the closing #endif for the generated header's guard."""
    return '#endif /* %s */' % GuardName(name)
def BodyPreamble(name):
    """Build the boilerplate that opens the generated .gen.c file:
    generator banner, system includes, collected preprocessor
    directives, the generated header include, and event_* prototypes."""
    global _NAME
    global _VERSION

    header_file = '.'.join(name.split('.')[:-1]) + '.gen.h'

    parts = []
    parts.append(('/*\n'
                  ' * Automatically generated from %s\n'
                  ' * by %s/%s. DO NOT EDIT THIS FILE.\n'
                  ' */\n\n') % (name, _NAME, _VERSION))
    parts.append('#include <sys/types.h>\n'
                 '#ifdef _EVENT_HAVE_SYS_TIME_H\n'
                 '#include <sys/time.h>\n'
                 '#endif\n'
                 '#include <stdlib.h>\n'
                 '#include <string.h>\n'
                 '#include <assert.h>\n'
                 '#define EVENT_NO_STRUCT\n'
                 '#include <event.h>\n\n'
                 '#ifdef _EVENT___func__\n'
                 '#define __func__ _EVENT___func__\n'
                 '#endif\n')

    # Forward #include/#if directives collected from the .rpc file.
    for statement in cppdirect:
        parts.append('%s\n' % statement)

    parts.append('\n#include "%s"\n\n' % header_file)

    # Prototypes for libevent's logging helpers used by generated code.
    parts.append('void event_err(int eval, const char *fmt, ...);\n')
    parts.append('void event_warn(const char *fmt, ...);\n')
    parts.append('void event_errx(int eval, const char *fmt, ...);\n')
    parts.append('void event_warnx(const char *fmt, ...);\n\n')

    return ''.join(parts)
def main(argv):
    """Entry point: read argv[1] (a .rpc description file) and write the
    generated <base>.gen.h and <base>.gen.c next to it."""
    if len(argv) < 2 or not argv[1]:
        print >>sys.stderr, 'Need RPC description file as first argument.'
        sys.exit(1)

    filename = argv[1]

    ext = filename.split('.')[-1]
    if ext != 'rpc':
        print >>sys.stderr, 'Unrecognized file extension: %s' % ext
        sys.exit(1)

    print >>sys.stderr, 'Reading \"%s\"' % filename

    fp = open(filename, 'r')
    entities = Parse(fp)
    fp.close()

    # Derive output file names by swapping the .rpc extension.
    header_file = '.'.join(filename.split('.')[:-1]) + '.gen.h'
    impl_file = '.'.join(filename.split('.')[:-1]) + '.gen.c'

    # NOTE(review): output files are not closed on an exception path;
    # acceptable for a short-lived code-generation script.
    print >>sys.stderr, '... creating "%s"' % header_file
    header_fp = open(header_file, 'w')
    print >>header_fp, HeaderPreamble(filename)

    # Create forward declarations: allows other structs to reference
    # each other
    for entry in entities:
        entry.PrintForwardDeclaration(header_fp)
    print >>header_fp, ''

    for entry in entities:
        entry.PrintTags(header_fp)
        entry.PrintDeclaration(header_fp)
    print >>header_fp, HeaderPostamble(filename)
    header_fp.close()

    print >>sys.stderr, '... creating "%s"' % impl_file
    impl_fp = open(impl_file, 'w')
    print >>impl_fp, BodyPreamble(filename)
    for entry in entities:
        entry.PrintCode(impl_fp)
    impl_fp.close()
# Script entry point: argv[1] must be the .rpc description file.
if __name__ == '__main__':
    main(sys.argv)
| bsd-3-clause |
topojoy/deepdive | util/ddext.py | 15 | 5552 | #! /usr/bin/env python
import sys, re
# Module-level state filled in by the extractor-definition callbacks
# below and consumed when generating the PL/Python function.
_libraries = [] # (X,Y,Z): from Y import X as Z
_input_names = []    # declared input column names
_input_types = []    # SQL types, parallel to _input_names
_return_names = []   # declared return column names
_return_types = []   # SQL types, parallel to _return_names
_run_func_content = ''   # extracted body of the extractor's run()
_init_func_content = ''  # extracted body of the extractor's init()
_ext_name = ''           # extractor (SQL function) name
# Add a library (should be installed on machines within GP)
# Sample Usage:
#   import_lib(X, Y, Z): from Y import X as Z
#   import_lib(X, Y):    from Y import X
#   import_lib(X, as_name=Z): import X as Z
#   import_lib(X):       import X
def import_lib(libname, from_package=None, as_name=None):
    """Record a library import for the generated PL/Python function."""
    global _libraries
    # Stored as (X,Y,Z) meaning: from Y import X as Z
    _libraries.append((libname, from_package, as_name))
# Add an input variable and its data type
def input(name, datatype):
    """Declare an input column *name* with SQL type *datatype*.

    Raises RuntimeError if the same name is declared twice.
    NOTE: intentionally shadows the builtin input(); kept for API
    compatibility with existing extractor definitions.
    """
    global _input_names, _input_types
    if name in _input_names:
        # Format the message explicitly; passing several arguments to
        # RuntimeError would render the message as a tuple.
        raise RuntimeError(
            'Input variable %s is added multiple times!' % name)
    _input_names.append(name)
    _input_types.append(datatype)
def returns(name, datatype):
    """Declare a return column *name* with SQL type *datatype*.

    Raises RuntimeError if the same name is declared twice.
    """
    global _return_names, _return_types
    if name in _return_names:
        # Format the message explicitly; passing several arguments to
        # RuntimeError would render the message as a tuple.
        raise RuntimeError(
            'Return variable %s is added multiple times!' % name)
    _return_names.append(name)
    _return_types.append(datatype)
# def debug_init(init):
# init()
# Match a single function with certain name
def _match_function(code, function_name):
    """Extract the body of 'def <function_name>...' from *code*.

    Assumes the def header is on a single line; the body runs up to the
    next top-level 'def' or the end of the string.
    """
    match = re.search(r'def\s+'+function_name+'.*\n.*', code, flags=re.DOTALL | re.M)
    func_full = match.group()
    # Body starts just past the ':' (or first newline) of the header.
    start_index = re.search(r':|\n', func_full).end() + 1
    end_index = re.search(r'\ndef|$', func_full).start() # TODO Up to the next function or to the end.
    func_content = func_full[start_index : end_index]
    # PL/Python functions cannot print to stdout; neutralize prints.
    if re.search(r'\bprint\b', func_content) != None:
        print >>sys.stderr, 'WARNING: cannot use print function in extractors. Commented out print sentences.'
        func_content = re.sub(r'\bprint\b', '# print', func_content)
    return func_content
# Heuristics:
# 1. def must be in a single line
def parse(ext_code, ext_name):
    """Extract the run() and init() bodies from the extractor source
    *ext_code* and remember them (plus *ext_name*) in module globals."""
    global _run_func_content, _init_func_content
    global _ext_name

    _run_func_content = _match_function(ext_code, 'run')
    _init_func_content = _match_function(ext_code, 'init')
    _ext_name = ext_name

    # (Removed dead code that re-ran the same regex extraction of the
    # init() body into an unused local; _match_function above already
    # performs it.)

    # This code seems not working....
    # # Do not allow 'x = 5' for input argument x's
    # for var in _input_names:
    #   badmatch = re.search(r'\b' + var + r'\b\s*=\s*\b', _run_func_content)
    #   if badmatch != None:
    #     print >>sys.stderr, 'WARNING: PlPy do not allow assignments to input arguments.'
    #     print >>sys.stderr, 'CONTEXT:', _run_func_content[badmatch.start():badmatch.start() + 30] + '...'
def _make_SD():
    # Validate the extractor's init() body by executing it locally with the
    # "ddext." prefixes stripped (so ddext.input(...) runs as input(...),
    # populating the module-level declaration lists above).
    init_func_exec = re.sub(r'[ \t]*ddext\.', '', _init_func_content)
    # Flatten leading indentation so the body is valid at top level.
    init_func_exec = re.sub(r'\n[ \t]*', '\n', init_func_exec)
    # print >>sys.stderr, init_func_exec
    try:
        exec(init_func_exec)
    except:
        # Bare except is deliberate here: any failure in user-supplied init()
        # code aborts generation with a diagnostic.  (Python 2 file.)
        print >>sys.stderr, "ERROR: cannot parse init function. Try to remove comments and extra lines in function init()."
        print >>sys.stderr, init_func_exec
        sys.exit(1)
def make_pg_func():
    # Assemble the full SQL DDL for the extractor: a composite return TYPE
    # plus a CREATE FUNCTION ... LANGUAGE plpythonu whose body is the
    # extractor's run() code with SD-cached imports prepended.
    # Returns the SQL as one string.
    ret_type_name = 'ret_' + _ext_name
    func_name = _ext_name
    ret = ''
    if len(_return_names) == 0:
        raise RuntimeError('Cannot have empty return_names!')
    # Create a return type. e.g.:
    # CREATE TYPE greeting AS (how text, who text);
    ret += 'DROP TYPE IF EXISTS ' + ret_type_name + ' CASCADE;\n'
    ret += 'CREATE TYPE ' + ret_type_name + ' AS (' \
        + ', '.join([ _return_names[i] + ' ' \
        + _return_types[i]
        # + ' []' # previous version
        for i in range(len(_return_names))]) \
        + ');\n'
    # Create the function.
    ret += 'CREATE OR REPLACE FUNCTION ' + func_name + '''(
    ''' + ', '.join([ _input_names[i] + ' ' \
        + _input_types[i] for i in range(len(_input_names))]) \
        + ''') RETURNS SETOF ''' + ret_type_name + ' AS\n$$\n';
    # Import Libraries
    # Each declared library is imported once per session and cached in
    # PL/Python's SD (static data) dictionary to avoid re-importing on
    # every call.
    for lib in _libraries:
        libname, from_package, as_name = lib
        as_str = ''
        from_str = ''
        varname = libname
        if as_name != None:
            as_str = ' as ' + as_name
            varname = as_name
        if from_package != None:
            from_str = 'from ' + from_package + ' '
        ret += """
if '"""+ varname +"""' in SD:
    """+ varname +""" = SD['"""+ varname +"""']
else:
    """+ from_str + """import """+ libname + as_str +"""
    SD['"""+ varname +"""'] = """+ varname + '\n'
    # Find out the indent level in the function,
    # determined by the first non-empty line
    lines = _run_func_content.split('\n')
    indent_level = 0
    for l in lines:
        if l.strip() == '':
            continue
        indent_level = len(l) - len(l.lstrip(' '))
        if indent_level > 0:
            break
    # Remove first-level indents
    run_func_content_noindent = '\n'.join([ l[indent_level:] for l in lines])
    ret += '\n' + run_func_content_noindent \
        + '\n$$ LANGUAGE plpythonu ;'
    return ret
if __name__ == '__main__':
    # CLI entry point: translate a Python extractor file into a PL/Python
    # SQL function definition.  Usage: ddext.py <codePath> <outPath> <funcName>
    if len(sys.argv) == 4:
        path = sys.argv[1]       # input extractor source
        outpath = sys.argv[2]    # output SQL file
        funcname = sys.argv[3]   # name of the generated SQL function
    else:
        print >>sys.stderr, 'Usage:',sys.argv[0],'<codePath> <outPath> <funcName>'
        sys.exit(1)
    code = open(path).read()
    parse(code, funcname)
    # print >>sys.stderr, '============== RUN: ================'
    # print >>sys.stderr, _run_func_content
    _make_SD()
    # print '============== PG_FUNC: ================'
    parsedcode = make_pg_func()
    fout = open(outpath, 'w')
    print >>fout, parsedcode
    fout.close()
| apache-2.0 |
DavidAndreev/indico | indico/modules/events/registration/forms.py | 1 | 18315 | # This file is part of Indico.
# Copyright (C) 2002 - 2016 European Organization for Nuclear Research (CERN).
#
# Indico is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 3 of the
# License, or (at your option) any later version.
#
# Indico is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Indico; if not, see <http://www.gnu.org/licenses/>.
from __future__ import unicode_literals
import json
from datetime import time
import jsonschema
from flask import session
from wtforms.fields import StringField, TextAreaField, BooleanField, IntegerField, SelectField
from wtforms.fields.html5 import EmailField, DecimalField
from wtforms.validators import DataRequired, NumberRange, Optional, ValidationError
from wtforms.widgets.html5 import NumberInput
from indico.core.config import Config
from indico.modules.events.features.util import is_feature_enabled
from indico.modules.events.registration.models.forms import ModificationMode
from indico.modules.events.registration.models.invitations import RegistrationInvitation
from indico.modules.events.registration.models.registrations import Registration
from indico.modules.events.payment import settings as payment_global_settings
from indico.util.i18n import _
from indico.util.placeholders import render_placeholder_info, get_missing_placeholders
from indico.web.forms.base import IndicoForm, generated_data
from indico.web.forms.fields import (IndicoDateTimeField, EmailListField, PrincipalListField, IndicoEnumSelectField,
JSONField)
from indico.web.forms.validators import HiddenUnless, LinkedDateTime, IndicoEmail
from indico.web.forms.widgets import SwitchWidget, CKEditorWidget
def _check_if_payment_required(form, field):
    """WTForms validator: reject a registration fee unless payment is enabled.

    A falsy (empty/zero) fee is always accepted; a non-zero fee requires the
    event's 'payment' feature to be active.
    """
    if field.data and not is_feature_enabled(form.event, 'payment'):
        raise ValidationError(_('You have to enable payment feature in order to set the registration fee.'))
class RegistrationFormForm(IndicoForm):
    """Settings form for a registration form (title, limits, fees, notifications)."""
    # Field-name groups used elsewhere to render/process related settings together.
    _price_fields = ('currency', 'base_price')
    _registrant_notification_fields = ('notification_sender_address',
                                       'message_pending', 'message_unpaid', 'message_complete')
    _manager_notification_fields = ('manager_notifications_enabled', 'manager_notification_recipients')
    _special_fields = _price_fields + _registrant_notification_fields + _manager_notification_fields
    title = StringField(_("Title"), [DataRequired()], description=_("The title of the registration form"))
    introduction = TextAreaField(_("Introduction"),
                                 description=_("Introduction to be displayed when filling out the registration form"))
    contact_info = StringField(_("Contact info"),
                               description=_("How registrants can get in touch with somebody for extra information"))
    moderation_enabled = BooleanField(_("Moderated"), widget=SwitchWidget(),
                                      description=_("If enabled, registrations require manager approval"))
    require_login = BooleanField(_("Only logged-in users"), widget=SwitchWidget(),
                                 description=_("Users must be logged in to register"))
    require_user = BooleanField(_("Registrant must have account"), widget=SwitchWidget(),
                                description=_("Registrations emails must be associated with an Indico account"))
    limit_registrations = BooleanField(_("Limit registrations"), widget=SwitchWidget(),
                                       description=_("Whether there is a limit of registrations"))
    registration_limit = IntegerField(_("Capacity"), [HiddenUnless('limit_registrations'), DataRequired(),
                                                      NumberRange(min=1)],
                                      description=_("Maximum number of registrations"))
    modification_mode = IndicoEnumSelectField(_("Modification allowed"), enum=ModificationMode,
                                              description=_("Will users be able to modify their data? When?"))
    publish_registrations_enabled = BooleanField(_('Publish registrations'), widget=SwitchWidget(),
                                                 description=_("Registrations from this form will be displayed in the "
                                                               "event page"))
    publish_checkin_enabled = BooleanField(_('Publish check-in status'), widget=SwitchWidget(),
                                           description=_("Check-in status will be shown publicly on the event page"))
    base_price = DecimalField(_('Registration fee'), [NumberRange(min=0), Optional(), _check_if_payment_required],
                              filters=[lambda x: x if x is not None else 0],
                              widget=NumberInput(step='0.01'),
                              description=_("A fixed fee all users have to pay when registering."))
    currency = SelectField(_('Currency'), [DataRequired()], description=_('The currency for new registrations'))
    notification_sender_address = StringField(_('Notification sender address'), [IndicoEmail()],
                                              filters=[lambda x: (x or None)])
    message_pending = TextAreaField(_("Message for pending registrations"),
                                    description=_("Text included in emails sent to pending registrations"))
    message_unpaid = TextAreaField(_("Message for unpaid registrations"),
                                   description=_("Text included in emails sent to unpaid registrations"))
    message_complete = TextAreaField(_("Message for complete registrations"),
                                     description=_("Text included in emails sent to complete registrations"))
    manager_notifications_enabled = BooleanField(_('Enabled'), widget=SwitchWidget(),
                                                 description=_("Enable notifications to managers about registrations"))
    manager_notification_recipients = EmailListField(_('List of recipients'),
                                                     [HiddenUnless('manager_notifications_enabled',
                                                                   preserve_data=True), DataRequired()],
                                                     description=_("Email addresses that will receive notifications"))
    def __init__(self, *args, **kwargs):
        # `event` is required and consumed here; the rest goes to the base form.
        self.event = kwargs.pop('event')
        # NOTE(review): super() is called with IndicoForm instead of
        # RegistrationFormForm, which skips IndicoForm.__init__ itself —
        # every other form in this module passes its own class.  Looks like
        # a copy-paste bug; confirm before changing.
        super(IndicoForm, self).__init__(*args, **kwargs)
        self._set_currencies()
        default_sender_address = Config.getInstance().getNoReplyEmail()
        self.notification_sender_address.description = _('Email address set as the sender of all '
                                                         'notifications sent to users. If empty, '
                                                         'then {0} is used.'.format(default_sender_address))
    def _set_currencies(self):
        """Populate the currency selector from the global payment settings, sorted by label."""
        currencies = [(c['code'], '{0[code]} ({0[name]})'.format(c)) for c in payment_global_settings.get('currencies')]
        self.currency.choices = sorted(currencies, key=lambda x: x[1].lower())
class RegistrationFormScheduleForm(IndicoForm):
    """Form to set the open/close period of a registration form."""
    start_dt = IndicoDateTimeField(_("Start"), [Optional()], default_time=time(0, 0),
                                   description=_("Moment when registrations will be open"))
    end_dt = IndicoDateTimeField(_("End"), [Optional(), LinkedDateTime('start_dt')], default_time=time(23, 59),
                                 description=_("Moment when registrations will be closed"))
    def __init__(self, *args, **kwargs):
        # `regform` is required; its event provides the timezone used by the
        # datetime fields.
        regform = kwargs.pop('regform')
        self.timezone = regform.event.tz
        # Bug fix: super() was called with IndicoForm as the first argument,
        # which skips IndicoForm.__init__ itself.  Use this class, matching
        # every other form in this module.
        super(RegistrationFormScheduleForm, self).__init__(*args, **kwargs)
class _UsersField(PrincipalListField):
    """Principal list field that also accepts external (non-Indico) users.

    The decoded JSON payload is kept as-is in ``self.data`` rather than
    being resolved to principal objects.
    """
    def __init__(self, *args, **kwargs):
        super(_UsersField, self).__init__(*args, allow_external=True, **kwargs)
    def process_formdata(self, valuelist):
        # Store the raw decoded list of user dicts; no principal lookup.
        if valuelist:
            self.data = json.loads(valuelist[0])
    def _value(self):
        return self._get_data()
    def pre_validate(self, form):
        # Deliberately skip the parent's principal validation: entries may
        # reference users without Indico accounts.
        pass
class InvitationFormBase(IndicoForm):
    """Base form for inviting people to register (email template + options)."""
    # Field-name groups used by the views to render sections separately.
    _invitation_fields = ('skip_moderation',)
    _email_fields = ('email_from', 'email_body')
    email_from = SelectField(_('From'), [DataRequired()])
    email_body = TextAreaField(_("Email body"), [DataRequired()], widget=CKEditorWidget(simple=True))
    skip_moderation = BooleanField(_("Skip moderation"), widget=SwitchWidget(),
                                   description=_("If enabled, the user's registration will be approved automatically."))
    def __init__(self, *args, **kwargs):
        self.regform = kwargs.pop('regform')
        super(InvitationFormBase, self).__init__(*args, **kwargs)
        # skip_moderation only makes sense when the regform is moderated.
        if not self.regform.moderation_enabled:
            del self.skip_moderation
        # Offer all of the current user's emails as sender addresses, with
        # the primary email sorted first (False < True in the sort key).
        from_addresses = ['{} <{}>'.format(session.user.full_name, email)
                          for email in sorted(session.user.all_emails, key=lambda x: x != session.user.email)]
        self.email_from.choices = zip(from_addresses, from_addresses)
        self.email_body.description = render_placeholder_info('registration-invitation-email', invitation=None)
    def validate_email_body(self, field):
        # The template must contain all required placeholders.
        missing = get_missing_placeholders('registration-invitation-email', field.data, invitation=None)
        if missing:
            raise ValidationError(_('Missing placeholders: {}').format(', '.join(missing)))
class InvitationFormNew(InvitationFormBase):
    """Invitation form for a single person entered manually (no Indico account needed)."""
    _invitation_fields = ('first_name', 'last_name', 'email', 'affiliation') + InvitationFormBase._invitation_fields
    first_name = StringField(_('First name'), [DataRequired()],
                             description=_("The first name of the user you are inviting."))
    last_name = StringField(_('Last name'), [DataRequired()],
                            description=_("The last name of the user you are inviting."))
    email = EmailField(_('Email'), [DataRequired()], filters=[lambda x: x.lower() if x else x],
                       description=_("The invitation will be sent to this address."))
    affiliation = StringField(_('Affiliation'),
                              description=_("The affiliation of the user you are inviting."))
    @generated_data
    def users(self):
        # Normalized single-element list matching InvitationFormExisting.users.
        return [{'first_name': self.first_name.data,
                 'last_name': self.last_name.data,
                 'email': self.email.data,
                 'affiliation': self.affiliation.data}]
    def validate_email(self, field):
        # Reject duplicates against both pending invitations and existing
        # (active) registrations of this regform.
        if RegistrationInvitation.find(email=field.data).with_parent(self.regform).count():
            raise ValidationError(_("There is already an invitation with this email address."))
        if Registration.find(email=field.data, is_active=True).with_parent(self.regform).count():
            raise ValidationError(_("There is already a registration with this email address."))
class InvitationFormExisting(InvitationFormBase):
    """Invitation form for one or more users picked from the Indico user search."""
    _invitation_fields = ('users_field',) + InvitationFormBase._invitation_fields
    users_field = _UsersField(_('Users'), [DataRequired()], description=_("Select the users to invite."))
    @generated_data
    def users(self):
        # Map the camelCase keys of the user-search JSON payload to the
        # snake_case keys used by the invitation logic.
        return [{'first_name': x['firstName'],
                 'last_name': x['familyName'],
                 'email': x['email'].lower(),
                 'affiliation': x['affiliation']}
                for x in self.users_field.data]
    def validate_users_field(self, field):
        emails = {x['email'].lower() for x in field.data}
        # invitations
        existing = {x.email for x in self.regform.invitations} & emails
        if existing:
            raise ValidationError(_("There are already invitations for the following email addresses: {emails}")
                                  .format(emails=', '.join(sorted(existing))))
        # registrations
        existing = {x.email for x in self.regform.registrations if x.is_active} & emails
        if existing:
            raise ValidationError(_("There are already registrations with the following email addresses: {emails}")
                                  .format(emails=', '.join(sorted(existing))))
class EmailRegistrantsForm(IndicoForm):
    """Form to compose an email sent to (a selection of) registrants."""
    from_address = SelectField(_("From"), [DataRequired()])
    cc_addresses = EmailListField(_("CC"),
                                  description=_("Beware, addresses in this field will receive one mail per "
                                                "registrant."))
    subject = StringField(_("Subject"), [DataRequired()])
    body = TextAreaField(_("Email body"), [DataRequired()], widget=CKEditorWidget(simple=True))
    def __init__(self, *args, **kwargs):
        self.regform = kwargs.pop('regform')
        super(EmailRegistrantsForm, self).__init__(*args, **kwargs)
        # Sender choices: all of the current user's emails, primary first.
        from_addresses = ['{} <{}>'.format(session.user.full_name, email)
                          for email in sorted(session.user.all_emails, key=lambda x: x != session.user.email)]
        self.from_address.choices = zip(from_addresses, from_addresses)
        self.body.description = render_placeholder_info('registration-email', regform=self.regform, registration=None)
    def validate_body(self, field):
        # The body must contain all required placeholders for this context.
        missing = get_missing_placeholders('registration-email', field.data, regform=self.regform, registration=None)
        if missing:
            raise ValidationError(_('Missing placeholders: {}').format(', '.join(missing)))
class TicketsForm(IndicoForm):
    """Settings form for ticketing on a registration form.

    The three delivery toggles are hidden unless ticketing is enabled
    (HiddenUnless on 'tickets_enabled').
    """
    tickets_enabled = BooleanField(_('Enable Tickets'), widget=SwitchWidget(),
                                   description=_('Create tickets for registrations using this registration form.'))
    ticket_on_email = BooleanField(_('Attach to registration e-mail'), [HiddenUnless('tickets_enabled',
                                                                                    preserve_data=True)],
                                   widget=SwitchWidget(),
                                   description=_('Attach PDF ticket to the email sent to a user after completing '
                                                 'their registration.'))
    ticket_on_event_page = BooleanField(_('Download from event homepage'), [HiddenUnless('tickets_enabled',
                                                                                        preserve_data=True)],
                                        widget=SwitchWidget(),
                                        description=_('Allow users to download their ticket from the '
                                                      'conference homepage.'))
    ticket_on_summary_page = BooleanField(_('Download from summary page'), [HiddenUnless('tickets_enabled',
                                                                                        preserve_data=True)],
                                          widget=SwitchWidget(),
                                          description=_('Allow users to download their ticket from the registration '
                                                        'summary page.'))
class ParticipantsDisplayForm(IndicoForm):
    """Form to customize the display of the participant list."""
    json = JSONField()
    def validate_json(self, field):
        # Validate the submitted settings against a JSON schema:
        # whether forms are merged, which regforms are shown, and which
        # columns are displayed.
        schema = {
            'type': 'object',
            'properties': {
                'merge_forms': {'type': 'boolean'},
                'participant_list_forms': {
                    'type': 'array',
                    'items': {'type': 'integer'}
                },
                'participant_list_columns': {
                    'type': 'array',
                    'items': {'type': 'string'}
                }
            }
        }
        try:
            jsonschema.validate(field.data, schema)
        except jsonschema.ValidationError as exc:
            # Surface the schema error as a normal form validation error.
            raise ValidationError(exc.message)
class ParticipantsDisplayFormColumnsForm(IndicoForm):
    """Form to customize the columns for a particular registration form on the participant list."""
    json = JSONField()
    def validate_json(self, field):
        # Only a list of column (field) ids is accepted.
        schema = {
            'type': 'object',
            'properties': {
                'columns': {
                    'type': 'array',
                    'items': {'type': 'integer'}
                }
            }
        }
        try:
            jsonschema.validate(field.data, schema)
        except jsonschema.ValidationError as exc:
            # Surface the schema error as a normal form validation error.
            raise ValidationError(exc.message)
class RegistrationManagersForm(IndicoForm):
    """Form to manage users with privileges to modify registration-related items"""
    managers = PrincipalListField(_('Registration managers'), groups=True, allow_emails=True, allow_external=True,
                                  description=_('List of users allowed to modify registrations'))
class CreateMultipleRegistrationsForm(IndicoForm):
    """Form to create multiple registrations of Indico users at the same time."""
    user_principals = PrincipalListField(_("Indico users"), [DataRequired()])
    notify_users = BooleanField(_("Send e-mail notifications"),
                                default=True,
                                description=_("Notify the users about the registration."),
                                widget=SwitchWidget())
    def __init__(self, *args, **kwargs):
        # `regform` is required; `open_add_user_dialog` optionally opens the
        # user search dialog as soon as the form is shown.
        self._regform = kwargs.pop('regform')
        open_add_user_dialog = kwargs.pop('open_add_user_dialog', False)
        super(CreateMultipleRegistrationsForm, self).__init__(*args, **kwargs)
        self.user_principals.open_immediately = open_add_user_dialog
    def validate_user_principals(self, field):
        # Reject users who already have a (non-deleted) registration.
        for user in field.data:
            if user.registrations.filter_by(registration_form=self._regform, is_deleted=False).one_or_none():
                raise ValidationError(_("A registration for {} already exists.").format(user.full_name))
| gpl-3.0 |
mariodebian/jclic-browser | python-examples/progress.py | 1 | 4424 | #!/usr/bin/env python
# example progressbar.py
import pygtk
pygtk.require('2.0')
import gtk
# Update the value of the progress bar so that we get
# some movement
def progress_timeout(pbobj):
    """Timer callback: advance (or pulse) pbobj's progress bar by one step.

    Always returns True so the GTK timeout keeps firing.
    """
    if pbobj.activity_check.get_active():
        # Activity mode: just bounce the indicator.
        pbobj.pbar.pulse()
        return True
    # Normal mode: advance by 1% and wrap around once we pass 100%.
    fraction = pbobj.pbar.get_fraction() + 0.01
    if fraction > 1.0:
        fraction = 0.0
    pbobj.pbar.set_fraction(fraction)
    return True
class ProgressBar:
    # Demo window: a GTK progress bar plus check buttons that toggle its
    # text, activity mode and orientation.  (PyGTK / Python 2 example.)
    # Callback that toggles the text display within the progress
    # bar trough
    def toggle_show_text(self, widget, data=None):
        if widget.get_active():
            self.pbar.set_text("some text")
        else:
            self.pbar.set_text("")
    # Callback that toggles the activity mode of the progress
    # bar
    def toggle_activity_mode(self, widget, data=None):
        if widget.get_active():
            self.pbar.pulse()
        else:
            self.pbar.set_fraction(0.0)
    # Callback that toggles the orientation of the progress bar
    def toggle_orientation(self, widget, data=None):
        if self.pbar.get_orientation() == gtk.PROGRESS_LEFT_TO_RIGHT:
            self.pbar.set_orientation(gtk.PROGRESS_RIGHT_TO_LEFT)
        elif self.pbar.get_orientation() == gtk.PROGRESS_RIGHT_TO_LEFT:
            self.pbar.set_orientation(gtk.PROGRESS_LEFT_TO_RIGHT)
    # Clean up allocated memory and remove the timer
    def destroy_progress(self, widget, data=None):
        gtk.timeout_remove(self.timer)
        self.timer = 0
        gtk.main_quit()
    def __init__(self):
        # Build the window, progress bar, control buttons and start the
        # 100ms update timer (progress_timeout above).
        self.window = gtk.Window(gtk.WINDOW_TOPLEVEL)
        self.window.set_resizable(True)
        self.window.connect("destroy", self.destroy_progress)
        self.window.set_title("ProgressBar")
        self.window.set_border_width(0)
        vbox = gtk.VBox(False, 5)
        vbox.set_border_width(10)
        self.window.add(vbox)
        vbox.show()
        # Create a centering alignment object
        align = gtk.Alignment(0.5, 0.5, 0, 0)
        vbox.pack_start(align, False, False, 5)
        align.show()
        # Create the ProgressBar
        self.pbar = gtk.ProgressBar()
        align.add(self.pbar)
        self.pbar.show()
        # Add a timer callback to update the value of the progress bar
        self.timer = gtk.timeout_add (100, progress_timeout, self)
        separator = gtk.HSeparator()
        vbox.pack_start(separator, False, False, 0)
        separator.show()
        # rows, columns, homogeneous
        table = gtk.Table(2, 2, False)
        vbox.pack_start(table, False, True, 0)
        table.show()
        # Add a check button to select displaying of the trough text
        check = gtk.CheckButton("Show text")
        table.attach(check, 0, 1, 0, 1,
                     gtk.EXPAND | gtk.FILL, gtk.EXPAND | gtk.FILL,
                     5, 5)
        check.connect("clicked", self.toggle_show_text)
        check.show()
        # Add a check button to toggle activity mode
        self.activity_check = check = gtk.CheckButton("Activity mode")
        table.attach(check, 0, 1, 1, 2,
                     gtk.EXPAND | gtk.FILL, gtk.EXPAND | gtk.FILL,
                     5, 5)
        check.connect("clicked", self.toggle_activity_mode)
        check.show()
        # Add a check button to toggle orientation
        check = gtk.CheckButton("Right to Left")
        table.attach(check, 0, 1, 2, 3,
                     gtk.EXPAND | gtk.FILL, gtk.EXPAND | gtk.FILL,
                     5, 5)
        check.connect("clicked", self.toggle_orientation)
        check.show()
        # Add a button to exit the program
        button = gtk.Button("close")
        button.connect("clicked", self.destroy_progress)
        vbox.pack_start(button, False, False, 0)
        # This makes it so the button is the default.
        button.set_flags(gtk.CAN_DEFAULT)
        # This grabs this button to be the default button. Simply hitting
        # the "Enter" key will cause this button to activate.
        button.grab_default ()
        button.show()
        self.window.show()
def main():
    # Enter the GTK main loop; returns 0 when the loop quits.
    gtk.main()
    return 0
if __name__ == "__main__":
    # Build the demo window, then run the GTK event loop.
    ProgressBar()
    main()
| gpl-2.0 |
denkab/FrameworkBenchmarks | frameworks/Python/turbogears/app.py | 4 | 2632 | import os
import sys
import json
from functools import partial
from operator import attrgetter
from random import randint
import bleach
from tg import expose, TGController, AppConfig
from jinja2 import Environment, PackageLoader
from sqlalchemy.orm import scoped_session, sessionmaker
from sqlalchemy import create_engine
from models.Fortune import Fortune
from models.World import World
# Database/session/template setup for the TechEmpower benchmark app.
DBDRIVER = 'mysql'
DBHOSTNAME = os.environ.get('DBHOST', 'localhost')
DATABASE_URI = '%s://benchmarkdbuser:benchmarkdbpass@%s:3306/hello_world?charset=utf8' % (DBDRIVER, DBHOSTNAME)
db_engine = create_engine(DATABASE_URI)
Session = sessionmaker(bind=db_engine)
# NOTE(review): a single module-level session shared by all requests —
# presumably acceptable for this benchmark, but not thread-safe in general.
db_session = Session()
env = Environment(loader=PackageLoader("app", "templates"))
def getQueryNum(queryString):
    """Parse the benchmark 'queries' parameter, clamped to the range 1..500.

    Non-integer input yields 1, per the TechEmpower benchmark rules.
    """
    try:
        requested = int(queryString)
    except ValueError:
        return 1
    return min(max(requested, 1), 500)
class RootController(TGController):
    # TechEmpower benchmark endpoints (plaintext, json, db, queries,
    # updates, fortunes).  Python 2 file: uses xrange.
    @expose(content_type="text/plain")
    def plaintext(self):
        return "Hello, World!"
    @expose("json")
    def json(self):
        return {"message": "Hello, World!"}
    @expose("json")
    def db(self):
        # Fetch one random World row by primary key.
        wid = randint(1, 10000)
        world = db_session.query(World).get(wid).serialize()
        return world
    @expose("json")
    def updates(self, queries=1):
        # Update N random rows with new random numbers, in one commit.
        num_queries = getQueryNum(queries)
        worlds = []
        rp = partial(randint, 1, 10000)
        ids = [rp() for _ in xrange(num_queries)]
        # Sorting ids gives a deterministic row-lock order across requests.
        ids.sort()
        for id in ids:
            world = db_session.query(World).get(id)
            world.randomNumber = rp()
            worlds.append(world.serialize())
        db_session.commit()
        return json.dumps(worlds)
    @expose("json")
    def queries(self, queries=1):
        # Fetch N random rows (bound method hoisted out of the loop).
        num_queries = getQueryNum(queries)
        rp = partial(randint, 1, 10000)
        get = db_session.query(World).get
        worlds = [get(rp()).serialize() for _ in xrange(num_queries)]
        return json.dumps(worlds)
    @expose()
    def fortune(self):
        # All fortunes plus one added at request time, sorted by message,
        # HTML-sanitized and rendered with Jinja2.
        fortunes = db_session.query(Fortune).all()
        fortunes.append(Fortune(id=0, message="Additional fortune added at request time."))
        fortunes.sort(key=attrgetter("message"))
        for f in fortunes:
            f.message = bleach.clean(f.message)
        template = env.get_template("fortunes.html")
        return template.render(fortunes=fortunes)
# Build the WSGI application: minimal TG config with our controller and
# the jinja renderer enabled.
config = AppConfig(minimal=True, root_controller=RootController())
config.renderers.append("jinja")
app = config.make_wsgi_app()
| bsd-3-clause |
ronhandler/gitroot | opencv/colordetect/upload/ColourDetection.py | 2 | 1281 | #!/usr/bin/env python
import cv2
import numpy as np
class ColourDetection(object):
    # Detect which of a few predefined colours dominates a BGR image,
    # using HSV threshold masks.  (Python 2 file: uses dict.iteritems.)
    #HSV 180-255-255 max values openCV (recalculate from GIMP)
    #these need adjusting
    # Red wraps around hue 0, so it is split into two ranges (red1/red2)
    # that detect_color() merges back into a single 'red' result.
    # NOTE(review): the 'yellow' hue range 103-145 overlaps the 'blue'
    # range (110-130) rather than the usual OpenCV yellow hues (~20-35);
    # looks wrong — confirm against real samples before relying on it.
    BOUNDARIES = {
    'red1': ([0, 50, 50], [20, 255, 255]),
    'red2': ([160, 50, 50], [179, 255, 255]),
    'blue': ([110, 50, 50], [130, 255, 255]),
    'green': ([38, 50, 50], [75, 255, 255]),
    'yellow':([103, 50, 50], [145, 255, 255])
    }
    @staticmethod
    def detect_color(detection_image):
        # Returns the first colour whose mask covers more than 9000 pixels,
        # or "No colour found".  Iteration order of the dict is arbitrary.
        img_hsv = cv2.cvtColor(detection_image, cv2.COLOR_BGR2HSV)
        #loop for all defined colours
        for k,v in ColourDetection.BOUNDARIES.iteritems():
            #convert to numpty arrays
            lower_color = np.array(v[0], np.uint8)
            upper_color = np.array(v[1], np.uint8)
            #create mask from colour bounds
            mask = cv2.inRange(img_hsv, lower_color, upper_color)
            #count found colour pixels
            amount_not_zero = cv2.countNonZero(mask)
            if amount_not_zero > 9000:
                if k=='red1' or k=='red2':
                    return 'red'
                else:
                    return k
            else:
                #print(" Did not find " + k)
                continue
        return "No colour found"
| unlicense |
gnowgi/gnowsys-studio | gstudio/feeds.py | 3 | 11464 |
# Copyright (c) 2011, 2012 Free Software Foundation
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""Feeds for Gstudio"""
from urlparse import urljoin
from BeautifulSoup import BeautifulSoup
from django.contrib.auth.models import User
from django.contrib.sites.models import Site
from django.core.urlresolvers import reverse
from django.shortcuts import get_object_or_404
from django.utils.feedgenerator import Atom1Feed
from django.utils.translation import ugettext as _
from django.contrib.syndication.views import Feed
from django.core.urlresolvers import NoReverseMatch
from django.core.exceptions import ObjectDoesNotExist
from tagging.models import Tag
from tagging.models import TaggedItem
from gstudio.models import Nodetype
from gstudio.settings import COPYRIGHT
from gstudio.settings import PROTOCOL
from gstudio.settings import FEEDS_FORMAT
from gstudio.settings import FEEDS_MAX_ITEMS
from gstudio.managers import nodetypes_published
from gstudio.views.metatypes import get_metatype_or_404
from gstudio.templatetags.gstudio_tags import get_gravatar
class GstudioFeed(Feed):
    """Base Feed for Gstudio: caches the current site and its base URL,
    and switches to Atom output when FEEDS_FORMAT is 'atom'."""
    feed_copyright = COPYRIGHT
    def __init__(self):
        self.site = Site.objects.get_current()
        self.site_url = '%s://%s' % (PROTOCOL, self.site.domain)
        if FEEDS_FORMAT == 'atom':
            self.feed_type = Atom1Feed
            # Atom requires a subtitle; reuse the RSS description hook.
            self.subtitle = self.description
class NodetypeFeed(GstudioFeed):
    """Base Nodetype Feed: per-item metadata hooks shared by all nodetype feeds."""
    title_template = 'feeds/nodetype_title.html'
    description_template = 'feeds/nodetype_description.html'
    def item_pubdate(self, item):
        """Publication date of a nodetype"""
        return item.creation_date
    def item_metatypes(self, item):
        """Nodetype's metatypes"""
        return [metatype.title for metatype in item.metatypes.all()]
    def item_author_name(self, item):
        """Returns the first author of a nodetype"""
        # Side effect: caches the author on self for item_author_email/link.
        # Assumes the feed framework calls this hook before those two —
        # if there are no authors, self.item_author is left unset.
        if item.authors.count():
            self.item_author = item.authors.all()[0]
            return self.item_author.username
    def item_author_email(self, item):
        """Returns the first author's email"""
        return self.item_author.email
    def item_author_link(self, item):
        """Returns the author's URL"""
        try:
            author_url = reverse('gstudio_author_detail',
                                 args=[self.item_author.username])
            return self.site_url + author_url
        except NoReverseMatch:
            # Fall back to the site root if the author URL cannot be reversed.
            return self.site_url
    def item_enclosure_url(self, item):
        """Returns an image for enclosure"""
        if item.image:
            return item.image.url
        # No explicit image: use the first <img> found in the rendered HTML.
        img = BeautifulSoup(item.html_content).find('img')
        if img:
            return urljoin(self.site_url, img['src'])
    def item_enclosure_length(self, item):
        """Hardcoded enclosure length"""
        return '100000'
    def item_enclosure_mime_type(self, item):
        """Hardcoded enclosure mimetype"""
        # NOTE(review): always image/jpeg, even when the enclosure image
        # is a different format.
        return 'image/jpeg'
class LatestNodetypes(NodetypeFeed):
    """Feed for the latest nodetypes"""
    def link(self):
        """URL of latest nodetypes"""
        return reverse('gstudio_nodetype_archive_index')
    def items(self):
        """Items are published nodetypes"""
        # Capped at FEEDS_MAX_ITEMS, as in every feed in this module.
        return Nodetype.published.all()[:FEEDS_MAX_ITEMS]
    def title(self):
        """Title of the feed"""
        return '%s - %s' % (self.site.name, _('Latest nodetypes'))
    def description(self):
        """Description of the feed"""
        return _('The latest nodetypes for the site %s') % self.site.name
class MetatypeNodetypes(NodetypeFeed):
    """Feed filtered by a metatype"""
    def get_object(self, request, path):
        """Retrieve the metatype by his path"""
        # Raises Http404 if no metatype matches the path.
        return get_metatype_or_404(path)
    def items(self, obj):
        """Items are the published nodetypes of the metatype"""
        return obj.nodetypes_published()[:FEEDS_MAX_ITEMS]
    def link(self, obj):
        """URL of the metatype"""
        return obj.get_absolute_url()
    def title(self, obj):
        """Title of the feed"""
        return _('Nodetypes for the metatype %s') % obj.title
    def description(self, obj):
        """Description of the feed"""
        return _('The latest nodetypes for the metatype %s') % obj.title
class AuthorNodetypes(NodetypeFeed):
    """Feed filtered by an author"""
    def get_object(self, request, username):
        """Retrieve the author by his username"""
        return get_object_or_404(User, username=username)
    def items(self, obj):
        """Items are the published nodetypes of the author"""
        # nodetypes_published() filters the author's nodetypes to the
        # published ones only.
        return nodetypes_published(obj.nodetypes)[:FEEDS_MAX_ITEMS]
    def link(self, obj):
        """URL of the author"""
        return reverse('gstudio_author_detail', args=[obj.username])
    def title(self, obj):
        """Title of the feed"""
        return _('Nodetypes for author %s') % obj.username
    def description(self, obj):
        """Description of the feed"""
        return _('The latest nodetypes by %s') % obj.username
class TagNodetypes(NodetypeFeed):
    """Feed filtered by a tag"""
    def get_object(self, request, slug):
        """Retrieve the tag by his name"""
        # Note: the URL "slug" is matched against Tag.name, not a slug field.
        return get_object_or_404(Tag, name=slug)
    def items(self, obj):
        """Items are the published nodetypes of the tag"""
        return TaggedItem.objects.get_by_model(
            Nodetype.published.all(), obj)[:FEEDS_MAX_ITEMS]
    def link(self, obj):
        """URL of the tag"""
        return reverse('gstudio_tag_detail', args=[obj.name])
    def title(self, obj):
        """Title of the feed"""
        return _('Nodetypes for the tag %s') % obj.name
    def description(self, obj):
        """Description of the feed"""
        return _('The latest nodetypes for the tag %s') % obj.name
class SearchNodetypes(NodetypeFeed):
    """Feed filtered by a search pattern"""
    def get_object(self, request):
        """The GET parameter 'pattern' is the object"""
        pattern = request.GET.get('pattern', '')
        # Patterns shorter than 3 characters are rejected (the framework
        # turns ObjectDoesNotExist into a 404).
        if len(pattern) < 3:
            raise ObjectDoesNotExist
        return pattern
    def items(self, obj):
        """Items are the published nodetypes founds"""
        return Nodetype.published.search(obj)[:FEEDS_MAX_ITEMS]
    def link(self, obj):
        """URL of the search request"""
        return '%s?pattern=%s' % (reverse('gstudio_nodetype_search'), obj)
    def title(self, obj):
        """Title of the feed"""
        return _("Results of the search for '%s'") % obj
    def description(self, obj):
        """Description of the feed"""
        return _("The nodetypes containing the pattern '%s'") % obj
class NodetypeDiscussions(GstudioFeed):
    """Feed for discussions in a nodetype (comments and pingbacks combined)."""
    title_template = 'feeds/discussion_title.html'
    description_template = 'feeds/discussion_description.html'
    def get_object(self, request, year, month, day, slug):
        """Retrieve the discussions by nodetype's slug"""
        # The nodetype is identified by its dated URL (slug + creation date).
        return get_object_or_404(Nodetype.published, slug=slug,
                                 creation_date__year=year,
                                 creation_date__month=month,
                                 creation_date__day=day)
    def items(self, obj):
        """Items are the discussions on the nodetype"""
        return obj.discussions[:FEEDS_MAX_ITEMS]
    def item_pubdate(self, item):
        """Publication date of a discussion"""
        return item.submit_date
    def item_link(self, item):
        """URL of the discussion"""
        return item.get_absolute_url()
    def link(self, obj):
        """URL of the nodetype"""
        return obj.get_absolute_url()
    def item_author_name(self, item):
        """Author of the discussion"""
        # Discussion authors come from the comment's userinfo dict, not a
        # User object.
        return item.userinfo['name']
    def item_author_email(self, item):
        """Author's email of the discussion"""
        return item.userinfo['email']
    def item_author_link(self, item):
        """Author's URL of the discussion"""
        return item.userinfo['url']
    def title(self, obj):
        """Title of the feed"""
        return _('Discussions on %s') % obj.title
    def description(self, obj):
        """Description of the feed"""
        return _('The latest discussions for the nodetype %s') % obj.title
class NodetypeComments(NodetypeDiscussions):
    """Feed for comments on a nodetype, with a gravatar image enclosure."""
    title_template = 'feeds/comment_title.html'
    description_template = 'feeds/comment_description.html'

    def items(self, obj):
        """Comments on the nodetype, capped at FEEDS_MAX_ITEMS."""
        return obj.comments[:FEEDS_MAX_ITEMS]

    def item_link(self, item):
        """URL of the comment (anchored inside the nodetype page)."""
        return item.get_absolute_url('#comment_%(id)s')

    def title(self, obj):
        """Title of the feed."""
        return _('Comments on %s') % obj.title

    def description(self, obj):
        """Description of the feed."""
        return _('The latest comments for the nodetype %s') % obj.title

    def item_enclosure_url(self, item):
        """Returns a gravatar image for enclosure."""
        return get_gravatar(item.userinfo['email'])

    def item_enclosure_length(self, item):
        """Hardcoded enclosure length (placeholder value, as a string)."""
        return '100000'

    def item_enclosure_mime_type(self, item):
        """Hardcoded enclosure mimetype."""
        return 'image/jpeg'
class NodetypePingbacks(NodetypeDiscussions):
    """Feed for pingbacks on a nodetype."""
    title_template = 'feeds/pingback_title.html'
    description_template = 'feeds/pingback_description.html'

    def items(self, obj):
        """Pingbacks on the nodetype, capped at FEEDS_MAX_ITEMS."""
        return obj.pingbacks[:FEEDS_MAX_ITEMS]

    def item_link(self, item):
        """URL of the pingback (anchored inside the nodetype page)."""
        return item.get_absolute_url('#pingback_%(id)s')

    def title(self, obj):
        """Title of the feed."""
        return _('Pingbacks on %s') % obj.title

    def description(self, obj):
        """Description of the feed."""
        return _('The latest pingbacks for the nodetype %s') % obj.title
class NodetypeTrackbacks(NodetypeDiscussions):
    """Feed for trackbacks on a nodetype."""
    title_template = 'feeds/trackback_title.html'
    description_template = 'feeds/trackback_description.html'

    def items(self, obj):
        """Trackbacks on the nodetype, capped at FEEDS_MAX_ITEMS."""
        return obj.trackbacks[:FEEDS_MAX_ITEMS]

    def item_link(self, item):
        """URL of the trackback (anchored inside the nodetype page)."""
        return item.get_absolute_url('#trackback_%(id)s')

    def title(self, obj):
        """Title of the feed."""
        return _('Trackbacks on %s') % obj.title

    def description(self, obj):
        """Description of the feed."""
        return _('The latest trackbacks for the nodetype %s') % obj.title
| agpl-3.0 |
tgrochow/avango | attic/avango-connect/python/avango/connect/_MFString.py | 6 | 1974 | # -*- Mode:Python -*-
##########################################################################
# #
# This file is part of AVANGO. #
# #
# Copyright 1997 - 2009 Fraunhofer-Gesellschaft zur Foerderung der #
# angewandten Forschung (FhG), Munich, Germany. #
# #
# AVANGO is free software: you can redistribute it and/or modify #
# it under the terms of the GNU Lesser General Public License as #
# published by the Free Software Foundation, version 3. #
# #
# AVANGO is distributed in the hope that it will be useful, #
# but WITHOUT ANY WARRANTY; without even the implied warranty of #
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the #
# GNU General Public License for more details. #
# #
# You should have received a copy of the GNU Lesser General Public #
# License along with AVANGO. If not, see <http://www.gnu.org/licenses/>. #
# #
##########################################################################
import avango
from _registry import _register_field
class MFStringDescriptor(object):
    """Stream (de)serialization support for avango.MFString fields.

    Values are written NUL-prefixed and NUL-separated; an empty field
    writes nothing at all.
    """

    key = "MFString"

    def write(self, field, hout):
        """Serialize *field*'s string list to the stream *hout*."""
        values = field.value
        if values:
            hout.write('\x00')
            hout.write('\x00'.join(values))

    def read(self, line):
        """Build a fresh MFString field whose value is *line*."""
        result = avango.MFString()
        result.value = line
        return result
_register_field(avango.MFString, MFStringDescriptor())
| lgpl-3.0 |
lwander/terraform | vendor/github.com/coreos/etcd/Godeps/_workspace/src/github.com/ugorji/go/codec/test.py | 1138 | 3876 | #!/usr/bin/env python
# This will create golden files in a directory passed to it.
# A Test calls this internally to create the golden files
# So it can process them (so we don't have to checkin the files).
# Ensure msgpack-python and cbor are installed first, using:
# sudo apt-get install python-dev
# sudo apt-get install python-pip
# pip install --user msgpack-python msgpack-rpc-python cbor
import cbor, msgpack, msgpackrpc, sys, os, threading
def get_test_data_list():
    """Return the golden-file test corpus.

    Covers every primitive type handled by the codecs (signed/unsigned
    ints of various widths, floats, bools, None, strings, timestamps),
    followed by the whole primitive list nested as one element, and a
    few composite dict/list structures.
    """
    primitives = [
        -8,
        -1616,
        -32323232,
        -6464646464646464,
        192,
        1616,
        32323232,
        6464646464646464,
        192,
        -3232.0,
        -6464646464.0,
        3232.0,
        6464646464.0,
        False,
        True,
        None,
        u"someday",
        u"",
        u"bytestring",
        1328176922000002000,
        -2206187877999998000,
        270,
        -2013855847999995777,
        #-6795364578871345152,
    ]
    composites = [
        { "true": True,
          "false": False },
        { "true": "True",
          "false": False,
          "uint16(1616)": 1616 },
        { "list": [1616, 32323232, True, -3232.0, {"TRUE":True, "FALSE":False}, [True, False] ],
          "int32":32323232, "bool": True,
          "LONG STRING": "123456789012345678901234567890123456789012345678901234567890",
          "SHORT STRING": "1234567890" },
        { True: "true", 8: False, "false": 0 }
    ]
    data = list(primitives)
    data.append(primitives)
    data.extend(composites)
    return data
def build_test_data(destdir):
    """Serialize every test datum into *destdir* as golden files.

    Writes <i>.msgpack.golden and <i>.cbor.golden for the i-th entry of
    get_test_data_list().
    """
    for i, datum in enumerate(get_test_data_list()):
        # msgpack first, then cbor, matching the original output order.
        with open(os.path.join(destdir, str(i) + '.msgpack.golden'), 'wb') as out:
            out.write(msgpack.dumps(datum))
        with open(os.path.join(destdir, str(i) + '.cbor.golden'), 'wb') as out:
            out.write(cbor.dumps(datum))
def doRpcServer(port, stopTimeSec):
    """Run a msgpack-rpc echo server on localhost:*port*.

    If *stopTimeSec* > 0, a timer thread stops the server after that many
    seconds; otherwise it blocks in server.start() until interrupted.
    """
    class EchoHandler(object):
        # RPC method: concatenate the three messages into one string.
        def Echo123(self, msg1, msg2, msg3):
            return ("1:%s 2:%s 3:%s" % (msg1, msg2, msg3))
        # RPC method: echo the struct's string representation.
        def EchoStruct(self, msg):
            return ("%s" % msg)
    addr = msgpackrpc.Address('localhost', port)
    server = msgpackrpc.Server(EchoHandler())
    server.listen(addr)
    # run thread to stop it after stopTimeSec seconds if > 0
    if stopTimeSec > 0:
        def myStopRpcServer():
            server.stop()
        t = threading.Timer(stopTimeSec, myStopRpcServer)
        t.start()
    server.start()
def doRpcClientToPythonSvc(port):
    """Call the Python echo service on localhost:*port* and print replies.

    (Python 2 print statements; part of a Python 2 test harness.)
    """
    address = msgpackrpc.Address('localhost', port)
    client = msgpackrpc.Client(address, unpack_encoding='utf-8')
    print client.call("Echo123", "A1", "B2", "C3")
    print client.call("EchoStruct", {"A" :"Aa", "B":"Bb", "C":"Cc"})
def doRpcClientToGoSvc(port):
    """Call the Go echo service on localhost:*port* and print replies.

    The Go service takes its arguments packed in a list and exposes its
    methods under the "TestRpcInt." prefix.
    """
    # print ">>>> port: ", port, " <<<<<"
    address = msgpackrpc.Address('localhost', port)
    client = msgpackrpc.Client(address, unpack_encoding='utf-8')
    print client.call("TestRpcInt.Echo123", ["A1", "B2", "C3"])
    print client.call("TestRpcInt.EchoStruct", {"A" :"Aa", "B":"Bb", "C":"Cc"})
def doMain(args):
    """Dispatch the command line.

    Supported invocations:
      testdata <dir>                      -- write golden files
      rpc-server <port> <stop-seconds>    -- run the echo server
      rpc-client-python-service <port>    -- exercise the Python service
      rpc-client-go-service <port>        -- exercise the Go service
    Anything else prints usage.
    """
    cmd = args[0] if args else None
    rest = args[1:]
    if cmd == "testdata" and len(rest) == 1:
        build_test_data(rest[0])
    elif cmd == "rpc-server" and len(rest) == 2:
        doRpcServer(int(rest[0]), int(rest[1]))
    elif cmd == "rpc-client-python-service" and len(rest) == 1:
        doRpcClientToPythonSvc(int(rest[0]))
    elif cmd == "rpc-client-go-service" and len(rest) == 1:
        doRpcClientToGoSvc(int(rest[0]))
    else:
        print("Usage: test.py " +
              "[testdata|rpc-server|rpc-client-python-service|rpc-client-go-service] ...")

if __name__ == "__main__":
    doMain(sys.argv[1:])
| mpl-2.0 |
crakensio/django_training | lib/python2.7/site-packages/south/tests/logger.py | 128 | 2939 | import io
import logging
import os
import tempfile
from south.tests import unittest
import sys
from django.conf import settings
from django.db import connection, models
from south.db import db
from south.logger import close_logger
class TestLogger(unittest.TestCase):
    """
    Tests if the logging is working reasonably. Some tests ignored if you don't
    have write permission to the disk.
    """

    def setUp(self):
        db.debug = False
        # Fresh temp log file per test; mkstemp returns (fd, path).
        self.test_path = tempfile.mkstemp(suffix=".south.log")[1]

    def test_db_execute_logging_nofile(self):
        "Does logging degrade nicely if SOUTH_LOGGING_ON not set?"
        settings.SOUTH_LOGGING_ON = False     # this needs to be set to False
                                              # to avoid issues where other tests
                                              # set this to True. settings is shared
                                              # between these tests.
        db.create_table("test9", [('email_confirmed', models.BooleanField(default=False))])

    def test_db_execute_logging_off_with_basic_config(self):
        """
        Does the south logger avoid outputing debug information with
        south logging turned off and python logging configured with
        a basic config?"
        """
        settings.SOUTH_LOGGING_ON = False

        # Set root logger to capture WARNING and worse
        logging_stream = io.StringIO()
        logging.basicConfig(stream=logging_stream, level=logging.WARNING)

        db.create_table("test12", [('email_confirmed', models.BooleanField(default=False))])

        # since south logging is off, and our root logger is at WARNING
        # we should not find DEBUG info in the log
        self.assertEqual(logging_stream.getvalue(), '')

    def test_db_execute_logging_validfile(self):
        "Does logging work when passing in a valid file?"
        settings.SOUTH_LOGGING_ON = True
        settings.SOUTH_LOGGING_FILE = self.test_path
        # Check to see if we can make the logfile
        try:
            fh = open(self.test_path, "w")
        except IOError:
            # Permission was denied, ignore the test.
            return
        else:
            fh.close()
        # Do an action which logs
        db.create_table("test10", [('email_confirmed', models.BooleanField(default=False))])
        # Close the logged file
        close_logger()
        try:
            os.remove(self.test_path)
        except:
            # It's a tempfile, it's not vital we remove it.
            pass

    def test_db_execute_logging_missingfilename(self):
        "Does logging raise an error if there is a missing filename?"
        settings.SOUTH_LOGGING_ON = True
        settings.SOUTH_LOGGING_FILE = None
        self.assertRaises(
            IOError,
            db.create_table,
            "test11",
            [('email_confirmed', models.BooleanField(default=False))],
        )
| cc0-1.0 |
cvedovini/airbnb-agenda | dateutil/tzwin.py | 304 | 5828 | # This code was originally contributed by Jeffrey Harris.
import datetime
import struct
import _winreg
__author__ = "Jeffrey Harris & Gustavo Niemeyer <gustavo@niemeyer.net>"
__all__ = ["tzwin", "tzwinlocal"]
ONEWEEK = datetime.timedelta(7)  # one week; used for nth-weekday arithmetic

# Registry locations of timezone data (NT vs. 9x layouts) and the local zone.
TZKEYNAMENT = r"SOFTWARE\Microsoft\Windows NT\CurrentVersion\Time Zones"
TZKEYNAME9X = r"SOFTWARE\Microsoft\Windows\CurrentVersion\Time Zones"
TZLOCALKEYNAME = r"SYSTEM\CurrentControlSet\Control\TimeZoneInformation"
def _settzkeyname():
    """Select the registry path holding timezone data: NT layout if it
    exists, otherwise the Windows 9x layout."""
    global TZKEYNAME
    handle = _winreg.ConnectRegistry(None, _winreg.HKEY_LOCAL_MACHINE)
    try:
        _winreg.OpenKey(handle, TZKEYNAMENT).Close()
        TZKEYNAME = TZKEYNAMENT
    except WindowsError:
        # NT-style key absent: fall back to the 9x location.
        TZKEYNAME = TZKEYNAME9X
    handle.Close()

# Resolve TZKEYNAME once at import time.
_settzkeyname()
class tzwinbase(datetime.tzinfo):
    """tzinfo class based on win32's timezones available in the registry."""

    def utcoffset(self, dt):
        # Offsets are stored in minutes (Windows registry convention).
        if self._isdst(dt):
            return datetime.timedelta(minutes=self._dstoffset)
        else:
            return datetime.timedelta(minutes=self._stdoffset)

    def dst(self, dt):
        # DST correction relative to the standard offset.
        if self._isdst(dt):
            minutes = self._dstoffset - self._stdoffset
            return datetime.timedelta(minutes=minutes)
        else:
            return datetime.timedelta(0)

    def tzname(self, dt):
        if self._isdst(dt):
            return self._dstname
        else:
            return self._stdname

    def list():
        """Return a list of all time zones known to the system."""
        handle = _winreg.ConnectRegistry(None, _winreg.HKEY_LOCAL_MACHINE)
        tzkey = _winreg.OpenKey(handle, TZKEYNAME)
        result = [_winreg.EnumKey(tzkey, i)
                  for i in range(_winreg.QueryInfoKey(tzkey)[0])]
        tzkey.Close()
        handle.Close()
        return result
    # Pre-decorator-syntax staticmethod (module targets old Python 2).
    list = staticmethod(list)

    def display(self):
        """Human-readable zone name from the registry."""
        return self._display

    def _isdst(self, dt):
        # Compute this year's DST-on and DST-off transition datetimes from
        # the Windows month/dayofweek/week rules parsed by the subclasses.
        dston = picknthweekday(dt.year, self._dstmonth, self._dstdayofweek,
                               self._dsthour, self._dstminute,
                               self._dstweeknumber)
        dstoff = picknthweekday(dt.year, self._stdmonth, self._stddayofweek,
                                self._stdhour, self._stdminute,
                                self._stdweeknumber)
        if dston < dstoff:
            return dston <= dt.replace(tzinfo=None) < dstoff
        else:
            # DST spans the year boundary (e.g. southern hemisphere).
            return not dstoff <= dt.replace(tzinfo=None) < dston
class tzwin(tzwinbase):
    """Timezone loaded from the Windows registry by zone *name*."""

    def __init__(self, name):
        self._name = name

        handle = _winreg.ConnectRegistry(None, _winreg.HKEY_LOCAL_MACHINE)
        tzkey = _winreg.OpenKey(handle, "%s\%s" % (TZKEYNAME, name))
        keydict = valuestodict(tzkey)
        tzkey.Close()
        handle.Close()

        self._stdname = keydict["Std"].encode("iso-8859-1")
        self._dstname = keydict["Dlt"].encode("iso-8859-1")

        self._display = keydict["Display"]

        # See http://ww_winreg.jsiinc.com/SUBA/tip0300/rh0398.htm
        # TZI blob layout: 3 longs (Bias, StandardBias, DaylightBias)
        # followed by two SYSTEMTIME structures (8 shorts each).
        tup = struct.unpack("=3l16h", keydict["TZI"])
        self._stdoffset = -tup[0]-tup[1]          # Bias + StandardBias * -1
        self._dstoffset = self._stdoffset-tup[2]  # + DaylightBias * -1

        (self._stdmonth,
         self._stddayofweek,   # Sunday = 0
         self._stdweeknumber,  # Last = 5
         self._stdhour,
         self._stdminute) = tup[4:9]

        (self._dstmonth,
         self._dstdayofweek,   # Sunday = 0
         self._dstweeknumber,  # Last = 5
         self._dsthour,
         self._dstminute) = tup[12:17]

    def __repr__(self):
        return "tzwin(%s)" % repr(self._name)

    def __reduce__(self):
        # Pickle support: reconstruct from the zone name.
        return (self.__class__, (self._name,))
class tzwinlocal(tzwinbase):
    """The machine's local timezone, read from the registry."""

    def __init__(self):
        handle = _winreg.ConnectRegistry(None, _winreg.HKEY_LOCAL_MACHINE)

        tzlocalkey = _winreg.OpenKey(handle, TZLOCALKEYNAME)
        keydict = valuestodict(tzlocalkey)
        tzlocalkey.Close()

        self._stdname = keydict["StandardName"].encode("iso-8859-1")
        self._dstname = keydict["DaylightName"].encode("iso-8859-1")

        try:
            # The display name lives under the zone's own key, if present.
            tzkey = _winreg.OpenKey(handle, "%s\%s"%(TZKEYNAME, self._stdname))
            _keydict = valuestodict(tzkey)
            self._display = _keydict["Display"]
            tzkey.Close()
        except OSError:
            self._display = None

        handle.Close()

        # Offsets are stored in minutes, sign-inverted in the registry.
        self._stdoffset = -keydict["Bias"]-keydict["StandardBias"]
        self._dstoffset = self._stdoffset-keydict["DaylightBias"]

        # See http://ww_winreg.jsiinc.com/SUBA/tip0300/rh0398.htm
        # SYSTEMTIME blob: 8 shorts; fields 1..5 are month, dayofweek,
        # week number, hour, minute.
        tup = struct.unpack("=8h", keydict["StandardStart"])

        (self._stdmonth,
         self._stddayofweek,   # Sunday = 0
         self._stdweeknumber,  # Last = 5
         self._stdhour,
         self._stdminute) = tup[1:6]

        tup = struct.unpack("=8h", keydict["DaylightStart"])

        (self._dstmonth,
         self._dstdayofweek,   # Sunday = 0
         self._dstweeknumber,  # Last = 5
         self._dsthour,
         self._dstminute) = tup[1:6]

    def __reduce__(self):
        return (self.__class__, ())
def picknthweekday(year, month, dayofweek, hour, minute, whichweek):
    """Return the nth occurrence of a weekday in a month (Windows TZ rule).

    *dayofweek* follows the Windows convention (0 == Sunday); *whichweek*
    is 1-based, and 5 means "last occurrence in the month".

    Bug fix: the previous loop started at ``weekdayone + whichweek`` weeks
    instead of ``weekdayone + (whichweek - 1)`` weeks, so e.g.
    whichweek == 1 returned the *second* matching weekday (and could fall
    through returning None). This mirrors the corrected upstream dateutil
    implementation.
    """
    one_week = datetime.timedelta(7)
    first = datetime.datetime(year, month, 1, hour, minute)
    # First occurrence of the requested weekday (isoweekday: Mon=1..Sun=7).
    weekdayone = first.replace(day=((dayofweek - first.isoweekday()) % 7 + 1))
    target = weekdayone + (whichweek - 1) * one_week
    if target.month != month:
        # whichweek == 5 ("last") can overshoot short months; step back.
        target -= one_week
    return target
def valuestodict(key):
    """Convert a registry key's values to a {name: value} dictionary.

    Fix: the local accumulator was named ``dict``, shadowing the builtin;
    renamed to ``result`` (behavior unchanged).
    """
    result = {}
    # QueryInfoKey returns (num_subkeys, num_values, last_modified).
    size = _winreg.QueryInfoKey(key)[1]
    for i in range(size):
        # EnumValue returns (value_name, value_data, value_type).
        data = _winreg.EnumValue(key, i)
        result[data[0]] = data[1]
    return result
| gpl-3.0 |
sid-kap/pants | src/python/pants/backend/jvm/targets/java_library.py | 16 | 1249 | # coding=utf-8
# Copyright 2014 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
from __future__ import (absolute_import, division, generators, nested_scopes, print_function,
unicode_literals, with_statement)
from pants.backend.jvm.targets.exportable_jvm_library import ExportableJvmLibrary
class JavaLibrary(ExportableJvmLibrary):
    """A collection of Java code.

    Normally has conceptually-related sources; invoking the ``compile`` goal
    on this target compiles Java and generates classes. Invoking the ``jar``
    goal on this target creates a ``.jar``; but that's an unusual thing to do.
    Instead, a ``jvm_binary`` might depend on this library; that binary is a
    more sensible thing to bundle.
    """

    def __init__(self, *args, **kwargs):
        """
        :param provides: The ``artifact``
          to publish that represents this target outside the repo.
        :param resources: An optional list of file paths (DEPRECATED) or
          ``resources`` targets (which in turn point to file paths). The paths
          indicate text file resources to place in this module's jar.
        """
        super(JavaLibrary, self).__init__(*args, **kwargs)
        # Tag the target so tasks can select Java-labelled targets.
        self.add_labels('java')
henkelis/sonospy | sonospy/brisa/upnp/device/xml_gen.py | 1 | 10074 | # Licensed under the MIT license
# http://opensource.org/licenses/mit-license.php
# Copyright 2007-2008 Brisa Team <brisa-develop@garage.maemo.org>
""" Device description XML generator.
"""
from xml.etree.cElementTree import ElementTree, Element, SubElement
from brisa.upnp.upnp_defaults import UPnPDefaults
class DeviceXMLBuilder(object):
    """Generates the root description XML document for a UPnP device."""

    def __init__(self, device):
        self.device = device
        self.url_base = device.location

    def get_element(self):
        """Build and return the <root> element (spec version + device tree)."""
        # <root>
        elt = Element('root', xmlns = 'urn:schemas-upnp-org:' +
                      UPnPDefaults.SCHEMA_VERSION)

        # <specVersion>
        spec_version_element = SubElement(elt, 'specVersion')
        element = SubElement(spec_version_element, 'major')
        element.text = UPnPDefaults.SCHEMA_VERSION_MAJOR
        element = SubElement(spec_version_element, 'minor')
        element.text = UPnPDefaults.SCHEMA_VERSION_MINOR

        # <urlBase>
        # if self.url_base != None:
        #     element = SubElement(elt, 'URLBase')
        #     element.text = self.url_base

        # <device>
        elt.append(DeviceXMLGenerator(self.device).generate())
        return elt

    def generate_to_file(self, filepath):
        """Write the description XML to *filepath*.

        NOTE(review): also writes a second copy to ./device.xml and rebuilds
        the whole tree twice -- looks like leftover debugging; confirm
        before relying on (or removing) that side effect.
        """
        ElementTree(self.get_element()).write(filepath)
        ElementTree(self.get_element()).write("device.xml")

    def generate(self):
        # Builds the tree and discards it; kept for API symmetry.
        ElementTree(self.get_element())
class DeviceXMLGenerator(object):
    """Builds the <device> element of a UPnP device description."""

    def __init__(self, device):
        self.device = device

    def generate(self):
        """Return the fully-populated <device> Element."""
        self.device_element = Element("device")
        self._create_device()
        self._create_icon_list()
        self._create_service_list()
        self._create_embedded_devices()
        return self.device_element

    def _create_device(self):
        """Fill in the device identification and DLNA fields."""
        element = SubElement(self.device_element, "deviceType")
        element.text = self.device.device_type
        element = SubElement(self.device_element, "friendlyName")
        element.text = self.device.friendly_name
        element = SubElement(self.device_element, "manufacturer")
        element.text = self.device.manufacturer
        element = SubElement(self.device_element, "manufacturerURL")
        element.text = self.device.manufacturer_url
        element = SubElement(self.device_element, "modelDescription")
        element.text = self.device.model_description
        element = SubElement(self.device_element, "modelName")
        element.text = self.device.model_name
        element = SubElement(self.device_element, "modelURL")
        element.text = self.device.model_url
        element = SubElement(self.device_element, "modelNumber")
        element.text = self.device.model_number
        element = SubElement(self.device_element, "serialNumber")
        element.text = self.device.serial_number
        element = SubElement(self.device_element, "UDN")
        element.text = self.device.udn
        # <UPC> intentionally omitted (was commented out upstream).
        element = SubElement(self.device_element, "presentationURL")
        element.text = self.device.presentation_url
        # DLNA capability marker; DMS-1.50 / M-DMS-1.50 / X_DLNACAP entries
        # were disabled upstream and remain so.
        element = SubElement(self.device_element, 'dlna:X_DLNADOC')
        element.attrib['xmlns:dlna'] = 'urn:schemas-dlna-org:device-1-0'
        element.text = 'DMS-1.00'

    def _create_icon_list(self):
        """Append <iconList> if the device declares any icons."""
        device_icons = self.device.icons
        if len(device_icons) > 0:
            icon_list_element = SubElement(self.device_element, "iconList")
            for device_icon in device_icons:
                icon_element = SubElement(icon_list_element, "icon")
                element = SubElement(icon_element, "mimetype")
                element.text = device_icon.get_mimetype()
                element = SubElement(icon_element, "width")
                element.text = device_icon.get_width()
                element = SubElement(icon_element, "height")
                element.text = device_icon.get_height()
                element = SubElement(icon_element, "depth")
                element.text = device_icon.get_depth()
                element = SubElement(icon_element, "url")
                element.text = device_icon.get_url()

    def _create_service_list(self):
        """Append <serviceList> if the device exposes any services."""
        device_services = self.device.services
        if len(device_services) > 0:
            service_list_element = SubElement(self.device_element,
                                              "serviceList")
            for k, device_service in device_services.items():
                service_element = SubElement(service_list_element, "service")
                element = SubElement(service_element, "serviceType")
                element.text = device_service.service_type
                element = SubElement(service_element, "serviceId")
                element.text = device_service.id
                element = SubElement(service_element, "SCPDURL")
                element.text = device_service.scpd_url
                element = SubElement(service_element, "controlURL")
                element.text = device_service.control_url
                element = SubElement(service_element, "eventSubURL")
                element.text = device_service.event_sub_url
                element = SubElement(service_element, "presentationURL")
                element.text = device_service.presentation_url

    def _create_embedded_devices(self):
        """Append <deviceList> with embedded-device descriptions.

        Bug fix: the original called the non-existent method
        ``create_description()`` on the nested generator; the class's
        entry point is ``generate()``, so embedded devices raised
        AttributeError.
        """
        if self.device.is_root_device():
            embedded_devices = self.device.devices
            if len(embedded_devices) > 0:
                device_list_element = SubElement(self.device_element,
                                                 "deviceList")
                for embedded_device in embedded_devices:
                    device_list_element.append(
                        DeviceXMLGenerator(embedded_device).generate())
class ServiceXMLBuilder(object):
    """Generates the SCPD XML document for a UPnP service."""

    def __init__(self, service):
        self.service = service

    def get_element(self):
        """Build and return the <scpd> root element."""
        # <root>
        elt = Element('scpd', xmlns = 'urn:schemas-upnp-org:' +
                      UPnPDefaults.SERVICE_SCHEMA_VERSION)

        # <specVersion>
        spec_version_element = SubElement(elt, 'specVersion')
        element = SubElement(spec_version_element, 'major')
        element.text = UPnPDefaults.SCHEMA_VERSION_MAJOR
        element = SubElement(spec_version_element, 'minor')
        element.text = UPnPDefaults.SCHEMA_VERSION_MINOR

        # <actionList> and <serviceStateTable>
        action_list_element, service_state_table_element = ServiceXMLGenerator(self.service).generate()
        elt.append(action_list_element)
        elt.append(service_state_table_element)
        return elt

    def generate_to_file(self, filepath):
        """Write the SCPD document to *filepath*."""
        ElementTree(self.get_element()).write(filepath)

    def generate(self):
        # Builds the tree and discards it; kept for API symmetry.
        ElementTree(self.get_element())
class ServiceXMLGenerator(object):
    """Builds the <actionList> and <serviceStateTable> elements of a UPnP
    service description (SCPD)."""

    def __init__(self, service):
        self.service = service

    def generate(self):
        """Return the (actionList, serviceStateTable) Element pair."""
        self.action_list_element = Element("actionList")
        if self.service.get_actions():
            self._create_actions(self.service.get_actions())
        self.service_state_table_element = Element("serviceStateTable")
        self._create_variables(self.service.get_variables())
        return self.action_list_element, self.service_state_table_element

    def _create_actions(self, actions):
        """Append one <action> (name + argument list) per service action.

        Uses dict.items() instead of the Python-2-only iteritems(): the
        iteration is identical on Python 2 and it also runs on Python 3.
        """
        for action_name, action in actions.items():
            action_element = SubElement(self.action_list_element, "action")
            element = SubElement(action_element, "name")
            element.text = action.name
            # <argumentList> is emitted even when the action has no args.
            argument_list_element = SubElement(action_element, "argumentList")
            if action.arguments:
                self._create_arguments(argument_list_element, action.arguments)

    def _create_arguments(self, argument_list_element, arguments):
        """Append <argument> entries: name, direction, related state var."""
        for arg in arguments:
            arg_element = SubElement(argument_list_element, "argument")
            element = SubElement(arg_element, "name")
            element.text = arg.name
            element = SubElement(arg_element, "direction")
            element.text = arg.direction
            element = SubElement(arg_element, "relatedStateVariable")
            element.text = arg.state_var.name

    def _create_variables(self, state_variables):
        """Append <stateVariable> entries with the events flag, data type,
        default value and allowed-value list."""
        for var_name, var in state_variables.items():
            var_element = SubElement(self.service_state_table_element,
                                     "stateVariable")
            if var.send_events:
                var_element.attrib['sendEvents'] = 'yes'
            else:
                var_element.attrib['sendEvents'] = 'no'
            element = SubElement(var_element, "name")
            element.text = var.name
            element = SubElement(var_element, "dataType")
            element.text = var.data_type
            element = SubElement(var_element, "defaultValue")
            element.text = var.get_value()
            # <allowedValueList>
            allowed_value_list_element = SubElement(var_element,
                                                    "allowedValueList")
            for allowed_value in var.allowed_values:
                element = SubElement(allowed_value_list_element,
                                     "allowedValue")
                element.text = allowed_value
| gpl-3.0 |
AleKit/TFGDM17 | dm_spectra_f.py | 1 | 30644 | import numpy as np
import pylab as pl
import scipy as sp
import bisect
from scipy.interpolate import interp1d
from scipy.interpolate import spline
from matplotlib import pyplot as plt
import pyfits
# Matplotlib defaults for all figures produced by this script.
pl.rcParams['figure.figsize'] = (10.0, 7.0)
pl.rcParams['font.size'] = 18
pl.rcParams['font.family'] = 'serif'
pl.rcParams['lines.linewidth'] = 3

# Input locations: cascade spectra table and instrument sensitivity files.
pathforfigs ='/home/ale/TFGF/'
pathforaux='/home/ale/TFGF'
filename=pathforaux+'/CascadeSpectra/Spectra/AtProduction_gammas.dat'
path=pathforaux+"/sensitivities/"

# Differential sensitivity curves for the various gamma-ray instruments.
#vts_file = np.genfromtxt(path+"Instrument/VERITAS_V6_std_50hr_5sigma_VERITAS2014_DiffSens.dat")
vts_file = np.genfromtxt(path+"Instrument/VERITAS_ICRC2015_envelope.dat")
magic_file = np.genfromtxt(path+"Instrument/MAGIC_DiffSensCU.dat")
hess_file_combined = np.genfromtxt(path+"Instrument/HESS_August2015_CT15_Combined_Std.dat")
hess_file_stereo = np.genfromtxt(path+"Instrument/HESS_August2015_CT15_Stereo_Std.dat")
hess_file_envelope = np.genfromtxt(path+"Instrument/HESS_ICRC2015_envelope.dat")
hawc_1yr_file = np.genfromtxt(path+"Instrument/HAWC300_1y_QuarterDecade_DiffSens.dat")
hawc_5yr_file = np.genfromtxt(path+"Instrument/HAWC300_5y_QuarterDecade_DiffSens.dat")
fermi_b0_file = np.genfromtxt(path+"Instrument/fermi_lat_pass8_l0_b0.dat")
fermi_b30_file = np.genfromtxt(path+"Instrument/fermi_lat_pass8_l0_b30.dat")
fermi_b90_file = np.genfromtxt(path+"Instrument/fermi_lat_pass8_l0_b90.dat")
hs_file = np.genfromtxt(path+"Instrument/hiscore.dat")
lhaaso_file = np.genfromtxt(path+"Instrument/lhaaso.dat")
cta_n_file = np.genfromtxt(path+"North/CTA-Performance-North-50h-DiffSens.txt",skip_header=9)
cta_s_file = np.genfromtxt(path+"South/CTA-Performance-South-50h-DiffSens.txt",skip_header=9)

# Unit system: energies expressed in TeV (TeV == 1).
Qe = 1.602176462e-19  # elementary charge [C], used in flux unit conversions
TeV = 1
GeV = 1e-3 * TeV
MeV = 1e-6 * TeV
erg = 0.624151 * TeV
eV = 1e-9 * GeV
def Smooth(E, F):
    """Resample (E, F) onto 300 log-spaced points with a cubic spline fitted
    in log-log space; returns (Enew, F_smooth) as plain lists.

    Fix: ``scipy.interpolate.spline`` was removed in SciPy 1.3;
    ``make_interp_spline`` is the documented replacement and performs the
    same cubic interpolation through the data points.
    """
    # Local import: the file-level import only pulls the removed `spline`.
    from scipy.interpolate import make_interp_spline

    logE = np.log10(E)
    logF = np.log10(F)
    logEnew = np.linspace(logE.min(), logE.max(), 300)
    logF_smooth = make_interp_spline(logE, logF, k=3)(logEnew)
    Enew = [10**le for le in logEnew]
    F_smooth = [10**lf for lf in logF_smooth]
    return (Enew, F_smooth)
def getMAGIC(magic_file, Escale = GeV, smooth=True):
    """Convert the MAGIC sensitivity table (in Crab units) to E^2 dN/dE.

    Bin centers are the mean of the two edge columns scaled by *Escale*;
    each point is multiplied by the MAGIC Crab reference spectrum.
    Returns (E, flux), log-log smoothed unless *smooth* is False.
    """
    e_centers = 0.5 * (magic_file[:, 0] + magic_file[:, 1]) * Escale
    crab_fraction = magic_file[:, 2]
    flux = []
    for e0, f0 in zip(e_centers, crab_fraction):
        flux.append(f0 * 3.39e-11 * e0**(-2.51 - 0.21*np.log10(e0)) * e0 * e0 * 1e12 * Qe / 1e-7)
    if smooth:
        return Smooth(e_centers, flux)
    return (e_centers, flux)
def getVERITAS(veritas_file, index=1, smooth=True):
    """Convert a VERITAS sensitivity column (Crab units) to E^2 dN/dE.

    *index* selects which column of the table to use; the VERITAS Crab
    reference spectrum is applied per point. Returns (E, flux), log-log
    smoothed unless *smooth* is False.
    """
    energies = veritas_file[:, 0]
    crab_fraction = veritas_file[:, index]
    flux = []
    for e0, f0 in zip(energies, crab_fraction):
        flux.append(f0 * 3.269e-11 * e0**(-2.474 - 0.191*np.log10(e0)) * e0 * e0 * 1e12 * Qe / 1e-7)
    if smooth:
        return Smooth(energies, flux)
    return (energies, flux)
def getHESS(hess_file, smooth=True):
    """Convert a H.E.S.S. sensitivity table (log10(E) column) to E^2 dN/dE.

    The first column holds log10 of the energy; the flux column is scaled
    to E^2 dN/dE units. Returns (E, flux), smoothed unless *smooth* is False.
    """
    log_energies = hess_file[:, 0]
    raw_flux = hess_file[:, 1]
    energies = [10**le for le in log_energies]
    flux = []
    for e0, f0 in zip(energies, raw_flux):
        flux.append(f0 * e0 * e0 * 1e12 * Qe / 1e-7)
    if smooth:
        return Smooth(energies, flux)
    return (energies, flux)
def getHESSEnvelope(hess_file, smooth=True):
    """Convert the H.E.S.S. envelope table (per-erg flux) to E^2 dN/dE.

    Returns (E, flux), log-log smoothed unless *smooth* is False.
    """
    energies = hess_file[:, 0]
    raw_flux = hess_file[:, 1]
    flux = []
    for e0, f0 in zip(energies, raw_flux):
        flux.append(f0 * e0 * e0 / erg)
    if smooth:
        return Smooth(energies, flux)
    return (energies, flux)
def getHAWCFermi(hfile, Escale = GeV, smooth=True):
    """Return (E, flux) from a two-column HAWC/Fermi sensitivity table.

    Energies are scaled by *Escale* (the tables are MeV-based for Fermi,
    GeV-based for HAWC). Returns the points log-log smoothed unless
    *smooth* is False.
    """
    energies = hfile[:, 0]
    flux = hfile[:, 1]
    if not smooth:
        return (energies * Escale, flux)
    return Smooth(energies * Escale, flux)
def getCTA(ctafile, smooth=True):
    """Return (E, flux) from a CTA performance table.

    E is the midpoint of the two bin-edge columns; the third column is the
    differential sensitivity. Log-log smoothed unless *smooth* is False.
    """
    midpoints = (ctafile[:, 0] + ctafile[:, 1]) / 2.
    flux = ctafile[:, 2]
    if not smooth:
        return (midpoints, flux)
    return Smooth(midpoints, flux)
# Some useful units
#GeV = 1
#TeV = 1e3 * GeV
#erg = 624.15 * GeV
#eV = 1e-9 * GeV
def InterpolateTauEBL(E, redshift):
    """Interpolate the EBL optical depth tau(E) at the given redshift.

    Reads the tabulated model ``ebl_z<z>.dat`` (energy in column 1, tau in
    column 3) and interpolates linearly in log-log space. Energies outside
    the tabulated range are clamped to the first/last tau value.
    Returns a list of tau values, one per entry of *E*.
    """
    table = np.genfromtxt(path + "ebl_z%0.1f.dat" % redshift)
    Egamma = table[:, 1] * TeV
    Tau = table[:, 3]
    log_interp = interp1d(np.log10(Egamma), np.log10(Tau), kind='linear')

    taus = []
    for energy in E:
        if energy < Egamma[0]:
            log_tau = np.log10(Tau[0])
        elif energy > Egamma[-1]:
            log_tau = np.log10(Tau[-1])
        else:
            log_tau = log_interp(np.log10(energy))
        taus.append(10**log_tau)
    return taus
def SpectrumFlux(A, E, gamma, redshift = 0, Enorm = 1 * GeV, b = 0):
    """Differential flux of a power law (log-parabola when b != 0) with
    optional EBL absorption.

    dN/dE = A * (E/Enorm)**(-gamma + b*ln(E/Enorm)) * exp(-tau(E, z)).
    Returns a list, one value per entry of *E*.
    """
    if redshift > 0:
        tau = InterpolateTauEBL(E, redshift)
    else:
        tau = [0] * len(E)

    flux = []
    for energy, depth in zip(E, tau):
        attenuation = np.exp(-depth)
        flux.append(A * (energy/Enorm)**(-gamma + b * np.log(energy/Enorm)) * attenuation)
    return flux
def CrabSpectrumBroad(E):
    """Broadband log-parabola parametrisation of the Crab nebula spectrum.

    Returns dN/dE at energy *E* (array or scalar, in TeV units).
    """
    curvature = -0.12
    log_f0 = -10.248
    E_ic = 48*GeV  # inverse-Compton peak reference energy
    exponent = 2.5
    return E**(-2) * 10**(log_f0 + curvature*(np.abs(np.log10(E/E_ic)))**exponent)
def SpectrumIntegrator(Ec, Ewidths, Flux):
    """Integrate a binned differential flux over energy.

    Fix: the previous implementation ignored all three parameters and
    rebuilt its inputs from undefined module globals (Emin, Emax, A,
    gamma), raising NameError on any call. It now integrates the arrays
    it is given: total = sum_i Flux_i * Ewidths_i.

    Returns (Ec, total), mirroring the original's (centers, sum) shape.
    """
    total = np.sum(np.asarray(Ewidths) * np.asarray(Flux))
    return (Ec, total)
def SpectrumIntAboveEnergy(Ec, Ewidths, Flux):
    """Cumulative integral flux above each energy in *Ec* (per TeV).

    For every threshold energy, sums Flux*width over all strictly higher
    bins (bisect finds the first bin past the threshold).
    """
    binned = np.array([f0 * w0 for f0, w0 in zip(Flux, Ewidths)])
    totals = []
    for threshold in Ec:
        start = bisect.bisect(Ec, threshold)
        totals.append(np.sum(binned[start:]) / TeV)
    return totals
def plotSensitivity(ax, filename, legend="", xscale = 1, yscale=1., color='black', ls='-', lw=3, multE=False, delim=',', inCU=False, CrabEscale = 1):
    """Plot a two-column sensitivity file on *ax* (log-log).

    If *inCU* is True, values are interpreted as percent of the Crab flux
    and converted to absolute flux; if *multE* is True, E*flux (per TeV)
    is drawn instead of flux.

    NOTE(review): depends on a module-level ``plotCrabSpectrum`` that is
    not defined in the portion of the file reviewed here (only
    ``plotCrabSpectrumBroad`` is) -- confirm it exists elsewhere.
    Also uses Python 2 semantics (``zip(...)`` must return a sortable list).
    """
    (Ec, flux) = plotCrabSpectrum(ax, scale=1, plot=False)
    f = interp1d(Ec, flux)
    if legend == "":
        legend = filename
    eblfile = np.genfromtxt(filename, delimiter=delim)
    x = eblfile[:,0] * xscale
    y = eblfile[:,1] * yscale
    # Sort the points by energy before plotting.
    l = zip(x,y)
    l.sort()
    xx = [x for (x,y) in l]
    yy = [y for (x,y) in l]
    if inCU:
        # Convert from percent-of-Crab to absolute flux units.
        yy = f(xx) * yy * 1e-2
    if multE:
        zz = [xi * yi / TeV for (xi, yi) in zip(xx, yy)]
        ax.loglog(xx, zz, label=legend, linewidth=lw, color=color, ls=ls)
    else:
        ax.loglog(xx,yy, label=legend, linewidth=lw, color=color, ls=ls)
# Notebook residue: evaluates the broadband Crab spectrum at 1 GeV and
# discards the result -- presumably a leftover sanity check; confirm
# before removing.
e=1*GeV
e**2*CrabSpectrumBroad(e)
def plotIntegralSpectrum(ax, legend="", color='black', redshift=0, gamma=2, A=1e-8, Enorm = 1 * GeV, scale=1e-2, b = 0, fill=True, lwf=0.8, lwe=1, plot=True):
    """Compute (and optionally draw) E times the integral flux above E for a
    power-law source over a fixed 0.1 GeV - 1e8 GeV log grid.

    Returns (E, scale * E * integral-flux-above-E); with *plot* False
    nothing is drawn and only the arrays are returned.
    """
    Emin = 0.1 * GeV
    Emax = 1e8 * GeV
    nbins = 1500
    Ebins = np.logspace(np.log10(Emin), np.log10(Emax), nbins)
    Ec = (Ebins[:-1]+Ebins[1:])/2      # bin centers
    Ewidths = (Ebins[1:]-Ebins[:-1])   # bin widths
    Flux = SpectrumFlux(A, Ec, gamma, redshift, Enorm, b)
    IntFlux = SpectrumIntAboveEnergy(Ec, Ewidths, Flux)
    if fill:
        # Filled band down to a tiny floor value, plus a faint outline.
        lowedge = [1e-16 for x in IntFlux]
        if plot:
            ax.fill_between(Ec,scale*Ec*IntFlux, lowedge, label="z = " + str(redshift),lw=0, alpha=0.08, color='#009933')
            ax.loglog(Ec,scale*Ec*IntFlux,lw=lwf, color='#009933', ls='-',alpha=0.5)
        return (Ec, scale*Ec*IntFlux)
    else:
        if plot:
            ax.loglog(Ec,scale*Ec*IntFlux,lw=lwe, color=color, ls='--')
        return (Ec, scale*Ec*IntFlux)
def plotSpectrum(ax, legend="", color='black', redshift=0, gamma=2, A=1e-8, Enorm = 1 * GeV, scale=1e-2, b = 0, fill=True, lwf=0.8, lwe=1, plot=True, fcolor='#009933', alpha=0.03):
    """Compute (and optionally draw) scale * E^2 dN/dE for a power-law
    source over a fixed 0.1 GeV - 1e8 GeV log grid.

    Returns (E, scale*E^2*flux); with *plot* False nothing is drawn.
    """
    Emin = 0.1 * GeV
    Emax = 1e8 * GeV
    nbins = 1500
    Ebins = np.logspace(np.log10(Emin), np.log10(Emax), nbins)
    Ec = (Ebins[:-1]+Ebins[1:])/2      # bin centers
    Ewidths = (Ebins[1:]-Ebins[:-1])   # bin widths (unused below)
    Flux = SpectrumFlux(A, Ec, gamma, redshift, Enorm, b)
    if fill:
        # Filled band down to a tiny floor value, plus a faint outline.
        lowedge = [1e-16 for x in Flux]
        if plot:
            ax.fill_between(Ec,scale*Ec*Ec*Flux, lowedge, label="z = " + str(redshift),lw=0, alpha=alpha, color=fcolor)
            ax.loglog(Ec,scale*Ec*Ec*Flux,lw=lwf, color=color, ls='-',alpha=0.5)
        return (Ec, scale*Ec*Ec*Flux)
    else:
        if plot:
            ax.loglog(Ec,scale*Ec*Ec*Flux,lw=lwe, color=color, ls='--')
        return (Ec, scale*Ec*Ec*Flux)
def plotCrabSpectrumBroad(ax, legend="", color='black', scale=1, fill=True, lwf=0.8, lwe=1, plot=True, fcolor='grey', alpha=0.03):
    """Compute (and optionally draw) scale * E^2 dN/dE for the broadband
    Crab parametrisation over a fixed 0.1 GeV - 1e8 GeV log grid.

    Returns (E, scale*E^2*flux); with *plot* False nothing is drawn.
    """
    Emin = 0.1 * GeV
    Emax = 1e8 * GeV
    nbins = 1500
    Ebins = np.logspace(np.log10(Emin), np.log10(Emax), nbins)
    Ec = (Ebins[:-1]+Ebins[1:])/2  # bin centers
    Flux = CrabSpectrumBroad(Ec)
    if fill:
        # Filled band down to a tiny floor value, plus a faint outline.
        lowedge = [1e-16 for x in Flux]
        if plot:
            ax.fill_between(Ec,scale*Ec*Ec*Flux, lowedge, lw=0, alpha=alpha, color=fcolor)
            ax.loglog(Ec,scale*Ec*Ec*Flux,lw=lwf, color=color, ls='-',alpha=0.5)
        return (Ec, scale*Ec*Ec*Flux)
    else:
        if plot:
            ax.loglog(Ec,scale*Ec*Ec*Flux,lw=lwe, color=color, ls='--')
        return (Ec, scale*Ec*Ec*Flux)
Ns=1e3  # number of sources -- not used in the code shown here; confirm purpose
fullsky = 4 * np.pi  # full-sky solid angle in steradians
def getDMspectrum(option='e',finalstate='b',mass=1000,Jfactor=1.7e19,boost=1):
    """Build a dark-matter annihilation gamma-ray spectrum from a PPPC-style table.

    Options:
      e:  returns (E, dN/dE) and writes a "tabla..." two-column text file
      e2: returns (E, E**2 dN/dE)
      x:  returns (x, dN/dx)
      x2: returns (x, x**2 dN/dx)
    finalstate is the annihilation-channel column name in the table (e.g.
    'b', 'W', 'Tau'), or 'new' for the branon branching-ratio-weighted mix.
    mass is in GeV (must match a table mass to within 0.1%); Jfactor is in
    GeV^2 cm^-5; boost multiplies the annihilation cross section.

    NOTE(review): relies on module-level `filename`, `np`, `interp1d`,
    `GeV` and `erg` defined elsewhere in this script.
    """
    sigmav = 3e-26  # thermal annihilation cross section in cm3 s-1
    data = np.genfromtxt(filename, names=True, dtype=None, comments='#')
    massvals = data["mDM"]
    # Select table rows whose mass matches `mass` to within 0.1%.
    index = np.where(np.abs((massvals - mass) / mass) < 1.e-3)
    xvals = 10**(data["Log10x"][index])

    def branchingratios(m_branon):
        """Return <sigmav>_channel / <sigmav>_total for a branon of mass m_branon.

        Channel strengths follow PhysRevD.68.103505; channels below kinematic
        threshold get zero weight.
        """
        m_top = 172.44
        m_W = 80.4
        m_Z = 91.2
        m_h = 125.1
        m_c = 1.275
        m_b = 4.18
        m_tau = 1.7768

        def c0_fermion(color_factor, m_f):
            # Unnormalized annihilation strength into a fermion pair
            # (color_factor = 3 for quarks, 1 for leptons).
            if m_branon <= m_f:
                return 0.0
            return (color_factor / 16.0 * m_branon ** 2 * m_f ** 2 *
                    (m_branon ** 2 - m_f ** 2) *
                    (1 - m_f ** 2 / m_branon ** 2) ** 0.5)

        def c0_gauge(multiplicity, m_v):
            # Unnormalized annihilation strength into a massive gauge-boson
            # pair (multiplicity = 2 for W+W-, 1 for ZZ).
            if m_branon <= m_v:
                return 0.0
            return (multiplicity / 64.0 * m_branon ** 2 *
                    (1 - m_v ** 2 / m_branon ** 2) ** 0.5 *
                    (4 * m_branon ** 4 - 4 * m_branon ** 2 * m_v ** 2 + 3 * m_v ** 4))

        c_0_top = c0_fermion(3.0, m_top)
        c_0_Z = c0_gauge(1.0, m_Z)
        c_0_W = c0_gauge(2.0, m_W)
        if m_branon > m_h:
            c_0_h = (1.0 / 64 * m_branon ** 2 * (2 * m_branon ** 2 + m_h ** 2) ** 2 *
                     (1 - m_h ** 2 / m_branon ** 2) ** 0.5)
        else:
            c_0_h = 0.0
        c_0_c = c0_fermion(3.0, m_c)
        c_0_b = c0_fermion(3.0, m_b)
        c_0_tau = c0_fermion(1.0, m_tau)
        c_0_T = c_0_top + c_0_Z + c_0_W + c_0_h + c_0_c + c_0_b + c_0_tau
        # 'Tau' (capitalized) matches the renamed column in
        # AtProduction_Gammas.dat; 'masas' carries the input mass through.
        return {'masas': m_branon,
                't': c_0_top / c_0_T,
                'Z': c_0_Z / c_0_T,
                'W': c_0_W / c_0_T,
                'h': c_0_h / c_0_T,
                'c': c_0_c / c_0_T,
                'b': c_0_b / c_0_T,
                'Tau': c_0_tau / c_0_T}

    # The table stores dN/d(log10 x) = x ln(10) dN/dx; convert to dN/dx.
    log10_jacobian = np.log(10) * xvals
    if finalstate == "new":
        di = branchingratios(mass)
        # BUGFIX: the original indexed di.keys()/di.values() by position,
        # which depends on (Python 2) dict ordering and breaks on Python 3;
        # address channels explicitly by name instead.
        channels = ('t', 'Z', 'W', 'h', 'c', 'b', 'Tau')
        loadspec_by_channel = dict(
            (ch, interp1d(xvals, data[ch][index] / log10_jacobian))
            for ch in channels)
    else:
        flux = data[finalstate][index] / log10_jacobian
        loadspec = interp1d(xvals, flux)

    def dNdx(x):
        # Clamp out-of-range (x > 1) and unphysical (negative) values to 0.
        fluxval = loadspec(x)
        if x > 1 or fluxval < 0:
            return 0
        return fluxval

    def dNdx_new(x, branching):
        # Branching-ratio weighted sum of the per-channel spectra, with the
        # same clamping as dNdx().
        total = 0.0
        for ch in channels:
            fluxval = loadspec_by_channel[ch](x)
            if x > 1 or fluxval < 0:
                fluxval = 0.0
            total += branching[ch] * fluxval
        return total

    vdNdx = []
    x2vdNdx = []
    dNde = []
    e2dNde = []
    evals = []
    xvals2 = []
    # BUGFIX: string comparisons below used `is` in the original; use `==`.
    if option == 'e':
        sigmavboost = sigmav * boost  # only used to build the file name
        table_path = "tabla" + str(mass) + str(finalstate) + str(sigmavboost) + ".txt"
        table_lines = []
    logxvalsnew = np.linspace(-8.9, 0, 10000)
    xvalsnew = 10**logxvalsnew
    for x in xvalsnew:
        xvals2.append(x)
        if finalstate == 'new':
            aux = dNdx_new(x, di)
        else:
            aux = dNdx(x)
        vdNdx.append(aux)
        x2vdNdx.append(x**2 * aux)
        # dN/dE with E = x * mass, including J-factor and (boosted) <sigma v>.
        dNdeaux = aux * Jfactor * GeV**2 * sigmav * boost / (8 * np.pi * (mass * GeV)**3)
        dNde.append(dNdeaux)
        e2dNde.append((1 / erg) * x**2 * aux * Jfactor * GeV**2 * sigmav * boost / (8 * np.pi * mass * GeV))
        evals.append(x * mass * GeV)
        if option == 'e' and dNdeaux != 0:
            # Table columns: E [MeV] (mass in GeV * 1e3), dN/dE per MeV.
            table_lines.append(str(x * mass * 10**3) + " " + str(dNdeaux / (10**6)) + "\n")
    if option == 'e':
        # Pad the table with effectively-zero flux just above the spectrum
        # endpoint (the loop ends at x = 1, i.e. E = mass); `x` holds the
        # last loop value here, as in the original.
        table_lines.append(str(x * mass * 10**3 + 1) + " " + "1e-99" + "\n")
        table_lines.append(str(x * mass * 10**3 + 5) + " " + "1e-99" + "\n")
        table_lines.append(str(x * mass * 10**3 + 10) + " " + "1e-99" + "\n")
        # Context manager guarantees the file is closed even on error.
        with open(table_path, "w") as table_file:
            table_file.writelines(table_lines)
        return (evals, dNde)
    if option == 'e2':
        return (evals, e2dNde)
    if option == 'x':
        return (xvals2, vdNdx)
    if option == 'x2':
        return (xvals2, x2vdNdx)
    print('Option ' + str(option) + ' not supported')
# ---------------------------------------------------------------------------
# Diagnostic figure: branon ('new') DM spectra for a range of masses, in four
# panels: dN/dx, x^2 dN/dx, dN/dE and E^2 dN/dE.
# NOTE(review): each getDMspectrum('e', ...) call also writes a "tabla..."
# file as a side effect; call order matters.
# ---------------------------------------------------------------------------
fig=pl.figure(figsize=(15,10))
# Panel 1 (top left): dN/dx vs x.
ax=fig.add_subplot(221)
ax.set_yscale('log')
ax.set_xscale('log')
ax.set_xlim(1e-7, 1)
ax.set_ylim(1e-7,1e6)
#ax.set_xlim(1e-5, 1)
#ax.set_ylim(1e-2,1e3)
ax.set_xlabel('$x$')
ax.set_ylabel('$dN/dx$')
(Edm,Fdm) = getDMspectrum('x','new',50)
ax.plot(Edm, Fdm, label="m = 0.05 TeV", color='red', linewidth=1)
(Edm,Fdm) = getDMspectrum('x','new',100)
ax.plot(Edm, Fdm, label="m = 0.1 TeV", color='blue', linewidth=1)
(Edm,Fdm) = getDMspectrum('x','new',150)
ax.plot(Edm, Fdm, label="m = 0.15 TeV", color='green', linewidth=1)
(Edm,Fdm) = getDMspectrum('x','new',250)
ax.plot(Edm, Fdm, label="m = 0.25 TeV", color='pink', linewidth=1)
(Edm,Fdm) = getDMspectrum('x','new',500)
ax.plot(Edm, Fdm, label="m = 0.5 TeV", color='#00CCFF', linewidth=1)
(Edm,Fdm) = getDMspectrum('x','new',1000)
ax.plot(Edm, Fdm, label="m = 1 TeV", color='#FF66FF', linewidth=1)
(Edm,Fdm) = getDMspectrum('x','new',5000)
ax.plot(Edm, Fdm, label="m = 5 TeV", color='#CC0066', linewidth=1)
(Edm,Fdm) = getDMspectrum('x','new',10000)
ax.plot(Edm, Fdm, label="m = 10 TeV", color='orange', linewidth=1)
(Edm,Fdm) = getDMspectrum('x','new',50000)
ax.plot(Edm, Fdm, label="m = 50 TeV", color='purple', linewidth=1)
plt.legend(loc=3, prop={'size':12})
# Panel 2 (bottom left): x^2 dN/dx vs x.
ax=fig.add_subplot(223)
ax.set_yscale('log')
ax.set_xscale('log')
ax.set_xlim(1e-7, 1)
ax.set_ylim(1e-7,1)
ax.set_xlabel('$x$')
ax.set_ylabel('$x^2 dN/dx$')
#(Edm,Fdm) = getDMspectrum('x2','b',10000)
#ax.plot(Edm, Fdm, label="DM", color='pink', linewidth=1)
(Edm,Fdm) = getDMspectrum('x2','new',50)
ax.plot(Edm, Fdm, label="m = 0.05 TeV", color='red', linewidth=1)
(Edm,Fdm) = getDMspectrum('x2','new',100)
ax.plot(Edm, Fdm, label="m = 0.1 TeV", color='blue', linewidth=1)
(Edm,Fdm) = getDMspectrum('x2','new',150)
ax.plot(Edm, Fdm, label="m = 0.15 TeV", color='green', linewidth=1)
(Edm,Fdm) = getDMspectrum('x2','new',250)
ax.plot(Edm, Fdm, label="m = 0.25 TeV", color='pink', linewidth=1)
(Edm,Fdm) = getDMspectrum('x2','new',500)
ax.plot(Edm, Fdm, label="m = 0.5 TeV", color='#00CCFF', linewidth=1)
(Edm,Fdm) = getDMspectrum('x2','new',1000)
ax.plot(Edm, Fdm, label="m = 1 TeV", color='#FF66FF', linewidth=1)
(Edm,Fdm) = getDMspectrum('x2','new',5000)
ax.plot(Edm, Fdm, label="m = 5 TeV", color='#CC0066', linewidth=1)
(Edm,Fdm) = getDMspectrum('x2','new',10000)
ax.plot(Edm, Fdm, label="m = 10 TeV", color='orange', linewidth=1)
(Edm,Fdm) = getDMspectrum('x2','new',50000)
ax.plot(Edm, Fdm, label="m = 50 TeV", color='purple', linewidth=1)
plt.legend(loc=2, prop={'size':12})
# Panel 3 (top right): dN/dE vs E [TeV].
ax=fig.add_subplot(222)
ax.set_yscale('log')
ax.set_xscale('log')
ax.set_xlim(2e-4, 60)
ax.set_ylim(5e-22,1e-5)
ax.set_xlabel('$E$ [TeV]')
ax.set_ylabel('$dN/dE$ [cm$^{-2}$ s$^{-1}$ TeV$^{-1}$]')
#(Edm,Fdm) = getDMspectrum('e','b',10)
#ax.plot(Edm, Fdm, label="DM", color='red', linewidth=1)
(Edm,Fdm) = getDMspectrum('e','new',50)
ax.plot(Edm, Fdm, label="m = 0.05 TeV", color='red', linewidth=1)
(Edm,Fdm) = getDMspectrum('e','new',100)
ax.plot(Edm, Fdm, label="m = 0.1 TeV", color='blue', linewidth=1)
(Edm,Fdm) = getDMspectrum('e','new',150)
ax.plot(Edm, Fdm, label="m = 0.15 TeV", color='green', linewidth=1)
(Edm,Fdm) = getDMspectrum('e','new',250)
ax.plot(Edm, Fdm, label="m = 0.25 TeV", color='pink', linewidth=1)
(Edm,Fdm) = getDMspectrum('e','new',500)
ax.plot(Edm, Fdm, label="m = 0.5 TeV", color='#00CCFF', linewidth=1)
(Edm,Fdm) = getDMspectrum('e','new',1000)
ax.plot(Edm, Fdm, label="m = 1 TeV", color='#FF66FF', linewidth=1)
# The next three calls are made only for their side effect (writing the
# boosted "tabla..." files); the returned spectra are overwritten below.
(Edm,Fdm) = getDMspectrum('e','b',5000,boost=4e4) #######
(Edm,Fdm) = getDMspectrum('e','Tau',5000,boost=2e4) #######
(Edm,Fdm) = getDMspectrum('e','W',5000,boost=4e4) #######
(Edm,Fdm) = getDMspectrum('e','new',5000)
ax.plot(Edm, Fdm, label="m = 5 TeV", color='#CC0066', linewidth=1)
(Edm,Fdm) = getDMspectrum('e','new',10000)
ax.plot(Edm, Fdm, label="m = 10 TeV", color='orange', linewidth=1)
(Edm,Fdm) = getDMspectrum('e','new',50000)
ax.plot(Edm, Fdm, label="m = 50 TeV", color='purple', linewidth=1)
plt.legend(loc=3, prop={'size':12})
# Panel 4 (bottom right): E^2 dN/dE vs E [TeV].
ax=fig.add_subplot(224)
ax.set_yscale('log')
ax.set_xscale('log')
ax.set_xlim(2e-4, 60)
ax.set_ylim(5e-22,1e-12)
ax.set_xlabel('$E$ [TeV]')
ax.set_ylabel('$E^2 dN/dE$ [erg cm$^{-2}$ s$^{-1}$]')
#(Edm,Fdm) = getDMspectrum('e2','b',10)
#ax.plot(Edm, Fdm, label="DM", color='red', linewidth=1)
(Edm,Fdm) = getDMspectrum('e2','new',50)
ax.plot(Edm, Fdm, label="m = 0.05 TeV", color='red', linewidth=1)
(Edm,Fdm) = getDMspectrum('e2','new',100)
ax.plot(Edm, Fdm, label="m = 0.1 TeV", color='blue', linewidth=1)
(Edm,Fdm) = getDMspectrum('e2','new',150)
ax.plot(Edm, Fdm, label="m = 0.15 TeV", color='green', linewidth=1)
(Edm,Fdm) = getDMspectrum('e2','new',250)
ax.plot(Edm, Fdm, label="m = 0.25 TeV", color='pink', linewidth=1)
(Edm,Fdm) = getDMspectrum('e2','new',500)
ax.plot(Edm, Fdm, label="m = 0.5 TeV", color='#00CCFF', linewidth=1)
(Edm,Fdm) = getDMspectrum('e2','new',1000)
ax.plot(Edm, Fdm, label="m = 1 TeV", color='#FF66FF', linewidth=1)
(Edm,Fdm) = getDMspectrum('e2','new',5000)
ax.plot(Edm, Fdm, label="m = 5 TeV", color='#CC0066', linewidth=1)
(Edm,Fdm) = getDMspectrum('e2','new',10000)
ax.plot(Edm, Fdm, label="m = 10 TeV", color='orange', linewidth=1)
(Edm,Fdm) = getDMspectrum('e2','new',50000)
ax.plot(Edm, Fdm, label="m = 50 TeV", color='purple', linewidth=1)
plt.legend(loc=3, prop={'size':12})
#plt.show()
# ---------------------------------------------------------------------------
# Sensitivity figure: instrument sensitivity curves (MAGIC, Fermi-LAT, CTA),
# Crab reference bands, measured DM spectral points from FITS files, and the
# model DM spectra with and without boost factors.
# ---------------------------------------------------------------------------
fig=pl.figure()
ax=fig.add_subplot(111)
legends = []
ctain=True  # toggle: also draw the CTA South curve
(Emagic, Fmagic) = getMAGIC(magic_file)
ax.plot(Emagic, Fmagic, label="MAGIC", color='#A20025', linewidth=0.7)
#(Evts, Fvts) = getVERITAS(vts_file,index=1)
#vtsplot, = ax.plot(Evts, Fvts, label="VERITAS (50 hr)", color='red', linewidth=2)
#(Ehess, Fhess) = getHESS(hess_file_combined)
#ax.plot(Ehess, Fhess, label="HESS", color='#0050EF')
#(Ehess, Fhess) = getHESS(hess_file_stereo)
#ax.plot(Ehess, Fhess, label="HESS", color="#1BA1E2")
#(Ehess, Fhess) = getHESSEnvelope(hess_file_envelope)
#ax.plot(Ehess, Fhess, label="H.E.S.S.", color="#F0A30A",linewidth=4)
#(Ehawc, Fhawc) = getHAWCFermi(hawc_1yr_file)
#hawcplot1, = ax.plot(Ehawc, Fhawc, label="HAWC-300 - 1yr", color='#008A00', linewidth=2)
#(Ehawc, Fhawc) = getHAWCFermi(hawc_5yr_file)
#hawcplot2, = ax.plot(Ehawc, Fhawc, label="HAWC-300 - 5yr", color='#A4C400', linewidth=2)
#(Efermi, Ffermi) = getHAWCFermi(fermi_b0_file, Escale=MeV)
#ax.plot(Efermi, Ffermi, label="Fermi - b0", color='#bbbbbb')
#(Efermi, Ffermi) = getHAWCFermi(fermi_b30_file, Escale=MeV)
#ax.plot(Efermi, Ffermi, label="Fermi-LAT ($b = 30^{\circ})$ ", color='#00ABA9')
(Efermi, Ffermi) = getHAWCFermi(fermi_b90_file, Escale=MeV)
fermiplot, = ax.plot(Efermi, Ffermi, label="LAT (Pass8) - 10yr", color="#1BA1E2", linewidth=0.7)
#(Ehs, Fhs) = getHAWCFermi(hs_file, Escale=TeV)
#ax.plot(Ehs, Fhs, label="HiSCORE", color="#AA00FF", linestyle='-',linewidth=0.7)
#(Ehs, Fhs) = getHAWCFermi(lhaaso_file, Escale=TeV)
#ax.plot(Ehs, Fhs, label="LHAASO", color="#0050EF", linestyle='-',linewidth=0.7)
(Ecta, Fcta) = getCTA(cta_n_file)
ax.plot(Ecta, Fcta, label="CTA (50h, North)", linestyle='-', color='goldenrod',linewidth=1)
if ctain:
    (Ecta, Fcta) = getCTA(cta_s_file)
    ctaplot, = ax.plot(Ecta, Fcta, label="CTA (50h, South)", linestyle='-', color='#825A2C',linewidth=1)
#### Fermi IGRB ####
#figrb = np.genfromtxt(pathforaux+"/igrb.dat")
#Emean = 1000*(figrb[:,0] + figrb[:,1])/2.
#Ewidth = (figrb[:,1]-figrb[:,0]) * 1000
#Figrb = [4 * np.pi * (F/Ew) * scale * 1.60218e-6 * 1e-3 * E**2 for (F, scale, E, Ew) in zip(figrb[:,2], figrb[:,5], Emean, Ewidth)]
#Figrb_err = [4 * np.pi * (F_err/Ew) * 1.60218e-6 * 1e-3 * scale * E**2 for (F_err, scale, E, Ew) in zip(figrb[:,3], figrb[:,5], Emean, Ewidth)]
#Figrb_err[-1] = 3e-14
#Flims = figrb[:,3] < 1e-3
#DNC ax.errorbar(Emean/1e6, Figrb, yerr=Figrb_err, xerr=Ewidth/2e6, marker='o',linestyle='',ecolor='red',color='red',mec='red',ms=3,uplims=Flims,capsize=3, linewidth=1)
#DNC ax.fill_between(Emean/1e6, [mean - err for (mean,err) in zip(Figrb, Figrb_err)], [mean + err for (mean,err) in zip(Figrb, Figrb_err)], zorder=0, alpha=0.5, color="#cccccc")
#ax.set_yscale('log')
#ax.set_xscale('log')
#ax.grid('on')
#if ctain:
#    first_legend = plt.legend(handles=[fermiplot,vtsplot,hawcplot1,hawcplot2,ctaplot], loc='upper left', bbox_to_anchor=(1.01, 1),fontsize=12)
#else:
#    first_legend = plt.legend(handles=[fermiplot,vtsplot,hawcplot1,hawcplot2], loc='upper left', bbox_to_anchor=(1.01, 1),fontsize=12)
#ax.add_artist(first_legend)
#legends.append(first_legend)
#ax.legend(bbox_to_anchor=(1.05, 1), ncol=1, loc=2, fontsize=14)
#ax.plot(hess_file[:,0], hess_file[:,1])
#ax.set_yscale('log')
#ax.set_xscale('log')
#ax.set_xlim(2e-4, 40)
#ax.set_ylim(4e-14,1e-10)
#ax.set_ylim(5e-14,1e-9)
#ax.set_xlabel('$E$ [TeV]')
#ax.set_ylabel('$E^2 d\Phi/dE$ [erg cm$^{-2}$ s$^{-1}$]')
#plotCrabSpectrum(ax, scale=1./(erg*GeV), fill=True, lwf=0.3, fcolor='#009933', alpha=0.05)
#plotCrabSpectrum(ax, scale=1e-1/(erg*GeV), fill=True, lwf=0.3, fcolor='#009933', alpha=0.05)
#plotCrabSpectrum(ax, scale=1e-2/(erg*GeV), fill=True, lwf=0.3, fcolor='#009933', alpha=0.05)
#plotCrabSpectrum(ax, scale=1e-3/(erg*GeV), fill=True, lwf=0.3, fcolor='#009933', alpha=0.05)
# Global fit nu
# Candidate neutrino global-fit power-law parameters; the second assignment
# pair overrides the first (only gamma=2.3 / A0=1.5e-18 are effective).
gamma=2.5
A0= (2/3.) * 6.7e-18 * fullsky / (erg*GeV)
Enorm = 100 * TeV
gamma=2.3
A0=1.5e-18 * fullsky / (GeV*erg)
Enorm = 100 * TeV
#gamma_wb = 2
#A_wb = 1e-8 * fullsky / GeV*erg
#Enorm_wb = 1 * GeV
#gamma=2.0
#A0=1.5e-18 * fullsky / (erg*GeV)
#Enorm = 100 * TeV
#plotSpectrum(ax, color='#009933',redshift=0, scale=1/Ns, gamma=gamma, A=A0, Enorm=Enorm, fcolor='#009933', alpha=0.1)
#plotSpectrum(ax, color='#009933',redshift=0.5, scale=1/Ns, gamma=gamma_wb, A=A_wb, Enorm=Enorm_wb, alpha=0.1, fcolor='#009933')
#plotSpectrum(ax, color='#666666',redshift=0.5, scale=1/Ns, gamma=gamma, A=A0, Enorm=Enorm)
#plotSpectrum(ax, color='#999999',redshift=1, scale=1/Ns, gamma=gamma, A=A0, Enorm=Enorm)
#plotSpectrum(ax, color='#000000',redshift=0, scale=1/Ns, gamma=gamma, A=A0, Enorm=Enorm)
#plotSpectrum(ax, color='#333333',redshift=0.1, scale=1/Ns, gamma=gamma, A=A0, Enorm=Enorm)
#plotSpectrum(ax, color='#666666',redshift=0.5, scale=1/Ns, gamma=gamma, A=A0, Enorm=Enorm)
#plotSpectrum(ax, color='#999999',redshift=1, scale=1/Ns, gamma=gamma, A=A0, Enorm=Enorm)
# Styling for the Crab reference bands and their annotations.
myalpha=0.015
myfcolor='blue'
mycolor='grey'
annotE=0.5*GeV  # energy at which the Crab-fraction labels are placed
myfontsize=10
myrot=30
# `if 1:` blocks act as manual toggles for each Crab-fraction band.
if 1:
    plotCrabSpectrumBroad(ax,color=mycolor,scale=1/erg,alpha=myalpha,fcolor=myfcolor)
    ax.annotate('Crab', xy=(annotE,2*annotE**2*CrabSpectrumBroad(annotE)), xycoords='data',
                horizontalalignment='center', verticalalignment='center',fontsize=myfontsize,rotation=myrot)
if 1:
    plotCrabSpectrumBroad(ax,color=mycolor,scale=1e-1/erg,alpha=myalpha,fcolor=myfcolor)
    ax.annotate('10% Crab', xy=(annotE,2e-1*annotE**2*CrabSpectrumBroad(annotE)), xycoords='data',
                horizontalalignment='center', verticalalignment='center',fontsize=myfontsize,rotation=myrot)
if 1:
    plotCrabSpectrumBroad(ax,color=mycolor,scale=1e-2/erg,alpha=myalpha,fcolor=myfcolor)
    ax.annotate('1% Crab', xy=(annotE,2e-2*annotE**2*CrabSpectrumBroad(annotE)), xycoords='data',
                horizontalalignment='center', verticalalignment='center',fontsize=myfontsize,rotation=myrot)
if 1:
    plotCrabSpectrumBroad(ax,color=mycolor,scale=1e-3/erg,alpha=myalpha,fcolor=myfcolor)
    ax.annotate('0.1% Crab', xy=(annotE,2e-3*annotE**2*CrabSpectrumBroad(annotE)), xycoords='data',
                horizontalalignment='center', verticalalignment='center',fontsize=myfontsize,rotation=myrot)
# Overlay measured DM spectral points read from FITS files.
if 1:
    hdulist1 = pyfits.open('spectrumdmbboost.fits')
    hdulist1.info()
    datos = hdulist1[1].data
    energ = datos['Energy']
    ed_energ = datos['ed_Energy']
    eu_energ = datos['eu_Energy']
    flux = datos['Flux']
    e_flux = datos['e_Flux']
    plt.errorbar(energ, flux, xerr=(ed_energ, eu_energ), yerr = e_flux, color = 'red', marker = 'o', label = r'spectrum $b\bar b$', fmt = '', zorder = 0)
    hdulist1.close()
    hdulist2 = pyfits.open('spectrumdmWboost.fits')
    hdulist2.info()
    datos = hdulist2[1].data
    energ = datos['Energy']
    ed_energ = datos['ed_Energy']
    eu_energ = datos['eu_Energy']
    flux = datos['Flux']
    e_flux = datos['e_Flux']
    plt.errorbar(energ, flux, xerr=(ed_energ, eu_energ), yerr = e_flux, color = 'green', marker = 'o', label = 'spectrum $W^+ W^-$', fmt = '', zorder = 0)
    hdulist2.close()
# Model DM spectra: unboosted and boosted 5 TeV candidates per channel.
mylinestyle='--'
#mymass=3
myboost=1
(Edm,Fdm) = getDMspectrum('e2','b',5e3,boost=myboost)
dmplot1 = ax.plot(Edm, Fdm, label=r"$m_\chi$ = "+str(5)+r" TeV ($b\bar b$, B$_f$=1e"+str("{:.1f}".format(np.log10(myboost)))+")", color='red', linewidth=1,linestyle=mylinestyle)
(Edm,Fdm) = getDMspectrum('e2','Tau',5e3,boost=myboost)
dmplot2 = ax.plot(Edm, Fdm, label=r"$m_\chi$ = "+str(5)+r" TeV ($\tau^- \tau^+$, B$_f$=1e"+str("{:.1f}".format(np.log10(myboost)))+")", color='blue', linewidth=1,linestyle=mylinestyle)
(Edm,Fdm) = getDMspectrum('e2','W',5e3,boost=myboost)
dmplot3 = ax.plot(Edm, Fdm, label=r"$m_\chi$ = "+str(5)+r" TeV ($W^+ W^-$, B$_f$=1e"+str("{:.1f}".format(np.log10(myboost)))+")", color='green', linewidth=1,linestyle=mylinestyle)
myboost2 = 2e4
myboost3 = 4e4
# NOTE(review): the b-channel curve below is computed with myboost3 but
# labelled with myboost2 -- confirm which boost is intended.
(Edm,Fdm) = getDMspectrum('e2','b',5e3,boost=myboost3)
dmplot4 = ax.plot(Edm, Fdm, label=r"$m_\chi$ = "+str(5)+r" TeV ($b\bar b$, B$_f$=1e"+str("{:.1f}".format(np.log10(myboost2)))+")", color='pink', linewidth=1,linestyle=mylinestyle)
(Edm,Fdm) = getDMspectrum('e2','Tau',5e3,boost=myboost2)
dmplot5 = ax.plot(Edm, Fdm, label=r"$m_\chi$ = "+str(5)+r" TeV ($\tau^- \tau^+$, B$_f$=1e"+str("{:.1f}".format(np.log10(myboost2)))+")", color='orange', linewidth=1,linestyle=mylinestyle)
(Edm,Fdm) = getDMspectrum('e2','W',5e3,boost=myboost3)
dmplot6 = ax.plot(Edm, Fdm, label=r"$m_\chi$ = "+str(5)+r" TeV ($W^+ W^-$, B$_f$=1e"+str("{:.1f}".format(np.log10(myboost3)))+")", color='purple', linewidth=1,linestyle=mylinestyle)
plt.legend(loc=4, prop={'size':9}) #aportacion mia
dmplots= []
dmplots.append(dmplot1)
dmplots.append(dmplot2)
dmplots.append(dmplot3)
dmplots.append(dmplot4)
dmplots.append(dmplot5)
dmplots.append(dmplot6)
ax.set_xlim(1e-4, 1e3)
ax.set_ylim(1e-16,1e-10)
ax.set_xlabel('$E$ [TeV]')
ax.set_ylabel('$E^2 dN/dE$ [erg cm$^{-2}$ s$^{-1}$]')
#second_legend = plt.legend(handles=dmplots, ncol=1, loc='upper left',bbox_to_anchor=(1.01, .25), fontsize=12)
#ax.add_artist(second_legend)
#legends.append(second_legend)
#dummy = []
#aux_legend = plt.legend(handles=dummy,bbox_to_anchor=(1.5, .2),frameon=False)
#legends.append(aux_legend)
#fig.savefig(pathforfigs+'ic_sensitivities_cta_dm_'+str(mymass)+'_'+str(myboost)+'.pdf',bbox_extra_artists=legends,bbox_inches='tight')
plt.show() #aportacion mia
# `legends` is empty here (all legend handles above are commented out).
fig.savefig(pathforfigs+'ic_sensitivities_prueba.pdf',bbox_extra_artists=legends,bbox_inches='tight')
| gpl-3.0 |
cchurch/ansible | lib/ansible/modules/packaging/os/rhsm_repository.py | 20 | 9171 | #!/usr/bin/python
# Copyright: (c) 2017, Giovanni Sciortino (@giovannisciortino)
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
                    'status': ['preview'],
                    'supported_by': 'community'}

# BUGFIX: the documented default for `state` was "present" (and it was marked
# required) although the argument_spec below defaults it to 'enabled' and does
# not require it; also fixed the "not not" typo in the purge description.
DOCUMENTATION = '''
---
module: rhsm_repository
short_description: Manage RHSM repositories using the subscription-manager command
description:
  - Manage (Enable/Disable) RHSM repositories to the Red Hat Subscription
    Management entitlement platform using the C(subscription-manager) command.
version_added: '2.5'
author: Giovanni Sciortino (@giovannisciortino)
notes:
  - In order to manage RHSM repositories the system must be already registered
    to RHSM manually or using the Ansible C(redhat_subscription) module.
requirements:
  - subscription-manager
options:
  state:
    description:
      - If state is equal to present or disabled, indicates the desired
        repository state.
    choices: [present, enabled, absent, disabled]
    default: "enabled"
  name:
    description:
      - The ID of repositories to enable.
      - To operate on several repositories this can accept a comma separated
        list or a YAML list.
    required: True
  purge:
    description:
      - Disable all currently enabled repositories that are not specified in C(name).
        Only set this to C(True) if passing in a list of repositories to the C(name) field.
        Using this with C(loop) will most likely not have the desired result.
    type: bool
    default: False
    version_added: "2.8"
'''

EXAMPLES = '''
- name: Enable a RHSM repository
  rhsm_repository:
    name: rhel-7-server-rpms

- name: Disable all RHSM repositories
  rhsm_repository:
    name: '*'
    state: disabled

- name: Enable all repositories starting with rhel-6-server
  rhsm_repository:
    name: rhel-6-server*
    state: enabled

- name: Disable all repositories except rhel-7-server-rpms
  rhsm_repository:
    name: rhel-7-server-rpms
    purge: True
'''

RETURN = '''
repositories:
  description:
    - The list of RHSM repositories with their states.
    - When this module is used to change the repository states, this list contains the updated states after the changes.
  returned: success
  type: list
'''
import re
import os
from fnmatch import fnmatch
from copy import deepcopy
from ansible.module_utils.basic import AnsibleModule
def run_subscription_manager(module, arguments):
    """Run subscription-manager with `arguments` and handle common failures.

    Forces a C locale so the output is stable English for parsing, and turns
    the usual failure modes (missing binary, non-root invocation, no
    repositories, generic rc==1 failure) into module.fail_json calls.
    Returns the (rc, out, err) triple on success.
    """
    # Execute subscription-manager with arguments and manage common errors
    rhsm_bin = module.get_bin_path('subscription-manager')
    if not rhsm_bin:
        module.fail_json(msg='The executable file subscription-manager was not found in PATH')
    # C locale keeps subscription-manager output parseable regardless of the
    # host's language settings.
    lang_env = dict(LANG='C', LC_ALL='C', LC_MESSAGES='C')
    rc, out, err = module.run_command("%s %s" % (rhsm_bin, " ".join(arguments)), environ_update=lang_env)
    # rc==1 with the password prompt (or a non-root uid) means we lack root.
    if rc == 1 and (err == 'The password you typed is invalid.\nPlease try again.\n' or os.getuid() != 0):
        module.fail_json(msg='The executable file subscription-manager must be run using root privileges')
    elif rc == 0 and out == 'This system has no repositories available through subscriptions.\n':
        module.fail_json(msg='This system has no repositories available through subscriptions')
    elif rc == 1:
        module.fail_json(msg='subscription-manager failed with the following error: %s' % err)
    else:
        return rc, out, err
def get_repository_list(module, list_parameter):
    """Return RHSM repositories as a list of dicts.

    :param module: AnsibleModule instance used to run subscription-manager
    :param list_parameter: one of 'list', 'list_enabled', 'list_disabled'
    :returns: list of {'id', 'name', 'url', 'enabled'} dicts
    :raises ValueError: if list_parameter is not a supported value
    """
    # Generate RHSM repository list and return a list of dict
    if list_parameter == 'list_enabled':
        rhsm_arguments = ['repos', '--list-enabled']
    elif list_parameter == 'list_disabled':
        rhsm_arguments = ['repos', '--list-disabled']
    elif list_parameter == 'list':
        rhsm_arguments = ['repos', '--list']
    else:
        # BUGFIX: the original left rhsm_arguments unbound here, which would
        # surface later as an obscure NameError.
        raise ValueError('Unsupported list_parameter value: %r' % (list_parameter,))
    rc, out, err = run_subscription_manager(module, rhsm_arguments)

    # Banner lines in the subscription-manager output that carry no data.
    skip_lines = [
        '+----------------------------------------------------------+',
        '    Available Repositories in /etc/yum.repos.d/redhat.repo'
    ]
    repo_id_re = re.compile(r'Repo ID:\s+(.*)')
    repo_name_re = re.compile(r'Repo Name:\s+(.*)')
    repo_url_re = re.compile(r'Repo URL:\s+(.*)')
    repo_enabled_re = re.compile(r'Enabled:\s+(.*)')

    repo_id = ''
    repo_name = ''
    repo_url = ''
    repo_enabled = ''

    repo_result = []
    for line in out.splitlines():
        if line == '' or line in skip_lines:
            continue

        repo_id_match = repo_id_re.match(line)
        if repo_id_match:
            repo_id = repo_id_match.group(1)
            continue

        repo_name_match = repo_name_re.match(line)
        if repo_name_match:
            repo_name = repo_name_match.group(1)
            continue

        repo_url_match = repo_url_re.match(line)
        if repo_url_match:
            repo_url = repo_url_match.group(1)
            continue

        # 'Enabled:' is the last field of each record, so a complete repo
        # dict is emitted when it is seen.
        # NOTE(review): fields are not reset between records, so a record
        # missing a field would inherit the previous record's value --
        # confirm subscription-manager always prints all four fields.
        repo_enabled_match = repo_enabled_re.match(line)
        if repo_enabled_match:
            repo_enabled = repo_enabled_match.group(1)

            repo = {
                "id": repo_id,
                "name": repo_name,
                "url": repo_url,
                "enabled": True if repo_enabled == '1' else False
            }

            repo_result.append(repo)

    return repo_result
def repository_modify(module, state, name, purge=False):
    """Enable/disable RHSM repositories so they match the requested state.

    :param module: AnsibleModule instance
    :param state: 'enabled'/'present' to enable, 'disabled'/'absent' to disable
    :param name: iterable of repository ids (fnmatch globs accepted)
    :param purge: also disable enabled repositories not listed in `name`
    Exits via module.exit_json (or fail_json for an unknown repository id).
    """
    name = set(name)
    current_repo_list = get_repository_list(module, 'list')
    updated_repo_list = deepcopy(current_repo_list)
    matched_existing_repo = {}
    for repoid in name:
        matched_existing_repo[repoid] = []
        for idx, repo in enumerate(current_repo_list):
            if fnmatch(repo['id'], repoid):
                matched_existing_repo[repoid].append(repo)
                # Update current_repo_list to return it as result variable.
                # BUGFIX: also treat 'present' as enabling (the original only
                # checked for 'enabled', so the returned state was wrong for
                # state=present/absent).
                updated_repo_list[idx]['enabled'] = state in ('enabled', 'present')

    changed = False
    results = []
    diff_before = ""
    diff_after = ""
    rhsm_arguments = ['repos']
    for repoid in matched_existing_repo:
        if len(matched_existing_repo[repoid]) == 0:
            results.append("%s is not a valid repository ID" % repoid)
            module.fail_json(results=results, msg="%s is not a valid repository ID" % repoid)
        for repo in matched_existing_repo[repoid]:
            if state in ['disabled', 'absent']:
                if repo['enabled']:
                    changed = True
                    diff_before += "Repository '%s' is enabled for this system\n" % repo['id']
                    diff_after += "Repository '%s' is disabled for this system\n" % repo['id']
                    results.append("Repository '%s' is disabled for this system" % repo['id'])
                    rhsm_arguments += ['--disable', repo['id']]
            elif state in ['enabled', 'present']:
                if not repo['enabled']:
                    changed = True
                    diff_before += "Repository '%s' is disabled for this system\n" % repo['id']
                    diff_after += "Repository '%s' is enabled for this system\n" % repo['id']
                    results.append("Repository '%s' is enabled for this system" % repo['id'])
                    rhsm_arguments += ['--enable', repo['id']]

    # Disable all enabled repos on the system that are not in the task and not
    # marked as disabled by the task
    if purge:
        enabled_repo_ids = set(repo['id'] for repo in updated_repo_list if repo['enabled'])
        matched_repoids_set = set(matched_existing_repo.keys())
        # NOTE(review): matched_repoids_set holds the requested patterns, not
        # the matched repository ids, so purge combined with glob patterns may
        # disable repositories that were just matched -- confirm intent before
        # relying on globs together with purge.
        difference = enabled_repo_ids.difference(matched_repoids_set)
        if len(difference) > 0:
            for repoid in difference:
                changed = True
                # BUGFIX: the original called str.join() here and discarded
                # the result, so purge actions never appeared in the diff;
                # also fixed the doubled quote in the 'before' message.
                diff_before += "Repository '{repoid}' is enabled for this system\n".format(repoid=repoid)
                diff_after += "Repository '{repoid}' is disabled for this system\n".format(repoid=repoid)
                results.append("Repository '{repoid}' is disabled for this system".format(repoid=repoid))
                rhsm_arguments.extend(['--disable', repoid])

    diff = {'before': diff_before,
            'after': diff_after,
            'before_header': "RHSM repositories",
            'after_header': "RHSM repositories"}

    if not module.check_mode:
        rc, out, err = run_subscription_manager(module, rhsm_arguments)
        results = out.splitlines()
    module.exit_json(results=results, changed=changed, repositories=updated_repo_list, diff=diff)
def main():
    """Module entry point: parse arguments and apply the repository changes."""
    argument_spec = dict(
        name=dict(type='list', required=True),
        state=dict(choices=['enabled', 'disabled', 'present', 'absent'], default='enabled'),
        purge=dict(type='bool', default=False),
    )
    module = AnsibleModule(
        argument_spec=argument_spec,
        supports_check_mode=True,
    )
    params = module.params
    repository_modify(module, params['state'], params['name'], params['purge'])


if __name__ == '__main__':
    main()
| gpl-3.0 |
nthiep/global-ssh-server | lib/python2.7/site-packages/django/contrib/admindocs/urls.py | 317 | 1092 | from django.conf.urls import patterns, url
from django.contrib.admindocs import views
# URL routes for the admindocs browsing views.  Built with the pre-Django-1.8
# patterns() helper; every entry is named so templates can reverse() it.
# NOTE(review): the model/template regexes are plain (non-raw) strings -- the
# '\.' escape relies on Python passing unknown escapes through unchanged;
# confirm before converting to raw strings.
urlpatterns = patterns('',
    url('^$',
        views.doc_index,
        name='django-admindocs-docroot'
    ),
    url('^bookmarklets/$',
        views.bookmarklets,
        name='django-admindocs-bookmarklets'
    ),
    url('^tags/$',
        views.template_tag_index,
        name='django-admindocs-tags'
    ),
    url('^filters/$',
        views.template_filter_index,
        name='django-admindocs-filters'
    ),
    url('^views/$',
        views.view_index,
        name='django-admindocs-views-index'
    ),
    url('^views/(?P<view>[^/]+)/$',
        views.view_detail,
        name='django-admindocs-views-detail'
    ),
    url('^models/$',
        views.model_index,
        name='django-admindocs-models-index'
    ),
    url('^models/(?P<app_label>[^\.]+)\.(?P<model_name>[^/]+)/$',
        views.model_detail,
        name='django-admindocs-models-detail'
    ),
    url('^templates/(?P<template>.*)/$',
        views.template_detail,
        name='django-admindocs-templates'
    ),
)
| agpl-3.0 |
SnabbCo/neutron | neutron/plugins/vmware/dbexts/db.py | 7 | 7045 | # Copyright 2012 VMware, Inc.
#
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from sqlalchemy.orm import exc
import neutron.db.api as db
from neutron.openstack.common.db import exception as db_exc
from neutron.openstack.common import excutils
from neutron.openstack.common import log as logging
from neutron.plugins.vmware.dbexts import models
from neutron.plugins.vmware.dbexts import networkgw_db
LOG = logging.getLogger(__name__)
def get_network_bindings(session, network_id):
    """Return every transport-zone binding recorded for a network."""
    session = session or db.get_session()
    query = session.query(models.TzNetworkBinding)
    return query.filter_by(network_id=network_id).all()


def get_network_bindings_by_vlanid(session, vlan_id):
    """Return every transport-zone binding that uses the given VLAN id."""
    session = session or db.get_session()
    query = session.query(models.TzNetworkBinding)
    return query.filter_by(vlan_id=vlan_id).all()


def add_network_binding(session, network_id, binding_type, phy_uuid, vlan_id):
    """Persist a new transport-zone binding for a network and return it."""
    binding = models.TzNetworkBinding(network_id, binding_type,
                                      phy_uuid, vlan_id)
    with session.begin(subtransactions=True):
        session.add(binding)
    return binding


def add_neutron_nsx_network_mapping(session, neutron_id, nsx_switch_id):
    """Persist a Neutron-network to NSX-switch mapping and return it."""
    mapping = models.NeutronNsxNetworkMapping(
        neutron_id=neutron_id, nsx_id=nsx_switch_id)
    with session.begin(subtransactions=True):
        session.add(mapping)
    return mapping
def add_neutron_nsx_port_mapping(session, neutron_id,
                                 nsx_switch_id, nsx_port_id):
    """Map a Neutron port to its NSX switch/port pair and return the mapping.

    Uses an explicit begin/commit (rather than the context-manager form used
    elsewhere in this module) so a duplicate-entry failure can be inspected
    and, when it is the exact same mapping, ignored.
    """
    session.begin(subtransactions=True)
    try:
        mapping = models.NeutronNsxPortMapping(
            neutron_id, nsx_switch_id, nsx_port_id)
        session.add(mapping)
        session.commit()
    except db_exc.DBDuplicateEntry:
        # save_and_reraise_exception re-raises on exit unless ctxt.reraise is
        # cleared below.
        with excutils.save_and_reraise_exception() as ctxt:
            session.rollback()
            # do not complain if the same exact mapping is being added,
            # otherwise re-raise because even though it is possible for the
            # same neutron port to map to different back-end ports over time,
            # this should not occur whilst a mapping already exists
            current = get_nsx_switch_and_port_id(session, neutron_id)
            if current[1] == nsx_port_id:
                LOG.debug(_("Port mapping for %s already available"),
                          neutron_id)
                ctxt.reraise = False
    except db_exc.DBError:
        with excutils.save_and_reraise_exception():
            # rollback for any other db error
            session.rollback()
    return mapping
def add_neutron_nsx_router_mapping(session, neutron_id, nsx_router_id):
with session.begin(subtransactions=True):
mapping = models.NeutronNsxRouterMapping(
neutron_id=neutron_id, nsx_id=nsx_router_id)
session.add(mapping)
return mapping
def add_neutron_nsx_security_group_mapping(session, neutron_id, nsx_id):
    """Map a Neutron security group to a NSX security profile.

    :param session: a valid database session object
    :param neutron_id: a neutron security group identifier
    :param nsx_id: a nsx security profile identifier
    """
    mapping = models.NeutronNsxSecurityGroupMapping(
        neutron_id=neutron_id, nsx_id=nsx_id)
    with session.begin(subtransactions=True):
        session.add(mapping)
    return mapping
def get_nsx_switch_ids(session, neutron_id):
    """Return the NSX switch ids mapped to a neutron network.

    A list is returned because chained logical switches mean a single
    neutron network may be backed by several NSX switches.
    """
    mappings = session.query(models.NeutronNsxNetworkMapping).filter_by(
        neutron_id=neutron_id)
    return [m['nsx_id'] for m in mappings]
def get_nsx_switch_and_port_id(session, neutron_id):
    """Return the (nsx_switch_id, nsx_port_id) pair for a neutron port.

    Returns (None, None) when the mapping is not stored yet.
    """
    query = session.query(models.NeutronNsxPortMapping)
    try:
        mapping = query.filter_by(neutron_id=neutron_id).one()
    except exc.NoResultFound:
        LOG.debug(_("NSX identifiers for neutron port %s not yet "
                    "stored in Neutron DB"), neutron_id)
        return None, None
    return mapping['nsx_switch_id'], mapping['nsx_port_id']
def get_nsx_router_id(session, neutron_id):
    """Return the NSX logical router id mapped to a neutron router.

    :param session: a valid database session object
    :param neutron_id: a neutron router identifier
    :returns: the NSX router identifier, or None when the mapping has not
        been stored in the Neutron DB yet.
    """
    try:
        mapping = (session.query(models.NeutronNsxRouterMapping).
                   filter_by(neutron_id=neutron_id).one())
        return mapping['nsx_id']
    except exc.NoResultFound:
        LOG.debug(_("NSX identifiers for neutron router %s not yet "
                    "stored in Neutron DB"), neutron_id)
        # Explicit None for the "no mapping yet" case, consistent with
        # get_nsx_security_group_id and get_nsx_switch_and_port_id.
        return None
def get_nsx_security_group_id(session, neutron_id):
    """Return the id of a security group in the NSX backend.

    Note: security groups are called 'security profiles' in NSX.
    Returns None when no mapping has been stored yet.
    """
    query = session.query(models.NeutronNsxSecurityGroupMapping)
    try:
        mapping = query.filter_by(neutron_id=neutron_id).one()
    except exc.NoResultFound:
        LOG.debug(_("NSX identifiers for neutron security group %s not yet "
                    "stored in Neutron DB"), neutron_id)
        return None
    return mapping['nsx_id']
def _delete_by_neutron_id(session, model, neutron_id):
return session.query(model).filter_by(neutron_id=neutron_id).delete()
def delete_neutron_nsx_port_mapping(session, neutron_id):
    """Remove the NSX port mapping for *neutron_id*; return rows deleted."""
    model = models.NeutronNsxPortMapping
    return _delete_by_neutron_id(session, model, neutron_id)
def delete_neutron_nsx_router_mapping(session, neutron_id):
    """Remove the NSX router mapping for *neutron_id*; return rows deleted."""
    model = models.NeutronNsxRouterMapping
    return _delete_by_neutron_id(session, model, neutron_id)
def unset_default_network_gateways(session):
    """Clear the 'default' flag on every network gateway."""
    with session.begin(subtransactions=True):
        query = session.query(networkgw_db.NetworkGateway)
        query.update({networkgw_db.NetworkGateway.default: False})
def set_default_network_gateway(session, gw_id):
    """Mark the gateway identified by *gw_id* as the default one."""
    with session.begin(subtransactions=True):
        gateway = (session.query(networkgw_db.NetworkGateway).
                   filter_by(id=gw_id).one())
        gateway['default'] = True
def set_multiprovider_network(session, network_id):
    """Register *network_id* as a multiprovider network and return the row."""
    record = models.MultiProviderNetworks(network_id)
    with session.begin(subtransactions=True):
        session.add(record)
    return record
def is_multiprovider_network(session, network_id):
    """Return True if *network_id* is registered as a multiprovider net."""
    with session.begin(subtransactions=True):
        row = session.query(models.MultiProviderNetworks).filter_by(
            network_id=network_id).first()
        return bool(row)
| apache-2.0 |
kashif/scikit-learn | sklearn/svm/bounds.py | 280 | 2911 | """Determination of parameter bounds"""
# Author: Paolo Losi
# License: BSD 3 clause
from warnings import warn
import numpy as np
from ..preprocessing import LabelBinarizer
from ..utils.validation import check_consistent_length, check_array
from ..utils.extmath import safe_sparse_dot
def l1_min_c(X, y, loss='squared_hinge', fit_intercept=True,
             intercept_scaling=1.0):
    """Return the lowest bound for C such that for C in (l1_min_C, infinity)
    the model is guaranteed not to be empty. This applies to l1 penalized
    classifiers, such as LinearSVC with penalty='l1' and
    linear_model.LogisticRegression with penalty='l1'.

    This value is valid if class_weight parameter in fit() is not set.

    Parameters
    ----------
    X : array-like or sparse matrix, shape = [n_samples, n_features]
        Training vector, where n_samples in the number of samples and
        n_features is the number of features.

    y : array, shape = [n_samples]
        Target vector relative to X

    loss : {'squared_hinge', 'log'}, default 'squared_hinge'
        Specifies the loss function.
        With 'squared_hinge' it is the squared hinge loss (a.k.a. L2 loss).
        With 'log' it is the loss of logistic regression models.
        'l2' is accepted as an alias for 'squared_hinge', for backward
        compatibility reasons, but should not be used in new code.

    fit_intercept : bool, default: True
        Specifies if the intercept should be fitted by the model.
        It must match the fit() method parameter.

    intercept_scaling : float, default: 1
        when fit_intercept is True, instance vector x becomes
        [x, intercept_scaling],
        i.e. a "synthetic" feature with constant value equals to
        intercept_scaling is appended to the instance vector.
        It must match the fit() method parameter.

    Returns
    -------
    l1_min_c: float
        minimum value for C
    """
    # Backward-compat alias: 'l2' meant the squared hinge loss.
    if loss == "l2":
        warn("loss='l2' will be impossible from 0.18 onwards."
             " Use loss='squared_hinge' instead.",
             DeprecationWarning)
        loss = "squared_hinge"
    if loss not in ('squared_hinge', 'log'):
        raise ValueError('loss type not in ("squared_hinge", "log", "l2")')

    X = check_array(X, accept_sparse='csc')
    check_consistent_length(X, y)

    Y = LabelBinarizer(neg_label=-1).fit_transform(y).T
    # maximum absolute value over classes and features
    den = np.max(np.abs(safe_sparse_dot(Y, X)))
    if fit_intercept:
        # account for the synthetic intercept feature
        bias = intercept_scaling * np.ones((np.size(y), 1))
        den = max(den, abs(np.dot(Y, bias)).max())
    if den == 0.0:
        raise ValueError('Ill-posed l1_min_c calculation: l1 will always '
                         'select zero coefficients for this data')

    return 0.5 / den if loss == 'squared_hinge' else 2.0 / den
| bsd-3-clause |
Juniper/ceilometer | ceilometer/tests/pipeline_base.py | 2 | 72675 | # -*- coding: utf-8 -*-
#
# Copyright 2013 Intel Corp.
#
# Authors: Yunhong Jiang <yunhong.jiang@intel.com>
# Julien Danjou <julien@danjou.info>
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import abc
import datetime
import traceback
import mock
from oslo_utils import timeutils
from oslotest import base
from oslotest import mockpatch
import six
from stevedore import extension
from ceilometer import pipeline
from ceilometer import publisher
from ceilometer.publisher import test as test_publisher
from ceilometer import sample
from ceilometer import transformer
from ceilometer.transformer import accumulator
from ceilometer.transformer import arithmetic
from ceilometer.transformer import conversions
@six.add_metaclass(abc.ABCMeta)
class BasePipelineTestCase(base.BaseTestCase):
@staticmethod
def fake_tem_init():
"""Fake a transformerManager for pipeline.
The faked entry point setting is below:
update: TransformerClass
except: TransformerClassException
drop: TransformerClassDrop
"""
pass
def fake_tem_get_ext(self, name):
class_name_ext = {
'update': self.TransformerClass,
'except': self.TransformerClassException,
'drop': self.TransformerClassDrop,
'cache': accumulator.TransformerAccumulator,
'aggregator': conversions.AggregatorTransformer,
'unit_conversion': conversions.ScalingTransformer,
'rate_of_change': conversions.RateOfChangeTransformer,
'arithmetic': arithmetic.ArithmeticTransformer,
}
if name in class_name_ext:
return extension.Extension(name, None,
class_name_ext[name],
None,
)
raise KeyError(name)
def get_publisher(self, url, namespace=''):
fake_drivers = {'test://': test_publisher.TestPublisher,
'new://': test_publisher.TestPublisher,
'except://': self.PublisherClassException}
return fake_drivers[url](url)
    class PublisherClassException(publisher.PublisherBase):
        """Publisher double whose publish methods always raise.

        Used to exercise publisher-failure isolation in the pipeline.
        """

        def publish_samples(self, ctxt, samples):
            raise Exception()

        def publish_events(self, ctxt, events):
            raise Exception()
class TransformerClass(transformer.TransformerBase):
samples = []
def __init__(self, append_name='_update'):
self.__class__.samples = []
self.append_name = append_name
def flush(self, ctxt):
return []
def handle_sample(self, ctxt, counter):
self.__class__.samples.append(counter)
newname = getattr(counter, 'name') + self.append_name
return sample.Sample(
name=newname,
type=counter.type,
volume=counter.volume,
unit=counter.unit,
user_id=counter.user_id,
project_id=counter.project_id,
resource_id=counter.resource_id,
timestamp=counter.timestamp,
resource_metadata=counter.resource_metadata,
)
    class TransformerClassDrop(transformer.TransformerBase):
        """Transformer double that records samples and returns nothing.

        The implicit None return from handle_sample drops the sample from
        the transformation chain.
        """

        samples = []

        def __init__(self):
            self.__class__.samples = []

        def handle_sample(self, ctxt, counter):
            self.__class__.samples.append(counter)
    class TransformerClassException(object):
        """Transformer double whose handle_sample always raises."""

        @staticmethod
        def handle_sample(ctxt, counter):
            raise Exception()
    def setUp(self):
        """Build the canonical input sample and patch extension lookups."""
        super(BasePipelineTestCase, self).setUp()
        # canonical input sample reused by most tests (meter name 'a')
        self.test_counter = sample.Sample(
            name='a',
            type=sample.TYPE_GAUGE,
            volume=1,
            unit='B',
            user_id="test_user",
            project_id="test_proj",
            resource_id="test_resource",
            timestamp=timeutils.utcnow().isoformat(),
            resource_metadata={}
        )

        # route transformer/publisher resolution through the fakes above
        self.useFixture(mockpatch.PatchObject(
            transformer.TransformerExtensionManager, "__init__",
            side_effect=self.fake_tem_init))

        self.useFixture(mockpatch.PatchObject(
            transformer.TransformerExtensionManager, "get_ext",
            side_effect=self.fake_tem_get_ext))

        self.useFixture(mockpatch.PatchObject(
            publisher, 'get_publisher', side_effect=self.get_publisher))

        self.transformer_manager = transformer.TransformerExtensionManager()

        self._setup_pipeline_cfg()

        # by default, any exception logged by the pipeline fails the test
        self._reraise_exception = True
        self.useFixture(mockpatch.Patch(
            'ceilometer.pipeline.LOG.exception',
            side_effect=self._handle_reraise_exception))
def _handle_reraise_exception(self, msg):
if self._reraise_exception:
raise Exception(traceback.format_exc())
@abc.abstractmethod
def _setup_pipeline_cfg(self):
"""Setup the appropriate form of pipeline config."""
@abc.abstractmethod
def _augment_pipeline_cfg(self):
"""Augment the pipeline config with an additional element."""
@abc.abstractmethod
def _break_pipeline_cfg(self):
"""Break the pipeline config with a malformed element."""
@abc.abstractmethod
def _dup_pipeline_name_cfg(self):
"""Break the pipeline config with duplicate pipeline name."""
@abc.abstractmethod
def _set_pipeline_cfg(self, field, value):
"""Set a field to a value in the pipeline config."""
@abc.abstractmethod
def _extend_pipeline_cfg(self, field, value):
"""Extend an existing field in the pipeline config with a value."""
@abc.abstractmethod
def _unset_pipeline_cfg(self, field):
"""Clear an existing field in the pipeline config."""
    def _exception_create_pipelinemanager(self):
        """Assert that building a manager from the current cfg fails."""
        self.assertRaises(pipeline.PipelineException,
                          pipeline.PipelineManager,
                          self.pipeline_cfg,
                          self.transformer_manager)
    def test_no_counters(self):
        """A pipeline definition without 'counters' is rejected."""
        self._unset_pipeline_cfg('counters')
        self._exception_create_pipelinemanager()
    def test_no_transformers(self):
        """A missing 'transformers' key is allowed (no exception)."""
        self._unset_pipeline_cfg('transformers')
        pipeline.PipelineManager(self.pipeline_cfg, self.transformer_manager)
    def test_no_name(self):
        """A pipeline definition without 'name' is rejected."""
        self._unset_pipeline_cfg('name')
        self._exception_create_pipelinemanager()
    def test_no_interval(self):
        """A pipeline definition without 'interval' is rejected."""
        self._unset_pipeline_cfg('interval')
        self._exception_create_pipelinemanager()
    def test_no_publishers(self):
        """A pipeline definition without 'publishers' is rejected."""
        self._unset_pipeline_cfg('publishers')
        self._exception_create_pipelinemanager()
    def test_invalid_resources(self):
        """A non-list 'resources' value is rejected."""
        invalid_resource = {'invalid': 1}
        self._set_pipeline_cfg('resources', invalid_resource)
        self._exception_create_pipelinemanager()
    def test_check_counters_include_exclude_same(self):
        """Including and excluding the same meter is contradictory."""
        counter_cfg = ['a', '!a']
        self._set_pipeline_cfg('counters', counter_cfg)
        self._exception_create_pipelinemanager()
    def test_check_counters_include_exclude(self):
        """Mixing included and excluded meters in one list is rejected."""
        counter_cfg = ['a', '!b']
        self._set_pipeline_cfg('counters', counter_cfg)
        self._exception_create_pipelinemanager()
    def test_check_counters_wildcard_included(self):
        """A wildcard alongside an explicit include is rejected."""
        counter_cfg = ['a', '*']
        self._set_pipeline_cfg('counters', counter_cfg)
        self._exception_create_pipelinemanager()
    def test_check_publishers_invalid_publisher(self):
        """Configure an unknown publisher URL.

        NOTE(review): unlike the sibling test_check_* cases, this never
        calls _exception_create_pipelinemanager() (or any other assertion),
        so it currently verifies nothing — looks like an incomplete test;
        confirm the intended behavior before adding an assertion.
        """
        publisher_cfg = ['test_invalid']
        self._set_pipeline_cfg('publishers', publisher_cfg)
    def test_invalid_string_interval(self):
        """A non-numeric interval is rejected."""
        self._set_pipeline_cfg('interval', 'string')
        self._exception_create_pipelinemanager()
def test_check_transformer_invalid_transformer(self):
transformer_cfg = [
{'name': "test_invalid",
'parameters': {}}
]
self._set_pipeline_cfg('transformers', transformer_cfg)
self._exception_create_pipelinemanager()
def test_get_interval(self):
pipeline_manager = pipeline.PipelineManager(self.pipeline_cfg,
self.transformer_manager)
pipe = pipeline_manager.pipelines[0]
self.assertEqual(5, pipe.get_interval())
def test_publisher_transformer_invoked(self):
pipeline_manager = pipeline.PipelineManager(self.pipeline_cfg,
self.transformer_manager)
with pipeline_manager.publisher(None) as p:
p([self.test_counter])
publisher = pipeline_manager.pipelines[0].publishers[0]
self.assertEqual(1, len(publisher.samples))
self.assertEqual(1, len(self.TransformerClass.samples))
self.assertEqual('a_update', getattr(publisher.samples[0], "name"))
self.assertEqual('a',
getattr(self.TransformerClass.samples[0], "name"))
def test_multiple_included_counters(self):
counter_cfg = ['a', 'b']
self._set_pipeline_cfg('counters', counter_cfg)
pipeline_manager = pipeline.PipelineManager(self.pipeline_cfg,
self.transformer_manager)
with pipeline_manager.publisher(None) as p:
p([self.test_counter])
publisher = pipeline_manager.pipelines[0].publishers[0]
self.assertEqual(1, len(publisher.samples))
self.test_counter = sample.Sample(
name='b',
type=self.test_counter.type,
volume=self.test_counter.volume,
unit=self.test_counter.unit,
user_id=self.test_counter.user_id,
project_id=self.test_counter.project_id,
resource_id=self.test_counter.resource_id,
timestamp=self.test_counter.timestamp,
resource_metadata=self.test_counter.resource_metadata,
)
with pipeline_manager.publisher(None) as p:
p([self.test_counter])
self.assertEqual(2, len(publisher.samples))
self.assertEqual(2, len(self.TransformerClass.samples))
self.assertEqual('a_update', getattr(publisher.samples[0], "name"))
self.assertEqual('b_update', getattr(publisher.samples[1], "name"))
def test_counter_dont_match(self):
counter_cfg = ['nomatch']
self._set_pipeline_cfg('counters', counter_cfg)
pipeline_manager = pipeline.PipelineManager(self.pipeline_cfg,
self.transformer_manager)
with pipeline_manager.publisher(None) as p:
p([self.test_counter])
publisher = pipeline_manager.pipelines[0].publishers[0]
self.assertEqual(0, len(publisher.samples))
self.assertEqual(0, publisher.calls)
def test_wildcard_counter(self):
counter_cfg = ['*']
self._set_pipeline_cfg('counters', counter_cfg)
pipeline_manager = pipeline.PipelineManager(self.pipeline_cfg,
self.transformer_manager)
with pipeline_manager.publisher(None) as p:
p([self.test_counter])
publisher = pipeline_manager.pipelines[0].publishers[0]
self.assertEqual(1, len(publisher.samples))
self.assertEqual(1, len(self.TransformerClass.samples))
self.assertEqual('a_update', getattr(publisher.samples[0], "name"))
def test_wildcard_excluded_counters(self):
counter_cfg = ['*', '!a']
self._set_pipeline_cfg('counters', counter_cfg)
pipeline_manager = pipeline.PipelineManager(self.pipeline_cfg,
self.transformer_manager)
self.assertFalse(pipeline_manager.pipelines[0].support_meter('a'))
def test_wildcard_excluded_counters_not_excluded(self):
counter_cfg = ['*', '!b']
self._set_pipeline_cfg('counters', counter_cfg)
pipeline_manager = pipeline.PipelineManager(self.pipeline_cfg,
self.transformer_manager)
with pipeline_manager.publisher(None) as p:
p([self.test_counter])
publisher = pipeline_manager.pipelines[0].publishers[0]
self.assertEqual(1, len(publisher.samples))
self.assertEqual(1, len(self.TransformerClass.samples))
self.assertEqual('a_update', getattr(publisher.samples[0], "name"))
def test_all_excluded_counters_not_excluded(self):
counter_cfg = ['!b', '!c']
self._set_pipeline_cfg('counters', counter_cfg)
pipeline_manager = pipeline.PipelineManager(self.pipeline_cfg,
self.transformer_manager)
with pipeline_manager.publisher(None) as p:
p([self.test_counter])
publisher = pipeline_manager.pipelines[0].publishers[0]
self.assertEqual(1, len(publisher.samples))
self.assertEqual(1, len(self.TransformerClass.samples))
self.assertEqual('a_update', getattr(publisher.samples[0], "name"))
self.assertEqual('a',
getattr(self.TransformerClass.samples[0], "name"))
def test_all_excluded_counters_is_excluded(self):
counter_cfg = ['!a', '!c']
self._set_pipeline_cfg('counters', counter_cfg)
pipeline_manager = pipeline.PipelineManager(self.pipeline_cfg,
self.transformer_manager)
self.assertFalse(pipeline_manager.pipelines[0].support_meter('a'))
self.assertTrue(pipeline_manager.pipelines[0].support_meter('b'))
self.assertFalse(pipeline_manager.pipelines[0].support_meter('c'))
def test_wildcard_and_excluded_wildcard_counters(self):
counter_cfg = ['*', '!disk.*']
self._set_pipeline_cfg('counters', counter_cfg)
pipeline_manager = pipeline.PipelineManager(self.pipeline_cfg,
self.transformer_manager)
self.assertFalse(pipeline_manager.pipelines[0].
support_meter('disk.read.bytes'))
self.assertTrue(pipeline_manager.pipelines[0].support_meter('cpu'))
def test_included_counter_and_wildcard_counters(self):
counter_cfg = ['cpu', 'disk.*']
self._set_pipeline_cfg('counters', counter_cfg)
pipeline_manager = pipeline.PipelineManager(self.pipeline_cfg,
self.transformer_manager)
self.assertTrue(pipeline_manager.pipelines[0].
support_meter('disk.read.bytes'))
self.assertTrue(pipeline_manager.pipelines[0].support_meter('cpu'))
self.assertFalse(pipeline_manager.pipelines[0].
support_meter('instance'))
def test_excluded_counter_and_excluded_wildcard_counters(self):
counter_cfg = ['!cpu', '!disk.*']
self._set_pipeline_cfg('counters', counter_cfg)
pipeline_manager = pipeline.PipelineManager(self.pipeline_cfg,
self.transformer_manager)
self.assertFalse(pipeline_manager.pipelines[0].
support_meter('disk.read.bytes'))
self.assertFalse(pipeline_manager.pipelines[0].support_meter('cpu'))
self.assertTrue(pipeline_manager.pipelines[0].
support_meter('instance'))
def test_multiple_pipeline(self):
self._augment_pipeline_cfg()
pipeline_manager = pipeline.PipelineManager(self.pipeline_cfg,
self.transformer_manager)
with pipeline_manager.publisher(None) as p:
p([self.test_counter])
self.test_counter = sample.Sample(
name='b',
type=self.test_counter.type,
volume=self.test_counter.volume,
unit=self.test_counter.unit,
user_id=self.test_counter.user_id,
project_id=self.test_counter.project_id,
resource_id=self.test_counter.resource_id,
timestamp=self.test_counter.timestamp,
resource_metadata=self.test_counter.resource_metadata,
)
with pipeline_manager.publisher(None) as p:
p([self.test_counter])
publisher = pipeline_manager.pipelines[0].publishers[0]
self.assertEqual(1, len(publisher.samples))
self.assertEqual(1, publisher.calls)
self.assertEqual('a_update', getattr(publisher.samples[0], "name"))
new_publisher = pipeline_manager.pipelines[1].publishers[0]
self.assertEqual(1, len(new_publisher.samples))
self.assertEqual(1, new_publisher.calls)
self.assertEqual('b_new', getattr(new_publisher.samples[0], "name"))
self.assertEqual(2, len(self.TransformerClass.samples))
self.assertEqual('a',
getattr(self.TransformerClass.samples[0], "name"))
self.assertEqual('b',
getattr(self.TransformerClass.samples[1], "name"))
def test_multiple_pipeline_exception(self):
self._reraise_exception = False
self._break_pipeline_cfg()
pipeline_manager = pipeline.PipelineManager(self.pipeline_cfg,
self.transformer_manager)
with pipeline_manager.publisher(None) as p:
p([self.test_counter])
self.test_counter = sample.Sample(
name='b',
type=self.test_counter.type,
volume=self.test_counter.volume,
unit=self.test_counter.unit,
user_id=self.test_counter.user_id,
project_id=self.test_counter.project_id,
resource_id=self.test_counter.resource_id,
timestamp=self.test_counter.timestamp,
resource_metadata=self.test_counter.resource_metadata,
)
with pipeline_manager.publisher(None) as p:
p([self.test_counter])
publisher = pipeline_manager.pipelines[0].publishers[0]
self.assertEqual(1, publisher.calls)
self.assertEqual(1, len(publisher.samples))
self.assertEqual('a_update', getattr(publisher.samples[0], "name"))
self.assertEqual(2, len(self.TransformerClass.samples))
self.assertEqual('a',
getattr(self.TransformerClass.samples[0], "name"))
self.assertEqual('b',
getattr(self.TransformerClass.samples[1], "name"))
def test_none_transformer_pipeline(self):
self._set_pipeline_cfg('transformers', None)
pipeline_manager = pipeline.PipelineManager(self.pipeline_cfg,
self.transformer_manager)
with pipeline_manager.publisher(None) as p:
p([self.test_counter])
publisher = pipeline_manager.pipelines[0].publishers[0]
self.assertEqual(1, len(publisher.samples))
self.assertEqual(1, publisher.calls)
self.assertEqual('a', getattr(publisher.samples[0], 'name'))
def test_empty_transformer_pipeline(self):
self._set_pipeline_cfg('transformers', [])
pipeline_manager = pipeline.PipelineManager(self.pipeline_cfg,
self.transformer_manager)
with pipeline_manager.publisher(None) as p:
p([self.test_counter])
publisher = pipeline_manager.pipelines[0].publishers[0]
self.assertEqual(1, len(publisher.samples))
self.assertEqual(1, publisher.calls)
self.assertEqual('a', getattr(publisher.samples[0], 'name'))
def test_multiple_transformer_same_class(self):
transformer_cfg = [
{
'name': 'update',
'parameters': {}
},
{
'name': 'update',
'parameters': {}
},
]
self._set_pipeline_cfg('transformers', transformer_cfg)
pipeline_manager = pipeline.PipelineManager(self.pipeline_cfg,
self.transformer_manager)
with pipeline_manager.publisher(None) as p:
p([self.test_counter])
publisher = pipeline_manager.pipelines[0].publishers[0]
self.assertEqual(1, publisher.calls)
self.assertEqual(1, len(publisher.samples))
self.assertEqual('a_update_update',
getattr(publisher.samples[0], 'name'))
self.assertEqual(2, len(self.TransformerClass.samples))
self.assertEqual('a',
getattr(self.TransformerClass.samples[0], 'name'))
self.assertEqual('a_update',
getattr(self.TransformerClass.samples[1], 'name'))
def test_multiple_transformer_same_class_different_parameter(self):
transformer_cfg = [
{
'name': 'update',
'parameters':
{
"append_name": "_update",
}
},
{
'name': 'update',
'parameters':
{
"append_name": "_new",
}
},
]
self._set_pipeline_cfg('transformers', transformer_cfg)
pipeline_manager = pipeline.PipelineManager(self.pipeline_cfg,
self.transformer_manager)
with pipeline_manager.publisher(None) as p:
p([self.test_counter])
self.assertEqual(2, len(self.TransformerClass.samples))
self.assertEqual('a',
getattr(self.TransformerClass.samples[0], 'name'))
self.assertEqual('a_update',
getattr(self.TransformerClass.samples[1], 'name'))
publisher = pipeline_manager.pipelines[0].publishers[0]
self.assertEqual(1,
len(publisher.samples))
self.assertEqual('a_update_new',
getattr(publisher.samples[0], 'name'))
def test_multiple_transformer_drop_transformer(self):
transformer_cfg = [
{
'name': 'update',
'parameters':
{
"append_name": "_update",
}
},
{
'name': 'drop',
'parameters': {}
},
{
'name': 'update',
'parameters':
{
"append_name": "_new",
}
},
]
self._set_pipeline_cfg('transformers', transformer_cfg)
pipeline_manager = pipeline.PipelineManager(self.pipeline_cfg,
self.transformer_manager)
with pipeline_manager.publisher(None) as p:
p([self.test_counter])
publisher = pipeline_manager.pipelines[0].publishers[0]
self.assertEqual(0, len(publisher.samples))
self.assertEqual(1, len(self.TransformerClass.samples))
self.assertEqual('a',
getattr(self.TransformerClass.samples[0], 'name'))
self.assertEqual(1,
len(self.TransformerClassDrop.samples))
self.assertEqual('a_update',
getattr(self.TransformerClassDrop.samples[0], 'name'))
def test_multiple_publisher(self):
self._set_pipeline_cfg('publishers', ['test://', 'new://'])
pipeline_manager = pipeline.PipelineManager(self.pipeline_cfg,
self.transformer_manager)
with pipeline_manager.publisher(None) as p:
p([self.test_counter])
publisher = pipeline_manager.pipelines[0].publishers[0]
new_publisher = pipeline_manager.pipelines[0].publishers[1]
self.assertEqual(1, len(publisher.samples))
self.assertEqual(1, len(new_publisher.samples))
self.assertEqual('a_update',
getattr(new_publisher.samples[0], 'name'))
self.assertEqual('a_update',
getattr(publisher.samples[0], 'name'))
def test_multiple_publisher_isolation(self):
self._reraise_exception = False
self._set_pipeline_cfg('publishers', ['except://', 'new://'])
pipeline_manager = pipeline.PipelineManager(self.pipeline_cfg,
self.transformer_manager)
with pipeline_manager.publisher(None) as p:
p([self.test_counter])
new_publisher = pipeline_manager.pipelines[0].publishers[1]
self.assertEqual(1, len(new_publisher.samples))
self.assertEqual('a_update',
getattr(new_publisher.samples[0], 'name'))
def test_multiple_counter_pipeline(self):
self._set_pipeline_cfg('counters', ['a', 'b'])
pipeline_manager = pipeline.PipelineManager(self.pipeline_cfg,
self.transformer_manager)
with pipeline_manager.publisher(None) as p:
p([self.test_counter,
sample.Sample(
name='b',
type=self.test_counter.type,
volume=self.test_counter.volume,
unit=self.test_counter.unit,
user_id=self.test_counter.user_id,
project_id=self.test_counter.project_id,
resource_id=self.test_counter.resource_id,
timestamp=self.test_counter.timestamp,
resource_metadata=self.test_counter.resource_metadata,
)])
publisher = pipeline_manager.pipelines[0].publishers[0]
self.assertEqual(2, len(publisher.samples))
self.assertEqual('a_update', getattr(publisher.samples[0], 'name'))
self.assertEqual('b_update', getattr(publisher.samples[1], 'name'))
def test_flush_pipeline_cache(self):
CACHE_SIZE = 10
extra_transformer_cfg = [
{
'name': 'cache',
'parameters': {
'size': CACHE_SIZE,
}
},
{
'name': 'update',
'parameters':
{
'append_name': '_new'
}
},
]
self._extend_pipeline_cfg('transformers', extra_transformer_cfg)
pipeline_manager = pipeline.PipelineManager(self.pipeline_cfg,
self.transformer_manager)
pipe = pipeline_manager.pipelines[0]
pipe.publish_data(None, self.test_counter)
publisher = pipeline_manager.pipelines[0].publishers[0]
self.assertEqual(0, len(publisher.samples))
pipe.flush(None)
self.assertEqual(0, len(publisher.samples))
pipe.publish_data(None, self.test_counter)
pipe.flush(None)
self.assertEqual(0, len(publisher.samples))
for i in range(CACHE_SIZE - 2):
pipe.publish_data(None, self.test_counter)
pipe.flush(None)
self.assertEqual(CACHE_SIZE, len(publisher.samples))
self.assertEqual('a_update_new', getattr(publisher.samples[0], 'name'))
def test_flush_pipeline_cache_multiple_counter(self):
CACHE_SIZE = 3
extra_transformer_cfg = [
{
'name': 'cache',
'parameters': {
'size': CACHE_SIZE
}
},
{
'name': 'update',
'parameters':
{
'append_name': '_new'
}
},
]
self._extend_pipeline_cfg('transformers', extra_transformer_cfg)
self._set_pipeline_cfg('counters', ['a', 'b'])
pipeline_manager = pipeline.PipelineManager(self.pipeline_cfg,
self.transformer_manager)
with pipeline_manager.publisher(None) as p:
p([self.test_counter,
sample.Sample(
name='b',
type=self.test_counter.type,
volume=self.test_counter.volume,
unit=self.test_counter.unit,
user_id=self.test_counter.user_id,
project_id=self.test_counter.project_id,
resource_id=self.test_counter.resource_id,
timestamp=self.test_counter.timestamp,
resource_metadata=self.test_counter.resource_metadata,
)])
publisher = pipeline_manager.pipelines[0].publishers[0]
self.assertEqual(0, len(publisher.samples))
with pipeline_manager.publisher(None) as p:
p([self.test_counter])
self.assertEqual(CACHE_SIZE, len(publisher.samples))
self.assertEqual('a_update_new',
getattr(publisher.samples[0], 'name'))
self.assertEqual('b_update_new',
getattr(publisher.samples[1], 'name'))
def test_flush_pipeline_cache_before_publisher(self):
extra_transformer_cfg = [{
'name': 'cache',
'parameters': {}
}]
self._extend_pipeline_cfg('transformers', extra_transformer_cfg)
pipeline_manager = pipeline.PipelineManager(self.pipeline_cfg,
self.transformer_manager)
pipe = pipeline_manager.pipelines[0]
publisher = pipe.publishers[0]
pipe.publish_data(None, self.test_counter)
self.assertEqual(0, len(publisher.samples))
pipe.flush(None)
self.assertEqual(1, len(publisher.samples))
self.assertEqual('a_update',
getattr(publisher.samples[0], 'name'))
def test_variable_counter(self):
self.pipeline_cfg = [{
'name': "test_pipeline",
'interval': 5,
'counters': ['a:*'],
'transformers': [
{'name': "update",
'parameters': {}}
],
'publishers': ["test://"],
}, ]
pipeline_manager = pipeline.PipelineManager(self.pipeline_cfg,
self.transformer_manager)
self.test_counter = sample.Sample(
name='a:b',
type=self.test_counter.type,
volume=self.test_counter.volume,
unit=self.test_counter.unit,
user_id=self.test_counter.user_id,
project_id=self.test_counter.project_id,
resource_id=self.test_counter.resource_id,
timestamp=self.test_counter.timestamp,
resource_metadata=self.test_counter.resource_metadata,
)
with pipeline_manager.publisher(None) as p:
p([self.test_counter])
publisher = pipeline_manager.pipelines[0].publishers[0]
self.assertEqual(1, len(publisher.samples))
self.assertEqual(1, len(self.TransformerClass.samples))
self.assertEqual('a:b_update',
getattr(publisher.samples[0], "name"))
self.assertEqual('a:b',
getattr(self.TransformerClass.samples[0], "name"))
def test_global_unit_conversion(self):
scale = 'volume / ((10**6) * 60)'
transformer_cfg = [
{
'name': 'unit_conversion',
'parameters': {
'source': {},
'target': {'name': 'cpu_mins',
'unit': 'min',
'scale': scale},
}
},
]
self._set_pipeline_cfg('transformers', transformer_cfg)
self._set_pipeline_cfg('counters', ['cpu'])
counters = [
sample.Sample(
name='cpu',
type=sample.TYPE_CUMULATIVE,
volume=1200000000,
unit='ns',
user_id='test_user',
project_id='test_proj',
resource_id='test_resource',
timestamp=timeutils.utcnow().isoformat(),
resource_metadata={}
),
]
pipeline_manager = pipeline.PipelineManager(self.pipeline_cfg,
self.transformer_manager)
pipe = pipeline_manager.pipelines[0]
pipe.publish_data(None, counters)
publisher = pipeline_manager.pipelines[0].publishers[0]
self.assertEqual(1, len(publisher.samples))
pipe.flush(None)
self.assertEqual(1, len(publisher.samples))
cpu_mins = publisher.samples[-1]
self.assertEqual('cpu_mins', getattr(cpu_mins, 'name'))
self.assertEqual('min', getattr(cpu_mins, 'unit'))
self.assertEqual(sample.TYPE_CUMULATIVE, getattr(cpu_mins, 'type'))
self.assertEqual(20, getattr(cpu_mins, 'volume'))
def test_unit_identified_source_unit_conversion(self):
transformer_cfg = [
{
'name': 'unit_conversion',
'parameters': {
'source': {'unit': '°C'},
'target': {'unit': '°F',
'scale': '(volume * 1.8) + 32'},
}
},
]
self._set_pipeline_cfg('transformers', transformer_cfg)
self._set_pipeline_cfg('counters', ['core_temperature',
'ambient_temperature'])
counters = [
sample.Sample(
name='core_temperature',
type=sample.TYPE_GAUGE,
volume=36.0,
unit='°C',
user_id='test_user',
project_id='test_proj',
resource_id='test_resource',
timestamp=timeutils.utcnow().isoformat(),
resource_metadata={}
),
sample.Sample(
name='ambient_temperature',
type=sample.TYPE_GAUGE,
volume=88.8,
unit='°F',
user_id='test_user',
project_id='test_proj',
resource_id='test_resource',
timestamp=timeutils.utcnow().isoformat(),
resource_metadata={}
),
]
pipeline_manager = pipeline.PipelineManager(self.pipeline_cfg,
self.transformer_manager)
pipe = pipeline_manager.pipelines[0]
pipe.publish_data(None, counters)
publisher = pipeline_manager.pipelines[0].publishers[0]
self.assertEqual(2, len(publisher.samples))
core_temp = publisher.samples[0]
self.assertEqual('core_temperature', getattr(core_temp, 'name'))
self.assertEqual('°F', getattr(core_temp, 'unit'))
self.assertEqual(96.8, getattr(core_temp, 'volume'))
amb_temp = publisher.samples[1]
self.assertEqual('ambient_temperature', getattr(amb_temp, 'name'))
self.assertEqual('°F', getattr(amb_temp, 'unit'))
self.assertEqual(88.8, getattr(amb_temp, 'volume'))
self.assertEqual(96.8, getattr(core_temp, 'volume'))
    def _do_test_rate_of_change_conversion(self, prev, curr, type, expected,
                                           offset=1, weight=None):
        """Drive the rate_of_change transformer over two samples per resource.

        Publishes a *prev* then a *curr* cpu sample, *offset* minutes apart,
        for two resources (4 CPUs and 2 CPUs respectively), then asserts the
        derived ``cpu_util`` gauge equals *expected* for the first resource
        and ``expected * 2`` for the second (the scale divides by CPU count).
        *weight*, when given, is injected as ``autoscaling_weight`` user
        metadata and multiplies the result.
        """
        # Scale expression: optional per-instance weight, a deliberately
        # non-existent metadata path (exercises the "or default" fallback),
        # and ns -> percent normalised by the resource's CPU count.
        s = ("(resource_metadata.user_metadata.autoscaling_weight or 1.0)"
             "* (resource_metadata.non.existent or 1.0)"
             "* (100.0 / (10**9 * (resource_metadata.cpu_number or 1)))")
        transformer_cfg = [
            {
                'name': 'rate_of_change',
                'parameters': {
                    'source': {},
                    'target': {'name': 'cpu_util',
                               'unit': '%',
                               'type': sample.TYPE_GAUGE,
                               'scale': s},
                }
            },
        ]
        self._set_pipeline_cfg('transformers', transformer_cfg)
        self._set_pipeline_cfg('counters', ['cpu'])
        now = timeutils.utcnow()
        later = now + datetime.timedelta(minutes=offset)
        um = {'autoscaling_weight': weight} if weight else {}
        counters = [
            sample.Sample(
                name='cpu',
                type=type,
                volume=prev,
                unit='ns',
                user_id='test_user',
                project_id='test_proj',
                resource_id='test_resource',
                timestamp=now.isoformat(),
                resource_metadata={'cpu_number': 4,
                                   'user_metadata': um},
            ),
            sample.Sample(
                name='cpu',
                type=type,
                volume=prev,
                unit='ns',
                user_id='test_user',
                project_id='test_proj',
                resource_id='test_resource2',
                timestamp=now.isoformat(),
                resource_metadata={'cpu_number': 2,
                                   'user_metadata': um},
            ),
            sample.Sample(
                name='cpu',
                type=type,
                volume=curr,
                unit='ns',
                user_id='test_user',
                project_id='test_proj',
                resource_id='test_resource',
                timestamp=later.isoformat(),
                resource_metadata={'cpu_number': 4,
                                   'user_metadata': um},
            ),
            sample.Sample(
                name='cpu',
                type=type,
                volume=curr,
                unit='ns',
                user_id='test_user',
                project_id='test_proj',
                resource_id='test_resource2',
                timestamp=later.isoformat(),
                resource_metadata={'cpu_number': 2,
                                   'user_metadata': um},
            ),
        ]
        pipeline_manager = pipeline.PipelineManager(self.pipeline_cfg,
                                                    self.transformer_manager)
        pipe = pipeline_manager.pipelines[0]
        pipe.publish_data(None, counters)
        publisher = pipeline_manager.pipelines[0].publishers[0]
        # Both derived samples are emitted on publish; flush adds nothing.
        self.assertEqual(2, len(publisher.samples))
        pipe.flush(None)
        self.assertEqual(2, len(publisher.samples))
        cpu_util = publisher.samples[0]
        self.assertEqual('cpu_util', getattr(cpu_util, 'name'))
        self.assertEqual('test_resource', getattr(cpu_util, 'resource_id'))
        self.assertEqual('%', getattr(cpu_util, 'unit'))
        self.assertEqual(sample.TYPE_GAUGE, getattr(cpu_util, 'type'))
        self.assertEqual(expected, getattr(cpu_util, 'volume'))
        cpu_util = publisher.samples[1]
        self.assertEqual('cpu_util', getattr(cpu_util, 'name'))
        self.assertEqual('test_resource2', getattr(cpu_util, 'resource_id'))
        self.assertEqual('%', getattr(cpu_util, 'unit'))
        self.assertEqual(sample.TYPE_GAUGE, getattr(cpu_util, 'type'))
        self.assertEqual(expected * 2, getattr(cpu_util, 'volume'))
def test_rate_of_change_conversion(self):
self._do_test_rate_of_change_conversion(120000000000,
180000000000,
sample.TYPE_CUMULATIVE,
25.0)
def test_rate_of_change_conversion_weight(self):
self._do_test_rate_of_change_conversion(120000000000,
180000000000,
sample.TYPE_CUMULATIVE,
27.5,
weight=1.1)
def test_rate_of_change_conversion_negative_cumulative_delta(self):
self._do_test_rate_of_change_conversion(180000000000,
120000000000,
sample.TYPE_CUMULATIVE,
50.0)
def test_rate_of_change_conversion_negative_gauge_delta(self):
self._do_test_rate_of_change_conversion(180000000000,
120000000000,
sample.TYPE_GAUGE,
-25.0)
def test_rate_of_change_conversion_zero_delay(self):
self._do_test_rate_of_change_conversion(120000000000,
120000000000,
sample.TYPE_CUMULATIVE,
0.0,
offset=0)
    def test_rate_of_change_no_predecessor(self):
        """A lone sample produces no rate: there is nothing to diff against."""
        s = "100.0 / (10**9 * resource_metadata.get('cpu_number', 1))"
        transformer_cfg = [
            {
                'name': 'rate_of_change',
                'parameters': {
                    'source': {},
                    'target': {'name': 'cpu_util',
                               'unit': '%',
                               'type': sample.TYPE_GAUGE,
                               'scale': s}
                }
            },
        ]
        self._set_pipeline_cfg('transformers', transformer_cfg)
        self._set_pipeline_cfg('counters', ['cpu'])
        now = timeutils.utcnow()
        counters = [
            sample.Sample(
                name='cpu',
                type=sample.TYPE_CUMULATIVE,
                volume=120000000000,
                unit='ns',
                user_id='test_user',
                project_id='test_proj',
                resource_id='test_resource',
                timestamp=now.isoformat(),
                resource_metadata={'cpu_number': 4}
            ),
        ]
        pipeline_manager = pipeline.PipelineManager(self.pipeline_cfg,
                                                    self.transformer_manager)
        pipe = pipeline_manager.pipelines[0]
        pipe.publish_data(None, counters)
        publisher = pipeline_manager.pipelines[0].publishers[0]
        # No prior sample for this resource -> nothing published, even
        # after an explicit flush.
        self.assertEqual(0, len(publisher.samples))
        pipe.flush(None)
        self.assertEqual(0, len(publisher.samples))
def test_resources(self):
resources = ['test1://', 'test2://']
self._set_pipeline_cfg('resources', resources)
pipeline_manager = pipeline.PipelineManager(self.pipeline_cfg,
self.transformer_manager)
self.assertEqual(resources,
pipeline_manager.pipelines[0].resources)
def test_no_resources(self):
pipeline_manager = pipeline.PipelineManager(self.pipeline_cfg,
self.transformer_manager)
self.assertEqual(0, len(pipeline_manager.pipelines[0].resources))
    def _do_test_rate_of_change_mapping(self, pipe, meters, units):
        """Check mapped rate_of_change output for two meters on *pipe*.

        Publishes two cumulative samples per meter, 7 minutes apart, sized
        so the true rate is exactly 42 units/second, then verifies the
        mapped ``<meter>.rate`` gauges carry that rate and the ``<unit>/s``
        unit.
        """
        now = timeutils.utcnow()
        base = 1000
        offset = 7
        rate = 42
        later = now + datetime.timedelta(minutes=offset)
        counters = []
        # Second batch volume is chosen so delta/seconds == rate exactly.
        for v, ts in [(base, now.isoformat()),
                      (base + (offset * 60 * rate), later.isoformat())]:
            for n, u, r in [(meters[0], units[0], 'resource1'),
                            (meters[1], units[1], 'resource2')]:
                s = sample.Sample(
                    name=n,
                    type=sample.TYPE_CUMULATIVE,
                    volume=v,
                    unit=u,
                    user_id='test_user',
                    project_id='test_proj',
                    resource_id=r,
                    timestamp=ts,
                    resource_metadata={},
                )
                counters.append(s)
        pipe.publish_data(None, counters)
        publisher = pipe.publishers[0]
        self.assertEqual(2, len(publisher.samples))
        pipe.flush(None)
        self.assertEqual(2, len(publisher.samples))
        bps = publisher.samples[0]
        self.assertEqual('%s.rate' % meters[0], getattr(bps, 'name'))
        self.assertEqual('resource1', getattr(bps, 'resource_id'))
        self.assertEqual('%s/s' % units[0], getattr(bps, 'unit'))
        self.assertEqual(sample.TYPE_GAUGE, getattr(bps, 'type'))
        self.assertEqual(rate, getattr(bps, 'volume'))
        rps = publisher.samples[1]
        self.assertEqual('%s.rate' % meters[1], getattr(rps, 'name'))
        self.assertEqual('resource2', getattr(rps, 'resource_id'))
        self.assertEqual('%s/s' % units[1], getattr(rps, 'unit'))
        self.assertEqual(sample.TYPE_GAUGE, getattr(rps, 'type'))
        self.assertEqual(rate, getattr(rps, 'volume'))
    def test_rate_of_change_mapping(self):
        """map_from/map_to regex groups rename both meter name and unit."""
        # \1 and \2 in map_to refer to the capture groups of map_from.
        map_from = {'name': 'disk\\.(read|write)\\.(bytes|requests)',
                    'unit': '(B|request)'}
        map_to = {'name': 'disk.\\1.\\2.rate',
                  'unit': '\\1/s'}
        transformer_cfg = [
            {
                'name': 'rate_of_change',
                'parameters': {
                    'source': {
                        'map_from': map_from
                    },
                    'target': {
                        'map_to': map_to,
                        'type': sample.TYPE_GAUGE
                    },
                },
            },
        ]
        self._set_pipeline_cfg('transformers', transformer_cfg)
        self._set_pipeline_cfg('counters', ['disk.read.bytes',
                                            'disk.write.requests'])
        pipeline_manager = pipeline.PipelineManager(self.pipeline_cfg,
                                                    self.transformer_manager)
        pipe = pipeline_manager.pipelines[0]
        meters = ('disk.read.bytes', 'disk.write.requests')
        units = ('B', 'request')
        self._do_test_rate_of_change_mapping(pipe, meters, units)
    def _do_test_aggregator(self, parameters, expected_length):
        """Run the aggregator transformer over a fixed six-sample fixture.

        The fixture mixes two users, two projects and three metadata
        versions so the grouping behaviour selected by *parameters* can be
        observed.  Asserts *expected_length* samples come out and returns
        them sorted by volume for deterministic indexing in callers.
        """
        transformer_cfg = [
            {
                'name': 'aggregator',
                'parameters': parameters,
            },
        ]
        self._set_pipeline_cfg('transformers', transformer_cfg)
        self._set_pipeline_cfg('counters', ['storage.objects.incoming.bytes'])
        counters = [
            sample.Sample(
                name='storage.objects.incoming.bytes',
                type=sample.TYPE_DELTA,
                volume=26,
                unit='B',
                user_id='test_user',
                project_id='test_proj',
                resource_id='test_resource',
                timestamp=timeutils.utcnow().isoformat(),
                resource_metadata={'version': '1.0'}
            ),
            sample.Sample(
                name='storage.objects.incoming.bytes',
                type=sample.TYPE_DELTA,
                volume=16,
                unit='B',
                user_id='test_user',
                project_id='test_proj',
                resource_id='test_resource',
                timestamp=timeutils.utcnow().isoformat(),
                resource_metadata={'version': '2.0'}
            ),
            sample.Sample(
                name='storage.objects.incoming.bytes',
                type=sample.TYPE_DELTA,
                volume=53,
                unit='B',
                user_id='test_user_bis',
                project_id='test_proj_bis',
                resource_id='test_resource',
                timestamp=timeutils.utcnow().isoformat(),
                resource_metadata={'version': '1.0'}
            ),
            sample.Sample(
                name='storage.objects.incoming.bytes',
                type=sample.TYPE_DELTA,
                volume=42,
                unit='B',
                user_id='test_user_bis',
                project_id='test_proj_bis',
                resource_id='test_resource',
                timestamp=timeutils.utcnow().isoformat(),
                resource_metadata={'version': '2.0'}
            ),
            sample.Sample(
                name='storage.objects.incoming.bytes',
                type=sample.TYPE_DELTA,
                volume=15,
                unit='B',
                user_id='test_user',
                project_id='test_proj_bis',
                resource_id='test_resource',
                timestamp=timeutils.utcnow().isoformat(),
                resource_metadata={'version': '2.0'}
            ),
            sample.Sample(
                name='storage.objects.incoming.bytes',
                type=sample.TYPE_DELTA,
                volume=2,
                unit='B',
                user_id='test_user_bis',
                project_id='test_proj',
                resource_id='test_resource',
                timestamp=timeutils.utcnow().isoformat(),
                resource_metadata={'version': '3.0'}
            ),
        ]
        pipeline_manager = pipeline.PipelineManager(self.pipeline_cfg,
                                                    self.transformer_manager)
        pipe = pipeline_manager.pipelines[0]
        pipe.publish_data(None, counters)
        pipe.flush(None)
        publisher = pipeline_manager.pipelines[0].publishers[0]
        self.assertEqual(expected_length, len(publisher.samples))
        # Sort by volume so callers can index results deterministically.
        return sorted(publisher.samples, key=lambda s: s.volume)
    def test_aggregator_meter_type(self):
        """Aggregation combines volumes differently per meter type."""
        volumes = [1.0, 2.0, 3.0]
        transformer_cfg = [
            {
                'name': 'aggregator',
                'parameters': {'size': len(volumes) * len(sample.TYPES)}
            },
        ]
        self._set_pipeline_cfg('transformers', transformer_cfg)
        self._set_pipeline_cfg('counters',
                               ['testgauge', 'testcumulative', 'testdelta'])
        counters = []
        for sample_type in sample.TYPES:
            for volume in volumes:
                counters.append(sample.Sample(
                    name='test' + sample_type,
                    type=sample_type,
                    volume=volume,
                    unit='B',
                    user_id='test_user',
                    project_id='test_proj',
                    resource_id='test_resource',
                    timestamp=timeutils.utcnow().isoformat(),
                    resource_metadata={'version': '1.0'}
                ))
        pipeline_manager = pipeline.PipelineManager(self.pipeline_cfg,
                                                    self.transformer_manager)
        pipe = pipeline_manager.pipelines[0]
        pipe.publish_data(None, counters)
        pipe.flush(None)
        publisher = pipeline_manager.pipelines[0].publishers[0]
        # One aggregate per meter type; [2.0, 3.0, 6.0] implies the three
        # types are reduced differently (6.0 is the sum of the deltas;
        # mapping of 2.0/3.0 to gauge/cumulative not asserted here).
        actual = sorted(s.volume for s in publisher.samples)
        self.assertEqual([2.0, 3.0, 6.0], actual)
    def test_aggregator_input_validation(self):
        """String size/retention parameters are coerced to ints."""
        aggregator = conversions.AggregatorTransformer("1", "15", None,
                                                       None, None)
        self.assertEqual(1, aggregator.size)
        self.assertEqual(15, aggregator.retention_time)
        # Non-numeric strings must be rejected outright.
        self.assertRaises(ValueError, conversions.AggregatorTransformer,
                          "abc", "cde", None, None, None)
    def test_aggregator_metadata(self):
        """resource_metadata 'first'/'last' picks which version survives.

        With metadata excluded from the grouping key, samples collapse to
        four (user, project) buckets; only the bucket with two metadata
        versions (volume 95 = 53 + 42) is sensitive to the policy.
        """
        for conf, expected_version in [('last', '2.0'), ('first', '1.0')]:
            samples = self._do_test_aggregator({
                'resource_metadata': conf,
                'target': {'name': 'aggregated-bytes'}
            }, expected_length=4)
            s = samples[0]
            self.assertEqual('aggregated-bytes', s.name)
            self.assertEqual(2, s.volume)
            self.assertEqual('test_user_bis', s.user_id)
            self.assertEqual('test_proj', s.project_id)
            self.assertEqual({'version': '3.0'},
                             s.resource_metadata)
            s = samples[1]
            self.assertEqual('aggregated-bytes', s.name)
            self.assertEqual(15, s.volume)
            self.assertEqual('test_user', s.user_id)
            self.assertEqual('test_proj_bis', s.project_id)
            self.assertEqual({'version': '2.0'},
                             s.resource_metadata)
            s = samples[2]
            self.assertEqual('aggregated-bytes', s.name)
            self.assertEqual(42, s.volume)
            self.assertEqual('test_user', s.user_id)
            self.assertEqual('test_proj', s.project_id)
            self.assertEqual({'version': expected_version},
                             s.resource_metadata)
            s = samples[3]
            self.assertEqual('aggregated-bytes', s.name)
            self.assertEqual(95, s.volume)
            self.assertEqual('test_user_bis', s.user_id)
            self.assertEqual('test_proj_bis', s.project_id)
            self.assertEqual({'version': expected_version},
                             s.resource_metadata)
def test_aggregator_user_last_and_metadata_last(self):
samples = self._do_test_aggregator({
'resource_metadata': 'last',
'user_id': 'last',
'target': {'name': 'aggregated-bytes'}
}, expected_length=2)
s = samples[0]
self.assertEqual('aggregated-bytes', s.name)
self.assertEqual(44, s.volume)
self.assertEqual('test_user_bis', s.user_id)
self.assertEqual('test_proj', s.project_id)
self.assertEqual({'version': '3.0'},
s.resource_metadata)
s = samples[1]
self.assertEqual('aggregated-bytes', s.name)
self.assertEqual(110, s.volume)
self.assertEqual('test_user', s.user_id)
self.assertEqual('test_proj_bis', s.project_id)
self.assertEqual({'version': '2.0'},
s.resource_metadata)
def test_aggregator_user_first_and_metadata_last(self):
samples = self._do_test_aggregator({
'resource_metadata': 'last',
'user_id': 'first',
'target': {'name': 'aggregated-bytes'}
}, expected_length=2)
s = samples[0]
self.assertEqual('aggregated-bytes', s.name)
self.assertEqual(44, s.volume)
self.assertEqual('test_user', s.user_id)
self.assertEqual('test_proj', s.project_id)
self.assertEqual({'version': '3.0'},
s.resource_metadata)
s = samples[1]
self.assertEqual('aggregated-bytes', s.name)
self.assertEqual(110, s.volume)
self.assertEqual('test_user_bis', s.user_id)
self.assertEqual('test_proj_bis', s.project_id)
self.assertEqual({'version': '2.0'},
s.resource_metadata)
def test_aggregator_all_first(self):
samples = self._do_test_aggregator({
'resource_metadata': 'first',
'user_id': 'first',
'project_id': 'first',
'target': {'name': 'aggregated-bytes'}
}, expected_length=1)
s = samples[0]
self.assertEqual('aggregated-bytes', s.name)
self.assertEqual(154, s.volume)
self.assertEqual('test_user', s.user_id)
self.assertEqual('test_proj', s.project_id)
self.assertEqual({'version': '1.0'},
s.resource_metadata)
def test_aggregator_all_last(self):
samples = self._do_test_aggregator({
'resource_metadata': 'last',
'user_id': 'last',
'project_id': 'last',
'target': {'name': 'aggregated-bytes'}
}, expected_length=1)
s = samples[0]
self.assertEqual('aggregated-bytes', s.name)
self.assertEqual(154, s.volume)
self.assertEqual('test_user_bis', s.user_id)
self.assertEqual('test_proj', s.project_id)
self.assertEqual({'version': '3.0'},
s.resource_metadata)
def test_aggregator_all_mixed(self):
samples = self._do_test_aggregator({
'resource_metadata': 'drop',
'user_id': 'first',
'project_id': 'last',
'target': {'name': 'aggregated-bytes'}
}, expected_length=1)
s = samples[0]
self.assertEqual('aggregated-bytes', s.name)
self.assertEqual(154, s.volume)
self.assertEqual('test_user', s.user_id)
self.assertEqual('test_proj', s.project_id)
self.assertEqual({}, s.resource_metadata)
def test_aggregator_metadata_default(self):
samples = self._do_test_aggregator({
'user_id': 'last',
'project_id': 'last',
'target': {'name': 'aggregated-bytes'}
}, expected_length=1)
s = samples[0]
self.assertEqual('aggregated-bytes', s.name)
self.assertEqual(154, s.volume)
self.assertEqual('test_user_bis', s.user_id)
self.assertEqual('test_proj', s.project_id)
self.assertEqual({'version': '3.0'},
s.resource_metadata)
@mock.patch('ceilometer.transformer.conversions.LOG')
def test_aggregator_metadata_invalid(self, mylog):
samples = self._do_test_aggregator({
'resource_metadata': 'invalid',
'user_id': 'last',
'project_id': 'last',
'target': {'name': 'aggregated-bytes'}
}, expected_length=1)
s = samples[0]
self.assertTrue(mylog.warn.called)
self.assertEqual('aggregated-bytes', s.name)
self.assertEqual(154, s.volume)
self.assertEqual('test_user_bis', s.user_id)
self.assertEqual('test_proj', s.project_id)
self.assertEqual({'version': '3.0'},
s.resource_metadata)
    def test_aggregator_sized_flush(self):
        """The aggregator holds samples until the configured size is hit."""
        transformer_cfg = [
            {
                'name': 'aggregator',
                'parameters': {'size': 2},
            },
        ]
        self._set_pipeline_cfg('transformers', transformer_cfg)
        self._set_pipeline_cfg('counters', ['storage.objects.incoming.bytes'])
        counters = [
            sample.Sample(
                name='storage.objects.incoming.bytes',
                type=sample.TYPE_DELTA,
                volume=26,
                unit='B',
                user_id='test_user',
                project_id='test_proj',
                resource_id='test_resource',
                timestamp=timeutils.utcnow().isoformat(),
                resource_metadata={'version': '1.0'}
            ),
            sample.Sample(
                name='storage.objects.incoming.bytes',
                type=sample.TYPE_DELTA,
                volume=16,
                unit='B',
                user_id='test_user_bis',
                project_id='test_proj_bis',
                resource_id='test_resource',
                timestamp=timeutils.utcnow().isoformat(),
                resource_metadata={'version': '2.0'}
            )
        ]
        pipeline_manager = pipeline.PipelineManager(self.pipeline_cfg,
                                                    self.transformer_manager)
        pipe = pipeline_manager.pipelines[0]
        # First sample alone is below size=2 -> nothing is emitted yet.
        pipe.publish_data(None, [counters[0]])
        pipe.flush(None)
        publisher = pipe.publishers[0]
        self.assertEqual(0, len(publisher.samples))
        # Second sample reaches the size threshold; the two samples belong
        # to different users/projects so both come out separately.
        pipe.publish_data(None, [counters[1]])
        pipe.flush(None)
        publisher = pipe.publishers[0]
        self.assertEqual(2, len(publisher.samples))
    def test_aggregator_timed_flush(self):
        """Buffered samples are released once retention_time has elapsed."""
        timeutils.set_time_override()
        transformer_cfg = [
            {
                'name': 'aggregator',
                'parameters': {'size': 900, 'retention_time': 60},
            },
        ]
        self._set_pipeline_cfg('transformers', transformer_cfg)
        self._set_pipeline_cfg('counters', ['storage.objects.incoming.bytes'])
        counters = [
            sample.Sample(
                name='storage.objects.incoming.bytes',
                type=sample.TYPE_DELTA,
                volume=26,
                unit='B',
                user_id='test_user',
                project_id='test_proj',
                resource_id='test_resource',
                timestamp=timeutils.utcnow().isoformat(),
                resource_metadata={'version': '1.0'}
            ),
        ]
        pipeline_manager = pipeline.PipelineManager(self.pipeline_cfg,
                                                    self.transformer_manager)
        pipe = pipeline_manager.pipelines[0]
        pipe.publish_data(None, counters)
        pipe.flush(None)
        publisher = pipeline_manager.pipelines[0].publishers[0]
        # size=900 is far from reached and no time has passed yet.
        self.assertEqual(0, len(publisher.samples))
        # Jump past the 60s retention window; the next flush emits.
        timeutils.advance_time_seconds(120)
        pipe.flush(None)
        publisher = pipeline_manager.pipelines[0].publishers[0]
        self.assertEqual(1, len(publisher.samples))
    def test_aggregator_without_authentication(self):
        """Samples with no user/project still aggregate by resource."""
        transformer_cfg = [
            {
                'name': 'aggregator',
                'parameters': {'size': 2},
            },
        ]
        self._set_pipeline_cfg('transformers', transformer_cfg)
        self._set_pipeline_cfg('counters', ['storage.objects.outgoing.bytes'])
        counters = [
            sample.Sample(
                name='storage.objects.outgoing.bytes',
                type=sample.TYPE_DELTA,
                volume=26,
                unit='B',
                user_id=None,
                project_id=None,
                resource_id='test_resource',
                timestamp=timeutils.utcnow().isoformat(),
                resource_metadata={'version': '1.0'}
            ),
            sample.Sample(
                name='storage.objects.outgoing.bytes',
                type=sample.TYPE_DELTA,
                volume=16,
                unit='B',
                user_id=None,
                project_id=None,
                resource_id='test_resource',
                timestamp=timeutils.utcnow().isoformat(),
                resource_metadata={'version': '2.0'}
            )
        ]
        pipeline_manager = pipeline.PipelineManager(self.pipeline_cfg,
                                                    self.transformer_manager)
        pipe = pipeline_manager.pipelines[0]
        pipe.publish_data(None, [counters[0]])
        pipe.flush(None)
        publisher = pipe.publishers[0]
        self.assertEqual(0, len(publisher.samples))
        pipe.publish_data(None, [counters[1]])
        pipe.flush(None)
        publisher = pipe.publishers[0]
        # Both samples share (None, None, 'test_resource') so they merge
        # into a single aggregate of 26 + 16 = 42.
        self.assertEqual(1, len(publisher.samples))
        self.assertEqual(42, getattr(publisher.samples[0], 'volume'))
        self.assertEqual("test_resource", getattr(publisher.samples[0],
                                                  'resource_id'))
def _do_test_arithmetic_expr_parse(self, expr, expected):
actual = arithmetic.ArithmeticTransformer.parse_expr(expr)
self.assertEqual(expected, actual)
def test_arithmetic_expr_parse(self):
expr = '$(cpu) + $(cpu.util)'
expected = ('cpu.volume + _cpu_util_ESC.volume',
{
'cpu': 'cpu',
'cpu.util': '_cpu_util_ESC'
})
self._do_test_arithmetic_expr_parse(expr, expected)
def test_arithmetic_expr_parse_parameter(self):
expr = '$(cpu) + $(cpu.util).resource_metadata'
expected = ('cpu.volume + _cpu_util_ESC.resource_metadata',
{
'cpu': 'cpu',
'cpu.util': '_cpu_util_ESC'
})
self._do_test_arithmetic_expr_parse(expr, expected)
def test_arithmetic_expr_parse_reserved_keyword(self):
expr = '$(class) + $(cpu.util)'
expected = ('_class_ESC.volume + _cpu_util_ESC.volume',
{
'class': '_class_ESC',
'cpu.util': '_cpu_util_ESC'
})
self._do_test_arithmetic_expr_parse(expr, expected)
def test_arithmetic_expr_parse_already_escaped(self):
expr = '$(class) + $(_class_ESC)'
expected = ('_class_ESC.volume + __class_ESC_ESC.volume',
{
'class': '_class_ESC',
'_class_ESC': '__class_ESC_ESC'
})
self._do_test_arithmetic_expr_parse(expr, expected)
    def _do_test_arithmetic(self, expression, scenario, expected):
        """Run the arithmetic transformer and verify its derived samples.

        *scenario* is a list of dicts with ``name``/``volume`` (and optional
        ``metadata``); each entry is published once per test resource.  The
        derived ``new_meter`` samples, bucketed per resource, must match the
        *expected* volume list (which may be empty when nothing should come
        out).
        """
        transformer_cfg = [
            {
                'name': 'arithmetic',
                'parameters': {
                    'target': {'name': 'new_meter',
                               'unit': '%',
                               'type': sample.TYPE_GAUGE,
                               'expr': expression},
                }
            },
        ]
        self._set_pipeline_cfg('transformers', transformer_cfg)
        self._set_pipeline_cfg('counters',
                               list(set(s['name'] for s in scenario)))
        counters = []
        test_resources = ['test_resource1', 'test_resource2']
        for resource_id in test_resources:
            for s in scenario:
                counters.append(sample.Sample(
                    name=s['name'],
                    type=sample.TYPE_CUMULATIVE,
                    volume=s['volume'],
                    unit='ns',
                    user_id='test_user',
                    project_id='test_proj',
                    resource_id=resource_id,
                    timestamp=timeutils.utcnow().isoformat(),
                    resource_metadata=s.get('metadata')
                ))
        pipeline_manager = pipeline.PipelineManager(self.pipeline_cfg,
                                                    self.transformer_manager)
        pipe = pipeline_manager.pipelines[0]
        pipe.publish_data(None, counters)
        publisher = pipeline_manager.pipelines[0].publishers[0]
        expected_len = len(test_resources) * len(expected)
        # Nothing is emitted until the pipeline is flushed.
        self.assertEqual(0, len(publisher.samples))
        pipe.flush(None)
        self.assertEqual(expected_len, len(publisher.samples))
        # bucket samples by resource first
        samples_by_resource = dict((r, []) for r in test_resources)
        for s in publisher.samples:
            samples_by_resource[s.resource_id].append(s)
        for resource_id in samples_by_resource:
            self.assertEqual(len(expected),
                             len(samples_by_resource[resource_id]))
            for i, s in enumerate(samples_by_resource[resource_id]):
                self.assertEqual('new_meter', getattr(s, 'name'))
                self.assertEqual(resource_id, getattr(s, 'resource_id'))
                self.assertEqual('%', getattr(s, 'unit'))
                self.assertEqual(sample.TYPE_GAUGE, getattr(s, 'type'))
                self.assertEqual(expected[i], getattr(s, 'volume'))
def test_arithmetic_transformer(self):
expression = '100.0 * $(memory.usage) / $(memory)'
scenario = [
dict(name='memory', volume=1024.0),
dict(name='memory.usage', volume=512.0),
]
expected = [50.0]
self._do_test_arithmetic(expression, scenario, expected)
def test_arithmetic_transformer_expr_empty(self):
expression = ''
scenario = [
dict(name='memory', volume=1024.0),
dict(name='memory.usage', volume=512.0),
]
expected = []
self._do_test_arithmetic(expression, scenario, expected)
def test_arithmetic_transformer_expr_misconfigured(self):
expression = '512.0 * 3'
scenario = [
dict(name='memory', volume=1024.0),
dict(name='memory.usage', volume=512.0),
]
expected = []
self._do_test_arithmetic(expression, scenario, expected)
def test_arithmetic_transformer_nan(self):
expression = 'float(\'nan\') * $(memory.usage) / $(memory)'
scenario = [
dict(name='memory', volume=1024.0),
dict(name='memory.usage', volume=512.0),
]
expected = []
self._do_test_arithmetic(expression, scenario, expected)
def test_arithmetic_transformer_exception(self):
expression = '$(memory) / 0'
scenario = [
dict(name='memory', volume=1024.0),
dict(name='memory.usage', volume=512.0),
]
expected = []
self._do_test_arithmetic(expression, scenario, expected)
def test_arithmetic_transformer_multiple_samples(self):
expression = '100.0 * $(memory.usage) / $(memory)'
scenario = [
dict(name='memory', volume=2048.0),
dict(name='memory.usage', volume=512.0),
dict(name='memory', volume=1024.0),
]
expected = [50.0]
self._do_test_arithmetic(expression, scenario, expected)
def test_arithmetic_transformer_missing(self):
expression = '100.0 * $(memory.usage) / $(memory)'
scenario = [dict(name='memory.usage', volume=512.0)]
expected = []
self._do_test_arithmetic(expression, scenario, expected)
def test_arithmetic_transformer_more_than_needed(self):
expression = '100.0 * $(memory.usage) / $(memory)'
scenario = [
dict(name='memory', volume=1024.0),
dict(name='memory.usage', volume=512.0),
dict(name='cpu_util', volume=90.0),
]
expected = [50.0]
self._do_test_arithmetic(expression, scenario, expected)
    def test_arithmetic_transformer_cache_cleared(self):
        """A flush clears cached inputs; new data produces a new result."""
        transformer_cfg = [
            {
                'name': 'arithmetic',
                'parameters': {
                    'target': {'name': 'new_meter',
                               'expr': '$(memory.usage) + 2'}
                }
            },
        ]
        self._set_pipeline_cfg('transformers', transformer_cfg)
        self._set_pipeline_cfg('counters', ['memory.usage'])
        counter = sample.Sample(
            name='memory.usage',
            type=sample.TYPE_GAUGE,
            volume=1024.0,
            unit='MB',
            user_id='test_user',
            project_id='test_proj',
            resource_id='test_resource',
            timestamp=timeutils.utcnow().isoformat(),
            resource_metadata=None
        )
        pipeline_manager = pipeline.PipelineManager(self.pipeline_cfg,
                                                    self.transformer_manager)
        pipe = pipeline_manager.pipelines[0]
        pipe.publish_data(None, [counter])
        publisher = pipeline_manager.pipelines[0].publishers[0]
        self.assertEqual(0, len(publisher.samples))
        pipe.flush(None)
        self.assertEqual(1, len(publisher.samples))
        self.assertEqual(1026.0, publisher.samples[0].volume)
        # A second flush without fresh data must not re-emit the result.
        pipe.flush(None)
        self.assertEqual(1, len(publisher.samples))
        counter.volume = 2048.0
        pipe.publish_data(None, [counter])
        pipe.flush(None)
        self.assertEqual(2, len(publisher.samples))
        self.assertEqual(2050.0, publisher.samples[1].volume)
def test_aggregator_timed_flush_no_matching_samples(self):
timeutils.set_time_override()
transformer_cfg = [
{
'name': 'aggregator',
'parameters': {'size': 900, 'retention_time': 60},
},
]
self._set_pipeline_cfg('transformers', transformer_cfg)
self._set_pipeline_cfg('counters', ['unrelated-sample'])
pipeline_manager = pipeline.PipelineManager(self.pipeline_cfg,
self.transformer_manager)
timeutils.advance_time_seconds(200)
pipe = pipeline_manager.pipelines[0]
pipe.flush(None)
publisher = pipeline_manager.pipelines[0].publishers[0]
self.assertEqual(0, len(publisher.samples))
    def test_unique_pipeline_names(self):
        # Duplicate pipeline names in the config must raise when the
        # manager is created.
        self._dup_pipeline_name_cfg()
        self._exception_create_pipelinemanager()
| apache-2.0 |
dou800/php-buildpack-legacy | builds/runtimes/python-2.7.6/lib/python2.7/test/test_wait3.py | 136 | 1062 | """This test checks for correct wait3() behavior.
"""
import os
import time
import unittest
from test.fork_wait import ForkWait
from test.test_support import run_unittest, reap_children
# Skip the whole module on platforms lacking fork()/wait3() (e.g. Windows).
# Note: Python 2 raise syntax.
try:
    os.fork
except AttributeError:
    raise unittest.SkipTest, "os.fork not defined -- skipping test_wait3"
try:
    os.wait3
except AttributeError:
    raise unittest.SkipTest, "os.wait3 not defined -- skipping test_wait3"
class Wait3Test(ForkWait):
    # Exercises os.wait3() through the shared ForkWait fork/reap harness.
    def wait_impl(self, cpid):
        """Poll wait3(WNOHANG) until the forked child *cpid* is reaped."""
        for i in range(10):
            # wait3() shouldn't hang, but some of the buildbots seem to hang
            # in the forking tests. This is an attempt to fix the problem.
            spid, status, rusage = os.wait3(os.WNOHANG)
            if spid == cpid:
                break
            time.sleep(1.0)
        self.assertEqual(spid, cpid)
        # Low byte of status is the terminating signal, high byte the exit
        # code (wait-style status word).
        self.assertEqual(status, 0, "cause = %d, exit = %d" % (status&0xff, status>>8))
        self.assertTrue(rusage)
def test_main():
    # reap_children() guards against leaking child processes into later
    # tests in the regression-suite run.
    run_unittest(Wait3Test)
    reap_children()
# Allow running this test file directly.
if __name__ == "__main__":
    test_main()
| mit |
cryptobanana/ansible | lib/ansible/modules/storage/netapp/netapp_e_storage_system.py | 42 | 11136 | #!/usr/bin/python
# (c) 2016, NetApp, Inc
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
module: netapp_e_storage_system
version_added: "2.2"
short_description: Add/remove arrays from the Web Services Proxy
description:
- Manage the arrays accessible via a NetApp Web Services Proxy for NetApp E-series storage arrays.
options:
api_username:
required: true
description:
- The username to authenticate with the SANtricity WebServices Proxy or embedded REST API.
api_password:
required: true
description:
- The password to authenticate with the SANtricity WebServices Proxy or embedded REST API.
api_url:
required: true
description:
- The url to the SANtricity WebServices Proxy or embedded REST API.
validate_certs:
required: false
default: true
description:
- Should https certificates be validated?
ssid:
required: true
description:
- The ID of the array to manage. This value must be unique for each array.
state:
required: true
description:
- Whether the specified array should be configured on the Web Services Proxy or not.
choices: ['present', 'absent']
controller_addresses:
required: true
description:
- The list addresses for the out-of-band management adapter or the agent host. Mutually exclusive of array_wwn parameter.
array_wwn:
required: false
description:
- The WWN of the array to manage. Only necessary if in-band managing multiple arrays on the same agent host. Mutually exclusive of
controller_addresses parameter.
array_password:
required: false
description:
- The management password of the array to manage, if set.
enable_trace:
required: false
default: false
description:
- Enable trace logging for SYMbol calls to the storage system.
meta_tags:
required: false
default: None
description:
- Optional meta tags to associate to this storage system
author: Kevin Hulquest (@hulquest)
'''
EXAMPLES = '''
---
- name: Presence of storage system
netapp_e_storage_system:
ssid: "{{ item.key }}"
state: present
api_url: "{{ netapp_api_url }}"
api_username: "{{ netapp_api_username }}"
api_password: "{{ netapp_api_password }}"
validate_certs: "{{ netapp_api_validate_certs }}"
controller_addresses:
- "{{ item.value.address1 }}"
- "{{ item.value.address2 }}"
with_dict: "{{ storage_systems }}"
when: check_storage_system
'''
RETURN = '''
msg:
description: State of request
type: string
returned: always
sample: 'Storage system removed.'
'''
import json
from datetime import datetime as dt, timedelta
from time import sleep
from ansible.module_utils.api import basic_auth_argument_spec
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.pycompat24 import get_exception
from ansible.module_utils.urls import open_url
from ansible.module_utils.six.moves.urllib.error import HTTPError
def request(url, data=None, headers=None, method='GET', use_proxy=True,
            force=False, last_mod_time=None, timeout=10, validate_certs=True,
            url_username=None, url_password=None, http_agent=None, force_basic_auth=True, ignore_errors=False):
    """Perform an HTTP(S) request against the Web Services Proxy.

    Returns a ``(status_code, parsed_body)`` tuple where *parsed_body* is
    the JSON-decoded response body.  HTTP error responses are read from the
    error stream so their JSON body is still available; when *ignore_errors*
    is false, a status >= 400 (or an unparsable body) raises ``Exception``.
    """
    try:
        r = open_url(url=url, data=data, headers=headers, method=method, use_proxy=use_proxy,
                     force=force, last_mod_time=last_mod_time, timeout=timeout, validate_certs=validate_certs,
                     url_username=url_username, url_password=url_password, http_agent=http_agent,
                     force_basic_auth=force_basic_auth)
    except HTTPError:
        # The error response usually carries a JSON body too; fall through
        # and read it from the error stream below.
        err = get_exception()
        r = err.fp
    try:
        raw_data = r.read()
        if raw_data:
            data = json.loads(raw_data)
        else:
            raw_data = None
    except Exception:
        # Was a bare ``except:`` which also swallowed SystemExit and
        # KeyboardInterrupt; narrowed to Exception (behavior otherwise
        # unchanged).  NOTE(review): when ignore_errors is true, ``data``
        # keeps the request payload here — confirm callers expect that.
        if ignore_errors:
            pass
        else:
            raise Exception(raw_data)
    resp_code = r.getcode()
    if resp_code >= 400 and not ignore_errors:
        raise Exception(resp_code, data)
    else:
        return resp_code, data
def do_post(ssid, api_url, post_headers, api_usr, api_pwd, validate_certs, request_body, timeout):
    """Register a storage system with the proxy and wait for first contact.

    POSTs *request_body* to ``/storage-systems``.  On a 201 the proxy has
    not yet contacted the new array, so the array status is polled once a
    second until it leaves ``neverContacted`` or *timeout* seconds elapse
    (which raises ``Exception``).

    Returns a ``(status, response)`` tuple describing the final array state.
    """
    (rc, resp) = request(api_url + "/storage-systems", data=request_body, headers=post_headers,
                         method='POST', url_username=api_usr, url_password=api_pwd,
                         validate_certs=validate_certs)
    status = None
    return_resp = resp
    if 'status' in resp:
        status = resp['status']
    if rc == 201:
        # Newly-created entries report 'neverContacted' until the proxy
        # first reaches the array; poll until that changes or we time out.
        status = 'neverContacted'
        fail_after_time = dt.utcnow() + timedelta(seconds=timeout)
        while status == 'neverContacted':
            if dt.utcnow() > fail_after_time:
                raise Exception("web proxy timed out waiting for array status")
            sleep(1)
            (rc, system_resp) = request(api_url + "/storage-systems/%s" % ssid,
                                        headers=dict(Accept="application/json"), url_username=api_usr,
                                        url_password=api_pwd, validate_certs=validate_certs,
                                        ignore_errors=True)
            status = system_resp['status']
            return_resp = system_resp
    return status, return_resp
def main():
    """Ansible module entry point.

    Ensures the storage array identified by ``ssid`` is registered with
    (state=present) or removed from (state=absent) the SANtricity web
    services proxy, waiting for a newly added array to be contacted.
    """
    argument_spec = basic_auth_argument_spec()
    argument_spec.update(dict(
        state=dict(required=True, choices=['present', 'absent']),
        ssid=dict(required=True, type='str'),
        controller_addresses=dict(type='list'),
        array_wwn=dict(required=False, type='str'),
        array_password=dict(required=False, type='str', no_log=True),
        array_status_timeout_sec=dict(default=60, type='int'),
        enable_trace=dict(default=False, type='bool'),
        meta_tags=dict(type='list')
    ))
    module = AnsibleModule(
        argument_spec=argument_spec,
        supports_check_mode=True,
        mutually_exclusive=[['controller_addresses', 'array_wwn']],
        required_if=[('state', 'present', ['controller_addresses'])]
    )

    p = module.params
    state = p['state']
    ssid = p['ssid']
    controller_addresses = p['controller_addresses']
    array_wwn = p['array_wwn']
    array_password = p['array_password']
    array_status_timeout_sec = p['array_status_timeout_sec']
    validate_certs = p['validate_certs']
    meta_tags = p['meta_tags']
    enable_trace = p['enable_trace']
    api_usr = p['api_username']
    api_pwd = p['api_password']
    api_url = p['api_url']

    changed = False
    array_exists = False

    # Query the proxy for the current registration state of this array.
    # ignore_errors=True lets a 404 (not registered) through without raising.
    try:
        (rc, resp) = request(api_url + "/storage-systems/%s" % ssid, headers=dict(Accept="application/json"),
                             url_username=api_usr, url_password=api_pwd, validate_certs=validate_certs,
                             ignore_errors=True)
    except Exception:
        # get_exception() retrieves the active exception (legacy py2.4-compat
        # Ansible idiom); narrowed from a bare except so KeyboardInterrupt and
        # SystemExit are not swallowed.
        err = get_exception()
        module.fail_json(msg="Error accessing storage-system with id [%s]. Error [%s]" % (ssid, str(err)))

    array_exists = True
    array_detail = resp

    if rc == 200:
        if state == 'absent':
            changed = True
            array_exists = False
        elif state == 'present':
            # Compare requested controller addresses with the registered ones
            # (empty ip1/ip2 entries are ignored).
            current_addresses = frozenset(i for i in (array_detail['ip1'], array_detail['ip2']) if i)
            if set(controller_addresses) != current_addresses:
                changed = True
            if array_detail['wwn'] != array_wwn and array_wwn is not None:
                module.fail_json(
                    msg='It seems you may have specified a bad WWN. The storage system ID you specified, %s, currently has the WWN of %s' %
                        (ssid, array_detail['wwn'])
                )
    elif rc == 404:
        if state == 'present':
            changed = True
            array_exists = False
        else:
            changed = False
            module.exit_json(changed=changed, msg="Storage system was not present.")

    if changed and not module.check_mode:
        if state == 'present':
            if not array_exists:
                # add the array
                array_add_req = dict(
                    id=ssid,
                    controllerAddresses=controller_addresses,
                    metaTags=meta_tags,
                    enableTrace=enable_trace
                )
                if array_wwn:
                    array_add_req['wwn'] = array_wwn
                if array_password:
                    array_add_req['password'] = array_password

                post_headers = dict(Accept="application/json")
                post_headers['Content-Type'] = 'application/json'
                request_data = json.dumps(array_add_req)
                try:
                    (rc, resp) = do_post(ssid, api_url, post_headers, api_usr, api_pwd, validate_certs, request_data,
                                         array_status_timeout_sec)
                except Exception:
                    err = get_exception()
                    module.fail_json(msg="Failed to add storage system. Id[%s]. Request body [%s]. Error[%s]." %
                                         (ssid, request_data, str(err)))
            else:  # array exists, modify...
                post_headers = dict(Accept="application/json")
                post_headers['Content-Type'] = 'application/json'
                post_body = dict(
                    controllerAddresses=controller_addresses,
                    removeAllTags=True,
                    enableTrace=enable_trace,
                    metaTags=meta_tags
                )
                # BUG FIX: the body must be JSON-encoded before being handed
                # to do_post/open_url, exactly as in the "add" branch above;
                # the original passed the raw dict as the HTTP request body.
                request_data = json.dumps(post_body)
                try:
                    (rc, resp) = do_post(ssid, api_url, post_headers, api_usr, api_pwd, validate_certs, request_data,
                                         array_status_timeout_sec)
                except Exception:
                    err = get_exception()
                    module.fail_json(msg="Failed to update storage system. Id[%s]. Request body [%s]. Error[%s]." %
                                         (ssid, request_data, str(err)))
        elif state == 'absent':
            # delete the array
            try:
                (rc, resp) = request(api_url + "/storage-systems/%s" % ssid, method='DELETE',
                                     url_username=api_usr,
                                     url_password=api_pwd, validate_certs=validate_certs)
            except Exception:
                err = get_exception()
                module.fail_json(msg="Failed to remove storage array. Id[%s]. Error[%s]." % (ssid, str(err)))

            # 422: proxy did not know the array; 204: successful delete.
            if rc == 422:
                module.exit_json(changed=changed, msg="Storage system was not present.")
            if rc == 204:
                module.exit_json(changed=changed, msg="Storage system removed.")

    module.exit_json(changed=changed, **resp)


if __name__ == '__main__':
    main()
| gpl-3.0 |
pombredanne/invenio | modules/bibindex/lib/bibindex_engine_stemmer_tests.py | 1 | 2708 | # -*- coding: utf-8 -*-
##
## This file is part of Invenio.
## Copyright (C) 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010 CERN.
##
## Invenio is free software; you can redistribute it and/or
## modify it under the terms of the GNU General Public License as
## published by the Free Software Foundation; either version 2 of the
## License, or (at your option) any later version.
##
## Invenio is distributed in the hope that it will be useful, but
## WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
## General Public License for more details.
##
## You should have received a copy of the GNU General Public License
## along with Invenio; if not, write to the Free Software Foundation, Inc.,
## 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
"""Unit tests for the indexing engine."""
__revision__ = "$Id$"
import unittest
from invenio import bibindex_engine_stemmer
from invenio.testutils import make_test_suite, run_test_suite
class TestStemmer(unittest.TestCase):
    """Test stemmer."""

    def test_stemmer_none(self):
        """bibindex engine - no stemmer"""
        # With no language code the word must pass through unchanged.
        stemmed = bibindex_engine_stemmer.stem("information", None)
        self.assertEqual("information", stemmed)

    def test_stemmer_english(self):
        """bibindex engine - English stemmer"""
        # (input word, expected stem) pairs; the final entries check that
        # acronyms and symbol-bearing tokens survive stemming untouched.
        cases = (
            ('information', 'inform'),
            ('experiment', 'experi'),
            ('experiments', 'experi'),
            ('experimented', 'experi'),
            ('experimenting', 'experi'),
            ('experimental', 'experiment'),
            ('experimentally', 'experiment'),
            ('experimentation', 'experiment'),
            ('experimentalism', 'experiment'),
            ('experimenter', 'experiment'),
            ('experimentalise', 'experimentalis'),
            ('experimentalist', 'experimentalist'),
            ('experimentalists', 'experimentalist'),
            ('GeV', 'GeV'),
            ('$\Omega$', '$\Omega$'),
            ('e^-', 'e^-'),
            ('C#', 'C#'),
            ('C++', 'C++'),
        )
        for word, expected in cases:
            self.assertEqual(expected,
                             bibindex_engine_stemmer.stem(word, "en"))
# Aggregate the stemmer cases into a suite so the file can be executed
# standalone or collected by the Invenio test runner.
TEST_SUITE = make_test_suite(TestStemmer,)

if __name__ == "__main__":
    run_test_suite(TEST_SUITE)
| gpl-2.0 |
paulthulstrup/moose | framework/contrib/nsiqcppstyle/rules/RULE_5_2_C_provide_doxygen_class_comment_on_class_def.py | 43 | 2531 | """
Provide the class doxygen comment.
It checks if there is a doxygen-style comment in front of each class definition.
== Violation ==
class A { <== Violation. No doxygen comment.
};
/* <== Violation. It's not a doxygen comment
*
*/
class B {
};
== Good ==
/**
* blar blar
*/
class A { <== OK
};
class B; <== Don't care. It's forward decl.
"""
from nsiqcppstyle_rulehelper import *
from nsiqcppstyle_reporter import *
from nsiqcppstyle_rulemanager import *
def RunRule(lexer, currentType, fullName, decl, contextStack, typeContext) :
    """Report a violation when a class *definition* lacks a doxygen comment.

    Invoked by the rule manager for each type name. Forward declarations
    (``decl`` true) and non-class types are ignored. A preceding comment only
    counts if it is doxygen-style AND appears after the previous statement
    boundary, i.e. it actually belongs to this class.
    """
    if not decl and currentType == "CLASS" and typeContext != None:
        # Token at the class name itself (used as the error anchor).
        t = lexer.GetCurToken()
        # Nearest comment token before the class name.
        lexer.PushTokenIndex()
        t2 = lexer.GetPrevTokenInType("COMMENT")
        lexer.PopTokenIndex()
        # Nearest preceding statement boundary ({, ; or preprocessor line);
        # a comment located before this boundary documents something else.
        lexer.PushTokenIndex()
        t3 = lexer.GetPrevTokenInTypeList(["LBRACE", "SEMI", "PREPROCESSOR"], False, True)
        lexer.PopTokenIndex()
        if t2 != None and t2.additional == "DOXYGEN" :
            if t3 == None or t2.lexpos > t3.lexpos :
                # Doxygen comment directly precedes this class -> compliant.
                return
        nsiqcppstyle_reporter.Error(t, __name__, "Doxygen Comment should be provided in front of class def(%s)." % fullName)

# Register this check to run on every type name token.
ruleManager.AddTypeNameRule(RunRule)
###########################################################################################
# Unit Test
###########################################################################################
from nsiqunittest.nsiqcppstyle_unittestbase import *
class testRule(nct):
    """Unit tests for the doxygen-class-comment rule (nsiqcppstyle nct harness)."""

    def setUpRule(self):
        # Re-register the rule under test before each analysis run.
        ruleManager.AddTypeNameRule(RunRule)

    def test1(self):
        # Class with no preceding comment at all -> violation.
        self.Analyze("thisfile.c",
"""
class A {
}
""")
        assert CheckErrorContent(__name__)

    def test2(self):
        # Plain C comment (not doxygen /** ... */) -> still a violation.
        self.Analyze("thisfile.c",
"""
/*
*/
class K {
}
""")
        assert CheckErrorContent(__name__)

    def test3(self):
        # Outer class K is documented, nested class T is not -> violation.
        self.Analyze("thisfile.c",
"""
/**
*/
class K {
class T {
}
}
""")
        assert CheckErrorContent(__name__)
        # NOTE(review): duplicate of the assertion above; looks redundant
        # rather than intentional -- confirm before removing.
        assert CheckErrorContent(__name__)

    def test4(self):
        # Both outer and nested classes carry doxygen comments, and the
        # trailing forward declaration of T is exempt -> no violation.
        self.Analyze("thisfile.c",
"""
/**
 *
 */
class J {
int k;
/**
*/
class T {
}
}
class T;
""")
        assert not CheckErrorContent(__name__)

    def test5(self):
        # The rule only targets classes; an undocumented struct is ignored.
        self.Analyze("thisfile.c",
"""
/*
*/
struct K {
}
""")
        assert not CheckErrorContent(__name__)

    def test6(self):
        # A template header between the doxygen comment and the class keyword
        # must not break detection.
        self.Analyze("thisfile.c",
"""
/**
*/
template<class A, class B>
class K {
}
""")
        assert not CheckErrorContent(__name__)
| lgpl-2.1 |
michaelaye/vispy | vispy/visuals/line/line.py | 3 | 18295 | # -*- coding: utf-8 -*-
# Copyright (c) 2015, Vispy Development Team.
# Distributed under the (new) BSD License. See LICENSE.txt for more info.
"""
Line visual implementing Agg- and GL-based drawing modes.
"""
from __future__ import division
import numpy as np
from ... import gloo, glsl
from ...color import Color, ColorArray, get_colormap
from ...ext.six import string_types
from ..shaders import Function
from ..visual import Visual, CompoundVisual
from ...util.profiler import Profiler
from .dash_atlas import DashAtlas
vec2to4 = Function("""
vec4 vec2to4(vec2 inp) {
return vec4(inp, 0, 1);
}
""")
vec3to4 = Function("""
vec4 vec3to4(vec3 inp) {
return vec4(inp, 1);
}
""")
"""
TODO:
* Agg support is very minimal; needs attention.
* Optimization--avoid creating new buffers, avoid triggering program
recompile.
"""
joins = {'miter': 0, 'round': 1, 'bevel': 2}
caps = {'': 0, 'none': 0, '.': 0,
'round': 1, ')': 1, '(': 1, 'o': 1,
'triangle in': 2, '<': 2,
'triangle out': 3, '>': 3,
'square': 4, '=': 4, 'butt': 4,
'|': 5}
class LineVisual(CompoundVisual):
    """Line visual

    A compound visual that delegates rendering to one of two sub-visuals
    (_GLLineVisual or _AggLineVisual) selected via ``method``.

    Parameters
    ----------
    pos : array
        Array of shape (..., 2) or (..., 3) specifying vertex coordinates.
    color : Color, tuple, or array
        The color to use when drawing the line. If an array is given, it
        must be of shape (..., 4) and provide one rgba color per vertex.
        Can also be a colormap name, or appropriate `Function`.
    width:
        The width of the line in px. Line widths > 1px are only
        guaranteed to work when using 'agg' method.
    connect : str or array
        Determines which vertices are connected by lines.
        * "strip" causes the line to be drawn with each vertex
          connected to the next.
        * "segments" causes each pair of vertices to draw an
          independent line segment
        * numpy arrays specify the exact set of segment pairs to
          connect.
    method : str
        Mode to use for drawing.
        * "agg" uses anti-grain geometry to draw nicely antialiased lines
          with proper joins and endcaps.
        * "gl" uses OpenGL's built-in line rendering. This is much faster,
          but produces much lower-quality results and is not guaranteed to
          obey the requested line width or join/endcap styles.
    antialias : bool
        Enables or disables antialiasing.
        For method='gl', this specifies whether to use GL's line smoothing,
        which may be unavailable or inconsistent on some platforms.
    """
    def __init__(self, pos=None, color=(0.5, 0.5, 0.5, 1), width=1,
                 connect='strip', method='gl', antialias=False):
        self._line_visual = None

        # Dirty flags consumed by the active sub-visual in _prepare_draw().
        self._changed = {'pos': False, 'color': False, 'width': False,
                         'connect': False}

        self._pos = None
        self._color = None
        self._width = None
        self._connect = None
        self._bounds = None
        self._antialias = None
        self._method = 'none'

        CompoundVisual.__init__(self, [])

        # don't call subclass set_data; these often have different
        # signatures.
        LineVisual.set_data(self, pos=pos, color=color, width=width,
                            connect=connect)
        self.antialias = antialias
        self.method = method

    @property
    def antialias(self):
        """Whether antialiasing is enabled (effect depends on ``method``)."""
        return self._antialias

    @antialias.setter
    def antialias(self, aa):
        self._antialias = bool(aa)
        self.update()

    @property
    def method(self):
        """The current drawing method"""
        return self._method

    @method.setter
    def method(self, method):
        if method not in ('agg', 'gl'):
            raise ValueError('method argument must be "agg" or "gl".')
        if method == self._method:
            return

        # Swap out the rendering sub-visual for the requested backend.
        self._method = method
        if self._line_visual is not None:
            self.remove_subvisual(self._line_visual)

        if method == 'gl':
            self._line_visual = _GLLineVisual(self)
        elif method == 'agg':
            self._line_visual = _AggLineVisual(self)
        self.add_subvisual(self._line_visual)

        # Force the new sub-visual to re-upload all data on next draw.
        for k in self._changed:
            self._changed[k] = True

    def set_data(self, pos=None, color=None, width=None, connect=None):
        """ Set the data used to draw this visual.

        Arguments left as None are kept unchanged.

        Parameters
        ----------
        pos : array
            Array of shape (..., 2) or (..., 3) specifying vertex coordinates.
        color : Color, tuple, or array
            The color to use when drawing the line. If an array is given, it
            must be of shape (..., 4) and provide one rgba color per vertex.
        width:
            The width of the line in px. Line widths < 1 px will be rounded up
            to 1 px when using the 'gl' method.
        connect : str or array
            Determines which vertices are connected by lines.
            * "strip" causes the line to be drawn with each vertex
              connected to the next.
            * "segments" causes each pair of vertices to draw an
              independent line segment
            * int numpy arrays specify the exact set of segment pairs to
              connect.
            * bool numpy arrays specify which _adjacent_ pairs to connect.
        """
        if pos is not None:
            self._bounds = None
            self._pos = pos
            self._changed['pos'] = True

        if color is not None:
            self._color = color
            self._changed['color'] = True

        if width is not None:
            self._width = width
            self._changed['width'] = True

        if connect is not None:
            self._connect = connect
            self._changed['connect'] = True

        self.update()

    @property
    def color(self):
        return self._color

    @property
    def width(self):
        return self._width

    @property
    def connect(self):
        return self._connect

    @property
    def pos(self):
        return self._pos

    def _interpret_connect(self):
        # Normalize the user-facing ``connect`` value into either a mode
        # string or an (n, 2) uint32 array of vertex index pairs.
        if isinstance(self._connect, np.ndarray):
            # Convert a boolean connection array to a vertex index array
            if self._connect.ndim == 1 and self._connect.dtype == bool:
                index = np.empty((len(self._connect), 2), dtype=np.uint32)
                index[:] = np.arange(len(self._connect))[:, np.newaxis]
                index[:, 1] += 1
                return index[self._connect]
            elif self._connect.ndim == 2 and self._connect.shape[1] == 2:
                return self._connect.astype(np.uint32)
            else:
                raise TypeError("Got invalid connect array of shape %r and "
                                "dtype %r" % (self._connect.shape,
                                              self._connect.dtype))
        else:
            return self._connect

    def _interpret_color(self):
        # Resolve ``color`` into one of: a GLSL Function (for colormap names),
        # a single Color/rgba row, or an (n, 4) rgba array.
        if isinstance(self._color, string_types):
            try:
                colormap = get_colormap(self._color)
                color = Function(colormap.glsl_map)
            except KeyError:
                # Not a known colormap name; fall back to a named color.
                color = Color(self._color).rgba
        elif isinstance(self._color, Function):
            color = Function(self._color)
        else:
            color = ColorArray(self._color).rgba
            if len(color) == 1:
                color = color[0]
        return color

    def _compute_bounds(self, axis, view):
        """Get the (min, max) bounds of the line along one axis.

        Parameters
        ----------
        axis : 0, 1, 2
            The axis along which to measure the bounding values, in
            x-y-z order.
        view : instance of VisualView
            The view requesting the bounds (not used in the computation).
        """
        # Can and should we calculate bounds?
        if (self._bounds is None) and self._pos is not None:
            pos = self._pos
            # Cache per-dimension extrema; invalidated whenever pos changes.
            self._bounds = [(pos[:, d].min(), pos[:, d].max())
                            for d in range(pos.shape[1])]

        # Return what we can
        if self._bounds is None:
            return
        else:
            if axis < len(self._bounds):
                return self._bounds[axis]
            else:
                # Axis beyond the data's dimensionality (e.g. z for 2D data).
                return (0, 0)

    def _prepare_draw(self, view):
        # A zero-width line draws nothing; skip the sub-visuals entirely.
        if self._width == 0:
            return False
        CompoundVisual._prepare_draw(self, view)
class _GLLineVisual(Visual):
    """GL-native polyline renderer used by LineVisual when method='gl'.

    Line width and smoothing are delegated to the OpenGL implementation and
    are therefore platform-dependent.
    """
    VERTEX_SHADER = """
        varying vec4 v_color;

        void main(void) {
            gl_Position = $transform($to_vec4($position));
            v_color = $color;
        }
    """

    FRAGMENT_SHADER = """
        varying vec4 v_color;
        void main() {
            gl_FragColor = v_color;
        }
    """

    def __init__(self, parent):
        # Back-reference to the owning LineVisual, which holds the user data
        # and the dirty flags consumed below.
        self._parent = parent
        self._pos_vbo = gloo.VertexBuffer()
        self._color_vbo = gloo.VertexBuffer()
        self._connect_ibo = gloo.IndexBuffer()
        self._connect = None

        Visual.__init__(self, vcode=self.VERTEX_SHADER,
                        fcode=self.FRAGMENT_SHADER)
        self.set_gl_state('translucent')

    def _prepare_transforms(self, view):
        xform = view.transforms.get_transform()
        view.view_program.vert['transform'] = xform

    def _prepare_draw(self, view):
        # Upload only what changed since the last draw; returning False
        # anywhere below cancels the draw entirely.
        prof = Profiler()

        if self._parent._changed['pos']:
            if self._parent._pos is None:
                return False
            # todo: does this result in unnecessary copies?
            pos = np.ascontiguousarray(self._parent._pos.astype(np.float32))
            self._pos_vbo.set_data(pos)
            self._program.vert['position'] = self._pos_vbo
            # Promote 2D/3D positions to the vec4 expected by gl_Position.
            if pos.shape[-1] == 2:
                self._program.vert['to_vec4'] = vec2to4
            elif pos.shape[-1] == 3:
                self._program.vert['to_vec4'] = vec3to4
            else:
                raise TypeError("Got bad position array shape: %r"
                                % (pos.shape,))

        if self._parent._changed['color']:
            color = self._parent._interpret_color()
            # If color is not visible, just quit now
            if isinstance(color, Color) and color.is_blank:
                return False
            if isinstance(color, Function):
                # TODO: Change to the parametric coordinate once that is done
                self._program.vert['color'] = color(
                    '(gl_Position.x + 1.0) / 2.0')
            else:
                if color.ndim == 1:
                    # Single rgba color shared by all vertices.
                    self._program.vert['color'] = color
                else:
                    self._color_vbo.set_data(color)
                    self._program.vert['color'] = self._color_vbo

        # Do we want to use OpenGL, and can we?
        GL = None
        from ...app._default_app import default_app
        if default_app is not None and \
                default_app.backend_name != 'ipynb_webgl':
            try:
                import OpenGL.GL as GL
            except Exception:  # can be other than ImportError sometimes
                pass

        # Turn on line smooth and/or line width
        if GL:
            if self._parent._antialias:
                GL.glEnable(GL.GL_LINE_SMOOTH)
            else:
                GL.glDisable(GL.GL_LINE_SMOOTH)
            px_scale = self.transforms.pixel_scale
            width = px_scale * self._parent._width
            # GL implementations clamp widths; never request less than 1 px.
            GL.glLineWidth(max(width, 1.))

        if self._parent._changed['connect']:
            self._connect = self._parent._interpret_connect()
            if isinstance(self._connect, np.ndarray):
                self._connect_ibo.set_data(self._connect)
        if self._connect is None:
            return False

        prof('prepare')

        # Draw: map the connect mode onto a GL primitive + optional index
        # buffer.
        if isinstance(self._connect, string_types) and \
                self._connect == 'strip':
            self._draw_mode = 'line_strip'
            self._index_buffer = None
        elif isinstance(self._connect, string_types) and \
                self._connect == 'segments':
            self._draw_mode = 'lines'
            self._index_buffer = None
        elif isinstance(self._connect, np.ndarray):
            self._draw_mode = 'lines'
            self._index_buffer = self._connect_ibo
        else:
            raise ValueError("Invalid line connect mode: %r" % self._connect)

        prof('draw')
class _AggLineVisual(Visual):
    """Anti-grain-geometry line renderer used by LineVisual when method='agg'.

    Each segment is expanded on the CPU (_agg_bake) into a quad of four
    vertices; joins, caps and antialiasing are computed in the agg shaders.
    """
    # Interleaved per-vertex attributes consumed by lines/agg.vert.
    _agg_vtype = np.dtype([('a_position', np.float32, 2),
                           ('a_tangents', np.float32, 4),
                           ('a_segment', np.float32, 2),
                           ('a_angles', np.float32, 2),
                           ('a_texcoord', np.float32, 2),
                           ('alength', np.float32, 1),
                           ('color', np.float32, 4)])

    VERTEX_SHADER = glsl.get('lines/agg.vert')
    FRAGMENT_SHADER = glsl.get('lines/agg.frag')

    def __init__(self, parent):
        # Back-reference to the owning LineVisual (data + dirty flags).
        self._parent = parent
        self._vbo = gloo.VertexBuffer()

        self._pos = None
        self._color = None

        # Uniforms for the dash texture; only the 'solid' pattern is used.
        self._da = DashAtlas()
        dash_index, dash_period = self._da['solid']
        self._U = dict(dash_index=dash_index, dash_period=dash_period,
                       linejoin=joins['round'],
                       linecaps=(caps['round'], caps['round']),
                       dash_caps=(caps['round'], caps['round']),
                       antialias=1.0)
        self._dash_atlas = gloo.Texture2D(self._da._data)

        Visual.__init__(self, vcode=self.VERTEX_SHADER,
                        fcode=self.FRAGMENT_SHADER)
        self._index_buffer = gloo.IndexBuffer()
        self.set_gl_state('translucent', depth_test=False)
        self._draw_mode = 'triangles'

    def _prepare_transforms(self, view):
        # The agg shaders need the pipeline split at the document and
        # framebuffer stages so widths can be computed in pixel units.
        data_doc = view.get_transform('visual', 'document')
        doc_px = view.get_transform('document', 'framebuffer')
        px_ndc = view.get_transform('framebuffer', 'render')

        vert = view.view_program.vert
        vert['transform'] = data_doc
        vert['doc_px_transform'] = doc_px
        vert['px_ndc_transform'] = px_ndc

    def _prepare_draw(self, view):
        # Re-bake the vertex buffer only when position or color changed.
        bake = False
        if self._parent._changed['pos']:
            if self._parent._pos is None:
                return False
            # todo: does this result in unnecessary copies?
            self._pos = np.ascontiguousarray(
                self._parent._pos.astype(np.float32))
            bake = True
        if self._parent._changed['color']:
            self._color = self._parent._interpret_color()
            bake = True
        if self._parent._changed['connect']:
            if self._parent._connect not in [None, 'strip']:
                raise NotImplementedError("Only 'strip' connection mode "
                                          "allowed for agg-method lines.")

        if bake:
            V, I = self._agg_bake(self._pos, self._color)
            self._vbo.set_data(V)
            self._index_buffer.set_data(I)

        #self._program.prepare()
        self.shared_program.bind(self._vbo)
        uniforms = dict(closed=False, miter_limit=4.0, dash_phase=0.0,
                        linewidth=self._parent._width)
        for n, v in uniforms.items():
            self.shared_program[n] = v
        for n, v in self._U.items():
            self.shared_program[n] = v
        self.shared_program['u_dash_atlas'] = self._dash_atlas

    @classmethod
    def _agg_bake(cls, vertices, color, closed=False):
        """
        Bake a list of 2D vertices for rendering them as thick line. Each line
        segment must have its own vertices because of antialias (this means no
        vertex sharing between two adjacent line segments).

        Returns (V, I): the structured vertex array (4 vertices per segment)
        and the uint32 triangle index array (2 triangles per segment).
        """
        n = len(vertices)
        P = np.array(vertices).reshape(n, 2).astype(float)
        idx = np.arange(n)  # used to eventually tile the color array

        # Distance between first and last vertex, used to decide whether a
        # "closed" polyline needs an explicit closing vertex.
        dx, dy = P[0] - P[-1]
        d = np.sqrt(dx*dx+dy*dy)

        # If closed, make sure first vertex = last vertex (+/- epsilon=1e-10)
        if closed and d > 1e-10:
            P = np.append(P, P[0]).reshape(n+1, 2)
            idx = np.append(idx, idx[-1])
            n += 1

        V = np.zeros(len(P), dtype=cls._agg_vtype)
        V['a_position'] = P

        # Tangents & norms (per-segment direction vectors and their lengths).
        T = P[1:] - P[:-1]
        N = np.sqrt(T[:, 0]**2 + T[:, 1]**2)
        # T /= N.reshape(len(T),1)
        # Each vertex stores the incoming tangent (first 2 floats) and the
        # outgoing tangent (last 2); endpoints reuse a neighbor's tangent.
        V['a_tangents'][+1:, :2] = T
        V['a_tangents'][0, :2] = T[-1] if closed else T[0]
        V['a_tangents'][:-1, 2:] = T
        V['a_tangents'][-1, 2:] = T[0] if closed else T[-1]

        # Angles between incoming and outgoing tangents (signed, via atan2);
        # used by the shader for join geometry.
        T1 = V['a_tangents'][:, :2]
        T2 = V['a_tangents'][:, 2:]
        A = np.arctan2(T1[:, 0]*T2[:, 1]-T1[:, 1]*T2[:, 0],
                       T1[:, 0]*T2[:, 0]+T1[:, 1]*T2[:, 1])
        V['a_angles'][:-1, 0] = A[:-1]
        V['a_angles'][:-1, 1] = A[+1:]

        # Segment: cumulative arc length at segment start/end.
        L = np.cumsum(N)
        V['a_segment'][+1:, 0] = L
        V['a_segment'][:-1, 1] = L
        # V['a_lengths'][:,2] = L[-1]

        # Step 1: A -- B -- C  =>  A -- B, B' -- C
        # (duplicate interior vertices so adjacent segments share no vertex)
        V = np.repeat(V, 2, axis=0)[1:-1]
        V['a_segment'][1:] = V['a_segment'][:-1]
        V['a_angles'][1:] = V['a_angles'][:-1]
        V['a_texcoord'][0::2] = -1
        V['a_texcoord'][1::2] = +1
        idx = np.repeat(idx, 2)[1:-1]

        # Step 2: A -- B, B' -- C  ->  A0/A1 -- B0/B1, B'0/B'1 -- C0/C1
        # (duplicate again so each segment becomes a 4-vertex quad; the
        # texcoord sign pair tells the shader which corner each vertex is)
        V = np.repeat(V, 2, axis=0)
        V['a_texcoord'][0::2, 1] = -1
        V['a_texcoord'][1::2, 1] = +1
        idx = np.repeat(idx, 2)

        # Two triangles (6 indices) per 4-vertex segment quad.
        I = np.resize(np.array([0, 1, 2, 1, 2, 3], dtype=np.uint32),
                      (n-1)*(2*3))
        I += np.repeat(4*np.arange(n-1, dtype=np.uint32), 6)

        # Length (total arc length, replicated onto every vertex).
        V['alength'] = L[-1] * np.ones(len(V))

        # Color: either one rgba row tiled over all baked vertices, or one
        # rgba per original vertex mapped through idx onto the duplicates.
        if color.ndim == 1:
            color = np.tile(color, (len(V), 1))
        elif color.ndim == 2 and len(color) == n:
            color = color[idx]
        else:
            raise ValueError('Color length %s does not match number of '
                             'vertices %s' % (len(color), n))
        V['color'] = color

        return V, I
| bsd-3-clause |
t-hey/QGIS-Original | python/plugins/processing/algs/grass7/ext/r_li_richness_ascii.py | 5 | 1412 | # -*- coding: utf-8 -*-
"""
***************************************************************************
r_li_richness_ascii.py
----------------------
Date : February 2016
Copyright : (C) 2016 by Médéric Ribreux
Email : medspx at medspx dot fr
***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************
"""
__author__ = 'Médéric Ribreux'
__date__ = 'February 2016'
__copyright__ = '(C) 2016, Médéric Ribreux'
# This will get replaced with a git SHA1 when you do a git archive
__revision__ = '$Format:%H$'
from .r_li import checkMovingWindow, configFile, moveOutputTxtFile
def checkParameterValuesBeforeExecuting(alg):
    """Validate the algorithm's parameters before execution.

    Delegates to the shared r.li helper; the second argument selects the
    text-output variant of the check (this module produces ASCII output).
    """
    return checkMovingWindow(alg, True)
def processCommand(alg, parameters):
    """Build the r.li configuration file for this run (text-output variant)."""
    configFile(alg, parameters, True)
def processOutputs(alg):
    """Move the text file produced by r.li to the user-requested location."""
    moveOutputTxtFile(alg)
| gpl-2.0 |
raymondxyang/tensorflow | tensorflow/contrib/keras/python/keras/layers/convolutional_test.py | 5 | 28168 | # Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for convolutional layers."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
from tensorflow.contrib.keras.python import keras
from tensorflow.contrib.keras.python.keras import testing_utils
from tensorflow.python.platform import test
class Convolution1DTest(test.TestCase):
  """Tests for keras.layers.Conv1D."""

  def test_dilated_conv1d(self):
    # With a length-2 'ones' kernel and no bias, each output element is the
    # sum of two consecutive inputs: [0+1, 1+2, 2+3] == [1, 3, 5].
    with self.test_session(use_gpu=True):
      testing_utils.layer_test(
          keras.layers.Conv1D,
          input_data=np.reshape(np.arange(4, dtype='float32'), (1, 4, 1)),
          kwargs={
              'filters': 1,
              'kernel_size': 2,
              'dilation_rate': 1,
              'padding': 'valid',
              'kernel_initializer': 'ones',
              'use_bias': False,
          },
          expected_output=[[[1], [3], [5]]])

  def test_conv_1d(self):
    # Shape-only smoke test over the padding/stride grid.
    batch_size = 2
    steps = 8
    input_dim = 2
    kernel_size = 3
    filters = 3

    for padding in ['valid', 'same']:
      for strides in [1, 2]:
        # Strided 'same' convolution is deliberately not exercised.
        if padding == 'same' and strides != 1:
          continue
        with self.test_session(use_gpu=True):
          testing_utils.layer_test(
              keras.layers.Conv1D,
              kwargs={
                  'filters': filters,
                  'kernel_size': kernel_size,
                  'padding': padding,
                  'strides': strides
              },
              input_shape=(batch_size, steps, input_dim))

  def test_conv_1d_regularizers(self):
    kwargs = {
        'filters': 3,
        'kernel_size': 3,
        'padding': 'valid',
        'kernel_regularizer': 'l2',
        'bias_regularizer': 'l2',
        'activity_regularizer': 'l2',
        'strides': 1
    }
    with self.test_session(use_gpu=True):
      layer = keras.layers.Conv1D(**kwargs)
      layer.build((None, 5, 2))
      # Kernel + bias regularizers register two losses at build time...
      self.assertEqual(len(layer.losses), 2)
      layer(keras.backend.variable(np.ones((1, 5, 2))))
      # ...and the activity regularizer adds a third on the first call.
      self.assertEqual(len(layer.losses), 3)

  def test_conv_1d_constraints(self):
    # Identity constraints; only object identity on the layer is asserted.
    k_constraint = lambda x: x
    b_constraint = lambda x: x

    kwargs = {
        'filters': 3,
        'kernel_size': 3,
        'padding': 'valid',
        'kernel_constraint': k_constraint,
        'bias_constraint': b_constraint,
        'strides': 1
    }
    with self.test_session(use_gpu=True):
      layer = keras.layers.Conv1D(**kwargs)
      layer.build((None, 5, 2))
      self.assertEqual(layer.kernel.constraint, k_constraint)
      self.assertEqual(layer.bias.constraint, b_constraint)
class Conv2DTest(test.TestCase):
  """Tests for keras.layers.Conv2D."""

  def test_convolution_2d(self):
    num_samples = 2
    filters = 2
    stack_size = 3
    kernel_size = (3, 2)
    num_row = 7
    num_col = 6

    for padding in ['valid', 'same']:
      for strides in [(1, 1), (2, 2)]:
        if padding == 'same' and strides != (1, 1):
          continue

        with self.test_session(use_gpu=True):
          # Only runs on GPU with CUDA, channels_first is not supported on CPU.
          # TODO(b/62340061): Support channels_first on CPU.
          # NOTE(review): on a CPU-only machine this test body is a no-op.
          if test.is_gpu_available(cuda_only=True):
            testing_utils.layer_test(
                keras.layers.Conv2D,
                kwargs={
                    'filters': filters,
                    'kernel_size': kernel_size,
                    'padding': padding,
                    'strides': strides,
                    'data_format': 'channels_first'
                },
                input_shape=(num_samples, stack_size, num_row, num_col))

  def test_convolution_2d_regularizers(self):
    kwargs = {
        'filters': 3,
        'kernel_size': 3,
        'padding': 'valid',
        'kernel_regularizer': 'l2',
        'bias_regularizer': 'l2',
        'activity_regularizer': 'l2',
        'strides': 1
    }
    with self.test_session(use_gpu=True):
      layer = keras.layers.Conv2D(**kwargs)
      layer.build((None, 5, 5, 2))
      # Kernel + bias regularizers register two losses at build time...
      self.assertEqual(len(layer.losses), 2)
      layer(keras.backend.variable(np.ones((1, 5, 5, 2))))
      # ...and the activity regularizer adds a third on the first call.
      self.assertEqual(len(layer.losses), 3)

  def test_convolution_2d_constraints(self):
    # Identity constraints; only object identity on the layer is asserted.
    k_constraint = lambda x: x
    b_constraint = lambda x: x

    kwargs = {
        'filters': 3,
        'kernel_size': 3,
        'padding': 'valid',
        'kernel_constraint': k_constraint,
        'bias_constraint': b_constraint,
        'strides': 1
    }
    with self.test_session(use_gpu=True):
      layer = keras.layers.Conv2D(**kwargs)
      layer.build((None, 5, 5, 2))
      self.assertEqual(layer.kernel.constraint, k_constraint)
      self.assertEqual(layer.bias.constraint, b_constraint)

  def test_dilated_conv_2d(self):
    num_samples = 2
    filters = 2
    stack_size = 3
    kernel_size = (3, 2)
    num_row = 7
    num_col = 6

    # Test dilation
    with self.test_session(use_gpu=True):
      testing_utils.layer_test(
          keras.layers.Conv2D,
          kwargs={
              'filters': filters,
              'kernel_size': kernel_size,
              'dilation_rate': (2, 2)
          },
          input_shape=(num_samples, num_row, num_col, stack_size))
class Conv2DTransposeTest(test.TestCase):
  """Tests for keras.layers.Conv2DTranspose."""

  def test_conv2d_transpose(self):
    # Shape-only smoke test over the padding/stride grid; strided 'same'
    # deconvolution is deliberately not exercised.
    batch, rows, cols, channels = 2, 5, 6, 3
    out_filters = 2

    combos = [(pad, stride)
              for pad in ['valid', 'same']
              for stride in [(1, 1), (2, 2)]
              if not (pad == 'same' and stride != (1, 1))]
    for pad, stride in combos:
      with self.test_session(use_gpu=True):
        testing_utils.layer_test(
            keras.layers.Conv2DTranspose,
            kwargs=dict(filters=out_filters,
                        kernel_size=3,
                        padding=pad,
                        strides=stride,
                        data_format='channels_last'),
            input_shape=(batch, rows, cols, channels))

  def test_conv2dtranspose_regularizers(self):
    layer_kwargs = dict(filters=3,
                        kernel_size=3,
                        padding='valid',
                        kernel_regularizer='l2',
                        bias_regularizer='l2',
                        activity_regularizer='l2',
                        strides=1)
    with self.test_session(use_gpu=True):
      layer = keras.layers.Conv2DTranspose(**layer_kwargs)
      layer.build((None, 5, 5, 2))
      # Kernel + bias regularizers register two losses at build time...
      self.assertEqual(len(layer.losses), 2)
      layer(keras.backend.variable(np.ones((1, 5, 5, 2))))
      # ...and the activity regularizer adds a third on the first call.
      self.assertEqual(len(layer.losses), 3)

  def test_conv2dtranspose_constraints(self):
    # Identity constraints; only object identity on the layer is asserted.
    kernel_c = lambda x: x
    bias_c = lambda x: x

    layer_kwargs = dict(filters=3,
                        kernel_size=3,
                        padding='valid',
                        kernel_constraint=kernel_c,
                        bias_constraint=bias_c,
                        strides=1)
    with self.test_session(use_gpu=True):
      layer = keras.layers.Conv2DTranspose(**layer_kwargs)
      layer.build((None, 5, 5, 2))
      self.assertEqual(layer.kernel.constraint, kernel_c)
      self.assertEqual(layer.bias.constraint, bias_c)
class Conv3DTransposeTest(test.TestCase):
  """Tests for keras.layers.Conv3DTranspose."""

  def test_conv3d_transpose(self):
    # Shape-only smoke test over the padding/stride grid; strided 'same'
    # deconvolution is deliberately not exercised.
    num_samples = 2
    filters = 2
    stack_size = 3
    num_row = 5
    num_col = 6
    depth = 4

    for padding in ['valid', 'same']:
      for strides in [(1, 1, 1), (2, 2, 2)]:
        if padding == 'same' and strides != (1, 1, 1):
          continue
        with self.test_session(use_gpu=True):
          testing_utils.layer_test(
              keras.layers.Conv3DTranspose,
              kwargs={
                  'filters': filters,
                  'kernel_size': 3,
                  'padding': padding,
                  'strides': strides,
                  'data_format': 'channels_last'
              },
              input_shape=(num_samples, depth, num_row, num_col, stack_size))

  def test_conv3dtranspose_regularizers(self):
    kwargs = {
        'filters': 3,
        'kernel_size': 3,
        'padding': 'valid',
        'kernel_regularizer': 'l2',
        'bias_regularizer': 'l2',
        'activity_regularizer': 'l2',
        'strides': 1
    }
    with self.test_session(use_gpu=True):
      layer = keras.layers.Conv3DTranspose(**kwargs)
      layer.build((None, 5, 5, 5, 2))
      # Kernel + bias regularizers register two losses at build time...
      self.assertEqual(len(layer.losses), 2)
      layer(keras.backend.variable(np.ones((1, 5, 5, 5, 2))))
      # ...and the activity regularizer adds a third on the first call.
      self.assertEqual(len(layer.losses), 3)

  def test_conv3dtranspose_constraints(self):
    # Identity constraints; only object identity on the layer is asserted.
    k_constraint = lambda x: x
    b_constraint = lambda x: x

    kwargs = {
        'filters': 3,
        'kernel_size': 3,
        'padding': 'valid',
        'kernel_constraint': k_constraint,
        'bias_constraint': b_constraint,
        'strides': 1
    }
    with self.test_session(use_gpu=True):
      layer = keras.layers.Conv3DTranspose(**kwargs)
      layer.build((None, 5, 5, 5, 2))
      self.assertEqual(layer.kernel.constraint, k_constraint)
      self.assertEqual(layer.bias.constraint, b_constraint)
class SeparableConv2DTest(test.TestCase):
  """Tests for keras.layers.SeparableConv2D."""

  def test_separable_conv_2d(self):
    # Shape-only smoke test over padding/stride/depth-multiplier combos.
    num_samples = 2
    filters = 6
    stack_size = 3
    num_row = 7
    num_col = 6

    for padding in ['valid', 'same']:
      for strides in [(1, 1), (2, 2)]:
        for multiplier in [1, 2]:
          # Strided 'same' convolution is deliberately not exercised.
          if padding == 'same' and strides != (1, 1):
            continue
          with self.test_session(use_gpu=True):
            testing_utils.layer_test(
                keras.layers.SeparableConv2D,
                kwargs={
                    'filters': filters,
                    'kernel_size': (3, 3),
                    'padding': padding,
                    'strides': strides,
                    'depth_multiplier': multiplier
                },
                input_shape=(num_samples, num_row, num_col, stack_size))

  def test_separable_conv2d_regularizers(self):
    kwargs = {
        'filters': 3,
        'kernel_size': 3,
        'padding': 'valid',
        'depthwise_regularizer': 'l2',
        'pointwise_regularizer': 'l2',
        'bias_regularizer': 'l2',
        'activity_regularizer': 'l2',
        'strides': 1
    }
    with self.test_session(use_gpu=True):
      layer = keras.layers.SeparableConv2D(**kwargs)
      layer.build((None, 5, 5, 2))
      # Depthwise + pointwise + bias regularizers register three losses at
      # build time...
      self.assertEqual(len(layer.losses), 3)
      layer(keras.backend.variable(np.ones((1, 5, 5, 2))))
      # ...and the activity regularizer adds a fourth on the first call.
      self.assertEqual(len(layer.losses), 4)

  def test_separable_conv2d_constraints(self):
    # Identity constraints; only object identity on the layer is asserted.
    d_constraint = lambda x: x
    p_constraint = lambda x: x
    b_constraint = lambda x: x

    kwargs = {
        'filters': 3,
        'kernel_size': 3,
        'padding': 'valid',
        'pointwise_constraint': p_constraint,
        'depthwise_constraint': d_constraint,
        'bias_constraint': b_constraint,
        'strides': 1
    }
    with self.test_session(use_gpu=True):
      layer = keras.layers.SeparableConv2D(**kwargs)
      layer.build((None, 5, 5, 2))
      self.assertEqual(layer.depthwise_kernel.constraint, d_constraint)
      self.assertEqual(layer.pointwise_kernel.constraint, p_constraint)
      self.assertEqual(layer.bias.constraint, b_constraint)
class Conv3DTest(test.TestCase):
  """Tests for keras.layers.Conv3D (alias Convolution3D)."""

  def test_convolution_3d(self):
    """Smoke-test Conv3D over padding/stride combinations."""
    num_samples = 2
    filters = 2
    stack_size = 3
    input_len_dim1 = 9
    input_len_dim2 = 8
    input_len_dim3 = 8
    for padding in ['valid', 'same']:
      for strides in [(1, 1, 1), (2, 2, 2)]:
        # 'same' padding with strides > 1 is unsupported; skip the combo.
        if padding == 'same' and strides != (1, 1, 1):
          continue
        with self.test_session(use_gpu=True):
          testing_utils.layer_test(
              keras.layers.Convolution3D,
              kwargs={
                  'filters': filters,
                  'kernel_size': 3,
                  'padding': padding,
                  'strides': strides
              },
              input_shape=(num_samples, input_len_dim1, input_len_dim2,
                           input_len_dim3, stack_size))

  def test_convolution_3d_regularizers(self):
    """Kernel/bias regularizers register at build; activity regularizer on call."""
    kwargs = {
        'filters': 3,
        'kernel_size': 3,
        'padding': 'valid',
        'kernel_regularizer': 'l2',
        'bias_regularizer': 'l2',
        'activity_regularizer': 'l2',
        'strides': 1
    }
    with self.test_session(use_gpu=True):
      layer = keras.layers.Conv3D(**kwargs)
      layer.build((None, 5, 5, 5, 2))
      # Kernel and bias regularizers give two losses after build.
      # (Fixed: this assertion was accidentally duplicated in the original.)
      self.assertEqual(len(layer.losses), 2)
      layer(keras.backend.variable(np.ones((1, 5, 5, 5, 2))))
      # Activity regularizer adds a third loss on the forward pass.
      self.assertEqual(len(layer.losses), 3)

  def test_convolution_3d_constraints(self):
    """The constraint callables handed in are stored on kernel and bias."""
    k_constraint = lambda x: x
    b_constraint = lambda x: x
    kwargs = {
        'filters': 3,
        'kernel_size': 3,
        'padding': 'valid',
        'kernel_constraint': k_constraint,
        'bias_constraint': b_constraint,
        'strides': 1
    }
    with self.test_session(use_gpu=True):
      layer = keras.layers.Conv3D(**kwargs)
      layer.build((None, 5, 5, 5, 2))
      self.assertEqual(layer.kernel.constraint, k_constraint)
      self.assertEqual(layer.bias.constraint, b_constraint)
class ZeroPaddingTest(test.TestCase):
  """Shape and value tests for the ZeroPadding1D/2D/3D layers."""

  def test_zero_padding_1d(self):
    """ZeroPadding1D pads the step axis with zeros; interior values survive."""
    num_samples = 2
    input_dim = 2
    num_steps = 5
    shape = (num_samples, num_steps, input_dim)
    inputs = np.ones(shape)

    # basic test
    with self.test_session(use_gpu=True):
      testing_utils.layer_test(
          keras.layers.ZeroPadding1D,
          kwargs={'padding': 2},
          input_shape=inputs.shape)
      testing_utils.layer_test(
          keras.layers.ZeroPadding1D,
          kwargs={'padding': (1, 2)},
          input_shape=inputs.shape)

    # correctness test
    with self.test_session(use_gpu=True):
      layer = keras.layers.ZeroPadding1D(padding=2)
      layer.build(shape)
      output = layer(keras.backend.variable(inputs))
      np_output = keras.backend.eval(output)
      # Symmetric padding: two zero steps on each side, ones in between.
      for offset in [0, 1, -1, -2]:
        np.testing.assert_allclose(np_output[:, offset, :], 0.)
      np.testing.assert_allclose(np_output[:, 2:-2, :], 1.)

      layer = keras.layers.ZeroPadding1D(padding=(1, 2))
      layer.build(shape)
      output = layer(keras.backend.variable(inputs))
      np_output = keras.backend.eval(output)
      # Asymmetric padding: one zero step on the left, two on the right.
      for left_offset in [0]:
        np.testing.assert_allclose(np_output[:, left_offset, :], 0.)
      for right_offset in [-1, -2]:
        np.testing.assert_allclose(np_output[:, right_offset, :], 0.)
      np.testing.assert_allclose(np_output[:, 1:-2, :], 1.)
      layer.get_config()

  def test_zero_padding_2d(self):
    """ZeroPadding2D zero-pads the two spatial axes for both data formats."""
    num_samples = 2
    stack_size = 2
    input_num_row = 4
    input_num_col = 5
    for data_format in ['channels_first', 'channels_last']:
      # Bug fix: build an input whose layout matches the data_format under
      # test. The original unconditionally overwrote the channels_last array
      # with a channels_first-shaped one, so channels_last was exercised with
      # the wrong layout.
      if data_format == 'channels_first':
        inputs = np.ones((num_samples, stack_size, input_num_row,
                          input_num_col))
      else:
        inputs = np.ones((num_samples, input_num_row, input_num_col,
                          stack_size))

      # basic test
      with self.test_session(use_gpu=True):
        testing_utils.layer_test(
            keras.layers.ZeroPadding2D,
            kwargs={'padding': (2, 2),
                    'data_format': data_format},
            input_shape=inputs.shape)
        testing_utils.layer_test(
            keras.layers.ZeroPadding2D,
            kwargs={'padding': ((1, 2), (3, 4)),
                    'data_format': data_format},
            input_shape=inputs.shape)

      # correctness test
      with self.test_session(use_gpu=True):
        layer = keras.layers.ZeroPadding2D(
            padding=(2, 2), data_format=data_format)
        layer.build(inputs.shape)
        output = layer(keras.backend.variable(inputs))
        np_output = keras.backend.eval(output)
        if data_format == 'channels_last':
          for offset in [0, 1, -1, -2]:
            np.testing.assert_allclose(np_output[:, offset, :, :], 0.)
            np.testing.assert_allclose(np_output[:, :, offset, :], 0.)
          np.testing.assert_allclose(np_output[:, 2:-2, 2:-2, :], 1.)
        elif data_format == 'channels_first':
          for offset in [0, 1, -1, -2]:
            np.testing.assert_allclose(np_output[:, :, offset, :], 0.)
            np.testing.assert_allclose(np_output[:, :, :, offset], 0.)
          # Bug fix: for channels_first the spatial axes are 2 and 3, so the
          # interior slice must leave axis 1 (channels) untouched.
          np.testing.assert_allclose(np_output[:, :, 2:-2, 2:-2], 1.)

        layer = keras.layers.ZeroPadding2D(
            padding=((1, 2), (3, 4)), data_format=data_format)
        layer.build(inputs.shape)
        output = layer(keras.backend.variable(inputs))
        np_output = keras.backend.eval(output)
        if data_format == 'channels_last':
          for top_offset in [0]:
            np.testing.assert_allclose(np_output[:, top_offset, :, :], 0.)
          for bottom_offset in [-1, -2]:
            np.testing.assert_allclose(np_output[:, bottom_offset, :, :], 0.)
          for left_offset in [0, 1, 2]:
            np.testing.assert_allclose(np_output[:, :, left_offset, :], 0.)
          for right_offset in [-1, -2, -3, -4]:
            np.testing.assert_allclose(np_output[:, :, right_offset, :], 0.)
          np.testing.assert_allclose(np_output[:, 1:-2, 3:-4, :], 1.)
        elif data_format == 'channels_first':
          for top_offset in [0]:
            np.testing.assert_allclose(np_output[:, :, top_offset, :], 0.)
          for bottom_offset in [-1, -2]:
            np.testing.assert_allclose(np_output[:, :, bottom_offset, :], 0.)
          for left_offset in [0, 1, 2]:
            np.testing.assert_allclose(np_output[:, :, :, left_offset], 0.)
          for right_offset in [-1, -2, -3, -4]:
            np.testing.assert_allclose(np_output[:, :, :, right_offset], 0.)
          np.testing.assert_allclose(np_output[:, :, 1:-2, 3:-4], 1.)

  def test_zero_padding_3d(self):
    """ZeroPadding3D pads all three spatial axes (channels_last layout)."""
    num_samples = 2
    stack_size = 2
    input_len_dim1 = 4
    input_len_dim2 = 5
    input_len_dim3 = 3
    inputs = np.ones((num_samples, input_len_dim1, input_len_dim2,
                      input_len_dim3, stack_size))

    # basic test
    with self.test_session(use_gpu=True):
      testing_utils.layer_test(
          keras.layers.ZeroPadding3D,
          kwargs={'padding': (2, 2, 2)},
          input_shape=inputs.shape)

    # correctness test
    with self.test_session(use_gpu=True):
      layer = keras.layers.ZeroPadding3D(padding=(2, 2, 2))
      layer.build(inputs.shape)
      output = layer(keras.backend.variable(inputs))
      np_output = keras.backend.eval(output)
      # Two zero planes at each end of every spatial axis; ones inside.
      for offset in [0, 1, -1, -2]:
        np.testing.assert_allclose(np_output[:, offset, :, :, :], 0.)
        np.testing.assert_allclose(np_output[:, :, offset, :, :], 0.)
        np.testing.assert_allclose(np_output[:, :, :, offset, :], 0.)
      np.testing.assert_allclose(np_output[:, 2:-2, 2:-2, 2:-2, :], 1.)
class UpSamplingTest(test.TestCase):
  """Tests for the UpSampling1D/2D/3D layers."""

  def test_upsampling_1d(self):
    """Smoke-test UpSampling1D with a size-2 repeat."""
    with self.test_session(use_gpu=True):
      testing_utils.layer_test(
          keras.layers.UpSampling1D, kwargs={'size': 2}, input_shape=(3, 5, 4))

  def test_upsampling_2d(self):
    """UpSampling2D repeats rows/cols; verified against np.repeat."""
    batch = 2
    channels = 2
    rows = 11
    cols = 12
    for data_format in ['channels_first', 'channels_last']:
      if data_format == 'channels_first':
        inputs = np.random.rand(batch, channels, rows, cols)
      else:
        inputs = np.random.rand(batch, rows, cols, channels)

      # basic test
      with self.test_session(use_gpu=True):
        testing_utils.layer_test(
            keras.layers.UpSampling2D,
            kwargs={'size': (2, 2),
                    'data_format': data_format},
            input_shape=inputs.shape)

        for size_row in [2]:
          for size_col in [2, 3]:
            layer = keras.layers.UpSampling2D(
                size=(size_row, size_col), data_format=data_format)
            layer.build(inputs.shape)
            np_output = keras.backend.eval(
                layer(keras.backend.variable(inputs)))
            # The spatial axes depend on the data format.
            if data_format == 'channels_first':
              row_axis, col_axis = 2, 3
            else:
              row_axis, col_axis = 1, 2
            assert np_output.shape[row_axis] == size_row * rows
            assert np_output.shape[col_axis] == size_col * cols
            # compare with numpy: upsampling is repetition along each axis
            expected = np.repeat(inputs, size_row, axis=row_axis)
            expected = np.repeat(expected, size_col, axis=col_axis)
            np.testing.assert_allclose(np_output, expected)

  def test_upsampling_3d(self):
    """UpSampling3D repeats along all three spatial axes; checked vs np.repeat."""
    batch = 2
    channels = 2
    dim1 = 10
    dim2 = 11
    dim3 = 12
    for data_format in ['channels_first', 'channels_last']:
      if data_format == 'channels_first':
        inputs = np.random.rand(batch, channels, dim1, dim2, dim3)
      else:
        inputs = np.random.rand(batch, dim1, dim2, dim3, channels)

      # basic test
      with self.test_session(use_gpu=True):
        testing_utils.layer_test(
            keras.layers.UpSampling3D,
            kwargs={'size': (2, 2, 2),
                    'data_format': data_format},
            input_shape=inputs.shape)

        for size1 in [2, 3]:
          for size2 in [2]:
            for size3 in [3]:
              layer = keras.layers.UpSampling3D(
                  size=(size1, size2, size3), data_format=data_format)
              layer.build(inputs.shape)
              np_output = keras.backend.eval(
                  layer(keras.backend.variable(inputs)))
              # The spatial axes depend on the data format.
              if data_format == 'channels_first':
                axes = (2, 3, 4)
              else:
                axes = (1, 2, 3)
              factors = (size1, size2, size3)
              for axis, factor, length in zip(axes, factors,
                                              (dim1, dim2, dim3)):
                assert np_output.shape[axis] == factor * length
              # compare with numpy
              expected = inputs
              for axis, factor in zip(axes, factors):
                expected = np.repeat(expected, factor, axis=axis)
              np.testing.assert_allclose(np_output, expected)
class CroppingTest(test.TestCase):
  """Tests for the Cropping1D/2D/3D layers."""

  def test_cropping_1d(self):
    """Smoke-test Cropping1D on the step axis."""
    num_samples = 2
    time_length = 4
    input_len_dim1 = 2
    inputs = np.random.rand(num_samples, time_length, input_len_dim1)
    with self.test_session(use_gpu=True):
      testing_utils.layer_test(
          keras.layers.Cropping1D,
          kwargs={'cropping': (2, 2)},
          input_shape=inputs.shape)

  def test_cropping_2d(self):
    """Cropping2D must equal the corresponding numpy slice for both formats."""
    num_samples = 2
    stack_size = 2
    input_len_dim1 = 9
    input_len_dim2 = 9
    cropping = ((2, 2), (3, 3))
    for data_format in ['channels_first', 'channels_last']:
      if data_format == 'channels_first':
        inputs = np.random.rand(num_samples, stack_size, input_len_dim1,
                                input_len_dim2)
      else:
        inputs = np.random.rand(num_samples, input_len_dim1, input_len_dim2,
                                stack_size)
      # basic test
      with self.test_session(use_gpu=True):
        testing_utils.layer_test(
            keras.layers.Cropping2D,
            kwargs={'cropping': cropping,
                    'data_format': data_format},
            input_shape=inputs.shape)
      # correctness test
      with self.test_session(use_gpu=True):
        layer = keras.layers.Cropping2D(
            cropping=cropping, data_format=data_format)
        layer.build(inputs.shape)
        output = layer(keras.backend.variable(inputs))
        np_output = keras.backend.eval(output)
        # compare with numpy slicing on the format-dependent spatial axes
        if data_format == 'channels_first':
          expected_out = inputs[:, :, cropping[0][0]:-cropping[0][1], cropping[
              1][0]:-cropping[1][1]]
        else:
          expected_out = inputs[:, cropping[0][0]:-cropping[0][1], cropping[1][
              0]:-cropping[1][1], :]
        np.testing.assert_allclose(np_output, expected_out)

    for data_format in ['channels_first', 'channels_last']:
      if data_format == 'channels_first':
        inputs = np.random.rand(num_samples, stack_size, input_len_dim1,
                                input_len_dim2)
      else:
        inputs = np.random.rand(num_samples, input_len_dim1, input_len_dim2,
                                stack_size)
      # another correctness test (no cropping)
      with self.test_session(use_gpu=True):
        cropping = ((0, 0), (0, 0))
        layer = keras.layers.Cropping2D(
            cropping=cropping, data_format=data_format)
        layer.build(inputs.shape)
        output = layer(keras.backend.variable(inputs))
        np_output = keras.backend.eval(output)
        # Zero cropping must be the identity.
        np.testing.assert_allclose(np_output, inputs)

  def test_cropping_3d(self):
    """Cropping3D must equal the corresponding numpy slice for both formats."""
    num_samples = 2
    stack_size = 2
    input_len_dim1 = 8
    input_len_dim2 = 8
    input_len_dim3 = 8
    cropping = ((2, 2), (1, 1), (2, 3))
    for data_format in ['channels_last', 'channels_first']:
      if data_format == 'channels_first':
        inputs = np.random.rand(num_samples, stack_size, input_len_dim1,
                                input_len_dim2, input_len_dim3)
      else:
        inputs = np.random.rand(num_samples, input_len_dim1, input_len_dim2,
                                input_len_dim3, stack_size)
      # basic test
      with self.test_session(use_gpu=True):
        testing_utils.layer_test(
            keras.layers.Cropping3D,
            kwargs={'cropping': cropping,
                    'data_format': data_format},
            input_shape=inputs.shape)
      # correctness test
      with self.test_session(use_gpu=True):
        layer = keras.layers.Cropping3D(
            cropping=cropping, data_format=data_format)
        layer.build(inputs.shape)
        output = layer(keras.backend.variable(inputs))
        np_output = keras.backend.eval(output)
        # compare with numpy slicing on the format-dependent spatial axes
        if data_format == 'channels_first':
          expected_out = inputs[:, :,
                                cropping[0][0]:-cropping[0][1],
                                cropping[1][0]:-cropping[1][1],
                                cropping[2][0]:-cropping[2][1]]
        else:
          expected_out = inputs[:,
                                cropping[0][0]:-cropping[0][1],
                                cropping[1][0]:-cropping[1][1],
                                cropping[2][0]:-cropping[2][1], :]
        # Fixed: removed a stray debug print of expected_out.shape.
        np.testing.assert_allclose(np_output, expected_out)
# Run all test cases in this module when executed as a script.
if __name__ == '__main__':
  test.main()
| apache-2.0 |
axbaretto/beam | sdks/python/.tox/lint/lib/python2.7/site-packages/pip/_vendor/requests/packages/chardet/euctwfreq.py | 3133 | 34872 | ######################## BEGIN LICENSE BLOCK ########################
# The Original Code is Mozilla Communicator client code.
#
# The Initial Developer of the Original Code is
# Netscape Communications Corporation.
# Portions created by the Initial Developer are Copyright (C) 1998
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
# Mark Pilgrim - port to Python
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301 USA
######################### END LICENSE BLOCK #########################
# EUCTW frequency table
# Converted from big5 work
# by Taiwan's Mandarin Promotion Council
# <http:#www.edu.tw:81/mandr/>
# 128 --> 0.42261
# 256 --> 0.57851
# 512 --> 0.74851
# 1024 --> 0.89384
# 2048 --> 0.97583
#
# Idea Distribution Ratio = 0.74851/(1-0.74851) =2.98
# Random Distribution Ration = 512/(5401-512)=0.105
#
# Typical Distribution Ratio about 25% of Ideal one, still much higher than RDR
# Confidence threshold used by the EUC-TW distribution analyser; per the
# header comment, the typical distribution ratio is about 25% of the ideal.
EUCTW_TYPICAL_DISTRIBUTION_RATIO = 0.75
# Number of entries in the char-to-frequency-order table that follows.
EUCTW_TABLE_SIZE = 8102
EUCTWCharToFreqOrder = (
1,1800,1506, 255,1431, 198, 9, 82, 6,7310, 177, 202,3615,1256,2808, 110, # 2742
3735, 33,3241, 261, 76, 44,2113, 16,2931,2184,1176, 659,3868, 26,3404,2643, # 2758
1198,3869,3313,4060, 410,2211, 302, 590, 361,1963, 8, 204, 58,4296,7311,1931, # 2774
63,7312,7313, 317,1614, 75, 222, 159,4061,2412,1480,7314,3500,3068, 224,2809, # 2790
3616, 3, 10,3870,1471, 29,2774,1135,2852,1939, 873, 130,3242,1123, 312,7315, # 2806
4297,2051, 507, 252, 682,7316, 142,1914, 124, 206,2932, 34,3501,3173, 64, 604, # 2822
7317,2494,1976,1977, 155,1990, 645, 641,1606,7318,3405, 337, 72, 406,7319, 80, # 2838
630, 238,3174,1509, 263, 939,1092,2644, 756,1440,1094,3406, 449, 69,2969, 591, # 2854
179,2095, 471, 115,2034,1843, 60, 50,2970, 134, 806,1868, 734,2035,3407, 180, # 2870
995,1607, 156, 537,2893, 688,7320, 319,1305, 779,2144, 514,2374, 298,4298, 359, # 2886
2495, 90,2707,1338, 663, 11, 906,1099,2545, 20,2436, 182, 532,1716,7321, 732, # 2902
1376,4062,1311,1420,3175, 25,2312,1056, 113, 399, 382,1949, 242,3408,2467, 529, # 2918
3243, 475,1447,3617,7322, 117, 21, 656, 810,1297,2295,2329,3502,7323, 126,4063, # 2934
706, 456, 150, 613,4299, 71,1118,2036,4064, 145,3069, 85, 835, 486,2114,1246, # 2950
1426, 428, 727,1285,1015, 800, 106, 623, 303,1281,7324,2127,2354, 347,3736, 221, # 2966
3503,3110,7325,1955,1153,4065, 83, 296,1199,3070, 192, 624, 93,7326, 822,1897, # 2982
2810,3111, 795,2064, 991,1554,1542,1592, 27, 43,2853, 859, 139,1456, 860,4300, # 2998
437, 712,3871, 164,2392,3112, 695, 211,3017,2096, 195,3872,1608,3504,3505,3618, # 3014
3873, 234, 811,2971,2097,3874,2229,1441,3506,1615,2375, 668,2076,1638, 305, 228, # 3030
1664,4301, 467, 415,7327, 262,2098,1593, 239, 108, 300, 200,1033, 512,1247,2077, # 3046
7328,7329,2173,3176,3619,2673, 593, 845,1062,3244, 88,1723,2037,3875,1950, 212, # 3062
266, 152, 149, 468,1898,4066,4302, 77, 187,7330,3018, 37, 5,2972,7331,3876, # 3078
7332,7333, 39,2517,4303,2894,3177,2078, 55, 148, 74,4304, 545, 483,1474,1029, # 3094
1665, 217,1869,1531,3113,1104,2645,4067, 24, 172,3507, 900,3877,3508,3509,4305, # 3110
32,1408,2811,1312, 329, 487,2355,2247,2708, 784,2674, 4,3019,3314,1427,1788, # 3126
188, 109, 499,7334,3620,1717,1789, 888,1217,3020,4306,7335,3510,7336,3315,1520, # 3142
3621,3878, 196,1034, 775,7337,7338, 929,1815, 249, 439, 38,7339,1063,7340, 794, # 3158
3879,1435,2296, 46, 178,3245,2065,7341,2376,7342, 214,1709,4307, 804, 35, 707, # 3174
324,3622,1601,2546, 140, 459,4068,7343,7344,1365, 839, 272, 978,2257,2572,3409, # 3190
2128,1363,3623,1423, 697, 100,3071, 48, 70,1231, 495,3114,2193,7345,1294,7346, # 3206
2079, 462, 586,1042,3246, 853, 256, 988, 185,2377,3410,1698, 434,1084,7347,3411, # 3222
314,2615,2775,4308,2330,2331, 569,2280, 637,1816,2518, 757,1162,1878,1616,3412, # 3238
287,1577,2115, 768,4309,1671,2854,3511,2519,1321,3737, 909,2413,7348,4069, 933, # 3254
3738,7349,2052,2356,1222,4310, 765,2414,1322, 786,4311,7350,1919,1462,1677,2895, # 3270
1699,7351,4312,1424,2437,3115,3624,2590,3316,1774,1940,3413,3880,4070, 309,1369, # 3286
1130,2812, 364,2230,1653,1299,3881,3512,3882,3883,2646, 525,1085,3021, 902,2000, # 3302
1475, 964,4313, 421,1844,1415,1057,2281, 940,1364,3116, 376,4314,4315,1381, 7, # 3318
2520, 983,2378, 336,1710,2675,1845, 321,3414, 559,1131,3022,2742,1808,1132,1313, # 3334
265,1481,1857,7352, 352,1203,2813,3247, 167,1089, 420,2814, 776, 792,1724,3513, # 3350
4071,2438,3248,7353,4072,7354, 446, 229, 333,2743, 901,3739,1200,1557,4316,2647, # 3366
1920, 395,2744,2676,3740,4073,1835, 125, 916,3178,2616,4317,7355,7356,3741,7357, # 3382
7358,7359,4318,3117,3625,1133,2547,1757,3415,1510,2313,1409,3514,7360,2145, 438, # 3398
2591,2896,2379,3317,1068, 958,3023, 461, 311,2855,2677,4074,1915,3179,4075,1978, # 3414
383, 750,2745,2617,4076, 274, 539, 385,1278,1442,7361,1154,1964, 384, 561, 210, # 3430
98,1295,2548,3515,7362,1711,2415,1482,3416,3884,2897,1257, 129,7363,3742, 642, # 3446
523,2776,2777,2648,7364, 141,2231,1333, 68, 176, 441, 876, 907,4077, 603,2592, # 3462
710, 171,3417, 404, 549, 18,3118,2393,1410,3626,1666,7365,3516,4319,2898,4320, # 3478
7366,2973, 368,7367, 146, 366, 99, 871,3627,1543, 748, 807,1586,1185, 22,2258, # 3494
379,3743,3180,7368,3181, 505,1941,2618,1991,1382,2314,7369, 380,2357, 218, 702, # 3510
1817,1248,3418,3024,3517,3318,3249,7370,2974,3628, 930,3250,3744,7371, 59,7372, # 3526
585, 601,4078, 497,3419,1112,1314,4321,1801,7373,1223,1472,2174,7374, 749,1836, # 3542
690,1899,3745,1772,3885,1476, 429,1043,1790,2232,2116, 917,4079, 447,1086,1629, # 3558
7375, 556,7376,7377,2020,1654, 844,1090, 105, 550, 966,1758,2815,1008,1782, 686, # 3574
1095,7378,2282, 793,1602,7379,3518,2593,4322,4080,2933,2297,4323,3746, 980,2496, # 3590
544, 353, 527,4324, 908,2678,2899,7380, 381,2619,1942,1348,7381,1341,1252, 560, # 3606
3072,7382,3420,2856,7383,2053, 973, 886,2080, 143,4325,7384,7385, 157,3886, 496, # 3622
4081, 57, 840, 540,2038,4326,4327,3421,2117,1445, 970,2259,1748,1965,2081,4082, # 3638
3119,1234,1775,3251,2816,3629, 773,1206,2129,1066,2039,1326,3887,1738,1725,4083, # 3654
279,3120, 51,1544,2594, 423,1578,2130,2066, 173,4328,1879,7386,7387,1583, 264, # 3670
610,3630,4329,2439, 280, 154,7388,7389,7390,1739, 338,1282,3073, 693,2857,1411, # 3686
1074,3747,2440,7391,4330,7392,7393,1240, 952,2394,7394,2900,1538,2679, 685,1483, # 3702
4084,2468,1436, 953,4085,2054,4331, 671,2395, 79,4086,2441,3252, 608, 567,2680, # 3718
3422,4087,4088,1691, 393,1261,1791,2396,7395,4332,7396,7397,7398,7399,1383,1672, # 3734
3748,3182,1464, 522,1119, 661,1150, 216, 675,4333,3888,1432,3519, 609,4334,2681, # 3750
2397,7400,7401,7402,4089,3025, 0,7403,2469, 315, 231,2442, 301,3319,4335,2380, # 3766
7404, 233,4090,3631,1818,4336,4337,7405, 96,1776,1315,2082,7406, 257,7407,1809, # 3782
3632,2709,1139,1819,4091,2021,1124,2163,2778,1777,2649,7408,3074, 363,1655,3183, # 3798
7409,2975,7410,7411,7412,3889,1567,3890, 718, 103,3184, 849,1443, 341,3320,2934, # 3814
1484,7413,1712, 127, 67, 339,4092,2398, 679,1412, 821,7414,7415, 834, 738, 351, # 3830
2976,2146, 846, 235,1497,1880, 418,1992,3749,2710, 186,1100,2147,2746,3520,1545, # 3846
1355,2935,2858,1377, 583,3891,4093,2573,2977,7416,1298,3633,1078,2549,3634,2358, # 3862
78,3750,3751, 267,1289,2099,2001,1594,4094, 348, 369,1274,2194,2175,1837,4338, # 3878
1820,2817,3635,2747,2283,2002,4339,2936,2748, 144,3321, 882,4340,3892,2749,3423, # 3894
4341,2901,7417,4095,1726, 320,7418,3893,3026, 788,2978,7419,2818,1773,1327,2859, # 3910
3894,2819,7420,1306,4342,2003,1700,3752,3521,2359,2650, 787,2022, 506, 824,3636, # 3926
534, 323,4343,1044,3322,2023,1900, 946,3424,7421,1778,1500,1678,7422,1881,4344, # 3942
165, 243,4345,3637,2521, 123, 683,4096, 764,4346, 36,3895,1792, 589,2902, 816, # 3958
626,1667,3027,2233,1639,1555,1622,3753,3896,7423,3897,2860,1370,1228,1932, 891, # 3974
2083,2903, 304,4097,7424, 292,2979,2711,3522, 691,2100,4098,1115,4347, 118, 662, # 3990
7425, 611,1156, 854,2381,1316,2861, 2, 386, 515,2904,7426,7427,3253, 868,2234, # 4006
1486, 855,2651, 785,2212,3028,7428,1040,3185,3523,7429,3121, 448,7430,1525,7431, # 4022
2164,4348,7432,3754,7433,4099,2820,3524,3122, 503, 818,3898,3123,1568, 814, 676, # 4038
1444, 306,1749,7434,3755,1416,1030, 197,1428, 805,2821,1501,4349,7435,7436,7437, # 4054
1993,7438,4350,7439,7440,2195, 13,2779,3638,2980,3124,1229,1916,7441,3756,2131, # 4070
7442,4100,4351,2399,3525,7443,2213,1511,1727,1120,7444,7445, 646,3757,2443, 307, # 4086
7446,7447,1595,3186,7448,7449,7450,3639,1113,1356,3899,1465,2522,2523,7451, 519, # 4102
7452, 128,2132, 92,2284,1979,7453,3900,1512, 342,3125,2196,7454,2780,2214,1980, # 4118
3323,7455, 290,1656,1317, 789, 827,2360,7456,3758,4352, 562, 581,3901,7457, 401, # 4134
4353,2248, 94,4354,1399,2781,7458,1463,2024,4355,3187,1943,7459, 828,1105,4101, # 4150
1262,1394,7460,4102, 605,4356,7461,1783,2862,7462,2822, 819,2101, 578,2197,2937, # 4166
7463,1502, 436,3254,4103,3255,2823,3902,2905,3425,3426,7464,2712,2315,7465,7466, # 4182
2332,2067, 23,4357, 193, 826,3759,2102, 699,1630,4104,3075, 390,1793,1064,3526, # 4198
7467,1579,3076,3077,1400,7468,4105,1838,1640,2863,7469,4358,4359, 137,4106, 598, # 4214
3078,1966, 780, 104, 974,2938,7470, 278, 899, 253, 402, 572, 504, 493,1339,7471, # 4230
3903,1275,4360,2574,2550,7472,3640,3029,3079,2249, 565,1334,2713, 863, 41,7473, # 4246
7474,4361,7475,1657,2333, 19, 463,2750,4107, 606,7476,2981,3256,1087,2084,1323, # 4262
2652,2982,7477,1631,1623,1750,4108,2682,7478,2864, 791,2714,2653,2334, 232,2416, # 4278
7479,2983,1498,7480,2654,2620, 755,1366,3641,3257,3126,2025,1609, 119,1917,3427, # 4294
862,1026,4109,7481,3904,3760,4362,3905,4363,2260,1951,2470,7482,1125, 817,4110, # 4310
4111,3906,1513,1766,2040,1487,4112,3030,3258,2824,3761,3127,7483,7484,1507,7485, # 4326
2683, 733, 40,1632,1106,2865, 345,4113, 841,2524, 230,4364,2984,1846,3259,3428, # 4342
7486,1263, 986,3429,7487, 735, 879, 254,1137, 857, 622,1300,1180,1388,1562,3907, # 4358
3908,2939, 967,2751,2655,1349, 592,2133,1692,3324,2985,1994,4114,1679,3909,1901, # 4374
2185,7488, 739,3642,2715,1296,1290,7489,4115,2198,2199,1921,1563,2595,2551,1870, # 4390
2752,2986,7490, 435,7491, 343,1108, 596, 17,1751,4365,2235,3430,3643,7492,4366, # 4406
294,3527,2940,1693, 477, 979, 281,2041,3528, 643,2042,3644,2621,2782,2261,1031, # 4422
2335,2134,2298,3529,4367, 367,1249,2552,7493,3530,7494,4368,1283,3325,2004, 240, # 4438
1762,3326,4369,4370, 836,1069,3128, 474,7495,2148,2525, 268,3531,7496,3188,1521, # 4454
1284,7497,1658,1546,4116,7498,3532,3533,7499,4117,3327,2684,1685,4118, 961,1673, # 4470
2622, 190,2005,2200,3762,4371,4372,7500, 570,2497,3645,1490,7501,4373,2623,3260, # 4486
1956,4374, 584,1514, 396,1045,1944,7502,4375,1967,2444,7503,7504,4376,3910, 619, # 4502
7505,3129,3261, 215,2006,2783,2553,3189,4377,3190,4378, 763,4119,3763,4379,7506, # 4518
7507,1957,1767,2941,3328,3646,1174, 452,1477,4380,3329,3130,7508,2825,1253,2382, # 4534
2186,1091,2285,4120, 492,7509, 638,1169,1824,2135,1752,3911, 648, 926,1021,1324, # 4550
4381, 520,4382, 997, 847,1007, 892,4383,3764,2262,1871,3647,7510,2400,1784,4384, # 4566
1952,2942,3080,3191,1728,4121,2043,3648,4385,2007,1701,3131,1551, 30,2263,4122, # 4582
7511,2026,4386,3534,7512, 501,7513,4123, 594,3431,2165,1821,3535,3432,3536,3192, # 4598
829,2826,4124,7514,1680,3132,1225,4125,7515,3262,4387,4126,3133,2336,7516,4388, # 4614
4127,7517,3912,3913,7518,1847,2383,2596,3330,7519,4389, 374,3914, 652,4128,4129, # 4630
375,1140, 798,7520,7521,7522,2361,4390,2264, 546,1659, 138,3031,2445,4391,7523, # 4646
2250, 612,1848, 910, 796,3765,1740,1371, 825,3766,3767,7524,2906,2554,7525, 692, # 4662
444,3032,2624, 801,4392,4130,7526,1491, 244,1053,3033,4131,4132, 340,7527,3915, # 4678
1041,2987, 293,1168, 87,1357,7528,1539, 959,7529,2236, 721, 694,4133,3768, 219, # 4694
1478, 644,1417,3331,2656,1413,1401,1335,1389,3916,7530,7531,2988,2362,3134,1825, # 4710
730,1515, 184,2827, 66,4393,7532,1660,2943, 246,3332, 378,1457, 226,3433, 975, # 4726
3917,2944,1264,3537, 674, 696,7533, 163,7534,1141,2417,2166, 713,3538,3333,4394, # 4742
3918,7535,7536,1186, 15,7537,1079,1070,7538,1522,3193,3539, 276,1050,2716, 758, # 4758
1126, 653,2945,3263,7539,2337, 889,3540,3919,3081,2989, 903,1250,4395,3920,3434, # 4774
3541,1342,1681,1718, 766,3264, 286, 89,2946,3649,7540,1713,7541,2597,3334,2990, # 4790
7542,2947,2215,3194,2866,7543,4396,2498,2526, 181, 387,1075,3921, 731,2187,3335, # 4806
7544,3265, 310, 313,3435,2299, 770,4134, 54,3034, 189,4397,3082,3769,3922,7545, # 4822
1230,1617,1849, 355,3542,4135,4398,3336, 111,4136,3650,1350,3135,3436,3035,4137, # 4838
2149,3266,3543,7546,2784,3923,3924,2991, 722,2008,7547,1071, 247,1207,2338,2471, # 4854
1378,4399,2009, 864,1437,1214,4400, 373,3770,1142,2216, 667,4401, 442,2753,2555, # 4870
3771,3925,1968,4138,3267,1839, 837, 170,1107, 934,1336,1882,7548,7549,2118,4139, # 4886
2828, 743,1569,7550,4402,4140, 582,2384,1418,3437,7551,1802,7552, 357,1395,1729, # 4902
3651,3268,2418,1564,2237,7553,3083,3772,1633,4403,1114,2085,4141,1532,7554, 482, # 4918
2446,4404,7555,7556,1492, 833,1466,7557,2717,3544,1641,2829,7558,1526,1272,3652, # 4934
4142,1686,1794, 416,2556,1902,1953,1803,7559,3773,2785,3774,1159,2316,7560,2867, # 4950
4405,1610,1584,3036,2419,2754, 443,3269,1163,3136,7561,7562,3926,7563,4143,2499, # 4966
3037,4406,3927,3137,2103,1647,3545,2010,1872,4144,7564,4145, 431,3438,7565, 250, # 4982
97, 81,4146,7566,1648,1850,1558, 160, 848,7567, 866, 740,1694,7568,2201,2830, # 4998
3195,4147,4407,3653,1687, 950,2472, 426, 469,3196,3654,3655,3928,7569,7570,1188, # 5014
424,1995, 861,3546,4148,3775,2202,2685, 168,1235,3547,4149,7571,2086,1674,4408, # 5030
3337,3270, 220,2557,1009,7572,3776, 670,2992, 332,1208, 717,7573,7574,3548,2447, # 5046
3929,3338,7575, 513,7576,1209,2868,3339,3138,4409,1080,7577,7578,7579,7580,2527, # 5062
3656,3549, 815,1587,3930,3931,7581,3550,3439,3777,1254,4410,1328,3038,1390,3932, # 5078
1741,3933,3778,3934,7582, 236,3779,2448,3271,7583,7584,3657,3780,1273,3781,4411, # 5094
7585, 308,7586,4412, 245,4413,1851,2473,1307,2575, 430, 715,2136,2449,7587, 270, # 5110
199,2869,3935,7588,3551,2718,1753, 761,1754, 725,1661,1840,4414,3440,3658,7589, # 5126
7590, 587, 14,3272, 227,2598, 326, 480,2265, 943,2755,3552, 291, 650,1883,7591, # 5142
1702,1226, 102,1547, 62,3441, 904,4415,3442,1164,4150,7592,7593,1224,1548,2756, # 5158
391, 498,1493,7594,1386,1419,7595,2055,1177,4416, 813, 880,1081,2363, 566,1145, # 5174
4417,2286,1001,1035,2558,2599,2238, 394,1286,7596,7597,2068,7598, 86,1494,1730, # 5190
3936, 491,1588, 745, 897,2948, 843,3340,3937,2757,2870,3273,1768, 998,2217,2069, # 5206
397,1826,1195,1969,3659,2993,3341, 284,7599,3782,2500,2137,2119,1903,7600,3938, # 5222
2150,3939,4151,1036,3443,1904, 114,2559,4152, 209,1527,7601,7602,2949,2831,2625, # 5238
2385,2719,3139, 812,2560,7603,3274,7604,1559, 737,1884,3660,1210, 885, 28,2686, # 5254
3553,3783,7605,4153,1004,1779,4418,7606, 346,1981,2218,2687,4419,3784,1742, 797, # 5270
1642,3940,1933,1072,1384,2151, 896,3941,3275,3661,3197,2871,3554,7607,2561,1958, # 5286
4420,2450,1785,7608,7609,7610,3942,4154,1005,1308,3662,4155,2720,4421,4422,1528, # 5302
2600, 161,1178,4156,1982, 987,4423,1101,4157, 631,3943,1157,3198,2420,1343,1241, # 5318
1016,2239,2562, 372, 877,2339,2501,1160, 555,1934, 911,3944,7611, 466,1170, 169, # 5334
1051,2907,2688,3663,2474,2994,1182,2011,2563,1251,2626,7612, 992,2340,3444,1540, # 5350
2721,1201,2070,2401,1996,2475,7613,4424, 528,1922,2188,1503,1873,1570,2364,3342, # 5366
3276,7614, 557,1073,7615,1827,3445,2087,2266,3140,3039,3084, 767,3085,2786,4425, # 5382
1006,4158,4426,2341,1267,2176,3664,3199, 778,3945,3200,2722,1597,2657,7616,4427, # 5398
7617,3446,7618,7619,7620,3277,2689,1433,3278, 131, 95,1504,3946, 723,4159,3141, # 5414
1841,3555,2758,2189,3947,2027,2104,3665,7621,2995,3948,1218,7622,3343,3201,3949, # 5430
4160,2576, 248,1634,3785, 912,7623,2832,3666,3040,3786, 654, 53,7624,2996,7625, # 5446
1688,4428, 777,3447,1032,3950,1425,7626, 191, 820,2120,2833, 971,4429, 931,3202, # 5462
135, 664, 783,3787,1997, 772,2908,1935,3951,3788,4430,2909,3203, 282,2723, 640, # 5478
1372,3448,1127, 922, 325,3344,7627,7628, 711,2044,7629,7630,3952,2219,2787,1936, # 5494
3953,3345,2220,2251,3789,2300,7631,4431,3790,1258,3279,3954,3204,2138,2950,3955, # 5510
3956,7632,2221, 258,3205,4432, 101,1227,7633,3280,1755,7634,1391,3281,7635,2910, # 5526
2056, 893,7636,7637,7638,1402,4161,2342,7639,7640,3206,3556,7641,7642, 878,1325, # 5542
1780,2788,4433, 259,1385,2577, 744,1183,2267,4434,7643,3957,2502,7644, 684,1024, # 5558
4162,7645, 472,3557,3449,1165,3282,3958,3959, 322,2152, 881, 455,1695,1152,1340, # 5574
660, 554,2153,4435,1058,4436,4163, 830,1065,3346,3960,4437,1923,7646,1703,1918, # 5590
7647, 932,2268, 122,7648,4438, 947, 677,7649,3791,2627, 297,1905,1924,2269,4439, # 5606
2317,3283,7650,7651,4164,7652,4165, 84,4166, 112, 989,7653, 547,1059,3961, 701, # 5622
3558,1019,7654,4167,7655,3450, 942, 639, 457,2301,2451, 993,2951, 407, 851, 494, # 5638
4440,3347, 927,7656,1237,7657,2421,3348, 573,4168, 680, 921,2911,1279,1874, 285, # 5654
790,1448,1983, 719,2167,7658,7659,4441,3962,3963,1649,7660,1541, 563,7661,1077, # 5670
7662,3349,3041,3451, 511,2997,3964,3965,3667,3966,1268,2564,3350,3207,4442,4443, # 5686
7663, 535,1048,1276,1189,2912,2028,3142,1438,1373,2834,2952,1134,2012,7664,4169, # 5702
1238,2578,3086,1259,7665, 700,7666,2953,3143,3668,4170,7667,4171,1146,1875,1906, # 5718
4444,2601,3967, 781,2422, 132,1589, 203, 147, 273,2789,2402, 898,1786,2154,3968, # 5734
3969,7668,3792,2790,7669,7670,4445,4446,7671,3208,7672,1635,3793, 965,7673,1804, # 5750
2690,1516,3559,1121,1082,1329,3284,3970,1449,3794, 65,1128,2835,2913,2759,1590, # 5766
3795,7674,7675, 12,2658, 45, 976,2579,3144,4447, 517,2528,1013,1037,3209,7676, # 5782
3796,2836,7677,3797,7678,3452,7679,2602, 614,1998,2318,3798,3087,2724,2628,7680, # 5798
2580,4172, 599,1269,7681,1810,3669,7682,2691,3088, 759,1060, 489,1805,3351,3285, # 5814
1358,7683,7684,2386,1387,1215,2629,2252, 490,7685,7686,4173,1759,2387,2343,7687, # 5830
4448,3799,1907,3971,2630,1806,3210,4449,3453,3286,2760,2344, 874,7688,7689,3454, # 5846
3670,1858, 91,2914,3671,3042,3800,4450,7690,3145,3972,2659,7691,3455,1202,1403, # 5862
3801,2954,2529,1517,2503,4451,3456,2504,7692,4452,7693,2692,1885,1495,1731,3973, # 5878
2365,4453,7694,2029,7695,7696,3974,2693,1216, 237,2581,4174,2319,3975,3802,4454, # 5894
4455,2694,3560,3457, 445,4456,7697,7698,7699,7700,2761, 61,3976,3672,1822,3977, # 5910
7701, 687,2045, 935, 925, 405,2660, 703,1096,1859,2725,4457,3978,1876,1367,2695, # 5926
3352, 918,2105,1781,2476, 334,3287,1611,1093,4458, 564,3146,3458,3673,3353, 945, # 5942
2631,2057,4459,7702,1925, 872,4175,7703,3459,2696,3089, 349,4176,3674,3979,4460, # 5958
3803,4177,3675,2155,3980,4461,4462,4178,4463,2403,2046, 782,3981, 400, 251,4179, # 5974
1624,7704,7705, 277,3676, 299,1265, 476,1191,3804,2121,4180,4181,1109, 205,7706, # 5990
2582,1000,2156,3561,1860,7707,7708,7709,4464,7710,4465,2565, 107,2477,2157,3982, # 6006
3460,3147,7711,1533, 541,1301, 158, 753,4182,2872,3562,7712,1696, 370,1088,4183, # 6022
4466,3563, 579, 327, 440, 162,2240, 269,1937,1374,3461, 968,3043, 56,1396,3090, # 6038
2106,3288,3354,7713,1926,2158,4467,2998,7714,3564,7715,7716,3677,4468,2478,7717, # 6054
2791,7718,1650,4469,7719,2603,7720,7721,3983,2661,3355,1149,3356,3984,3805,3985, # 6070
7722,1076, 49,7723, 951,3211,3289,3290, 450,2837, 920,7724,1811,2792,2366,4184, # 6086
1908,1138,2367,3806,3462,7725,3212,4470,1909,1147,1518,2423,4471,3807,7726,4472, # 6102
2388,2604, 260,1795,3213,7727,7728,3808,3291, 708,7729,3565,1704,7730,3566,1351, # 6118
1618,3357,2999,1886, 944,4185,3358,4186,3044,3359,4187,7731,3678, 422, 413,1714, # 6134
3292, 500,2058,2345,4188,2479,7732,1344,1910, 954,7733,1668,7734,7735,3986,2404, # 6150
4189,3567,3809,4190,7736,2302,1318,2505,3091, 133,3092,2873,4473, 629, 31,2838, # 6166
2697,3810,4474, 850, 949,4475,3987,2955,1732,2088,4191,1496,1852,7737,3988, 620, # 6182
3214, 981,1242,3679,3360,1619,3680,1643,3293,2139,2452,1970,1719,3463,2168,7738, # 6198
3215,7739,7740,3361,1828,7741,1277,4476,1565,2047,7742,1636,3568,3093,7743, 869, # 6214
2839, 655,3811,3812,3094,3989,3000,3813,1310,3569,4477,7744,7745,7746,1733, 558, # 6230
4478,3681, 335,1549,3045,1756,4192,3682,1945,3464,1829,1291,1192, 470,2726,2107, # 6246
2793, 913,1054,3990,7747,1027,7748,3046,3991,4479, 982,2662,3362,3148,3465,3216, # 6262
3217,1946,2794,7749, 571,4480,7750,1830,7751,3570,2583,1523,2424,7752,2089, 984, # 6278
4481,3683,1959,7753,3684, 852, 923,2795,3466,3685, 969,1519, 999,2048,2320,1705, # 6294
7754,3095, 615,1662, 151, 597,3992,2405,2321,1049, 275,4482,3686,4193, 568,3687, # 6310
3571,2480,4194,3688,7755,2425,2270, 409,3218,7756,1566,2874,3467,1002, 769,2840, # 6326
194,2090,3149,3689,2222,3294,4195, 628,1505,7757,7758,1763,2177,3001,3993, 521, # 6342
1161,2584,1787,2203,2406,4483,3994,1625,4196,4197, 412, 42,3096, 464,7759,2632, # 6358
4484,3363,1760,1571,2875,3468,2530,1219,2204,3814,2633,2140,2368,4485,4486,3295, # 6374
1651,3364,3572,7760,7761,3573,2481,3469,7762,3690,7763,7764,2271,2091, 460,7765, # 6390
4487,7766,3002, 962, 588,3574, 289,3219,2634,1116, 52,7767,3047,1796,7768,7769, # 6406
7770,1467,7771,1598,1143,3691,4198,1984,1734,1067,4488,1280,3365, 465,4489,1572, # 6422
510,7772,1927,2241,1812,1644,3575,7773,4490,3692,7774,7775,2663,1573,1534,7776, # 6438
7777,4199, 536,1807,1761,3470,3815,3150,2635,7778,7779,7780,4491,3471,2915,1911, # 6454
2796,7781,3296,1122, 377,3220,7782, 360,7783,7784,4200,1529, 551,7785,2059,3693, # 6470
1769,2426,7786,2916,4201,3297,3097,2322,2108,2030,4492,1404, 136,1468,1479, 672, # 6486
1171,3221,2303, 271,3151,7787,2762,7788,2049, 678,2727, 865,1947,4493,7789,2013, # 6502
3995,2956,7790,2728,2223,1397,3048,3694,4494,4495,1735,2917,3366,3576,7791,3816, # 6518
509,2841,2453,2876,3817,7792,7793,3152,3153,4496,4202,2531,4497,2304,1166,1010, # 6534
552, 681,1887,7794,7795,2957,2958,3996,1287,1596,1861,3154, 358, 453, 736, 175, # 6550
478,1117, 905,1167,1097,7796,1853,1530,7797,1706,7798,2178,3472,2287,3695,3473, # 6566
3577,4203,2092,4204,7799,3367,1193,2482,4205,1458,2190,2205,1862,1888,1421,3298, # 6582
2918,3049,2179,3474, 595,2122,7800,3997,7801,7802,4206,1707,2636, 223,3696,1359, # 6598
751,3098, 183,3475,7803,2797,3003, 419,2369, 633, 704,3818,2389, 241,7804,7805, # 6614
7806, 838,3004,3697,2272,2763,2454,3819,1938,2050,3998,1309,3099,2242,1181,7807, # 6630
1136,2206,3820,2370,1446,4207,2305,4498,7808,7809,4208,1055,2605, 484,3698,7810, # 6646
3999, 625,4209,2273,3368,1499,4210,4000,7811,4001,4211,3222,2274,2275,3476,7812, # 6662
7813,2764, 808,2606,3699,3369,4002,4212,3100,2532, 526,3370,3821,4213, 955,7814, # 6678
1620,4214,2637,2427,7815,1429,3700,1669,1831, 994, 928,7816,3578,1260,7817,7818, # 6694
7819,1948,2288, 741,2919,1626,4215,2729,2455, 867,1184, 362,3371,1392,7820,7821, # 6710
4003,4216,1770,1736,3223,2920,4499,4500,1928,2698,1459,1158,7822,3050,3372,2877, # 6726
1292,1929,2506,2842,3701,1985,1187,2071,2014,2607,4217,7823,2566,2507,2169,3702, # 6742
2483,3299,7824,3703,4501,7825,7826, 666,1003,3005,1022,3579,4218,7827,4502,1813, # 6758
2253, 574,3822,1603, 295,1535, 705,3823,4219, 283, 858, 417,7828,7829,3224,4503, # 6774
4504,3051,1220,1889,1046,2276,2456,4004,1393,1599, 689,2567, 388,4220,7830,2484, # 6790
802,7831,2798,3824,2060,1405,2254,7832,4505,3825,2109,1052,1345,3225,1585,7833, # 6806
809,7834,7835,7836, 575,2730,3477, 956,1552,1469,1144,2323,7837,2324,1560,2457, # 6822
3580,3226,4005, 616,2207,3155,2180,2289,7838,1832,7839,3478,4506,7840,1319,3704, # 6838
3705,1211,3581,1023,3227,1293,2799,7841,7842,7843,3826, 607,2306,3827, 762,2878, # 6854
1439,4221,1360,7844,1485,3052,7845,4507,1038,4222,1450,2061,2638,4223,1379,4508, # 6870
2585,7846,7847,4224,1352,1414,2325,2921,1172,7848,7849,3828,3829,7850,1797,1451, # 6886
7851,7852,7853,7854,2922,4006,4007,2485,2346, 411,4008,4009,3582,3300,3101,4509, # 6902
1561,2664,1452,4010,1375,7855,7856, 47,2959, 316,7857,1406,1591,2923,3156,7858, # 6918
1025,2141,3102,3157, 354,2731, 884,2224,4225,2407, 508,3706, 726,3583, 996,2428, # 6934
3584, 729,7859, 392,2191,1453,4011,4510,3707,7860,7861,2458,3585,2608,1675,2800, # 6950
919,2347,2960,2348,1270,4511,4012, 73,7862,7863, 647,7864,3228,2843,2255,1550, # 6966
1346,3006,7865,1332, 883,3479,7866,7867,7868,7869,3301,2765,7870,1212, 831,1347, # 6982
4226,4512,2326,3830,1863,3053, 720,3831,4513,4514,3832,7871,4227,7872,7873,4515, # 6998
7874,7875,1798,4516,3708,2609,4517,3586,1645,2371,7876,7877,2924, 669,2208,2665, # 7014
2429,7878,2879,7879,7880,1028,3229,7881,4228,2408,7882,2256,1353,7883,7884,4518, # 7030
3158, 518,7885,4013,7886,4229,1960,7887,2142,4230,7888,7889,3007,2349,2350,3833, # 7046
516,1833,1454,4014,2699,4231,4519,2225,2610,1971,1129,3587,7890,2766,7891,2961, # 7062
1422, 577,1470,3008,1524,3373,7892,7893, 432,4232,3054,3480,7894,2586,1455,2508, # 7078
2226,1972,1175,7895,1020,2732,4015,3481,4520,7896,2733,7897,1743,1361,3055,3482, # 7094
2639,4016,4233,4521,2290, 895, 924,4234,2170, 331,2243,3056, 166,1627,3057,1098, # 7110
7898,1232,2880,2227,3374,4522, 657, 403,1196,2372, 542,3709,3375,1600,4235,3483, # 7126
7899,4523,2767,3230, 576, 530,1362,7900,4524,2533,2666,3710,4017,7901, 842,3834, # 7142
7902,2801,2031,1014,4018, 213,2700,3376, 665, 621,4236,7903,3711,2925,2430,7904, # 7158
2431,3302,3588,3377,7905,4237,2534,4238,4525,3589,1682,4239,3484,1380,7906, 724, # 7174
2277, 600,1670,7907,1337,1233,4526,3103,2244,7908,1621,4527,7909, 651,4240,7910, # 7190
1612,4241,2611,7911,2844,7912,2734,2307,3058,7913, 716,2459,3059, 174,1255,2701, # 7206
4019,3590, 548,1320,1398, 728,4020,1574,7914,1890,1197,3060,4021,7915,3061,3062, # 7222
3712,3591,3713, 747,7916, 635,4242,4528,7917,7918,7919,4243,7920,7921,4529,7922, # 7238
3378,4530,2432, 451,7923,3714,2535,2072,4244,2735,4245,4022,7924,1764,4531,7925, # 7254
4246, 350,7926,2278,2390,2486,7927,4247,4023,2245,1434,4024, 488,4532, 458,4248, # 7270
4025,3715, 771,1330,2391,3835,2568,3159,2159,2409,1553,2667,3160,4249,7928,2487, # 7286
2881,2612,1720,2702,4250,3379,4533,7929,2536,4251,7930,3231,4252,2768,7931,2015, # 7302
2736,7932,1155,1017,3716,3836,7933,3303,2308, 201,1864,4253,1430,7934,4026,7935, # 7318
7936,7937,7938,7939,4254,1604,7940, 414,1865, 371,2587,4534,4535,3485,2016,3104, # 7334
4536,1708, 960,4255, 887, 389,2171,1536,1663,1721,7941,2228,4027,2351,2926,1580, # 7350
7942,7943,7944,1744,7945,2537,4537,4538,7946,4539,7947,2073,7948,7949,3592,3380, # 7366
2882,4256,7950,4257,2640,3381,2802, 673,2703,2460, 709,3486,4028,3593,4258,7951, # 7382
1148, 502, 634,7952,7953,1204,4540,3594,1575,4541,2613,3717,7954,3718,3105, 948, # 7398
3232, 121,1745,3837,1110,7955,4259,3063,2509,3009,4029,3719,1151,1771,3838,1488, # 7414
4030,1986,7956,2433,3487,7957,7958,2093,7959,4260,3839,1213,1407,2803, 531,2737, # 7430
2538,3233,1011,1537,7960,2769,4261,3106,1061,7961,3720,3721,1866,2883,7962,2017, # 7446
120,4262,4263,2062,3595,3234,2309,3840,2668,3382,1954,4542,7963,7964,3488,1047, # 7462
2704,1266,7965,1368,4543,2845, 649,3383,3841,2539,2738,1102,2846,2669,7966,7967, # 7478
1999,7968,1111,3596,2962,7969,2488,3842,3597,2804,1854,3384,3722,7970,7971,3385, # 7494
2410,2884,3304,3235,3598,7972,2569,7973,3599,2805,4031,1460, 856,7974,3600,7975, # 7510
2885,2963,7976,2886,3843,7977,4264, 632,2510, 875,3844,1697,3845,2291,7978,7979, # 7526
4544,3010,1239, 580,4545,4265,7980, 914, 936,2074,1190,4032,1039,2123,7981,7982, # 7542
7983,3386,1473,7984,1354,4266,3846,7985,2172,3064,4033, 915,3305,4267,4268,3306, # 7558
1605,1834,7986,2739, 398,3601,4269,3847,4034, 328,1912,2847,4035,3848,1331,4270, # 7574
3011, 937,4271,7987,3602,4036,4037,3387,2160,4546,3388, 524, 742, 538,3065,1012, # 7590
7988,7989,3849,2461,7990, 658,1103, 225,3850,7991,7992,4547,7993,4548,7994,3236, # 7606
1243,7995,4038, 963,2246,4549,7996,2705,3603,3161,7997,7998,2588,2327,7999,4550, # 7622
8000,8001,8002,3489,3307, 957,3389,2540,2032,1930,2927,2462, 870,2018,3604,1746, # 7638
2770,2771,2434,2463,8003,3851,8004,3723,3107,3724,3490,3390,3725,8005,1179,3066, # 7654
8006,3162,2373,4272,3726,2541,3163,3108,2740,4039,8007,3391,1556,2542,2292, 977, # 7670
2887,2033,4040,1205,3392,8008,1765,3393,3164,2124,1271,1689, 714,4551,3491,8009, # 7686
2328,3852, 533,4273,3605,2181, 617,8010,2464,3308,3492,2310,8011,8012,3165,8013, # 7702
8014,3853,1987, 618, 427,2641,3493,3394,8015,8016,1244,1690,8017,2806,4274,4552, # 7718
8018,3494,8019,8020,2279,1576, 473,3606,4275,3395, 972,8021,3607,8022,3067,8023, # 7734
8024,4553,4554,8025,3727,4041,4042,8026, 153,4555, 356,8027,1891,2888,4276,2143, # 7750
408, 803,2352,8028,3854,8029,4277,1646,2570,2511,4556,4557,3855,8030,3856,4278, # 7766
8031,2411,3396, 752,8032,8033,1961,2964,8034, 746,3012,2465,8035,4279,3728, 698, # 7782
4558,1892,4280,3608,2543,4559,3609,3857,8036,3166,3397,8037,1823,1302,4043,2706, # 7798
3858,1973,4281,8038,4282,3167, 823,1303,1288,1236,2848,3495,4044,3398, 774,3859, # 7814
8039,1581,4560,1304,2849,3860,4561,8040,2435,2161,1083,3237,4283,4045,4284, 344, # 7830
1173, 288,2311, 454,1683,8041,8042,1461,4562,4046,2589,8043,8044,4563, 985, 894, # 7846
8045,3399,3168,8046,1913,2928,3729,1988,8047,2110,1974,8048,4047,8049,2571,1194, # 7862
425,8050,4564,3169,1245,3730,4285,8051,8052,2850,8053, 636,4565,1855,3861, 760, # 7878
1799,8054,4286,2209,1508,4566,4048,1893,1684,2293,8055,8056,8057,4287,4288,2210, # 7894
479,8058,8059, 832,8060,4049,2489,8061,2965,2490,3731, 990,3109, 627,1814,2642, # 7910
4289,1582,4290,2125,2111,3496,4567,8062, 799,4291,3170,8063,4568,2112,1737,3013, # 7926
1018, 543, 754,4292,3309,1676,4569,4570,4050,8064,1489,8065,3497,8066,2614,2889, # 7942
4051,8067,8068,2966,8069,8070,8071,8072,3171,4571,4572,2182,1722,8073,3238,3239, # 7958
1842,3610,1715, 481, 365,1975,1856,8074,8075,1962,2491,4573,8076,2126,3611,3240, # 7974
433,1894,2063,2075,8077, 602,2741,8078,8079,8080,8081,8082,3014,1628,3400,8083, # 7990
3172,4574,4052,2890,4575,2512,8084,2544,2772,8085,8086,8087,3310,4576,2891,8088, # 8006
4577,8089,2851,4578,4579,1221,2967,4053,2513,8090,8091,8092,1867,1989,8093,8094, # 8022
8095,1895,8096,8097,4580,1896,4054, 318,8098,2094,4055,4293,8099,8100, 485,8101, # 8038
938,3862, 553,2670, 116,8102,3863,3612,8103,3498,2671,2773,3401,3311,2807,8104, # 8054
3613,2929,4056,1747,2930,2968,8105,8106, 207,8107,8108,2672,4581,2514,8109,3015, # 8070
890,3614,3864,8110,1877,3732,3402,8111,2183,2353,3403,1652,8112,8113,8114, 941, # 8086
2294, 208,3499,4057,2019, 330,4294,3865,2892,2492,3733,4295,8115,8116,8117,8118, # 8102
#Everything below is of no interest for detection purpose
2515,1613,4582,8119,3312,3866,2516,8120,4058,8121,1637,4059,2466,4583,3867,8122, # 8118
2493,3016,3734,8123,8124,2192,8125,8126,2162,8127,8128,8129,8130,8131,8132,8133, # 8134
8134,8135,8136,8137,8138,8139,8140,8141,8142,8143,8144,8145,8146,8147,8148,8149, # 8150
8150,8151,8152,8153,8154,8155,8156,8157,8158,8159,8160,8161,8162,8163,8164,8165, # 8166
8166,8167,8168,8169,8170,8171,8172,8173,8174,8175,8176,8177,8178,8179,8180,8181, # 8182
8182,8183,8184,8185,8186,8187,8188,8189,8190,8191,8192,8193,8194,8195,8196,8197, # 8198
8198,8199,8200,8201,8202,8203,8204,8205,8206,8207,8208,8209,8210,8211,8212,8213, # 8214
8214,8215,8216,8217,8218,8219,8220,8221,8222,8223,8224,8225,8226,8227,8228,8229, # 8230
8230,8231,8232,8233,8234,8235,8236,8237,8238,8239,8240,8241,8242,8243,8244,8245, # 8246
8246,8247,8248,8249,8250,8251,8252,8253,8254,8255,8256,8257,8258,8259,8260,8261, # 8262
8262,8263,8264,8265,8266,8267,8268,8269,8270,8271,8272,8273,8274,8275,8276,8277, # 8278
8278,8279,8280,8281,8282,8283,8284,8285,8286,8287,8288,8289,8290,8291,8292,8293, # 8294
8294,8295,8296,8297,8298,8299,8300,8301,8302,8303,8304,8305,8306,8307,8308,8309, # 8310
8310,8311,8312,8313,8314,8315,8316,8317,8318,8319,8320,8321,8322,8323,8324,8325, # 8326
8326,8327,8328,8329,8330,8331,8332,8333,8334,8335,8336,8337,8338,8339,8340,8341, # 8342
8342,8343,8344,8345,8346,8347,8348,8349,8350,8351,8352,8353,8354,8355,8356,8357, # 8358
8358,8359,8360,8361,8362,8363,8364,8365,8366,8367,8368,8369,8370,8371,8372,8373, # 8374
8374,8375,8376,8377,8378,8379,8380,8381,8382,8383,8384,8385,8386,8387,8388,8389, # 8390
8390,8391,8392,8393,8394,8395,8396,8397,8398,8399,8400,8401,8402,8403,8404,8405, # 8406
8406,8407,8408,8409,8410,8411,8412,8413,8414,8415,8416,8417,8418,8419,8420,8421, # 8422
8422,8423,8424,8425,8426,8427,8428,8429,8430,8431,8432,8433,8434,8435,8436,8437, # 8438
8438,8439,8440,8441,8442,8443,8444,8445,8446,8447,8448,8449,8450,8451,8452,8453, # 8454
8454,8455,8456,8457,8458,8459,8460,8461,8462,8463,8464,8465,8466,8467,8468,8469, # 8470
8470,8471,8472,8473,8474,8475,8476,8477,8478,8479,8480,8481,8482,8483,8484,8485, # 8486
8486,8487,8488,8489,8490,8491,8492,8493,8494,8495,8496,8497,8498,8499,8500,8501, # 8502
8502,8503,8504,8505,8506,8507,8508,8509,8510,8511,8512,8513,8514,8515,8516,8517, # 8518
8518,8519,8520,8521,8522,8523,8524,8525,8526,8527,8528,8529,8530,8531,8532,8533, # 8534
8534,8535,8536,8537,8538,8539,8540,8541,8542,8543,8544,8545,8546,8547,8548,8549, # 8550
8550,8551,8552,8553,8554,8555,8556,8557,8558,8559,8560,8561,8562,8563,8564,8565, # 8566
8566,8567,8568,8569,8570,8571,8572,8573,8574,8575,8576,8577,8578,8579,8580,8581, # 8582
8582,8583,8584,8585,8586,8587,8588,8589,8590,8591,8592,8593,8594,8595,8596,8597, # 8598
8598,8599,8600,8601,8602,8603,8604,8605,8606,8607,8608,8609,8610,8611,8612,8613, # 8614
8614,8615,8616,8617,8618,8619,8620,8621,8622,8623,8624,8625,8626,8627,8628,8629, # 8630
8630,8631,8632,8633,8634,8635,8636,8637,8638,8639,8640,8641,8642,8643,8644,8645, # 8646
8646,8647,8648,8649,8650,8651,8652,8653,8654,8655,8656,8657,8658,8659,8660,8661, # 8662
8662,8663,8664,8665,8666,8667,8668,8669,8670,8671,8672,8673,8674,8675,8676,8677, # 8678
8678,8679,8680,8681,8682,8683,8684,8685,8686,8687,8688,8689,8690,8691,8692,8693, # 8694
8694,8695,8696,8697,8698,8699,8700,8701,8702,8703,8704,8705,8706,8707,8708,8709, # 8710
8710,8711,8712,8713,8714,8715,8716,8717,8718,8719,8720,8721,8722,8723,8724,8725, # 8726
8726,8727,8728,8729,8730,8731,8732,8733,8734,8735,8736,8737,8738,8739,8740,8741) # 8742
# flake8: noqa
| apache-2.0 |
mach0/QGIS | python/plugins/db_manager/dlg_create_table.py | 30 | 12123 | # -*- coding: utf-8 -*-
"""
/***************************************************************************
Name : DB Manager
Description : Database manager plugin for QGIS
Date : Oct 13, 2011
copyright : (C) 2011 by Giuseppe Sucameli
email : brush.tyler@gmail.com
The content of this file is based on
- PG_Manager by Martin Dobias (GPLv2 license)
***************************************************************************/
/***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************/
"""
from builtins import str
from builtins import range
from qgis.PyQt.QtCore import Qt, QModelIndex
from qgis.PyQt.QtWidgets import QItemDelegate, QComboBox, QDialog, QPushButton, QDialogButtonBox, QMessageBox, QApplication
from qgis.PyQt.QtCore import QItemSelectionModel, pyqtSignal
from qgis.utils import OverrideCursor
from .db_plugins.data_model import TableFieldsModel
from .db_plugins.plugin import DbError, ConnectionError
from .dlg_db_error import DlgDbError
from .ui.ui_DlgCreateTable import Ui_DbManagerDlgCreateTable as Ui_Dialog
class TableFieldsDelegate(QItemDelegate):
    """Item delegate for the fields table providing a custom type editor.

    Column 1 (the field type) is edited through an editable combo box
    pre-filled with the database's known field types; every other column
    falls back to the stock QItemDelegate editors. Emits
    ``columnNameChanged`` whenever a value in column 0 (the field name)
    is committed, so the primary-key combo can be refreshed.
    """

    columnNameChanged = pyqtSignal()

    def __init__(self, field_types, parent=None):
        QItemDelegate.__init__(self, parent)
        # list of type names offered in the combo box editor
        self.fieldTypes = field_types

    def createEditor(self, parent, option, index):
        """Return the editor widget for the given cell."""
        if index.column() != 1:
            # all other columns use the default editors
            return QItemDelegate.createEditor(self, parent, option, index)
        # special combobox for field type
        combo = QComboBox(parent)
        combo.setEditable(True)
        combo.setFrame(False)
        for type_name in self.fieldTypes:
            combo.addItem(type_name)
        return combo

    def setEditorData(self, editor, index):
        """Load data from the model into the editor."""
        if index.column() != 1:
            # use default
            QItemDelegate.setEditorData(self, editor, index)
            return
        current_text = index.model().data(index, Qt.DisplayRole)
        editor.setEditText(current_text)

    def setModelData(self, editor, model, index):
        """Save data from the editor back into the model."""
        if index.column() == 1:
            model.setData(index, editor.currentText())
        else:
            # use default
            QItemDelegate.setModelData(self, editor, model, index)
        if index.column() == 0:
            # a field name changed: let listeners refresh dependent widgets
            self.columnNameChanged.emit()
class DlgCreateTable(QDialog, Ui_Dialog):
    """Dialog for defining and creating a new database table.

    Collects a table name, an optional schema (for backends that support
    schemas), an ordered list of typed fields, an optional primary key and
    an optional geometry column, then asks the DB connector to create the
    table (plain or vector).
    """
    # Geometry types offered by the geometry-type combo box; indexed by
    # cboGeomType.currentIndex() in createTable().
    GEOM_TYPES = ["POINT", "LINESTRING", "POLYGON", "MULTIPOINT", "MULTILINESTRING", "MULTIPOLYGON",
                  "GEOMETRYCOLLECTION"]
    def __init__(self, item, parent=None):
        """Build the dialog for the given DB tree item (a database or schema)."""
        QDialog.__init__(self, parent)
        self.item = item
        self.setupUi(self)
        self.db = self.item.database()
        # schemas() returns None for backends with no schema concept
        self.schemas = self.db.schemas()
        self.hasSchemas = self.schemas is not None
        self.fieldTypes = self.db.connector.fieldTypes()
        m = TableFieldsModel(self, True)  # it's editable
        self.fields.setModel(m)
        self.fields.setColumnHidden(3, True)  # hide Default column
        d = TableFieldsDelegate(self.fieldTypes, self)
        self.fields.setItemDelegate(d)
        self.fields.setColumnWidth(0, 140)
        self.fields.setColumnWidth(1, 140)
        self.fields.setColumnWidth(2, 50)
        # the dialog's "Create" action button (ActionRole: does not close the dialog)
        b = QPushButton(self.tr("&Create"))
        self.buttonBox.addButton(b, QDialogButtonBox.ActionRole)
        self.btnAddField.clicked.connect(self.addField)
        self.btnDeleteField.clicked.connect(self.deleteField)
        self.btnFieldUp.clicked.connect(self.fieldUp)
        self.btnFieldDown.clicked.connect(self.fieldDown)
        b.clicked.connect(self.createTable)
        self.chkGeomColumn.clicked.connect(self.updateUi)
        self.fields.selectionModel().selectionChanged.connect(self.updateUiFields)
        # keep the primary-key combo in sync with edited field names
        d.columnNameChanged.connect(self.updatePkeyCombo)
        self.populateSchemas()
        self.updateUi()
        self.updateUiFields()
    def populateSchemas(self):
        """Fill the schema combo and preselect the current item's schema."""
        self.cboSchema.clear()
        if not self.hasSchemas:
            self.hideSchemas()
            return
        index = -1
        for schema in self.schemas:
            self.cboSchema.addItem(schema.name)
            if hasattr(self.item, 'schema') and schema.name == self.item.schema().name:
                index = self.cboSchema.count() - 1
        # index stays -1 (no selection) when the item has no matching schema
        self.cboSchema.setCurrentIndex(index)
    def hideSchemas(self):
        # "hide" just disables the combo so the dialog layout stays stable
        self.cboSchema.setEnabled(False)
    def updateUi(self):
        """Enable the geometry widgets only when a geometry column is requested."""
        useGeom = self.chkGeomColumn.isChecked()
        self.cboGeomType.setEnabled(useGeom)
        self.editGeomColumn.setEnabled(useGeom)
        self.spinGeomDim.setEnabled(useGeom)
        self.editGeomSrid.setEnabled(useGeom)
        self.chkSpatialIndex.setEnabled(useGeom)
    def updateUiFields(self):
        """Enable/disable the move and delete buttons to match the selection."""
        fld = self.selectedField()
        if fld is not None:
            # can't move the first row up or the last row down
            up_enabled = (fld != 0)
            down_enabled = (fld != self.fields.model().rowCount() - 1)
            del_enabled = True
        else:
            up_enabled, down_enabled, del_enabled = False, False, False
        self.btnFieldUp.setEnabled(up_enabled)
        self.btnFieldDown.setEnabled(down_enabled)
        self.btnDeleteField.setEnabled(del_enabled)
    def updatePkeyCombo(self, selRow=None):
        """Rebuild the primary-key combo from the current field names.

        Called when the list of columns changes. If 'selRow' is None, the
        currently selected index is kept.
        """
        if selRow is None:
            selRow = self.cboPrimaryKey.currentIndex()
        self.cboPrimaryKey.clear()
        m = self.fields.model()
        for row in range(m.rowCount()):
            name = m.data(m.index(row, 0))
            self.cboPrimaryKey.addItem(name)
        self.cboPrimaryKey.setCurrentIndex(selRow)
    def addField(self):
        """Adds a new field to the end of the field table and starts editing it."""
        m = self.fields.model()
        newRow = m.rowCount()
        m.insertRows(newRow, 1)
        indexName = m.index(newRow, 0, QModelIndex())
        indexType = m.index(newRow, 1, QModelIndex())
        indexNull = m.index(newRow, 2, QModelIndex())
        m.setData(indexName, "new_field")
        colType = self.fieldTypes[0]
        if newRow == 0:
            # adding the first row, use auto-incrementing column type if any
            if "serial" in self.fieldTypes: # PostgreSQL
                colType = "serial"
        m.setData(indexType, colType)
        m.setData(indexNull, None, Qt.DisplayRole)
        m.setData(indexNull, Qt.Unchecked, Qt.CheckStateRole)
        # selects the new row
        sel = self.fields.selectionModel()
        sel.select(indexName, QItemSelectionModel.Rows | QItemSelectionModel.ClearAndSelect)
        # starts editing
        self.fields.edit(indexName)
        # first field becomes the default primary key candidate
        self.updatePkeyCombo(0 if newRow == 0 else None)
    def selectedField(self):
        """Return the row index of the selected field, or None if no selection."""
        sel = self.fields.selectedIndexes()
        if len(sel) < 1:
            return None
        return sel[0].row()
    def deleteField(self):
        """Deletes the selected field, warning the user if none is selected."""
        row = self.selectedField()
        if row is None:
            QMessageBox.information(self, self.tr("DB Manager"), self.tr("No field selected."))
        else:
            self.fields.model().removeRows(row, 1)
        self.updatePkeyCombo()
    def fieldUp(self):
        """Move the selected field one row up."""
        row = self.selectedField()
        if row is None:
            QMessageBox.information(self, self.tr("DB Manager"), self.tr("No field selected."))
            return
        if row == 0:
            QMessageBox.information(self, self.tr("DB Manager"), self.tr("Field is already at the top."))
            return
        # take row and reinsert it
        rowdata = self.fields.model().takeRow(row)
        self.fields.model().insertRow(row - 1, rowdata)
        # set selection again
        index = self.fields.model().index(row - 1, 0, QModelIndex())
        self.fields.selectionModel().select(index, QItemSelectionModel.Rows | QItemSelectionModel.ClearAndSelect)
        self.updatePkeyCombo()
    def fieldDown(self):
        """Move the selected field one row down."""
        row = self.selectedField()
        if row is None:
            QMessageBox.information(self, self.tr("DB Manager"), self.tr("No field selected."))
            return
        if row == self.fields.model().rowCount() - 1:
            QMessageBox.information(self, self.tr("DB Manager"), self.tr("Field is already at the bottom."))
            return
        # take row and reinsert it
        rowdata = self.fields.model().takeRow(row)
        self.fields.model().insertRow(row + 1, rowdata)
        # set selection again
        index = self.fields.model().index(row + 1, 0, QModelIndex())
        self.fields.selectionModel().select(index, QItemSelectionModel.Rows | QItemSelectionModel.ClearAndSelect)
        self.updatePkeyCombo()
    def createTable(self):
        """Creates the table with the chosen fields, optionally adding a geometry column."""
        # -- validate user input, bailing out with a message on first problem --
        if not self.hasSchemas:
            schema = None
        else:
            schema = str(self.cboSchema.currentText())
            if len(schema) == 0:
                QMessageBox.information(self, self.tr("DB Manager"), self.tr("A valid schema must be selected first."))
                return
        table = str(self.editName.text())
        if len(table) == 0:
            QMessageBox.information(self, self.tr("DB Manager"), self.tr("A valid table name is required."))
            return
        m = self.fields.model()
        if m.rowCount() == 0:
            QMessageBox.information(self, self.tr("DB Manager"), self.tr("At least one field is required."))
            return
        useGeomColumn = self.chkGeomColumn.isChecked()
        if useGeomColumn:
            geomColumn = str(self.editGeomColumn.text())
            if len(geomColumn) == 0:
                QMessageBox.information(self, self.tr("DB Manager"), self.tr("A name is required for the geometry column."))
                return
            geomType = self.GEOM_TYPES[self.cboGeomType.currentIndex()]
            geomDim = self.spinGeomDim.value()
            try:
                geomSrid = int(self.editGeomSrid.text())
            except ValueError:
                # non-numeric SRID input falls back to 0 (unknown SRID)
                geomSrid = 0
            useSpatialIndex = self.chkSpatialIndex.isChecked()
        flds = m.getFields()
        pk_index = self.cboPrimaryKey.currentIndex()
        if pk_index >= 0:
            flds[pk_index].primaryKey = True
        # commit to DB
        with OverrideCursor(Qt.WaitCursor):
            try:
                if not useGeomColumn:
                    self.db.createTable(table, flds, schema)
                else:
                    geom = geomColumn, geomType, geomSrid, geomDim, useSpatialIndex
                    self.db.createVectorTable(table, flds, geom, schema)
            except (ConnectionError, DbError) as e:
                DlgDbError.showError(e, self)
        # NOTE(review): on a DB error we fall through here and still clear the
        # UI and show the success message below — confirm this is intended.
        # clear UI
        self.editName.clear()
        self.fields.model().removeRows(0, self.fields.model().rowCount())
        self.cboPrimaryKey.clear()
        self.chkGeomColumn.setChecked(False)
        self.chkSpatialIndex.setChecked(False)
        self.editGeomSrid.clear()
        self.cboGeomType.setEnabled(False)
        self.editGeomColumn.setEnabled(False)
        self.spinGeomDim.setEnabled(False)
        self.editGeomSrid.setEnabled(False)
        self.chkSpatialIndex.setEnabled(False)
        QMessageBox.information(self, self.tr("DB Manager"), self.tr("Table created successfully."))
| gpl-2.0 |
antoviaque/edx-platform | pavelib/utils/test/bokchoy_utils.py | 44 | 4827 | """
Helper functions for bok_choy test tasks
"""
import sys
import os
import time
import httplib
import subprocess
from paver.easy import sh
from pavelib.utils.envs import Env
from pavelib.utils.process import run_background_process
try:
from pygments.console import colorize
except ImportError:
colorize = lambda color, text: text
__test__ = False # do not collect
def start_servers(default_store, coveragerc=None):
    """
    Start the bok-choy servers (Django services under coverage, plus the
    stub services) as background processes.

    Arguments:
        default_store (str): modulestore backend name, exported as
            DEFAULT_STORE into each Django server's environment.
        coveragerc (str): path to the coverage config used when running the
            Django servers; defaults to Env.BOK_CHOY_COVERAGERC.

    Returns nothing; the processes are detached via run_background_process.
    """
    coveragerc = coveragerc or Env.BOK_CHOY_COVERAGERC
    def start_server(cmd, logfile, cwd=None):
        """
        Starts a single server, sending both stdout and stderr to logfile.
        """
        print cmd, logfile
        run_background_process(cmd, out_log=logfile, err_log=logfile, cwd=cwd)
    # Django servers (e.g. lms/cms) run under `coverage run` so bok-choy
    # sessions produce server-side coverage data.
    for service, info in Env.BOK_CHOY_SERVERS.iteritems():
        address = "0.0.0.0:{}".format(info['port'])
        cmd = (
            "DEFAULT_STORE={default_store} "
            "coverage run --rcfile={coveragerc} -m "
            "manage {service} --settings bok_choy runserver "
            "{address} --traceback --noreload".format(
                default_store=default_store,
                coveragerc=coveragerc,
                service=service,
                address=address,
            )
        )
        start_server(cmd, info['log'])
    # Stub services run from the stub directory; 'config' is optional.
    for service, info in Env.BOK_CHOY_STUBS.iteritems():
        cmd = (
            "python -m stubs.start {service} {port} "
            "{config}".format(
                service=service,
                port=info['port'],
                config=info.get('config', ''),
            )
        )
        start_server(cmd, info['log'], cwd=Env.BOK_CHOY_STUB_DIR)
def wait_for_server(server, port):
    """
    Poll a server until it responds with HTTP 200 or the retry budget runs out.

    Makes up to 20 GET requests to http://server:port/, one second apart,
    each with a 10-second connection timeout.

    Arguments:
        server (str): hostname or IP address to contact.
        port (int): TCP port to contact.

    Returns:
        bool: True if the server returned status 200, False otherwise.
    """
    print(
        "Checking server {server} on port {port}".format(
            server=server,
            port=port,
        )
    )
    attempts = 0
    server_ok = False
    while attempts < 20:
        connection = None
        try:
            connection = httplib.HTTPConnection(server, port, timeout=10)
            connection.request('GET', '/')
            response = connection.getresponse()
            if int(response.status) == 200:
                server_ok = True
                break
        except Exception:  # narrowed from bare `except:` so Ctrl-C / SystemExit still propagate
            pass
        finally:
            # close the socket each attempt; the original leaked up to 20 connections
            if connection is not None:
                connection.close()
        attempts += 1
        time.sleep(1)
    return server_ok
def wait_for_test_servers():
    """
    Wait until we get a successful response from the servers or time out.

    Exits the process with status 1 if any configured bok-choy server
    never becomes reachable on localhost.
    """
    for service, info in Env.BOK_CHOY_SERVERS.iteritems():
        ready = wait_for_server("0.0.0.0", info['port'])
        if not ready:
            msg = colorize(
                "red",
                "Could not contact {} test server".format(service)
            )
            print msg
            sys.exit(1)
def is_mongo_running():
    """
    Returns True if mongo is running, False otherwise.
    """
    # The mongo shell will connect to the service and evaluate the
    # expression, printing 'running'; it fails (producing no such output)
    # if it cannot connect.
    output = os.popen('mongo --eval "print(\'running\')"').read()
    # The membership test alone is sufficient: "running" can never be found
    # in an empty string. This also makes the function always return a real
    # bool (the original returned '' when mongo was absent).
    return "running" in output
def is_memcache_running():
    """
    Returns True if memcache is running, False otherwise.
    """
    # Attempt to set a key in memcache. If we cannot do so because the
    # service is not available, then this will return False.
    # NOTE(review): relies on the cache client returning a falsy value
    # rather than raising when the backend is down — confirm for the
    # configured cache backend.
    return Env.BOK_CHOY_CACHE.set('test', 'test')
def is_mysql_running():
    """
    Returns True if mysql is running, False otherwise.
    """
    # mysqld may be running without being daemonized, so look for the
    # process itself instead of asking a service manager.
    with open(os.devnull, 'w') as devnull:
        # pgrep prints the matching PID; discard it and keep only the exit
        # code, which is 0 when at least one mysqld process exists.
        exit_code = subprocess.call("pgrep mysqld", stdout=devnull, shell=True)
    return exit_code == 0
def clear_mongo():
    """
    Clears mongo database.

    Drops the bok-choy database (Env.BOK_CHOY_MONGO_DATABASE) via the mongo
    shell; shell stdout is discarded.
    """
    sh(
        "mongo {} --eval 'db.dropDatabase()' > /dev/null".format(
            Env.BOK_CHOY_MONGO_DATABASE,
        )
    )
def check_mongo():
    """
    Check that mongo is running; print an error and exit(1) if it is not.
    """
    if not is_mongo_running():
        msg = colorize('red', "Mongo is not running locally.")
        print msg
        sys.exit(1)
def check_memcache():
    """
    Check that memcache is running; print an error and exit(1) if it is not.
    """
    if not is_memcache_running():
        msg = colorize('red', "Memcache is not running locally.")
        print msg
        sys.exit(1)
def check_mysql():
    """
    Check that mysql is running; print an error and exit(1) if it is not.
    """
    if not is_mysql_running():
        msg = colorize('red', "MySQL is not running locally.")
        print msg
        sys.exit(1)
def check_services():
    """
    Check that all required services are running.

    Exits with status 1 at the first service (mongo, memcache, mysql)
    found not to be running.
    """
    check_mongo()
    check_memcache()
    check_mysql()
| agpl-3.0 |
roadmapper/ansible | test/units/modules/cloud/kubevirt/test_kubevirt_vm.py | 24 | 4091 | import pytest
openshiftdynamic = pytest.importorskip("openshift.dynamic")
from units.modules.utils import set_module_args
from units.utils.kubevirt_fixtures import base_fixture, RESOURCE_DEFAULT_ARGS, AnsibleExitJson
from ansible.module_utils.kubevirt import KubeVirtRawModule
from ansible.modules.cloud.kubevirt import kubevirt_vm as mymodule
KIND = 'VirtulMachine'
@pytest.mark.usefixtures("base_fixture")
def test_create_vm_with_multus_nowait():
    """A VM absent from the cluster is created (method 'create', changed)."""
    # Desired state: present VM with a pod network and a multus network,
    # not waiting for readiness.
    args = dict(
        state='present', name='testvm',
        namespace='vms',
        interfaces=[
            {'bridge': {}, 'name': 'default', 'network': {'pod': {}}},
            {'bridge': {}, 'name': 'mynet', 'network': {'multus': {'networkName': 'mynet'}}},
        ],
        wait=False,
    )
    set_module_args(args)
    # State as "returned" by the "k8s cluster":
    # NOTE(review): module-level KIND is 'VirtulMachine' — looks like a typo
    # for 'VirtualMachine'; harmless here because the resource is mocked.
    resource_args = dict(kind=KIND, **RESOURCE_DEFAULT_ARGS)
    KubeVirtRawModule.find_supported_resource.return_value = openshiftdynamic.Resource(**resource_args)
    openshiftdynamic.Resource.get.return_value = None  # Object doesn't exist in the cluster
    # Run code:
    with pytest.raises(AnsibleExitJson) as result:
        mymodule.KubeVirtVM().execute_module()
    # Verify result:
    assert result.value['changed']
    assert result.value['method'] == 'create'
@pytest.mark.usefixtures("base_fixture")
@pytest.mark.parametrize("_wait", (False, True))
def test_vm_is_absent(_wait):
    """Deleting an already-missing VM reports no VM facts, method 'delete'.

    Parametrized over wait=False/True to cover both code paths.
    """
    # Desired state:
    args = dict(
        state='absent', name='testvmi',
        namespace='vms',
        wait=_wait,
    )
    set_module_args(args)
    # State as "returned" by the "k8s cluster":
    resource_args = dict(kind=KIND, **RESOURCE_DEFAULT_ARGS)
    KubeVirtRawModule.find_supported_resource.return_value = openshiftdynamic.Resource(**resource_args)
    openshiftdynamic.Resource.get.return_value = None  # Object doesn't exist in the cluster
    # Run code:
    with pytest.raises(AnsibleExitJson) as result:
        mymodule.KubeVirtVM().execute_module()
    # Verify result:
    assert not result.value['kubevirt_vm']
    assert result.value['method'] == 'delete'
    # Note: nothing actually gets deleted, as we mock that there's not object in the cluster present,
    # so if the method changes to something other than 'delete' at some point, that's fine
@pytest.mark.usefixtures("base_fixture")
def test_vmpreset_create():
    """Creating a missing VMI preset should report changed=True and
    method='create' (cluster fully mocked by `base_fixture`)."""
    # BUG FIX: the kind was misspelled 'VirtulMachineInstancePreset'.  The
    # mocked Resource never validates the string, so test behavior is
    # unchanged, but use the real KubeVirt kind name.
    KIND = 'VirtualMachineInstancePreset'
    # Desired state:
    args = dict(state='present', name='testvmipreset', namespace='vms', memory='1024Mi', wait=False)
    set_module_args(args)
    # State as "returned" by the "k8s cluster":
    resource_args = dict(kind=KIND, **RESOURCE_DEFAULT_ARGS)
    KubeVirtRawModule.find_supported_resource.return_value = openshiftdynamic.Resource(**resource_args)
    openshiftdynamic.Resource.get.return_value = None  # Object doesn't exist in the cluster
    # Run code:
    with pytest.raises(AnsibleExitJson) as result:
        mymodule.KubeVirtVM().execute_module()
    # Verify result:
    assert result.value['changed']
    assert result.value['method'] == 'create'
@pytest.mark.usefixtures("base_fixture")
def test_vmpreset_is_absent():
    """Requesting state=absent for a VMI preset that does not exist should
    report no VM and method='delete'."""
    # BUG FIX: the kind was misspelled 'VirtulMachineInstancePreset'.  The
    # mocked Resource never validates the string, so test behavior is
    # unchanged, but use the real KubeVirt kind name.
    KIND = 'VirtualMachineInstancePreset'
    # Desired state:
    args = dict(state='absent', name='testvmipreset', namespace='vms')
    set_module_args(args)
    # State as "returned" by the "k8s cluster":
    resource_args = dict(kind=KIND, **RESOURCE_DEFAULT_ARGS)
    KubeVirtRawModule.find_supported_resource.return_value = openshiftdynamic.Resource(**resource_args)
    openshiftdynamic.Resource.get.return_value = None  # Object doesn't exist in the cluster
    # Run code:
    with pytest.raises(AnsibleExitJson) as result:
        mymodule.KubeVirtVM().execute_module()
    # Verify result:
    assert not result.value['kubevirt_vm']
    assert result.value['method'] == 'delete'
    # Note: nothing actually gets deleted, as we mock that there's not object in the cluster present,
    # so if the method changes to something other than 'delete' at some point, that's fine
| gpl-3.0 |
stanley-cheung/grpc | src/benchmark/gen_build_yaml.py | 8 | 1147 | #!/usr/bin/env python2.7
# Copyright 2015 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import sys
import glob
import yaml
# Run from the gRPC repository root so the third_party glob patterns resolve.
os.chdir(os.path.dirname(sys.argv[0]) + '/../..')

# Build-system description of the bundled Google Benchmark library.
# (yaml.dump sorts mapping keys, so the literal's key order is cosmetic.)
benchmark_lib = {
    'name': 'benchmark',
    'build': 'private',
    'language': 'c++',
    'secure': False,
    'defaults': 'benchmark',
    'src': sorted(glob.glob('third_party/benchmark/src/*.cc')),
    'headers': sorted(
        glob.glob('third_party/benchmark/src/*.h') +
        glob.glob('third_party/benchmark/include/benchmark/*.h')),
}

out = {'libs': [benchmark_lib]}
print(yaml.dump(out))
| apache-2.0 |
drawquest/drawquest-web | website/canvas/migrations/0195_migrate_followers.py | 1 | 21491 | # -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import DataMigration
from django.db import models
class Migration(DataMigration):
def forwards(self, orm):
from drawquest.apps.drawquest_auth.models import User
from canvas.redis_models import redis
completed_user_ids = [int(e) for e in redis.smembers('following_migration_completed_user_ids')]
for user in User.objects.all():
if user.id in completed_user_ids:
continue
print user.username,
followers = user.redis.followers.smembers()
followers = User.objects.filter(id__in=followers).order_by('username').values_list('id', flat=True)
for id_ in reversed(followers):
user.redis.new_followers.bump(id_)
following = user.redis.followers.smembers()
following = User.objects.filter(id__in=following).order_by('username').values_list('id', flat=True)
for id_ in reversed(following):
user.redis.new_following.bump(id_)
redis.sadd('following_migration_completed_user_ids', user.id)
redis.delete('following_migration_completed_user_ids')
print
    def backwards(self, orm):
        """No-op: the follower migration is not reversible.  forwards()
        only reads the legacy sets and writes the new redis structures,
        so there is nothing to restore."""
models = {
u'auth.group': {
'Meta': {'object_name': 'Group'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
u'auth.permission': {
'Meta': {'ordering': "(u'content_type__app_label', u'content_type__model', u'codename')", 'unique_together': "((u'content_type', u'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['contenttypes.ContentType']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
u'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '254', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
u'canvas.apiapp': {
'Meta': {'object_name': 'APIApp'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'})
},
u'canvas.apiauthtoken': {
'Meta': {'unique_together': "(('user', 'app'),)", 'object_name': 'APIAuthToken'},
'app': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['canvas.APIApp']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'token': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '40'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']"})
},
u'canvas.category': {
'Meta': {'object_name': 'Category'},
'description': ('django.db.models.fields.CharField', [], {'max_length': '140'}),
'founded': ('django.db.models.fields.FloatField', [], {'default': '1298956320'}),
'founder': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'related_name': "'founded_groups'", 'null': 'True', 'blank': 'True', 'to': u"orm['auth.User']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'moderators': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'moderated_categories'", 'symmetrical': 'False', 'to': u"orm['auth.User']"}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '20'}),
'visibility': ('django.db.models.fields.IntegerField', [], {'default': '0'})
},
u'canvas.comment': {
'Meta': {'object_name': 'Comment'},
'anonymous': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'attribution_copy': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
'attribution_user': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'to': u"orm['auth.User']", 'null': 'True', 'blank': 'True'}),
'author': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'related_name': "'comments'", 'null': 'True', 'blank': 'True', 'to': u"orm['auth.User']"}),
'category': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'related_name': "'comments'", 'null': 'True', 'blank': 'True', 'to': u"orm['canvas.Category']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'ip': ('django.db.models.fields.IPAddressField', [], {'default': "'0.0.0.0'", 'max_length': '15'}),
'judged': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'ot_hidden': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'parent_comment': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'related_name': "'replies'", 'null': 'True', 'blank': 'True', 'to': u"orm['canvas.Comment']"}),
'posted_on_quest_of_the_day': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'replied_comment': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'to': u"orm['canvas.Comment']", 'null': 'True', 'blank': 'True'}),
'reply_content': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'used_in_comments'", 'null': 'True', 'to': u"orm['canvas.Content']"}),
'reply_text': ('django.db.models.fields.CharField', [], {'max_length': '2000', 'blank': 'True'}),
'score': ('django.db.models.fields.FloatField', [], {'default': '0', 'db_index': 'True'}),
'skip_moderation': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'star_count': ('django.db.models.fields.IntegerField', [], {'default': '0', 'db_index': 'True', 'blank': 'True'}),
'timestamp': ('canvas.util.UnixTimestampField', [], {'default': '0'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '140', 'blank': 'True'}),
'ugq': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'db_index': 'True'}),
'uuid': ('django.db.models.fields.IntegerField', [], {'db_index': 'True', 'null': 'True', 'blank': 'True'}),
'visibility': ('django.db.models.fields.IntegerField', [], {'default': '0'})
},
u'canvas.commentflag': {
'Meta': {'object_name': 'CommentFlag'},
'comment': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'flags'", 'to': u"orm['canvas.Comment']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'ip': ('django.db.models.fields.IPAddressField', [], {'max_length': '15'}),
'timestamp': ('canvas.util.UnixTimestampField', [], {}),
'type_id': ('django.db.models.fields.IntegerField', [], {}),
'undone': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'db_index': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'flags'", 'to': u"orm['auth.User']"})
},
u'canvas.commentmoderationlog': {
'Meta': {'object_name': 'CommentModerationLog'},
'comment': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['canvas.Comment']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'moderator': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']", 'null': 'True'}),
'note': ('django.db.models.fields.TextField', [], {}),
'timestamp': ('canvas.util.UnixTimestampField', [], {}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'moderated_comments_log'", 'to': u"orm['auth.User']"}),
'visibility': ('django.db.models.fields.IntegerField', [], {})
},
u'canvas.commentpin': {
'Meta': {'object_name': 'CommentPin'},
'auto': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'comment': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['canvas.Comment']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'timestamp': ('canvas.util.UnixTimestampField', [], {}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']"})
},
u'canvas.commentsticker': {
'Meta': {'object_name': 'CommentSticker'},
'comment': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'stickers'", 'to': u"orm['canvas.Comment']"}),
'epic_message': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '140', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'ip': ('django.db.models.fields.IPAddressField', [], {'max_length': '15'}),
'timestamp': ('canvas.util.UnixTimestampField', [], {}),
'type_id': ('django.db.models.fields.IntegerField', [], {}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'to': u"orm['auth.User']", 'null': 'True', 'blank': 'True'})
},
u'canvas.commentstickerlog': {
'Meta': {'object_name': 'CommentStickerLog'},
'comment': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['canvas.Comment']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']"})
},
u'canvas.content': {
'Meta': {'object_name': 'Content'},
'alpha': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'animated': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'id': ('django.db.models.fields.CharField', [], {'max_length': '40', 'primary_key': 'True'}),
'ip': ('django.db.models.fields.IPAddressField', [], {'default': "'0.0.0.0'", 'max_length': '15'}),
'remix_of': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'remixes'", 'null': 'True', 'to': u"orm['canvas.Content']"}),
'remix_text': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '1000', 'blank': 'True'}),
'source_url': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '4000', 'blank': 'True'}),
'stamps_used': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "'used_as_stamp'", 'blank': 'True', 'to': u"orm['canvas.Content']"}),
'stroke_count': ('django.db.models.fields.PositiveIntegerField', [], {'null': 'True', 'blank': 'True'}),
'timestamp': ('canvas.util.UnixTimestampField', [], {}),
'url_mapping': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['canvas.ContentUrlMapping']", 'null': 'True', 'blank': 'True'}),
'visibility': ('django.db.models.fields.IntegerField', [], {'default': '0'})
},
u'canvas.contenturlmapping': {
'Meta': {'object_name': 'ContentUrlMapping'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'})
},
u'canvas.emailunsubscribe': {
'Meta': {'object_name': 'EmailUnsubscribe'},
'email': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'})
},
u'canvas.facebookinvite': {
'Meta': {'object_name': 'FacebookInvite'},
'fb_message_id': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'invited_fbid': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'invitee': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'related_name': "'facebook_invited_from'", 'null': 'True', 'blank': 'True', 'to': u"orm['auth.User']"}),
'inviter': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'related_name': "'facebook_sent_invites'", 'null': 'True', 'blank': 'True', 'to': u"orm['auth.User']"})
},
u'canvas.facebookuser': {
'Meta': {'object_name': 'FacebookUser'},
'email': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'fb_uid': ('django.db.models.fields.BigIntegerField', [], {'unique': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'gender': ('django.db.models.fields.PositiveSmallIntegerField', [], {'default': '0'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'invited_by': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['canvas.FacebookUser']", 'symmetrical': 'False', 'blank': 'True'}),
'last_invited': ('canvas.util.UnixTimestampField', [], {'default': '0'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'user': ('django.db.models.fields.related.OneToOneField', [], {'to': u"orm['auth.User']", 'unique': 'True', 'null': 'True', 'blank': 'True'})
},
u'canvas.followcategory': {
'Meta': {'unique_together': "(('user', 'category'),)", 'object_name': 'FollowCategory'},
'category': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'followers'", 'to': u"orm['canvas.Category']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'following'", 'to': u"orm['auth.User']"})
},
u'canvas.friendjoinednotificationreceipt': {
'Meta': {'unique_together': "(('actor', 'recipient'),)", 'object_name': 'FriendJoinedNotificationReceipt'},
'actor': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'+'", 'to': u"orm['auth.User']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'recipient': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'+'", 'to': u"orm['auth.User']"})
},
u'canvas.stashcontent': {
'Meta': {'object_name': 'StashContent'},
'content': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['canvas.Content']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']"})
},
u'canvas.userinfo': {
'Meta': {'object_name': 'UserInfo'},
'avatar': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['canvas.Content']", 'null': 'True'}),
'bio_text': ('django.db.models.fields.CharField', [], {'max_length': '2000', 'blank': 'True'}),
'email_hash': ('django.db.models.fields.CharField', [], {'max_length': '40'}),
'enable_timeline': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'enable_timeline_posts': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'facebook_id': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'follower_count': ('django.db.models.fields.IntegerField', [], {'default': '0', 'blank': 'True'}),
'free_invites': ('django.db.models.fields.IntegerField', [], {'default': '10'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'invite_bypass': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '255', 'blank': 'True'}),
'is_qa': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'post_anonymously': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'profile_image': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['canvas.Comment']", 'null': 'True'}),
'trust_changed': ('canvas.util.UnixTimestampField', [], {'null': 'True', 'blank': 'True'}),
'trusted': ('django.db.models.fields.NullBooleanField', [], {'null': 'True', 'blank': 'True'}),
'user': ('django.db.models.fields.related.OneToOneField', [], {'to': u"orm['auth.User']", 'unique': 'True'})
},
u'canvas.usermoderationlog': {
'Meta': {'object_name': 'UserModerationLog'},
'action': ('django.db.models.fields.IntegerField', [], {}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'moderator': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']", 'null': 'True'}),
'note': ('django.db.models.fields.TextField', [], {}),
'timestamp': ('canvas.util.UnixTimestampField', [], {}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'moderation_log'", 'to': u"orm['auth.User']"})
},
u'canvas.userwarning': {
'Meta': {'object_name': 'UserWarning'},
'comment': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'to': u"orm['canvas.Comment']", 'null': 'True', 'blank': 'True'}),
'confirmed': ('canvas.util.UnixTimestampField', [], {'default': '0'}),
'custom_message': ('django.db.models.fields.TextField', [], {}),
'disable_user': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'issued': ('canvas.util.UnixTimestampField', [], {}),
'stock_message': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'user_warnings'", 'to': u"orm['auth.User']"}),
'viewed': ('canvas.util.UnixTimestampField', [], {'default': '0'})
},
u'canvas.welcomeemailrecipient': {
'Meta': {'object_name': 'WelcomeEmailRecipient'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'recipient': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']", 'unique': 'True'})
},
u'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
}
}
complete_apps = ['canvas']
symmetrical = True
| bsd-3-clause |
sem-geologist/hyperspy | hyperspy/misc/machine_learning/orthomax.py | 7 | 2196 | import numpy as np
def orthomax(A, gamma=1, reltol=1.4901e-07, maxit=256):
    """Orthogonal rotation of FA or PCA loadings (orthomax criterion).

    Taken from metpy.

    Parameters
    ----------
    A : array, shape (d, m)
        Loadings matrix to rotate.
    gamma : float, optional
        Orthomax weight: 0 gives quartimax, 1 (default) varimax.
    reltol : float, optional
        Relative convergence tolerance.
    maxit : int, optional
        Maximum number of iterations / rotation sweeps.

    Returns
    -------
    B : array, shape (d, m)
        Rotated loadings, ``B = A . T``.
    T : array, shape (m, m)
        Orthogonal rotation matrix.
    """
    d, m = A.shape
    B = np.copy(A)
    T = np.eye(m)

    if (0 <= gamma) & (gamma <= 1):
        # Use Lawley and Maxwell's fast SVD-based update.
        # BUG FIX: the original wrapped this loop in `while converged is
        # False:`, which ignored `maxit` on non-convergence; now the
        # iteration budget is honored and we break once converged.
        D = 0
        for k in range(1, maxit + 1):
            Dold = D
            tmp11 = np.sum(np.power(B, 2), axis=0)
            tmp1 = np.matrix(np.diag(np.array(tmp11).flatten()))
            tmp2 = gamma * B
            tmp3 = d * np.power(B, 3)
            L, D, M = np.linalg.svd(
                np.dot(A.transpose(), tmp3 - np.dot(tmp2, tmp1)))
            T = np.dot(L, M)
            D = np.sum(np.diag(D))
            B = np.dot(A, T)
            if np.abs(D - Dold) / D < reltol:
                break
    else:
        # Use a sequence of bivariate (Givens) rotations over column pairs.
        # (`it` instead of `iter` to avoid shadowing the builtin.)
        for it in range(1, maxit + 1):
            maxTheta = 0
            for i in range(0, m - 1):
                # BUG FIX: the original used `range(i, m)`, pairing a
                # column with itself; for j == i we get u == 0 and
                # theta == pi/4, which scrambles B and T.  Only rotate
                # distinct pairs j > i.
                for j in range(i + 1, m):
                    Bi = B[:, i]
                    Bj = B[:, j]
                    u = np.multiply(Bi, Bi) - np.multiply(Bj, Bj)
                    v = 2 * np.multiply(Bi, Bj)
                    usum = u.sum()
                    vsum = v.sum()
                    numer = 2 * np.dot(u.transpose(), v) - \
                        2 * gamma * usum * vsum / d
                    denom = (np.dot(u.transpose(), u) -
                             np.dot(v.transpose(), v) -
                             gamma * (usum ** 2 - vsum ** 2) / d)
                    theta = np.arctan2(numer, denom) / 4
                    maxTheta = max(maxTheta, np.abs(theta))
                    Tij = np.array([[np.cos(theta), -np.sin(theta)],
                                    [np.sin(theta), np.cos(theta)]])
                    B[:, [i, j]] = np.dot(B[:, [i, j]], Tij)
                    T[:, [i, j]] = np.dot(T[:, [i, j]], Tij)
            if maxTheta < reltol:
                break
    return B, T
| gpl-3.0 |
proversity-org/edx-platform | openedx/core/djangoapps/waffle_utils/models.py | 12 | 2358 | """
Models for configuring waffle utils.
"""
from django.db.models import CharField
from django.utils.translation import ugettext_lazy as _
from model_utils import Choices
from opaque_keys.edx.django.models import CourseKeyField
from six import text_type
from config_models.models import ConfigurationModel
from openedx.core.djangoapps.request_cache.middleware import request_cached
class WaffleFlagCourseOverrideModel(ConfigurationModel):
    """
    Used to force a waffle flag on or off for a course.
    """
    OVERRIDE_CHOICES = Choices(('on', _('Force On')), ('off', _('Force Off')))
    ALL_CHOICES = OVERRIDE_CHOICES + Choices('unset')

    KEY_FIELDS = ('waffle_flag', 'course_id')

    # The name of the waffle flag being overridden.
    waffle_flag = CharField(max_length=255, db_index=True)
    # The course that this override is attached to.
    course_id = CourseKeyField(max_length=255, db_index=True)
    override_choice = CharField(choices=OVERRIDE_CHOICES, default=OVERRIDE_CHOICES.on, max_length=3)

    @classmethod
    @request_cached
    def override_value(cls, waffle_flag, course_id):
        """
        Returns whether the waffle flag was overridden (on or off) for the
        course, or is unset.

        Arguments:
            waffle_flag (String): The name of the flag.
            course_id (CourseKey): The course id for which the flag may have
                been overridden.

        If the current config is not set or disabled for this waffle flag and
        course id, returns ALL_CHOICES.unset.
        Otherwise, returns ALL_CHOICES.on or ALL_CHOICES.off as configured for
        the override_choice.
        """
        if not course_id or not waffle_flag:
            return cls.ALL_CHOICES.unset
        effective = cls.objects.filter(waffle_flag=waffle_flag, course_id=course_id).order_by('-change_date').first()
        if effective and effective.enabled:
            return effective.override_choice
        return cls.ALL_CHOICES.unset

    class Meta(object):
        app_label = "waffle_utils"
        verbose_name = 'Waffle flag course override'
        verbose_name_plural = 'Waffle flag course overrides'

    def __unicode__(self):
        enabled_label = "Enabled" if self.enabled else "Not Enabled"
        # BUG FIX: the message previously read "Persistent Grades" -- a
        # copy-paste from the grades configuration model -- which misdescribed
        # this record in the admin/logs.  Describe the waffle flag override.
        # pylint: disable=no-member
        return u"Course '{}': Waffle flag '{}' {}".format(
            text_type(self.course_id), self.waffle_flag, enabled_label)
| agpl-3.0 |
beav/pulp | server/pulp/server/webservices/serialization/dispatch.py | 3 | 2688 | # -*- coding: utf-8 -*-
#
# Copyright © 2012 Red Hat, Inc.
#
# This software is licensed to you under the GNU General Public
# License as published by the Free Software Foundation; either version
# 2 of the License (GPLv2) or (at your option) any later version.
# There is NO WARRANTY for this software, express or implied,
# including the implied warranties of MERCHANTABILITY,
# NON-INFRINGEMENT, or FITNESS FOR A PARTICULAR PURPOSE. You should
# have received a copy of GPLv2 along with this software; if not, see
# http://www.gnu.org/licenses/old-licenses/gpl-2.0.txt.
from pulp.server.webservices.serialization.link import link_obj
def task_result_href(task):
    """Build the link dict for a task's detail URI.

    :param task: serialized task that may carry a 'task_id' key
    :type task: dict
    :return: {'_href': ...} when a truthy task_id is present, else {}
    :rtype: dict
    """
    task_id = task.get('task_id')
    if not task_id:
        return {}
    return {'_href': '/pulp/api/v2/tasks/%s/' % task_id}
def task_href(call_report):
    """Build the link dict for a call report's task URI.

    :param call_report: object exposing a call_request_id attribute
    :return: {'_href': ...} when call_request_id is set, else {}
    :rtype: dict
    """
    request_id = call_report.call_request_id
    if request_id is None:
        return {}
    return {'_href': '/pulp/api/v2/tasks/%s/' % request_id}
def task_group_href(call_report):
    """Build the link dict for a call report's task-group URI.

    :param call_report: object exposing a call_request_group_id attribute
    :return: {'_href': ...} when call_request_group_id is set, else {}
    :rtype: dict
    """
    group_id = call_report.call_request_group_id
    if group_id is None:
        return {}
    return {'_href': '/pulp/api/v2/task_groups/%s/' % group_id}
def scheduled_unit_management_obj(scheduled_call):
    """Promote 'options' and 'units' out of the call's kwargs to the top level.

    The input dict is mutated in place and also returned for convenience.

    :param scheduled_call: scheduled call dict with a 'kwargs' sub-dict
    :type scheduled_call: dict
    :return: the same dict, with 'options' and 'units' copied to the top level
    :rtype: dict
    """
    kwargs = scheduled_call['kwargs']
    scheduled_call['options'] = kwargs['options']
    scheduled_call['units'] = kwargs['units']
    return scheduled_call
def spawned_tasks(task):
    """
    For a given Task dictionary convert the spawned tasks list of ids to
    a list of link objects.

    :param task: The dictionary representation of a task object in the database
    :type task: dict
    """
    # Local renamed from 'spawned_tasks' so it no longer shadows this function.
    links = []
    for child_id in (task.get('spawned_tasks') or []):
        link = link_obj('/pulp/api/v2/tasks/%s/' % child_id)
        link['task_id'] = child_id
        links.append(link)
    return {'spawned_tasks': links}
def task_status(task):
    """
    Return serialized version of given TaskStatus document.

    :param task: Task status document object
    :type task: pulp.server.db.model.dispatch.TaskStatus
    :return: serialized task status
    :rtype: dict
    """
    attributes = ('task_id', 'worker_name', 'tags', 'state', 'error',
                  'spawned_tasks', 'progress_report', 'task_type',
                  'start_time', 'finish_time', 'result', 'exception',
                  'traceback', '_ns')
    serialized = dict((name, task[name]) for name in attributes)
    # '_id' keeps the raw id to preserve backward compatibility for
    # semantic versioning; 'id' is exposed as a string.
    serialized['_id'] = task['id']
    serialized['id'] = str(task['id'])
    return serialized
| gpl-2.0 |
oppia/oppia | core/domain/beam_job_domain_test.py | 2 | 5940 | # coding: utf-8
#
# Copyright 2021 The Oppia Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Unit tests for core.domain.beam_job_domain."""
from __future__ import absolute_import # pylint: disable=import-only-modules
from __future__ import unicode_literals # pylint: disable=import-only-modules
import datetime
from core.domain import beam_job_domain
from core.platform import models
from core.tests import test_utils
from jobs.batch_jobs import validation_jobs
import utils
(beam_job_models,) = models.Registry.import_models([models.NAMES.beam_job])
class BeamJobTests(test_utils.TestBase):
    """Tests for the BeamJob domain object (plus BeamJobRun terminal states)."""

    NOW = datetime.datetime.utcnow()

    def _make_run_in_state(self, state):
        """Returns a BeamJobRun in the given state with placeholder values."""
        return beam_job_domain.BeamJobRun(
            '123', 'FooJob', state.value, [], self.NOW, self.NOW, True)

    def test_usage(self):
        job = beam_job_domain.BeamJob(validation_jobs.AuditAllStorageModelsJob)
        self.assertEqual(job.name, 'AuditAllStorageModelsJob')
        self.assertEqual(job.argument_names, [])

    def test_in_terminal_state(self):
        terminal_states = (
            beam_job_models.BeamJobState.CANCELLED,
            beam_job_models.BeamJobState.DRAINED,
            beam_job_models.BeamJobState.UPDATED,
            beam_job_models.BeamJobState.DONE,
            beam_job_models.BeamJobState.FAILED,
        )
        non_terminal_states = (
            beam_job_models.BeamJobState.CANCELLING,
            beam_job_models.BeamJobState.DRAINING,
            beam_job_models.BeamJobState.PENDING,
            beam_job_models.BeamJobState.RUNNING,
            beam_job_models.BeamJobState.STOPPED,
            beam_job_models.BeamJobState.UNKNOWN,
        )
        for state in terminal_states:
            self.assertTrue(self._make_run_in_state(state).in_terminal_state)
        for state in non_terminal_states:
            self.assertFalse(self._make_run_in_state(state).in_terminal_state)

    def test_to_dict(self):
        job = beam_job_domain.BeamJob(validation_jobs.AuditAllStorageModelsJob)
        self.assertEqual(job.to_dict(), {
            'name': 'AuditAllStorageModelsJob',
            'argument_names': [],
        })
class BeamJobRunTests(test_utils.TestBase):
    """Unit tests for the BeamJobRun domain object."""

    # Fixed timestamp shared by all runs constructed in these tests.
    NOW = datetime.datetime.utcnow()

    def test_usage(self):
        """A constructed run exposes its constructor arguments verbatim."""
        run = beam_job_domain.BeamJobRun(
            '123', 'FooJob', 'RUNNING', ['abc', 'def'], self.NOW, self.NOW,
            True)

        self.assertEqual(run.job_id, '123')
        self.assertEqual(run.job_name, 'FooJob')
        self.assertEqual(run.job_state, 'RUNNING')
        self.assertEqual(run.job_arguments, ['abc', 'def'])
        self.assertEqual(run.job_started_on, self.NOW)
        self.assertEqual(run.job_updated_on, self.NOW)
        self.assertTrue(run.job_is_synchronous)

    def test_to_dict(self):
        """to_dict() converts datetimes to epoch milliseconds."""
        run = beam_job_domain.BeamJobRun(
            '123', 'FooJob', 'RUNNING', ['abc', 'def'], self.NOW, self.NOW,
            True)

        self.assertEqual(run.to_dict(), {
            'job_id': '123',
            'job_name': 'FooJob',
            'job_state': 'RUNNING',
            'job_arguments': ['abc', 'def'],
            'job_started_on_msecs': utils.get_time_in_millisecs(self.NOW),
            'job_updated_on_msecs': utils.get_time_in_millisecs(self.NOW),
            'job_is_synchronous': True,
        })
class AggregateBeamJobRunResultTests(test_utils.TestBase):
    """Unit tests for the AggregateBeamJobRunResult domain object."""

    def test_usage(self):
        """The constructor stores stdout and stderr verbatim."""
        outcome = beam_job_domain.AggregateBeamJobRunResult('abc', '123')
        self.assertEqual(outcome.stdout, 'abc')
        self.assertEqual(outcome.stderr, '123')

    def test_to_dict(self):
        """to_dict() mirrors the two captured output streams."""
        outcome = beam_job_domain.AggregateBeamJobRunResult('abc', '123')
        self.assertEqual(
            outcome.to_dict(), {'stdout': 'abc', 'stderr': '123'})
| apache-2.0 |
Universal-Model-Converter/UMC3.0a | data/Python/x86/Lib/site-packages/scipy/sparse/csgraph/_components.py | 8 | 2316 | from __future__ import division, print_function, absolute_import
import numpy as np
from scipy.sparse.sparsetools import cs_graph_components as _cs_graph_components
from scipy.sparse.csr import csr_matrix
from scipy.sparse.base import isspmatrix
_msg0 = 'x must be a symmetric square matrix!'
_msg1 = _msg0 + '(has shape %s)'
def cs_graph_components(x):
    """
    Determine connected components of a graph stored as a compressed
    sparse row or column matrix.

    For speed reasons, the symmetry of the matrix x is not checked. A
    nonzero at index `(i, j)` means that node `i` is connected to node
    `j` by an edge. The number of rows/columns of the matrix thus
    corresponds to the number of nodes in the graph.

    Parameters
    -----------
    x : array_like or sparse matrix, 2 dimensions
        The adjacency matrix of the graph. Only the upper triangular part
        is used.

    Returns
    --------
    n_comp : int
        The number of connected components.
    label : ndarray (ints, 1 dimension):
        The label array of each connected component (-2 is used to
        indicate empty rows in the matrix: 0 everywhere, including
        diagonal). This array has the length of the number of nodes,
        i.e. one label for each node of the graph. Nodes having the same
        label belong to the same connected component.

    Notes
    ------
    The matrix is assumed to be symmetric and the upper triangular part
    of the matrix is used. The matrix is converted to a CSR matrix unless
    it is already a CSR.

    Examples
    --------
    >>> import numpy as np
    >>> from scipy.sparse.csgraph import cs_graph_components
    >>> D = np.eye(4)
    >>> D[0,1] = D[1,0] = 1
    >>> cs_graph_components(D)
    (3, array([0, 0, 1, 2]))
    >>> from scipy.sparse import dok_matrix
    >>> cs_graph_components(dok_matrix(D))
    (3, array([0, 0, 1, 2]))
    """
    # Objects without a .shape attribute cannot be adjacency matrices.
    try:
        shape = x.shape
    except AttributeError:
        raise ValueError(_msg0)
    # The adjacency matrix must be square: one row/column per node.
    if not ((len(x.shape) == 2) and (x.shape[0] == x.shape[1])):
        raise ValueError(_msg1 % x.shape)
    # The C helper expects CSR storage; convert anything else.
    if isspmatrix(x):
        x = x.tocsr()
    else:
        x = csr_matrix(x)
    # One label slot per node, filled in by the C routine.
    label = np.empty((shape[0],), dtype=x.indptr.dtype)
    n_comp = _cs_graph_components(shape[0], x.indptr, x.indices, label)
    return n_comp, label
| mit |
zouyapeng/horizon_change | openstack_dashboard/dashboards/admin/networks/urls.py | 6 | 2283 | # Copyright 2012 NEC Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from django.conf.urls import include # noqa
from django.conf.urls import patterns
from django.conf.urls import url
from openstack_dashboard.dashboards.admin.networks.agents \
import views as agent_views
from openstack_dashboard.dashboards.admin.networks.ports \
import urls as port_urls
from openstack_dashboard.dashboards.admin.networks.ports \
import views as port_views
from openstack_dashboard.dashboards.admin.networks.subnets \
import urls as subnet_urls
from openstack_dashboard.dashboards.admin.networks.subnets \
import views as subnet_views
from openstack_dashboard.dashboards.admin.networks import views
# Template for URLs that operate on a single network; %s is replaced by the
# action suffix (e.g. 'update', 'detail').
NETWORKS = r'^(?P<network_id>[^/]+)/%s$'
urlpatterns = patterns('',
    url(r'^$', views.IndexView.as_view(), name='index'),
    url(r'^create/$', views.CreateView.as_view(), name='create'),
    url(NETWORKS % 'update', views.UpdateView.as_view(), name='update'),
    # for detail view
    url(NETWORKS % 'detail', views.DetailView.as_view(), name='detail'),
    # Agent, subnet and port creation are scoped to a specific network.
    url(NETWORKS % 'agents/add',
        agent_views.AddView.as_view(), name='adddhcpagent'),
    url(NETWORKS % 'subnets/create',
        subnet_views.CreateView.as_view(), name='addsubnet'),
    url(NETWORKS % 'ports/create',
        port_views.CreateView.as_view(), name='addport'),
    # Edit pages need both the parent network id and the child object id.
    url(r'^(?P<network_id>[^/]+)/subnets/(?P<subnet_id>[^/]+)/update$',
        subnet_views.UpdateView.as_view(), name='editsubnet'),
    url(r'^(?P<network_id>[^/]+)/ports/(?P<port_id>[^/]+)/update$',
        port_views.UpdateView.as_view(), name='editport'),
    # Everything below subnets/ and ports/ is delegated to sub-URLConfs.
    url(r'^subnets/', include(subnet_urls, namespace='subnets')),
    url(r'^ports/', include(port_urls, namespace='ports')))
| apache-2.0 |
EmmanuelJohnson/ssquiz | flask/lib/python2.7/site-packages/sqlalchemy/event/registry.py | 60 | 7786 | # event/registry.py
# Copyright (C) 2005-2015 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
"""Provides managed registration services on behalf of :func:`.listen`
arguments.
By "managed registration", we mean that event listening functions and
other objects can be added to various collections in such a way that their
membership in all those collections can be revoked at once, based on
an equivalent :class:`._EventKey`.
"""
from __future__ import absolute_import
import weakref
import collections
import types
from .. import exc, util
_key_to_collection = collections.defaultdict(dict)
"""
Given an original listen() argument, can locate all
listener collections and the listener fn contained
(target, identifier, fn) -> {
ref(listenercollection) -> ref(listener_fn)
ref(listenercollection) -> ref(listener_fn)
ref(listenercollection) -> ref(listener_fn)
}
"""
_collection_to_key = collections.defaultdict(dict)
"""
Given a _ListenerCollection or _ClsLevelListener, can locate
all the original listen() arguments and the listener fn contained
ref(listenercollection) -> {
ref(listener_fn) -> (target, identifier, fn),
ref(listener_fn) -> (target, identifier, fn),
ref(listener_fn) -> (target, identifier, fn),
}
"""
def _collection_gced(ref):
    """Weakref callback fired when a listener collection is garbage
    collected; removes every registry entry that pointed at it.

    ``ref`` is the (now dead) weak reference to the collection.
    """
    # defaultdict, so can't get a KeyError
    if not _collection_to_key or ref not in _collection_to_key:
        return
    listener_to_key = _collection_to_key.pop(ref)
    for key in listener_to_key.values():
        if key in _key_to_collection:
            # defaultdict, so can't get a KeyError
            dispatch_reg = _key_to_collection[key]
            dispatch_reg.pop(ref)
            # Drop the (target, identifier, fn) entry entirely once no
            # collection references it any more.
            if not dispatch_reg:
                _key_to_collection.pop(key)
def _stored_in_collection(event_key, owner):
    """Record that *event_key*'s listener fn lives in *owner*'s collection.

    Returns True when a new registry entry was written, False when the
    owner was already registered for this key.
    """
    owner_ref = owner.ref
    registry = _key_to_collection[event_key._key]
    if owner_ref in registry:
        # Already tracked for this owner; nothing to do.
        return False
    fn_ref = weakref.ref(event_key._listen_fn)
    registry[owner_ref] = fn_ref
    # Maintain the reverse mapping consulted when the collection is
    # garbage collected.
    _collection_to_key[owner_ref][fn_ref] = event_key._key
    return True
def _removed_from_collection(event_key, owner):
    """Forget that *event_key*'s listener was stored in *owner*'s
    collection, pruning empty registry entries as we go.
    """
    key = event_key._key
    dispatch_reg = _key_to_collection[key]
    listen_ref = weakref.ref(event_key._listen_fn)
    owner_ref = owner.ref
    dispatch_reg.pop(owner_ref, None)
    # No collection holds this key any more; drop the entry entirely.
    if not dispatch_reg:
        del _key_to_collection[key]
    # The reverse mapping may already be gone if the collection itself
    # was garbage collected first.
    if owner_ref in _collection_to_key:
        listener_to_key = _collection_to_key[owner_ref]
        listener_to_key.pop(listen_ref)
def _stored_in_collection_multi(newowner, oldowner, elements):
    """Copy registry bookkeeping for *elements* (listener fns) from
    *oldowner*'s collection over to *newowner*'s collection.
    """
    if not elements:
        return
    oldowner = oldowner.ref
    newowner = newowner.ref
    old_listener_to_key = _collection_to_key[oldowner]
    new_listener_to_key = _collection_to_key[newowner]
    for listen_fn in elements:
        listen_ref = weakref.ref(listen_fn)
        key = old_listener_to_key[listen_ref]
        dispatch_reg = _key_to_collection[key]
        if newowner in dispatch_reg:
            # Already copied; the stored ref must agree with ours.
            assert dispatch_reg[newowner] == listen_ref
        else:
            dispatch_reg[newowner] = listen_ref
        new_listener_to_key[listen_ref] = key
def _clear(owner, elements):
    """Remove registry entries for every listener fn in *elements* that
    was stored under *owner*'s collection.
    """
    if not elements:
        return
    owner = owner.ref
    listener_to_key = _collection_to_key[owner]
    for listen_fn in elements:
        listen_ref = weakref.ref(listen_fn)
        key = listener_to_key[listen_ref]
        dispatch_reg = _key_to_collection[key]
        dispatch_reg.pop(owner, None)
        # Drop keys that no longer map to any collection.
        if not dispatch_reg:
            del _key_to_collection[key]
class _EventKey(object):
    """Represent :func:`.listen` arguments.
    """

    # Slots keep these small, frequently-created key objects lightweight.
    __slots__ = (
        'target', 'identifier', 'fn', 'fn_key', 'fn_wrap', 'dispatch_target'
    )

    def __init__(self, target, identifier,
                 fn, dispatch_target, _fn_wrap=None):
        self.target = target
        self.identifier = identifier
        self.fn = fn
        # Bound methods are keyed by (function id, instance id) so two
        # bindings of the same function on different objects stay distinct.
        if isinstance(fn, types.MethodType):
            self.fn_key = id(fn.__func__), id(fn.__self__)
        else:
            self.fn_key = id(fn)
        self.fn_wrap = _fn_wrap
        self.dispatch_target = dispatch_target

    @property
    def _key(self):
        # Identity tuple used as the registry key for this listen() call.
        return (id(self.target), self.identifier, self.fn_key)

    def with_wrapper(self, fn_wrap):
        """Return a copy of this key whose listener fn is wrapped by
        *fn_wrap*; returns self when that wrapper is already in place.
        """
        if fn_wrap is self._listen_fn:
            return self
        else:
            return _EventKey(
                self.target,
                self.identifier,
                self.fn,
                self.dispatch_target,
                _fn_wrap=fn_wrap
            )

    def with_dispatch_target(self, dispatch_target):
        """Return a copy of this key aimed at a different dispatch
        target; returns self when the target is unchanged.
        """
        if dispatch_target is self.dispatch_target:
            return self
        else:
            return _EventKey(
                self.target,
                self.identifier,
                self.fn,
                dispatch_target,
                _fn_wrap=self.fn_wrap
            )

    def listen(self, *args, **kw):
        """Register this listener, honoring the ``once`` and ``named``
        keyword options.
        """
        once = kw.pop("once", False)
        named = kw.pop("named", False)
        target, identifier, fn = \
            self.dispatch_target, self.identifier, self._listen_fn
        dispatch_collection = getattr(target.dispatch, identifier)
        adjusted_fn = dispatch_collection._adjust_fn_spec(fn, named)
        self = self.with_wrapper(adjusted_fn)
        if once:
            # Wrap so the listener deregisters itself after one call.
            self.with_wrapper(
                util.only_once(self._listen_fn)).listen(*args, **kw)
        else:
            self.dispatch_target.dispatch._listen(self, *args, **kw)

    def remove(self):
        """Deregister this listener from every collection that holds it.

        Raises ``InvalidRequestError`` if it was never registered.
        """
        key = self._key
        if key not in _key_to_collection:
            raise exc.InvalidRequestError(
                "No listeners found for event %s / %r / %s " %
                (self.target, self.identifier, self.fn)
            )
        dispatch_reg = _key_to_collection.pop(key)
        for collection_ref, listener_ref in dispatch_reg.items():
            collection = collection_ref()
            listener_fn = listener_ref()
            # Either weakref may be dead; only live pairs need removal.
            if collection is not None and listener_fn is not None:
                collection.remove(self.with_wrapper(listener_fn))

    def contains(self):
        """Return True if this event key is registered to listen.
        """
        return self._key in _key_to_collection

    def base_listen(self, propagate=False, insert=False,
                    named=False):
        """Attach the listener to its dispatch collection, prepending
        instead of appending when *insert* is True.
        """
        target, identifier, fn = \
            self.dispatch_target, self.identifier, self._listen_fn
        dispatch_collection = getattr(target.dispatch, identifier)
        if insert:
            dispatch_collection.\
                for_modify(target.dispatch).insert(self, propagate)
        else:
            dispatch_collection.\
                for_modify(target.dispatch).append(self, propagate)

    @property
    def _listen_fn(self):
        # The wrapped fn takes precedence when a wrapper was installed.
        return self.fn_wrap or self.fn

    def append_to_list(self, owner, list_):
        """Append the listener fn to *list_* unless it is already
        registered under *owner*; returns True on success.
        """
        if _stored_in_collection(self, owner):
            list_.append(self._listen_fn)
            return True
        else:
            return False

    def remove_from_list(self, owner, list_):
        """Remove the listener fn from *list_* and from the registry."""
        _removed_from_collection(self, owner)
        list_.remove(self._listen_fn)

    def prepend_to_list(self, owner, list_):
        """Prepend (appendleft) the listener fn to the deque *list_*
        unless already registered; returns True on success.
        """
        if _stored_in_collection(self, owner):
            list_.appendleft(self._listen_fn)
            return True
        else:
            return False
| bsd-3-clause |
Hope6537/hope-tactical-equipment | hope-note-module/hope-python-2.7-note/org/Userinfo.py | 2 | 10471 | # encoding:utf-8
__author__ = 'Hope6537'
class Userinfo(object):
    """Demo class showing __slots__, __str__/__repr__ and guarded
    attribute access."""

    def __init__(self, username, password):
        self._username = username
        self._password = password

    # Tuple of the only attribute names instances may bind.
    # Note: __slots__ affects only this class, not its subclasses.
    __slots__ = ('_username', '_password', "_score")

    def toString(self):
        """Print a human-readable dump of the stored credentials."""
        print("Userinfo")
        print("username is %s" % self._username)
        print("password is %s" % self._password)

    # Default string conversion used by print() and str().
    def __str__(self):
        return 'Userinfo object (username: %s)' % self._username

    # Bug fix: this alias was misspelled `_repr__`, so repr() never used
    # it; `__repr__` now mirrors __str__ as intended.
    __repr__ = __str__

    def getUsername(self):
        """Return the stored username, or None when it is unset."""
        if hasattr(self, '_username'):
            return self._username
        return None
# 注意到这个神奇的@property,我们在对实例属性操作的时候,就知道该属性很可能不是直接暴露的,而是通过getter和setter方法来实现的。
# 还可以定义只读属性,只定义getter方法,不定义setter方法就是一个只读属性
class Student(object):
    """Demo of @property getters/setters, __getattr__ and __call__."""

    @property
    def score(self):
        # Getter: exposes the validated private attribute.
        return self._score

    @score.setter
    def score(self, value):
        # Setter: validates type and range before storing.
        if not isinstance(value, int):
            raise ValueError('score must be an integer!')
        if value < 0 or value > 100:
            raise ValueError('score must between 0 ~ 100!')
        self._score = value

    def __getattr__(self, attr):
        """Called only when normal attribute lookup fails; provides a
        virtual ``money`` attribute.

        Bug fix: the original implicitly returned None for every other
        missing attribute, hiding typos; a missing attribute must raise
        AttributeError instead.
        """
        if attr == 'money':
            return 0
        raise AttributeError(
            "'Student' object has no attribute '%s'" % attr)

    def __init__(self, name):
        self._name = name

    # Any class defining __call__ makes its instances callable like
    # plain functions.
    def __call__(self):
        print('My name is %s.' % self._name)
class Fib(object):
    """Iterable and indexable Fibonacci demo.

    Implements the iterator protocol (``for ... in``), integer indexing
    and slice indexing via __getitem__.
    """

    def __init__(self):
        # Two rolling counters seed the sequence.
        self.a, self.b = 0, 1

    def __iter__(self):
        # The instance is its own iterator.
        return self

    def next(self):
        """Advance and return the next Fibonacci number (Python 2
        iterator protocol); stops once the value exceeds 100000."""
        self.a, self.b = self.b, self.a + self.b
        if self.a > 100000:
            raise StopIteration()
        return self.a

    # Python 3 spells the iterator-step method __next__; aliasing it
    # keeps the class usable under both interpreters.
    __next__ = next

    def __getitem__(self, n):
        """Support ``fib[n]`` for ints and ``fib[start:stop]`` for slices."""
        if isinstance(n, int):
            a, b = 1, 1
            for x in range(n):
                a, b = b, a + b
            return a
        if isinstance(n, slice):
            # NOTE(review): slice step and negative/None bounds are not
            # handled; start and stop are assumed non-negative ints.
            start = n.start
            stop = n.stop
            a, b = 1, 1
            L = []
            for x in range(stop):
                if x >= start:
                    L.append(a)
                a, b = b, a + b
            return L
# 也没有对负数作处理,所以,要正确实现一个__getitem__()还是有很多工作要做的。
# Iterate the Fib instance like any other iterable (Python 2 print).
for n in Fib():
    print n
# __iter__ alone only makes the object iterable.
# To index elements like a list, __getitem__() must be implemented;
# its counterparts are __setitem__() for item assignment and
# __delitem__() for deletion - supporting all of them properly is work.
print(Fib()[3])
print(Fib()[0:3])
s = Student("user")
print(s.money)  # result 0
class Chain(object):
    """Builds a '/'-separated path through chained attribute access,
    e.g. ``Chain().status.user`` stringifies to '/status/user'."""

    def __init__(self, path=''):
        self._path = path

    def __getattr__(self, segment):
        # Every unknown attribute extends the path by one segment.
        extended = '%s/%s' % (self._path, segment)
        return Chain(extended)

    def __str__(self):
        return self._path
# Chained attribute access: `list` here is just another path segment.
c = Chain().status.user.timeline.list
print(c)
# c = Chain().users('michael').repos
# print(c)
s = Student('Michael')
s()  # instances defining __call__ are callable
print(callable(s))
# To create a class object directly, type() takes three arguments:
def fn(self, name='world'):  # define the function first
    print('Hello, %s.' % name)
# 1. the class name;
# 2. the tuple of base classes (Python supports multiple inheritance;
#    remember the single-element tuple syntax for one base);
# 3. the dict binding method names to functions - here fn becomes hello.
# A class created via type() is identical to one written with `class`:
# the interpreter merely parses the class statement and calls type().
Hello = type('Hello', (object,), dict(hello=fn))  # create the Hello class
h = Hello()
h.hello()
# 如果我们想创建出类呢?那就必须根据metaclass创建出类,所以:先定义metaclass,然后创建类。
# 先定义metaclass,就可以创建类,最后创建实例。
# metaclass是创建类,所以必须从`type`类型派生:
class ListMetaclass(type):
    """Metaclass that injects an ``add`` method (an alias for
    ``append``) into every class it creates.

    __new__ receives, in order: the metaclass itself, the new class's
    name, its tuple of base classes, and its attribute dict.
    """

    def __new__(cls, name, bases, attrs):
        def add(self, value):
            self.append(value)
        attrs['add'] = add
        return type.__new__(cls, name, bases, attrs)
# 添加的attr是要绑定的参数和方法
class MyList(list):
    # Python 3 would instead write `class MyList(list, metaclass=ListMetaclass)`.
    __metaclass__ = ListMetaclass  # tell Python 2 to build this class via ListMetaclass
# This tells the Python 2 interpreter to create MyList through
# ListMetaclass.__new__(), where the class definition can be amended
# (e.g. new methods added) before the final class object is returned.
L = MyList()
L.add(1)
print(L)
# Wouldn't defining add() directly on MyList be simpler? Normally yes -
# rewriting a class through a metaclass is overkill for this case.
# But sometimes a metaclass is exactly what is needed; ORM is the
# classic example.
# ORM ("Object Relational Mapping") maps a database row to an object:
# one class per table, so code stays simpler than raw SQL statements.
# In an ORM framework all model classes must be defined dynamically,
# because only the user knows the table structure.
# Let's sketch such an ORM framework.
# Step one for a low-level module is designing the calling interface:
# a user would define a User class to operate on the table User, and we
# expect them to write code like the classes below.
# First define Field, which stores a table column's name and type:
class Field(object):
    """Describes one database column: its name and its SQL type."""

    def __init__(self, name, column_type):
        self.name = name
        self.column_type = column_type

    def __str__(self):
        # e.g. <StringField:username> - subclass name plus column name.
        label = self.__class__.__name__
        return '<%s:%s>' % (label, self.name)
# 在Field的基础上,分别添加根據不同类型定义的field
class StringField(Field):
    """A varchar(100) column."""

    def __init__(self, name):
        # Equivalent to super(StringField, self).__init__(...) in this
        # single-inheritance hierarchy.
        Field.__init__(self, name, 'varchar(100)')
class IntegerField(Field):
    """A bigint column."""

    def __init__(self, name):
        # Equivalent to super(IntegerField, self).__init__(...) in this
        # single-inheritance hierarchy.
        Field.__init__(self, name, 'bigint')
# 就是编写最复杂的ModelMetaclass了:
# 在ModelMetaclass中,一共做了几件事情:
# 排除掉对Model类的修改;
# 在当前类(比如User)中查找定义的类的所有属性,如果找到一个Field属性,就把它保存到一个__mappings__的dict中,
# 同时从类属性中删除该Field属性,否则,容易造成运行时错误;
# 把表名保存到__table__中,这里简化为表名默认为类名。
class ModelMetaclass(type):
    """Metaclass that collects Field attributes declared on a model
    class into a ``__mappings__`` dict and records the table name.
    """

    def __new__(cls, name, bases, attrs):
        # The Model base class itself must not be transformed.
        if name == 'Model':
            return type.__new__(cls, name, bases, attrs)
        mappings = dict()
        for k, v in attrs.iteritems():
            if isinstance(v, Field):  # collect the Field attributes
                print('Found mapping: %s==>%s' % (k, v))
                mappings[k] = v
        # Remove the Fields from the class body so instance attribute
        # access is not shadowed by the class-level Field objects.
        for k in mappings.iterkeys():
            attrs.pop(k)
        attrs['__table__'] = name  # assume the table name equals the class name
        attrs['__mappings__'] = mappings  # attribute-to-column mapping
        return type.__new__(cls, name, bases, attrs)
# 当用户定义一个class User(Model)时,
# Python解释器首先在当前类User的定义中查找__metaclass__,
# 如果没有找到,就继续在父类Model中查找__metaclass__,
# 找到了,就使用Model中定义的__metaclass__的ModelMetaclass来创建User类,
# 也就是说,metaclass可以隐式地继承到子类,但子类自己却感觉不到
# 在Model类中,就可以定义各种操作数据库的方法,比如save(),delete(),find(),update等等。
class Model(dict):
    """Base class for ORM models: a dict whose keys double as
    attributes, assembled by ModelMetaclass (Python 2 style)."""
    __metaclass__ = ModelMetaclass

    def __init__(self, **kw):
        super(Model, self).__init__(**kw)

    def __getattr__(self, key):
        # Fall back to dict lookup for attribute access.
        try:
            return self[key]
        except KeyError:
            raise AttributeError(r"'Model' object has no attribute '%s'" % key)

    def __setattr__(self, key, value):
        # Attribute assignment writes straight into the dict.
        self[key] = value

    def save(self):
        """Build and print the INSERT statement for this instance
        (demo only - no database is touched)."""
        fields = []
        params = []
        args = []
        for k, v in self.__mappings__.iteritems():
            fields.append(v.name)
            params.append('?')
            args.append(getattr(self, k, None))
        sql = 'insert into %s (%s) values (%s)' % (self.__table__, ','.join(fields), ','.join(params))
        print('SQL: %s' % sql)
        print('ARGS: %s' % str(args))
class User(Model):
    # Map class attributes to table columns; ModelMetaclass moves these
    # Field objects into __mappings__ when the class is created.
    id = IntegerField('id')
    name = StringField('username')
    email = StringField('email')
    password = StringField('password')
# Create an instance:
u = User(id=12345, name='Michael', email='test@orm.org', password='my-pwd')
# "Save" it to the database (prints the generated SQL):
u.save()
# Moral: never give an instance attribute the same name as a class attribute.
| apache-2.0 |
arjankroontolsma/post2 | tests/test_rewrite.py | 12 | 5005 | import os
import os.path
import tempfile
import shutil
import subprocess
import imp
from nose.tools import eq_
class BaseRewriteScript(object):
    """Shared fixture for the ``bin/rewrite`` script tests: loads the
    buildpack runner helper and manages scratch config/run directories.
    """

    def __init__(self):
        module_info = imp.find_module('runner', ['lib/build_pack_utils'])
        self.run = imp.load_module('runner', *module_info)

    def setUp(self):
        self.rewrite = os.path.abspath("bin/rewrite")
        # Child processes must see our lib/ directory on PYTHONPATH.
        self.env = {'PYTHONPATH': os.path.abspath('lib')}
        self.env.update(os.environ)
        # Reserve a unique config path; subclasses copytree() onto it,
        # so the directory itself must not exist yet.
        self.cfg_dir = tempfile.mkdtemp(prefix='config-')
        os.rmdir(self.cfg_dir)
        # Working directory for the script, with the layout it expects.
        self.run_dir = tempfile.mkdtemp(prefix='run-')
        for subdir in ('logs', 'bin'):
            os.makedirs(os.path.join(self.run_dir, subdir))

    def tearDown(self):
        for scratch in (self.cfg_dir, self.run_dir):
            if os.path.exists(scratch):
                shutil.rmtree(scratch)
class TestRewriteScriptPhp(BaseRewriteScript):
    """Runs ``bin/rewrite`` against the default PHP 5.5 configuration."""

    def __init__(self):
        BaseRewriteScript.__init__(self)

    def setUp(self):
        BaseRewriteScript.setUp(self)
        shutil.copytree('defaults/config/php/5.5.x', self.cfg_dir)

    def tearDown(self):
        BaseRewriteScript.tearDown(self)

    def test_rewrite_no_args(self):
        """Without arguments the script must fail with a usage message."""
        try:
            self.run.check_output(self.rewrite,
                                  cwd=self.run_dir,
                                  env=self.env,
                                  stderr=subprocess.STDOUT,
                                  shell=True)
            assert False
        # Fix: use `except ... as`, which is valid on Python 2.6+ AND
        # Python 3, instead of the Python-2-only `except E, e` form.
        except self.run.CalledProcessError as e:
            eq_('Argument required! Specify path to configuration '
                'directory.\n', e.output)
            eq_(255, e.returncode)

    def test_rewrite_arg_file(self):
        """Rewriting a single file expands all @{...} placeholders."""
        cfg_file = os.path.join(self.cfg_dir, 'php.ini')
        res = self.run.check_output("%s %s" % (self.rewrite, cfg_file),
                                    env=self.env,
                                    cwd=self.run_dir,
                                    stderr=subprocess.STDOUT,
                                    shell=True)
        eq_('', res)
        with open(os.path.join(self.cfg_dir, 'php.ini')) as fin:
            cfgFile = fin.read()
        eq_(-1, cfgFile.find('@{HOME}'))
        eq_(-1, cfgFile.find('@{TMPDIR}'))

    def test_rewrite_arg_dir(self):
        """Rewriting a directory expands placeholders in every file."""
        res = self.run.check_output("%s %s" % (self.rewrite, self.cfg_dir),
                                    env=self.env,
                                    cwd=self.run_dir,
                                    stderr=subprocess.STDOUT,
                                    shell=True)
        eq_('', res)
        with open(os.path.join(self.cfg_dir, 'php.ini')) as fin:
            cfgFile = fin.read()
        eq_(-1, cfgFile.find('@{HOME}'))
        eq_(-1, cfgFile.find('@{TMPDIR}'))
        with open(os.path.join(self.cfg_dir, 'php-fpm.conf')) as fin:
            cfgFile = fin.read()
        eq_(-1, cfgFile.find('@{HOME}'))
        eq_(-1, cfgFile.find('@{TMPDIR}'))
        eq_(True, cfgFile.find('www@my.domain.com') >= 0)
class TestRewriteScriptWithHttpd(BaseRewriteScript):
    """Runs ``bin/rewrite`` against the default httpd configuration."""
    def __init__(self):
        BaseRewriteScript.__init__(self)
    def setUp(self):
        BaseRewriteScript.setUp(self)
        shutil.copytree('defaults/config/httpd', self.cfg_dir)
    def tearDown(self):
        BaseRewriteScript.tearDown(self)
    def test_rewrite_with_sub_dirs(self):
        """Placeholders must be expanded recursively in sub-directories."""
        res = self.run.check_output("%s %s" % (self.rewrite, self.cfg_dir),
                                    env=self.env,
                                    cwd=self.run_dir,
                                    stderr=subprocess.STDOUT,
                                    shell=True)
        eq_('', res)
        # No file anywhere under the config tree may retain a raw '@{'.
        for root, dirs, files in os.walk(self.cfg_dir):
            for f in files:
                with open(os.path.join(root, f)) as fin:
                    eq_(-1, fin.read().find('@{'))
class TestRewriteScriptWithNginx(BaseRewriteScript):
    """Runs ``bin/rewrite`` against the default nginx configuration."""
    def __init__(self):
        BaseRewriteScript.__init__(self)
    def setUp(self):
        BaseRewriteScript.setUp(self)
        # nginx templates additionally require $PORT to be set.
        self.env = {'PYTHONPATH': os.path.abspath('lib'),
                    'PORT': '80'}
        self.env.update(os.environ)
        shutil.copytree('defaults/config/nginx', self.cfg_dir)
    def tearDown(self):
        BaseRewriteScript.tearDown(self)
    def test_rewrite(self):
        """All @{...} placeholders must be expanded in every file."""
        res = self.run.check_output("%s %s" % (self.rewrite, self.cfg_dir),
                                    env=self.env,
                                    cwd=self.run_dir,
                                    stderr=subprocess.STDOUT,
                                    shell=True)
        eq_('', res)
        for root, dirs, files in os.walk(self.cfg_dir):
            for f in files:
                with open(os.path.join(root, f)) as fin:
                    eq_(-1, fin.read().find('@{'), f)
| apache-2.0 |
nmayorov/scikit-learn | benchmarks/bench_plot_lasso_path.py | 301 | 4003 | """Benchmarks of Lasso regularization path computation using Lars and CD
The input data is mostly low rank but is a fat infinite tail.
"""
from __future__ import print_function
from collections import defaultdict
import gc
import sys
from time import time
import numpy as np
from sklearn.linear_model import lars_path
from sklearn.linear_model import lasso_path
from sklearn.datasets.samples_generator import make_regression
def compute_bench(samples_range, features_range):
    """Time lars_path and lasso_path (with and without a precomputed
    Gram matrix) over a grid of problem sizes.

    Returns a dict mapping method label -> list of runtimes in seconds,
    one entry per (n_samples, n_features) combination.
    """
    it = 0
    results = defaultdict(lambda: [])
    max_it = len(samples_range) * len(features_range)
    for n_samples in samples_range:
        for n_features in features_range:
            it += 1
            print('====================')
            print('Iteration %03d of %03d' % (it, max_it))
            print('====================')
            dataset_kwargs = {
                'n_samples': n_samples,
                'n_features': n_features,
                # Use floor division: these parameters must be ints
                # (true division would pass floats under Python 3).
                'n_informative': n_features // 10,
                'effective_rank': min(n_samples, n_features) // 10,
                #'effective_rank': None,
                'bias': 0.0,
            }
            print("n_samples: %d" % n_samples)
            print("n_features: %d" % n_features)
            X, y = make_regression(**dataset_kwargs)
            gc.collect()
            print("benchmarking lars_path (with Gram):", end='')
            sys.stdout.flush()
            tstart = time()
            G = np.dot(X.T, X)  # precomputed Gram matrix
            Xy = np.dot(X.T, y)
            lars_path(X, y, Xy=Xy, Gram=G, method='lasso')
            delta = time() - tstart
            print("%0.3fs" % delta)
            results['lars_path (with Gram)'].append(delta)
            gc.collect()
            print("benchmarking lars_path (without Gram):", end='')
            sys.stdout.flush()
            tstart = time()
            lars_path(X, y, method='lasso')
            delta = time() - tstart
            print("%0.3fs" % delta)
            results['lars_path (without Gram)'].append(delta)
            gc.collect()
            print("benchmarking lasso_path (with Gram):", end='')
            sys.stdout.flush()
            tstart = time()
            lasso_path(X, y, precompute=True)
            delta = time() - tstart
            print("%0.3fs" % delta)
            results['lasso_path (with Gram)'].append(delta)
            gc.collect()
            print("benchmarking lasso_path (without Gram):", end='')
            sys.stdout.flush()
            tstart = time()
            lasso_path(X, y, precompute=False)
            delta = time() - tstart
            print("%0.3fs" % delta)
            results['lasso_path (without Gram)'].append(delta)
    return results
if __name__ == '__main__':
    from mpl_toolkits.mplot3d import axes3d  # register the 3d projection
    import matplotlib.pyplot as plt
    # Grid of 5 problem sizes per axis, from 10 to 2000 samples/features.
    samples_range = np.linspace(10, 2000, 5).astype(np.int)
    features_range = np.linspace(10, 2000, 5).astype(np.int)
    results = compute_bench(samples_range, features_range)
    # Common z-axis upper bound across all four subplots.
    max_time = max(max(t) for t in results.values())
    fig = plt.figure('scikit-learn Lasso path benchmark results')
    i = 1
    for c, (label, timings) in zip('bcry', sorted(results.items())):
        ax = fig.add_subplot(2, 2, i, projection='3d')
        X, Y = np.meshgrid(samples_range, features_range)
        Z = np.asarray(timings).reshape(samples_range.shape[0],
                                        features_range.shape[0])
        # plot the actual surface
        ax.plot_surface(X, Y, Z.T, cstride=1, rstride=1, color=c, alpha=0.8)
        # dummy point plot to stick the legend to since surface plot do not
        # support legends (yet?)
        #ax.plot([1], [1], [1], color=c, label=label)
        ax.set_xlabel('n_samples')
        ax.set_ylabel('n_features')
        ax.set_zlabel('Time (s)')
        ax.set_zlim3d(0.0, max_time * 1.1)
        ax.set_title(label)
        #ax.legend()
        i += 1
    plt.show()
| bsd-3-clause |
lopezpdvn/pysyspol | pysyspol/medasys/__init__.py | 1 | 5726 | import sys
import os
import json
import logging
import re
import datetime as dt
from os import access, R_OK, walk, remove
from os.path import join, sep
from itertools import chain
from subprocess import Popen
from pysyspol.util import get_script_name
import timeman
# Operating modes for the tagging workflow: 'update' applies tag edits from
# an existing tagging file, 'retrieve' generates one for the user to edit.
MODE_UPDATE = 'update'
MODE_RETRIEVE = 'retrieve'
def get_core_resources(core_resources_fp):
    """Load and return the core-resources JSON database at
    *core_resources_fp*."""
    with open(core_resources_fp) as resources_file:
        return json.load(resources_file)
def logging_config(prgname=None, msgfmt='[{0}]: %(message)s',
        level=logging.INFO):
    '''Deprecated: configure root logging with a program-name prefix.'''
    # Kept only for backward compatibility; warn callers on stderr.
    print('Function `pysyspol.medasys.logging_config` is deprecated',
          file=sys.stderr)
    if not prgname:
        prgname = get_script_name()
    logging.basicConfig(format=msgfmt.format(prgname), level=level)
def get_tagged_resources(resources, tags=()):
    """Yield each resource whose 'tags' list contains a tag from *tags*.

    Iteration is tag-major: all matches for the first requested tag are
    yielded before any match for the second, and a resource matching
    several requested tags is yielded once per matching tag.
    """
    for tag in tags:
        for resource in resources:
            if tag in resource['tags']:
                yield resource
def validate_paths(resources, resources_fp):
    """Check that every path of every resource is readable.

    Logs one error per unreadable path plus a summary line, and returns
    True only when every path under *resources_fp* is accessible.
    """
    unreadable = 0
    for resource in resources:
        for rel_path in resource['path']:
            abs_path = join(resources_fp, rel_path)
            if not access(abs_path, R_OK):
                logging.error('Resource `{}` not readable'.format(abs_path))
                unreadable += 1
    if not unreadable:
        logging.info('All resources exist and readable')
        return True
    logging.error('Some errors happened trying to read resources')
    return False
def get_valid_tags(tags_fp):
    """Return an iterator over every tag id declared in the JSON tags
    file at *tags_fp* (each entry contributes its whole 'id' list)."""
    with open(tags_fp) as tags_file:
        entries = json.load(tags_file)
    all_ids = (entry['id'] for entry in entries)
    return chain.from_iterable(all_ids)
def validate_tags(resources, tags_fp):
    """Verify that every tag on every resource is declared in *tags_fp*.

    Logs one error per unknown tag and a summary line; returns True when
    all tags are known.
    """
    known_tags = tuple(get_valid_tags(tags_fp))
    valid = True
    for resource in resources:
        unknown = [t for t in resource['tags'] if t not in known_tags]
        for tag in unknown:
            logging.error('Tag `{}` of resource `{}` not valid'.format(
                tag, resource['path']))
        if unknown:
            valid = False
    if valid:
        logging.info('All tags are valid')
    else:
        logging.error('Some errors happened trying to validate tags')
    return valid
def get_tags(resources):
    """Return the sorted, de-duplicated union of all resource tags."""
    unique_tags = set()
    for resource in resources:
        unique_tags.update(resource['tags'])
    return sorted(unique_tags)
def get_all_resources(resources_fp, approot_fp):
    """Yield every file under *resources_fp* as a path relative to it,
    skipping the application root directory *approot_fp* and its subtree.
    """
    for dirpath, subdirs, filenames in walk(resources_fp):
        if dirpath == approot_fp:
            # Prune: do not descend into the app's own directory.
            subdirs[:] = []
            continue
        for filename in filenames:
            full_path = join(dirpath, filename)
            yield full_path.replace(resources_fp, '').strip(sep)
def get_matched_resource(resource_path, core_resources, resources_path):
    """Find the single resource whose path contains *resource_path*
    (case-insensitive substring match).

    Searches the core-resource database first and returns
    ``(index, resource_dict)``.  If no database entry matches, falls
    back to scanning the filesystem under *resources_path* and returns
    ``(-1, absolute_path)`` for the first matching filename.

    Raises ValueError when the database yields multiple matches or when
    nothing matches at all.
    """
    matches = []
    resource_path = resource_path.lower()
    i = 0
    for resource in core_resources:
        for path in resource['path']:
            if resource_path in path.lower():
                # NOTE(review): one resource with several matching paths
                # is appended once per path and then rejected below as
                # ambiguous - confirm this is intended.
                matches.append((i, resource))
        i += 1
    if len(matches) > 1:
        raise ValueError(
            'len(matches) > 1, can only edit one resource at a time')
    if len(matches) == 1:
        return matches[0]
    # Fall back to the filesystem: stop at the first matching filename.
    matches = []
    for relpath, dirs, files in walk(resources_path):
        if matches:
            break
        for fname in files:
            if resource_path in fname.lower():
                matches.append((-1, join(relpath, fname)))
                break
    if not matches:
        raise ValueError('No matched core resource')
    return matches[0]
def update_resource_tags(tagging_resource_path, core_resources_path,
        core_resources, resources_path):
    """Apply the tag selections from a tagging file to the database.

    Reads the tagging JSON at *tagging_resource_path* (whose 'tags' is a
    list of ``[selected, tag]`` pairs), keeps only the selected tags,
    replaces the matching entry in *core_resources*, rewrites the
    database file, and deletes the consumed tagging file.
    """
    with open(tagging_resource_path) as tagging_resource_f:
        tagging_resource = json.load(tagging_resource_f)
    # Keep only the tags the user marked as selected, de-duplicated.
    tagging_resource['tags'] = sorted(set(
        tag for selected, tag in tagging_resource['tags'] if selected))
    i = get_matched_resource(tagging_resource['path'][0], core_resources,
            resources_path)[0]
    core_resources[i] = tagging_resource
    with open(core_resources_path, 'w') as f:
        json.dump(core_resources, f, indent=2, sort_keys=True)
    logging.info('Printed `{}`'.format(core_resources_path))
    remove(tagging_resource_path)
    logging.info('Removed `{}`'.format(tagging_resource_path))
def get_mode(tagging_resource_path):
    """Return MODE_UPDATE when a parseable tagging file exists at the
    given path, MODE_RETRIEVE when there is no such file.

    A file that exists but contains invalid JSON still raises, exactly
    as before.
    """
    try:
        tagging_file = open(tagging_resource_path)
    except FileNotFoundError:
        return MODE_RETRIEVE
    with tagging_file:
        json.load(tagging_file)
    return MODE_UPDATE
def add_core_resource(new_resource_fp, core_resources, resources_path,
        core_resources_path, core_resource_schema_path,
        datetime_fmt=timeman.DEFAULT_DATETIME_FMT):
    """Append a new core-resource entry (built from the JSON schema
    template) for the file *new_resource_fp* and rewrite the database.

    The entry records the file's path relative to *resources_path* and a
    creation timestamp formatted with *datetime_fmt*.
    """
    with open(core_resource_schema_path) as core_schema_f:
        new_core_res = json.load(core_schema_f)
    relative_new_resource_fp = os.path.relpath(new_resource_fp, resources_path)
    new_core_res['path'].append(relative_new_resource_fp)
    new_core_res['datetime'] = dt.datetime.today().strftime(datetime_fmt)
    core_resources.append(new_core_res)
    with open(core_resources_path, 'w') as f:
        json.dump(core_resources, f, indent=2, sort_keys=True)
    msg = 'Added core resource with path `{}` to database. Run again to edit tags'
    logging.info(msg.format(relative_new_resource_fp))
def list_grepped_resources(resources, patterns):
    """Print every path of every resource whose files match *patterns*."""
    matched = get_grepped_resources(resources, patterns)
    for resource in matched:
        for path in resource['path']:
            print(path)
def get_grepped_resources(resources, patterns,
        grep_cmd=('grep', '-i', '-q', '-E')):
    """Yield each resource having at least one file whose contents match
    the pattern, as decided by an external ``grep -i -q -E`` run.

    NOTE(review): only ``patterns[0]`` is ever used despite the plural
    parameter name -- confirm whether multi-pattern support was intended.
    """
    pattern = patterns[0]
    for resource in resources:
        for path in resource['path']:
            cmd = (*grep_cmd, pattern, path)
            with Popen(cmd) as proc:
                proc.wait()
                # grep exits non-zero when nothing matched (or on error).
                if proc.returncode:
                    continue
                # One matching file is enough; move to the next resource.
                yield resource
                break
| mit |
RudoCris/horizon | openstack_dashboard/dashboards/admin/instances/views.py | 6 | 7755 | # Copyright 2012 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# All Rights Reserved.
#
# Copyright 2012 OpenStack Foundation
# Copyright 2012 Nebula, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from django.core.urlresolvers import reverse
from django.core.urlresolvers import reverse_lazy
from django.utils.datastructures import SortedDict
from django.utils.translation import ugettext_lazy as _
from horizon import exceptions
from horizon import forms
from horizon import tables
from horizon.utils import memoized
from openstack_dashboard import api
from openstack_dashboard.dashboards.admin.instances \
import forms as project_forms
from openstack_dashboard.dashboards.admin.instances \
import tables as project_tables
from openstack_dashboard.dashboards.project.instances import views
from openstack_dashboard.dashboards.project.instances.workflows \
import update_instance
# re-use console from project.instances.views to make reflection work
def console(*args, **kvargs):
    """Forward all arguments to the project-level console view.

    Fix: the wrapper previously declared a single positional parameter
    named ``args``; ``*args`` makes it signature-compatible with the
    wrapped view for any number of positional URL arguments.
    """
    return views.console(*args, **kvargs)
# re-use vnc from project.instances.views to make reflection work
def vnc(*args, **kvargs):
    """Forward all arguments to the project-level VNC view.

    Fix: use ``*args`` so any number of positional arguments passes
    through to the wrapped view.
    """
    return views.vnc(*args, **kvargs)
# re-use spice from project.instances.views to make reflection work
def spice(*args, **kvargs):
    """Forward all arguments to the project-level SPICE view.

    Fix: use ``*args`` so any number of positional arguments passes
    through to the wrapped view.
    """
    return views.spice(*args, **kvargs)
# re-use rdp from project.instances.views to make reflection work
def rdp(*args, **kvargs):
    """Forward all arguments to the project-level RDP view.

    Fix: use ``*args`` so any number of positional arguments passes
    through to the wrapped view.
    """
    return views.rdp(*args, **kvargs)
class AdminUpdateView(views.UpdateView):
    # Admin flavor of the instance-update view: same form flow as the
    # project view, but uses the admin workflow and returns to the admin
    # instances table on success.
    workflow_class = update_instance.AdminUpdateInstance
    success_url = reverse_lazy("horizon:admin:instances:index")
class AdminIndexView(tables.DataTableView):
    """Admin instances index: lists instances across all tenants."""
    table_class = project_tables.AdminInstancesTable
    template_name = 'admin/instances/index.html'
    page_title = _("Instances")
    def has_more_data(self, table):
        """Tell the table whether another page of results exists."""
        return self._more
    def get_data(self):
        """Fetch the instance list (all tenants), annotating each instance
        with its flavor object and owning tenant name.

        Returns an empty list when a requested project filter matches no
        tenant, or when the nova listing fails (errors are reported via
        exceptions.handle rather than raised).
        """
        instances = []
        # Pagination marker comes from the table's GET parameter.
        marker = self.request.GET.get(
            project_tables.AdminInstancesTable._meta.pagination_param, None)
        search_opts = self.get_filters({'marker': marker, 'paginate': True})
        # Gather our tenants to correlate against IDs
        try:
            tenants, has_more = api.keystone.tenant_list(self.request)
        except Exception:
            tenants = []
            msg = _('Unable to retrieve instance project information.')
            exceptions.handle(self.request, msg)
        if 'project' in search_opts:
            # The user filtered by project *name*; nova needs a tenant_id,
            # so translate via the keystone tenant list.
            ten_filter_ids = [t.id for t in tenants
                              if t.name == search_opts['project']]
            del search_opts['project']
            if len(ten_filter_ids) > 0:
                search_opts['tenant_id'] = ten_filter_ids[0]
            else:
                # No tenant with that name: nothing can match.
                self._more = False
                return []
        try:
            instances, self._more = api.nova.server_list(
                self.request,
                search_opts=search_opts,
                all_tenants=True)
        except Exception:
            self._more = False
            exceptions.handle(self.request,
                              _('Unable to retrieve instance list.'))
        if instances:
            # Best-effort: enrich instances with Neutron addresses.
            try:
                api.network.servers_update_addresses(self.request, instances,
                                                     all_tenants=True)
            except Exception:
                exceptions.handle(
                    self.request,
                    message=_('Unable to retrieve IP addresses from Neutron.'),
                    ignore=True)
        # Gather our flavors to correlate against IDs
        try:
            flavors = api.nova.flavor_list(self.request)
        except Exception:
            # If fails to retrieve flavor list, creates an empty list.
            flavors = []
        full_flavors = SortedDict([(f.id, f) for f in flavors])
        tenant_dict = SortedDict([(t.id, t) for t in tenants])
        # Loop through instances to get flavor and tenant info.
        for inst in instances:
            flavor_id = inst.flavor["id"]
            try:
                if flavor_id in full_flavors:
                    inst.full_flavor = full_flavors[flavor_id]
                else:
                    # If the flavor_id is not in full_flavors list,
                    # gets it via nova api.
                    inst.full_flavor = api.nova.flavor_get(
                        self.request, flavor_id)
            except Exception:
                msg = _('Unable to retrieve instance size information.')
                exceptions.handle(self.request, msg)
            tenant = tenant_dict.get(inst.tenant_id, None)
            inst.tenant_name = getattr(tenant, "name", None)
        return instances
    def get_filters(self, filters):
        """Merge the table's API-side filter (if any) into *filters*."""
        filter_field = self.table.get_filter_field()
        filter_action = self.table._meta._filter_action
        if filter_action.is_api_filter(filter_field):
            filter_string = self.table.get_filter_string()
            if filter_field and filter_string:
                filters[filter_field] = filter_string
        return filters
class LiveMigrateView(forms.ModalFormView):
    """Modal form allowing an admin to live-migrate one instance."""
    form_class = project_forms.LiveMigrateForm
    template_name = 'admin/instances/live_migrate.html'
    context_object_name = 'instance'
    success_url = reverse_lazy("horizon:admin:instances:index")
    page_title = _("Live Migrate")
    def get_context_data(self, **kwargs):
        """Expose the target instance id to the template."""
        context = super(LiveMigrateView, self).get_context_data(**kwargs)
        context["instance_id"] = self.kwargs['instance_id']
        return context
    @memoized.memoized_method
    def get_hosts(self, *args, **kwargs):
        """Return the nova host list; on failure redirect to the index.

        Memoized so repeated calls within one request hit nova only once.
        """
        try:
            return api.nova.host_list(self.request)
        except Exception:
            redirect = reverse("horizon:admin:instances:index")
            msg = _('Unable to retrieve host information.')
            exceptions.handle(self.request, msg, redirect=redirect)
    @memoized.memoized_method
    def get_object(self, *args, **kwargs):
        """Return the instance being migrated; redirect to index on failure."""
        instance_id = self.kwargs['instance_id']
        try:
            return api.nova.server_get(self.request, instance_id)
        except Exception:
            redirect = reverse("horizon:admin:instances:index")
            msg = _('Unable to retrieve instance details.')
            exceptions.handle(self.request, msg, redirect=redirect)
    def get_initial(self):
        """Seed the form with the instance id, its current host and the
        candidate target hosts."""
        initial = super(LiveMigrateView, self).get_initial()
        _object = self.get_object()
        if _object:
            # 'OS-EXT-SRV-ATTR:host' is the nova extended attribute naming
            # the compute host currently running the instance.
            current_host = getattr(_object, 'OS-EXT-SRV-ATTR:host', '')
            initial.update({'instance_id': self.kwargs['instance_id'],
                            'current_host': current_host,
                            'hosts': self.get_hosts()})
        return initial
class DetailView(views.DetailView):
    """Admin-side instance detail view.

    Inherits the project-level detail view, overriding only the redirect
    and image-link URL names plus the row-action rendering.
    """
    redirect_url = 'horizon:admin:instances:index'
    image_url = 'horizon:admin:images:detail'

    def _get_actions(self, instance):
        """Render the admin table's row actions for *instance*."""
        instances_table = project_tables.AdminInstancesTable(self.request)
        return instances_table.render_row_actions(instance)
| apache-2.0 |
marktwtn/gdb | gdb/python/lib/gdb/command/explore.py | 137 | 26824 | # GDB 'explore' command.
# Copyright (C) 2012-2013 Free Software Foundation, Inc.
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""Implementation of the GDB 'explore' command using the GDB Python API."""
import gdb
import sys
if sys.version_info[0] > 2:
    # Python 3 renamed raw_input to input; alias it so the rest of this
    # module can keep calling raw_input() on both major versions.
    raw_input = input
class Explorer(object):
    """Internal class which invokes other explorers."""
    # This map is filled by the Explorer.init_env() function
    type_code_to_explorer_map = { }
    # Type codes that the ScalarExplorer handles (see is_scalar_type).
    _SCALAR_TYPE_LIST = (
        gdb.TYPE_CODE_CHAR,
        gdb.TYPE_CODE_INT,
        gdb.TYPE_CODE_BOOL,
        gdb.TYPE_CODE_FLT,
        gdb.TYPE_CODE_VOID,
        gdb.TYPE_CODE_ENUM,
    )
    @staticmethod
    def guard_expr(expr):
        """Parenthesize EXPR if needed so it composes safely into larger
        expressions (e.g. "*expr" or "expr[i]").

        EXPR is left unchanged when it is already fully parenthesized or
        consists solely of identifier characters; otherwise it is wrapped
        in parentheses.
        """
        length = len(expr)
        guard = False
        if expr[0] == '(' and expr[length-1] == ')':
            pass
        else:
            # Scan for any character that is not part of an identifier;
            # its presence means the expression needs guarding.
            i = 0
            while i < length:
                c = expr[i]
                if (c == '_' or ('a' <= c and c <= 'z') or
                    ('A' <= c and c <= 'Z') or ('0' <= c and c <= '9')):
                    pass
                else:
                    guard = True
                    break
                i += 1
        if guard:
            return "(" + expr + ")"
        else:
            return expr
    @staticmethod
    def explore_expr(expr, value, is_child):
        """Main function to explore an expression value.
        Arguments:
            expr: The expression string that is being explored.
            value: The gdb.Value value of the expression.
            is_child: Boolean value to indicate if the expression is a child.
                      An expression is a child if it is derived from the main
                      expression entered by the user. For example, if the user
                      entered an expression which evaluates to a struct, then
                      when exploring the fields of the struct, is_child is set
                      to True internally.
        Returns:
            No return value.
        """
        type_code = value.type.code
        if type_code in Explorer.type_code_to_explorer_map:
            explorer_class = Explorer.type_code_to_explorer_map[type_code]
            # An explorer returning True means "explore this value again"
            # (e.g. to pick a different array index or field).
            while explorer_class.explore_expr(expr, value, is_child):
                pass
        else:
            print ("Explorer for type '%s' not yet available.\n" %
                   str(value.type))
    @staticmethod
    def explore_type(name, datatype, is_child):
        """Main function to explore a data type.
        Arguments:
            name: The string representing the path to the data type being
                  explored.
            datatype: The gdb.Type value of the data type being explored.
            is_child: Boolean value to indicate if the name is a child.
                      A name is a child if it is derived from the main name
                      entered by the user. For example, if the user entered
                      the name of struct type, then when exploring the fields
                      of the struct, is_child is set to True internally.
        Returns:
            No return value.
        """
        type_code = datatype.code
        if type_code in Explorer.type_code_to_explorer_map:
            explorer_class = Explorer.type_code_to_explorer_map[type_code]
            # As with explore_expr, a True return requests re-exploration.
            while explorer_class.explore_type(name, datatype, is_child):
                pass
        else:
            print ("Explorer for type '%s' not yet available.\n" %
                   str(datatype))
    @staticmethod
    def init_env():
        """Initializes the Explorer environment.
        This function should be invoked before starting any exploration. If
        invoked before an exploration, it need not be invoked for subsequent
        explorations.
        """
        Explorer.type_code_to_explorer_map = {
            gdb.TYPE_CODE_CHAR : ScalarExplorer,
            gdb.TYPE_CODE_INT : ScalarExplorer,
            gdb.TYPE_CODE_BOOL : ScalarExplorer,
            gdb.TYPE_CODE_FLT : ScalarExplorer,
            gdb.TYPE_CODE_VOID : ScalarExplorer,
            gdb.TYPE_CODE_ENUM : ScalarExplorer,
            gdb.TYPE_CODE_STRUCT : CompoundExplorer,
            gdb.TYPE_CODE_UNION : CompoundExplorer,
            gdb.TYPE_CODE_PTR : PointerExplorer,
            gdb.TYPE_CODE_REF : ReferenceExplorer,
            gdb.TYPE_CODE_TYPEDEF : TypedefExplorer,
            gdb.TYPE_CODE_ARRAY : ArrayExplorer
        }
    @staticmethod
    def is_scalar_type(type):
        """Checks whether a type is a scalar type.
        A type is a scalar type of its type is
            gdb.TYPE_CODE_CHAR or
            gdb.TYPE_CODE_INT or
            gdb.TYPE_CODE_BOOL or
            gdb.TYPE_CODE_FLT or
            gdb.TYPE_CODE_VOID or
            gdb.TYPE_CODE_ENUM.
        Arguments:
            type: The type to be checked.
        Returns:
            'True' if 'type' is a scalar type. 'False' otherwise.
        """
        return type.code in Explorer._SCALAR_TYPE_LIST
    @staticmethod
    def return_to_parent_value():
        """A utility function which prints that the current exploration session
        is returning to the parent value. Useful when exploring values.
        """
        print ("\nReturning to parent value...\n")
    @staticmethod
    def return_to_parent_value_prompt():
        """A utility function which prompts the user to press the 'enter' key
        so that the exploration session can shift back to the parent value.
        Useful when exploring values.
        """
        raw_input("\nPress enter to return to parent value: ")
    @staticmethod
    def return_to_enclosing_type():
        """A utility function which prints that the current exploration session
        is returning to the enclosing type. Useful when exploring types.
        """
        print ("\nReturning to enclosing type...\n")
    @staticmethod
    def return_to_enclosing_type_prompt():
        """A utility function which prompts the user to press the 'enter' key
        so that the exploration session can shift back to the enclosing type.
        Useful when exploring types.
        """
        raw_input("\nPress enter to return to enclosing type: ")
class ScalarExplorer(object):
    """Internal class used to explore scalar values."""

    @staticmethod
    def explore_expr(expr, value, is_child):
        """Print a scalar value; for child values, pause before handing
        control back to the parent exploration session.
        See Explorer.explore_expr and Explorer.is_scalar_type for more
        information.
        """
        print ("'%s' is a scalar value of type '%s'." % (expr, value.type))
        print ("%s = %s" % (expr, str(value)))
        if is_child:
            Explorer.return_to_parent_value_prompt()
            Explorer.return_to_parent_value()
        # Scalars have no sub-structure; never request re-exploration.
        return False

    @staticmethod
    def explore_type(name, datatype, is_child):
        """Describe a scalar or enumerated type.
        See Explorer.explore_type and Explorer.is_scalar_type for more
        information.
        """
        is_enum = (datatype.code == gdb.TYPE_CODE_ENUM)
        if is_enum and is_child:
            print ("%s is of an enumerated type '%s'." %
                   (name, str(datatype)))
        elif is_enum:
            print ("'%s' is an enumerated type." % name)
        elif is_child:
            print ("%s is of a scalar type '%s'." %
                   (name, str(datatype)))
        else:
            print ("'%s' is a scalar type." % name)
        if is_child:
            Explorer.return_to_enclosing_type_prompt()
            Explorer.return_to_enclosing_type()
        return False
class PointerExplorer(object):
    """Internal class used to explore pointer values."""
    @staticmethod
    def explore_expr(expr, value, is_child):
        """Function to explore pointer values.

        Interactively offers to explore the pointer either as a pointer to
        a single value (dereference) or as a pointer to an array (repeated
        indexing), falling through to returning to the parent otherwise.
        See Explorer.explore_expr for more information.
        """
        print ("'%s' is a pointer to a value of type '%s'" %
               (expr, str(value.type.target())))
        option = raw_input("Continue exploring it as a pointer to a single "
                           "value [y/n]: ")
        if option == "y":
            deref_value = None
            try:
                deref_value = value.dereference()
                # Forcing the string conversion probes the target memory so
                # an invalid pointer is detected here rather than later.
                str(deref_value)
            except gdb.MemoryError:
                print ("'%s' a pointer pointing to an invalid memory "
                       "location." % expr)
                if is_child:
                    Explorer.return_to_parent_value_prompt()
                return False
            Explorer.explore_expr("*%s" % Explorer.guard_expr(expr),
                                  deref_value, is_child)
            return False
        option = raw_input("Continue exploring it as a pointer to an "
                           "array [y/n]: ")
        if option == "y":
            # Keep prompting for indices until the user enters a non-integer.
            while True:
                index = 0
                try:
                    index = int(raw_input("Enter the index of the element you "
                                          "want to explore in '%s': " % expr))
                except ValueError:
                    break
                element_expr = "%s[%d]" % (Explorer.guard_expr(expr), index)
                element = value[index]
                try:
                    str(element)
                except gdb.MemoryError:
                    print ("Cannot read value at index %d." % index)
                    continue
                Explorer.explore_expr(element_expr, element, True)
            return False
        if is_child:
            Explorer.return_to_parent_value()
        return False
    @staticmethod
    def explore_type(name, datatype, is_child):
        """Function to explore pointer types.

        Recurses into the pointee type.
        See Explorer.explore_type for more information.
        """
        target_type = datatype.target()
        print ("\n%s is a pointer to a value of type '%s'." %
               (name, str(target_type)))
        Explorer.explore_type("the pointee type of %s" % name,
                              target_type,
                              is_child)
        return False
class ReferenceExplorer(object):
    """Internal class used to explore reference (TYPE_CODE_REF) values."""

    @staticmethod
    def explore_expr(expr, value, is_child):
        """Explore a reference by delegating to the value it refers to.
        See Explorer.explore_expr for more information.
        """
        Explorer.explore_expr(expr, value.referenced_value(), is_child)
        return False

    @staticmethod
    def explore_type(name, datatype, is_child):
        """Explore a reference type by delegating to its target type.
        See Explorer.explore_type for more information.
        """
        Explorer.explore_type(name, datatype.target(), is_child)
        return False
class ArrayExplorer(object):
    """Internal class used to explore arrays."""
    @staticmethod
    def explore_expr(expr, value, is_child):
        """Function to explore array values.

        Prompts for an element index and recurses into that element; a
        non-integer entry ends exploration of this array.
        See Explorer.explore_expr for more information.
        """
        target_type = value.type.target()
        print ("'%s' is an array of '%s'." % (expr, str(target_type)))
        index = 0
        try:
            index = int(raw_input("Enter the index of the element you want to "
                                  "explore in '%s': " % expr))
        except ValueError:
            # Non-numeric input means the user is done with this array.
            if is_child:
                Explorer.return_to_parent_value()
            return False
        element = None
        try:
            element = value[index]
            # str() probes the memory so unreadable elements fail here.
            str(element)
        except gdb.MemoryError:
            print ("Cannot read value at index %d." % index)
            raw_input("Press enter to continue... ")
            return True
        Explorer.explore_expr("%s[%d]" % (Explorer.guard_expr(expr), index),
                              element, True)
        # True: re-run so the user can pick another index.
        return True
    @staticmethod
    def explore_type(name, datatype, is_child):
        """Function to explore array types.

        Recurses into the element type.
        See Explorer.explore_type for more information.
        """
        target_type = datatype.target()
        print ("%s is an array of '%s'." % (name, str(target_type)))
        Explorer.explore_type("the array element of %s" % name, target_type,
                              is_child)
        return False
class CompoundExplorer(object):
    """Internal class used to explore struct, classes and unions."""
    @staticmethod
    def _print_fields(print_list):
        """Internal function which prints the fields of a struct/class/union.

        print_list is a sequence of (field_name, description) pairs; names
        are right-aligned to the longest field name.
        """
        max_field_name_length = 0
        for pair in print_list:
            if max_field_name_length < len(pair[0]):
                max_field_name_length = len(pair[0])
        for pair in print_list:
            print (" %*s = %s" % (max_field_name_length, pair[0], pair[1]))
    @staticmethod
    def _get_real_field_count(fields):
        # Count only non-artificial (i.e. programmer-declared) fields.
        real_field_count = 0;
        for field in fields:
            if not field.artificial:
                real_field_count = real_field_count + 1
        return real_field_count
    @staticmethod
    def explore_expr(expr, value, is_child):
        """Function to explore structs/classes and union values.

        Lists the fields, printing scalar members inline and offering a
        numbered choice to drill into explorable (compound) members.
        See Explorer.explore_expr for more information.
        """
        datatype = value.type
        type_code = datatype.code
        fields = datatype.fields()
        if type_code == gdb.TYPE_CODE_STRUCT:
            type_desc = "struct/class"
        else:
            type_desc = "union"
        if CompoundExplorer._get_real_field_count(fields) == 0:
            print ("The value of '%s' is a %s of type '%s' with no fields." %
                   (expr, type_desc, str(value.type)))
            if is_child:
                Explorer.return_to_parent_value_prompt()
            return False
        print ("The value of '%s' is a %s of type '%s' with the following "
               "fields:\n" % (expr, type_desc, str(value.type)))
        has_explorable_fields = False
        choice_to_compound_field_map = { }
        current_choice = 0
        print_list = [ ]
        for field in fields:
            if field.artificial:
                continue
            field_full_name = Explorer.guard_expr(expr) + "." + field.name
            if field.is_base_class:
                # Base-class sub-objects are reached via a cast, not by name.
                field_value = value.cast(field.type)
            else:
                field_value = value[field.name]
            literal_value = ""
            if type_code == gdb.TYPE_CODE_UNION:
                # Union members are always explorable (reading a scalar
                # member may be misleading if it is not the active one).
                literal_value = ("<Enter %d to explore this field of type "
                                 "'%s'>" % (current_choice, str(field.type)))
                has_explorable_fields = True
            else:
                if Explorer.is_scalar_type(field.type):
                    literal_value = ("%s .. (Value of type '%s')" %
                                     (str(field_value), str(field.type)))
                else:
                    if field.is_base_class:
                        field_desc = "base class"
                    else:
                        field_desc = "field"
                    literal_value = ("<Enter %d to explore this %s of type "
                                     "'%s'>" %
                                     (current_choice, field_desc,
                                      str(field.type)))
                    has_explorable_fields = True
            choice_to_compound_field_map[str(current_choice)] = (
                field_full_name, field_value)
            current_choice = current_choice + 1
            print_list.append((field.name, literal_value))
        CompoundExplorer._print_fields(print_list)
        print ("")
        if has_explorable_fields:
            choice = raw_input("Enter the field number of choice: ")
            if choice in choice_to_compound_field_map:
                Explorer.explore_expr(choice_to_compound_field_map[choice][0],
                                      choice_to_compound_field_map[choice][1],
                                      True)
                # True: re-list the fields so another one can be chosen.
                return True
            else:
                if is_child:
                    Explorer.return_to_parent_value()
        else:
            if is_child:
                Explorer.return_to_parent_value_prompt()
        return False
    @staticmethod
    def explore_type(name, datatype, is_child):
        """Function to explore struct/class and union types.

        Lists the (non-artificial) fields and offers a numbered choice to
        drill into a member's type.
        See Explorer.explore_type for more information.
        """
        type_code = datatype.code
        type_desc = ""
        if type_code == gdb.TYPE_CODE_STRUCT:
            type_desc = "struct/class"
        else:
            type_desc = "union"
        fields = datatype.fields()
        if CompoundExplorer._get_real_field_count(fields) == 0:
            if is_child:
                print ("%s is a %s of type '%s' with no fields." %
                       (name, type_desc, str(datatype)))
                Explorer.return_to_enclosing_type_prompt()
            else:
                print ("'%s' is a %s with no fields." % (name, type_desc))
            return False
        if is_child:
            print ("%s is a %s of type '%s' "
                   "with the following fields:\n" %
                   (name, type_desc, str(datatype)))
        else:
            print ("'%s' is a %s with the following "
                   "fields:\n" %
                   (name, type_desc))
        # NOTE(review): has_explorable_fields is assigned but never read in
        # this method (unlike explore_expr above) — confirm before removing.
        has_explorable_fields = False
        current_choice = 0
        choice_to_compound_field_map = { }
        print_list = [ ]
        for field in fields:
            if field.artificial:
                continue
            if field.is_base_class:
                field_desc = "base class"
            else:
                field_desc = "field"
            rhs = ("<Enter %d to explore this %s of type '%s'>" %
                   (current_choice, field_desc, str(field.type)))
            print_list.append((field.name, rhs))
            choice_to_compound_field_map[str(current_choice)] = (
                field.name, field.type, field_desc)
            current_choice = current_choice + 1
        CompoundExplorer._print_fields(print_list)
        print ("")
        if len(choice_to_compound_field_map) > 0:
            choice = raw_input("Enter the field number of choice: ")
            if choice in choice_to_compound_field_map:
                if is_child:
                    new_name = ("%s '%s' of %s" %
                                (choice_to_compound_field_map[choice][2],
                                 choice_to_compound_field_map[choice][0],
                                 name))
                else:
                    new_name = ("%s '%s' of '%s'" %
                                (choice_to_compound_field_map[choice][2],
                                 choice_to_compound_field_map[choice][0],
                                 name))
                Explorer.explore_type(new_name,
                    choice_to_compound_field_map[choice][1], True)
                return True
            else:
                if is_child:
                    Explorer.return_to_enclosing_type()
        else:
            if is_child:
                Explorer.return_to_enclosing_type_prompt()
        return False
class TypedefExplorer(object):
    """Internal class used to explore values whose type is a typedef."""

    @staticmethod
    def explore_expr(expr, value, is_child):
        """Explore a typedef'd value by casting it to the underlying type.
        See Explorer.explore_expr for more information.
        """
        actual = value.type.strip_typedefs()
        print ("The value of '%s' is of type '%s' "
               "which is a typedef of type '%s'" %
               (expr, str(value.type), str(actual)))
        Explorer.explore_expr(expr, value.cast(actual), is_child)
        return False

    @staticmethod
    def explore_type(name, datatype, is_child):
        """Explore a typedef by recursing into the underlying type.
        See Explorer.explore_type for more information.
        """
        actual = datatype.strip_typedefs()
        template = ("The type of %s is a typedef of type '%s'." if is_child
                    else "The type '%s' is a typedef of type '%s'.")
        print (template % (name, str(actual)))
        Explorer.explore_type(name, actual, is_child)
        return False
class ExploreUtils(object):
    """Internal class which provides utilities for the main command classes."""

    @staticmethod
    def check_args(name, arg_str):
        """Utility to check if adequate number of arguments are passed to an
        explore command.

        Arguments:
            name: The name of the explore command.
            arg_str: The argument string passed to the explore command.
        Returns:
            True if adequate arguments are passed.
        Raises:
            gdb.GdbError if adequate arguments are not passed.
        """
        # Fix: the original raised and then fell through to an unreachable
        # 'return False'; raising alone expresses the failure path.
        if len(arg_str) < 1:
            raise gdb.GdbError("ERROR: '%s' requires an argument."
                               % name)
        return True

    @staticmethod
    def get_type_from_str(type_str):
        """A utility function to deduce the gdb.Type value from a string
        representing the type.

        Arguments:
            type_str: The type string from which the gdb.Type value should be
                      deduced.
        Returns:
            The deduced gdb.Type value if possible, None otherwise.
        """
        try:
            # Assume the current language to be C/C++ and make a try.
            return gdb.parse_and_eval("(%s *)0" % type_str).type.target()
        except RuntimeError:
            # If assumption of current language to be C/C++ was wrong, then
            # lookup the type using the API.
            try:
                return gdb.lookup_type(type_str)
            except RuntimeError:
                return None

    @staticmethod
    def get_value_from_str(value_str):
        """A utility function to deduce the gdb.Value value from a string
        representing the value.

        Arguments:
            value_str: The value string from which the gdb.Value value should
                       be deduced.
        Returns:
            The deduced gdb.Value value if possible, None otherwise.
        """
        try:
            return gdb.parse_and_eval(value_str)
        except RuntimeError:
            return None
class ExploreCommand(gdb.Command):
    """Explore a value or a type valid in the current context.
    Usage:
        explore ARG
        - ARG is either a valid expression or a type name.
        - At any stage of exploration, hit the return key (instead of a
        choice, if any) to return to the enclosing type or value.
    """

    def __init__(self):
        super(ExploreCommand, self).__init__(name = "explore",
                                             command_class = gdb.COMMAND_DATA,
                                             prefix = True)

    def invoke(self, arg_str, from_tty):
        """Dispatch ARG to the value explorer, else the type explorer.

        Raises gdb.GdbError when ARG is neither a value nor a type.
        """
        # Fix: idiomatic truth test instead of '== False'.
        if not ExploreUtils.check_args("explore", arg_str):
            return
        # Check if it is a value
        value = ExploreUtils.get_value_from_str(arg_str)
        if value is not None:
            Explorer.explore_expr(arg_str, value, False)
            return
        # If it is not a value, check if it is a type
        datatype = ExploreUtils.get_type_from_str(arg_str)
        if datatype is not None:
            Explorer.explore_type(arg_str, datatype, False)
            return
        # If it is neither a value nor a type, raise an error.
        raise gdb.GdbError(
            ("'%s' neither evaluates to a value nor is a type "
             "in the current context." %
             arg_str))
class ExploreValueCommand(gdb.Command):
    """Explore value of an expression valid in the current context.
    Usage:
        explore value ARG
        - ARG is a valid expression.
        - At any stage of exploration, hit the return key (instead of a
        choice, if any) to return to the enclosing value.
    """

    def __init__(self):
        super(ExploreValueCommand, self).__init__(
            name = "explore value", command_class = gdb.COMMAND_DATA)

    def invoke(self, arg_str, from_tty):
        """Evaluate ARG and explore the resulting value.

        Raises gdb.GdbError when ARG does not evaluate to a value.
        """
        # Fix: idiomatic truth test instead of '== False'.
        if not ExploreUtils.check_args("explore value", arg_str):
            return
        value = ExploreUtils.get_value_from_str(arg_str)
        if value is None:
            # Fix: removed the unreachable 'return' that followed this raise.
            raise gdb.GdbError(
                (" '%s' does not evaluate to a value in the current "
                 "context." %
                 arg_str))
        Explorer.explore_expr(arg_str, value, False)
class ExploreTypeCommand(gdb.Command):
    """Explore a type or the type of an expression valid in the current
    context.
    Usage:
        explore type ARG
        - ARG is a valid expression or a type name.
        - At any stage of exploration, hit the return key (instead of a
        choice, if any) to return to the enclosing type.
    """

    def __init__(self):
        super(ExploreTypeCommand, self).__init__(
            name = "explore type", command_class = gdb.COMMAND_DATA)

    def invoke(self, arg_str, from_tty):
        """Explore ARG as a type name or, failing that, the type of ARG's
        value.

        Raises gdb.GdbError when ARG is neither a type nor a value.
        """
        # Fix: idiomatic truth test instead of '== False'.
        if not ExploreUtils.check_args("explore type", arg_str):
            return
        # Prefer interpreting ARG as a type name.
        datatype = ExploreUtils.get_type_from_str(arg_str)
        if datatype is not None:
            Explorer.explore_type(arg_str, datatype, False)
            return
        # Fall back to evaluating ARG and exploring the value's type.
        value = ExploreUtils.get_value_from_str(arg_str)
        if value is not None:
            print ("'%s' is of type '%s'." % (arg_str, str(value.type)))
            Explorer.explore_type(str(value.type), value.type, False)
            return
        raise gdb.GdbError(("'%s' is not a type or value in the current "
                            "context." % arg_str))
# Module initialization: populate the type-code dispatch map and register
# the 'explore', 'explore value' and 'explore type' commands with GDB.
Explorer.init_env()
ExploreCommand()
ExploreValueCommand()
ExploreTypeCommand()
| gpl-2.0 |
ltilve/ChromiumGStreamerBackend | tools/telemetry/telemetry/internal/backends/chrome/desktop_browser_finder.py | 5 | 11222 | # Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Finds desktop browsers that can be controlled by telemetry."""
import logging
import os
import subprocess
import sys
from telemetry.core import exceptions
from telemetry.core import platform as platform_module
from telemetry.internal.backends.chrome import desktop_browser_backend
from telemetry.internal.browser import browser
from telemetry.internal.browser import possible_browser
from telemetry.internal.platform import desktop_device
from telemetry.internal.util import path
class PossibleDesktopBrowser(possible_browser.PossibleBrowser):
    """A desktop browser that can be controlled."""
    def __init__(self, browser_type, finder_options, executable, flash_path,
                 is_content_shell, browser_directory, is_local_build=False):
        target_os = sys.platform.lower()
        super(PossibleDesktopBrowser, self).__init__(
            browser_type, target_os, not is_content_shell)
        # Guard against typos: every type must be registered in
        # FindAllBrowserTypes below.
        assert browser_type in FindAllBrowserTypes(finder_options), (
            'Please add %s to desktop_browser_finder.FindAllBrowserTypes' %
            browser_type)
        self._local_executable = executable
        self._flash_path = flash_path
        self._is_content_shell = is_content_shell
        self._browser_directory = browser_directory
        self.is_local_build = is_local_build
    def __repr__(self):
        return 'PossibleDesktopBrowser(type=%s, executable=%s, flash=%s)' % (
            self.browser_type, self._local_executable, self._flash_path)
    def _InitPlatformIfNeeded(self):
        """Lazily bind the host platform and its backend (idempotent)."""
        if self._platform:
            return
        self._platform = platform_module.GetHostPlatform()
        # pylint: disable=W0212
        self._platform_backend = self._platform._platform_backend
    def Create(self, finder_options):
        """Instantiate a controllable Browser for this executable.

        Drops the Flash path (with a warning) if the plugin binary is not
        actually present on disk.
        """
        if self._flash_path and not os.path.exists(self._flash_path):
            logging.warning(
                'Could not find Flash at %s. Continuing without Flash.\n'
                'To run with Flash, check it out via http://go/read-src-internal',
                self._flash_path)
            self._flash_path = None
        self._InitPlatformIfNeeded()
        browser_backend = desktop_browser_backend.DesktopBrowserBackend(
            self._platform_backend,
            finder_options.browser_options, self._local_executable,
            self._flash_path, self._is_content_shell, self._browser_directory,
            output_profile_path=finder_options.output_profile_path,
            extensions_to_load=finder_options.extensions_to_load)
        return browser.Browser(
            browser_backend, self._platform_backend, self._credentials_path)
    def SupportsOptions(self, finder_options):
        """Content shell builds cannot load extensions."""
        if (len(finder_options.extensions_to_load) != 0) and self._is_content_shell:
            return False
        return True
    def UpdateExecutableIfNeeded(self):
        # Local desktop binaries are used as-is; nothing to download.
        pass
    def last_modification_time(self):
        """Return the executable's mtime, or -1 when it does not exist."""
        if os.path.exists(self._local_executable):
            return os.path.getmtime(self._local_executable)
        return -1
def SelectDefaultBrowser(possible_browsers):
    """Pick the most recently modified locally built browser, if any.

    Returns None when no candidate is a local build.
    """
    local_builds = sorted(
        (candidate for candidate in possible_browsers
         if candidate.is_local_build),
        key=lambda candidate: candidate.last_modification_time())
    if not local_builds:
        return None
    return local_builds[-1]
def CanFindAvailableBrowsers():
    """Desktop browser discovery works everywhere except on Chrome OS."""
    return platform_module.GetHostPlatform().GetOSName() != 'chromeos'
def CanPossiblyHandlePath(target_path):
    """Heuristically decide whether target_path could name a desktop browser
    binary on the current platform: '.exe' on Windows, extension-less on
    Mac/Linux."""
    extension = os.path.splitext(target_path.lower())[1]
    if sys.platform.startswith('win'):
        return extension == '.exe'
    if sys.platform == 'darwin' or sys.platform.startswith('linux'):
        return not extension
    return False
def FindAllBrowserTypes(_):
    """Return every browser-type string this desktop finder understands."""
    build_types = ['release', 'release_x64', 'debug', 'debug_x64', 'default']
    channels = ['stable', 'beta', 'dev', 'canary']
    content_shell_types = [
        'content-shell-debug',
        'content-shell-debug_x64',
        'content-shell-release',
        'content-shell-release_x64',
        'content-shell-default',
    ]
    return (['exact', 'reference'] + build_types + channels +
            content_shell_types + ['system'])
def FindAllAvailableBrowsers(finder_options, device):
"""Finds all the desktop browsers available on this machine."""
if not isinstance(device, desktop_device.DesktopDevice):
return []
browsers = []
if not CanFindAvailableBrowsers():
return []
has_x11_display = True
if (sys.platform.startswith('linux') and
os.getenv('DISPLAY') == None):
has_x11_display = False
# Look for a browser in the standard chrome build locations.
if finder_options.chrome_root:
chrome_root = finder_options.chrome_root
else:
chrome_root = path.GetChromiumSrcDir()
flash_bin_dir = os.path.join(
chrome_root, 'third_party', 'adobe', 'flash', 'binaries', 'ppapi')
chromium_app_names = []
if sys.platform == 'darwin':
chromium_app_names.append('Chromium.app/Contents/MacOS/Chromium')
chromium_app_names.append('Google Chrome.app/Contents/MacOS/Google Chrome')
content_shell_app_name = 'Content Shell.app/Contents/MacOS/Content Shell'
flash_bin = 'PepperFlashPlayer.plugin'
flash_path = os.path.join(flash_bin_dir, 'mac', flash_bin)
flash_path_64 = os.path.join(flash_bin_dir, 'mac_64', flash_bin)
elif sys.platform.startswith('linux'):
chromium_app_names.append('chrome')
content_shell_app_name = 'content_shell'
flash_bin = 'libpepflashplayer.so'
flash_path = os.path.join(flash_bin_dir, 'linux', flash_bin)
flash_path_64 = os.path.join(flash_bin_dir, 'linux_x64', flash_bin)
elif sys.platform.startswith('win'):
chromium_app_names.append('chrome.exe')
content_shell_app_name = 'content_shell.exe'
flash_bin = 'pepflashplayer.dll'
flash_path = os.path.join(flash_bin_dir, 'win', flash_bin)
flash_path_64 = os.path.join(flash_bin_dir, 'win_x64', flash_bin)
else:
raise Exception('Platform not recognized')
# Add the explicit browser executable if given and we can handle it.
if (finder_options.browser_executable and
CanPossiblyHandlePath(finder_options.browser_executable)):
normalized_executable = os.path.expanduser(
finder_options.browser_executable)
if path.IsExecutable(normalized_executable):
browser_directory = os.path.dirname(finder_options.browser_executable)
browsers.append(PossibleDesktopBrowser('exact', finder_options,
normalized_executable, flash_path,
False, browser_directory))
else:
raise exceptions.PathMissingError(
'%s specified by --browser-executable does not exist' %
normalized_executable)
  def AddIfFound(browser_type, build_dir, type_dir, app_name, content_shell):
    """Append a PossibleDesktopBrowser if the built binary exists.

    Looks for app_name under chrome_root/build_dir/type_dir and, when the
    file is executable, registers it in the enclosing `browsers` list.

    Returns True if a browser was added, False otherwise.
    """
    browser_directory = os.path.join(chrome_root, build_dir, type_dir)
    app = os.path.join(browser_directory, app_name)
    if path.IsExecutable(app):
      # 64-bit builds get the 64-bit Flash binary resolved above.
      is_64 = browser_type.endswith('_x64')
      browsers.append(PossibleDesktopBrowser(
          browser_type, finder_options, app,
          flash_path_64 if is_64 else flash_path,
          content_shell, browser_directory, is_local_build=True))
      return True
    return False
# Add local builds
for build_dir, build_type in path.GetBuildDirectories():
for chromium_app_name in chromium_app_names:
AddIfFound(build_type.lower(), build_dir, build_type,
chromium_app_name, False)
AddIfFound('content-shell-' + build_type.lower(), build_dir, build_type,
content_shell_app_name, True)
reference_build_root = os.path.join(
chrome_root, 'chrome', 'tools', 'test', 'reference_build')
# Mac-specific options.
if sys.platform == 'darwin':
mac_canary_root = '/Applications/Google Chrome Canary.app/'
mac_canary = mac_canary_root + 'Contents/MacOS/Google Chrome Canary'
mac_system_root = '/Applications/Google Chrome.app'
mac_system = mac_system_root + '/Contents/MacOS/Google Chrome'
mac_reference_root = reference_build_root + '/chrome_mac/Google Chrome.app/'
mac_reference = mac_reference_root + 'Contents/MacOS/Google Chrome'
if path.IsExecutable(mac_canary):
browsers.append(PossibleDesktopBrowser('canary', finder_options,
mac_canary, None, False,
mac_canary_root))
if path.IsExecutable(mac_system):
browsers.append(PossibleDesktopBrowser('system', finder_options,
mac_system, None, False,
mac_system_root))
if path.IsExecutable(mac_reference):
browsers.append(PossibleDesktopBrowser('reference', finder_options,
mac_reference, None, False,
mac_reference_root))
# Linux specific options.
if sys.platform.startswith('linux'):
versions = {
'system': ('google-chrome',
os.path.split(os.path.realpath('google-chrome'))[0]),
'stable': ('google-chrome-stable', '/opt/google/chrome'),
'beta': ('google-chrome-beta', '/opt/google/chrome-beta'),
'dev': ('google-chrome-unstable', '/opt/google/chrome-unstable')
}
for version, (name, root) in versions.iteritems():
found = False
try:
with open(os.devnull, 'w') as devnull:
found = subprocess.call([name, '--version'],
stdout=devnull, stderr=devnull) == 0
except OSError:
pass
if found:
browsers.append(PossibleDesktopBrowser(version, finder_options, name,
None, False, root))
linux_reference_root = os.path.join(reference_build_root, 'chrome_linux')
linux_reference = os.path.join(linux_reference_root, 'chrome')
if path.IsExecutable(linux_reference):
browsers.append(PossibleDesktopBrowser('reference', finder_options,
linux_reference, None, False,
linux_reference_root))
# Win32-specific options.
if sys.platform.startswith('win'):
app_paths = (
('system', os.path.join('Google', 'Chrome', 'Application')),
('canary', os.path.join('Google', 'Chrome SxS', 'Application')),
('reference', os.path.join(reference_build_root, 'chrome_win')),
)
for browser_name, app_path in app_paths:
for chromium_app_name in chromium_app_names:
app_path = os.path.join(app_path, chromium_app_name)
app_path = path.FindInstalledWindowsApplication(app_path)
if app_path:
browsers.append(PossibleDesktopBrowser(
browser_name, finder_options, app_path,
None, False, os.path.dirname(app_path)))
has_ozone_platform = False
for arg in finder_options.browser_options.extra_browser_args:
if "--ozone-platform" in arg:
has_ozone_platform = True
if len(browsers) and not has_x11_display and not has_ozone_platform:
logging.warning(
'Found (%s), but you do not have a DISPLAY environment set.' %
','.join([b.browser_type for b in browsers]))
return []
return browsers
| bsd-3-clause |
Tesora/tesora-horizon | openstack_dashboard/test/api_tests/nova_rest_tests.py | 1 | 13771 | # Copyright 2014, Rackspace, US, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import mock
from django.conf import settings
from openstack_dashboard import api
from openstack_dashboard.api.rest import nova
from openstack_dashboard.test import helpers as test
class NovaRestTestCase(test.TestCase):
    """Unit tests for the nova REST API dispatch layer.

    Each test mocks out ``openstack_dashboard.api.nova`` (injected by
    ``mock.patch.object`` as the trailing ``nc`` argument) and asserts on
    the JSON payload, status code and delegated API call.
    """

    #
    # Keypairs
    #
    @mock.patch.object(nova.api, 'nova')
    def test_keypair_get(self, nc):
        request = self.mock_rest_request()
        nc.keypair_list.return_value = [
            mock.Mock(**{'to_dict.return_value': {'id': 'one'}}),
            mock.Mock(**{'to_dict.return_value': {'id': 'two'}}),
        ]
        response = nova.Keypairs().get(request)
        self.assertStatusCode(response, 200)
        self.assertEqual(response.json,
                         {"items": [{"id": "one"}, {"id": "two"}]})
        nc.keypair_list.assert_called_once_with(request)

    @mock.patch.object(nova.api, 'nova')
    def test_keypair_create(self, nc):
        request = self.mock_rest_request(body='''{"name": "Ni!"}''')
        new = nc.keypair_create.return_value
        new.to_dict.return_value = {'name': 'Ni!', 'public_key': 'sekrit'}
        new.name = 'Ni!'
        with mock.patch.object(settings, 'DEBUG', True):
            response = nova.Keypairs().post(request)
        self.assertStatusCode(response, 201)
        self.assertEqual(response.json,
                         {"name": "Ni!", "public_key": "sekrit"})
        self.assertEqual(response['location'], '/api/nova/keypairs/Ni%21')
        nc.keypair_create.assert_called_once_with(request, 'Ni!')

    @mock.patch.object(nova.api, 'nova')
    def test_keypair_import(self, nc):
        # A request body that carries a public_key triggers the import path.
        request = self.mock_rest_request(body='''
            {"name": "Ni!", "public_key": "hi"}
        ''')
        new = nc.keypair_import.return_value
        new.to_dict.return_value = {'name': 'Ni!', 'public_key': 'hi'}
        new.name = 'Ni!'
        with mock.patch.object(settings, 'DEBUG', True):
            response = nova.Keypairs().post(request)
        self.assertStatusCode(response, 201)
        self.assertEqual(response.json,
                         {"name": "Ni!", "public_key": "hi"})
        self.assertEqual(response['location'], '/api/nova/keypairs/Ni%21')
        nc.keypair_import.assert_called_once_with(request, 'Ni!', 'hi')

    #
    # Availability Zones
    #
    def test_availzone_get_brief(self):
        self._test_availzone_get(False)

    def test_availzone_get_detailed(self):
        self._test_availzone_get(True)

    @mock.patch.object(nova.api, 'nova')
    def _test_availzone_get(self, detail, nc):
        # mock.patch appends the mock AFTER caller-supplied args, so the
        # call self._test_availzone_get(False) binds detail=False, nc=mock.
        if detail:
            request = self.mock_rest_request(GET={'detailed': 'true'})
        else:
            request = self.mock_rest_request(GET={})
        nc.availability_zone_list.return_value = [
            mock.Mock(**{'to_dict.return_value': {'id': 'one'}}),
            mock.Mock(**{'to_dict.return_value': {'id': 'two'}}),
        ]
        response = nova.AvailabilityZones().get(request)
        self.assertStatusCode(response, 200)
        self.assertEqual(response.json,
                         {"items": [{"id": "one"}, {"id": "two"}]})
        nc.availability_zone_list.assert_called_once_with(request, detail)

    #
    # Limits
    #
    def test_limits_get_not_reserved(self):
        self._test_limits_get(False)

    def test_limits_get_reserved(self):
        self._test_limits_get(True)

    @mock.patch.object(nova.api, 'nova')
    def _test_limits_get(self, reserved, nc):
        if reserved:
            request = self.mock_rest_request(GET={'reserved': 'true'})
        else:
            request = self.mock_rest_request(GET={})
        nc.tenant_absolute_limits.return_value = {'id': 'one'}
        response = nova.Limits().get(request)
        self.assertStatusCode(response, 200)
        nc.tenant_absolute_limits.assert_called_once_with(request, reserved)
        self.assertEqual(response.json, {"id": "one"})

    #
    # Servers
    #
    @mock.patch.object(nova.api, 'nova')
    def test_server_create_missing(self, nc):
        request = self.mock_rest_request(body='''{"name": "hi"}''')
        response = nova.Servers().post(request)
        self.assertStatusCode(response, 400)
        self.assertEqual(response.json,
                         "missing required parameter 'source_id'")
        nc.server_create.assert_not_called()

    @mock.patch.object(nova.api, 'nova')
    def test_server_create_basic(self, nc):
        request = self.mock_rest_request(body='''{"name": "Ni!",
            "source_id": "image123", "flavor_id": "flavor123",
            "key_name": "sekrit", "user_data": "base64 yes",
            "security_groups": [{"name": "root"}]}
        ''')
        new = nc.server_create.return_value
        new.to_dict.return_value = {'id': 'server123'}
        new.id = 'server123'
        response = nova.Servers().post(request)
        self.assertStatusCode(response, 201)
        self.assertEqual(response.json, {"id": "server123"})
        self.assertEqual(response['location'], '/api/nova/servers/server123')
        nc.server_create.assert_called_once_with(
            request, 'Ni!', 'image123', 'flavor123', 'sekrit', 'base64 yes',
            [{'name': 'root'}]
        )

    @mock.patch.object(nova.api, 'nova')
    def test_server_get_single(self, nc):
        request = self.mock_rest_request()
        nc.server_get.return_value.to_dict.return_value = {'name': '1'}
        response = nova.Server().get(request, "1")
        self.assertStatusCode(response, 200)
        nc.server_get.assert_called_once_with(request, "1")

    #
    # Extensions
    #
    @mock.patch.object(nova.api, 'nova')
    @mock.patch.object(settings,
                       'OPENSTACK_NOVA_EXTENSIONS_BLACKLIST', ['baz'])
    def test_extension_list(self, nc):
        # Renamed from _test_extension_list: the leading underscore meant
        # the test runner never executed it, so the blacklist filtering
        # below was never actually verified.
        request = self.mock_rest_request()
        nc.list_extensions.return_value = [
            mock.Mock(**{'to_dict.return_value': {'name': 'foo'}}),
            mock.Mock(**{'to_dict.return_value': {'name': 'bar'}}),
            mock.Mock(**{'to_dict.return_value': {'name': 'baz'}}),
        ]
        response = nova.Extensions().get(request)
        self.assertStatusCode(response, 200)
        # 'baz' is blacklisted by the settings patch above and must be
        # dropped from the response.
        self.assertEqual(response.json,
                         {"items": [{"name": "foo"}, {"name": "bar"}]})
        nc.list_extensions.assert_called_once_with(request)

    #
    # Flavors
    #
    def test_get_extras_no(self):
        self._test_flavor_get_single(get_extras=False)

    def test_get_extras_yes(self):
        self._test_flavor_get_single(get_extras=True)

    def test_get_extras_default(self):
        self._test_flavor_get_single(get_extras=None)

    @mock.patch.object(nova.api, 'nova')
    def _test_flavor_get_single(self, nc, get_extras):
        if get_extras:
            request = self.mock_rest_request(GET={'get_extras': 'tRuE'})
        elif get_extras is None:
            # Absent query parameter must behave like get_extras=False.
            request = self.mock_rest_request()
            get_extras = False
        else:
            request = self.mock_rest_request(GET={'get_extras': 'fAlsE'})
        nc.flavor_get.return_value.to_dict.return_value = {'name': '1'}
        response = nova.Flavor().get(request, "1")
        self.assertStatusCode(response, 200)
        if get_extras:
            self.assertEqual(response.json, {"extras": {}, "name": "1"})
        else:
            self.assertEqual(response.json, {"name": "1"})
        nc.flavor_get.assert_called_once_with(request, "1",
                                              get_extras=get_extras)

    @mock.patch.object(nova.api, 'nova')
    def _test_flavor_list_public(self, nc, is_public=None):
        if is_public:
            request = self.mock_rest_request(GET={'is_public': 'tRuE'})
        elif is_public is None:
            request = self.mock_rest_request(GET={})
        else:
            request = self.mock_rest_request(GET={'is_public': 'fAlsE'})
        nc.flavor_list.return_value = [
            mock.Mock(**{'to_dict.return_value': {'id': '1'}}),
            mock.Mock(**{'to_dict.return_value': {'id': '2'}}),
        ]
        response = nova.Flavors().get(request)
        self.assertStatusCode(response, 200)
        self.assertEqual(response.json,
                         {"items": [{"id": "1"}, {"id": "2"}]})
        nc.flavor_list.assert_called_once_with(request, is_public=is_public,
                                               get_extras=False)

    def test_flavor_list_private(self):
        self._test_flavor_list_public(is_public=False)

    def test_flavor_list_public(self):
        self._test_flavor_list_public(is_public=True)

    def test_flavor_list_public_none(self):
        self._test_flavor_list_public(is_public=None)

    @mock.patch.object(nova.api, 'nova')
    def _test_flavor_list_extras(self, nc, get_extras=None):
        if get_extras:
            request = self.mock_rest_request(GET={'get_extras': 'tRuE'})
        elif get_extras is None:
            request = self.mock_rest_request(GET={})
            get_extras = False
        else:
            request = self.mock_rest_request(GET={'get_extras': 'fAlsE'})
        nc.flavor_list.return_value = [
            mock.Mock(**{'extras': {}, 'to_dict.return_value': {'id': '1'}}),
            mock.Mock(**{'extras': {}, 'to_dict.return_value': {'id': '2'}}),
        ]
        response = nova.Flavors().get(request)
        self.assertStatusCode(response, 200)
        if get_extras:
            self.assertEqual(response.json,
                             {"items": [{"extras": {}, "id": "1"},
                                        {"extras": {}, "id": "2"}]})
        else:
            self.assertEqual(response.json,
                             {"items": [{"id": "1"}, {"id": "2"}]})
        nc.flavor_list.assert_called_once_with(request, is_public=None,
                                               get_extras=get_extras)

    def test_flavor_list_extras_no(self):
        self._test_flavor_list_extras(get_extras=False)

    def test_flavor_list_extras_yes(self):
        self._test_flavor_list_extras(get_extras=True)

    def test_flavor_list_extras_absent(self):
        self._test_flavor_list_extras(get_extras=None)

    @mock.patch.object(nova.api, 'nova')
    def test_flavor_get_extra_specs(self, nc):
        request = self.mock_rest_request()
        nc.flavor_get_extras.return_value.to_dict.return_value = {'foo': '1'}
        response = nova.FlavorExtraSpecs().get(request, "1")
        self.assertStatusCode(response, 200)
        nc.flavor_get_extras.assert_called_once_with(request, "1", raw=True)

    @mock.patch.object(nova.api, 'nova')
    def test_flavor_edit_extra_specs(self, nc):
        request = self.mock_rest_request(
            body='{"updated": {"a": "1", "b": "2"}, "removed": ["c", "d"]}'
        )
        response = nova.FlavorExtraSpecs().patch(request, '1')
        self.assertStatusCode(response, 204)
        self.assertEqual(response.content, b'')
        nc.flavor_extra_set.assert_called_once_with(
            request, '1', {'a': '1', 'b': '2'}
        )
        nc.flavor_extra_delete.assert_called_once_with(
            request, '1', ['c', 'd']
        )

    @mock.patch.object(nova.api, 'nova')
    def test_aggregate_get_extra_specs(self, nc):
        request = self.mock_rest_request()
        nc.aggregate_get.return_value.metadata = {'a': '1', 'b': '2'}
        response = nova.AggregateExtraSpecs().get(request, "1")
        self.assertStatusCode(response, 200)
        self.assertEqual(response.json, {"a": "1", "b": "2"})
        nc.aggregate_get.assert_called_once_with(request, "1")

    @mock.patch.object(nova.api, 'nova')
    def test_aggregate_edit_extra_specs(self, nc):
        request = self.mock_rest_request(
            body='{"updated": {"a": "1", "b": "2"}, "removed": ["c", "d"]}'
        )
        response = nova.AggregateExtraSpecs().patch(request, '1')
        self.assertStatusCode(response, 204)
        self.assertEqual(response.content, b'')
        # Aggregate metadata removal is expressed as None values merged
        # into the same set_metadata call.
        nc.aggregate_set_metadata.assert_called_once_with(
            request, '1', {'a': '1', 'b': '2', 'c': None, 'd': None}
        )

    #
    # Services
    #
    @test.create_stubs({api.base: ('is_service_enabled',)})
    @mock.patch.object(nova.api, 'nova')
    def test_services_get(self, nc):
        request = self.mock_rest_request(GET={})
        nc.service_list.return_value = [
            mock.Mock(**{'to_dict.return_value': {'id': '1'}}),
            mock.Mock(**{'to_dict.return_value': {'id': '2'}})
        ]
        api.base.is_service_enabled(request, 'compute').AndReturn(True)
        self.mox.ReplayAll()
        response = nova.Services().get(request)
        self.assertStatusCode(response, 200)
        self.assertEqual(response.content.decode('utf-8'),
                         '{"items": [{"id": "1"}, {"id": "2"}]}')
        nc.service_list.assert_called_once_with(request)

    @test.create_stubs({api.base: ('is_service_enabled',)})
    def test_services_get_disabled(self):
        request = self.mock_rest_request(GET={})
        api.base.is_service_enabled(request, 'compute').AndReturn(False)
        self.mox.ReplayAll()
        response = nova.Services().get(request)
        # 501 Not Implemented when the compute service is unavailable.
        self.assertStatusCode(response, 501)
| apache-2.0 |
jamwt/pgasync | tests/test_types.py | 1 | 5941 | from testbase import *
import pgasync
from twisted.internet.defer import waitForDeferred as waitD, deferredGenerator
class TestNone(TestCase):
    """Round-trip a SQL NULL through a varchar column.

    Uses Twisted's deferredGenerator protocol: each deferred is wrapped in
    waitForDeferred, yielded, then unwrapped with getResult().
    """
    # Per-test table schema; `TestCase` presumably creates t_tbl from this
    # (defined in testbase, outside this file).
    schema = 'id int, nil varchar(1)'

    @deferredGenerator
    def testnull(self):
        id = getId()
        i = None  # Python None must map to SQL NULL and back.
        yield waitD(self.pool.runOperation(
            'insert into t_tbl VALUES(%d, %s)',(id, i,)))
        d = waitD(
            self.pool.runQuery(
                'select nil from t_tbl where id = %d', (id,))
        )
        yield d
        o = d.getResult()
        self.assertEquals(i, o[0][0])
class TestIntegers(TestCase):
    """Round-trip smallint, int and bigint values through PostgreSQL."""
    schema = 'id int, t_smallint smallint, t_int int, t_bigint bigint'

    @deferredGenerator
    def test_smallint(self):
        id = getId()
        i = 8
        yield waitD(
            self.pool.runOperation(
                'insert into t_tbl VALUES(%d, %d, NULL, NULL)',(id, i,)))
        d = waitD(
            self.pool.runQuery(
                'select t_smallint from t_tbl where id = %d', (id,)))
        yield d
        o = d.getResult()
        self.assertEquals(i, o[0][0])

    @deferredGenerator
    def test_int(self):
        id = getId()
        i = 66666  # exceeds smallint range, fits in int
        yield waitD(
            self.pool.runOperation(
                'insert into t_tbl VALUES(%d, NULL, %d, NULL)',(id, i,)))
        d = waitD(
            self.pool.runQuery(
                'select t_int from t_tbl where id = %d', (id,)))
        yield d
        o = d.getResult()
        self.assertEquals(i, o[0][0])

    @deferredGenerator
    def test_bigint(self):
        id = getId()
        i = 6000000000  # exceeds 32-bit range, requires bigint
        yield waitD(
            self.pool.runOperation(
                'insert into t_tbl VALUES(%d, NULL, NULL, %d)',(id, i,)))
        d = waitD(
            self.pool.runQuery(
                'select t_bigint from t_tbl where id = %d', (id,)))
        yield d
        o = d.getResult()
        self.assertEquals(i, o[0][0])
from decimal import Decimal
class TestFloats(TestCase):
    """Round-trip numeric, real and double precision columns.

    numeric is compared by exact Decimal value; real/double only check that
    the returned value is a Python float, since binary floats lose precision.
    """
    schema = 'id int, t_numeric numeric, t_real real, t_dp double precision'

    @deferredGenerator
    def test_numeric(self):
        id = getId()
        i = Decimal('3.291283719')
        yield waitD(
            self.pool.runOperation(
                'insert into t_tbl VALUES(%d, %s, NULL, NULL)',(id, i,)))
        d = waitD(
            self.pool.runQuery(
                'select t_numeric from t_tbl where id = %d', (id,)))
        yield d
        o = d.getResult()
        self.assertEquals(i, o[0][0])

    @deferredGenerator
    def test_real(self):
        id = getId()
        i = 3.291283719
        ti = type(i)
        yield waitD(
            self.pool.runOperation(
                'insert into t_tbl VALUES(%d, NULL, %s, NULL)',(id, i,)))
        d = waitD(
            self.pool.runQuery(
                'select t_real from t_tbl where id = %d', (id,)))
        yield d
        o = d.getResult()
        # Only the type is asserted: a 4-byte `real` cannot hold the full value.
        self.assertEquals(ti, type(o[0][0]))

    @deferredGenerator
    def test_double(self):
        id = getId()
        i = 3.291283719
        ti = type(i)
        yield waitD(
            self.pool.runOperation(
                'insert into t_tbl VALUES(%d, NULL, NULL, %s)',(id, i,)))
        d = waitD(
            self.pool.runQuery(
                'select t_dp from t_tbl where id = %d', (id,)))
        yield d
        o = d.getResult()
        # Type-only check, mirroring test_real.
        self.assertEquals(ti, type(o[0][0]))
class TestMoney(TestCase):
    """Round-trip a Decimal through the money type via pgasync.MONEY."""
    schema = 'id int, t_money money'

    @deferredGenerator
    def test_money(self):
        id = getId()
        i = Decimal('3.29')
        # money values must be wrapped in pgasync.MONEY for the adapter.
        yield waitD(self.pool.runOperation(
            'insert into t_tbl VALUES(%d, %s)',(id, pgasync.MONEY(i))))
        d = waitD(
            self.pool.runQuery(
                'select t_money from t_tbl where id = %d', (id,)))
        yield d
        o = d.getResult()
        self.assertEquals(i, o[0][0])
class TestStrings(TestCase):
    """Round-trip string-like types, including quoting/escaping edge cases."""
    schema = 'id int, t_varchar varchar(30), t_char char(5), t_text text, t_bytea bytea'

    @deferredGenerator
    def test_varchar(self):
        id = getId()
        i = "how's she doing; good)?'"  # embedded quotes exercise escaping
        yield waitD(
            self.pool.runOperation(
                'insert into t_tbl VALUES(%d, %s, NULL, NULL, NULL)',(id, i,)))
        d = waitD(self.pool.runQuery(
            'select t_varchar from t_tbl where id = %d', (id,)))
        yield d
        o = d.getResult()
        self.assertEquals(i, o[0][0])

    @deferredGenerator
    def test_char(self):
        id = getId()
        i = "'wh\\"  # quote + trailing backslash
        yield waitD(self.pool.runOperation(
            'insert into t_tbl VALUES(%d, NULL, %s, NULL, NULL)',(id, i,)))
        d = waitD(self.pool.runQuery(
            'select t_char from t_tbl where id = %d', (id,)))
        yield d
        o = d.getResult()
        # char(5) is blank-padded on output, so pad the expectation to width 5.
        self.assertEquals(i + (' ' * (5 - len(i))), o[0][0])

    @deferredGenerator
    def test_text(self):
        id = getId()
        i = "how's s\x13he doing; good)?'"  # includes a control character
        yield waitD(self.pool.runOperation(
            'insert into t_tbl VALUES(%d, NULL, NULL, %s, NULL)',(id, i,)))
        d = waitD(self.pool.runQuery(
            'select t_text from t_tbl where id = %d', (id,)))
        yield d
        o = d.getResult()
        self.assertEquals(i, o[0][0])

    @deferredGenerator
    def test_bytea(self):
        id = getId()
        import random
        # 100 KiB of arbitrary bytes; must be wrapped in pgasync.BINARY.
        # NOTE(review): Python 2-style byte string via chr(); not valid for py3.
        i = ''.join([chr(random.randint(0,255)) for x in range(100 * 1024)])
        yield waitD(self.pool.runOperation(
            'insert into t_tbl VALUES(%d, NULL, NULL, NULL, %s)',(id, pgasync.BINARY(i),)))
        d = waitD(self.pool.runQuery(
            'select t_bytea from t_tbl where id = %d', (id,)))
        yield d
        o = d.getResult()
        self.assertEquals(i, o[0][0])
import datetime
class TestDatesTimes(TestCase):
    """Round-trip date, time and timestamp through datetime objects."""
    schema = 'id int, t_date date, t_time time, t_timestamp timestamp'

    @deferredGenerator
    def test_date(self):
        id = getId()
        i = datetime.date(1980, 12, 28)
        yield waitD(self.pool.runOperation(
            'insert into t_tbl VALUES(%d, %s, NULL, NULL)',(id, i,)))
        d = waitD(self.pool.runQuery(
            'select t_date from t_tbl where id = %d', (id,)))
        yield d
        o = d.getResult()
        self.assertEquals(i, o[0][0])

    @deferredGenerator
    def test_time(self):
        id = getId()
        i = datetime.time(8,8,0,234)  # includes microseconds
        o = [None]  # NOTE(review): dead assignment, overwritten by getResult()
        yield waitD(self.pool.runOperation(
            'insert into t_tbl VALUES(%d, NULL, %s, NULL)',(id, i,)))
        d = waitD(self.pool.runQuery(
            'select t_time from t_tbl where id = %d', (id,)))
        yield d
        o = d.getResult()
        self.assertEquals(i, o[0][0])

    @deferredGenerator
    def test_timestamp(self):
        id = getId()
        i = datetime.datetime(1980, 12, 28, 8, 8, 0, 234)
        o = [None]  # NOTE(review): dead assignment, overwritten by getResult()
        yield waitD(self.pool.runOperation(
            'insert into t_tbl VALUES(%d, NULL, NULL, %s)',(id, i,)))
        d = waitD(self.pool.runQuery(
            'select t_timestamp from t_tbl where id = %d', (id,)))
        yield d
        o = d.getResult()
        self.assertEquals(i, o[0][0])
| bsd-3-clause |
uudiin/bleachbit | bleachbit/Diagnostic.py | 3 | 2719 | # vim: ts=4:sw=4:expandtab
# -*- coding: UTF-8 -*-
# BleachBit
# Copyright (C) 2008-2015 Andrew Ziem
# http://bleachbit.sourceforge.net
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
Show diagnostic information
"""
import Common
import locale
import os
import platform
import sys
if 'nt' == os.name:
from win32com.shell import shell
def diagnostic_info():
    """Return diagnostic information as a string.

    Collects BleachBit paths, locale, relevant environment variables and
    platform details; never raises, so it can always be shown to the user.
    """
    s = "BleachBit version %s" % Common.APP_VERSION
    try:
        import gtk
        s += '\nGTK version %s' % '.'.join([str(x) for x in gtk.gtk_version])
    except Exception:
        # GTK may be absent or fail to initialize (e.g., headless session);
        # diagnostics should still be produced without it.  Narrowed from a
        # bare `except:` so KeyboardInterrupt/SystemExit still propagate.
        pass
    s += "\nlocal_cleaners_dir = %s" % Common.local_cleaners_dir
    s += "\nlocale_dir = %s" % Common.locale_dir
    s += "\noptions_dir = %s" % Common.options_dir
    s += "\npersonal_cleaners_dir = %s" % Common.personal_cleaners_dir
    s += "\nsystem_cleaners_dir = %s" % Common.system_cleaners_dir
    s += "\nlocale.getdefaultlocale = %s" % str(locale.getdefaultlocale())
    # Default to no environment variables on any OS that is neither POSIX
    # nor Windows, instead of a NameError in the loop below.
    envs = ()
    if 'posix' == os.name:
        envs = ('DESKTOP_SESSION', 'LOGNAME', 'USER', 'SUDO_UID')
    if 'nt' == os.name:
        envs = ('APPDATA', 'LocalAppData', 'LocalAppDataLow', 'Music',
                'USERPROFILE', 'ProgramFiles', 'ProgramW6432', 'TMP')
    for env in envs:
        s += "\nos.getenv('%s') = %s" % (env, os.getenv(env))
    s += "\nos.path.expanduser('~') = %s" % os.path.expanduser('~')
    if sys.platform.startswith('linux'):
        if hasattr(platform, 'linux_distribution'):
            s += "\nplatform.linux_distribution() = %s" % str(
                platform.linux_distribution())
        else:
            s += "\nplatform.dist() = %s" % str(platform.dist())
    if 'nt' == os.name:
        # Label fixed: the value printed is platform.win32_ver()[1],
        # not platform.win32_ver[1]().
        s += "\nplatform.win32_ver()[1] = %s" % platform.win32_ver()[1]
    s += "\nplatform.platform = %s" % platform.platform()
    s += "\nsys.argv = %s" % sys.argv
    s += "\nsys.executable = %s" % sys.executable
    s += "\nsys.version = %s" % sys.version
    if 'nt' == os.name:
        s += "\nwin32com.shell.shell.IsUserAnAdmin() = %s" % shell.IsUserAnAdmin()
    s += "\n__file__ = %s" % __file__
    return s
| gpl-3.0 |
saradbowman/osf.io | addons/base/serializer.py | 36 | 8052 | import abc
from framework.auth.decorators import collect_auth
from website.util import api_url_for, web_url_for
class AddonSerializer(object):
    """Abstract base class for serializing add-on settings to JSON-able dicts.

    Subclasses must supply the add-on's short name, its URL map, ownership
    checks and the credentials owner.
    """
    # NOTE(review): Python 2-style ABC declaration; under py3 this would be
    # `class AddonSerializer(metaclass=abc.ABCMeta)`.
    __metaclass__ = abc.ABCMeta

    # TODO take addon_node_settings, addon_user_settings
    def __init__(self, node_settings=None, user_settings=None):
        self.node_settings = node_settings
        self.user_settings = user_settings

    @abc.abstractproperty
    def addon_short_name(self):
        """Short machine name of the add-on (e.g. 'dropbox')."""
        pass

    @abc.abstractproperty
    def addon_serialized_urls(self):
        """Add-on specific URL map merged into `serialized_urls`."""
        pass

    @abc.abstractproperty
    def serialized_urls(self):
        """Complete URL map exposed to the front end."""
        pass

    @abc.abstractproperty
    def user_is_owner(self):
        """Whether the current user owns the authorizing credentials."""
        pass

    @abc.abstractproperty
    def credentials_owner(self):
        """The user object that owns the add-on credentials, if any."""
        pass

    @property
    def serialized_node_settings(self):
        """Serialize node-level settings, including auth state and owner info."""
        result = {
            'nodeHasAuth': self.node_settings.has_auth,
            'userIsOwner': self.user_is_owner,
            'urls': self.serialized_urls,
        }
        if self.user_settings:
            result['userHasAuth'] = self.user_settings.has_auth
        else:
            result['userHasAuth'] = False
        if self.node_settings.has_auth:
            owner = self.credentials_owner
            if owner:
                result['urls']['owner'] = web_url_for('profile_view_id',
                                                      uid=owner._primary_key)
                result['ownerName'] = owner.fullname
        return result

    @property
    def serialized_user_settings(self):
        # Base implementation; subclasses extend with account details.
        return {}
class OAuthAddonSerializer(AddonSerializer):
    """Serializer base for OAuth-backed add-ons (external accounts)."""

    @property
    def credentials_owner(self):
        return self.user_settings.owner if self.user_settings else None

    @property
    def user_is_owner(self):
        # True only when the node's linked external account belongs to the
        # current user's set of authorized accounts.
        if self.user_settings is None or self.node_settings is None:
            return False
        user_accounts = self.user_settings.external_accounts.all()
        return bool(
            self.node_settings.has_auth and
            self.node_settings.external_account in user_accounts
        )

    @property
    def serialized_urls(self):
        ret = self.addon_serialized_urls
        # Make sure developer returns set of needed urls
        for url in self.REQUIRED_URLS:
            msg = "addon_serialized_urls must include key '{0}'".format(url)
            assert url in ret, msg
        ret.update({'settings': web_url_for('user_addons')})
        return ret

    @property
    def serialized_accounts(self):
        """All of the user's external accounts, serialized."""
        return [
            self.serialize_account(each)
            for each in self.user_settings.external_accounts.all()
        ]

    @property
    def serialized_user_settings(self):
        retval = super(OAuthAddonSerializer, self).serialized_user_settings
        retval['accounts'] = []
        if self.user_settings:
            retval['accounts'] = self.serialized_accounts
        return retval

    def serialize_account(self, external_account):
        """Serialize a single external account plus the nodes it authorizes.

        Returns None when no account is given.
        """
        if external_account is None:
            return None
        return {
            'id': external_account._id,
            'provider_id': external_account.provider_id,
            'provider_name': external_account.provider_name,
            'provider_short_name': external_account.provider,
            'display_name': external_account.display_name,
            'profile_url': external_account.profile_url,
            'nodes': [
                self.serialize_granted_node(node)
                for node in self.user_settings.get_attached_nodes(
                    external_account=external_account
                )
            ],
        }

    @collect_auth
    def serialize_granted_node(self, node, auth):
        """Serialize a node this account is attached to.

        The title is withheld when `auth` cannot view the node.
        """
        node_settings = node.get_addon(
            self.user_settings.oauth_provider.short_name
        )
        serializer = node_settings.serializer(node_settings=node_settings)
        urls = serializer.addon_serialized_urls
        urls['view'] = node.url
        return {
            'id': node._id,
            'title': node.title if node.can_view(auth) else None,
            'urls': urls,
        }
class StorageAddonSerializer(OAuthAddonSerializer):
    """Serializer base for storage-providing add-ons (folders + files)."""

    REQUIRED_URLS = ('auth', 'importAuth', 'folders', 'files', 'config', 'deauthorize', 'accounts')

    @abc.abstractmethod
    def credentials_are_valid(self, user_settings, client):
        """Return whether *user_settings* holds usable provider credentials.

        ``client`` is an optional pre-built provider client.  The parameter
        was added to this abstract declaration because `serialize_settings`
        below always invokes the method with two arguments; existing
        subclasses already accept it.
        """
        pass

    @abc.abstractmethod
    def serialized_folder(self, node_settings):
        """Return a dict describing the node's linked folder."""
        pass

    def serialize_settings(self, node_settings, current_user, client=None):
        """Serialize node-level storage settings for the given viewer."""
        user_settings = node_settings.user_settings
        self.node_settings = node_settings
        current_user_settings = current_user.get_addon(self.addon_short_name)
        user_is_owner = user_settings is not None and user_settings.owner == current_user
        valid_credentials = self.credentials_are_valid(user_settings, client)
        result = {
            'userIsOwner': user_is_owner,
            'nodeHasAuth': node_settings.has_auth,
            'urls': self.serialized_urls,
            'validCredentials': valid_credentials,
            'userHasAuth': current_user_settings is not None and current_user_settings.has_auth,
        }
        if node_settings.has_auth:
            # Add owner's profile URL
            result['urls']['owner'] = web_url_for(
                'profile_view_id',
                uid=user_settings.owner._id
            )
            result['ownerName'] = user_settings.owner.fullname
            # Show available folders
            if node_settings.folder_id is None:
                result['folder'] = {'name': None, 'path': None}
            elif valid_credentials:
                result['folder'] = self.serialized_folder(node_settings)
        return result
class CitationsAddonSerializer(OAuthAddonSerializer):
    """Serializer base for citation add-ons (e.g. Mendeley, Zotero)."""

    REQUIRED_URLS = ('importAuth', 'folders', 'config', 'deauthorize', 'accounts')

    # Synthetic root folder shown above the provider's real folder tree.
    serialized_root_folder = {
        'name': 'All Documents',
        'provider_list_id': None,
        'id': 'ROOT',
        'parent_list_id': '__',
        'kind': 'folder',
    }

    @property
    def serialized_urls(self):
        external_account = self.node_settings.external_account
        ret = {
            'auth': api_url_for('oauth_connect',
                                service_name=self.addon_short_name),
            'files': self.node_settings.owner.url,
        }
        if external_account and external_account.profile_url:
            ret['owner'] = external_account.profile_url
        # Merge in the base map (also validates REQUIRED_URLS).
        ret.update(super(CitationsAddonSerializer, self).serialized_urls)
        return ret

    @property
    def serialized_node_settings(self):
        result = super(CitationsAddonSerializer, self).serialized_node_settings
        result['folder'] = {
            'name': self.node_settings.fetch_folder_name
        }
        return result

    @property
    def credentials_owner(self):
        return self.node_settings.user_settings.owner

    def serialize_folder(self, folder):
        """Serialize one provider folder with its citation-list fetch URL."""
        return {
            'data': folder,
            'kind': 'folder',
            'name': folder['name'],
            'id': folder['id'],
            'urls': {
                'fetch': self.node_settings.owner.api_url_for(
                    '{0}_citation_list'.format(self.addon_short_name),
                    list_id=folder['id']
                ),
            },
        }

    @property
    def addon_serialized_urls(self):
        node = self.node_settings.owner
        return {
            'importAuth': node.api_url_for('{0}_import_auth'.format(self.addon_short_name)),
            'folders': node.api_url_for('{0}_citation_list'.format(self.addon_short_name)),
            'config': node.api_url_for('{0}_set_config'.format(self.addon_short_name)),
            'deauthorize': node.api_url_for('{0}_deauthorize_node'.format(self.addon_short_name)),
            'accounts': node.api_url_for('{0}_account_list'.format(self.addon_short_name)),
        }

    def serialize_citation(self, citation):
        """Serialize a single CSL citation record."""
        return {
            'csl': citation,
            'kind': 'file',
            'id': citation['id'],
        }
| apache-2.0 |
adustm/mbed | tools/export/cmsis/__init__.py | 12 | 5808 | import os
from os.path import sep, join, exists
from itertools import groupby
from xml.etree.ElementTree import Element, tostring
import ntpath
import re
import json
from tools.arm_pack_manager import Cache
from tools.targets import TARGET_MAP
from tools.export.exporters import Exporter, TargetNotSupportedException
class fileCMSIS():
    """Descriptor for one file entry in a cpdsc project file.

    Derives the CMSIS <file> category from the file extension
    (case-insensitively) and records the file's location and display name.
    """
    # Extension -> CMSIS file category.
    file_types = {'.cpp': 'sourceCpp', '.c': 'sourceC', '.s': 'sourceAsm',
                  '.obj': 'object', '.o': 'object', '.lib': 'library',
                  '.ar': 'linkerScript', '.h': 'header', '.sct': 'linkerScript'}

    def __init__(self, loc, name):
        extension = os.path.splitext(loc)[1]
        self.type = self.file_types[extension.lower()]
        self.loc = loc
        self.name = name
class DeviceCMSIS():
    """CMSIS Device class
    Encapsulates target information retrieved by arm-pack-manager"""
    CACHE = Cache(True, False)

    def __init__(self, target):
        target_info = self.check_supported(target)
        if not target_info:
            raise TargetNotSupportedException("Target not supported in CMSIS pack")
        self.url = target_info['pdsc_file']
        self.pack_url, self.pack_id = ntpath.split(self.url)
        self.dname = target_info["_cpu_name"]
        self.core = target_info["_core"]
        self.dfpu = target_info['processor']['fpu']
        self.debug, self.dvendor = self.vendor_debug(target_info['vendor'])
        self.dendian = target_info['processor'].get('endianness','Little-endian')
        self.debug_svd = target_info.get('debug', '')
        self.compile_header = target_info['compile']['header']
        self.target_info = target_info

    @staticmethod
    def check_supported(target):
        """Look up the CMSIS pack entry for a target.

        Returns the pack's device dict (annotated with _cpu_name and _core)
        or False when the target is not covered by any pack.
        """
        t = TARGET_MAP[target]
        try:
            cpu_name = t.device_name
            target_info = DeviceCMSIS.CACHE.index[cpu_name]
        # Target does not have device name or pdsc file
        # (narrowed from a bare `except:` so KeyboardInterrupt/SystemExit
        # are not swallowed; lookup failures are AttributeError/KeyError)
        except Exception:
            try:
                # Try to find the core as a generic CMSIS target
                cpu_name = DeviceCMSIS.cpu_cmsis(t.core)
                target_info = DeviceCMSIS.CACHE.index[cpu_name]
            except Exception:
                return False
        target_info["_cpu_name"] = cpu_name
        target_info["_core"] = t.core
        return target_info

    def vendor_debug(self, vendor):
        """Reads the vendor from a PDSC <dvendor> tag.
        This tag contains some additional numeric information that is meaningless
        for our purposes, so we use a regex to filter.

        Positional arguments:
        Vendor - information in <dvendor> tag scraped from ArmPackManager

        Returns a tuple of (debugger, vendor)
        """
        reg = r"([\w\s]+):?\d*?"  # raw string: \w and \s are regex escapes
        m = re.search(reg, vendor)
        vendor_match = m.group(1) if m else None
        debug_map = {
            'STMicroelectronics': 'ST-Link',
            'Silicon Labs': 'J-LINK',
            'Nuvoton': 'NULink'
        }
        return debug_map.get(vendor_match, "CMSIS-DAP"), vendor_match

    @staticmethod
    def cpu_cmsis(cpu):
        """
        Transforms information from targets.json to the way the generic cores are named
        in CMSIS PDSC files.
        Ex:
        Cortex-M4F => ARMCM4_FP, Cortex-M0+ => ARMCM0P
        Returns formatted CPU
        """
        cpu = cpu.replace("Cortex-", "ARMC")
        cpu = cpu.replace("+", "P")
        cpu = cpu.replace("F", "_FP")
        return cpu
class CMSIS(Exporter):
    """Exporter producing a CMSIS .cpdsc project description."""

    NAME = 'cmsis'
    TOOLCHAIN = 'ARM'
    TARGETS = [target for target, obj in TARGET_MAP.iteritems()
               if "ARM" in obj.supported_toolchains]

    def make_key(self, src):
        """turn a source file into its group name"""
        group = src.name.split(sep)[0]
        if group == ".":
            # Files at the project root are grouped under the export dir name.
            group = os.path.basename(os.path.realpath(self.export_dir))
        return group

    def group_project_files(self, sources, root_element):
        """Recursively group the source files by their encompassing directory"""
        ordered = sorted(sources, key=self.make_key)
        for group, members in groupby(ordered, self.make_key):
            deferred = []
            for src in members:
                parts = src.name.split(sep)
                if len(parts) == 2:
                    # Direct child of this group: emit a <file> element.
                    root_element.append(Element('file',
                                                attrib={'category': src.type,
                                                        'name': src.loc}))
                else:
                    # Deeper file: strip the leading path component and recurse.
                    src.name = os.path.join(*parts[1:])
                    deferred.append(src)
            if deferred:
                group_element = Element('group', attrib={'name': group})
                root_element.append(self.group_project_files(deferred,
                                                             group_element))
        return root_element

    def generate(self):
        """Render the cpdsc template with the grouped project files."""
        res = self.resources
        paths = (res.headers + res.s_sources + res.c_sources +
                 res.cpp_sources + res.objects + res.libraries +
                 [res.linker_script])
        srcs = [fileCMSIS(path, path) for path in paths if path]
        ctx = {
            'name': self.project_name,
            'project_files': tostring(self.group_project_files(srcs,
                                                               Element('files'))),
            'device': DeviceCMSIS(self.target),
            'date': ''
        }
        # TODO: find how to keep prettyxml from adding xml version to this blob
        self.gen_file('cmsis/cpdsc.tmpl', ctx, 'project.cpdsc')
| apache-2.0 |
mcdope/powerline-shell | segments/git.py | 1 | 2501 | import re
import subprocess
def get_git_status():
    """Run ``git status`` and summarize the repository state.

    Returns a 4-tuple:
      (has_pending_commits, has_changed_files, has_untracked_files,
       origin_position) where origin_position is a display string such as
      " 2⇡" (ahead), " 1⇣" (behind) or both when diverged.
    """
    # 'os' is used below but not imported at the top of this segment file;
    # presumably powerline-shell's segment concatenation provides it -- the
    # local import makes the function self-contained either way.
    import os
    has_pending_commits = True
    has_untracked_files = False
    has_changed_files = False
    origin_position = ""
    # LANG=C forces untranslated git messages so the string matching works.
    output = subprocess.Popen(['git', 'status', '--ignore-submodules'],
                              env={"LANG": "C", "HOME": os.getenv("HOME")},
                              stdout=subprocess.PIPE).communicate()[0]
    for line in output.split('\n'):
        origin_status = re.findall(
            r"Your branch is (ahead|behind).*?(\d+) comm", line)
        diverged_status = re.findall(
            r"and have (\d+) and (\d+) different commits each", line)
        if origin_status:
            origin_position = " %d" % int(origin_status[0][1])
            if origin_status[0][0] == 'behind':
                origin_position += u'\u21E3'
            if origin_status[0][0] == 'ahead':
                origin_position += u'\u21E1'
        if diverged_status:
            # Diverged from origin: show both ahead and behind counts.
            origin_position = " %d%c %d%c" % (int(diverged_status[0][0]),
                                              u'\u21E1',
                                              int(diverged_status[0][1]),
                                              u'\u21E3')
        if line.find('nothing to commit') >= 0:
            has_pending_commits = False
        if line.find('Changes not staged for commit') >= 0:
            has_changed_files = True
        if line.find('Untracked files') >= 0:
            has_untracked_files = True
    return has_pending_commits, has_changed_files, has_untracked_files, origin_position
def add_git_segment():
    """Append a git-status segment to the powerline prompt.

    NOTE(review): relies on module-level ``Color`` and ``powerline`` objects
    that are not defined in this file -- presumably injected when
    powerline-shell concatenates its segment files; confirm.
    """
    # Determine the current branch; empty output means detached HEAD.
    # See http://git-blame.blogspot.com/2013/06/checking-current-branch-programatically.html
    p = subprocess.Popen(['git', 'symbolic-ref', '-q', 'HEAD'], stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    out, err = p.communicate()
    if 'Not a git repo' in err:
        # Not inside a repository: emit no segment at all.
        return
    branch = u'\ue0a0 '  # powerline branch glyph
    if out:
        branch += out[len('refs/heads/'):].rstrip()
    else:
        branch += '(Detached)'
    has_pending_commits, has_changed_files, has_untracked_files, origin_position = get_git_status()
    branch += origin_position
    if has_untracked_files:
        branch += u' \u271A'  # heavy cross: untracked files present
    # Default colors assume a clean repository...
    bg = Color.REPO_CLEAN_BG
    fg = Color.REPO_CLEAN_FG
    if has_pending_commits:
        # ...overridden when there is anything left to commit.
        bg = Color.REPO_DIRTY_BG
        fg = Color.REPO_DIRTY_FG
        branch += u' \u2718'  # ballot X: dirty
        if not has_changed_files and not has_untracked_files and has_pending_commits:
            # Everything is staged and ready to commit.
            bg = Color.REPO_READY_BG
            fg = Color.REPO_READY_FG
    else:
        branch += u' \u2714'  # check mark: clean
    powerline.append(' %s ' % branch, fg, bg)
# Best-effort: skip the segment when git is unavailable or fails.
try:
    add_git_segment()
except (OSError, subprocess.CalledProcessError):
    pass
| mit |
kodeaffe/jw2epub | docs/conf.py | 1 | 5108 | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
#
# JW2Epub documentation build configuration file, created by
# sphinx-quickstart on Sun Mar 12 09:52:19 2017.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#
import os
import sys
# Make the project package importable so autodoc can find it.
path = os.path.abspath('..')
sys.path.insert(0, path)

# -- General configuration ------------------------------------------------

# If your documentation needs a minimal Sphinx version, state it here.
#
# needs_sphinx = '1.0'

# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = ['sphinx.ext.autodoc',
    'sphinx.ext.doctest',
    'sphinx.ext.intersphinx',
    'sphinx.ext.todo']

# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']

# The suffix(es) of source filenames.
# You can specify multiple suffix as a list of string:
#
# source_suffix = ['.rst', '.md']
source_suffix = '.rst'

# The master toctree document.
master_doc = 'index'

# General information about the project.
project = 'JW2Epub'
copyright = '2017, kodeaffe'
author = 'kodeaffe'

# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = '0.1'
# The full version, including alpha/beta/rc tags.
release = '0.1'

# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
language = None

# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# This patterns also effect to html_static_path and html_extra_path
exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store']

# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'

# If true, `todo` and `todoList` produce output, else they produce nothing.
todo_include_todos = True

# -- Options for HTML output ----------------------------------------------

# The theme to use for HTML and HTML Help pages.  See the documentation for
# a list of builtin themes.
#
html_theme = 'alabaster'

# Theme options are theme-specific and customize the look and feel of a theme
# further.  For a list of options available for each theme, see the
# documentation.
#
# html_theme_options = {}

# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']

# -- Options for HTMLHelp output ------------------------------------------

# Output file base name for HTML help builder.
htmlhelp_basename = 'JW2Epubdoc'

# -- Options for LaTeX output ---------------------------------------------

latex_elements = {
    # The paper size ('letterpaper' or 'a4paper').
    #
    # 'papersize': 'letterpaper',

    # The font size ('10pt', '11pt' or '12pt').
    #
    # 'pointsize': '10pt',

    # Additional stuff for the LaTeX preamble.
    #
    # 'preamble': '',

    # Latex figure (float) alignment
    #
    # 'figure_align': 'htbp',
}

# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
#  author, documentclass [howto, manual, or own class]).
latex_documents = [
    (master_doc, 'JW2Epub.tex', 'JW2Epub Documentation',
     'kodeaffe', 'manual'),
]

# -- Options for manual page output ---------------------------------------

# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
    (master_doc, 'jw2epub', 'JW2Epub Documentation',
     [author], 1)
]

# -- Options for Texinfo output -------------------------------------------

# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
#  dir menu entry, description, category)
texinfo_documents = [
    (master_doc, 'JW2Epub', 'JW2Epub Documentation',
     author, 'JW2Epub', 'One line description of project.',
     'Miscellaneous'),
]

# Example configuration for intersphinx: refer to the Python standard library.
# NOTE(review): this is the legacy unnamed-key intersphinx form; newer Sphinx
# expects {'python': ('https://docs.python.org/', None)} -- confirm against
# the Sphinx version in use before changing.
intersphinx_mapping = {'https://docs.python.org/': None}

# Enable global TOC in all sidebars
html_sidebars = {'**': [
    'globaltoc.html', 'localtoc.html', 'relations.html', 'sourcelink.html', 'searchbox.html'
]}
| gpl-3.0 |
tmerrick1/spack | var/spack/repos/builtin/packages/minife/package.py | 5 | 2518 | ##############################################################################
# Copyright (c) 2013-2018, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://github.com/spack/spack
# Please also see the NOTICE and LICENSE files for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License (as
# published by the Free Software Foundation) version 2.1, February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from spack import *
class Minife(MakefilePackage):
    """Proxy Application. MiniFE is a proxy application
    for unstructured implicit finite element codes.
    """

    homepage = "https://mantevo.org/"
    url = "https://github.com/Mantevo/miniFE/archive/v2.1.0.tar.gz"

    tags = ['proxy-app', 'ecp-proxy-app']

    version('2.1.0', '930a6b99c09722428a6f4d795b506a62')

    # Each parallelism flavor lives in its own source subdirectory.
    variant('build', default='ref', description='Type of Parallelism',
            values=('ref', 'openmp_ref', 'qthreads', 'kokkos'))

    depends_on('mpi')
    depends_on('qthreads', when='build=qthreads')

    @property
    def build_targets(self):
        # Build in the subdirectory matching the chosen 'build' variant,
        # forcing the MPI compiler wrappers.
        targets = [
            '--directory={0}/src'.format(self.spec.variants['build'].value),
            'CXX={0}'.format(self.spec['mpi'].mpicxx),
            'CC={0}'.format(self.spec['mpi'].mpicc)
        ]
        return targets

    def edit(self, spec, prefix):
        # Replace the hard-coded GCC OpenMP flag with the active compiler's.
        makefile = FileFilter('{0}/src/Makefile'.format(
            self.spec.variants['build'].value))
        makefile.filter('-fopenmp', self.compiler.openmp_flag, string=True)

    def install(self, spec, prefix):
        # Upstream has no install target: copy the built binary manually.
        mkdirp(prefix.bin)
        install('{0}/src/miniFE.x'.format(self.spec.variants['build'].value),
                prefix.bin)
| lgpl-2.1 |
shashank971/edx-platform | lms/djangoapps/verify_student/migrations/0008_auto__del_field_verificationcheckpoint_checkpoint_name__add_field_veri.py | 84 | 11766 | # -*- coding: utf-8 -*-
from south.utils import datetime_utils as datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
    """South schema migration (auto-generated): replaces
    VerificationCheckpoint.checkpoint_name with checkpoint_location and
    drops VerificationStatus.location_id. The ``models`` dict below is a
    frozen snapshot of the ORM at generation time -- do not hand-edit it.
    """

    def forwards(self, orm):
        # Removing unique constraint on 'VerificationCheckpoint', fields ['course_id', 'checkpoint_name']
        db.delete_unique('verify_student_verificationcheckpoint', ['course_id', 'checkpoint_name'])

        # Deleting field 'VerificationCheckpoint.checkpoint_name'
        db.delete_column('verify_student_verificationcheckpoint', 'checkpoint_name')

        # Adding field 'VerificationCheckpoint.checkpoint_location'
        db.add_column('verify_student_verificationcheckpoint', 'checkpoint_location',
                      self.gf('django.db.models.fields.CharField')(default='', max_length=255),
                      keep_default=False)

        # Adding unique constraint on 'VerificationCheckpoint', fields ['course_id', 'checkpoint_location']
        db.create_unique('verify_student_verificationcheckpoint', ['course_id', 'checkpoint_location'])

        # Deleting field 'VerificationStatus.location_id'
        db.delete_column('verify_student_verificationstatus', 'location_id')

    def backwards(self, orm):
        # Removing unique constraint on 'VerificationCheckpoint', fields ['course_id', 'checkpoint_location']
        db.delete_unique('verify_student_verificationcheckpoint', ['course_id', 'checkpoint_location'])

        # User chose to not deal with backwards NULL issues for 'VerificationCheckpoint.checkpoint_name'
        raise RuntimeError("Cannot reverse this migration. 'VerificationCheckpoint.checkpoint_name' and its values cannot be restored.")

        # NOTE: everything below the raise is unreachable by design; South
        # keeps it as a template for writing a real backwards migration.
        # The following code is provided here to aid in writing a correct migration
        # Adding field 'VerificationCheckpoint.checkpoint_name'
        db.add_column('verify_student_verificationcheckpoint', 'checkpoint_name',
                      self.gf('django.db.models.fields.CharField')(max_length=32),
                      keep_default=False)

        # Deleting field 'VerificationCheckpoint.checkpoint_location'
        db.delete_column('verify_student_verificationcheckpoint', 'checkpoint_location')

        # Adding unique constraint on 'VerificationCheckpoint', fields ['course_id', 'checkpoint_name']
        db.create_unique('verify_student_verificationcheckpoint', ['course_id', 'checkpoint_name'])

        # Adding field 'VerificationStatus.location_id'
        db.add_column('verify_student_verificationstatus', 'location_id',
                      self.gf('django.db.models.fields.CharField')(max_length=255, null=True, blank=True),
                      keep_default=False)

    # Frozen ORM snapshot used by South at migration time.
    models = {
        'auth.group': {
            'Meta': {'object_name': 'Group'},
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
            'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
        },
        'auth.permission': {
            'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
            'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
        },
        'auth.user': {
            'Meta': {'object_name': 'User'},
            'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
            'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
            'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
            'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
            'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
            'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
            'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
            'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
            'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
        },
        'contenttypes.contenttype': {
            'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
            'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
        },
        'reverification.midcoursereverificationwindow': {
            'Meta': {'object_name': 'MidcourseReverificationWindow'},
            'course_id': ('xmodule_django.models.CourseKeyField', [], {'max_length': '255', 'db_index': 'True'}),
            'end_date': ('django.db.models.fields.DateTimeField', [], {'default': 'None', 'null': 'True', 'blank': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'start_date': ('django.db.models.fields.DateTimeField', [], {'default': 'None', 'null': 'True', 'blank': 'True'})
        },
        'verify_student.incoursereverificationconfiguration': {
            'Meta': {'object_name': 'InCourseReverificationConfiguration'},
            'change_date': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
            'changed_by': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']", 'null': 'True', 'on_delete': 'models.PROTECT'}),
            'enabled': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'})
        },
        'verify_student.skippedreverification': {
            'Meta': {'unique_together': "(('user', 'course_id'),)", 'object_name': 'SkippedReverification'},
            'checkpoint': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'skipped_checkpoint'", 'to': "orm['verify_student.VerificationCheckpoint']"}),
            'course_id': ('xmodule_django.models.CourseKeyField', [], {'max_length': '255', 'db_index': 'True'}),
            'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"})
        },
        'verify_student.softwaresecurephotoverification': {
            'Meta': {'ordering': "['-created_at']", 'object_name': 'SoftwareSecurePhotoVerification'},
            'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'db_index': 'True', 'blank': 'True'}),
            'display': ('django.db.models.fields.BooleanField', [], {'default': 'True', 'db_index': 'True'}),
            'error_code': ('django.db.models.fields.CharField', [], {'max_length': '50', 'blank': 'True'}),
            'error_msg': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
            'face_image_url': ('django.db.models.fields.URLField', [], {'max_length': '255', 'blank': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
            'photo_id_image_url': ('django.db.models.fields.URLField', [], {'max_length': '255', 'blank': 'True'}),
            'photo_id_key': ('django.db.models.fields.TextField', [], {'max_length': '1024'}),
            'receipt_id': ('django.db.models.fields.CharField', [], {'default': "'4ae40fdd-c39a-4a23-a593-41beec90013b'", 'max_length': '255', 'db_index': 'True'}),
            'reviewing_service': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
            'reviewing_user': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'related_name': "'photo_verifications_reviewed'", 'null': 'True', 'to': "orm['auth.User']"}),
            'status': ('model_utils.fields.StatusField', [], {'default': "'created'", 'max_length': '100', u'no_check_for_status': 'True'}),
            'status_changed': ('model_utils.fields.MonitorField', [], {'default': 'datetime.datetime.now', u'monitor': "u'status'"}),
            'submitted_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'db_index': 'True'}),
            'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'db_index': 'True', 'blank': 'True'}),
            'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"}),
            'window': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['reverification.MidcourseReverificationWindow']", 'null': 'True'})
        },
        'verify_student.verificationcheckpoint': {
            'Meta': {'unique_together': "(('course_id', 'checkpoint_location'),)", 'object_name': 'VerificationCheckpoint'},
            'checkpoint_location': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
            'course_id': ('xmodule_django.models.CourseKeyField', [], {'max_length': '255', 'db_index': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'photo_verification': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['verify_student.SoftwareSecurePhotoVerification']", 'symmetrical': 'False'})
        },
        'verify_student.verificationstatus': {
            'Meta': {'object_name': 'VerificationStatus'},
            'checkpoint': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'checkpoint_status'", 'to': "orm['verify_student.VerificationCheckpoint']"}),
            'error': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'response': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
            'status': ('django.db.models.fields.CharField', [], {'max_length': '32', 'db_index': 'True'}),
            'timestamp': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
            'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"})
        }
    }

    complete_apps = ['verify_student']
bitmingw/FindYourSister | sloth/sloth/core/labeltool.py | 3 | 15861 | """
This is the core labeltool module.
"""
import os
import sys
from PyQt4.QtGui import *
from PyQt4.QtCore import *
from sloth.annotations.model import *
from sloth.annotations.container import AnnotationContainerFactory, AnnotationContainer
from sloth.conf import config
from sloth.core.cli import LaxOptionParser, BaseCommand
from sloth.core.utils import import_callable
from sloth import VERSION
from sloth.core.commands import get_commands
from sloth.gui import MainWindow
import logging
LOG = logging.getLogger(__name__)

# okapy is an optional dependency: only needed for video support
# (see addVideoFile below); silently absent otherwise.
try:
    import okapy.videoio as okv
except ImportError:
    pass
class LabelTool(QObject):
    """
    This is the main label tool object. It stores the state of the tool, i.e.
    the current annotations, the containers responsible for loading and saving
    etc.

    It is also responsible for parsing command line options, call respective
    commands or start the gui.
    """
    # Command-line usage template; %prog is substituted by optparse.
    usage = "\n" + \
            " %prog [options] [filename]\n\n" + \
            " %prog subcommand [options] [args]\n"
    help_text = "Sloth can be started in two different ways. If the first argument\n" + \
                "is any of the following subcommands, this command is executed. Otherwise the\n" + \
                "sloth GUI is started and the optionally given label file is loaded.\n" + \
                "\n" + \
                "Type '%s help <subcommand>' for help on a specific subcommand.\n\n"

    # Signals
    statusMessage = pyqtSignal(str)
    annotationsLoaded = pyqtSignal()
    pluginLoaded = pyqtSignal(QAction)

    # This still emits a QModelIndex, because Qt cannot handle emiting
    # a derived class instead of a base class, i.e. ImageFileModelItem
    # instead of ModelItem
    currentImageChanged = pyqtSignal()

    # TODO clean up --> prefix all members with _
    def __init__(self, parent=None):
        """
        Constructor. Does nothing except resetting everything.
        Initialize the labeltool with either::

            execute_from_commandline()

        or::

            init_from_config()
        """
        QObject.__init__(self, parent)
        self._container_factory = None          # set by init_from_config()
        self._container = AnnotationContainer() # current load/save backend
        self._current_image = None              # currently displayed image item
        self._model = AnnotationModel([])       # empty annotation model
        self._mainwindow = None                 # created lazily in execute_from_commandline()
def main_help_text(self):
"""
Returns the labeltool's main help text, as a string.
Includes a list of all available subcommands.
"""
usage = self.help_text % self.prog_name
usage += 'Available subcommands:\n'
commands = list(get_commands().keys())
commands.sort()
for cmd in commands:
usage += ' %s\n' % cmd
return usage
def execute_from_commandline(self, argv=None):
"""
TODO
"""
self.argv = argv or sys.argv[:]
self.prog_name = os.path.basename(argv[0])
# Preprocess options to extract --settings and --pythonpath.
# These options could affect the commands that are available, so they
# must be processed early.
parser = LaxOptionParser(usage=self.usage,
version=VERSION,
option_list=BaseCommand.option_list)
try:
options, args = parser.parse_args(self.argv)
except:
pass # Ignore any option errors at this point.
# Initialize logging
loglevel = (logging.CRITICAL, logging.ERROR, logging.WARNING, logging.INFO, logging.DEBUG)[int(options.verbosity)]
logging.basicConfig(level=loglevel,
format='%(asctime)s %(levelname)-8s %(name)-30s %(message)s') #, datefmt='%H:%M:%S.%m')
# Disable PyQt log messages
logging.getLogger("PyQt4").setLevel(logging.WARNING)
# Handle options common for all commands
# and initialize the labeltool object from
# the configuration (default config if not specified)
if options.pythonpath:
sys.path.insert(0, options.pythonpath)
self.init_from_config(options.config)
# check for commands
try:
subcommand = args[1]
except IndexError:
subcommand = None
# handle commands and command line arguments
if subcommand == 'help':
if len(args) > 2:
self.fetch_command(args[2]).print_help(self.prog_name, args[2])
sys.exit(0)
else:
sys.stdout.write(self.main_help_text() + '\n')
parser.print_lax_help()
sys.exit(1)
elif self.argv[1:] == ['--version']:
# LaxOptionParser already takes care of printing the version.
sys.exit(0)
elif self.argv[1:] in (['--help'], ['-h']):
sys.stdout.write(self.main_help_text() + '\n')
parser.print_lax_help()
sys.exit(0)
elif subcommand in get_commands():
self.fetch_command(subcommand).run_from_argv(self.argv)
sys.exit(0)
else:
# Setup GUI
self._mainwindow = MainWindow(self)
self._mainwindow.show()
# Load plugins
self.loadPlugins(config.PLUGINS)
# check if args contain a labelfile filename to load
if len(args) > 1:
try:
self.loadAnnotations(args[1], handleErrors=False)
# goto to first image
self.gotoNext()
except Exception as e:
LOG.fatal("Error loading annotations: %s" % e)
if (int(options.verbosity)) > 1:
raise
else:
sys.exit(1)
else:
self.clearAnnotations()
    def fetch_command(self, subcommand):
        """
        Tries to fetch the given subcommand, printing a message with the
        appropriate command called from the command line if it can't be found.

        Returns the command instance with its ``labeltool`` attribute set.
        """
        try:
            app_name = get_commands()[subcommand]
        except KeyError:
            sys.stderr.write("Unknown command: %r\nType '%s help' for usage.\n" %
                             (subcommand, self.prog_name))
            sys.exit(1)
        if isinstance(app_name, BaseCommand):
            # If the command is already loaded, use it directly.
            klass = app_name
        else:
            # TODO implement load_command_class
            # NOTE(review): load_command_class is not defined/imported here;
            # this branch would raise NameError if ever taken -- confirm.
            klass = load_command_class(app_name, subcommand)

        # set labeltool reference
        klass.labeltool = self
        return klass
    def init_from_config(self, config_module_path=""):
        """
        Initializes the labeltool from the given configuration
        at ``config_module_path``.  If empty, the default configuration
        is used.
        """
        # Load config
        if config_module_path:
            config.update(config_module_path)

        # Instantiate container factory from the configured container classes.
        self._container_factory = AnnotationContainerFactory(config.CONTAINERS)
def loadPlugins(self, plugins):
self._plugins = []
for plugin in plugins:
if type(plugin) == str:
plugin = import_callable(plugin)
p = plugin(self)
self._plugins.append(p)
action = p.action()
self.pluginLoaded.emit(action)
    ###
    ### Annotation file handling
    ###___________________________________________________________________________________________
    def loadAnnotations(self, fname, handleErrors=True):
        """Load annotations from ``fname`` into a fresh model.

        With handleErrors=True failures are reported via the statusMessage
        signal only; otherwise the exception propagates to the caller.
        Always emits annotationsLoaded afterwards.
        """
        fname = str(fname)  # convert from QString
        try:
            self._container = self._container_factory.create(fname)
            self._model = AnnotationModel(self._container.load(fname))
            msg = "Successfully loaded %s (%d files, %d annotations)" % \
                  (fname, self._model.root().numFiles(), self._model.root().numAnnotations())
        except Exception as e:
            if handleErrors:
                msg = "Error: Loading failed (%s)" % str(e)
            else:
                raise
        self.statusMessage.emit(msg)
        self.annotationsLoaded.emit()
def annotations(self):
if self._model is None:
return None
return self._model.root().getAnnotations()
    def saveAnnotations(self, fname):
        """Save the current annotations to ``fname``.

        Returns True on success, False otherwise; the outcome is also
        reported through the statusMessage signal.
        """
        success = False
        try:
            # create new container if the filename is different
            if fname != self._container.filename():
                self._container = self._container_factory.create(fname)

            # Get annotations dict
            ann = self._model.root().getAnnotations()

            self._container.save(ann, fname)
            #self._model.writeback() # write back changes that are cached in the model itself, e.g. mask updates
            msg = "Successfully saved %s (%d files, %d annotations)" % \
                  (fname, self._model.root().numFiles(), self._model.root().numAnnotations())
            success = True
            # Saving succeeded: the model no longer has unsaved changes.
            self._model.setDirty(False)
        except Exception as e:
            msg = "Error: Saving failed (%s)" % str(e)

        self.statusMessage.emit(msg)
        return success
    def clearAnnotations(self):
        """Replace the model with an empty one and notify listeners."""
        self._model = AnnotationModel([])
        #self._model.setBasedir("")
        self.statusMessage.emit('')
        self.annotationsLoaded.emit()

    def getCurrentFilename(self):
        """Return the filename of the currently active annotation container."""
        return self._container.filename()
    ###########################################################################
    # Model stuff
    ###########################################################################
    def model(self):
        """Return the current AnnotationModel."""
        return self._model

    def gotoIndex(self, idx):
        """Jump to the image item at absolute position ``idx`` (no-op if the
        model is empty or the index is out of range)."""
        if self._model is None:
            return

        current = self._current_image
        if current is None:
            # No current image yet: start from the first image in the model.
            current = next(self._model.iterator(ImageModelItem))

        next_image = current.getSibling(idx)
        if next_image is not None:
            self.setCurrentImage(next_image)

    def gotoNext(self, step=1):
        """Advance the current image by ``step`` positions (if possible)."""
        if self._model is not None:
            if self._current_image is not None:
                next_image = self._current_image.getNextSibling(step)
            else:
                # Not on an image yet: the first image already counts as one
                # step, so only advance by step - 1 from there.
                next_image = next(self._model.iterator(ImageModelItem))
                if next_image is not None:
                    next_image = next_image.getNextSibling(step - 1)

            if next_image is not None:
                self.setCurrentImage(next_image)

    def gotoPrevious(self, step=1):
        """Move the current image back by ``step`` positions (if possible)."""
        if self._model is not None and self._current_image is not None:
            prev_image = self._current_image.getPreviousSibling(step)

            if prev_image is not None:
                self.setCurrentImage(prev_image)

    def updateModified(self):
        """update all GUI elements which depend on the state of the model,
        e.g. whether it has been modified since the last save"""
        #self.ui.action_Add_Image.setEnabled(self._model is not None)
        # TODO also disable/enable other items
        #self.ui.actionSave.setEnabled(self.annotations.dirty())
        #self.setWindowModified(self.annotations.dirty())
        pass

    def currentImage(self):
        """Return the currently selected image model item (or None)."""
        return self._current_image
    def setCurrentImage(self, image):
        """Make ``image`` the current image and emit currentImageChanged.

        Accepts a QModelIndex, an ImageModelItem, or any model item whose
        ancestry contains an ImageModelItem; raises RuntimeError otherwise.
        """
        if isinstance(image, QModelIndex):
            image = self._model.itemFromIndex(image)
        if isinstance(image, RootModelItem):
            # The root is not an image; ignore silently.
            return
        # Walk up the tree until an enclosing image/frame item is found.
        while (image is not None) and (not isinstance(image, ImageModelItem)):
            image = image.parent()
        if image is None:
            raise RuntimeError("Tried to set current image to item that has no Image or Frame as parent!")
        if image != self._current_image:
            self._current_image = image
            self.currentImageChanged.emit()
    def getImage(self, item):
        """Load and return the pixel data for a model item.

        Video frames are fetched from their parent video by frame number;
        plain images are loaded from their filename.
        """
        if item['class'] == 'frame':
            video = item.parent()
            return self._container.loadFrame(video['filename'], item['num'])
        else:
            return self._container.loadImage(item['filename'])

    def getAnnotationFilePatterns(self):
        """Return the filename patterns supported by the container factory."""
        return self._container_factory.patterns()

    def addImageFile(self, fname):
        """Append a new (unannotated) image file item to the model."""
        fileitem = {
            'filename': fname,
            'class': 'image',
            'annotations': [],
        }
        return self._model._root.appendFileItem(fileitem)
def addVideoFile(self, fname):
fileitem = {
'filename': fname,
'class': 'video',
'frames': [],
}
# FIXME: OKAPI should provide a method to get all timestamps at once
# FIXME: Some dialog should be displayed, telling the user that the
# video is being loaded/indexed and that this might take a while
LOG.info("Importing frames from %s. This may take a while..." % fname)
video = okv.createVideoSourceFromString(fname)
video = okv.toRandomAccessVideoSource(video)
# try to convert to iseq, getting all timestamps will be significantly faster
iseq = okv.toImageSeqReader(video)
if iseq is not None:
timestamps = iseq.getTimestamps()
LOG.debug("Adding %d frames" % len(timestamps))
fileitem['frames'] = [{'annotations': [], 'num': i,
'timestamp': ts, 'class': 'frame'}
for i, ts in enumerate(timestamps)]
else:
i = 0
while video.getNextFrame():
LOG.debug("Adding frame %d" % i)
ts = video.getTimestamp()
frame = {'annotations': [],
'num': i,
'timestamp': ts,
'class': 'frame'
}
fileitem['frames'].append(frame)
i += 1
self._model._root.appendFileItem(fileitem)
###
### GUI functions
###___________________________________________________________________________________________
    def mainWindow(self):
        # Accessor for the main application window (may be None).
        return self._mainwindow
###
### PropertyEditor functions
###___________________________________________________________________________________________
def propertyeditor(self):
if self._mainwindow is None:
return None
else:
return self._mainwindow.property_editor
###
### Scene functions
###___________________________________________________________________________________________
def scene(self):
if self._mainwindow is None:
return None
else:
return self._mainwindow.scene
def view(self):
if self._mainwindow is None:
return None
else:
return self._mainwindow.view
def selectNextAnnotation(self):
if self._mainwindow is not None:
return self._mainwindow.scene.selectNextItem()
def selectPreviousAnnotation(self):
if self._mainwindow is not None:
return self._mainwindow.scene.selectNextItem(reverse=True)
def selectAllAnnotations(self):
if self._mainwindow is not None:
return self._mainwindow.scene.selectAllItems()
def deleteSelectedAnnotations(self):
if self._mainwindow is not None:
self._mainwindow.scene.deleteSelectedItems()
def exitInsertMode(self):
if self._mainwindow is not None:
return self._mainwindow.property_editor.endInsertionMode()
###
### TreeView functions
###___________________________________________________________________________________________
def treeview(self):
if self._mainwindow is None:
return None
else:
return self._mainwindow.treeview | bsd-2-clause |
bodi000/odoo | addons/account_followup/__openerp__.py | 64 | 2998 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{
    'name': 'Payment Follow-up Management',
    'version': '1.0',
    'category': 'Accounting & Finance',
    # reStructuredText description shown on the module page.
    'description': """
Module to automate letters for unpaid invoices, with multi-level recalls.
=========================================================================
You can define your multiple levels of recall through the menu:
---------------------------------------------------------------
Configuration / Follow-up / Follow-up Levels
Once it is defined, you can automatically print recalls every day through simply clicking on the menu:
------------------------------------------------------------------------------------------------------
Payment Follow-Up / Send Email and letters
It will generate a PDF / send emails / set manual actions according to the the different levels
of recall defined. You can define different policies for different companies.
Note that if you want to check the follow-up level for a given partner/account entry, you can do from in the menu:
------------------------------------------------------------------------------------------------------------------
Reporting / Accounting / **Follow-ups Analysis
""",
    'author': 'OpenERP SA',
    'website': 'http://www.openerp.com',
    'images': ['images/follow_ups.jpeg','images/send_followups.jpeg'],
    # Dependencies: full accounting plus the mail gateway.
    'depends': ['account_accountant', 'mail'],
    # XML/CSV data files loaded on installation (order matters).
    'data': [
        'security/account_followup_security.xml',
        'security/ir.model.access.csv',
        'report/account_followup_report.xml',
        'account_followup_data.xml',
        'account_followup_view.xml',
        'account_followup_customers.xml',
        'wizard/account_followup_print_view.xml',
        'res_config_view.xml',
        'views/report_followup.xml',
        'account_followup_reports.xml'
    ],
    'demo': ['account_followup_demo.xml'],
    'test': [
        'test/account_followup.yml',
    ],
    'installable': True,
    'auto_install': False,
}
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
pszemus/grpc | tools/release/verify_python_release.py | 2 | 4038 | #!/usr/bin/env python3
#Copyright 2019 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Verifies that all gRPC Python artifacts have been successfully published.
This script is intended to be run from a directory containing the artifacts
that have been uploaded and only the artifacts that have been uploaded. We use
PyPI's JSON API to verify that the proper filenames and checksums are present.
Note that PyPI may take several minutes to update its metadata. Don't have a
heart attack immediately.
This sanity check is a good first step, but ideally, we would automate the
entire release process.
"""
import argparse
import collections
import hashlib
import os
import requests
import sys
# Packages published as part of every gRPC Python release.
_DEFAULT_PACKAGES = [
    "grpcio",
    "grpcio-tools",
    "grpcio-status",
    "grpcio-health-checking",
    "grpcio-reflection",
    "grpcio-channelz",
    "grpcio-testing",
]

# A single distributable file: its basename and md5 hex digest.
Artifact = collections.namedtuple("Artifact", ("filename", "checksum"))
def _get_md5_checksum(filename):
"""Calculate the md5sum for a file."""
hash_md5 = hashlib.md5()
with open(filename, 'rb') as f:
for chunk in iter(lambda: f.read(4096), b""):
hash_md5.update(chunk)
return hash_md5.hexdigest()
def _get_local_artifacts():
    """Get a set of artifacts representing all files in the cwd."""
    artifacts = set()
    for name in os.listdir(os.getcwd()):
        artifacts.add(Artifact(name, _get_md5_checksum(name)))
    return artifacts
def _get_remote_artifacts_for_package(package, version):
    """Get a list of artifacts based on PyPi's json metadata.

    Note that this data will not updated immediately after upload. In my
    experience, it has taken a minute on average to be fresh.
    """
    artifacts = set()
    # Version-specific JSON endpoint; 'releases' maps each version string
    # to the list of files uploaded for it.
    payload = requests.get("https://pypi.org/pypi/{}/{}/json".format(
        package, version)).json()
    for download_info in payload['releases'][version]:
        artifacts.add(
            Artifact(download_info['filename'], download_info['md5_digest']))
    return artifacts
def _get_remote_artifacts_for_packages(packages, version):
    """Collect the remote artifacts of every package at *version*."""
    collected = set()
    for package in packages:
        collected.update(_get_remote_artifacts_for_package(package, version))
    return collected
def _verify_release(version, packages):
    """Compare the local artifacts to the packages uploaded to PyPI.

    Exits the process with status 1 after printing the difference in both
    directions when the two sets do not match.
    """
    local_artifacts = _get_local_artifacts()
    remote_artifacts = _get_remote_artifacts_for_packages(packages, version)
    if local_artifacts != remote_artifacts:
        local_but_not_remote = local_artifacts - remote_artifacts
        remote_but_not_local = remote_artifacts - local_artifacts
        if local_but_not_remote:
            print("The following artifacts exist locally but not remotely.")
            for artifact in local_but_not_remote:
                print(artifact)
        if remote_but_not_local:
            print("The following artifacts exist remotely but not locally.")
            for artifact in remote_but_not_local:
                print(artifact)
        sys.exit(1)
    print("Release verified successfully.")
if __name__ == "__main__":
    # NOTE: argparse's first positional parameter is ``prog`` (the program
    # name shown in usage output); the help text below belongs in
    # ``description``.  Spaces are also added between the concatenated
    # string fragments, which previously ran the words together.
    parser = argparse.ArgumentParser(
        description="Verify a release. Run this from a directory containing "
        "only the artifacts to be uploaded. Note that PyPI may take several "
        "minutes after the upload to reflect the proper metadata.")
    parser.add_argument("version")
    parser.add_argument(
        "packages", nargs='*', type=str, default=_DEFAULT_PACKAGES)
    args = parser.parse_args()
    _verify_release(args.version, args.packages)
| apache-2.0 |
abraxa/libsigrokdecode | decoders/tca6408a/pd.py | 13 | 4621 | ##
## This file is part of the libsigrokdecode project.
##
## Copyright (C) 2012 Uwe Hermann <uwe@hermann-uwe.de>
## Copyright (C) 2013 Matt Ranostay <mranostay@gmail.com>
## Copyright (C) 2014 alberink <alberink@stampfini.org>
##
## This program is free software; you can redistribute it and/or modify
## it under the terms of the GNU General Public License as published by
## the Free Software Foundation; either version 2 of the License, or
## (at your option) any later version.
##
## This program is distributed in the hope that it will be useful,
## but WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
## GNU General Public License for more details.
##
## You should have received a copy of the GNU General Public License
## along with this program; if not, write to the Free Software
## Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
##
import sigrokdecode as srd
class Decoder(srd.Decoder):
    api_version = 2
    id = 'tca6408a'
    name = 'TI TCA6408A'
    longname = 'Texas Instruments TCA6408A'
    desc = 'Texas Instruments TCA6408A 8-bit I²C I/O expander.'
    license = 'gplv2+'
    inputs = ['i2c']
    outputs = ['tca6408a']
    annotations = (
        ('register', 'Register type'),
        ('value', 'Register value'),
        ('warnings', 'Warning messages'),
    )
    annotation_rows = (
        ('regs', 'Registers', (0, 1)),
        ('warnings', 'Warnings', (2,)),
    )

    def __init__(self, **kwargs):
        # Protocol state machine position and the slave address byte of
        # the chip currently being addressed (-1 = none).
        self.state = 'IDLE'
        self.chip = -1

    def start(self):
        self.out_ann = self.register(srd.OUTPUT_ANN)

    def putx(self, data):
        # Emit an annotation spanning the current I²C packet.
        self.put(self.ss, self.es, self.out_ann, data)

    def handle_reg_0x00(self, b):
        # Register 0x00: input port state.
        self.putx([1, ['State of inputs: %02X' % b]])

    def handle_reg_0x01(self, b):
        # Register 0x01: output port state.
        self.putx([1, ['Outputs set: %02X' % b ]])

    def handle_reg_0x02(self, b):
        # Register 0x02: polarity inversion.
        self.putx([1, ['Polarity inverted: %02X' % b]])

    def handle_reg_0x03(self, b):
        # Register 0x03: pin direction configuration.
        self.putx([1, ['Configuration: %02X' % b]])

    def handle_write_reg(self, b):
        # Annotate which register the master selected (register pointer).
        if b == 0:
            self.putx([0, ['Input port', 'In', 'I']])
        elif b == 1:
            self.putx([0, ['Output port', 'Out', 'O']])
        elif b == 2:
            self.putx([0, ['Polarity inversion register', 'Pol', 'P']])
        elif b == 3:
            self.putx([0, ['Configuration register', 'Conf', 'C']])

    def check_correct_chip(self, addr):
        # NOTE(review): 0x20/0x21 presumably correspond to the two
        # ADDR-pin strappings of the TCA6408A -- confirm with datasheet.
        if addr not in (0x20, 0x21):
            self.putx([2, ['Warning: I²C slave 0x%02X not a TCA6408A '
                           'compatible chip.' % addr]])
            self.state = 'IDLE'

    def decode(self, ss, es, data):
        # Driven by the stacked I²C decoder's (cmd, databyte) output.
        cmd, databyte = data

        # Store the start/end samples of this I²C packet.
        self.ss, self.es = ss, es

        # State machine.
        if self.state == 'IDLE':
            # Wait for an I²C START condition.
            if cmd != 'START':
                return
            self.state = 'GET SLAVE ADDR'
        elif self.state == 'GET SLAVE ADDR':
            self.chip = databyte
            self.state = 'GET REG ADDR'
        elif self.state == 'GET REG ADDR':
            # Wait for a data write (master selects the slave register).
            if cmd in ('ADDRESS READ', 'ADDRESS WRITE'):
                self.check_correct_chip(databyte)
            if cmd != 'DATA WRITE':
                return
            self.reg = databyte
            self.handle_write_reg(self.reg)
            self.state = 'WRITE IO REGS'
        elif self.state == 'WRITE IO REGS':
            # If we see a Repeated Start here, the master wants to read.
            if cmd == 'START REPEAT':
                self.state = 'READ IO REGS'
                return
            # Otherwise: Get data bytes until a STOP condition occurs.
            if cmd == 'DATA WRITE':
                handle_reg = getattr(self, 'handle_reg_0x%02x' % self.reg)
                handle_reg(databyte)
            elif cmd == 'STOP':
                self.state = 'IDLE'
                self.chip = -1
        elif self.state == 'READ IO REGS':
            # Wait for an address read operation.
            if cmd == 'ADDRESS READ':
                self.state = 'READ IO REGS2'
                self.chip = databyte
                return
        elif self.state == 'READ IO REGS2':
            if cmd == 'DATA READ':
                handle_reg = getattr(self, 'handle_reg_0x%02x' % self.reg)
                handle_reg(databyte)
            elif cmd == 'STOP':
                self.state = 'IDLE'
| gpl-3.0 |
phyng/RSScrapy | rssweb/settings.py | 1 | 2197 | """
Django settings for rssweb project.
For more information on this file, see
https://docs.djangoproject.com/en/1.7/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.7/ref/settings/
"""
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
import os
BASE_DIR = os.path.dirname(os.path.dirname(__file__))


# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.7/howto/deployment/checklist/

# SECURITY WARNING: keep the secret key used in production secret!
# NOTE(review): this key is committed to the repository; replace it (e.g.
# read it from the environment) before any production deployment.
SECRET_KEY = 'h@y+_gcrhom&@1x=_q@^l(f+)*v)x24tu$yb22b03pl0to%&ws'

# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True

TEMPLATE_DEBUG = True

ALLOWED_HOSTS = []


# Application definition

INSTALLED_APPS = (
    'django.contrib.admin',
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'django.contrib.sessions',
    'django.contrib.messages',
    'django.contrib.staticfiles',
    # Project-local application.
    'home',
)

MIDDLEWARE_CLASSES = (
    'django.contrib.sessions.middleware.SessionMiddleware',
    'django.middleware.common.CommonMiddleware',
    'django.middleware.csrf.CsrfViewMiddleware',
    'django.contrib.auth.middleware.AuthenticationMiddleware',
    'django.contrib.auth.middleware.SessionAuthenticationMiddleware',
    'django.contrib.messages.middleware.MessageMiddleware',
    'django.middleware.clickjacking.XFrameOptionsMiddleware',
)

ROOT_URLCONF = 'rssweb.urls'

WSGI_APPLICATION = 'rssweb.wsgi.application'


# Database
# https://docs.djangoproject.com/en/1.7/ref/settings/#databases

DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.sqlite3',
        'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
    }
}

# Internationalization
# https://docs.djangoproject.com/en/1.7/topics/i18n/

LANGUAGE_CODE = 'zh-cn'

TIME_ZONE = 'Asia/Shanghai'

USE_I18N = True

USE_L10N = True

USE_TZ = True


# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.7/howto/static-files/

STATIC_URL = '/static/'
# NOTE: this setting was previously declared with ``{ }`` -- a *set*
# literal, which has no defined iteration order.  Django applies context
# processors in order (later ones can override earlier context keys), so
# the setting must be an ordered sequence (tuple/list).
TEMPLATE_CONTEXT_PROCESSORS = (
    'django.core.context_processors.request',
    'django.contrib.auth.context_processors.auth',
)
| mit |
mikeckennedy/suds_python | suds/umx/__init__.py | 4 | 1795 | # This program is free software; you can redistribute it and/or modify
# it under the terms of the (LGPL) GNU Lesser General Public License as
# published by the Free Software Foundation; either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Library Lesser General Public License for more details at
# ( http://www.gnu.org/licenses/lgpl.html ).
#
# You should have received a copy of the GNU Lesser General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
# written by: Jeff Ortel ( jortel@redhat.com )
"""
Provides modules containing classes to support
unmarshalling (XML).
"""
from suds.sudsobject import Object
class Content(Object):
    """
    Unmarshalling (XML) content wrapper.

    @ivar node: The content source node.
    @type node: L{sax.element.Element}
    @ivar data: The (optional) content data.
    @type data: L{Object}
    @ivar text: The (optional) content (xml) text.
    @type text: basestring
    """

    # Attribute names that are lazily created with a None value on first
    # access instead of raising AttributeError.
    extensions = []

    def __init__(self, node, **kwargs):
        Object.__init__(self)
        self.node = node
        self.data = None
        self.text = None
        for key, value in kwargs.items():
            setattr(self, key, value)

    def __getattr__(self, name):
        try:
            return self.__dict__[name]
        except KeyError:
            if name in self.extensions:
                # Lazily materialize extension attributes as None.
                setattr(self, name, None)
                return None
            raise AttributeError('Content has no attribute %s' % name)
privacyidea/privacyidea | privacyidea/lib/tokens/radiustoken.py | 1 | 22850 | # -*- coding: utf-8 -*-
#
# 2019-08-15 Cornelius Kölbel <cornelius.koelbel@netknights.it>
# Allow RADIUS challenge / response
# Credits to @droobah, who provided the first pull request
# https://github.com/privacyidea/privacyidea/pull/1389
# 2018-01-21 Cornelius Kölbel <cornelius.koelbel@netknights.it>
# Add tokenkind
# 2016-02-22 Cornelius Kölbel <cornelius@privacyidea.org>
# Add the RADIUS identifier, which points to the system wide list
# of RADIUS servers.
# 2015-10-09 Cornelius Kölbel <cornelius@privacyidea.org>
# Add the RADIUS-System-Config, so that not each
# RADIUS-token needs his own secret. -> change the
# secret globally
# 2015-01-29 Adapt for migration to flask
# Cornelius Kölbel <cornelius@privacyidea.org>
#
# May 08, 2014 Cornelius Kölbel
# License: AGPLv3
# contact: http://www.privacyidea.org
#
# Copyright (C) 2010 - 2014 LSE Leading Security Experts GmbH
# License: LSE
# contact: http://www.linotp.org
# http://www.lsexperts.de
# linotp@lsexperts.de
#
# This code is free software; you can redistribute it and/or
# modify it under the terms of the GNU AFFERO GENERAL PUBLIC LICENSE
# License as published by the Free Software Foundation; either
# version 3 of the License, or any later version.
#
# This code is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU AFFERO GENERAL PUBLIC LICENSE for more details.
#
# You should have received a copy of the GNU Affero General Public
# License along with this program. If not, see <http://www.gnu.org/licenses/>.
#
__doc__ = """This module defines the RadiusTokenClass. The RADIUS token
forwards the authentication request to another RADIUS server.
The code is tested in tests/test_lib_tokens_radius
"""
import logging
import traceback
import binascii
from privacyidea.lib.utils import is_true, to_bytes, hexlify_and_unicode, to_unicode
from privacyidea.lib.tokens.remotetoken import RemoteTokenClass
from privacyidea.lib.tokenclass import TokenClass, TOKENKIND
from privacyidea.api.lib.utils import getParam, ParameterError
from privacyidea.lib.log import log_with
from privacyidea.lib.config import get_from_config
from privacyidea.lib.decorators import check_token_locked
from privacyidea.lib.radiusserver import get_radius
from privacyidea.models import Challenge
from privacyidea.lib.challenge import get_challenges
from privacyidea.lib.policydecorators import challenge_response_allowed
import pyrad.packet
from pyrad.client import Client, Timeout
from pyrad.dictionary import Dictionary
from pyrad.packet import AccessChallenge, AccessAccept, AccessReject
from privacyidea.lib import _
from privacyidea.lib.policy import SCOPE, ACTION, GROUP
# Readability aliases for getParam()'s ``optional`` argument:
# ``optional`` (True) allows a missing parameter, ``required`` (False)
# makes it mandatory.
optional = True
required = False

log = logging.getLogger(__name__)
###############################################
class RadiusTokenClass(RemoteTokenClass):
def __init__(self, db_token):
RemoteTokenClass.__init__(self, db_token)
self.set_type(u"radius")
self.mode = ['authenticate', 'challenge']
@staticmethod
def get_class_type():
return "radius"
@staticmethod
def get_class_prefix():
return "PIRA"
    @staticmethod
    @log_with(log)
    def get_class_info(key=None, ret='all'):
        """
        returns a subtree of the token definition

        :param key: subsection identifier
        :type key: string
        :param ret: default return value, if nothing is found
        :type ret: user defined
        :return: subsection if key exists or user defined
        :rtype: dict or string
        """
        # Static description of the token type, used by the UI and the
        # policy engine.
        res = {'type': 'radius',
               'title': 'RADIUS Token',
               'description': _('RADIUS: Forward authentication request to a '
                                'RADIUS server.'),
               'user': ['enroll'],
               # This tokentype is enrollable in the UI for...
               'ui_enroll': ["admin", "user"],
               'policy': {
                   SCOPE.ENROLL: {
                       ACTION.MAXTOKENUSER: {
                           'type': 'int',
                           'desc': _("The user may only have this maximum number of RADIUS tokens assigned."),
                           'group': GROUP.TOKEN
                       },
                       ACTION.MAXACTIVETOKENUSER: {
                           'type': 'int',
                           'desc': _(
                               "The user may only have this maximum number of active RADIUS tokens assigned."),
                           'group': GROUP.TOKEN
                       }
                   }
               },
        }
        # Return either the requested subsection, the whole dict, or the
        # caller-supplied default.
        if key:
            ret = res.get(key, {})
        else:
            if ret == 'all':
                ret = res
        return ret
    @log_with(log)
    def update(self, param):
        """
        Process the enrollment parameters: either a reference to a system
        wide RADIUS server definition (``radius.identifier``) or the
        legacy per-token server/secret values.
        """
        # New value
        radius_identifier = getParam(param, "radius.identifier")
        self.add_tokeninfo("radius.identifier", radius_identifier)
        # old values
        if not radius_identifier:
            # NOTE(review): ``optional=required`` passes False, i.e. these
            # lookups appear to be mandatory and getParam would raise on a
            # missing value -- which would make the explicit ParameterError
            # below unreachable. TODO confirm getParam's behavior.
            radiusServer = getParam(param, "radius.server", optional=required)
            self.add_tokeninfo("radius.server", radiusServer)
            radius_secret = getParam(param, "radius.secret", optional=required)
            # The RADIUS secret is stored hexlified in the otpkey field.
            self.token.set_otpkey(hexlify_and_unicode(radius_secret))
            system_settings = getParam(param, "radius.system_settings",
                                       default=False)
            self.add_tokeninfo("radius.system_settings", system_settings)
            if not (radiusServer or radius_secret) and not system_settings:
                raise ParameterError("Missing parameter: radius.identifier", id=905)

        # if another OTP length would be specified in /admin/init this would
        # be overwritten by the parent class, which is ok.
        self.set_otplen(6)
        TokenClass.update(self, param)
        val = getParam(param, "radius.local_checkpin", optional) or 0
        self.add_tokeninfo("radius.local_checkpin", val)

        val = getParam(param, "radius.user", required)
        self.add_tokeninfo("radius.user", val)
        self.add_tokeninfo("tokenkind", TOKENKIND.VIRTUAL)
    @log_with(log)
    @challenge_response_allowed
    def is_challenge_request(self, passw, user=None, options=None):
        """
        This method checks, if this is a request, that triggers a challenge.
        It depends on the way, the pin is checked - either locally or remotely.
        In addition, the RADIUS token has to be configured to allow challenge response.

        communication with RADIUS server: yes
        modification of options: The communication with the RADIUS server can
        change the options, radius_state, radius_result, radius_message

        :param passw: password, which might be pin or pin+otp
        :type passw: string
        :param user: The user from the authentication request
        :type user: User object
        :param options: dictionary of additional request parameters
        :type options: dict

        :return: true or false
        """
        if options is None:
            options = {}
        # should we check the pin locally?
        if self.check_pin_local:
            # With a local PIN the challenge response is always a privacyIDEA challenge response!
            res = self.check_pin(passw, user=user, options=options)
            return res
        else:
            state = options.get('radius_state')
            # The pin is checked remotely
            # A cached result set earlier in this request avoids querying
            # the RADIUS server twice.
            res = options.get('radius_result')
            if res is None:
                res = self._check_radius(passw, options=options, radius_state=state)
            # AccessChallenge from the server means: trigger a challenge.
            return res == AccessChallenge
    @log_with(log)
    def create_challenge(self, transactionid=None, options=None):
        """
        create a challenge, which is submitted to the user

        This method is called after ``is_challenge_request`` has verified,
        that a challenge needs to be created.

        communication with RADIUS server: no
        modification of options: no

        :param transactionid: the id of this challenge
        :param options: the request context parameters / data
        :return: tuple of (bool, message and data)
                 bool, if submit was successful
                 message is submitted to the user
                 data is preserved in the challenge
                 attributes - additional attributes, which are displayed in the
                 output
        """
        if options is None:
            options = {}
        # Prefer the Reply-Message from the RADIUS server, otherwise fall
        # back to a generic prompt.
        message = options.get('radius_message') or "Enter your RADIUS tokencode:"
        # The RADIUS State attribute is stored hexlified in the challenge
        # data, so it can be sent back to the server with the response.
        state = hexlify_and_unicode(options.get('radius_state') or b'')
        attributes = {'state': transactionid}
        validity = int(get_from_config('DefaultChallengeValidityTime', 120))
        db_challenge = Challenge(self.token.serial,
                                 transaction_id=transactionid,
                                 data=state,
                                 challenge=message,
                                 validitytime=validity)
        db_challenge.save()
        # Remove expired challenges from the database.
        self.challenge_janitor()
        return True, message, db_challenge.transaction_id, attributes
@log_with(log)
def is_challenge_response(self, passw, user=None, options=None):
"""
This method checks, if this is a request, that is the response to
a previously sent challenge. But we do not query the RADIUS server.
This is the first method in the loop ``check_token_list``.
communication with RADIUS server: no
modification of options: The "radius_result" key is set to None
:param passw: password, which might be pin or pin+otp
:type passw: string
:param user: the requesting user
:type user: User object
:param options: dictionary of additional request parameters
:type options: dict
:return: true or false
:rtype: bool
"""
if options is None:
options = {}
challenge_response = False
# clear the radius_result since this is the first function called in the chain
# this value will be utilized to ensure we do not _check_radius more than once in the loop
options.update({'radius_result': None})
# fetch the transaction_id
transaction_id = options.get('transaction_id')
if transaction_id is None:
transaction_id = options.get('state')
if transaction_id:
# get the challenges for this transaction ID
challengeobject_list = get_challenges(serial=self.token.serial,
transaction_id=transaction_id)
for challengeobject in challengeobject_list:
if challengeobject.is_valid():
challenge_response = True
return challenge_response
    @log_with(log)
    @check_token_locked
    def check_challenge_response(self, user=None, passw=None, options=None):
        """
        This method verifies if there is a matching question for the given
        passw and also verifies if the answer is correct.

        It then returns the the otp_counter = 1

        :param user: the requesting user
        :type user: User object
        :param passw: the password - in fact it is the answer to the question
        :type passw: string
        :param options: additional arguments from the request, which could
                        be token specific. Usually "transaction_id"
        :type options: dict
        :return: return otp_counter. If -1, challenge does not match
        :rtype: int
        """
        if options is None:
            options = {}
        otp_counter = -1

        # fetch the transaction_id
        transaction_id = options.get('transaction_id') or options.get('state')

        # get the challenges for this transaction ID
        if transaction_id is not None:
            challengeobject_list = get_challenges(serial=self.token.serial,
                                                  transaction_id=transaction_id)

            for challengeobject in challengeobject_list:
                if challengeobject.is_valid():
                    # The challenge stores the hexlified RADIUS State
                    # attribute; send it back to the server with the answer.
                    state = binascii.unhexlify(challengeobject.data)
                    # challenge is still valid
                    radius_response = self._check_radius(passw, options=options, radius_state=state)
                    if radius_response == AccessAccept:
                        # We found the matching challenge,
                        # and the RADIUS server returned AccessAccept
                        challengeobject.delete()
                        otp_counter = 1
                        break
                    elif radius_response == AccessChallenge:
                        # The response was valid but triggered a new challenge
                        # Note: The second challenge currently does not work correctly
                        # see https://github.com/privacyidea/privacyidea/issues/1792
                        challengeobject.delete()
                        _, _, transaction_id, _ = self.create_challenge(options=options)
                        options["transaction_id"] = transaction_id
                        otp_counter = -1
                        break
                    else:
                        otp_counter = -1
                        # increase the received_count
                        challengeobject.set_otp_status()

        self.challenge_janitor()
        return otp_counter
@property
def check_pin_local(self):
"""
lookup if pin should be checked locally or on radius host
:return: bool
"""
local_check = is_true(self.get_tokeninfo("radius.local_checkpin"))
log.debug("local checking pin? {0!r}".format(local_check))
return local_check
@log_with(log)
def split_pin_pass(self, passw, user=None, options=None):
"""
Split the PIN and the OTP value.
Only if it is locally checked and not remotely.
"""
res = 0
pin = ""
otpval = passw
if self.check_pin_local:
(res, pin, otpval) = TokenClass.split_pin_pass(self, passw)
return res, pin, otpval
    @log_with(log)
    @check_token_locked
    def authenticate(self, passw, user=None, options=None):
        """
        do the authentication on base of password / otp and user and
        options, the request parameters.

        This is only called after it is verified, that the upper level is no challenge-request
        or challenge-response

        The "options" are read-only in this method. They are not modified here. authenticate
        is the last method in the loop ``check_token_list``.

        communication with RADIUS server: yes, if is no previous "radius_result"
        If there is a "radius" result in the options, we do not query the radius server
        modification of options: options can be modified if we query the radius server.
        However, this is not important since authenticate is the last call.

        :param passw: the password / otp
        :param user: the requesting user
        :param options: the additional request parameters
        :return: tuple of (success, otp_count - 0 or -1, reply)
        """
        options = options or {}
        res = False
        otp_counter = -1
        reply = None
        otpval = passw
        # should we check the pin locally?
        if self.check_pin_local:
            (_res, pin, otpval) = self.split_pin_pass(passw, user,
                                                      options=options)

            if not self.check_pin(pin, user=user, options=options):
                return False, -1, {'message': "Wrong PIN"}
        # attempt to retrieve saved state/result
        state = options.get('radius_state')
        # Reuse a cached result from earlier in this request if present,
        # so the RADIUS server is queried at most once.
        result = options.get('radius_result')
        if result is None:
            radius_response = self._check_radius(otpval, options=options, radius_state=state)
        else:
            radius_response = result
        if radius_response == AccessAccept:
            res = True
            otp_counter = 1
        return res, otp_counter, reply
@log_with(log)
@check_token_locked
def check_otp(self, otpval, counter=None, window=None, options=None):
"""
Originally check_otp returns an OTP counter. I.e. in a failed attempt
we return -1. In case of success we return 1
:param otpval:
:param counter:
:param window:
:param options:
:return:
"""
res = self._check_radius(otpval, options=options)
if res == AccessAccept:
return 1
else:
return -1
@log_with(log)
@check_token_locked
def _check_radius(self, otpval, options=None, radius_state=None):
    """
    run the RADIUS request against the RADIUS server

    The configuration is resolved in three tiers: a named RADIUS server
    definition (``radius.identifier``), the system-wide settings
    (``radius.system_settings``), or per-token settings. The outcome and
    any challenge state are written back into ``options`` for reuse later
    in the same request.

    :param otpval: the OTP value
    :param options: additional token specific options
    :type options: dict
    :param radius_state: State attribute from a previous Access-Challenge,
        replayed to the server to continue that challenge dialog
    :return: counter of the matching OTP value.
    :rtype: AccessAccept, AccessReject, AccessChallenge
    """
    result = AccessReject
    radius_message = None
    if options is None:
        options = {}
    radius_dictionary = None
    radius_identifier = self.get_tokeninfo("radius.identifier")
    radius_user = self.get_tokeninfo("radius.user")
    system_radius_settings = self.get_tokeninfo("radius.system_settings")
    # Fallback client parameters; only the identifier-based configuration
    # below overrides them.
    # NOTE(review): these defaults (5s / 3 retries) differ from the
    # ``or 10`` / ``or 1`` fallbacks used in the identifier branch --
    # confirm which pair is intended.
    radius_timeout = 5
    radius_retries = 3
    if radius_identifier:
        # New configuration: a named RADIUS server definition
        radius_server_object = get_radius(radius_identifier)
        radius_server = radius_server_object.config.server
        radius_port = radius_server_object.config.port
        radius_server = u"{0!s}:{1!s}".format(radius_server, radius_port)
        radius_secret = radius_server_object.get_secret()
        radius_dictionary = radius_server_object.config.dictionary
        radius_timeout = int(radius_server_object.config.timeout or 10)
        radius_retries = int(radius_server_object.config.retries or 1)
    elif system_radius_settings:
        # system configuration
        radius_server = get_from_config("radius.server")
        radius_secret = get_from_config("radius.secret")
    else:
        # individual token settings
        radius_server = self.get_tokeninfo("radius.server")
        # Read the secret: the token's OTP key doubles as the shared
        # RADIUS secret (stored hex-encoded).
        secret = self.token.get_otpkey()
        radius_secret = binascii.unhexlify(secret.getKey())

    # here we also need to check for radius.user
    log.debug(u"checking OTP len:{0!s} on radius server: "
              u"{1!s}, user: {2!r}".format(len(otpval), radius_server,
                                           radius_user))
    try:
        # pyrad does not allow to set timeout and retries.
        # it defaults to retries=3, timeout=5
        # TODO: At the moment we support only one radius server.
        #       No round robin.
        server = radius_server.split(':')
        r_server = server[0]
        r_authport = 1812
        if len(server) >= 2:
            r_authport = int(server[1])
        nas_identifier = get_from_config("radius.nas_identifier",
                                         "privacyIDEA")
        if not radius_dictionary:
            radius_dictionary = get_from_config("radius.dictfile",
                                                "/etc/privacyidea/dictionary")
        log.debug(u"NAS Identifier: %r, "
                  u"Dictionary: %r" % (nas_identifier, radius_dictionary))
        log.debug(u"constructing client object "
                  u"with server: %r, port: %r, secret: %r" %
                  (r_server, r_authport, to_unicode(radius_secret)))
        srv = Client(server=r_server,
                     authport=r_authport,
                     secret=to_bytes(radius_secret),
                     dict=Dictionary(radius_dictionary))
        # Set retries and timeout of the client
        srv.timeout = radius_timeout
        srv.retries = radius_retries
        # NOTE(review): if "radius.user" tokeninfo is unset, radius_user is
        # None and .encode() raises AttributeError, which is swallowed by
        # the broad except below -- confirm a user is always configured.
        req = srv.CreateAuthPacket(code=pyrad.packet.AccessRequest,
                                   User_Name=radius_user.encode('utf-8'),
                                   NAS_Identifier=nas_identifier.encode('ascii'))
        req["User-Password"] = req.PwCrypt(otpval)
        if radius_state:
            # continue a pending challenge dialog with the saved State
            req["State"] = radius_state
            log.info(u"Sending saved challenge to radius server: {0!r} ".format(radius_state))

        try:
            response = srv.SendPacket(req)
        except Timeout:
            # treat an unreachable server as a plain rejection
            log.warning(u"The remote RADIUS server {0!s} timeout out for user {1!s}.".format(
                r_server, radius_user))
            return AccessReject

        # handle the RADIUS challenge
        if response.code == pyrad.packet.AccessChallenge:
            # now we map this to a privacyidea challenge
            if "State" in response:
                radius_state = response["State"][0]
            if "Reply-Message" in response:
                radius_message = response["Reply-Message"][0]
            result = AccessChallenge
        elif response.code == pyrad.packet.AccessAccept:
            radius_state = '<SUCCESS>'
            radius_message = 'RADIUS authentication succeeded'
            log.info(u"RADIUS server {0!s} granted "
                     u"access to user {1!s}.".format(r_server, radius_user))
            result = AccessAccept
        else:
            radius_state = '<REJECTED>'
            radius_message = 'RADIUS authentication failed'
            log.debug(u'radius response code {0!s}'.format(response.code))
            log.info(u"Radiusserver {0!s} "
                     u"rejected access to user {1!s}.".format(r_server, radius_user))
            result = AccessReject
    except Exception as ex:  # pragma: no cover
        # best-effort: any client/config error is logged and mapped to the
        # initial AccessReject result
        log.error("Error contacting radius Server: {0!r}".format((ex)))
        log.info("{0!s}".format(traceback.format_exc()))

    # publish the outcome for later steps of this request (see authenticate)
    options.update({'radius_result': result})
    options.update({'radius_state': radius_state})
    options.update({'radius_message': radius_message})
    return result
| agpl-3.0 |
Mistobaan/tensorflow | tensorflow/python/training/queue_runner_test.py | 56 | 14970 | # Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for QueueRunner."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import collections
import time
from tensorflow.python.client import session
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import errors_impl
from tensorflow.python.framework import ops
from tensorflow.python.ops import control_flow_ops
from tensorflow.python.ops import data_flow_ops
from tensorflow.python.ops import variables
from tensorflow.python.platform import test
from tensorflow.python.training import coordinator
from tensorflow.python.training import monitored_session
from tensorflow.python.training import queue_runner_impl
# Lightweight stand-in for an op: only the .name attribute is used, so
# running it raises "Operation not in the graph" (exercised in the tests).
_MockOp = collections.namedtuple("MockOp", ["name"])
class QueueRunnerTest(test.TestCase):
  """Tests for QueueRunner: thread creation, coordinator interplay,
  exception capture, and serialization to/from QueueRunnerDef protos."""

  def testBasic(self):
    """A single enqueue op runs until OUT_OF_RANGE and terminates cleanly."""
    with self.test_session() as sess:
      # CountUpTo will raise OUT_OF_RANGE when it reaches the count.
      zero64 = constant_op.constant(0, dtype=dtypes.int64)
      var = variables.Variable(zero64)
      count_up_to = var.count_up_to(3)
      queue = data_flow_ops.FIFOQueue(10, dtypes.float32)
      variables.global_variables_initializer().run()
      qr = queue_runner_impl.QueueRunner(queue, [count_up_to])
      threads = qr.create_threads(sess)
      self.assertEqual(sorted(t.name for t in threads),
                       ["QueueRunnerThread-fifo_queue-CountUpTo:0"])
      for t in threads:
        t.start()
      for t in threads:
        t.join()
      self.assertEqual(0, len(qr.exceptions_raised))
      # The variable should be 3.
      self.assertEqual(3, var.eval())

  def testTwoOps(self):
    """Two enqueue ops get one thread each and run independently."""
    with self.test_session() as sess:
      # CountUpTo will raise OUT_OF_RANGE when it reaches the count.
      zero64 = constant_op.constant(0, dtype=dtypes.int64)
      var0 = variables.Variable(zero64)
      count_up_to_3 = var0.count_up_to(3)
      var1 = variables.Variable(zero64)
      count_up_to_30 = var1.count_up_to(30)
      queue = data_flow_ops.FIFOQueue(10, dtypes.float32)
      qr = queue_runner_impl.QueueRunner(queue, [count_up_to_3, count_up_to_30])
      threads = qr.create_threads(sess)
      self.assertEqual(sorted(t.name for t in threads),
                       ["QueueRunnerThread-fifo_queue-CountUpTo:0",
                        "QueueRunnerThread-fifo_queue-CountUpTo_1:0"])
      variables.global_variables_initializer().run()
      for t in threads:
        t.start()
      for t in threads:
        t.join()
      self.assertEqual(0, len(qr.exceptions_raised))
      self.assertEqual(3, var0.eval())
      self.assertEqual(30, var1.eval())

  def testExceptionsCaptured(self):
    """Without a coordinator, per-thread exceptions are collected on the runner."""
    with self.test_session() as sess:
      queue = data_flow_ops.FIFOQueue(10, dtypes.float32)
      qr = queue_runner_impl.QueueRunner(queue, [_MockOp("i fail"),
                                                 _MockOp("so fail")])
      threads = qr.create_threads(sess)
      variables.global_variables_initializer().run()
      for t in threads:
        t.start()
      for t in threads:
        t.join()
      exceptions = qr.exceptions_raised
      self.assertEqual(2, len(exceptions))
      self.assertTrue("Operation not in the graph" in str(exceptions[0]))
      self.assertTrue("Operation not in the graph" in str(exceptions[1]))

  def testRealDequeueEnqueue(self):
    """A runner feeding queue1 from queue0 stops when queue0 is closed."""
    with self.test_session() as sess:
      q0 = data_flow_ops.FIFOQueue(3, dtypes.float32)
      enqueue0 = q0.enqueue((10.0,))
      close0 = q0.close()
      q1 = data_flow_ops.FIFOQueue(30, dtypes.float32)
      enqueue1 = q1.enqueue((q0.dequeue(),))
      dequeue1 = q1.dequeue()
      qr = queue_runner_impl.QueueRunner(q1, [enqueue1])
      threads = qr.create_threads(sess)
      for t in threads:
        t.start()
      # Enqueue 2 values, then close queue0.
      enqueue0.run()
      enqueue0.run()
      close0.run()
      # Wait for the queue runner to terminate.
      for t in threads:
        t.join()
      # It should have terminated cleanly.
      self.assertEqual(0, len(qr.exceptions_raised))
      # The 2 values should be in queue1.
      self.assertEqual(10.0, dequeue1.eval())
      self.assertEqual(10.0, dequeue1.eval())
      # And queue1 should now be closed.
      with self.assertRaisesRegexp(errors_impl.OutOfRangeError, "is closed"):
        dequeue1.eval()

  def testRespectCoordShouldStop(self):
    """A pre-stopped coordinator makes the runner exit without doing work."""
    with self.test_session() as sess:
      # CountUpTo will raise OUT_OF_RANGE when it reaches the count.
      zero64 = constant_op.constant(0, dtype=dtypes.int64)
      var = variables.Variable(zero64)
      count_up_to = var.count_up_to(3)
      queue = data_flow_ops.FIFOQueue(10, dtypes.float32)
      variables.global_variables_initializer().run()
      qr = queue_runner_impl.QueueRunner(queue, [count_up_to])
      # Ask the coordinator to stop.  The queue runner should
      # finish immediately.
      coord = coordinator.Coordinator()
      coord.request_stop()
      threads = qr.create_threads(sess, coord)
      self.assertEqual(sorted(t.name for t in threads),
                       ["QueueRunnerThread-fifo_queue-CountUpTo:0",
                        "QueueRunnerThread-fifo_queue-close_on_stop"])
      for t in threads:
        t.start()
      coord.join()
      self.assertEqual(0, len(qr.exceptions_raised))
      # The variable should be 0.
      self.assertEqual(0, var.eval())

  def testRequestStopOnException(self):
    """With a coordinator, thread exceptions are re-raised by coord.join()."""
    with self.test_session() as sess:
      queue = data_flow_ops.FIFOQueue(10, dtypes.float32)
      qr = queue_runner_impl.QueueRunner(queue, [_MockOp("not an op")])
      coord = coordinator.Coordinator()
      threads = qr.create_threads(sess, coord)
      for t in threads:
        t.start()
      # The exception should be re-raised when joining.
      with self.assertRaisesRegexp(ValueError, "Operation not in the graph"):
        coord.join()

  def testGracePeriod(self):
    """request_stop() closes the queue so a blocked enqueue can terminate."""
    with self.test_session() as sess:
      # The enqueue will quickly block.
      queue = data_flow_ops.FIFOQueue(2, dtypes.float32)
      enqueue = queue.enqueue((10.0,))
      dequeue = queue.dequeue()
      qr = queue_runner_impl.QueueRunner(queue, [enqueue])
      coord = coordinator.Coordinator()
      qr.create_threads(sess, coord, start=True)
      # Dequeue one element and then request stop.
      dequeue.op.run()
      time.sleep(0.02)
      coord.request_stop()
      # We should be able to join because the RequestStop() will cause
      # the queue to be closed and the enqueue to terminate.
      coord.join(stop_grace_period_secs=1.0)

  def testMultipleSessions(self):
    """create_threads() makes a full set of threads per distinct session."""
    with self.test_session() as sess:
      with session.Session() as other_sess:
        zero64 = constant_op.constant(0, dtype=dtypes.int64)
        var = variables.Variable(zero64)
        count_up_to = var.count_up_to(3)
        queue = data_flow_ops.FIFOQueue(10, dtypes.float32)
        variables.global_variables_initializer().run()
        coord = coordinator.Coordinator()
        qr = queue_runner_impl.QueueRunner(queue, [count_up_to])
        # NOTE that this test does not actually start the threads.
        threads = qr.create_threads(sess, coord=coord)
        other_threads = qr.create_threads(other_sess, coord=coord)
        self.assertEqual(len(threads), len(other_threads))

  def testIgnoreMultiStarts(self):
    """A second create_threads() for the same session returns no new threads."""
    with self.test_session() as sess:
      # CountUpTo will raise OUT_OF_RANGE when it reaches the count.
      zero64 = constant_op.constant(0, dtype=dtypes.int64)
      var = variables.Variable(zero64)
      count_up_to = var.count_up_to(3)
      queue = data_flow_ops.FIFOQueue(10, dtypes.float32)
      variables.global_variables_initializer().run()
      coord = coordinator.Coordinator()
      qr = queue_runner_impl.QueueRunner(queue, [count_up_to])
      threads = []
      # NOTE that this test does not actually start the threads.
      threads.extend(qr.create_threads(sess, coord=coord))
      new_threads = qr.create_threads(sess, coord=coord)
      self.assertEqual([], new_threads)

  def testThreads(self):
    """Threads can be re-created after all previous ones have finished."""
    with self.test_session() as sess:
      # CountUpTo will raise OUT_OF_RANGE when it reaches the count.
      zero64 = constant_op.constant(0, dtype=dtypes.int64)
      var = variables.Variable(zero64)
      count_up_to = var.count_up_to(3)
      queue = data_flow_ops.FIFOQueue(10, dtypes.float32)
      variables.global_variables_initializer().run()
      qr = queue_runner_impl.QueueRunner(queue, [count_up_to,
                                                 _MockOp("bad_op")])
      threads = qr.create_threads(sess, start=True)
      self.assertEqual(sorted(t.name for t in threads),
                       ["QueueRunnerThread-fifo_queue-CountUpTo:0",
                        "QueueRunnerThread-fifo_queue-bad_op"])
      for t in threads:
        t.join()
      exceptions = qr.exceptions_raised
      self.assertEqual(1, len(exceptions))
      self.assertTrue("Operation not in the graph" in str(exceptions[0]))
      # A fresh round of threads resets the captured exceptions.
      threads = qr.create_threads(sess, start=True)
      for t in threads:
        t.join()
      exceptions = qr.exceptions_raised
      self.assertEqual(1, len(exceptions))
      self.assertTrue("Operation not in the graph" in str(exceptions[0]))

  def testName(self):
    """The runner is named after its queue and registered under that scope."""
    with ops.name_scope("scope"):
      queue = data_flow_ops.FIFOQueue(10, dtypes.float32, name="queue")
      qr = queue_runner_impl.QueueRunner(queue, [control_flow_ops.no_op()])
      self.assertEqual("scope/queue", qr.name)
    queue_runner_impl.add_queue_runner(qr)
    self.assertEqual(
        1, len(ops.get_collection(ops.GraphKeys.QUEUE_RUNNERS, "scope")))

  def testStartQueueRunners(self):
    """start_queue_runners() launches all runners from the collection."""
    # CountUpTo will raise OUT_OF_RANGE when it reaches the count.
    zero64 = constant_op.constant(0, dtype=dtypes.int64)
    var = variables.Variable(zero64)
    count_up_to = var.count_up_to(3)
    queue = data_flow_ops.FIFOQueue(10, dtypes.float32)
    init_op = variables.global_variables_initializer()
    qr = queue_runner_impl.QueueRunner(queue, [count_up_to])
    queue_runner_impl.add_queue_runner(qr)
    with self.test_session() as sess:
      init_op.run()
      threads = queue_runner_impl.start_queue_runners(sess)
      for t in threads:
        t.join()
      self.assertEqual(0, len(qr.exceptions_raised))
      # The variable should be 3.
      self.assertEqual(3, var.eval())

  def testStartQueueRunnersRaisesIfNotASession(self):
    """Passing a non-Session object raises TypeError."""
    zero64 = constant_op.constant(0, dtype=dtypes.int64)
    var = variables.Variable(zero64)
    count_up_to = var.count_up_to(3)
    queue = data_flow_ops.FIFOQueue(10, dtypes.float32)
    init_op = variables.global_variables_initializer()
    qr = queue_runner_impl.QueueRunner(queue, [count_up_to])
    queue_runner_impl.add_queue_runner(qr)
    with self.test_session():
      init_op.run()
      with self.assertRaisesRegexp(TypeError, "tf.Session"):
        queue_runner_impl.start_queue_runners("NotASession")

  def testStartQueueRunnersIgnoresMonitoredSession(self):
    """A MonitoredSession is tolerated but no threads are started."""
    zero64 = constant_op.constant(0, dtype=dtypes.int64)
    var = variables.Variable(zero64)
    count_up_to = var.count_up_to(3)
    queue = data_flow_ops.FIFOQueue(10, dtypes.float32)
    init_op = variables.global_variables_initializer()
    qr = queue_runner_impl.QueueRunner(queue, [count_up_to])
    queue_runner_impl.add_queue_runner(qr)
    with self.test_session():
      init_op.run()
      threads = queue_runner_impl.start_queue_runners(
          monitored_session.MonitoredSession())
      self.assertFalse(threads)

  def testStartQueueRunnersNonDefaultGraph(self):
    """Runners registered on an explicit graph start against that graph."""
    # CountUpTo will raise OUT_OF_RANGE when it reaches the count.
    graph = ops.Graph()
    with graph.as_default():
      zero64 = constant_op.constant(0, dtype=dtypes.int64)
      var = variables.Variable(zero64)
      count_up_to = var.count_up_to(3)
      queue = data_flow_ops.FIFOQueue(10, dtypes.float32)
      init_op = variables.global_variables_initializer()
      qr = queue_runner_impl.QueueRunner(queue, [count_up_to])
      queue_runner_impl.add_queue_runner(qr)
    with self.test_session(graph=graph) as sess:
      init_op.run()
      threads = queue_runner_impl.start_queue_runners(sess)
      for t in threads:
        t.join()
      self.assertEqual(0, len(qr.exceptions_raised))
      # The variable should be 3.
      self.assertEqual(3, var.eval())

  def testQueueRunnerSerializationRoundTrip(self):
    """to_proto()/from_proto() preserve all fields, including the legacy
    default for queue_closed_exception_types."""
    graph = ops.Graph()
    with graph.as_default():
      queue = data_flow_ops.FIFOQueue(10, dtypes.float32, name="queue")
      enqueue_op = control_flow_ops.no_op(name="enqueue")
      close_op = control_flow_ops.no_op(name="close")
      cancel_op = control_flow_ops.no_op(name="cancel")
      qr0 = queue_runner_impl.QueueRunner(
          queue, [enqueue_op],
          close_op,
          cancel_op,
          queue_closed_exception_types=(errors_impl.OutOfRangeError,
                                        errors_impl.CancelledError))
      qr0_proto = queue_runner_impl.QueueRunner.to_proto(qr0)
      qr0_recon = queue_runner_impl.QueueRunner.from_proto(qr0_proto)
      self.assertEqual("queue", qr0_recon.queue.name)
      self.assertEqual(1, len(qr0_recon.enqueue_ops))
      self.assertEqual(enqueue_op, qr0_recon.enqueue_ops[0])
      self.assertEqual(close_op, qr0_recon.close_op)
      self.assertEqual(cancel_op, qr0_recon.cancel_op)
      self.assertEqual(
          (errors_impl.OutOfRangeError, errors_impl.CancelledError),
          qr0_recon.queue_closed_exception_types)

      # Assert we reconstruct an OutOfRangeError for QueueRunners
      # created before QueueRunnerDef had a queue_closed_exception_types field.
      del qr0_proto.queue_closed_exception_types[:]
      qr0_legacy_recon = queue_runner_impl.QueueRunner.from_proto(qr0_proto)
      self.assertEqual("queue", qr0_legacy_recon.queue.name)
      self.assertEqual(1, len(qr0_legacy_recon.enqueue_ops))
      self.assertEqual(enqueue_op, qr0_legacy_recon.enqueue_ops[0])
      self.assertEqual(close_op, qr0_legacy_recon.close_op)
      self.assertEqual(cancel_op, qr0_legacy_recon.cancel_op)
      self.assertEqual((errors_impl.OutOfRangeError,),
                       qr0_legacy_recon.queue_closed_exception_types)
# Standard test entry point.
if __name__ == "__main__":
  test.main()
| apache-2.0 |
hernad/erpnext | erpnext/manufacturing/doctype/workstation/workstation.py | 50 | 3780 | # Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors
# License: GNU General Public License v3. See license.txt
from __future__ import unicode_literals
import frappe
from frappe import _
from frappe.utils import flt, cint, getdate, formatdate, comma_and, time_diff_in_seconds, to_timedelta
from frappe.model.document import Document
from dateutil.parser import parse
class WorkstationHolidayError(frappe.ValidationError): pass
class NotInWorkingHoursError(frappe.ValidationError): pass
class OverlapError(frappe.ValidationError): pass
class Workstation(Document):
    """Workstation master document.

    Aggregates the hourly cost components into ``hour_rate`` and keeps the
    configured working-hour slots free of overlaps; the computed rate is
    propagated to all BOM Operations that reference this workstation.
    """

    def validate(self):
        # The effective hour rate is the sum of the individual cost components.
        self.hour_rate = (flt(self.hour_rate_labour) + flt(self.hour_rate_electricity) +
            flt(self.hour_rate_consumable) + flt(self.hour_rate_rent))

    def on_update(self):
        self.validate_overlap_for_operation_timings()
        self.update_bom_operation()

    def validate_overlap_for_operation_timings(self):
        """Check if there is no overlap in setting Workstation Operating Hours"""
        for d in self.get("working_hours"):
            # A row overlaps when either endpoint of an existing row falls
            # inside this row's window, or this row starts inside an
            # existing row's window.
            existing = frappe.db.sql_list("""select idx from `tabWorkstation Working Hour`
                where parent = %s and name != %s
                    and (
                        (start_time between %s and %s) or
                        (end_time between %s and %s) or
                        (%s between start_time and end_time))
                """, (self.name, d.name, d.start_time, d.end_time, d.start_time, d.end_time, d.start_time))

            if existing:
                frappe.throw(_("Row #{0}: Timings conflicts with row {1}").format(d.idx, comma_and(existing)), OverlapError)

    def update_bom_operation(self):
        # Push the recomputed hour rate to every BOM that uses this workstation.
        bom_list = frappe.db.sql("""select DISTINCT parent from `tabBOM Operation`
            where workstation = %s""", self.name)

        for bom_no in bom_list:
            frappe.db.sql("""update `tabBOM Operation` set hour_rate = %s
                where parent = %s and workstation = %s""",
                (self.hour_rate, bom_no[0], self.name))
@frappe.whitelist()
def get_default_holiday_list():
    """Return the default holiday list of the session user's default company."""
    default_company = frappe.defaults.get_user_default("company")
    return frappe.db.get_value("Company", default_company, "default_holiday_list")
def check_if_within_operating_hours(workstation, operation, from_datetime, to_datetime):
    """Validate an operation's time window against the workstation calendar.

    Checks holidays unless production on holidays is allowed, and operating
    hours unless overtime is allowed, per Manufacturing Settings.

    :param workstation: Workstation name
    :param operation: operation description, used in error messages
    :param from_datetime: planned start of the operation
    :param to_datetime: planned end of the operation
    :raises WorkstationHolidayError: via check_workstation_for_holiday
    :raises NotInWorkingHoursError: via is_within_operating_hours
    """
    if from_datetime and to_datetime:
        # "Manufacturing Settings" is a Single DocType, so the docname must be
        # None.  The previous code passed the string "None", which looked up a
        # non-existent document and made allow_production_on_holidays always
        # read as unset (holidays were always enforced).
        if not cint(frappe.db.get_value("Manufacturing Settings", None, "allow_production_on_holidays")):
            check_workstation_for_holiday(workstation, from_datetime, to_datetime)

        if not cint(frappe.db.get_value("Manufacturing Settings", None, "allow_overtime")):
            is_within_operating_hours(workstation, operation, from_datetime, to_datetime)
def is_within_operating_hours(workstation, operation, from_datetime, to_datetime):
    """Ensure at least one working-hour slot can hold the whole operation.

    :raises NotInWorkingHoursError: when no single slot of the workstation is
        long enough for the operation's duration
    """
    required_seconds = time_diff_in_seconds(to_datetime, from_datetime)
    workstation_doc = frappe.get_doc("Workstation", workstation)

    for slot in workstation_doc.working_hours:
        available_seconds = (to_timedelta(slot.end_time or "") -
            to_timedelta(slot.start_time or "")).total_seconds()
        # One sufficiently long slot is enough; the window's position within
        # the slot is not checked here.
        if available_seconds >= required_seconds:
            return

    frappe.throw(_("Operation {0} longer than any available working hours in workstation {1}, break down the operation into multiple operations").format(operation, workstation_doc.name), NotInWorkingHoursError)
def check_workstation_for_holiday(workstation, from_datetime, to_datetime):
    """Raise WorkstationHolidayError if the window touches a holiday.

    Looks up the workstation's holiday list and collects every holiday date
    that falls between the two datetimes (compared by date).
    """
    holiday_list = frappe.db.get_value("Workstation", workstation, "holiday_list")
    if not (holiday_list and from_datetime and to_datetime):
        return

    rows = frappe.db.sql("""select holiday_date from `tabHoliday` where parent = %s
        and holiday_date between %s and %s """,
        (holiday_list, getdate(from_datetime), getdate(to_datetime)))

    applicable_holidays = [formatdate(row[0]) for row in rows]

    if applicable_holidays:
        frappe.throw(_("Workstation is closed on the following dates as per Holiday List: {0}")
            .format(holiday_list) + "\n" + "\n".join(applicable_holidays), WorkstationHolidayError)
| agpl-3.0 |
# Skype4Py language resource module: maps internal enumeration constants
# (API attach status, call status, chat status, voicemail status, ...) to
# human-readable display strings.
#
# NOTE(review): despite the module name "lv" (Latvian) every value below is
# the English placeholder text -- confirm whether translations were ever
# provided for this locale before relying on it for localized UI.

# API attachment status
apiAttachAvailable = u'API Available'
apiAttachNotAvailable = u'Not Available'
apiAttachPendingAuthorization = u'Pending Authorization'
apiAttachRefused = u'Refused'
apiAttachSuccess = u'Success'
apiAttachUnknown = u'Unknown'

# Buddy (contact) status
budDeletedFriend = u'Deleted From Friendlist'
budFriend = u'Friend'
budNeverBeenFriend = u'Never Been In Friendlist'
budPendingAuthorization = u'Pending Authorization'
budUnknown = u'Unknown'

# Call failure reasons
cfrBlockedByRecipient = u'Call blocked by recipient'
cfrMiscError = u'Misc error'
cfrNoCommonCodec = u'No common codec found'
cfrNoProxyFound = u'No proxy found'
cfrNotAuthorizedByRecipient = u'Current user not authorized by recipient'
cfrRecipientNotFriend = u'Recipient not a friend'
cfrRemoteDeviceError = u'Problem with remote sound device'
cfrSessionTerminated = u'Session terminated'
cfrSoundIOError = u'Sound I/O error'
cfrSoundRecordingError = u'Sound recording error'
cfrUnknown = u'Unknown'
cfrUserDoesNotExist = u'User/phone number does not exist'
cfrUserIsOffline = u'User is offline'

# Chat status
chsAllCalls = u'Legacy Dialog'
chsDialog = u'Dialog'
chsIncomingCalls = u'Multi Need Accept'
chsLegacyDialog = u'Legacy Dialog'
chsMissedCalls = u'Dialog'
chsMultiNeedAccept = u'Multi Need Accept'
chsMultiSubscribed = u'Multi Subscribed'
chsOutgoingCalls = u'Multi Subscribed'
chsUnknown = u'Unknown'
chsUnsubscribed = u'Unsubscribed'

# Call status
clsBusy = u'Busy'
clsCancelled = u'Cancelled'
clsEarlyMedia = u'Playing Early Media'
clsFailed = u'Sorry, call failed!'
clsFinished = u'Finished'
clsInProgress = u'Call in Progress'
clsLocalHold = u'On Local Hold'
clsMissed = u'Missed'
clsOnHold = u'On Hold'
clsRefused = u'Refused'
clsRemoteHold = u'On Remote Hold'
clsRinging = u'Calling'
clsRouting = u'Routing'
clsTransferred = u'Unknown'
clsTransferring = u'Unknown'
clsUnknown = u'Unknown'
clsUnplaced = u'Never placed'
clsVoicemailBufferingGreeting = u'Buffering Greeting'
clsVoicemailCancelled = u'Voicemail Has Been Cancelled'
clsVoicemailFailed = u'Voicemail Failed'
clsVoicemailPlayingGreeting = u'Playing Greeting'
clsVoicemailRecording = u'Recording'
clsVoicemailSent = u'Voicemail Has Been Sent'
clsVoicemailUploading = u'Uploading Voicemail'

# Call type
cltIncomingP2P = u'Incoming Peer-to-Peer Call'
cltIncomingPSTN = u'Incoming Telephone Call'
cltOutgoingP2P = u'Outgoing Peer-to-Peer Call'
cltOutgoingPSTN = u'Outgoing Telephone Call'
cltUnknown = u'Unknown'

# Chat message event
cmeAddedMembers = u'Added Members'
cmeCreatedChatWith = u'Created Chat With'
cmeEmoted = u'Unknown'
cmeLeft = u'Left'
cmeSaid = u'Said'
cmeSawMembers = u'Saw Members'
cmeSetTopic = u'Set Topic'
cmeUnknown = u'Unknown'

# Chat message status
cmsRead = u'Read'
cmsReceived = u'Received'
cmsSending = u'Sending'
cmsSent = u'Sent'
cmsUnknown = u'Unknown'

# Connection status
conConnecting = u'Connecting'
conOffline = u'Offline'
conOnline = u'Online'
conPausing = u'Pausing'
conUnknown = u'Unknown'

# Current user status
cusAway = u'Away'
cusDoNotDisturb = u'Do Not Disturb'
cusInvisible = u'Invisible'
cusLoggedOut = u'Logged Out'
cusNotAvailable = u'Not Available'
cusOffline = u'Offline'
cusOnline = u'Online'
cusSkypeMe = u'Skype Me'
cusUnknown = u'Unknown'

# Call video status
cvsBothEnabled = u'Video Send and Receive'
cvsNone = u'No Video'
cvsReceiveEnabled = u'Video Receive'
cvsSendEnabled = u'Video Send'
cvsUnknown = u''

# Contact group type
grpAllFriends = u'All Friends'
grpAllUsers = u'All Users'
grpCustomGroup = u'Custom'
grpOnlineFriends = u'Online Friends'
grpPendingAuthorizationFriends = u'Pending Authorization'
grpProposedSharedGroup = u'Proposed Shared Group'
grpRecentlyContactedUsers = u'Recently Contacted Users'
grpSharedGroup = u'Shared Group'
grpSkypeFriends = u'Skype Friends'
grpSkypeOutFriends = u'SkypeOut Friends'
grpUngroupedFriends = u'Ungrouped Friends'
grpUnknown = u'Unknown'
grpUsersAuthorizedByMe = u'Authorized By Me'
grpUsersBlockedByMe = u'Blocked By Me'
grpUsersWaitingMyAuthorization = u'Waiting My Authorization'

# Chat leave reasons
leaAddDeclined = u'Add Declined'
leaAddedNotAuthorized = u'Added Must Be Authorized'
leaAdderNotFriend = u'Adder Must Be Friend'
leaUnknown = u'Unknown'
leaUnsubscribe = u'Unsubscribed'
leaUserIncapable = u'User Incapable'
leaUserNotFound = u'User Not Found'

# Online status
olsAway = u'Away'
olsDoNotDisturb = u'Do Not Disturb'
olsNotAvailable = u'Not Available'
olsOffline = u'Offline'
olsOnline = u'Online'
olsSkypeMe = u'SkypeMe'
olsSkypeOut = u'SkypeOut'
olsUnknown = u'Unknown'

# SMS message status
smsMessageStatusComposing = u'Composing'
smsMessageStatusDelivered = u'Delivered'
smsMessageStatusFailed = u'Failed'
smsMessageStatusRead = u'Read'
smsMessageStatusReceived = u'Received'
smsMessageStatusSendingToServer = u'Sending to Server'
smsMessageStatusSentToServer = u'Sent to Server'
smsMessageStatusSomeTargetsFailed = u'Some Targets Failed'
smsMessageStatusUnknown = u'Unknown'

# SMS message type
smsMessageTypeCCRequest = u'Confirmation Code Request'
smsMessageTypeCCSubmit = u'Confirmation Code Submit'
smsMessageTypeIncoming = u'Incoming'
smsMessageTypeOutgoing = u'Outgoing'
smsMessageTypeUnknown = u'Unknown'

# SMS target status
smsTargetStatusAcceptable = u'Acceptable'
smsTargetStatusAnalyzing = u'Analyzing'
smsTargetStatusDeliveryFailed = u'Delivery Failed'
smsTargetStatusDeliveryPending = u'Delivery Pending'
smsTargetStatusDeliverySuccessful = u'Delivery Successful'
smsTargetStatusNotRoutable = u'Not Routable'
smsTargetStatusUndefined = u'Undefined'
smsTargetStatusUnknown = u'Unknown'

# User sex
usexFemale = u'Female'
usexMale = u'Male'
usexUnknown = u'Unknown'

# Voicemail failure reasons
vmrConnectError = u'Connect Error'
vmrFileReadError = u'File Read Error'
vmrFileWriteError = u'File Write Error'
vmrMiscError = u'Misc Error'
vmrNoError = u'No Error'
vmrNoPrivilege = u'No Voicemail Privilege'
vmrNoVoicemail = u'No Such Voicemail'
vmrPlaybackError = u'Playback Error'
vmrRecordingError = u'Recording Error'
vmrUnknown = u'Unknown'

# Voicemail status
vmsBlank = u'Blank'
vmsBuffering = u'Buffering'
vmsDeleting = u'Deleting'
vmsDownloading = u'Downloading'
vmsFailed = u'Failed'
vmsNotDownloaded = u'Not Downloaded'
vmsPlayed = u'Played'
vmsPlaying = u'Playing'
vmsRecorded = u'Recorded'
vmsRecording = u'Recording Voicemail'
vmsUnknown = u'Unknown'
vmsUnplayed = u'Unplayed'
vmsUploaded = u'Uploaded'
vmsUploading = u'Uploading'

# Voicemail type
vmtCustomGreeting = u'Custom Greeting'
vmtDefaultGreeting = u'Default Greeting'
vmtIncoming = u'Incoming'
vmtOutgoing = u'Outgoing'
vmtUnknown = u'Unknown'

# Voice stream status
vssAvailable = u'Available'
vssNotAvailable = u'Not Available'
vssPaused = u'Paused'
vssRejected = u'Rejected'
vssRunning = u'Running'
vssStarting = u'Starting'
vssStopping = u'Stopping'
vssUnknown = u'Unknown'
| apache-2.0 |
dhalperi/incubator-beam | sdks/python/apache_beam/options/pipeline_options_validator_test.py | 2 | 11576 | #
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Unit tests for the pipeline options validator module."""
import logging
import unittest
from apache_beam.internal import pickler
from apache_beam.options.pipeline_options import PipelineOptions
from apache_beam.options.pipeline_options_validator import PipelineOptionsValidator
from hamcrest.core.base_matcher import BaseMatcher
# Mock runners to use for validations.
class MockRunners(object):
  """Namespace of marker runner classes used to trigger runner-specific
  validation paths (the validator dispatches on the runner's class name)."""

  class DataflowRunner(object):
    pass

  class TestDataflowRunner(object):
    pass

  class OtherRunner(object):
    pass
# Matcher that always passes for testing on_success_matcher option
class AlwaysPassMatcher(BaseMatcher):
  """Hamcrest matcher that matches anything, for on_success_matcher tests."""

  def _matches(self, item):
    # Accept every item unconditionally.
    return True
class SetupTest(unittest.TestCase):
def check_errors_for_arguments(self, errors, args):
"""Checks that there is exactly one error for each given argument."""
missing = []
remaining = list(errors)
for arg in args:
found = False
for error in remaining:
if arg in error:
remaining.remove(error)
found = True
break
if not found:
missing.append('Missing error for: ' + arg)
# Return missing and remaining (not matched) errors.
return missing + remaining
def test_local_runner(self):
runner = MockRunners.OtherRunner()
options = PipelineOptions([])
validator = PipelineOptionsValidator(options, runner)
errors = validator.validate()
self.assertEqual(len(errors), 0)
def test_missing_required_options(self):
options = PipelineOptions([''])
runner = MockRunners.DataflowRunner()
validator = PipelineOptionsValidator(options, runner)
errors = validator.validate()
self.assertEqual(
self.check_errors_for_arguments(
errors,
['project', 'staging_location', 'temp_location']),
[])
  def test_gcs_path(self):
    """Table-driven check of temp_location/staging_location GCS validation:
    temp_location is required and both must be well-formed gs:// paths
    (staging_location may fall back to temp_location)."""

    def get_validator(temp_location, staging_location):
      # Build a validator with the given (possibly omitted) GCS options.
      options = ['--project=example:example', '--job_name=job']

      if temp_location is not None:
        options.append('--temp_location=' + temp_location)

      if staging_location is not None:
        options.append('--staging_location=' + staging_location)

      pipeline_options = PipelineOptions(options)
      runner = MockRunners.DataflowRunner()
      validator = PipelineOptionsValidator(pipeline_options, runner)
      return validator

    # Each case lists the option values and the arguments expected to be
    # mentioned in the resulting validation errors.
    test_cases = [
        {'temp_location': None,
         'staging_location': 'gs://foo/bar',
         'errors': ['temp_location']},
        {'temp_location': None,
         'staging_location': None,
         'errors': ['staging_location', 'temp_location']},
        {'temp_location': 'gs://foo/bar',
         'staging_location': None,
         'errors': []},
        {'temp_location': 'gs://foo/bar',
         'staging_location': 'gs://ABC/bar',
         'errors': ['staging_location']},
        {'temp_location': 'gcs:/foo/bar',
         'staging_location': 'gs://foo/bar',
         'errors': ['temp_location']},
        {'temp_location': 'gs:/foo/bar',
         'staging_location': 'gs://foo/bar',
         'errors': ['temp_location']},
        {'temp_location': 'gs://ABC/bar',
         'staging_location': 'gs://foo/bar',
         'errors': ['temp_location']},
        {'temp_location': 'gs://ABC/bar',
         'staging_location': 'gs://foo/bar',
         'errors': ['temp_location']},
        {'temp_location': 'gs://foo',
         'staging_location': 'gs://foo/bar',
         'errors': ['temp_location']},
        {'temp_location': 'gs://foo/',
         'staging_location': 'gs://foo/bar',
         'errors': []},
        {'temp_location': 'gs://foo/bar',
         'staging_location': 'gs://foo/bar',
         'errors': []},
    ]

    for case in test_cases:
      errors = get_validator(case['temp_location'],
                             case['staging_location']).validate()
      self.assertEqual(
          self.check_errors_for_arguments(errors, case['errors']), [])
  def test_project(self):
    """Table-driven check of --project validation: required, lowercase,
    at least 3 characters, optionally domain-qualified (domain:project)."""

    def get_validator(project):
      # Build a validator with the given (possibly omitted) project option.
      options = ['--job_name=job', '--staging_location=gs://foo/bar',
                 '--temp_location=gs://foo/bar']

      if project is not None:
        options.append('--project=' + project)

      pipeline_options = PipelineOptions(options)
      runner = MockRunners.DataflowRunner()
      validator = PipelineOptionsValidator(pipeline_options, runner)
      return validator

    test_cases = [
        {'project': None, 'errors': ['project']},
        {'project': '12345', 'errors': ['project']},
        {'project': 'FOO', 'errors': ['project']},
        {'project': 'foo:BAR', 'errors': ['project']},
        {'project': 'fo', 'errors': ['project']},
        {'project': 'foo', 'errors': []},
        {'project': 'foo:bar', 'errors': []},
    ]

    for case in test_cases:
      errors = get_validator(case['project']).validate()
      self.assertEqual(
          self.check_errors_for_arguments(errors, case['errors']), [])
def test_job_name(self):
    """Exercise --job_name validation with valid and invalid names."""
    def make_validator(job_name):
        argv = ['--project=example:example',
                '--staging_location=gs://foo/bar',
                '--temp_location=gs://foo/bar']
        if job_name is not None:
            argv.append('--job_name=' + job_name)
        return PipelineOptionsValidator(
            PipelineOptions(argv), MockRunners.DataflowRunner())

    cases = [
        (None, []),                 # job name is optional
        ('12345', ['job_name']),    # rejected
        ('FOO', ['job_name']),      # rejected
        ('foo:bar', ['job_name']),  # rejected
        ('fo', []),
        ('foo', []),
    ]
    for job_name, expected_errors in cases:
        errors = make_validator(job_name).validate()
        self.assertEqual(
            self.check_errors_for_arguments(errors, expected_errors), [])
def test_num_workers(self):
    """--num_workers, when supplied, must be a positive value."""
    def make_validator(num_workers):
        argv = ['--project=example:example', '--job_name=job',
                '--staging_location=gs://foo/bar',
                '--temp_location=gs://foo/bar']
        if num_workers is not None:
            argv.append('--num_workers=' + num_workers)
        return PipelineOptionsValidator(
            PipelineOptions(argv), MockRunners.DataflowRunner())

    cases = [
        (None, []),               # optional
        ('1', []),
        ('0', ['num_workers']),   # zero is rejected
        ('-1', ['num_workers']),  # negative is rejected
    ]
    for num_workers, expected_errors in cases:
        errors = make_validator(num_workers).validate()
        self.assertEqual(
            self.check_errors_for_arguments(errors, expected_errors), [])
def test_is_service_runner(self):
    """is_service_runner() is True only for the Dataflow runner pointed
    at the Google Dataflow endpoint (with or without a trailing slash)."""
    google_endpoint = '--dataflow_endpoint=https://dataflow.googleapis.com'
    other_endpoint = '--dataflow_endpoint=https://another.service.com'
    cases = [
        (MockRunners.OtherRunner, [], False),
        (MockRunners.OtherRunner, [google_endpoint], False),
        (MockRunners.OtherRunner, [google_endpoint + '/'], False),
        (MockRunners.DataflowRunner, [other_endpoint], False),
        (MockRunners.DataflowRunner, [other_endpoint + '/'], False),
        (MockRunners.DataflowRunner, [google_endpoint], True),
        (MockRunners.DataflowRunner, [google_endpoint + '/'], True),
        (MockRunners.DataflowRunner, [], True),
    ]
    for runner_factory, argv, expected in cases:
        validator = PipelineOptionsValidator(
            PipelineOptions(argv), runner_factory())
        self.assertEqual(validator.is_service_runner(), expected)
def test_dataflow_job_file_and_template_location_mutually_exclusive(self):
    """Passing both --template_location and --dataflow_job_file must fail."""
    conflicting_options = PipelineOptions([
        '--template_location', 'abc',
        '--dataflow_job_file', 'def'
    ])
    validator = PipelineOptionsValidator(
        conflicting_options, MockRunners.OtherRunner())
    self.assertTrue(validator.validate())
def test_validate_template_location(self):
    """--template_location on its own is accepted without errors."""
    validator = PipelineOptionsValidator(
        PipelineOptions(['--template_location', 'abc']),
        MockRunners.OtherRunner())
    self.assertFalse(validator.validate())
def test_validate_dataflow_job_file(self):
    """--dataflow_job_file on its own is accepted without errors."""
    validator = PipelineOptionsValidator(
        PipelineOptions(['--dataflow_job_file', 'abc']),
        MockRunners.OtherRunner())
    self.assertFalse(validator.validate())
def test_streaming(self):
    """The test Dataflow runner rejects --streaming pipelines outright."""
    validator = PipelineOptionsValidator(
        PipelineOptions(['--streaming']),
        MockRunners.TestDataflowRunner())
    self.assertIn('Streaming pipelines are not supported.',
                  validator.validate())
def test_test_matcher(self):
    """--on_success_matcher must be a pickled, valid matcher object."""
    def make_validator(serialized_matcher):
        argv = ['--project=example:example',
                '--job_name=job',
                '--staging_location=gs://foo/bar',
                '--temp_location=gs://foo/bar']
        if serialized_matcher:
            argv.append('--on_success_matcher=' + serialized_matcher)
        return PipelineOptionsValidator(
            PipelineOptions(argv), MockRunners.TestDataflowRunner())

    test_cases = [
        (None, []),                                       # optional
        (pickler.dumps(AlwaysPassMatcher()), []),         # valid matcher
        ('abc', ['on_success_matcher']),                  # not pickled data
        (pickler.dumps(object), ['on_success_matcher']),  # not a matcher
    ]
    for serialized_matcher, expected_errors in test_cases:
        errors = make_validator(serialized_matcher).validate()
        self.assertEqual(
            self.check_errors_for_arguments(errors, expected_errors), [])
if __name__ == '__main__':
  # Surface INFO-level log output while the test suite runs.
  logging.getLogger().setLevel(logging.INFO)
  unittest.main()
| apache-2.0 |
prarthitm/edxplatform | common/djangoapps/microsite_configuration/middleware.py | 10 | 1246 | """
This file implements the Middleware support for the Open edX platform.
A microsite enables the following features:
1) Mapping of sub-domain name to a 'brand', e.g. foo-university.edx.org
2) Present a landing page with a listing of courses that are specific to the 'brand'
3) Ability to swap out some branding elements in the website
"""
from django.conf import settings
from microsite_configuration import microsite
class MicrositeMiddleware(object):
    """Bind microsite configuration to each incoming request.

    The configuration is resolved from Django settings using the
    request's host name, so every request is served with the branding
    of the (sub)domain it arrived on.
    """

    def process_request(self, request):
        """Associate the request's domain with its microsite configuration."""
        microsite.clear()
        microsite.set_by_domain(request.META.get('HTTP_HOST', None))
        return None

    def process_response(self, request, response):
        """Drop any per-request microsite state once the response is built."""
        microsite.clear()
        return response
| agpl-3.0 |
antonyc/django-rest-framework | runtests.py | 63 | 3080 | #! /usr/bin/env python
from __future__ import print_function
import os
import subprocess
import sys
import pytest
# Canned pytest invocations: 'fast' adds -q for terser output.
PYTEST_ARGS = {
    'default': ['tests', '--tb=short', '-s'],
    'fast': ['tests', '--tb=short', '-q', '-s'],
}
# Lint targets and flags for the two linters this script can run.
FLAKE8_ARGS = ['rest_framework', 'tests', '--ignore=E501']
ISORT_ARGS = ['--recursive', '--check-only', 'rest_framework', 'tests']
# Make the repo root importable regardless of the invocation directory.
sys.path.append(os.path.dirname(__file__))
def exit_on_failure(ret, message=None):
    """Exit the process when a step failed.

    Args:
        ret: return code of the step; falsy (0/None) means success.
        message: optional text to print before exiting. Previously this
            parameter was accepted but silently ignored.
    """
    if ret:
        if message:
            print(message)
        sys.exit(ret)
def flake8_main(args):
    """Run flake8 over *args* and return its exit status (0 == clean)."""
    print('Running flake8 code linting')
    status = subprocess.call(['flake8'] + args)
    print('flake8 passed' if not status else 'flake8 failed')
    return status
def isort_main(args):
    """Run isort in check-only mode over *args*; return its exit status."""
    print('Running isort code checking')
    status = subprocess.call(['isort'] + args)
    if not status:
        print('isort passed')
    else:
        print('isort failed: Some modules have incorrectly ordered imports. Fix by running `isort --recursive .`')
    return status
def split_class_and_function(string):
    """Turn 'Class.test_func' into the pytest -k expression
    'Class and test_func' (splitting on the first dot only)."""
    head, tail = string.split('.', 1)
    return '{0} and {1}'.format(head, tail)
def is_function(string):
    """True when the string names a test function, bare or class-qualified."""
    if string.startswith('test_'):
        return True
    return '.test_' in string
def is_class(string):
    """True when the first character is not lowercase — treated as a
    class name (note: digits and symbols also count, as they are their
    own uppercase form)."""
    first = string[0]
    return first == first.upper()
if __name__ == "__main__":
    # Each flag is detected by removing it from sys.argv: the ValueError
    # branch means the flag was ABSENT, so removal doubles as parsing.
    try:
        sys.argv.remove('--nolint')
    except ValueError:
        run_flake8 = True
        run_isort = True
    else:
        run_flake8 = False
        run_isort = False
    try:
        sys.argv.remove('--lintonly')
    except ValueError:
        run_tests = True
    else:
        run_tests = False
    try:
        sys.argv.remove('--fast')
    except ValueError:
        style = 'default'
    else:
        # --fast implies skipping the linters entirely.
        style = 'fast'
        run_flake8 = False
        run_isort = False
    if len(sys.argv) > 1:
        # Remaining argv is forwarded to pytest, with a few conveniences.
        pytest_args = sys.argv[1:]
        first_arg = pytest_args[0]
        try:
            pytest_args.remove('--coverage')
        except ValueError:
            pass
        else:
            pytest_args = ['--cov', 'rest_framework'] + pytest_args
        if first_arg.startswith('-'):
            # `runtests.py [flags]`
            pytest_args = ['tests'] + pytest_args
        elif is_class(first_arg) and is_function(first_arg):
            # `runtests.py TestCase.test_function [flags]`
            expression = split_class_and_function(first_arg)
            pytest_args = ['tests', '-k', expression] + pytest_args[1:]
        elif is_class(first_arg) or is_function(first_arg):
            # `runtests.py TestCase [flags]`
            # `runtests.py test_function [flags]`
            pytest_args = ['tests', '-k', pytest_args[0]] + pytest_args[1:]
    else:
        pytest_args = PYTEST_ARGS[style]
    if run_tests:
        exit_on_failure(pytest.main(pytest_args))
    if run_flake8:
        exit_on_failure(flake8_main(FLAKE8_ARGS))
    if run_isort:
        exit_on_failure(isort_main(ISORT_ARGS))
| bsd-2-clause |
yjmade/odoo | addons/account_analytic_plans/wizard/analytic_plan_create_model.py | 384 | 2829 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp.osv import osv
from openerp.tools.translate import _
class analytic_plan_create_model(osv.osv_memory):
    """Wizard that saves the current analytic plan instance as a reusable
    distribution model, then pops a confirmation window."""
    _name = "analytic.plan.create.model"
    _description = "analytic.plan.create.model"
    def activate(self, cr, uid, ids, context=None):
        """Persist the active analytic plan instance as a model.

        Requires ``context['active_id']`` to point at an
        account.analytic.plan.instance with both a name and a code set;
        raises a user-facing error otherwise.
        """
        plan_obj = self.pool.get('account.analytic.plan.instance')
        mod_obj = self.pool.get('ir.model.data')
        anlytic_plan_obj = self.pool.get('account.analytic.plan')
        if context is None:
            context = {}
        if 'active_id' in context and context['active_id']:
            plan = plan_obj.browse(cr, uid, context['active_id'], context=context)
            # Name and code are mandatory before the instance can be saved.
            if (not plan.name) or (not plan.code):
                raise osv.except_osv(_('Error!'), _('Please put a name and a code before saving the model.'))
            pids = anlytic_plan_obj.search(cr, uid, [], context=context)
            if not pids:
                raise osv.except_osv(_('Error!'), _('There is no analytic plan defined.'))
            # Attach the instance to the first analytic plan found.
            plan_obj.write(cr, uid, [context['active_id']], {'plan_id':pids[0]}, context=context)
            # Look up the confirmation view registered in ir.model.data.
            model_data_ids = mod_obj.search(cr, uid, [('model', '=', 'ir.ui.view'),('name', '=', 'view_analytic_plan_create_model')], context=context)
            resource_id = mod_obj.read(cr, uid, model_data_ids, fields=['res_id'], context=context)[0]['res_id']
            return {
                'name': _('Distribution Model Saved'),
                'view_type': 'form',
                'view_mode': 'tree,form',
                'res_model': 'analytic.plan.create.model',
                'views': [(resource_id,'form')],
                'type': 'ir.actions.act_window',
                'target': 'new',
            }
        else:
            # No active record: just close the wizard.
            return {'type': 'ir.actions.act_window_close'}
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
heemanshu/swift_juno | test/unit/obj/test_updater.py | 4 | 17994 | # Copyright (c) 2010-2012 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import cPickle as pickle
import mock
import os
import unittest
import random
import itertools
from contextlib import closing
from gzip import GzipFile
from tempfile import mkdtemp
from shutil import rmtree
from time import time
from distutils.dir_util import mkpath
from eventlet import spawn, Timeout, listen
from swift.obj import updater as object_updater
from swift.obj.diskfile import (ASYNCDIR_BASE, get_async_dir, DiskFileManager,
get_tmp_dir)
from swift.common.ring import RingData
from swift.common import utils
from swift.common.utils import hash_path, normalize_timestamp, mkdirs, \
write_pickle
from swift.common import swob
from test.unit import debug_logger, patch_policies, mocked_http_conn
from swift.common.storage_policy import StoragePolicy, POLICIES
# Two-policy setup used by the tests: index 0 ('zero') plus the
# default policy at index 1 ('one').
_mocked_policies = [StoragePolicy(0, 'zero', False),
                    StoragePolicy(1, 'one', True)]
@patch_policies(_mocked_policies)
class TestObjectUpdater(unittest.TestCase):
    """Tests for the object updater daemon: sweeping async_pending files,
    mount checking, and replaying container updates over HTTP."""

    def setUp(self):
        # Pin the hash path config so object hashes are deterministic.
        utils.HASH_PATH_SUFFIX = 'endcap'
        utils.HASH_PATH_PREFIX = ''
        self.testdir = mkdtemp()
        # Serialize a tiny 3-replica container ring for the updater to load.
        ring_file = os.path.join(self.testdir, 'container.ring.gz')
        with closing(GzipFile(ring_file, 'wb')) as f:
            pickle.dump(
                RingData([[0, 1, 2, 0, 1, 2],
                          [1, 2, 0, 1, 2, 0],
                          [2, 3, 1, 2, 3, 1]],
                         [{'id': 0, 'ip': '127.0.0.1', 'port': 1,
                           'device': 'sda1', 'zone': 0},
                          {'id': 1, 'ip': '127.0.0.1', 'port': 1,
                           'device': 'sda1', 'zone': 2},
                          {'id': 2, 'ip': '127.0.0.1', 'port': 1,
                           'device': 'sda1', 'zone': 4}], 30),
                f)
        self.devices_dir = os.path.join(self.testdir, 'devices')
        os.mkdir(self.devices_dir)
        self.sda1 = os.path.join(self.devices_dir, 'sda1')
        os.mkdir(self.sda1)
        # One tmp dir per storage policy, as the diskfile layer expects.
        for policy in POLICIES:
            os.mkdir(os.path.join(self.sda1, get_tmp_dir(int(policy))))
        self.logger = debug_logger()

    def tearDown(self):
        rmtree(self.testdir, ignore_errors=1)

    def test_creation(self):
        """The conf dict is parsed into typed attributes on construction."""
        cu = object_updater.ObjectUpdater({
            'devices': self.devices_dir,
            'mount_check': 'false',
            'swift_dir': self.testdir,
            'interval': '1',
            'concurrency': '2',
            'node_timeout': '5'})
        self.assert_(hasattr(cu, 'logger'))
        self.assert_(cu.logger is not None)
        self.assertEquals(cu.devices, self.devices_dir)
        self.assertEquals(cu.interval, 1)
        self.assertEquals(cu.concurrency, 2)
        self.assertEquals(cu.node_timeout, 5)
        self.assert_(cu.get_container_ring() is not None)

    def test_object_sweep(self):
        """object_sweep() processes only the newest pending per object and
        skips async dirs for unknown policies (with a warning)."""
        def check_with_idx(index, warn, should_skip):
            if int(index) > 0:
                asyncdir = os.path.join(self.sda1,
                                        ASYNCDIR_BASE + "-" + index)
            else:
                asyncdir = os.path.join(self.sda1, ASYNCDIR_BASE)
            prefix_dir = os.path.join(asyncdir, 'abc')
            mkpath(prefix_dir)
            # A non-directory where directory is expected should just be
            # skipped, but should not stop processing of subsequent
            # directories.
            not_dirs = (
                os.path.join(self.sda1, 'not_a_dir'),
                os.path.join(self.sda1,
                             ASYNCDIR_BASE + '-' + 'twentington'),
                os.path.join(self.sda1,
                             ASYNCDIR_BASE + '-' + str(int(index) + 100)))
            for not_dir in not_dirs:
                with open(not_dir, 'w'):
                    pass
            # Several pendings per object; only the first (newest written
            # below) timestamp of each list is expected to be processed.
            objects = {
                'a': [1089.3, 18.37, 12.83, 1.3],
                'b': [49.4, 49.3, 49.2, 49.1],
                'c': [109984.123],
            }
            expected = set()
            for o, timestamps in objects.iteritems():
                ohash = hash_path('account', 'container', o)
                for t in timestamps:
                    o_path = os.path.join(prefix_dir, ohash + '-' +
                                          normalize_timestamp(t))
                    if t == timestamps[0]:
                        expected.add((o_path, int(index)))
                    write_pickle({}, o_path)
            seen = set()

            class MockObjectUpdater(object_updater.ObjectUpdater):
                # Record which pendings get processed instead of doing HTTP.
                def process_object_update(self, update_path, device, idx):
                    seen.add((update_path, idx))
                    os.unlink(update_path)

            cu = MockObjectUpdater({
                'devices': self.devices_dir,
                'mount_check': 'false',
                'swift_dir': self.testdir,
                'interval': '1',
                'concurrency': '1',
                'node_timeout': '5'})
            cu.logger = mock_logger = mock.MagicMock()
            cu.object_sweep(self.sda1)
            self.assertEquals(mock_logger.warn.call_count, warn)
            self.assert_(os.path.exists(os.path.join(self.sda1, 'not_a_dir')))
            if should_skip:
                # if we were supposed to skip over the dir, we didn't process
                # anything at all
                self.assertTrue(os.path.exists(prefix_dir))
                self.assertEqual(set(), seen)
            else:
                self.assert_(not os.path.exists(prefix_dir))
                self.assertEqual(expected, seen)
            # test cleanup: the tempdir gets cleaned up between runs, but this
            # way we can be called multiple times in a single test method
            for not_dir in not_dirs:
                os.unlink(not_dir)
        # first check with valid policies
        for pol in POLICIES:
            check_with_idx(str(pol.idx), 0, should_skip=False)
        # now check with a bogus async dir policy and make sure we get
        # a warning indicating that the '99' policy isn't valid
        check_with_idx('99', 1, should_skip=True)

    @mock.patch.object(object_updater, 'ismount')
    def test_run_once_with_disk_unmounted(self, mock_ismount):
        """With mount_check on, an unmounted device is skipped and counted
        as an error; with mount_check off, ismount is never consulted."""
        mock_ismount.return_value = False
        cu = object_updater.ObjectUpdater({
            'devices': self.devices_dir,
            'mount_check': 'false',
            'swift_dir': self.testdir,
            'interval': '1',
            'concurrency': '1',
            'node_timeout': '15'})
        cu.run_once()
        async_dir = os.path.join(self.sda1, get_async_dir(0))
        os.mkdir(async_dir)
        cu.run_once()
        self.assert_(os.path.exists(async_dir))
        # mount_check == False means no call to ismount
        self.assertEqual([], mock_ismount.mock_calls)
        cu = object_updater.ObjectUpdater({
            'devices': self.devices_dir,
            'mount_check': 'TrUe',
            'swift_dir': self.testdir,
            'interval': '1',
            'concurrency': '1',
            'node_timeout': '15'}, logger=self.logger)
        odd_dir = os.path.join(async_dir, 'not really supposed '
                               'to be here')
        os.mkdir(odd_dir)
        cu.run_once()
        self.assert_(os.path.exists(async_dir))
        self.assert_(os.path.exists(odd_dir))  # skipped - not mounted!
        # mount_check == True means ismount was checked
        self.assertEqual([
            mock.call(self.sda1),
        ], mock_ismount.mock_calls)
        self.assertEqual(cu.logger.get_increment_counts(), {'errors': 1})

    @mock.patch.object(object_updater, 'ismount')
    def test_run_once(self, mock_ismount):
        """Exercise the full run_once() cycle: stray files are cleaned up,
        failed container updates are retried, and the pending file is only
        unlinked once every container replica has acknowledged."""
        mock_ismount.return_value = True
        cu = object_updater.ObjectUpdater({
            'devices': self.devices_dir,
            'mount_check': 'false',
            'swift_dir': self.testdir,
            'interval': '1',
            'concurrency': '1',
            'node_timeout': '15'}, logger=self.logger)
        cu.run_once()
        async_dir = os.path.join(self.sda1, get_async_dir(0))
        os.mkdir(async_dir)
        cu.run_once()
        self.assert_(os.path.exists(async_dir))
        # mount_check == False means no call to ismount
        self.assertEqual([], mock_ismount.mock_calls)
        cu = object_updater.ObjectUpdater({
            'devices': self.devices_dir,
            'mount_check': 'TrUe',
            'swift_dir': self.testdir,
            'interval': '1',
            'concurrency': '1',
            'node_timeout': '15'}, logger=self.logger)
        odd_dir = os.path.join(async_dir, 'not really supposed '
                               'to be here')
        os.mkdir(odd_dir)
        cu.run_once()
        self.assert_(os.path.exists(async_dir))
        self.assert_(not os.path.exists(odd_dir))
        # mount_check == True means ismount was checked
        self.assertEqual([
            mock.call(self.sda1),
        ], mock_ismount.mock_calls)
        # Two pendings for the same object: the older must be discarded.
        ohash = hash_path('a', 'c', 'o')
        odir = os.path.join(async_dir, ohash[-3:])
        mkdirs(odir)
        older_op_path = os.path.join(
            odir,
            '%s-%s' % (ohash, normalize_timestamp(time() - 1)))
        op_path = os.path.join(
            odir,
            '%s-%s' % (ohash, normalize_timestamp(time())))
        for path in (op_path, older_op_path):
            with open(path, 'wb') as async_pending:
                pickle.dump({'op': 'PUT', 'account': 'a',
                             'container': 'c',
                             'obj': 'o', 'headers': {
                                 'X-Container-Timestamp':
                                 normalize_timestamp(0)}},
                            async_pending)
        cu.run_once()
        self.assert_(not os.path.exists(older_op_path))
        self.assert_(os.path.exists(op_path))
        self.assertEqual(cu.logger.get_increment_counts(),
                         {'failures': 1, 'unlinks': 1})
        self.assertEqual(None,
                         pickle.load(open(op_path)).get('successes'))
        # Stand in for the three container servers with a real socket.
        bindsock = listen(('127.0.0.1', 0))

        def accepter(sock, return_code):
            # Serve one canned HTTP response and sanity-check the request.
            try:
                with Timeout(3):
                    inc = sock.makefile('rb')
                    out = sock.makefile('wb')
                    out.write('HTTP/1.1 %d OK\r\nContent-Length: 0\r\n\r\n' %
                              return_code)
                    out.flush()
                    self.assertEquals(inc.readline(),
                                      'PUT /sda1/0/a/c/o HTTP/1.1\r\n')
                    headers = swob.HeaderKeyDict()
                    line = inc.readline()
                    while line and line != '\r\n':
                        headers[line.split(':')[0]] = \
                            line.split(':')[1].strip()
                        line = inc.readline()
                    self.assertTrue('x-container-timestamp' in headers)
                    self.assertTrue('X-Backend-Storage-Policy-Index' in
                                    headers)
            except BaseException as err:
                return err
            return None

        def accept(return_codes):
            # Accept one connection per requested status code.
            codes = iter(return_codes)
            try:
                events = []
                for x in xrange(len(return_codes)):
                    with Timeout(3):
                        sock, addr = bindsock.accept()
                        events.append(
                            spawn(accepter, sock, codes.next()))
                for event in events:
                    err = event.wait()
                    if err:
                        raise err
            except BaseException as err:
                return err
            return None
        # First pass: one success, two failures -> pending file retained.
        event = spawn(accept, [201, 500, 500])
        for dev in cu.get_container_ring().devs:
            if dev is not None:
                dev['port'] = bindsock.getsockname()[1]
        cu.logger._clear()
        cu.run_once()
        err = event.wait()
        if err:
            raise err
        self.assert_(os.path.exists(op_path))
        self.assertEqual(cu.logger.get_increment_counts(),
                         {'failures': 1})
        self.assertEqual([0],
                         pickle.load(open(op_path)).get('successes'))
        # Second pass: only the previously-failed nodes are retried.
        event = spawn(accept, [404, 500])
        cu.logger._clear()
        cu.run_once()
        err = event.wait()
        if err:
            raise err
        self.assert_(os.path.exists(op_path))
        self.assertEqual(cu.logger.get_increment_counts(),
                         {'failures': 1})
        self.assertEqual([0, 1],
                         pickle.load(open(op_path)).get('successes'))
        # Final pass: last node succeeds and the pending file is removed.
        event = spawn(accept, [201])
        cu.logger._clear()
        cu.run_once()
        err = event.wait()
        if err:
            raise err
        self.assert_(not os.path.exists(op_path))
        self.assertEqual(cu.logger.get_increment_counts(),
                         {'unlinks': 1, 'successes': 1})

    def test_obj_put_legacy_updates(self):
        """Pendings written without a storage-policy index (legacy format)
        are replayed with policy 0's index in the request headers."""
        ts = (normalize_timestamp(t) for t in
              itertools.count(int(time())))
        policy = POLICIES.get_by_index(0)
        # setup updater
        conf = {
            'devices': self.devices_dir,
            'mount_check': 'false',
            'swift_dir': self.testdir,
        }
        async_dir = os.path.join(self.sda1, get_async_dir(policy.idx))
        os.mkdir(async_dir)
        account, container, obj = 'a', 'c', 'o'
        # write an async
        for op in ('PUT', 'DELETE'):
            self.logger._clear()
            daemon = object_updater.ObjectUpdater(conf, logger=self.logger)
            dfmanager = DiskFileManager(conf, daemon.logger)
            # don't include storage-policy-index in headers_out pickle
            headers_out = swob.HeaderKeyDict({
                'x-size': 0,
                'x-content-type': 'text/plain',
                'x-etag': 'd41d8cd98f00b204e9800998ecf8427e',
                'x-timestamp': ts.next(),
            })
            data = {'op': op, 'account': account, 'container': container,
                    'obj': obj, 'headers': headers_out}
            dfmanager.pickle_async_update(self.sda1, account, container, obj,
                                          data, ts.next(), policy.idx)
            request_log = []

            def capture(*args, **kwargs):
                request_log.append((args, kwargs))
            # run once
            fake_status_codes = [200, 200, 200]
            with mocked_http_conn(*fake_status_codes, give_connect=capture):
                daemon.run_once()
            self.assertEqual(len(fake_status_codes), len(request_log))
            for request_args, request_kwargs in request_log:
                ip, part, method, path, headers, qs, ssl = request_args
                self.assertEqual(method, op)
                self.assertEqual(headers['X-Backend-Storage-Policy-Index'],
                                 str(policy.idx))
            self.assertEqual(daemon.logger.get_increment_counts(),
                             {'successes': 1, 'unlinks': 1,
                              'async_pendings': 1})

    def test_obj_put_async_updates(self):
        """A pending that carries its policy index is replayed with that
        index on every container request."""
        ts = (normalize_timestamp(t) for t in
              itertools.count(int(time())))
        policy = random.choice(list(POLICIES))
        # setup updater
        conf = {
            'devices': self.devices_dir,
            'mount_check': 'false',
            'swift_dir': self.testdir,
        }
        daemon = object_updater.ObjectUpdater(conf, logger=self.logger)
        async_dir = os.path.join(self.sda1, get_async_dir(policy.idx))
        os.mkdir(async_dir)
        # write an async
        dfmanager = DiskFileManager(conf, daemon.logger)
        account, container, obj = 'a', 'c', 'o'
        op = 'PUT'
        headers_out = swob.HeaderKeyDict({
            'x-size': 0,
            'x-content-type': 'text/plain',
            'x-etag': 'd41d8cd98f00b204e9800998ecf8427e',
            'x-timestamp': ts.next(),
            'X-Backend-Storage-Policy-Index': policy.idx,
        })
        data = {'op': op, 'account': account, 'container': container,
                'obj': obj, 'headers': headers_out}
        dfmanager.pickle_async_update(self.sda1, account, container, obj,
                                      data, ts.next(), policy.idx)
        request_log = []

        def capture(*args, **kwargs):
            request_log.append((args, kwargs))
        # run once
        fake_status_codes = [
            200,  # object update success
            200,  # object update success
            200,  # object update conflict
        ]
        with mocked_http_conn(*fake_status_codes, give_connect=capture):
            daemon.run_once()
        self.assertEqual(len(fake_status_codes), len(request_log))
        for request_args, request_kwargs in request_log:
            ip, part, method, path, headers, qs, ssl = request_args
            self.assertEqual(method, 'PUT')
            self.assertEqual(headers['X-Backend-Storage-Policy-Index'],
                             str(policy.idx))
        self.assertEqual(daemon.logger.get_increment_counts(),
                         {'successes': 1, 'unlinks': 1, 'async_pendings': 1})
if __name__ == '__main__':
    # Allow running this module directly as a test script.
    unittest.main()
| apache-2.0 |
andymason/istheansweryes | main.py | 1 | 3159 | from flask import Flask
from flask import render_template
from flask import request
from flask import redirect
from flask import url_for
from google.appengine.ext import ndb
from os import urandom
import base64
app = Flask(__name__)
# Note: We don't need to call run() since our application is embedded within
# the App Engine WSGI application server.
class Question(ndb.Model):
    """Datastore model for a yes/no question."""
    text = ndb.StringProperty()  # the question body
    secret = ndb.StringProperty()  # edit token handed to the creator
    status = ndb.BooleanProperty()  # presumably True means "yes" -- confirm against templates
    # NOTE(review): stores the creation time; the name looks like a typo
    # for "date", but it is queried by name elsewhere -- verify before renaming.
    data = ndb.DateTimeProperty(auto_now_add=True)
@app.route('/')
def hello():
    """Serve the landing page."""
    return render_template('index.html')
@app.route('/update/<id>', methods=['POST'])
def updateQuestion(id=None):
    """Apply posted edits to a question, gated by the creator's secret.

    Returns an error string for a bad key/secret, otherwise redirects
    back to the question's private edit page.
    """
    try:
        keyID = int(id)
    except (TypeError, ValueError):  # narrowed: was a bare except
        return 'Invalid key'
    questionKey = ndb.Key('Question', keyID)
    storedQuestion = questionKey.get()
    if storedQuestion is None:
        return 'Question not found'
    secret = request.form['secret']
    # Constant-time comparison so the secret can't be guessed via timing.
    if not hmac.compare_digest(storedQuestion.secret, secret):
        return 'Invalid secret'
    storedQuestion.text = request.form['question_text']
    # Checkbox semantics: present in the form means "yes".
    storedQuestion.status = 'status' in request.form
    storedQuestion.put()
    return redirect(url_for('editQuestion',
                            id=questionKey.id(),
                            secret=storedQuestion.secret))
@app.route('/create', methods=['POST'])
def createQuestion():
    """Create a new question and redirect to its private edit page."""
    question = Question()
    question.text = request.form['question_text']
    # URL-safe token the creator needs in order to edit the question later.
    question.secret = base64.urlsafe_b64encode(urandom(24))
    questionKey = question.put()
    return redirect(url_for('editQuestion',
                            id=questionKey.id(),
                            secret=question.secret))
@app.route('/<id>/<secret>', methods=['GET'])
def editQuestion(id=None, secret=None):
    """Render the private edit page, gated by the creator's secret."""
    try:
        keyID = int(id)
    except (TypeError, ValueError):  # narrowed: was a bare except
        return 'Invalid key'
    questionKey = ndb.Key('Question', keyID)
    storedQuestion = questionKey.get()
    if storedQuestion is None:
        return 'Question not found'
    # Constant-time comparison so the secret can't be guessed via timing.
    if not hmac.compare_digest(storedQuestion.secret, secret):
        return 'Invalid secret'
    return render_template('edit.html',
                           id=questionKey.id(),
                           status=storedQuestion.status,
                           text=storedQuestion.text,
                           secret=storedQuestion.secret)
@app.route('/all-questions', methods=['GET'])
def showQuestions(id=None):
    """List the 200 most recently created questions, newest first."""
    # NOTE(review): the `id` parameter is never supplied by this route and
    # is unused -- candidate for removal.
    questions = Question.query().order(-Question.data).fetch(200)
    return render_template('questions.html',
                           questions=questions)
@app.route('/<id>', methods=['GET'])
def showQuestion(id=None):
    """Public view of a question and its current yes/no status."""
    try:
        keyID = int(id)
    except (TypeError, ValueError):  # narrowed: was a bare except
        return 'not a valid key'
    questionKey = ndb.Key('Question', keyID)
    storedQuestion = questionKey.get()
    if storedQuestion is None:
        # Fixed typo: message previously read "Could't find question".
        return "Couldn't find question"
    return render_template('public.html',
                           status=storedQuestion.status,
                           text=storedQuestion.text)
@app.errorhandler(404)
def page_not_found(e):
    """Custom 404 handler: a plain-text body with the 404 status code."""
    body = 'Sorry, nothing at this URL.'
    return body, 404
| apache-2.0 |
d6e/coala | tests/parsing/CliParsingTest.py | 18 | 2415 | import argparse
import unittest
from coalib.parsing.CliParsing import parse_cli, check_conflicts
class CliParserTest(unittest.TestCase):
    """Tests for coala's CLI parsing (parse_cli / check_conflicts)."""
    def setUp(self):
        # Minimal parser mirroring the flags parse_cli understands.
        self.test_arg_parser = argparse.ArgumentParser()
        self.test_arg_parser.add_argument('-t', nargs='+', dest='test')
        self.test_arg_parser.add_argument('-S',
                                          '--settings',
                                          nargs='+',
                                          dest='settings')
    @staticmethod
    def dict_from_sections(parsed_sections):
        """Flatten parsed sections into {name: {(key, str(value)), ...}}
        so they can be compared order-independently."""
        parsed_dict = {}
        for section_name, section in parsed_sections.items():
            parsed_dict[section_name] = (
                set([(key,
                      str(value)) for key, value in section.contents.items()]))
        return parsed_dict
    def test_parse_cli(self):
        """Settings are routed to the right sections; repeated flags keep
        the last value; keys without a valid section land in 'default'."""
        # regular parse
        parsed_sections = parse_cli(
            ['-t', 'ignored1', 'ignored2',
             '-t', 'taken',
             '-S', 'section1.key1,section2.key2=value1,value2',
             'section2.key2=only_this_value',
             'SECTION2.key2a=k2a',
             'invalid.=shouldnt_be_shown',
             '.=not_either',
             '.key=only_in_default',
             'default_key1,default_key2=single_value',
             'default_key3=first_value,second_value'],
            arg_parser=self.test_arg_parser)
        expected_dict = {
            'default': {
                ("test", "taken"),
                ("key", "only_in_default"),
                ("default_key1", "single_value"),
                ("default_key2", "single_value"),
                ("default_key3", "first_value,second_value")},
            'section1': {
                ("key1", "value1,value2")},
            'section2': {
                ("key2", "only_this_value"),
                ("key2a", "k2a")}}
        self.assertEqual(parsed_sections["default"].name, "Default")
        self.assertEqual(self.dict_from_sections(parsed_sections),
                         expected_dict)
    def test_check_conflicts(self):
        """--save together with --no-config must exit with status 2;
        --no-config with explicit settings is accepted."""
        sections = parse_cli(arg_list=["--save", "--no-config"])
        with self.assertRaises(SystemExit) as cm:
            check_conflicts(sections)
        self.assertEqual(cm.exception.code, 2)
        sections = parse_cli(arg_list=["--no-config", "-S", "val=42"])
        self.assertTrue(check_conflicts(sections))
| agpl-3.0 |
abel-von/commons | src/python/twitter/common/metrics/metrics.py | 12 | 6055 | # ==================================================================================================
# Copyright 2011 Twitter, Inc.
# --------------------------------------------------------------------------------------------------
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this work except in compliance with the License.
# You may obtain a copy of the License in the LICENSE file, or at:
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==================================================================================================
from twitter.common.lang import Compatibility, Singleton
from .gauge import (
Gauge,
MutatorGauge,
NamedGauge,
namablegauge)
class Observable(object):
  """
  Mixin granting an object its own metric namespace.

  Mix in Observable and register gauges against ``self.metrics``; the
  owner can then attach the object into a metric tree, e.g. via

    >>> RootMetrics().register_observable('object_namespace', my_object)
  """

  @property
  def metrics(self):
    """
    Lazily-created Metrics namespace owned by this instance.
    """
    try:
      return self._observable_metrics
    except AttributeError:
      self._observable_metrics = Metrics()
      return self._observable_metrics
class MetricProvider(object):
  """
  Interface for anything that can produce a flat snapshot of metrics.
  """

  def sample(self):
    """
    Return a dict mapping metric name (string) to its sampled number.

    Subclasses must override.
    """
    raise NotImplementedError
class MetricRegistry(object):
  """
  Interface for a hierarchical registry of named gauges.
  """

  def scope(self, name):
    """
    Return a (potentially memoized) child registry with the given name.
    """
    raise NotImplementedError

  def register(self, gauge):
    """
    Register a gauge (a name => sample mapper) with this registry.
    """
    raise NotImplementedError

  def unregister(self, name):
    """
    Remove the gauge registered under the given name.
    """
    raise NotImplementedError

  def mutator(self, name):
    """
    Return a mutator function for the gauge registered under name.
    """
    raise NotImplementedError
class Metrics(MetricRegistry, MetricProvider):
    """
    Metric collector: a concrete registry of gauges plus nested child scopes,
    sampled recursively into a flat name => value dictionary.
    """
    # NOTE(review): MetricRegistry.mutator() is not overridden here, so calling
    # it on a Metrics instance raises NotImplementedError - confirm intended.

    class Error(Exception): pass

    @classmethod
    def coerce_value(cls, value):
        # Pass primitives (numbers, strings, bools, None) through unchanged,
        # recurse into lists/dicts, and fall back to str() for anything else
        # so that samples remain serializable.
        if isinstance(value, Compatibility.numeric + Compatibility.string + (bool,)):
            return value
        elif value is None:
            return value
        elif isinstance(value, list):
            return [cls.coerce_value(v) for v in value]
        elif isinstance(value, dict):
            return dict((cls.coerce_value(k), cls.coerce_value(v)) for (k, v) in value.items())
        else:
            return str(value)

    @classmethod
    def coerce_metric(cls, metric_tuple):
        # (name, gauge) -> (name, coerced sample), or None if reading/coercing
        # the gauge raises ValueError (such entries are filtered out in sample()).
        name, value = metric_tuple
        try:
            return (name, cls.coerce_value(value.read()))
        except ValueError:
            return None

    def __init__(self):
        # name => gauge registered directly in this scope
        self._metrics = {}
        # name => child Metrics scope (or an Observable's namespace)
        self._children = {}

    def scope(self, name):
        """Return the memoized child scope named `name`, creating it if needed."""
        if not isinstance(name, Compatibility.string):
            raise TypeError('Scope names must be strings, got: %s' % type(name))
        if name not in self._children:
            self._children[name] = Metrics()
        return self._children[name]

    def register_observable(self, name, observable):
        """Mount an Observable's metric namespace as the child scope `name`."""
        if not isinstance(name, Compatibility.string):
            raise TypeError('Scope names must be strings, got: %s' % type(name))
        if not isinstance(observable, Observable):
            raise TypeError('observable must be an Observable, got: %s' % type(observable))
        self._children[name] = observable.metrics

    def unregister_observable(self, name):
        """Remove the child scope `name`; returns it, or None if absent."""
        if not isinstance(name, Compatibility.string):
            raise TypeError('Unregister takes a string name!')
        return self._children.pop(name, None)

    def register(self, gauge):
        """
        Register a gauge in this scope. A plain string is converted into a
        MutatorGauge of that name. Returns the registered gauge.
        """
        if isinstance(gauge, Compatibility.string):
            gauge = MutatorGauge(gauge)
        if not isinstance(gauge, NamedGauge) and not namablegauge(gauge):
            raise TypeError('Must register either a string or a Gauge-like object! Got %s' % gauge)
        self._metrics[gauge.name()] = gauge
        return gauge

    def unregister(self, name):
        """Remove the gauge registered as `name`; returns it, or None if absent."""
        if not isinstance(name, Compatibility.string):
            raise TypeError('Unregister takes a string name!')
        return self._metrics.pop(name, None)

    @classmethod
    def sample_name(cls, scope_name, sample_name):
        # Child samples are namespaced as '<scope>.<metric>'.
        return '.'.join([scope_name, sample_name])

    def sample(self):
        """Sample this scope's gauges plus, dot-prefixed, all child scopes."""
        samples = dict(filter(None, map(self.coerce_metric, self._metrics.items())))
        for scope_name, scope in self._children.items():
            samples.update((self.sample_name(scope_name, sample_name), sample_value)
                           for (sample_name, sample_value) in scope.sample().items())
        return samples
class CompoundMetrics(MetricProvider):
    """MetricProvider that merges the samples of several providers.

    On key collisions, providers listed later take precedence.
    """

    def __init__(self, *providers):
        if not all(isinstance(p, MetricProvider) for p in providers):
            raise TypeError('CompoundMetrics must take a collection of MetricProviders')
        self._providers = providers

    def sample(self):
        """Return the union of all providers' samples as one dictionary."""
        merged = {}
        for provider in self._providers:
            merged.update(provider.sample())
        return merged
class MemoizedMetrics(MetricProvider):
    """Wraps a MetricProvider and remembers the most recent sample taken.

    The cached value is exposed via `memoized_sample` without re-sampling
    the underlying provider.
    """

    def __init__(self, provider):
        if not isinstance(provider, MetricProvider):
            raise TypeError('MemoizedMetrics must take a MetricProvider')
        self._provider = provider
        self._sample = {}

    def sample(self):
        """Sample the wrapped provider, cache the result and return it."""
        self._sample = self._provider.sample()
        return self._sample

    @property
    def memoized_sample(self):
        """The result of the most recent sample() call ({} if never sampled)."""
        return self._sample
class RootMetrics(Metrics, Singleton):
    """
    Root singleton instance of the metrics.
    """
    # Guard so the Singleton's single shared instance only has its Metrics
    # state initialized once, no matter how often RootMetrics() is invoked.
    _INIT = False

    def __init__(self):
        if not RootMetrics._INIT:
            Metrics.__init__(self)
            RootMetrics._INIT = True

    # For testing: wipe all registered metrics and child scopes.
    def clear(self):
        Metrics.__init__(self)
| apache-2.0 |
jballanc/openmicroscopy | components/tools/OmeroWeb/omeroweb/webtest/views.py | 1 | 28338 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# Standard library
import logging
import math
import random
import traceback
from cStringIO import StringIO

# Django
from django.core.urlresolvers import reverse
# Http404 added: it is raised in render_planes_matrix but was never imported
from django.http import HttpResponseRedirect, HttpResponse, Http404
from django.shortcuts import render_to_response

# OMERO / project
import omero
import omero.gateway
from omero.rtypes import rint, rstring
from omeroweb.connector import Server
from omeroweb.connector import Connector
from omeroweb.http import HttpJPEGResponse
from omeroweb.webclient.decorators import login_required, render_response
from omeroweb.webgateway import views as webgateway_views
from omeroweb.webgateway.views import _get_prepared_image

import settings
# Module-level logger for this views module.
logger = logging.getLogger(__name__)

# Pillow is preferred; fall back to the legacy top-level 'Image' package name.
# Import failure is only logged (not raised) so that views which do not touch
# pixel data keep working without an imaging library.
try:
    from PIL import Image
except:  # pragma: nocover
    try:
        import Image
    except:
        logger.error('No Pillow installed, line plots and split channel will fail!')
@login_required()  # handles login (or redirects to webclient login); connection arrives via **kwargs
def dataset(request, datasetId, conn=None, **kwargs):
    """'Hello World' webtest view: render a single Dataset by ID.

    From the tutorial on http://trac.openmicroscopy.org.uk/ome/wiki/OmeroWeb
    (before OMERO 4.3 the lookup was conn.getDataset(datasetId)).
    """
    return render_to_response(
        'webtest/dataset.html',
        {'dataset': conn.getObject("Dataset", datasetId)})
@login_required(setGroupContext=True)  # wrapper handles login (or redirects to webclient login). Connection passed in **kwargs
def index(request, conn=None, **kwargs):
    """
    Webtest home page: shows either the images named in the request
    ('Image' is a comma-delimited list of IDs) or one randomly-chosen image,
    plus a random non-empty dataset for use in links to the other demo pages.
    """
    # use Image IDs from request...
    if request.REQUEST.get("Image", None):
        imageIds = request.REQUEST.get("Image", None)
        ids = [int(iid) for iid in imageIds.split(",")]
        images = list(conn.getObjects("Image", ids))
    else:
        # OR find a random image and dataset to display & can be used in links to other pages
        # NOTE(review): random.choice raises IndexError if the user owns no
        # images/datasets - confirm this is acceptable for a demo page.
        all_images = list(conn.getObjects("Image"))
        img = random.choice(all_images)
        images = [img]
    imgIds = ",".join([str(img.getId()) for img in images])

    # get a random dataset (making sure we get one that has some images in it)
    all_datasets = list(conn.getObjects("Dataset"))
    dataset = random.choice(all_datasets)
    attempts = 0
    while (dataset.countChildren() == 0 and attempts < 10):
        dataset = random.choice(all_datasets)
        attempts += 1
    return render_to_response('webtest/index.html', {'images': images, 'imgIds': imgIds, 'dataset': dataset})
@login_required()
def channel_overlay_viewer(request, imageId, conn=None, **kwargs):
    """
    Viewer for overlaying separate channels from the same image or different images
    and adjusting horizontal and vertical alignment of each.

    Channels are mapped to red/green/blue from the image's rendering settings
    where possible, and any previously-saved per-channel z/x/y offsets (stored
    as a CommentAnnotation) are parsed and passed to the template as defaults.
    """
    image = conn.getObject("Image", imageId)
    default_z = image.getSizeZ()/2   # NB: integer division under Python 2

    # try to work out which channels should be 'red', 'green', 'blue' based on rendering settings
    red = None
    green = None
    blue = None
    notAssigned = []
    channels = []
    for i, c in enumerate(image.getChannels()):
        channels.append( {'name':c.getName()} )
        if c.getColor().getRGB() == (255, 0, 0) and red == None:
            red = i
        elif c.getColor().getRGB() == (0, 255, 0) and green == None:
            green = i
        elif c.getColor().getRGB() == (0, 0, 255) and blue == None:
            blue = i
        else:
            notAssigned.append(i)
    # any not assigned - give them the first free colour slot
    for i in notAssigned:
        if red == None: red = i
        elif green == None: green = i
        elif blue == None: blue = i

    # see if we have z, x, y offsets already annotated on this image
    # added by javascript in viewer. E.g. 0|z:1_x:0_y:0,1|z:0_x:10_y:0,2|z:0_x:0_y:0
    ns = "omero.web.channel_overlay.offsets"
    comment = image.getAnnotation(ns)
    if comment == None: # maybe offset comment has been added manually (no ns)
        for ann in image.listAnnotations():
            if isinstance(ann, omero.gateway.CommentAnnotationWrapper):
                if ann.getValue().startswith("0|z:"):
                    comment = ann
                    break
    if comment != None:
        # parse "index|key:val_key:val,..." into per-channel offset entries
        offsets = comment.getValue()
        for o in offsets.split(","):
            index,zxy = o.split("|",1)
            if int(index) < len(channels):
                keyVals = zxy.split("_")
                for kv in keyVals:
                    key, val = kv.split(":")
                    # stored z offsets are relative to the default Z plane
                    if key == "z": val = int(val) + default_z
                    channels[int(index)][key] = int(val)

    return render_to_response('webtest/demo_viewers/channel_overlay_viewer.html', {
        'image': image, 'channels':channels, 'default_z':default_z, 'red': red, 'green': green, 'blue': blue})
@login_required()
def render_channel_overlay(request, conn=None, **kwargs):
    """
    Overlays separate channels (red, green, blue) from the same image or
    different images, optionally translating each one independently.

    The request must carry plane info:
        planes=0|imageId:z:c:t$x:shift_y:shift,1|imageId...
    E.g. planes=0|2305:7:0:0$x:-50_y:10,1|2305:7:1:0,2|2305:7:2:0&red=2&blue=0&green=1
    The 'red', 'green' and 'blue' parameters each name a plane index that
    feeds the corresponding colour channel of the merged jpeg.

    @return: An HttpJPEGResponse wrapping the merged jpeg, or a plain
             HttpResponse describing the required parameters on error.
    """
    # parse the 'planes' parameter into {index: {imageId, z, c, t, x?, y?}}
    planes = {}
    p = request.REQUEST.get('planes', None)
    if p is None:
        return HttpResponse("Request needs plane info to render jpeg. E.g. ?planes=0|2305:7:0:0$x:-50_y:10,1|2305:7:1:0,2|2305:7:2:0&red=2&blue=0&green=1")
    for plane in p.split(','):
        infoMap = {}
        plane_info = plane.split('|')
        key = plane_info[0].strip()
        info = plane_info[1].strip()
        shift = None
        if info.find('$') >= 0:
            info, shift = info.split('$')
        imageId, z, c, t = [int(i) for i in info.split(':')]
        infoMap['imageId'] = imageId
        infoMap['z'] = z
        infoMap['c'] = c
        infoMap['t'] = t
        if shift is not None:
            # optional per-plane pixel offsets, e.g. "x:-50_y:10"
            for kv in shift.split("_"):
                k, v = kv.split(":")
                infoMap[k] = v
        planes[key] = infoMap

    # which plane index (if any) feeds each colour channel, e.g. red=0&green=2
    red = request.REQUEST.get('red', None)
    green = request.REQUEST.get('green', None)
    blue = request.REQUEST.get('blue', None)

    def translate(image, deltaX, deltaY):
        """Shift a PIL image by (deltaX, deltaY) pixels, padding with black."""
        xsize, ysize = image.size
        bg = Image.new(image.mode, image.size)
        x = abs(min(deltaX, 0))
        pasteX = max(0, deltaX)
        y = abs(min(deltaY, 0))
        pasteY = max(0, deltaY)
        part = image.crop((x, y, xsize - deltaX, ysize - deltaY))
        bg.paste(part, (pasteX, pasteY))
        return bg

    def getPlane(planeInfo):
        """Returns the rendered plane split into a single channel (ready for merging)."""
        img = conn.getObject("Image", planeInfo['imageId'])
        img.setActiveChannels((planeInfo['c'] + 1,))
        img.setGreyscaleRenderingModel()
        rgb = img.renderImage(planeInfo['z'], planeInfo['t'])
        # somehow this line is required to prevent an error at 'rgb.split()'
        rgb.save(StringIO(), 'jpeg', quality=90)
        r, g, b = rgb.split()  # go from RGB to L
        x = int(planeInfo.get('x', 0))
        y = int(planeInfo.get('y', 0))
        if x or y:
            r = translate(r, x, y)
        return r

    redChannel = None
    greenChannel = None
    blueChannel = None
    if red is not None and red in planes:
        redChannel = getPlane(planes[red])
    if green is not None and green in planes:
        greenChannel = getPlane(planes[green])
    if blue is not None and blue in planes:
        blueChannel = getPlane(planes[blue])

    if redChannel is not None:
        size = redChannel.size
    elif greenChannel is not None:
        size = greenChannel.size
    elif blueChannel is not None:
        size = blueChannel.size
    else:
        # Bug fix: previously this fell through with 'size' undefined and the
        # view died with a NameError (HTTP 500). Report the problem instead.
        return HttpResponse("No plane was assigned to red, green or blue. "
                            "Use e.g. ?planes=0|2305:7:0:0&red=0")

    # fill any unused colour channel with black, then merge to RGB
    black = Image.new('L', size)
    redChannel = redChannel or black
    greenChannel = greenChannel or black
    blueChannel = blueChannel or black
    merge = Image.merge("RGB", (redChannel, greenChannel, blueChannel))

    # convert from PIL back to jpeg data for the response
    rv = StringIO()
    compression = 0.9
    merge.save(rv, 'jpeg', quality=int(compression * 100))
    return HttpJPEGResponse(rv.getvalue())
@login_required()
def add_annotations (request, conn=None, **kwargs):
    """
    Creates a L{omero.gateway.CommentAnnotationWrapper} and adds it to the images according
    to variables in the http request.

    @param request: The django L{django.core.handlers.wsgi.WSGIRequest}
        - imageIds: A comma-delimited list of image IDs
        - comment: The text to add as a comment to the images
        - ns: Namespace for the annotation
        - replace: If "true", try to replace existing annotation with same ns

    @return: A simple html page with a success message
    """
    idList = request.REQUEST.get('imageIds', None)    # comma - delimited list
    if idList:
        imageIds = [long(i) for i in idList.split(",")]
    else: imageIds = []
    comment = request.REQUEST.get('comment', None)
    ns = request.REQUEST.get('ns', None)
    replace = request.REQUEST.get('replace', False) in ('true', 'True')

    updateService = conn.getUpdateService()
    ann = omero.model.CommentAnnotationI()
    ann.setTextValue(rstring( str(comment) ))
    if ns != None:
        ann.setNs(rstring( str(ns) ))
    ann = updateService.saveAndReturnObject(ann)
    # NOTE(review): annId is never used below - confirm safe to remove.
    annId = ann.getId().getValue()

    images = []
    for iId in imageIds:
        image = conn.getObject("Image", iId)
        if image == None: continue
        if replace and ns != None:
            # update the text of an existing annotation with this ns instead
            # of linking the new one (image is then NOT added to 'images')
            oldComment = image.getAnnotation(ns)
            if oldComment != None:
                oldComment.setTextValue(rstring( str(comment) ))
                updateService.saveObject(oldComment)
                continue
        l = omero.model.ImageAnnotationLinkI()
        parent = omero.model.ImageI(iId, False) # use unloaded object to avoid update conflicts
        l.setParent(parent)
        l.setChild(ann)
        updateService.saveObject(l)
        images.append(image)
    return render_to_response('webtest/util/add_annotations.html', {'images':images, 'comment':comment})
@login_required()
def split_view_figure (request, conn=None, **kwargs):
    """
    Generates an html page displaying a number of images in a grid with channels split into different columns.
    The page also includes a form for modifying various display parameters and re-submitting
    to regenerate this page.
    If no 'imageIds' parameter (comma-delimited list) is found in the 'request', the page generated is simply
    a form requesting image IDs.
    If there are imageIds, the first ID (image) is used to generate the form based on channels of that image.

    @param request: The django L{http request <django.core.handlers.wsgi.WSGIRequest>}

    @return: The http response - html page displaying split view figure.
    """
    query_string = request.META["QUERY_STRING"]

    idList = request.REQUEST.get('imageIds', None)    # comma - delimited list
    idList = request.REQUEST.get('Image', idList)     # we also support 'Image'
    if idList:
        imageIds = [long(i) for i in idList.split(",")]
    else:
        imageIds = []

    split_grey = request.REQUEST.get('split_grey', None)
    merged_names = request.REQUEST.get('merged_names', None)
    proj = request.REQUEST.get('proj', "normal")      # intmean, intmax, normal
    # width/height default to 0 = "use the first image's dimensions" (below)
    try:
        w = request.REQUEST.get('width', 0)
        width = int(w)
    except:
        width = 0
    try:
        h = request.REQUEST.get('height', 0)
        height = int(h)
    except:
        height = 0

    # returns a list of channel info from the image, overridden if values in request
    def getChannelData(image):
        channels = []
        i = 0;
        channel_data = image.getChannels()
        if channel_data is None:    # E.g. failed import etc
            return None
        for i, c in enumerate(channel_data):
            name = request.REQUEST.get('cName%s' % i, c.getLogicalChannel().getName())
            # if we have channel info from a form, we know that checkbox:None is unchecked (not absent)
            if request.REQUEST.get('cName%s' % i, None):
                active = (None != request.REQUEST.get('cActive%s' % i, None) )
                merged = (None != request.REQUEST.get('cMerged%s' % i, None) )
            else:
                active = True
                merged = True
            colour = c.getColor()
            if colour is None:
                return None     # rendering engine problems
            colour = colour.getHtml()
            start = request.REQUEST.get('cStart%s' % i, c.getWindowStart())
            end = request.REQUEST.get('cEnd%s' % i, c.getWindowEnd())
            render_all = (None != request.REQUEST.get('cRenderAll%s' % i, None) )
            channels.append({"name": name, "index": i, "active": active, "merged": merged, "colour": colour,
                "start": start, "end": end, "render_all": render_all})
        return channels

    channels = None
    images = []
    for iId in imageIds:
        image = conn.getObject("Image", iId)
        if image == None: continue
        default_z = image.getSizeZ()/2   # image.getZ() returns 0 - should return default Z?
        # need z for render_image even if we're projecting
        images.append({"id":iId, "z":default_z, "name": image.getName() })
        # channel settings come from the first loadable image
        if channels is None:
            channels = getChannelData(image)
        if height == 0:
            height = image.getSizeY()
        if width == 0:
            width = image.getSizeX()

    if channels is None:
        return HttpResponse("Couldn't load channels for this image")

    size = {"height": height, "width": width}

    # build one webgateway 'c' rendering string per active channel
    # (single-channel columns), plus a final string for the merged column.
    c_strs = []
    if channels:    # channels will be none when page first loads (no images)
        indexes = range(1, len(channels)+1)
        c_string = ",".join(["-%s" % str(c) for c in indexes])  # E.g. -1,-2,-3,-4
        mergedFlags = []
        for i, c, in enumerate(channels):
            if c["render_all"]:
                levels = "%s:%s" % (c["start"], c["end"])
            else: levels = ""
            if c["active"]:
                onFlag = str(i+1) + "|"
                onFlag += levels
                if split_grey: onFlag += "$FFFFFF"   # E.g. 1|100:505$0000FF
                c_strs.append( c_string.replace("-%s" % str(i+1), onFlag) )   # E.g. 1,-2,-3 or 1|$FFFFFF,-2,-3
            if c["merged"]:
                mergedFlags.append("%s|%s" % (i+1, levels))   # E.g. '1|200:4000'
            else: mergedFlags.append("-%s" % (i+1))   # E.g. '-1'
        # turn merged channels on in the last image
        c_strs.append( ",".join(mergedFlags) )

    template = kwargs.get('template', 'webtest/demo_viewers/split_view_figure.html')
    return render_to_response(template, {'images':images, 'c_strs': c_strs,'imageIds':idList,
        'channels': channels, 'split_grey':split_grey, 'merged_names': merged_names, 'proj': proj, 'size': size, 'query_string':query_string})
@login_required()
def dataset_split_view (request, datasetId, conn=None, **kwargs):
    """
    Generates a web page that displays a dataset in two panels, with the option to choose different
    rendering settings (channels on/off) for each panel. It uses the render_image url for each
    image, generating the full sized image which is scaled down to view.

    The page also includes a form for editing the channel settings and display size of images.
    This form resubmits to this page and displays the page again with updated parameters.

    @param request: The django L{http request <django.core.handlers.wsgi.WSGIRequest>}
    @param datasetId: The ID of the dataset.
    @type datasetId: Number.

    @return: The http response - html page displaying split view figure.
    """
    dataset = conn.getObject("Dataset", datasetId)

    try:
        size = request.REQUEST.get('size', 100)
        size = int(size)
    except:
        size = 100

    # returns a list of channel info from the image, overridden if values in request
    def getChannelData(image):
        channels = []
        i = 0;
        chs = image.getChannels()
        if chs is None:
            return []
        for i, c in enumerate(chs):
            if c is None:
                continue
            name = c.getLogicalChannel().getName()
            # if we have channel info from a form, we know that checkbox:None is unchecked (not absent)
            if request.REQUEST.get('cStart%s' % i, None):
                active_left = (None != request.REQUEST.get('cActiveLeft%s' % i, None) )
                active_right = (None != request.REQUEST.get('cActiveRight%s' % i, None) )
            else:
                active_left = True
                active_right = True
            colour = c.getColor()
            if colour is None:
                continue # serious rendering engine problems
            colour = colour.getHtml();
            start = request.REQUEST.get('cStart%s' % i, c.getWindowStart())
            end = request.REQUEST.get('cEnd%s' % i, c.getWindowEnd())
            render_all = (None != request.REQUEST.get('cRenderAll%s' % i, None) )
            channels.append({"name": name, "index": i, "active_left": active_left, "active_right": active_right,
                "colour": colour, "start": start, "end": end, "render_all": render_all})
        return channels

    images = []
    channels = None
    for image in dataset.listChildren():
        # channel settings come from the first image that yields any
        if channels == None or len(channels) == 0:
            channels = getChannelData(image)
        default_z = image.getSizeZ()/2  # image.getZ() returns 0 - should return default Z?
        # need z for render_image even if we're projecting
        images.append({"id":image.getId(), "z":default_z, "name": image.getName() })

    if channels is None:
        return HttpResponse("<p class='center_message'>No Images in Dataset<p>")

    # build the webgateway 'c' rendering strings for the left and right panels
    # E.g. '1|200:4000,-2' (negative index = channel off)
    indexes = range(1, len(channels)+1)
    # NOTE(review): c_string is computed but never used below - confirm safe to remove.
    c_string = ",".join(["-%s" % str(c) for c in indexes])  # E.g. -1,-2,-3,-4

    leftFlags = []
    rightFlags = []
    for i, c, in enumerate(channels):
        if c["render_all"]:
            levels = "%s:%s" % (c["start"], c["end"])
        else: levels = ""
        if c["active_left"]:
            leftFlags.append("%s|%s" % (i+1, levels))   # E.g. '1|200:4000'
        else: leftFlags.append("-%s" % (i+1))   # E.g. '-1'
        if c["active_right"]:
            rightFlags.append("%s|%s" % (i+1, levels))  # E.g. '1|200:4000'
        else: rightFlags.append("-%s" % (i+1))  # E.g. '-1'

    c_left = ",".join(leftFlags)
    c_right = ",".join(rightFlags)

    template = kwargs.get('template', 'webtest/webclient_plugins/dataset_split_view.html')
    return render_to_response(template, {'dataset': dataset, 'images': images,
        'channels':channels, 'size': size, 'c_left': c_left, 'c_right': c_right})
@login_required()
def image_dimensions (request, imageId, conn=None, **kwargs):
    """
    Prepare data to display various dimensions of a multi-dim image as axes of a grid of image planes.
    E.g. x-axis = Time, y-axis = Channel.
    """
    image = conn.getObject("Image", imageId)
    if image is None:
        return render_to_response('webtest/demo_viewers/image_dimensions.html', {})

    # 'g' (greyscale) if any 'mode' parameter was supplied, else 'c' (colour)
    mode = request.REQUEST.get('mode', None) and 'g' or 'c'
    dims = {'Z':image.getSizeZ(), 'C': image.getSizeC(), 'T': image.getSizeT()}

    # pick the grid axes from the request, defaulting to C (x) vs Z (y)
    default_yDim = 'Z'
    xDim = request.REQUEST.get('xDim', 'C')
    if xDim not in dims.keys():
        xDim = 'C'
    yDim = request.REQUEST.get('yDim', default_yDim)
    if yDim not in dims.keys():
        yDim = 'Z'

    xFrames = int(request.REQUEST.get('xFrames', 5))
    xSize = dims[xDim]
    yFrames = int(request.REQUEST.get('yFrames', 10))
    ySize = dims[yDim]
    # clamp the requested frame counts to the actual dimension sizes
    xFrames = min(xFrames, xSize)
    yFrames = min(yFrames, ySize)
    xRange = range(xFrames)
    yRange = range(yFrames)

    # 2D array of (theZ, theC, theT)
    grid = []
    for y in yRange:
        grid.append([])
        for x in xRange:
            iid, theZ, theC, theT = image.id, 0,None,0
            if xDim == 'Z':
                theZ = x
            if xDim == 'C':
                theC = x
            if xDim == 'T':
                theT = x
            if yDim == 'Z':
                theZ = y
            if yDim == 'C':
                theC = y
            if yDim == 'T':
                theT = y
            # channel index is 1-based in the rendering url; None = merged
            grid[y].append( (iid, theZ, theC is not None and theC+1 or None, theT) )

    size = {"height": 125, "width": 125}
    return render_to_response('webtest/demo_viewers/image_dimensions.html', {'image':image, 'grid': grid,
        "size": size, "mode":mode, 'xDim':xDim, 'xRange':xRange, 'yRange':yRange, 'yDim':yDim,
        'xFrames':xFrames, 'yFrames':yFrames})
@login_required()
def image_rois(request, imageId, conn=None, **kwargs):
    """Show a page of ROI thumbnails for the specified image."""
    result = conn.getRoiService().findByImage(long(imageId), None, conn.SERVICE_OPTS)
    return render_to_response(
        'webtest/demo_viewers/image_rois.html',
        {'roiIds': [roi.getId().getValue() for roi in result.rois]})
def webgateway_templates(request, base_template):
    """Render the named webgateway template with an empty context.

    Similar functionality to django.views.generic.simple.direct_to_template.
    """
    return render_to_response('webtest/webgateway/%s.html' % base_template, {})
@login_required()
@render_response()
def webclient_templates(request, base_template, **kwargs):
    """Return the named template for the render_response decorator to render.

    Similar functionality to django.views.generic.simple.direct_to_template.
    """
    return {'template': 'webtest/webgateway/%s.html' % base_template}
@login_required()
def image_viewer(request, iid=None, conn=None, **kwargs):
    """Show pixel data as images by delegating to the webgateway full viewer.

    Uses a share connection if appropriate; the image ID may come from the
    url ('iid') or from an 'image' request parameter.
    """
    image_id = request.REQUEST.get('image') if iid is None else iid
    return webgateway_views.full_viewer(
        request, image_id, _conn=conn,
        template='webtest/webclient_plugins/center_plugin.fullviewer.html',
        **kwargs)
@login_required()
def stack_preview(request, imageId, conn=None, **kwargs):
    """Show a small preview of an image's Z-stack.

    Picks five roughly evenly-spaced planes: first, the three quartiles and last.
    """
    img = conn.getObject("Image", imageId)
    size_z = img.getSizeZ()
    planes = [0, int(size_z * 0.25), int(size_z * 0.5), int(size_z * 0.75), size_z - 1]
    return render_to_response('webtest/stack_preview.html',
                              {'imageId': imageId,
                               'image_name': img.getName(),
                               'z_indexes': planes})
@login_required()
def render_performance (request, obj_type, id, conn=None, **kwargs):
    """ Test rendering performance for all planes in an image, or for all
    images in a plate or dataset (the template then fetches each jpeg). """
    context = {}

    if obj_type == 'image':
        context['imageId'] = id
        image = conn.getObject("Image", id)
        image._prepareRenderingEngine()
        # If a 'BIG Image' (tiled pyramid): render a capped number of tiles
        if image._re.requiresPixelsPyramid():
            MAX_TILES = 50
            tileList = []
            tile_w, tile_h = image._re.getTileSize()
            cols = image.getSizeX() / tile_w    # NB: Python 2 integer division
            rows = image.getSizeY() / tile_h
            tileList = [ {'col':c, 'row':r} for r in range(rows) for c in range(cols)]
            if (len(tileList) > 2*MAX_TILES):
                tileList = tileList[ (len(tileList)/2):]    # start in middle of list (looks nicer!)
            tileList = tileList[:MAX_TILES]
            context['tileList'] = tileList
        # A regular Image: enumerate every Z/C/T plane to render
        else:
            zctList = []
            if request.REQUEST.get('split_channels') == 'true':
                context['split_channels'] = True
                sizeC = image.getSizeC()
            else:
                sizeC = 1
            context['sizeX'] = image.getSizeX()
            context['sizeY'] = image.getSizeY()
            context['sizeT'] = image.getSizeT()
            context['sizeZ'] = image.getSizeZ()
            for z in range(image.getSizeZ()):
                for c in range(sizeC):
                    for t in range(image.getSizeT()):
                        zct = {'z':z, 't':t}
                        if sizeC > 1:
                            zct['c'] = c+1    # 1-based channel index for the render url
                        zctList.append(zct)
            context['zctList'] = zctList
    # A Plate: collect the image ID of every WellSample
    elif obj_type == 'plate':
        imageIds = []
        plate = conn.getObject("Plate", id)
        for well in plate._listChildren():
            for ws in well.copyWellSamples():
                imageIds.append(ws.image.id.val)
        context = {'plate':plate, 'imageIds':imageIds}
    elif obj_type == "dataset":
        dataset = conn.getObject("Dataset", id)
        imageIds = [i.getId() for i in dataset.listChildren()]
        context = {'imageIds':imageIds}
    return render_to_response('webtest/demo_viewers/render_performance.html', context)
@login_required()
def render_planes_matrix (request, iid, conn=None, **kwargs):
    """
    Renders the image as a 2D matrix of planes, with z horizontal and t vertical.
    Test whether using a single rendering engine / request for a stack
    is faster than opening a rendering engine for each plane / request.
    Use zStart, zEnd, tStart, tEnd in request to get a sub-matrix.

    @param request: http request
    @param iid: image ID
    @param conn: L{omero.gateway.BlitzGateway} connection
    @return: http response wrapping jpeg
    """
    # NOTE(review): Http404 is raised below but does not appear in this
    # module's imports - confirm 'from django.http import Http404' is present.
    pi = _get_prepared_image(request, iid, conn=conn)
    if pi is None:
        raise Http404
    img, compress_quality = pi
    sizeX = img.getSizeX()
    sizeY = img.getSizeY()
    sizeZ = img.getSizeZ()
    sizeT = img.getSizeT()

    # optional inclusive Z/T sub-range from the request
    zStart = int(request.REQUEST.get('zStart', 0))
    zEnd = int(request.REQUEST.get('zEnd', sizeZ-1))
    sizeZ = zEnd - zStart + 1
    tStart = int(request.REQUEST.get('tStart', 0))
    tEnd = int(request.REQUEST.get('tEnd', sizeT-1))
    sizeT = tEnd - tStart + 1

    rowCount = sizeZ
    colCount = sizeT
    w = colCount * sizeX
    h = rowCount * sizeY
    # Firefox bug limits w & h to 32767
    # https://bugzilla.mozilla.org/show_bug.cgi?id=591822s
    # Repeatedly halve one dimension (doubling the other) until both fit.
    while(h > 32767):
        rowCount = int(math.ceil(float(rowCount)/2))
        colCount = colCount * 2
        w = colCount * sizeX
        h = rowCount * sizeY
    while(w > 32767):
        colCount = int(math.ceil(float(colCount)/2))
        rowCount = rowCount * 2
        w = colCount * sizeX
        h = rowCount * sizeY
    matrix = Image.new("RGBA", (w,h))

    for z in range(zStart, zEnd + 1):
        for t in range(tStart, tEnd + 1):
            jpeg = img.renderImage(z,t, compression=compress_quality)
            if jpeg is None:
                raise Http404
            row = z - zStart
            col = t - tStart
            # Handle large Z OR large T: planes that overflow the reduced
            # row/column counts wrap into the extra columns/rows.
            if row >= rowCount:
                row = z % rowCount
                col = col + (sizeT * (z/rowCount))    # NB: Python 2 integer division
            elif col >= colCount:
                col = t % colCount
                row = row + (sizeZ * (t/colCount))
            px = col * sizeX
            py = row * sizeY
            matrix.paste(jpeg, (px, py))

    # convert from PIL back to string image data
    rv = StringIO()
    compression = 0.9
    matrix.save(rv, 'jpeg', quality=int(compression*100))
    jpeg_data = rv.getvalue()
    rsp = HttpJPEGResponse(jpeg_data)
    return rsp
| gpl-2.0 |
yanikou19/pymatgen | pymatgen/symmetry/structure.py | 3 | 2467 | # coding: utf-8
from __future__ import division, unicode_literals
"""
This module implements symmetry-related structure forms.
"""
__author__ = "Shyue Ping Ong"
__copyright__ = "Copyright 2012, The Materials Project"
__version__ = "0.1"
__maintainer__ = "Shyue Ping Ong"
__email__ = "shyuep@gmail.com"
__date__ = "Mar 9, 2012"
import numpy as np
from pymatgen.core.structure import Structure
class SymmetrizedStructure(Structure):
    """
    This class represents a symmetrized structure, i.e. a structure
    where the spacegroup and symmetry operations are defined. This class is
    typically not called but instead is typically obtained by calling
    pymatgen.symmetry.analyzer.SpacegroupAnalyzer.get_symmetrized_structure.

    Args:
        structure (Structure): Original structure
        spacegroup (Spacegroup): An input spacegroup from SpacegroupAnalyzer.
        equivalent_positions: Equivalent positions from SpacegroupAnalyzer.

    .. attribute: equivalent_indices

        indices of structure grouped by equivalency
    """

    def __init__(self, structure, spacegroup, equivalent_positions):
        Structure.__init__(self, structure.lattice,
                           [site.species_and_occu for site in structure],
                           structure.frac_coords,
                           site_properties=structure.site_properties)
        self._spacegroup = spacegroup
        # np.unique with return_inverse gives, for each site, the index of
        # its equivalence class within the unique representatives u.
        u, inverse = np.unique(equivalent_positions, return_inverse=True)
        self.equivalent_indices = [[] for _ in range(len(u))]
        self._equivalent_sites = [[] for _ in range(len(u))]
        # Bug fix: the original wrote "for i, inv in enumerate(inv)", shadowing
        # the inverse-index array with its own elements. The result happened
        # to be the same, but the shadowing was fragile and confusing.
        for i, group_index in enumerate(inverse):
            self.equivalent_indices[group_index].append(i)
            self._equivalent_sites[group_index].append(self.sites[i])

    @property
    def spacegroup(self):
        """The Spacegroup used to symmetrize this structure."""
        return self._spacegroup

    @property
    def equivalent_sites(self):
        """
        All the sites grouped by symmetry equivalence in the form of [[sites
        in group1], [sites in group2], ...]
        """
        return self._equivalent_sites

    def find_equivalent_sites(self, site):
        """
        Finds all symmetrically equivalent sites for a particular site

        Args:
            site (PeriodicSite): A site in the structure

        Returns:
            ([PeriodicSite]): List of all symmetrically equivalent sites.

        Raises:
            ValueError: If site is not in the structure.
        """
        for sites in self.equivalent_sites:
            if site in sites:
                return sites
        raise ValueError("Site not in structure")
| mit |
tiagofrepereira2012/tensorflow | tensorflow/contrib/timeseries/python/timeseries/input_pipeline.py | 51 | 39644 | # Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Defines ways of splicing and re-arranging time series.
This file provides methods for reading, parsing, and re-arranging a time
series. The main departure from standard TensorFlow input pipelines is a focus
on "chunking" a time series, i.e. slicing it into small contiguous windows which
are then batched together for training, a form of truncated
backpropagation. This typically provides a significant speedup compared to
looping over the whole series sequentially, by exploiting data parallelism and
by reducing redundant contributions to gradients (due to redundant information
in the series itself).
A series, consisting of times (an increasing vector of integers) and values (one
or more floating point values for each time) along with any exogenous features,
is stored either in memory or on disk in various formats (e.g. "one record per
timestep" on disk, or as a dictionary of Numpy arrays in memory). The location
and format is specified by configuring a `TimeSeriesReader` object
(e.g. `NumpyReader`, `CSVReader`), which reads the data into the TensorFlow
graph. A `TimeSeriesInputFn` object (typically `RandomWindowInputFn`) then
performs windowing and batching.
Time series are passed through this pipeline as dictionaries mapping feature
names to their values. For training and evaluation, these require at minimum
`TrainEvalFeatures.TIMES` (scalar integers, one per timestep) and
`TrainEvalFeatures.VALUES` (may be either univariate or multivariate). Exogenous
features may have any shape, but are likewise associated with a timestep. Times
themselves need not be contiguous or regular (although smaller/fewer gaps are
generally better), but each timestep must have all `VALUES` and any exogenous
features (i.e. times may be missing, but given that a time is specified, every
other feature must also be specified for that step; some models may support
making exogenous updates conditional).
The expected use case of a `TimeSeriesInputFn` is that it is first configured
(for example setting a batch or window size) and passed a reader (a
`TimeSeriesReader` object). The `TimeSeriesInputFn` can then be passed as the
input_fn of an Estimator.
For example, `RandomWindowInputFn` is useful for creating batches of random
chunks of a series for training:
```
# Read data in the default "time,value" CSV format with no header
reader = input_pipeline.CSVReader(csv_file_name)
# Set up windowing and batching for training
train_input_fn = input_pipeline.RandomWindowInputFn(
reader, batch_size=16, window_size=16)
# Fit model parameters to data
estimator.train(input_fn=train_input_fn, steps=150)
```
`RandomWindowInputFn` is the primary tool for training and quantitative
evaluation of time series. `WholeDatasetInputFn`, which reads a whole series
into memory, is useful for qualitative evaluation and preparing to make
predictions with `predict_continuation_input_fn`.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import abc
import numpy
from tensorflow.contrib.timeseries.python.timeseries import feature_keys
from tensorflow.contrib.timeseries.python.timeseries import model_utils
from tensorflow.python.estimator import estimator_lib
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import ops
from tensorflow.python.framework import tensor_shape
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import control_flow_ops
from tensorflow.python.ops import io_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import nn
from tensorflow.python.ops import parsing_ops
from tensorflow.python.ops import random_ops
from tensorflow.python.ops import state_ops
from tensorflow.python.ops import tensor_array_ops
from tensorflow.python.ops import variable_scope
from tensorflow.python.training import input as input_lib
from tensorflow.python.training import training
from tensorflow.python.util import nest
def predict_continuation_input_fn(
    evaluation, steps=None, times=None, exogenous_features=None):
  """An Estimator input_fn for running predict() after evaluate().

  If the evaluate() call producing `evaluation` used a batch size greater than
  one, predictions start after each of those windows (and share that batch
  dimension).

  Args:
    evaluation: The dictionary returned by `Estimator.evaluate`, with keys
      FilteringResults.STATE_TUPLE and FilteringResults.TIMES.
    steps: The number of steps to predict (scalar), starting after the
      evaluation. Mutually exclusive with `times`; exactly one is required.
    times: A [batch_size x window_size] array of integers (not a Tensor)
      indicating times to make predictions for, all after the corresponding
      evaluation. Mutually exclusive with `steps`; exactly one is required. If
      the batch dimension is omitted, it is assumed to be 1.
    exogenous_features: Optional dictionary of exogenous features for the
      model to use while making the predictions. Values must have shape
      [batch_size x window_size x ...], where `batch_size` matches the batch
      dimension used when creating `evaluation` and `window_size` is either
      the `steps` argument or the `window_size` of the `times` argument
      (depending on which was specified).

  Returns:
    An `input_fn` suitable for passing to the `predict` function of a time
    series `Estimator`.

  Raises:
    ValueError: If `times` or `steps` are misspecified.
  """
  prediction_times = model_utils.canonicalize_times_or_steps_from_output(
      times=times, steps=steps, previous_model_output=evaluation)
  feature_dictionary = {
      feature_keys.PredictionFeatures.STATE_TUPLE:
          evaluation[feature_keys.FilteringResults.STATE_TUPLE],
      feature_keys.PredictionFeatures.TIMES:
          prediction_times
  }
  if exogenous_features is not None:
    feature_dictionary.update(exogenous_features)

  def _predict_input_fn():
    """An input_fn for predict()."""
    # limit_epochs keeps an Estimator's predict() from iterating forever over
    # this constant output.
    epoch_limited = {}
    for feature_name, feature_value in feature_dictionary.items():
      epoch_limited[feature_name] = nest.map_structure(
          lambda tensor: training.limit_epochs(tensor, num_epochs=1),
          feature_value)
    return (epoch_limited, None)

  return _predict_input_fn
class TimeSeriesReader(object):
  """Abstract data source for a `TimeSeriesInputFn`.

  Concrete readers expose a small-chunk view (`read`) and a whole-dataset view
  (`read_full`), both returning dictionaries mapping feature names to feature
  Tensors (see the note at the top of the file for the expected structure of
  these dictionaries). A `TimeSeriesInputFn` is generally responsible for
  chunking this output before it reaches a model.
  """

  def check_dataset_size(self, minimum_dataset_size):
    """When possible, raise an error if the dataset is too small.

    Readers which know their dataset size can override this to produce an
    informative error when a `TimeSeriesInputFn` is configured with a window
    size larger than the dataset. Readers without access to a dataset size
    simply inherit this no-op implementation.

    Args:
      minimum_dataset_size: The minimum number of records which should be
        contained in the dataset; overrides should attempt to raise when an
        epoch of data contains fewer records.
    """

  @abc.abstractmethod
  def read(self):
    """Parse one or more records into a feature dictionary.

    Intended to be called by a `TimeSeriesInputFn` object, not by models
    directly. Multiple records are returned at once for efficiency; the batch
    size is an implementation detail of the input pipeline. Records are
    generally sequential, but callers must tolerate occasional out-of-order
    records caused by file wraparounds.

    Returns:
      A dictionary mapping feature names to `Tensor` values, each with an
      arbitrary batch dimension (for efficiency) as their first dimension.
    """

  @abc.abstractmethod
  def read_full(self):
    """Return the full dataset.

    Largely for interactive use/plotting (or evaluation on small datasets);
    generally not very efficient and not recommended for training.

    Returns:
      Same return type as `read`, but covering the entire dataset: a
      dictionary mapping feature names to `Tensor` values whose first
      dimension is the number of samples in the dataset. These `Tensor`s
      should be constant across graph invocations assuming the underlying
      data remains constant; current implementations re-read data on each
      graph invocation, although this may change in the future.
    """
class NumpyReader(TimeSeriesReader):
  """A time series parser for feeding Numpy arrays to a `TimeSeriesInputFn`.

  Avoids embedding data in the graph as constants.
  """

  def __init__(self, data, read_num_records_hint=4096):
    """Numpy array input for a `TimeSeriesInputFn`.

    Args:
      data: A dictionary mapping feature names to Numpy arrays. Requires keys
        `TrainEvalFeatures.TIMES` and `TrainEvalFeatures.VALUES`, with two
        possible shapes: univariate (`TIMES` and `VALUES` both vectors of
        shape [series length]) or multivariate (`TIMES` of shape [series
        length], `VALUES` of shape [series length x number of features]). In
        either case, `VALUES` and any exogenous features must have shapes
        prefixed by the shape of the `TIMES` value.
      read_num_records_hint: The maximum number of samples to read at one
        time, for efficiency.
    """
    # Normalized to shapes prefixed with [batch size=1, series length].
    self._features = _canonicalize_numpy_data(
        data, require_single_batch=True)
    self._read_num_records_hint = read_num_records_hint

  def check_dataset_size(self, minimum_dataset_size):
    """Raise an error if the dataset is too small."""
    num_records = self._features[
        feature_keys.TrainEvalFeatures.TIMES].shape[1]
    if num_records >= minimum_dataset_size:
      return
    raise ValueError(
        ("A TimeSeriesInputFn is configured to create windows of size {}, "
         "but only {} records were available in the dataset. Either decrease "
         "the window size or provide more records.").format(
             minimum_dataset_size, num_records))

  def read(self):
    """Returns a large chunk of the Numpy arrays for later re-chunking."""
    # Strip the leading batch dimension so each feature's first dimension is
    # the series length.
    unbatched = {}
    for feature_name, feature_value in self._features.items():
      unbatched[feature_name] = numpy.squeeze(feature_value, axis=0)
    # numpy_input_fn then pulls self._read_num_records_hint steps of this
    # single time series at a time to pass to the TimeSeriesInputFn.
    return estimator_lib.inputs.numpy_input_fn(
        x=unbatched,
        batch_size=self._read_num_records_hint,
        num_epochs=None,
        shuffle=False)()

  def read_full(self):
    """Returns `Tensor` versions of the full Numpy arrays."""
    batched = estimator_lib.inputs.numpy_input_fn(
        x=self._features,
        batch_size=1,
        num_epochs=None,
        queue_capacity=2,  # Each queue element is a full copy of the dataset
        shuffle=False)()
    # Drop the extra leading dimension added by numpy_input_fn; a
    # TimeSeriesInputFn expects just a batch dimension.
    return {feature_name: array_ops.squeeze(feature_value, axis=0)
            for feature_name, feature_value in batched.items()}
class ReaderBaseTimeSeriesParser(TimeSeriesReader):
  """Base for time series readers which wrap a `tf.ReaderBase`."""

  def __init__(self, filenames, read_num_records_hint=4096):
    """Configure the time series reader.

    Args:
      filenames: A string or list of strings indicating files to read records
        from.
      read_num_records_hint: When not reading a full dataset, indicates the
        number of records to transfer in a single chunk (for efficiency). The
        actual number transferred at one time may vary.
    """
    self._filenames = filenames
    self._read_num_records_hint = read_num_records_hint

  @abc.abstractmethod
  def _get_reader(self):
    """Get an instance of the tf.ReaderBase associated with this class."""
    pass

  @abc.abstractmethod
  def _process_records(self, lines):
    """Given string items, return a processed dictionary of Tensors.

    Args:
      lines: A 1-dimensional string Tensor, each representing a record to parse
        (source dependent, e.g. a line of a file, or a serialized protocol
        buffer).

    Returns:
      A dictionary mapping feature names to their values. The batch dimensions
      should match the length of `lines`.
    """
    pass

  def _get_filename_queue(self, epoch_limit):
    """Constructs a filename queue with an epoch limit.

    `epoch_limit` is intended as an error checking fallback to prevent a reader
    from infinitely looping in its requests for more work items if none are
    available in any file. It should be set high enough that it is never reached
    assuming at least one record exists in some file.

    Args:
      epoch_limit: The maximum number of times to read through the complete list
        of files before throwing an OutOfRangeError.

    Returns:
      A tuple of (filename_queue, epoch_limiter):
        filename_queue: A FIFOQueue with filename work items.
        epoch_limiter: The local variable used for epoch limitation. This should
          be set to zero before a reader is passed `filename_queue` in order to
          reset the epoch limiter's state.
    """
    # Non-trainable local variable; count_up_to on it raises OutOfRangeError
    # once epoch_limit is reached, which bounds passes over the file list.
    epoch_limiter = variable_scope.variable(
        initial_value=constant_op.constant(0, dtype=dtypes.int64),
        name="epoch_limiter",
        trainable=False,
        collections=[ops.GraphKeys.LOCAL_VARIABLES])
    filenames_tensor = array_ops.reshape(
        ops.convert_to_tensor(self._filenames), [-1])
    # We can't rely on epoch_limiter being initialized, since queue runners are
    # started before local variables are initialized. Instead, we ignore epoch
    # limits before variable initialization. This means that prior to variable
    # initialization, a QueueRunner may cause a reader to enter an un-checked
    # infinite loop. However, as soon as local variables are initialized, we
    # will start incrementing and checking epoch_limiter, which will interrupt
    # any in-progress loops.
    conditional_count_up_to = control_flow_ops.cond(
        state_ops.is_variable_initialized(epoch_limiter),
        lambda: epoch_limiter.count_up_to(epoch_limit),
        lambda: constant_op.constant(0, dtype=dtypes.int64))
    with ops.control_dependencies([conditional_count_up_to]):
      filenames_tensor = array_ops.identity(filenames_tensor)
    # capacity=1 keeps the producer from buffering filenames far ahead of the
    # reads that the epoch counter above is tracking.
    filename_queue = input_lib.string_input_producer(
        filenames_tensor, shuffle=False, capacity=1)
    return filename_queue, epoch_limiter

  def read(self):
    """Reads a chunk of data from the `tf.ReaderBase` for later re-chunking."""
    # Assuming there is at least one item to be read among all of the files in
    # self._filenames, we will not need to go through more than
    # self._read_num_records_hint epochs to get a batch of
    # self._read_num_records_hint records. Setting this limit and resetting it
    # before each reader.read_up_to call prevents infinite looping when there
    # are no records available in any of the files.
    filename_queue, epoch_limiter = self._get_filename_queue(
        epoch_limit=self._read_num_records_hint)
    reader = self._get_reader()
    epoch_reset_op = state_ops.assign(epoch_limiter, 0)
    with ops.control_dependencies([epoch_reset_op]):
      _, records = reader.read_up_to(
          filename_queue, self._read_num_records_hint)
    return self._process_records(records)

  def read_full(self):
    """Reads a full epoch of data into memory."""
    reader = self._get_reader()
    # Set a hard limit of 2 epochs through self._filenames. If there are any
    # records available, we should only end up reading the first record in the
    # second epoch before exiting the while loop and subsequently resetting the
    # epoch limit. If there are no records available in any of the files, this
    # hard limit prevents the reader.read_up_to call from looping infinitely.
    filename_queue, epoch_limiter = self._get_filename_queue(epoch_limit=2)
    epoch_reset_op = state_ops.assign(epoch_limiter, 0)
    with ops.control_dependencies([epoch_reset_op]):
      first_key, first_value = reader.read_up_to(filename_queue, 1)
    # Read until we get a duplicate key (one epoch)
    def _while_condition(
        current_key, current_value, current_index, collected_records):
      del current_value, current_index, collected_records # unused
      # Stop once the reader wraps around to the first record again.
      return math_ops.not_equal(array_ops.squeeze(current_key, axis=0),
                                array_ops.squeeze(first_key, axis=0))

    def _while_body(
        current_key, current_value, current_index, collected_records):
      del current_key # unused
      new_key, new_value = reader.read_up_to(filename_queue, 1)
      new_key.set_shape([1])
      new_value.set_shape([1])
      # current_value (not new_value) is written, so the wrap-around record
      # which terminates the loop is never collected twice.
      return (new_key,
              new_value,
              current_index + 1,
              collected_records.write(current_index, current_value))
    _, _, _, records_ta = control_flow_ops.while_loop(
        _while_condition,
        _while_body,
        [constant_op.constant([""]), first_value,
         0, # current_index starting value
         tensor_array_ops.TensorArray( # collected_records
             dtype=dtypes.string, size=0, dynamic_size=True)])
    records = records_ta.concat()
    # Reset the reader when we're done so that subsequent requests for data get
    # the dataset in the proper order.
    with ops.control_dependencies([records]):
      reader_reset_op = reader.reset()
    with ops.control_dependencies([reader_reset_op]):
      records = array_ops.identity(records)
    return self._process_records(records)
class CSVReader(ReaderBaseTimeSeriesParser):
  """Reads from a collection of CSV-formatted files."""

  def __init__(self,
               filenames,
               column_names=(feature_keys.TrainEvalFeatures.TIMES,
                             feature_keys.TrainEvalFeatures.VALUES),
               column_dtypes=None,
               skip_header_lines=None,
               read_num_records_hint=4096):
    """CSV-parsing reader for a `TimeSeriesInputFn`.

    Args:
      filenames: A filename or list of filenames to read the time series from;
        each line must have one column per entry in `column_names`.
      column_names: Feature names for the CSV columns, in order.
        `TrainEvalFeatures.TIMES` must appear exactly once, and
        `TrainEvalFeatures.VALUES` at least once (repeat it to describe a
        multivariate series).
      column_dtypes: Optional list of dtypes, parallel to `column_names`.
        Defaults to `tf.int64` for `TrainEvalFeatures.TIMES` and `tf.float32`
        for everything else.
      skip_header_lines: Passed on to `tf.TextLineReader`; number of lines to
        skip at the beginning of each file.
      read_num_records_hint: Approximate number of records to parse/transfer
        per chunk when not reading a full dataset; the actual number moved at
        one time may be more or less.

    Raises:
      ValueError: If required column names are missing, if `column_dtypes` has
        the wrong length, or if more than one times column is specified.
    """
    times_key = feature_keys.TrainEvalFeatures.TIMES
    values_key = feature_keys.TrainEvalFeatures.VALUES
    if times_key not in column_names:
      raise ValueError("'{}' is a required column.".format(times_key))
    if values_key not in column_names:
      raise ValueError("'{}' is a required column.".format(values_key))
    if column_dtypes is not None and len(column_dtypes) != len(column_names):
      raise ValueError(
          ("If specified, the length of column_dtypes must match the length "
           "of column_names (got column_dtypes={} and column_names={}).")
          .format(column_dtypes, column_names))
    if sum(name == times_key for name in column_names) != 1:
      raise ValueError(
          "Got more than one times column ('{}'), but exactly "
          "one is required.".format(times_key))
    self._column_names = column_names
    self._column_dtypes = column_dtypes
    self._skip_header_lines = skip_header_lines
    super(CSVReader, self).__init__(
        filenames=filenames, read_num_records_hint=read_num_records_hint)

  def _get_reader(self):
    """A `TextLineReader`, optionally skipping per-file header lines."""
    return io_ops.TextLineReader(skip_header_lines=self._skip_header_lines)

  def _process_records(self, lines):
    """Parse `lines` as CSV records."""
    times_key = feature_keys.TrainEvalFeatures.TIMES
    values_key = feature_keys.TrainEvalFeatures.VALUES
    if self._column_dtypes is None:
      # Default dtypes: int64 for the times column, decode_csv's default
      # handling (empty tuple) for everything else.
      record_defaults = []
      for name in self._column_names:
        if name == times_key:
          record_defaults.append((array_ops.zeros([], dtypes.int64),))
        else:
          record_defaults.append(())
    else:
      record_defaults = [(array_ops.zeros([], dtype),)
                         for dtype in self._column_dtypes]
    parsed_columns = parsing_ops.decode_csv(lines, record_defaults)
    # Group the parsed columns by feature name; repeated names (e.g. VALUES
    # for a multivariate series) collect multiple columns.
    grouped = {}
    for name, column in zip(self._column_names, parsed_columns):
      grouped.setdefault(name, []).append(column)
    features = {}
    for name, columns in grouped.items():
      if len(columns) > 1 or name == values_key:
        # Stack repeated columns (and VALUES, even when univariate) into a
        # trailing feature dimension.
        features[name] = array_ops.stack(columns, axis=1)
      else:
        features[name] = columns[0]
    return features
class TimeSeriesInputFn(object):
  """Base for classes which create batches of windows from a time series."""

  @abc.abstractmethod
  def create_batch(self):
    """Creates chunked Tensors from times, values, and other features.

    Suitable for use as the input_fn argument of a tf.estimator.Estimator's
    fit() or evaluate() method.

    Returns:
      A tuple of (features, targets):
        features: A dictionary keyed by `TrainEvalFeatures.TIMES` (shape
          [batch size x window length]) and `TrainEvalFeatures.VALUES` (shape
          [batch size x window length x number of features]); any other
          features likewise have shapes prefixed with
          [batch size x window length].
        targets: Unused, but required for compatibility with the Estimator
          API; the value should be None.
    """

  def __call__(self):
    """Allow a TimeSeriesInputFn to be used as an input function directly."""
    return self.create_batch()
class WholeDatasetInputFn(TimeSeriesInputFn):
  """Supports passing a full time series to a model for evaluation/inference.

  This `TimeSeriesInputFn` is not designed for high throughput and should not
  be used for training. It allows sequential evaluation on a full dataset
  (with sequential in-sample predictions), which feeds naturally into
  `predict_continuation_input_fn` for making out-of-sample predictions. While
  useful for plotting and interactive use, `RandomWindowInputFn` is better
  suited to training and quantitative evaluation.
  """
  # TODO(allenl): A SequentialWindowInputFn for getting model end state without
  # loading the whole dataset into memory (or for quantitative evaluation of
  # sequential models). Note that an Estimator using such a TimeSeriesInputFn
  # won't return in-sample predictions for the whole dataset, which means it
  # won't be terribly useful for interactive use/plotting (unless the user
  # passes in concat metrics). Also need to be careful about state saving for
  # sequential models, particularly the gaps between chunks.

  def __init__(self, time_series_reader):
    """Initialize the `TimeSeriesInputFn`.

    Args:
      time_series_reader: A TimeSeriesReader object.
    """
    super(WholeDatasetInputFn, self).__init__()
    self._reader = time_series_reader

  def create_batch(self):
    """A suitable `input_fn` for an `Estimator`'s `evaluate()`.

    Returns:
      A dictionary mapping feature names to `Tensors`, each shape
      prefixed by [1, data set size] (i.e. a batch size of 1).
    """
    full_series = self._reader.read_full()
    batched = {}
    for feature_name, feature_value in full_series.items():
      # Prepend a singleton batch dimension to each feature.
      batched[feature_name] = feature_value[None, ...]
    return (batched, None)
class RandomWindowInputFn(TimeSeriesInputFn):
  """Wraps a `TimeSeriesReader` to create random batches of windows.

  Tensors are first collected into sequential windows (in a windowing queue
  created by `tf.train.batch`, based on the order returned from
  `time_series_reader`), then these windows are randomly batched (in a
  `RandomShuffleQueue`), the Tensors returned by `create_batch` having shapes
  prefixed by [`batch_size`, `window_size`].

  This `TimeSeriesInputFn` is useful for both training and quantitative
  evaluation (but be sure to run several epochs for sequential models such as
  `StructuralEnsembleRegressor` to completely flush stale state left over from
  training). For qualitative evaluation or when preparing for predictions, use
  `WholeDatasetInputFn`.
  """

  def __init__(
      self, time_series_reader, window_size, batch_size,
      queue_capacity_multiplier=1000, shuffle_min_after_dequeue_multiplier=2,
      discard_out_of_order=True, discard_consecutive_batches_limit=1000,
      jitter=True, num_threads=2, shuffle_seed=None):
    """Configure the RandomWindowInputFn.

    Args:
      time_series_reader: A TimeSeriesReader object.
      window_size: The number of examples to keep together sequentially. This
        controls the length of truncated backpropagation: smaller values mean
        less sequential computation, which can lead to faster training, but
        create a coarser approximation to the gradient (which would ideally be
        computed by a forward pass over the entire sequence in order).
      batch_size: The number of windows to place together in a batch. Larger
        values will lead to more stable gradients during training.
      queue_capacity_multiplier: The capacity for the queues used to create
        batches, specified as a multiple of `batch_size` (for
        RandomShuffleQueue) and `batch_size * window_size` (for the
        FIFOQueue). Controls the maximum number of windows stored. Should be
        greater than `shuffle_min_after_dequeue_multiplier`.
      shuffle_min_after_dequeue_multiplier: The minimum number of windows in the
        RandomShuffleQueue after a dequeue, which controls the amount of entropy
        introduced during batching. Specified as a multiple of `batch_size`.
      discard_out_of_order: If True, windows of data which have times which
        decrease (a higher time followed by a lower time) are discarded. If
        False, the window and associated features are instead sorted so that
        times are non-decreasing. Discarding is typically faster, as models do
        not have to deal with artificial gaps in the data. However, discarding
        does create a bias where the beginnings and endings of files are
        under-sampled.
      discard_consecutive_batches_limit: Raise an OutOfRangeError if more than
        this number of batches are discarded without a single non-discarded
        window (prevents infinite looping when the dataset is too small).
      jitter: If True, randomly discards examples between some windows in order
        to avoid deterministic chunking patterns. This is important for models
        like AR which may otherwise overfit a fixed chunking.
      num_threads: Use this number of threads for queues. Setting a value of 1
        removes one source of non-determinism (and in combination with
        shuffle_seed should provide deterministic windowing).
      shuffle_seed: A seed for window shuffling. The default value of None
        provides random behavior. With `shuffle_seed` set and
        `num_threads=1`, provides deterministic behavior.
    """
    self._reader = time_series_reader
    self._window_size = window_size
    # Fail early (when possible) if the dataset is smaller than one window.
    self._reader.check_dataset_size(minimum_dataset_size=self._window_size)
    self._batch_size = batch_size
    self._queue_capacity_multiplier = queue_capacity_multiplier
    self._shuffle_min_after_dequeue_multiplier = (
        shuffle_min_after_dequeue_multiplier)
    self._discard_out_of_order = discard_out_of_order
    self._discard_limit = discard_consecutive_batches_limit
    self._jitter = jitter
    if num_threads is None:
      self._num_threads = self._batch_size
    else:
      self._num_threads = num_threads
    self._shuffle_seed = shuffle_seed
    super(RandomWindowInputFn, self).__init__()

  def create_batch(self):
    """Create queues to window and batch time series data.

    Returns:
      A dictionary of Tensors corresponding to the output of `self._reader`
      (from the `time_series_reader` constructor argument), each with shapes
      prefixed by [`batch_size`, `window_size`].
    """
    features = self._reader.read()
    if self._jitter:
      # TODO(agarwal, allenl): Figure out if more jitter is needed here.
      jitter = random_ops.random_uniform(shape=[], maxval=2, dtype=dtypes.int32)
    else:
      jitter = 0
    # To keep things efficient, we pass from the windowing batcher to the
    # batch-of-windows batcher in batches. This avoids the need for huge numbers
    # of threads, but does mean that jitter is only applied occasionally.
    # TODO(allenl): Experiment with different internal passing sizes.
    internal_passing_size = self._batch_size
    # Over-read by `jitter` records so that, after trimming below, windows do
    # not always start at the same offsets across epochs.
    features_windowed = input_lib.batch(
        features,
        batch_size=self._window_size * internal_passing_size + jitter,
        enqueue_many=True,
        capacity=(self._queue_capacity_multiplier
                  * internal_passing_size * self._window_size),
        num_threads=self._num_threads)
    raw_features_windowed = features_windowed
    if self._jitter:
      features_windowed = {
          key: value[jitter:]
          for key, value in features_windowed.items()}
    features_windowed = {
        key: array_ops.reshape(
            value,
            array_ops.concat(
                [[internal_passing_size, self._window_size],
                 array_ops.shape(value)[1:]],
                axis=0))
        for key, value in features_windowed.items()}
    batch_and_window_shape = tensor_shape.TensorShape(
        [internal_passing_size, self._window_size])
    for key in features_windowed.keys():
      features_windowed[key].set_shape(
          batch_and_window_shape.concatenate(
              raw_features_windowed[key].get_shape()[1:]))
    # When switching files, we may end up with windows where the time is not
    # decreasing, even if times within each file are sorted (and even if those
    # files are visited in order, when looping back around to the beginning of
    # the first file). This is hard for models to deal with, so we either
    # discard such examples, creating a bias where the beginning and end of the
    # series is under-sampled, or we sort the window, creating large gaps.
    times = features_windowed[feature_keys.TrainEvalFeatures.TIMES]
    if self._discard_out_of_order:
      non_decreasing = math_ops.reduce_all(
          times[:, 1:] >= times[:, :-1], axis=1)
      # Ensure that no more than self._discard_limit complete batches are
      # discarded contiguously (resetting the count when we find a single clean
      # window). This prevents infinite looping when the dataset is smaller than
      # the window size.
      # TODO(allenl): Figure out a way to return informative errors from
      # count_up_to.
      discarded_windows_limiter = variable_scope.variable(
          initial_value=constant_op.constant(0, dtype=dtypes.int64),
          name="discarded_windows_limiter",
          trainable=False,
          collections=[ops.GraphKeys.LOCAL_VARIABLES])
      def _initialized_limit_check():
        return control_flow_ops.cond(
            math_ops.reduce_any(non_decreasing),
            lambda: state_ops.assign(discarded_windows_limiter, 0),
            lambda: discarded_windows_limiter.count_up_to(self._discard_limit))
      # As in _get_filename_queue: ignore the limit until the local variable
      # has been initialized, since queue runners may start first.
      discard_limit_op = control_flow_ops.cond(
          state_ops.is_variable_initialized(discarded_windows_limiter),
          _initialized_limit_check,
          lambda: constant_op.constant(0, dtype=dtypes.int64))
      with ops.control_dependencies([discard_limit_op]):
        non_decreasing = array_ops.identity(non_decreasing)
    else:
      _, indices_descending = nn.top_k(
          times, k=array_ops.shape(times)[-1], sorted=True)
      indices = array_ops.reverse(indices_descending, axis=[0])
      features_windowed = {
          key: array_ops.gather(params=value, indices=indices)
          for key, value in features_windowed.items()
      }
      non_decreasing = True
    # keep_input=non_decreasing drops out-of-order windows (when discarding is
    # enabled); with sorting, keep_input is simply True.
    features_batched = input_lib.maybe_shuffle_batch(
        features_windowed,
        num_threads=self._num_threads,
        seed=self._shuffle_seed,
        batch_size=self._batch_size,
        capacity=self._queue_capacity_multiplier * self._batch_size,
        min_after_dequeue=(self._shuffle_min_after_dequeue_multiplier *
                           self._batch_size),
        keep_input=non_decreasing,
        enqueue_many=True)
    return (features_batched, None)
def _canonicalize_numpy_data(data, require_single_batch):
  """Do basic checking and reshaping for Numpy data.

  Args:
    data: A dictionary mapping keys to Numpy arrays, with several possible
      shapes (requires keys `TrainEvalFeatures.TIMES` and
      `TrainEvalFeatures.VALUES`):
        Single example; `TIMES` is a scalar and `VALUES` is either a scalar or a
          vector of length [number of features].
        Sequence; `TIMES` is a vector of shape [series length], `VALUES` either
          has shape [series length] (univariate) or [series length x number of
          features] (multivariate).
        Batch of sequences; `TIMES` is a vector of shape [batch size x series
          length], `VALUES` has shape [batch size x series length] or [batch
          size x series length x number of features].
      In any case, `VALUES` and any exogenous features must have their shapes
      prefixed by the shape of the value corresponding to the `TIMES` key.
    require_single_batch: If True, raises an error if the provided data has a
      batch dimension > 1.

  Returns:
    A dictionary with features normalized to have shapes prefixed with [batch
    size x series length]. The sizes of dimensions which were omitted in the
    inputs are 1.

  Raises:
    ValueError: If dimensions are incorrect or do not match, or required
      features are missing.
  """
  features = {key: numpy.array(value) for key, value in data.items()}
  if (feature_keys.TrainEvalFeatures.TIMES not in features or
      feature_keys.TrainEvalFeatures.VALUES not in features):
    raise ValueError("{} and {} are required features.".format(
        feature_keys.TrainEvalFeatures.TIMES,
        feature_keys.TrainEvalFeatures.VALUES))
  times = features[feature_keys.TrainEvalFeatures.TIMES]
  # Every feature's shape must be prefixed by the shape of TIMES.
  for key, value in features.items():
    if value.shape[:len(times.shape)] != times.shape:
      raise ValueError(
          ("All features must have their shapes prefixed by the shape of the"
           " times feature. Got shape {} for feature '{}', but shape {} for"
           " '{}'").format(value.shape, key, times.shape,
                           feature_keys.TrainEvalFeatures.TIMES))
  # Note: `times` keeps the original (pre-expansion) array below; after the
  # single-example expansion, features[TIMES] has rank 2, so neither of the
  # later branches fires for that case.
  if not times.shape:  # a single example
    if not features[feature_keys.TrainEvalFeatures.VALUES].shape:  # univariate
      # Add a feature dimension (with one feature)
      features[feature_keys.TrainEvalFeatures.VALUES] = features[
          feature_keys.TrainEvalFeatures.VALUES][..., None]
    elif len(features[feature_keys.TrainEvalFeatures.VALUES].shape) > 1:
      raise ValueError(
          ("Got an unexpected number of dimensions for the '{}' feature."
           " Was expecting at most 1 dimension"
           " ([number of features]) since '{}' does not "
           "have a batch or time dimension, but got shape {}").format(
               feature_keys.TrainEvalFeatures.VALUES,
               feature_keys.TrainEvalFeatures.TIMES,
               features[feature_keys.TrainEvalFeatures.VALUES].shape))
    # Add trivial batch and time dimensions for every feature
    features = {key: value[None, None, ...] for key, value in features.items()}
  if len(times.shape) == 1:  # shape [series length]
    if len(features[feature_keys.TrainEvalFeatures.VALUES]
           .shape) == 1:  # shape [series length]
      # Add a feature dimension (with one feature)
      features[feature_keys.TrainEvalFeatures.VALUES] = features[
          feature_keys.TrainEvalFeatures.VALUES][..., None]
    elif len(features[feature_keys.TrainEvalFeatures.VALUES].shape) > 2:
      raise ValueError(
          ("Got an unexpected number of dimensions for the '{}' feature."
           " Was expecting at most 2 dimensions"
           " ([series length, number of features]) since '{}' does not "
           "have a batch dimension, but got shape {}").format(
               feature_keys.TrainEvalFeatures.VALUES,
               feature_keys.TrainEvalFeatures.TIMES,
               features[feature_keys.TrainEvalFeatures.VALUES].shape))
    # Add trivial batch dimensions for every feature
    features = {key: value[None, ...] for key, value in features.items()}
  elif len(features[feature_keys.TrainEvalFeatures.TIMES]
           .shape) != 2:  # shape [batch size, series length]
    raise ValueError(
        ("Got an unexpected number of dimensions for times. Was expecting at "
         "most two ([batch size, series length]), but got shape {}.").format(
             times.shape))
  if require_single_batch:
    # We don't expect input to be already batched; batching is done later
    if features[feature_keys.TrainEvalFeatures.TIMES].shape[0] != 1:
      raise ValueError("Got batch input, was expecting unbatched input.")
  return features
| apache-2.0 |
wolfier/incubator-airflow | airflow/migrations/versions/947454bf1dff_add_ti_job_id_index.py | 9 | 1288 | # -*- coding: utf-8 -*-
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""add ti job_id index
Revision ID: 947454bf1dff
Revises: bdaa763e6c56
Create Date: 2017-08-15 15:12:13.845074
"""
# revision identifiers, used by Alembic.
revision = '947454bf1dff'  # this migration's unique identifier
down_revision = 'bdaa763e6c56'  # the migration applied immediately before this one
branch_labels = None  # no named branches for this revision
depends_on = None  # no cross-branch dependencies
from alembic import op
import sqlalchemy as sa
def upgrade():
    """Create the ``ti_job_id`` index on ``task_instance.job_id``.

    Speeds up lookups of task instances by the job that ran them.
    """
    op.create_index('ti_job_id', 'task_instance', ['job_id'], unique=False)
def downgrade():
    """Drop the ``ti_job_id`` index added by :func:`upgrade`."""
    op.drop_index('ti_job_id', table_name='task_instance')
| apache-2.0 |
hinesmr/mica | util/pydevd/pysrc/_pydev_imports_tipper.py | 4 | 13856 | import os.path
import inspect
import sys
from _pydev_tipper_common import DoFind
#completion types.
TYPE_IMPORT = '0'
TYPE_CLASS = '1'
TYPE_FUNCTION = '2'
TYPE_ATTR = '3'
TYPE_BUILTIN = '4'
TYPE_PARAM = '5'
def _imp(name, log=None):
try:
return __import__(name)
except:
if '.' in name:
sub = name[0:name.rfind('.')]
if log is not None:
log.AddContent('Unable to import', name, 'trying with', sub)
#log.AddContent('PYTHONPATH:')
#log.AddContent('\n'.join(sorted(sys.path)))
log.AddException()
return _imp(sub, log)
else:
s = 'Unable to import module: %s - sys.path: %s' % (str(name), sys.path)
if log is not None:
log.AddContent(s)
log.AddException()
raise ImportError(s)
# IronPython (sys.platform == 'cli') needs CLR assembly references to be
# registered before __import__ can see .NET namespaces, so _imp is wrapped
# on that platform only.
IS_IPY = False
if sys.platform == 'cli':
    IS_IPY = True
    _old_imp = _imp
    def _imp(name, log=None):
        #We must add a reference in clr for .Net
        import clr #@UnresolvedImport
        initial_name = name
        # Try registering the longest dotted prefix that is a real assembly.
        while '.' in name:
            try:
                clr.AddReference(name)
                break #If it worked, that's OK.
            except:
                name = name[0:name.rfind('.')]
        else:
            # while/else: reached only when no dotted prefix succeeded via break.
            try:
                clr.AddReference(name)
            except:
                pass #That's OK (not dot net module).
        return _old_imp(initial_name, log)
def GetFile(mod):
    """Return the source filename for *mod*, or None when undiscoverable.

    Prefers the real source file reported by ``inspect``; when inspect
    refuses the object (e.g. it is not a module/class/function), falls back
    to its ``__file__`` attribute, mapping compiled ``.pyc``/``.pyo`` names
    back to the ``.py`` source when that file actually exists on disk.
    """
    f = None
    try:
        f = inspect.getsourcefile(mod) or inspect.getfile(mod)
    except:
        if hasattr(mod, '__file__'):
            f = mod.__file__
            # Bug fix: the original read ``f.lower(f[-4:])`` -- but
            # str.lower() takes no arguments, so that line raised TypeError
            # whenever this fallback ran.  Lower-case the extension instead.
            if f[-4:].lower() in ['.pyc', '.pyo']:
                filename = f[:-4] + '.py'
                if os.path.exists(filename):
                    f = filename
    return f
def Find(name, log=None):
    """Import dotted *name* and locate the file that defines it.

    Returns ``(f, mod, parent, foundAs)`` where ``f`` is the filename of the
    deepest module reached, ``mod`` is the innermost object resolved,
    ``parent`` is the top-level module, and ``foundAs`` is the dotted tail of
    *name* that resolved to plain attributes rather than submodules.
    """
    f = None
    mod = _imp(name, log)
    parent = mod
    foundAs = ''
    if inspect.ismodule(mod):
        f = GetFile(mod)
    components = name.split('.')
    old_comp = None
    for comp in components[1:]:
        try:
            #this happens in the following case:
            #we have mx.DateTime.mxDateTime.mxDateTime.pyd
            #but after importing it, mx.DateTime.mxDateTime shadows access to mxDateTime.pyd
            mod = getattr(mod, comp)
        except AttributeError:
            # A repeated component is the self-shadowing case described above
            # and is tolerated; any other missing attribute is a real error.
            if old_comp != comp:
                raise
        if inspect.ismodule(mod):
            f = GetFile(mod)
        else:
            # Non-module component: record it in the dotted foundAs tail.
            if len(foundAs) > 0:
                foundAs = foundAs + '.'
            foundAs = foundAs + comp
        old_comp = comp
    return f, mod, parent, foundAs
def Search(data):
    '''Locate the definition of the dotted name held in *data*.

    @return: ((file, line, col), foundAs)
    '''
    # Normalize: strip newlines and any trailing dots from the query.
    query = data.replace('\n', '').rstrip('.')
    found_file, innermost, top_module, found_as = Find(query)
    try:
        return DoFind(found_file, innermost), found_as
    except:
        # Innermost object could not be located in the file; fall back to
        # searching from the top-level module instead.
        return DoFind(found_file, top_module), found_as
def GenerateTip(data, log=None):
    """Return ``(file, tips)``: the defining file of the dotted name in
    *data* plus the completion tuples for the resolved module/object."""
    # Normalize the query the same way Search does.
    query = data.replace('\n', '').rstrip('.')
    found_file, mod, _parent, _found_as = Find(query, log)
    return found_file, GenerateImportsTipForModule(mod)
def CheckChar(c):
    """Map characters illegal in identifiers ('-' and '.') to '_';
    everything else passes through unchanged."""
    return '_' if c in ('-', '.') else c
def GenerateImportsTipForModule(obj_to_complete, dirComps=None, getattr=getattr, filter=lambda name:True):
    '''
    @param obj_to_complete: the object from where we should get the completions
    @param dirComps: if passed, we should not 'dir' the object and should just iterate those passed as a parameter
    @param getattr: the way to get a given object from the obj_to_complete (used for the completer)
    @param filter: a callable that receives the name and decides if it should be appended or not to the results
    @return: list of tuples, so that each tuple represents a completion with:
        name, doc, args, type (from the TYPE_* constants)
    '''
    ret = []
    if dirComps is None:
        dirComps = dir(obj_to_complete)
        if hasattr(obj_to_complete, '__dict__'):
            dirComps.append('__dict__')
        if hasattr(obj_to_complete, '__class__'):
            dirComps.append('__class__')
    getCompleteInfo = True
    if len(dirComps) > 1000:
        #ok, we don't want to let our users wait forever...
        #no complete info for you...
        getCompleteInfo = False
    # Instances of these basic types never carry docs worth sending over the wire.
    dontGetDocsOn = (float, int, str, tuple, list)
    for d in dirComps:
        if d is None:
            continue
        if not filter(d):
            continue
        args = ''
        try:
            obj = getattr(obj_to_complete, d)
        except: #just ignore and get it without aditional info
            ret.append((d, '', args, TYPE_BUILTIN))
        else:
            if getCompleteInfo:
                try:
                    retType = TYPE_BUILTIN
                    #check if we have to get docs
                    getDoc = True
                    for class_ in dontGetDocsOn:
                        if isinstance(obj, class_):
                            getDoc = False
                            break
                    doc = ''
                    if getDoc:
                        #no need to get this info... too many constants are defined and
                        #makes things much slower (passing all that through sockets takes quite some time)
                        try:
                            doc = inspect.getdoc(obj)
                            if doc is None:
                                doc = ''
                        except: #may happen on jython when checking java classes (so, just ignore it)
                            doc = ''
                    if inspect.ismethod(obj) or inspect.isbuiltin(obj) or inspect.isfunction(obj) or inspect.isroutine(obj):
                        # NOTE(review): inspect.getargspec was removed in Python 3.11;
                        # presumably this vendored module targets Python 2 / early
                        # Python 3 -- confirm before running on newer interpreters.
                        try:
                            args, vargs, kwargs, defaults = inspect.getargspec(obj)
                            r = ''
                            for a in (args):
                                if len(r) > 0:
                                    r = r + ', '
                                r = r + str(a)
                            args = '(%s)' % (r)
                        except TypeError:
                            #ok, let's see if we can get the arguments from the doc
                            args = '()'
                            try:
                                found = False
                                if len(doc) > 0:
                                    if IS_IPY:
                                        #Handle case where we have the situation below
                                        #sort(self, object cmp, object key)
                                        #sort(self, object cmp, object key, bool reverse)
                                        #sort(self)
                                        #sort(self, object cmp)
                                        #Or: sort(self: list, cmp: object, key: object)
                                        #sort(self: list, cmp: object, key: object, reverse: bool)
                                        #sort(self: list)
                                        #sort(self: list, cmp: object)
                                        if hasattr(obj, '__name__'):
                                            name = obj.__name__+'('
                                            #Fix issue where it was appearing sort(aa)sort(bb)sort(cc) in the same line.
                                            lines = doc.splitlines()
                                            if len(lines) == 1:
                                                c = doc.count(name)
                                                if c > 1:
                                                    doc = ('\n'+name).join(doc.split(name))
                                            # Pick the longest overload signature found in the doc.
                                            major = ''
                                            for line in doc.splitlines():
                                                if line.startswith(name) and line.endswith(')'):
                                                    if len(line) > len(major):
                                                        major = line
                                            if major:
                                                args = major[major.index('('):]
                                                found = True
                                    if not found:
                                        # CPython-style fallback: look for a "f(...) -> ..." or
                                        # first-line signature inside the docstring.
                                        i = doc.find('->')
                                        if i < 0:
                                            i = doc.find('--')
                                            if i < 0:
                                                i = doc.find('\n')
                                                if i < 0:
                                                    i = doc.find('\r')
                                        if i > 0:
                                            s = doc[0:i]
                                            s = s.strip()
                                            #let's see if we have a docstring in the first line
                                            if s[-1] == ')':
                                                start = s.find('(')
                                                if start >= 0:
                                                    end = s.find('[')
                                                    if end <= 0:
                                                        end = s.find(')')
                                                        if end <= 0:
                                                            end = len(s)
                                                    args = s[start:end]
                                                    if not args[-1] == ')':
                                                        args = args + ')'
                                                    #now, get rid of unwanted chars
                                                    l = len(args) - 1
                                                    r = []
                                                    for i in range(len(args)):
                                                        if i == 0 or i == l:
                                                            r.append(args[i])
                                                        else:
                                                            r.append(CheckChar(args[i]))
                                                    args = ''.join(r)
                                    if IS_IPY:
                                        # Collapse IronPython's "self: list" annotation to a
                                        # plain "self" and cut anything after the first ')'.
                                        if args.startswith('(self:'):
                                            i = args.find(',')
                                            if i >= 0:
                                                args = '(self'+args[i:]
                                            else:
                                                args = '(self)'
                                            i = args.find(')')
                                            if i > 0:
                                                args = args[:i+1]
                            except:
                                pass
                        retType = TYPE_FUNCTION
                    elif inspect.isclass(obj):
                        retType = TYPE_CLASS
                    elif inspect.ismodule(obj):
                        retType = TYPE_IMPORT
                    else:
                        retType = TYPE_ATTR
                    #add token and doc to return - assure only strings.
                    ret.append((d, doc, args, retType))
                except: #just ignore and get it without aditional info
                    ret.append((d, '', args, TYPE_BUILTIN))
            else: #getCompleteInfo == False
                if inspect.ismethod(obj) or inspect.isbuiltin(obj) or inspect.isfunction(obj) or inspect.isroutine(obj):
                    retType = TYPE_FUNCTION
                elif inspect.isclass(obj):
                    retType = TYPE_CLASS
                elif inspect.ismodule(obj):
                    retType = TYPE_IMPORT
                else:
                    retType = TYPE_ATTR
                #ok, no complete info, let's try to do this as fast and clean as possible
                #so, no docs for this kind of information, only the signatures
                ret.append((d, '', str(args), retType))
    return ret
| apache-2.0 |
lungetech/luigi | test/rpc_test.py | 11 | 3942 | # -*- coding: utf-8 -*-
#
# Copyright 2012-2015 Spotify AB
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from helpers import unittest
try:
from unittest import mock
except ImportError:
import mock
import luigi.rpc
from luigi.scheduler import CentralPlannerScheduler
import central_planner_test
import luigi.server
from server_test import ServerTestBase
import time
import socket
class RemoteSchedulerTest(unittest.TestCase):
    """Unit tests for the RemoteScheduler RPC client.

    No real network traffic happens here: the scheduler's ``_fetcher`` is
    always replaced with a mock whose responses (or exceptions) are scripted.
    """

    def testUrlArgumentVariations(self):
        # The base URL and the request path must join into exactly one '/',
        # regardless of which side carries the separator.
        for url in ['http://zorg.com', 'http://zorg.com/']:
            for suffix in ['api/123', '/api/123']:
                s = luigi.rpc.RemoteScheduler(url, 42)
                with mock.patch.object(s, '_fetcher') as fetcher:
                    s._fetch(suffix, '{}')
                    fetcher.fetch.assert_called_once_with('http://zorg.com/api/123', '{}', 42)

    def get_work(self, fetcher_side_effect):
        """Helper: run ``get_work`` against a mocked fetcher whose successive
        return values / raised exceptions are given by *fetcher_side_effect*."""
        class ShorterWaitRemoteScheduler(luigi.rpc.RemoteScheduler):
            """
            A RemoteScheduler which waits shorter than usual before retrying (to speed up tests).
            """

            def _wait(self):
                time.sleep(1)

        scheduler = ShorterWaitRemoteScheduler('http://zorg.com', 42)

        with mock.patch.object(scheduler, '_fetcher') as fetcher:
            fetcher.raises = socket.timeout
            fetcher.fetch.side_effect = fetcher_side_effect
            return scheduler.get_work("fake_worker")

    def test_retry_rpc_method(self):
        """
        Tests that a call to a RPC method is re-tried 3 times.
        """
        fetch_results = [socket.timeout, socket.timeout, '{"response":{}}']
        self.assertEqual({}, self.get_work(fetch_results))

    def test_retry_rpc_limited(self):
        """
        Tests that a call to an RPC method fails after the third attempt
        """
        fetch_results = [socket.timeout, socket.timeout, socket.timeout]
        self.assertRaises(luigi.rpc.RPCError, self.get_work, fetch_results)

    def test_get_work_retries_on_null(self):
        """
        Tests that get_work will retry if the response is null
        """
        fetch_results = ['{"response": null}', '{"response": {"pass": true}}']
        self.assertEqual({'pass': True}, self.get_work(fetch_results))

    def test_get_work_retries_on_null_limited(self):
        """
        Tests that get_work will give up after the third null response
        """
        fetch_results = ['{"response": null}'] * 3 + ['{"response": {}}']
        self.assertRaises(luigi.rpc.RPCError, self.get_work, fetch_results)
class RPCTest(central_planner_test.CentralPlannerTest, ServerTestBase):
    """Re-runs the central-planner test suite through a live local server and
    the RemoteScheduler RPC client, instead of in-process scheduler calls."""

    def get_app(self):
        # Build a real scheduler and hand it to the server application that
        # ServerTestBase will serve for the duration of each test.
        conf = self.get_scheduler_config()
        sch = CentralPlannerScheduler(**conf)
        return luigi.server.app(sch)

    def setUp(self):
        super(RPCTest, self).setUp()
        self.sch = luigi.rpc.RemoteScheduler(self.get_url(''))
        # No retry back-off needed when talking to the local test server.
        self.sch._wait = lambda: None

    # disable test that doesn't work with remote scheduler
    def test_task_first_failure_time(self):
        pass

    def test_task_first_failure_time_remains_constant(self):
        pass

    def test_task_has_excessive_failures(self):
        pass

    def test_quadratic_behavior(self):
        """ This would be too slow to run through network """
        pass

    def test_get_work_speed(self):
        """ This would be too slow to run through network """
        pass
| apache-2.0 |
lionleaf/dwitter | dwitter/webhooks.py | 1 | 1324 | from django.conf import settings
import requests
class Webhooks:
    """Outbound Discord notifications, sent to webhook URLs configured in
    Django settings.  Every method degrades gracefully when unconfigured."""

    @staticmethod
    def send_discord_message(message):
        """Post *message* to the main Discord channel.

        A no-op when DISCORD_WEBHOOK is not configured; network errors are
        swallowed so a Discord outage never breaks the site.
        """
        if not hasattr(settings, 'DISCORD_WEBHOOK'):
            return  # No discord webhook set up
        try:
            requests.post(settings.DISCORD_WEBHOOK, json={'content': message})
        except Exception:
            return  # Fail silently, webhooks will stop rather than breaking the site

    @staticmethod
    def new_dweet_notifications(dweet):
        """Announce a freshly posted dweet on Discord."""
        author = dweet.author.username
        user_part = ('[u/%s](https://www.dwitter.net/u/%s) posted new dweet ' %
                     (author, author))
        dweet_part = ('[d/%d](https://www.dwitter.net/d/%s):\n```js\n%s\n```' %
                      (dweet.id, dweet.id, dweet.code))
        Webhooks.send_discord_message(user_part + dweet_part)

    @staticmethod
    def send_mod_chat_message(message):
        """Post *message* to the moderator channel.

        Returns True only when Discord acknowledges with its 204 success
        code; False when unconfigured or on any request failure.
        """
        if not hasattr(settings, 'DISCORD_MOD_CHAT_WEBHOOK'):
            return False  # No discord webhook set up
        try:
            response = requests.post(settings.DISCORD_MOD_CHAT_WEBHOOK, json={
                'content': message,
            })
        except Exception:
            return False
        # Discord should return the success code 204
        return response.status_code == 204
| apache-2.0 |
keedio/hue | desktop/core/ext-py/tablib-0.10.0/tablib/packages/yaml/constructor.py | 114 | 25356 |
__all__ = ['BaseConstructor', 'SafeConstructor', 'Constructor',
'ConstructorError']
from error import *
from nodes import *
import datetime
try:
set
except NameError:
from sets import Set as set
import binascii, re, sys, types
class ConstructorError(MarkedYAMLError):
    # Raised when a YAML node cannot be converted into a native Python
    # object; inherits problem/context mark reporting from MarkedYAMLError.
    pass
class BaseConstructor(object):
yaml_constructors = {}
yaml_multi_constructors = {}
def __init__(self):
self.constructed_objects = {}
self.recursive_objects = {}
self.state_generators = []
self.deep_construct = False
def check_data(self):
# If there are more documents available?
return self.check_node()
def get_data(self):
# Construct and return the next document.
if self.check_node():
return self.construct_document(self.get_node())
def get_single_data(self):
# Ensure that the stream contains a single document and construct it.
node = self.get_single_node()
if node is not None:
return self.construct_document(node)
return None
def construct_document(self, node):
data = self.construct_object(node)
while self.state_generators:
state_generators = self.state_generators
self.state_generators = []
for generator in state_generators:
for dummy in generator:
pass
self.constructed_objects = {}
self.recursive_objects = {}
self.deep_construct = False
return data
def construct_object(self, node, deep=False):
if deep:
old_deep = self.deep_construct
self.deep_construct = True
if node in self.constructed_objects:
return self.constructed_objects[node]
if node in self.recursive_objects:
raise ConstructorError(None, None,
"found unconstructable recursive node", node.start_mark)
self.recursive_objects[node] = None
constructor = None
tag_suffix = None
if node.tag in self.yaml_constructors:
constructor = self.yaml_constructors[node.tag]
else:
for tag_prefix in self.yaml_multi_constructors:
if node.tag.startswith(tag_prefix):
tag_suffix = node.tag[len(tag_prefix):]
constructor = self.yaml_multi_constructors[tag_prefix]
break
else:
if None in self.yaml_multi_constructors:
tag_suffix = node.tag
constructor = self.yaml_multi_constructors[None]
elif None in self.yaml_constructors:
constructor = self.yaml_constructors[None]
elif isinstance(node, ScalarNode):
constructor = self.__class__.construct_scalar
elif isinstance(node, SequenceNode):
constructor = self.__class__.construct_sequence
elif isinstance(node, MappingNode):
constructor = self.__class__.construct_mapping
if tag_suffix is None:
data = constructor(self, node)
else:
data = constructor(self, tag_suffix, node)
if isinstance(data, types.GeneratorType):
generator = data
data = generator.next()
if self.deep_construct:
for dummy in generator:
pass
else:
self.state_generators.append(generator)
self.constructed_objects[node] = data
del self.recursive_objects[node]
if deep:
self.deep_construct = old_deep
return data
def construct_scalar(self, node):
if not isinstance(node, ScalarNode):
raise ConstructorError(None, None,
"expected a scalar node, but found %s" % node.id,
node.start_mark)
return node.value
def construct_sequence(self, node, deep=False):
if not isinstance(node, SequenceNode):
raise ConstructorError(None, None,
"expected a sequence node, but found %s" % node.id,
node.start_mark)
return [self.construct_object(child, deep=deep)
for child in node.value]
def construct_mapping(self, node, deep=False):
if not isinstance(node, MappingNode):
raise ConstructorError(None, None,
"expected a mapping node, but found %s" % node.id,
node.start_mark)
mapping = {}
for key_node, value_node in node.value:
key = self.construct_object(key_node, deep=deep)
try:
hash(key)
except TypeError, exc:
raise ConstructorError("while constructing a mapping", node.start_mark,
"found unacceptable key (%s)" % exc, key_node.start_mark)
value = self.construct_object(value_node, deep=deep)
mapping[key] = value
return mapping
def construct_pairs(self, node, deep=False):
if not isinstance(node, MappingNode):
raise ConstructorError(None, None,
"expected a mapping node, but found %s" % node.id,
node.start_mark)
pairs = []
for key_node, value_node in node.value:
key = self.construct_object(key_node, deep=deep)
value = self.construct_object(value_node, deep=deep)
pairs.append((key, value))
return pairs
def add_constructor(cls, tag, constructor):
if not 'yaml_constructors' in cls.__dict__:
cls.yaml_constructors = cls.yaml_constructors.copy()
cls.yaml_constructors[tag] = constructor
add_constructor = classmethod(add_constructor)
def add_multi_constructor(cls, tag_prefix, multi_constructor):
if not 'yaml_multi_constructors' in cls.__dict__:
cls.yaml_multi_constructors = cls.yaml_multi_constructors.copy()
cls.yaml_multi_constructors[tag_prefix] = multi_constructor
add_multi_constructor = classmethod(add_multi_constructor)
class SafeConstructor(BaseConstructor):
def construct_scalar(self, node):
if isinstance(node, MappingNode):
for key_node, value_node in node.value:
if key_node.tag == u'tag:yaml.org,2002:value':
return self.construct_scalar(value_node)
return BaseConstructor.construct_scalar(self, node)
def flatten_mapping(self, node):
merge = []
index = 0
while index < len(node.value):
key_node, value_node = node.value[index]
if key_node.tag == u'tag:yaml.org,2002:merge':
del node.value[index]
if isinstance(value_node, MappingNode):
self.flatten_mapping(value_node)
merge.extend(value_node.value)
elif isinstance(value_node, SequenceNode):
submerge = []
for subnode in value_node.value:
if not isinstance(subnode, MappingNode):
raise ConstructorError("while constructing a mapping",
node.start_mark,
"expected a mapping for merging, but found %s"
% subnode.id, subnode.start_mark)
self.flatten_mapping(subnode)
submerge.append(subnode.value)
submerge.reverse()
for value in submerge:
merge.extend(value)
else:
raise ConstructorError("while constructing a mapping", node.start_mark,
"expected a mapping or list of mappings for merging, but found %s"
% value_node.id, value_node.start_mark)
elif key_node.tag == u'tag:yaml.org,2002:value':
key_node.tag = u'tag:yaml.org,2002:str'
index += 1
else:
index += 1
if merge:
node.value = merge + node.value
def construct_mapping(self, node, deep=False):
if isinstance(node, MappingNode):
self.flatten_mapping(node)
return BaseConstructor.construct_mapping(self, node, deep=deep)
def construct_yaml_null(self, node):
self.construct_scalar(node)
return None
bool_values = {
u'yes': True,
u'no': False,
u'true': True,
u'false': False,
u'on': True,
u'off': False,
}
def construct_yaml_bool(self, node):
value = self.construct_scalar(node)
return self.bool_values[value.lower()]
def construct_yaml_int(self, node):
value = str(self.construct_scalar(node))
value = value.replace('_', '')
sign = +1
if value[0] == '-':
sign = -1
if value[0] in '+-':
value = value[1:]
if value == '0':
return 0
elif value.startswith('0b'):
return sign*int(value[2:], 2)
elif value.startswith('0x'):
return sign*int(value[2:], 16)
elif value[0] == '0':
return sign*int(value, 8)
elif ':' in value:
digits = [int(part) for part in value.split(':')]
digits.reverse()
base = 1
value = 0
for digit in digits:
value += digit*base
base *= 60
return sign*value
else:
return sign*int(value)
inf_value = 1e300
while inf_value != inf_value*inf_value:
inf_value *= inf_value
nan_value = -inf_value/inf_value # Trying to make a quiet NaN (like C99).
def construct_yaml_float(self, node):
value = str(self.construct_scalar(node))
value = value.replace('_', '').lower()
sign = +1
if value[0] == '-':
sign = -1
if value[0] in '+-':
value = value[1:]
if value == '.inf':
return sign*self.inf_value
elif value == '.nan':
return self.nan_value
elif ':' in value:
digits = [float(part) for part in value.split(':')]
digits.reverse()
base = 1
value = 0.0
for digit in digits:
value += digit*base
base *= 60
return sign*value
else:
return sign*float(value)
def construct_yaml_binary(self, node):
value = self.construct_scalar(node)
try:
return str(value).decode('base64')
except (binascii.Error, UnicodeEncodeError), exc:
raise ConstructorError(None, None,
"failed to decode base64 data: %s" % exc, node.start_mark)
timestamp_regexp = re.compile(
ur'''^(?P<year>[0-9][0-9][0-9][0-9])
-(?P<month>[0-9][0-9]?)
-(?P<day>[0-9][0-9]?)
(?:(?:[Tt]|[ \t]+)
(?P<hour>[0-9][0-9]?)
:(?P<minute>[0-9][0-9])
:(?P<second>[0-9][0-9])
(?:\.(?P<fraction>[0-9]*))?
(?:[ \t]*(?P<tz>Z|(?P<tz_sign>[-+])(?P<tz_hour>[0-9][0-9]?)
(?::(?P<tz_minute>[0-9][0-9]))?))?)?$''', re.X)
def construct_yaml_timestamp(self, node):
value = self.construct_scalar(node)
match = self.timestamp_regexp.match(node.value)
values = match.groupdict()
year = int(values['year'])
month = int(values['month'])
day = int(values['day'])
if not values['hour']:
return datetime.date(year, month, day)
hour = int(values['hour'])
minute = int(values['minute'])
second = int(values['second'])
fraction = 0
if values['fraction']:
fraction = values['fraction'][:6]
while len(fraction) < 6:
fraction += '0'
fraction = int(fraction)
delta = None
if values['tz_sign']:
tz_hour = int(values['tz_hour'])
tz_minute = int(values['tz_minute'] or 0)
delta = datetime.timedelta(hours=tz_hour, minutes=tz_minute)
if values['tz_sign'] == '-':
delta = -delta
data = datetime.datetime(year, month, day, hour, minute, second, fraction)
if delta:
data -= delta
return data
def construct_yaml_omap(self, node):
# Note: we do not check for duplicate keys, because it's too
# CPU-expensive.
omap = []
yield omap
if not isinstance(node, SequenceNode):
raise ConstructorError("while constructing an ordered map", node.start_mark,
"expected a sequence, but found %s" % node.id, node.start_mark)
for subnode in node.value:
if not isinstance(subnode, MappingNode):
raise ConstructorError("while constructing an ordered map", node.start_mark,
"expected a mapping of length 1, but found %s" % subnode.id,
subnode.start_mark)
if len(subnode.value) != 1:
raise ConstructorError("while constructing an ordered map", node.start_mark,
"expected a single mapping item, but found %d items" % len(subnode.value),
subnode.start_mark)
key_node, value_node = subnode.value[0]
key = self.construct_object(key_node)
value = self.construct_object(value_node)
omap.append((key, value))
def construct_yaml_pairs(self, node):
# Note: the same code as `construct_yaml_omap`.
pairs = []
yield pairs
if not isinstance(node, SequenceNode):
raise ConstructorError("while constructing pairs", node.start_mark,
"expected a sequence, but found %s" % node.id, node.start_mark)
for subnode in node.value:
if not isinstance(subnode, MappingNode):
raise ConstructorError("while constructing pairs", node.start_mark,
"expected a mapping of length 1, but found %s" % subnode.id,
subnode.start_mark)
if len(subnode.value) != 1:
raise ConstructorError("while constructing pairs", node.start_mark,
"expected a single mapping item, but found %d items" % len(subnode.value),
subnode.start_mark)
key_node, value_node = subnode.value[0]
key = self.construct_object(key_node)
value = self.construct_object(value_node)
pairs.append((key, value))
def construct_yaml_set(self, node):
data = set()
yield data
value = self.construct_mapping(node)
data.update(value)
def construct_yaml_str(self, node):
value = self.construct_scalar(node)
try:
return value.encode('ascii')
except UnicodeEncodeError:
return value
def construct_yaml_seq(self, node):
data = []
yield data
data.extend(self.construct_sequence(node))
def construct_yaml_map(self, node):
data = {}
yield data
value = self.construct_mapping(node)
data.update(value)
def construct_yaml_object(self, node, cls):
data = cls.__new__(cls)
yield data
if hasattr(data, '__setstate__'):
state = self.construct_mapping(node, deep=True)
data.__setstate__(state)
else:
state = self.construct_mapping(node)
data.__dict__.update(state)
def construct_undefined(self, node):
raise ConstructorError(None, None,
"could not determine a constructor for the tag %r" % node.tag.encode('utf-8'),
node.start_mark)
SafeConstructor.add_constructor(
u'tag:yaml.org,2002:null',
SafeConstructor.construct_yaml_null)
SafeConstructor.add_constructor(
u'tag:yaml.org,2002:bool',
SafeConstructor.construct_yaml_bool)
SafeConstructor.add_constructor(
u'tag:yaml.org,2002:int',
SafeConstructor.construct_yaml_int)
SafeConstructor.add_constructor(
u'tag:yaml.org,2002:float',
SafeConstructor.construct_yaml_float)
SafeConstructor.add_constructor(
u'tag:yaml.org,2002:binary',
SafeConstructor.construct_yaml_binary)
SafeConstructor.add_constructor(
u'tag:yaml.org,2002:timestamp',
SafeConstructor.construct_yaml_timestamp)
SafeConstructor.add_constructor(
u'tag:yaml.org,2002:omap',
SafeConstructor.construct_yaml_omap)
SafeConstructor.add_constructor(
u'tag:yaml.org,2002:pairs',
SafeConstructor.construct_yaml_pairs)
SafeConstructor.add_constructor(
u'tag:yaml.org,2002:set',
SafeConstructor.construct_yaml_set)
SafeConstructor.add_constructor(
u'tag:yaml.org,2002:str',
SafeConstructor.construct_yaml_str)
SafeConstructor.add_constructor(
u'tag:yaml.org,2002:seq',
SafeConstructor.construct_yaml_seq)
SafeConstructor.add_constructor(
u'tag:yaml.org,2002:map',
SafeConstructor.construct_yaml_map)
SafeConstructor.add_constructor(None,
SafeConstructor.construct_undefined)
class Constructor(SafeConstructor):
def construct_python_str(self, node):
return self.construct_scalar(node).encode('utf-8')
def construct_python_unicode(self, node):
return self.construct_scalar(node)
def construct_python_long(self, node):
return long(self.construct_yaml_int(node))
def construct_python_complex(self, node):
return complex(self.construct_scalar(node))
def construct_python_tuple(self, node):
return tuple(self.construct_sequence(node))
def find_python_module(self, name, mark):
if not name:
raise ConstructorError("while constructing a Python module", mark,
"expected non-empty name appended to the tag", mark)
try:
__import__(name)
except ImportError, exc:
raise ConstructorError("while constructing a Python module", mark,
"cannot find module %r (%s)" % (name.encode('utf-8'), exc), mark)
return sys.modules[name]
def find_python_name(self, name, mark):
if not name:
raise ConstructorError("while constructing a Python object", mark,
"expected non-empty name appended to the tag", mark)
if u'.' in name:
# Python 2.4 only
#module_name, object_name = name.rsplit('.', 1)
items = name.split('.')
object_name = items.pop()
module_name = '.'.join(items)
else:
module_name = '__builtin__'
object_name = name
try:
__import__(module_name)
except ImportError, exc:
raise ConstructorError("while constructing a Python object", mark,
"cannot find module %r (%s)" % (module_name.encode('utf-8'), exc), mark)
module = sys.modules[module_name]
if not hasattr(module, object_name):
raise ConstructorError("while constructing a Python object", mark,
"cannot find %r in the module %r" % (object_name.encode('utf-8'),
module.__name__), mark)
return getattr(module, object_name)
def construct_python_name(self, suffix, node):
value = self.construct_scalar(node)
if value:
raise ConstructorError("while constructing a Python name", node.start_mark,
"expected the empty value, but found %r" % value.encode('utf-8'),
node.start_mark)
return self.find_python_name(suffix, node.start_mark)
def construct_python_module(self, suffix, node):
value = self.construct_scalar(node)
if value:
raise ConstructorError("while constructing a Python module", node.start_mark,
"expected the empty value, but found %r" % value.encode('utf-8'),
node.start_mark)
return self.find_python_module(suffix, node.start_mark)
class classobj: pass
def make_python_instance(self, suffix, node,
args=None, kwds=None, newobj=False):
if not args:
args = []
if not kwds:
kwds = {}
cls = self.find_python_name(suffix, node.start_mark)
if newobj and isinstance(cls, type(self.classobj)) \
and not args and not kwds:
instance = self.classobj()
instance.__class__ = cls
return instance
elif newobj and isinstance(cls, type):
return cls.__new__(cls, *args, **kwds)
else:
return cls(*args, **kwds)
def set_python_instance_state(self, instance, state):
if hasattr(instance, '__setstate__'):
instance.__setstate__(state)
else:
slotstate = {}
if isinstance(state, tuple) and len(state) == 2:
state, slotstate = state
if hasattr(instance, '__dict__'):
instance.__dict__.update(state)
elif state:
slotstate.update(state)
for key, value in slotstate.items():
setattr(object, key, value)
def construct_python_object(self, suffix, node):
# Format:
# !!python/object:module.name { ... state ... }
instance = self.make_python_instance(suffix, node, newobj=True)
yield instance
deep = hasattr(instance, '__setstate__')
state = self.construct_mapping(node, deep=deep)
self.set_python_instance_state(instance, state)
def construct_python_object_apply(self, suffix, node, newobj=False):
# Format:
# !!python/object/apply # (or !!python/object/new)
# args: [ ... arguments ... ]
# kwds: { ... keywords ... }
# state: ... state ...
# listitems: [ ... listitems ... ]
# dictitems: { ... dictitems ... }
# or short format:
# !!python/object/apply [ ... arguments ... ]
# The difference between !!python/object/apply and !!python/object/new
# is how an object is created, check make_python_instance for details.
if isinstance(node, SequenceNode):
args = self.construct_sequence(node, deep=True)
kwds = {}
state = {}
listitems = []
dictitems = {}
else:
value = self.construct_mapping(node, deep=True)
args = value.get('args', [])
kwds = value.get('kwds', {})
state = value.get('state', {})
listitems = value.get('listitems', [])
dictitems = value.get('dictitems', {})
instance = self.make_python_instance(suffix, node, args, kwds, newobj)
if state:
self.set_python_instance_state(instance, state)
if listitems:
instance.extend(listitems)
if dictitems:
for key in dictitems:
instance[key] = dictitems[key]
return instance
def construct_python_object_new(self, suffix, node):
return self.construct_python_object_apply(suffix, node, newobj=True)
Constructor.add_constructor(
u'tag:yaml.org,2002:python/none',
Constructor.construct_yaml_null)
Constructor.add_constructor(
u'tag:yaml.org,2002:python/bool',
Constructor.construct_yaml_bool)
Constructor.add_constructor(
u'tag:yaml.org,2002:python/str',
Constructor.construct_python_str)
Constructor.add_constructor(
u'tag:yaml.org,2002:python/unicode',
Constructor.construct_python_unicode)
Constructor.add_constructor(
u'tag:yaml.org,2002:python/int',
Constructor.construct_yaml_int)
Constructor.add_constructor(
u'tag:yaml.org,2002:python/long',
Constructor.construct_python_long)
Constructor.add_constructor(
u'tag:yaml.org,2002:python/float',
Constructor.construct_yaml_float)
Constructor.add_constructor(
u'tag:yaml.org,2002:python/complex',
Constructor.construct_python_complex)
Constructor.add_constructor(
u'tag:yaml.org,2002:python/list',
Constructor.construct_yaml_seq)
Constructor.add_constructor(
u'tag:yaml.org,2002:python/tuple',
Constructor.construct_python_tuple)
Constructor.add_constructor(
u'tag:yaml.org,2002:python/dict',
Constructor.construct_yaml_map)
Constructor.add_multi_constructor(
u'tag:yaml.org,2002:python/name:',
Constructor.construct_python_name)
Constructor.add_multi_constructor(
u'tag:yaml.org,2002:python/module:',
Constructor.construct_python_module)
Constructor.add_multi_constructor(
u'tag:yaml.org,2002:python/object:',
Constructor.construct_python_object)
Constructor.add_multi_constructor(
u'tag:yaml.org,2002:python/object/apply:',
Constructor.construct_python_object_apply)
Constructor.add_multi_constructor(
u'tag:yaml.org,2002:python/object/new:',
Constructor.construct_python_object_new)
| apache-2.0 |
alanjw/GreenOpenERP-Win-X86 | python/Lib/site-packages/win32/Demos/security/setkernelobjectsecurity.py | 4 | 4226 | import win32security,win32api,win32con, win32process
## You need SE_RESTORE_NAME to be able to set the owner of a security descriptor to anybody
## other than yourself or your primary group. Most admin logins don't have it by default, so
## enabling it may fail
new_privs = ((win32security.LookupPrivilegeValue('',win32security.SE_SECURITY_NAME),win32con.SE_PRIVILEGE_ENABLED),
(win32security.LookupPrivilegeValue('',win32security.SE_TCB_NAME),win32con.SE_PRIVILEGE_ENABLED),
(win32security.LookupPrivilegeValue('',win32security.SE_SHUTDOWN_NAME),win32con.SE_PRIVILEGE_ENABLED),
(win32security.LookupPrivilegeValue('',win32security.SE_RESTORE_NAME),win32con.SE_PRIVILEGE_ENABLED),
(win32security.LookupPrivilegeValue('',win32security.SE_TAKE_OWNERSHIP_NAME),win32con.SE_PRIVILEGE_ENABLED),
(win32security.LookupPrivilegeValue('',win32security.SE_CREATE_PERMANENT_NAME),win32con.SE_PRIVILEGE_ENABLED),
(win32security.LookupPrivilegeValue('',win32security.SE_ENABLE_DELEGATION_NAME),win32con.SE_PRIVILEGE_ENABLED),
(win32security.LookupPrivilegeValue('',win32security.SE_CHANGE_NOTIFY_NAME),win32con.SE_PRIVILEGE_ENABLED),
(win32security.LookupPrivilegeValue('',win32security.SE_DEBUG_NAME),win32con.SE_PRIVILEGE_ENABLED),
(win32security.LookupPrivilegeValue('',win32security.SE_PROF_SINGLE_PROCESS_NAME),win32con.SE_PRIVILEGE_ENABLED),
(win32security.LookupPrivilegeValue('',win32security.SE_SYSTEM_PROFILE_NAME),win32con.SE_PRIVILEGE_ENABLED),
(win32security.LookupPrivilegeValue('',win32security.SE_LOCK_MEMORY_NAME),win32con.SE_PRIVILEGE_ENABLED)
)
all_info=win32security.OWNER_SECURITY_INFORMATION|win32security.GROUP_SECURITY_INFORMATION| \
win32security.DACL_SECURITY_INFORMATION|win32security.SACL_SECURITY_INFORMATION
pid=win32api.GetCurrentProcessId()
ph=win32api.OpenProcess(win32con.PROCESS_ALL_ACCESS,0,pid)
## PROCESS_ALL_ACCESS does not contain ACCESS_SYSTEM_SECURITY (neccessy to do SACLs)
th = win32security.OpenProcessToken(ph,win32security.TOKEN_ALL_ACCESS) ##win32con.TOKEN_ADJUST_PRIVILEGES)
old_privs=win32security.AdjustTokenPrivileges(th,0,new_privs)
my_sid = win32security.GetTokenInformation(th,win32security.TokenUser)[0]
pwr_sid=win32security.LookupAccountName('','Power Users')[0]
## reopen process with ACCESS_SYSTEM_SECURITY now that sufficent privs are enabled
ph=win32api.OpenProcess(win32con.PROCESS_ALL_ACCESS|win32con.ACCESS_SYSTEM_SECURITY,0,pid)
sd=win32security.GetKernelObjectSecurity(ph,all_info)
dacl=sd.GetSecurityDescriptorDacl()
if dacl is None:
dacl=win32security.ACL()
sacl=sd.GetSecurityDescriptorSacl()
if sacl is None:
sacl=win32security.ACL()
dacl_ace_cnt=dacl.GetAceCount()
sacl_ace_cnt=sacl.GetAceCount()
dacl.AddAccessAllowedAce(dacl.GetAclRevision(),win32con.ACCESS_SYSTEM_SECURITY|win32con.WRITE_DAC,my_sid)
sacl.AddAuditAccessAce(sacl.GetAclRevision(),win32con.GENERIC_ALL,my_sid,1,1)
sd.SetSecurityDescriptorDacl(1,dacl,0)
sd.SetSecurityDescriptorSacl(1,sacl,0)
sd.SetSecurityDescriptorGroup(pwr_sid,0)
sd.SetSecurityDescriptorOwner(pwr_sid,0)
win32security.SetKernelObjectSecurity(ph,all_info,sd)
new_sd=win32security.GetKernelObjectSecurity(ph,all_info)
if new_sd.GetSecurityDescriptorDacl().GetAceCount()!=dacl_ace_cnt+1:
print 'New dacl doesn''t contain extra ace ????'
if new_sd.GetSecurityDescriptorSacl().GetAceCount()!=sacl_ace_cnt+1:
print 'New Sacl doesn''t contain extra ace ????'
if win32security.LookupAccountSid('',new_sd.GetSecurityDescriptorOwner())[0]!='Power Users':
print 'Owner not successfully set to Power Users !!!!!'
if win32security.LookupAccountSid('',new_sd.GetSecurityDescriptorGroup())[0]!='Power Users':
print 'Group not successfully set to Power Users !!!!!'
sd.SetSecurityDescriptorSacl(0,None,0)
win32security.SetKernelObjectSecurity(ph, win32security.SACL_SECURITY_INFORMATION, sd)
new_sd_1=win32security.GetKernelObjectSecurity(ph,win32security.SACL_SECURITY_INFORMATION)
if new_sd_1.GetSecurityDescriptorSacl() is not None:
print 'Unable to set Sacl to NULL !!!!!!!!'
| agpl-3.0 |
1st/django | django/core/files/storage.py | 281 | 13339 | import errno
import os
import warnings
from datetime import datetime
from django.conf import settings
from django.core.exceptions import SuspiciousFileOperation
from django.core.files import File, locks
from django.core.files.move import file_move_safe
from django.utils._os import abspathu, safe_join
from django.utils.crypto import get_random_string
from django.utils.deconstruct import deconstructible
from django.utils.deprecation import RemovedInDjango110Warning
from django.utils.encoding import filepath_to_uri, force_text
from django.utils.functional import LazyObject
from django.utils.inspect import func_supports_parameter
from django.utils.module_loading import import_string
from django.utils.six.moves.urllib.parse import urljoin
from django.utils.text import get_valid_filename
__all__ = ('Storage', 'FileSystemStorage', 'DefaultStorage', 'default_storage')
class Storage(object):
    """
    A base storage class, providing some default behaviors that all other
    storage systems can inherit or override, as necessary.
    """
    # The following methods represent a public interface to private methods.
    # These shouldn't be overridden by subclasses unless absolutely necessary.
    def open(self, name, mode='rb'):
        """
        Retrieves the specified file from storage.
        """
        return self._open(name, mode)
    def save(self, name, content, max_length=None):
        """
        Saves new content to the file specified by name. The content should be
        a proper File object or any python file-like object, ready to be read
        from the beginning.
        """
        # Get the proper name for the file, as it will actually be saved.
        if name is None:
            name = content.name
        # Wrap bare file-like objects so `chunks()` is always available to
        # the backend's _save() implementation.
        if not hasattr(content, 'chunks'):
            content = File(content)
        # Older third-party backends may define get_available_name() without
        # the `max_length` parameter; call it the old way and warn that the
        # shim goes away in Django 1.10.
        if func_supports_parameter(self.get_available_name, 'max_length'):
            name = self.get_available_name(name, max_length=max_length)
        else:
            warnings.warn(
                'Backwards compatibility for storage backends without '
                'support for the `max_length` argument in '
                'Storage.get_available_name() will be removed in Django 1.10.',
                RemovedInDjango110Warning, stacklevel=2
            )
            name = self.get_available_name(name)
        name = self._save(name, content)
        # Store filenames with forward slashes, even on Windows
        return force_text(name.replace('\\', '/'))
    # These methods are part of the public API, with default implementations.
    def get_valid_name(self, name):
        """
        Returns a filename, based on the provided filename, that's suitable for
        use in the target storage system.
        """
        return get_valid_filename(name)
    def get_available_name(self, name, max_length=None):
        """
        Returns a filename that's free on the target storage system, and
        available for new content to be written to.
        """
        dir_name, file_name = os.path.split(name)
        file_root, file_ext = os.path.splitext(file_name)
        # If the filename already exists, add an underscore and a random 7
        # character alphanumeric string (before the file extension, if one
        # exists) to the filename until the generated filename doesn't exist.
        # Truncate original name if required, so the new filename does not
        # exceed the max_length.
        while self.exists(name) or (max_length and len(name) > max_length):
            # file_ext includes the dot.
            name = os.path.join(dir_name, "%s_%s%s" % (file_root, get_random_string(7), file_ext))
            if max_length is None:
                continue
            # Truncate file_root if max_length exceeded.
            truncation = len(name) - max_length
            if truncation > 0:
                file_root = file_root[:-truncation]
                # Entire file_root was truncated in attempt to find an available filename.
                if not file_root:
                    raise SuspiciousFileOperation(
                        'Storage can not find an available filename for "%s". '
                        'Please make sure that the corresponding file field '
                        'allows sufficient "max_length".' % name
                    )
                name = os.path.join(dir_name, "%s_%s%s" % (file_root, get_random_string(7), file_ext))
        return name
    def path(self, name):
        """
        Returns a local filesystem path where the file can be retrieved using
        Python's built-in open() function. Storage systems that can't be
        accessed using open() should *not* implement this method.
        """
        raise NotImplementedError("This backend doesn't support absolute paths.")
    # The following methods form the public API for storage systems, but with
    # no default implementations. Subclasses must implement *all* of these.
    def delete(self, name):
        """
        Deletes the specified file from the storage system.
        """
        raise NotImplementedError('subclasses of Storage must provide a delete() method')
    def exists(self, name):
        """
        Returns True if a file referenced by the given name already exists in the
        storage system, or False if the name is available for a new file.
        """
        raise NotImplementedError('subclasses of Storage must provide an exists() method')
    def listdir(self, path):
        """
        Lists the contents of the specified path, returning a 2-tuple of lists;
        the first item being directories, the second item being files.
        """
        raise NotImplementedError('subclasses of Storage must provide a listdir() method')
    def size(self, name):
        """
        Returns the total size, in bytes, of the file specified by name.
        """
        raise NotImplementedError('subclasses of Storage must provide a size() method')
    def url(self, name):
        """
        Returns an absolute URL where the file's contents can be accessed
        directly by a Web browser.
        """
        raise NotImplementedError('subclasses of Storage must provide a url() method')
    def accessed_time(self, name):
        """
        Returns the last accessed time (as datetime object) of the file
        specified by name.
        """
        raise NotImplementedError('subclasses of Storage must provide an accessed_time() method')
    def created_time(self, name):
        """
        Returns the creation time (as datetime object) of the file
        specified by name.
        """
        raise NotImplementedError('subclasses of Storage must provide a created_time() method')
    def modified_time(self, name):
        """
        Returns the last modified time (as datetime object) of the file
        specified by name.
        """
        raise NotImplementedError('subclasses of Storage must provide a modified_time() method')
@deconstructible
class FileSystemStorage(Storage):
    """
    Standard filesystem storage
    """
    def __init__(self, location=None, base_url=None, file_permissions_mode=None,
            directory_permissions_mode=None):
        # Explicit arguments win; otherwise fall back to the MEDIA_ROOT /
        # MEDIA_URL / FILE_UPLOAD_* settings.
        if location is None:
            location = settings.MEDIA_ROOT
        self.base_location = location
        self.location = abspathu(self.base_location)
        if base_url is None:
            base_url = settings.MEDIA_URL
        elif not base_url.endswith('/'):
            # urljoin() in url() needs a trailing slash to append names.
            base_url += '/'
        self.base_url = base_url
        self.file_permissions_mode = (
            file_permissions_mode if file_permissions_mode is not None
            else settings.FILE_UPLOAD_PERMISSIONS
        )
        self.directory_permissions_mode = (
            directory_permissions_mode if directory_permissions_mode is not None
            else settings.FILE_UPLOAD_DIRECTORY_PERMISSIONS
        )
    def _open(self, name, mode='rb'):
        # Wrap the raw file handle in Django's File abstraction.
        return File(open(self.path(name), mode))
    def _save(self, name, content):
        full_path = self.path(name)
        # Create any intermediate directories that do not exist.
        # Note that there is a race between os.path.exists and os.makedirs:
        # if os.makedirs fails with EEXIST, the directory was created
        # concurrently, and we can continue normally. Refs #16082.
        directory = os.path.dirname(full_path)
        if not os.path.exists(directory):
            try:
                if self.directory_permissions_mode is not None:
                    # os.makedirs applies the global umask, so we reset it,
                    # for consistency with file_permissions_mode behavior.
                    old_umask = os.umask(0)
                    try:
                        os.makedirs(directory, self.directory_permissions_mode)
                    finally:
                        os.umask(old_umask)
                else:
                    os.makedirs(directory)
            except OSError as e:
                if e.errno != errno.EEXIST:
                    raise
        if not os.path.isdir(directory):
            raise IOError("%s exists and is not a directory." % directory)
        # There's a potential race condition between get_available_name and
        # saving the file; it's possible that two threads might return the
        # same name, at which point all sorts of fun happens. So we need to
        # try to create the file, but if it already exists we have to go back
        # to get_available_name() and try again.
        while True:
            try:
                # This file has a file path that we can move.
                if hasattr(content, 'temporary_file_path'):
                    file_move_safe(content.temporary_file_path(), full_path)
                # This is a normal uploadedfile that we can stream.
                else:
                    # This fun binary flag incantation makes os.open throw an
                    # OSError if the file already exists before we open it.
                    flags = (os.O_WRONLY | os.O_CREAT | os.O_EXCL |
                             getattr(os, 'O_BINARY', 0))
                    # The current umask value is masked out by os.open!
                    fd = os.open(full_path, flags, 0o666)
                    _file = None
                    try:
                        locks.lock(fd, locks.LOCK_EX)
                        for chunk in content.chunks():
                            if _file is None:
                                # Pick text vs binary mode from the first
                                # chunk's type, then reuse the same handle.
                                mode = 'wb' if isinstance(chunk, bytes) else 'wt'
                                _file = os.fdopen(fd, mode)
                            _file.write(chunk)
                    finally:
                        locks.unlock(fd)
                        if _file is not None:
                            _file.close()
                        else:
                            os.close(fd)
            except OSError as e:
                if e.errno == errno.EEXIST:
                    # Ooops, the file exists. We need a new file name.
                    name = self.get_available_name(name)
                    full_path = self.path(name)
                else:
                    raise
            else:
                # OK, the file save worked. Break out of the loop.
                break
        if self.file_permissions_mode is not None:
            os.chmod(full_path, self.file_permissions_mode)
        return name
    def delete(self, name):
        assert name, "The name argument is not allowed to be empty."
        name = self.path(name)
        # If the file exists, delete it from the filesystem.
        # Note that there is a race between os.path.exists and os.remove:
        # if os.remove fails with ENOENT, the file was removed
        # concurrently, and we can continue normally.
        if os.path.exists(name):
            try:
                os.remove(name)
            except OSError as e:
                if e.errno != errno.ENOENT:
                    raise
    def exists(self, name):
        return os.path.exists(self.path(name))
    def listdir(self, path):
        # Partition directory entries into (directories, files).
        path = self.path(path)
        directories, files = [], []
        for entry in os.listdir(path):
            if os.path.isdir(os.path.join(path, entry)):
                directories.append(entry)
            else:
                files.append(entry)
        return directories, files
    def path(self, name):
        # safe_join() raises on attempts to escape self.location.
        return safe_join(self.location, name)
    def size(self, name):
        return os.path.getsize(self.path(name))
    def url(self, name):
        if self.base_url is None:
            raise ValueError("This file is not accessible via a URL.")
        return urljoin(self.base_url, filepath_to_uri(name))
    def accessed_time(self, name):
        return datetime.fromtimestamp(os.path.getatime(self.path(name)))
    def created_time(self, name):
        return datetime.fromtimestamp(os.path.getctime(self.path(name)))
    def modified_time(self, name):
        return datetime.fromtimestamp(os.path.getmtime(self.path(name)))
def get_storage_class(import_path=None):
    """
    Import and return the storage class named by the dotted ``import_path``.

    When ``import_path`` is empty or ``None``, the class named by
    ``settings.DEFAULT_FILE_STORAGE`` is used instead.
    """
    dotted_path = import_path or settings.DEFAULT_FILE_STORAGE
    return import_string(dotted_path)
class DefaultStorage(LazyObject):
    """
    Lazy proxy around the configured default storage backend.

    Instantiation is deferred until first attribute access, so
    ``settings.DEFAULT_FILE_STORAGE`` is read at use time rather than
    at import time.
    """
    def _setup(self):
        # Resolve the class and instantiate it in one step.
        self._wrapped = get_storage_class()()
default_storage = DefaultStorage()
| bsd-3-clause |
rwl/muntjac | muntjac/addon/refresher/refresher_demo_application.py | 1 | 3392 | # @MUNTJAC_COPYRIGHT@
# @MUNTJAC_LICENSE@
import time
from threading import Thread
from muntjac.application import Application
from muntjac.ui.window import Window
from muntjac.ui.panel import Panel
from muntjac.ui.horizontal_layout import HorizontalLayout
from muntjac.addon.refresher.refresher import Refresher
from muntjac.ui.label import Label
from muntjac.ui import button
from muntjac.ui.button import Button
SLEEP_TIME_IN_MILLIS = 1000 # a second
class RefresherApplication(Application):
    """
    Demo application showing the Refresher add-on: a background thread
    updates a Label, and the Refresher makes the browser poll the server
    so the change becomes visible without user interaction.
    """
    def init(self):
        mainWindow = Window('Refresher')
        self.setMainWindow(mainWindow)
        panel = Panel('Refresher example')
        layout = HorizontalLayout()
        refresher = Refresher()
        label = Label('0')
        # The counter thread starts immediately but stays paused until
        # StartClickListener flips its running flag.
        thread = CounterThread(label)
        thread.start()
        # NOTE(review): CounterThread.__init__ sets the label data to 1;
        # this resets it to 0 -- confirm which starting value is intended.
        label.setData(0)
        panel.addComponent(refresher)
        panel.addComponent(Label("<div style='margin-bottom:10px'>"
            + "The Refresher allows you to affect the UI "
            + "from external threads without "
            + "<a href='http://vaadin.com/forum/-/message_boards/message/69792' target='_blank'>"
            + "the ProgressIndicator hack</a>.</div>", Label.CONTENT_XHTML))
        panel.addComponent(layout)
        layout.setSpacing(True)
        layout.addComponent(Button('Start Counting',
            StartClickListener(refresher, thread)))
        layout.addComponent(Button('Stop Counting',
            StopClickListener(refresher, thread)))
        layout.addComponent(label)
        mainWindow.setContent(panel)
class StartClickListener(button.IClickListener):
    """Click handler that turns polling on and resumes the counter.

    The refresh interval is set to the counter's own period, so each
    browser poll observes exactly one new value.
    """

    def __init__(self, refresher, thread):
        # Keep references to the collaborators wired up by the application.
        self.refresher, self.thread = refresher, thread

    def buttonClick(self, event):
        self.refresher.setRefreshInterval(SLEEP_TIME_IN_MILLIS)
        self.thread.startCounting()
class StopClickListener(button.IClickListener):
    """Click handler that pauses the counter and disables polling.

    An interval of zero tells the Refresher to stop making the browser
    poll the server.
    """

    def __init__(self, refresher, thread):
        # Keep references to the collaborators wired up by the application.
        self.refresher, self.thread = refresher, thread

    def buttonClick(self, event):
        self.refresher.setRefreshInterval(0)
        self.thread.stopCounting()
class CounterThread(Thread):
    """
    Background thread that advances a counter roughly once per second.

    The label's ``data`` carries the next number to show; ``value`` is the
    text actually rendered in the browser (picked up by the Refresher's
    polling). The thread runs for one minute, counting only while
    ``startCounting()`` has been called more recently than
    ``stopCounting()``.
    """

    def __init__(self, renderLabel):
        super(CounterThread, self).__init__()
        self._renderLabel = renderLabel
        renderLabel.setData(1)
        # Paused until startCounting() is invoked.
        self._running = False

    def run(self):
        # Work in milliseconds to match SLEEP_TIME_IN_MILLIS.
        startTime = 1000 * time.time()
        lifetime = 1000 * 60
        # live for a minute.
        try:
            while 1000 * time.time() < startTime + lifetime:
                if self._running:
                    # synchronize with the application, to avoid concurrent
                    # edits on the label's value.
                    number = self._renderLabel.getData()
                    self._renderLabel.setValue(number)
                    self._renderLabel.setData(number + 1)
                # BUG FIX: time.sleep() takes *seconds*, but the constant is
                # expressed in milliseconds -- the original slept 1000
                # seconds (~16.7 min) per tick instead of one second.
                time.sleep(SLEEP_TIME_IN_MILLIS / 1000.0)
            self._renderLabel.setValue('[ counter thread expired ]')
        except KeyboardInterrupt:
            # NOTE(review): KeyboardInterrupt is normally raised only in the
            # main thread, so this branch is unlikely to trigger here.
            self._renderLabel.setValue('[ counter thread interrupted ]')

    def startCounting(self):
        """Let the loop advance the counter on its next wake-up."""
        self._running = True

    def stopCounting(self):
        """Pause counting; the thread keeps running until its lifetime ends."""
        self._running = False
if __name__ == '__main__':
    # Run the demo standalone using Muntjac's bundled server; `nogui`
    # keeps it headless and `contextRoot` serves from the current dir.
    from muntjac.main import muntjac
    muntjac(RefresherApplication, nogui=True, debug=True,
            contextRoot='.')
| apache-2.0 |
lzw120/django | build/lib/django/db/models/fields/related.py | 10 | 60039 | from operator import attrgetter
from django.db import connection, router
from django.db.backends import util
from django.db.models import signals, get_model
from django.db.models.fields import (AutoField, Field, IntegerField,
PositiveIntegerField, PositiveSmallIntegerField, FieldDoesNotExist)
from django.db.models.related import RelatedObject
from django.db.models.query import QuerySet
from django.db.models.query_utils import QueryWrapper
from django.db.models.deletion import CASCADE
from django.utils.encoding import smart_unicode
from django.utils.translation import ugettext_lazy as _, string_concat
from django.utils.functional import curry, cached_property
from django.core import exceptions
from django import forms
RECURSIVE_RELATIONSHIP_CONSTANT = 'self'
pending_lookups = {}
def add_lazy_relation(cls, field, relation, operation):
    """
    Adds a lookup on ``cls`` when a related field is defined using a string,
    i.e.::
        class MyModel(Model):
            fk = ForeignKey("AnotherModel")
    This string can be:
        * RECURSIVE_RELATIONSHIP_CONSTANT (i.e. "self") to indicate a recursive
          relation.
        * The name of a model (i.e "AnotherModel") to indicate another model in
          the same app.
        * An app-label and model name (i.e. "someapp.AnotherModel") to indicate
          another model in a different app.
    If the other model hasn't yet been loaded -- almost a given if you're using
    lazy relationships -- then the relation won't be set up until the
    class_prepared signal fires at the end of model initialization.
    ``operation`` is the callable performed once the relation can be
    resolved; it is invoked as ``operation(field, model, cls)`` -- either
    immediately below, or later by do_pending_lookups().
    """
    # Check for recursive relations
    if relation == RECURSIVE_RELATIONSHIP_CONSTANT:
        app_label = cls._meta.app_label
        model_name = cls.__name__
    else:
        # Look for an "app.Model" relation
        try:
            app_label, model_name = relation.split(".")
        except ValueError:
            # If we can't split, assume a model in current app
            app_label = cls._meta.app_label
            model_name = relation
        except AttributeError:
            # If it doesn't have a split it's actually a model class
            app_label = relation._meta.app_label
            model_name = relation._meta.object_name
    # Try to look up the related model, and if it's already loaded resolve the
    # string right away. If get_model returns None, it means that the related
    # model isn't loaded yet, so we need to pend the relation until the class
    # is prepared.
    model = get_model(app_label, model_name,
                      seed_cache=False, only_installed=False)
    if model:
        operation(field, model, cls)
    else:
        # Queue for do_pending_lookups(), keyed by the target model.
        key = (app_label, model_name)
        value = (cls, field, operation)
        pending_lookups.setdefault(key, []).append(value)
def do_pending_lookups(sender, **kwargs):
    """
    Handle any pending relations to the sending model. Sent from class_prepared.
    """
    lookup_key = (sender._meta.app_label, sender.__name__)
    # Drain (and discard) every lookup queued against this model.
    for model_class, field, operation in pending_lookups.pop(lookup_key, []):
        operation(field, sender, model_class)
signals.class_prepared.connect(do_pending_lookups)
#HACK
class RelatedField(object):
    """
    Mixin providing relation-aware behavior shared by related field types:
    lazy resolution of string model references, attribute defaults drawn
    from the target model, and preparation of lookup values that may be
    model instances rather than raw primary keys.
    """
    def contribute_to_class(self, cls, name):
        sup = super(RelatedField, self)
        # Store the opts for related_query_name()
        self.opts = cls._meta
        if hasattr(sup, 'contribute_to_class'):
            sup.contribute_to_class(cls, name)
        if not cls._meta.abstract and self.rel.related_name:
            # Interpolate %(class)s / %(app_label)s placeholders in
            # related_name for concrete models.
            self.rel.related_name = self.rel.related_name % {
                'class': cls.__name__.lower(),
                'app_label': cls._meta.app_label.lower(),
            }
        other = self.rel.to
        if isinstance(other, basestring) or other._meta.pk is None:
            # Target is a lazy string reference (or not fully prepared yet);
            # defer wiring until the model class is available.
            def resolve_related_class(field, model, cls):
                field.rel.to = model
                field.do_related_class(model, cls)
            add_lazy_relation(cls, self, other, resolve_related_class)
        else:
            self.do_related_class(other, cls)
    def set_attributes_from_rel(self):
        # Fill in name/verbose_name/field_name defaults from the target model.
        self.name = self.name or (self.rel.to._meta.object_name.lower() + '_' + self.rel.to._meta.pk.name)
        if self.verbose_name is None:
            self.verbose_name = self.rel.to._meta.verbose_name
        self.rel.field_name = self.rel.field_name or self.rel.to._meta.pk.name
    def do_related_class(self, other, cls):
        # Called once the target model class is known (possibly lazily).
        self.set_attributes_from_rel()
        self.related = RelatedObject(other, cls, self)
        if not cls._meta.abstract:
            self.contribute_to_related_class(other, self.related)
    def get_prep_lookup(self, lookup_type, value):
        """
        Prepare a lookup value, translating model instances to PK values
        via _pk_trace(). Only exact/gt/lt/gte/lte/range/in/isnull are valid.
        """
        if hasattr(value, 'prepare'):
            return value.prepare()
        if hasattr(value, '_prepare'):
            return value._prepare()
        # FIXME: lt and gt are explicitly allowed to make
        # get_(next/prev)_by_date work; other lookups are not allowed since that
        # gets messy pretty quick. This is a good candidate for some refactoring
        # in the future.
        if lookup_type in ['exact', 'gt', 'lt', 'gte', 'lte']:
            return self._pk_trace(value, 'get_prep_lookup', lookup_type)
        if lookup_type in ('range', 'in'):
            return [self._pk_trace(v, 'get_prep_lookup', lookup_type) for v in value]
        elif lookup_type == 'isnull':
            # isnull needs no parameters.
            return []
        raise TypeError("Related Field has invalid lookup: %s" % lookup_type)
    def get_db_prep_lookup(self, lookup_type, value, connection, prepared=False):
        """
        Database-level counterpart of get_prep_lookup(); also handles
        queryset values by wrapping their compiled SQL in a QueryWrapper.
        """
        if not prepared:
            value = self.get_prep_lookup(lookup_type, value)
        if hasattr(value, 'get_compiler'):
            value = value.get_compiler(connection=connection)
        if hasattr(value, 'as_sql') or hasattr(value, '_as_sql'):
            # If the value has a relabel_aliases method, it will need to
            # be invoked before the final SQL is evaluated
            if hasattr(value, 'relabel_aliases'):
                return value
            if hasattr(value, 'as_sql'):
                sql, params = value.as_sql()
            else:
                sql, params = value._as_sql(connection=connection)
            return QueryWrapper(('(%s)' % sql), params)
        # FIXME: lt and gt are explicitly allowed to make
        # get_(next/prev)_by_date work; other lookups are not allowed since that
        # gets messy pretty quick. This is a good candidate for some refactoring
        # in the future.
        if lookup_type in ['exact', 'gt', 'lt', 'gte', 'lte']:
            return [self._pk_trace(value, 'get_db_prep_lookup', lookup_type,
                            connection=connection, prepared=prepared)]
        if lookup_type in ('range', 'in'):
            return [self._pk_trace(v, 'get_db_prep_lookup', lookup_type,
                            connection=connection, prepared=prepared)
                    for v in value]
        elif lookup_type == 'isnull':
            return []
        raise TypeError("Related Field has invalid lookup: %s" % lookup_type)
    def _pk_trace(self, value, prep_func, lookup_type, **kwargs):
        # Value may be a primary key, or an object held in a relation.
        # If it is an object, then we need to get the primary key value for
        # that object. In certain conditions (especially one-to-one relations),
        # the primary key may itself be an object - so we need to keep drilling
        # down until we hit a value that can be used for a comparison.
        v = value
        # In the case of an FK to 'self', this check allows to_field to be used
        # for both forwards and reverse lookups across the FK. (For normal FKs,
        # it's only relevant for forward lookups).
        if isinstance(v, self.rel.to):
            field_name = getattr(self.rel, "field_name", None)
        else:
            field_name = None
        try:
            while True:
                if field_name is None:
                    field_name = v._meta.pk.name
                v = getattr(v, field_name)
                field_name = None
        except AttributeError:
            # v is no longer a model instance -- it's the raw value we want.
            pass
        except exceptions.ObjectDoesNotExist:
            v = None
        field = self
        # Follow the FK chain to the ultimate target field so its own
        # prep function does the final conversion.
        while field.rel:
            if hasattr(field.rel, 'field_name'):
                field = field.rel.to._meta.get_field(field.rel.field_name)
            else:
                field = field.rel.to._meta.pk
        if lookup_type in ('range', 'in'):
            v = [v]
        v = getattr(field, prep_func)(lookup_type, v, **kwargs)
        if isinstance(v, list):
            v = v[0]
        return v
    def related_query_name(self):
        # This method defines the name that can be used to identify this
        # related object in a table-spanning query. It uses the lower-cased
        # object_name by default, but this can be overridden with the
        # "related_name" option.
        return self.rel.related_name or self.opts.object_name.lower()
class SingleRelatedObjectDescriptor(object):
    # This class provides the functionality that makes the related-object
    # managers available as attributes on a model class, for fields that have
    # a single "remote" value, on the class pointed to by a related field.
    # In the example "place.restaurant", the restaurant attribute is a
    # SingleRelatedObjectDescriptor instance.
    def __init__(self, related):
        self.related = related
        self.cache_name = related.get_cache_name()
    def is_cached(self, instance):
        # True if __get__/__set__ has already stored the related object.
        return hasattr(instance, self.cache_name)
    def get_query_set(self, **db_hints):
        # Route reads via the router; db_hints lets it see the instance.
        db = router.db_for_read(self.related.model, **db_hints)
        return self.related.model._base_manager.using(db)
    def get_prefetch_query_set(self, instances):
        # Returns the 5-tuple prefetch_related() expects:
        # (queryset, rel-obj key fn, instance key fn, single?, cache name).
        vals = set(instance._get_pk_val() for instance in instances)
        params = {'%s__pk__in' % self.related.field.name: vals}
        return (self.get_query_set(instance=instances[0]).filter(**params),
                attrgetter(self.related.field.attname),
                lambda obj: obj._get_pk_val(),
                True,
                self.cache_name)
    def __get__(self, instance, instance_type=None):
        # Accessed on the class itself: return the descriptor.
        if instance is None:
            return self
        try:
            rel_obj = getattr(instance, self.cache_name)
        except AttributeError:
            # Cache miss: fetch (or record the absence of) the related
            # object, then memoize on the instance.
            params = {'%s__pk' % self.related.field.name: instance._get_pk_val()}
            try:
                rel_obj = self.get_query_set(instance=instance).get(**params)
            except self.related.model.DoesNotExist:
                rel_obj = None
            else:
                # Seed the reverse cache on the related object too.
                setattr(rel_obj, self.related.field.get_cache_name(), instance)
            setattr(instance, self.cache_name, rel_obj)
        if rel_obj is None:
            raise self.related.model.DoesNotExist
        else:
            return rel_obj
    def __set__(self, instance, value):
        if instance is None:
            raise AttributeError("%s must be accessed via instance" % self.related.opts.object_name)
        # The similarity of the code below to the code in
        # ReverseSingleRelatedObjectDescriptor is annoying, but there's a bunch
        # of small differences that would make a common base class convoluted.
        # If null=True, we can assign null here, but otherwise the value needs
        # to be an instance of the related class.
        if value is None and self.related.field.null == False:
            raise ValueError('Cannot assign None: "%s.%s" does not allow null values.' %
                                (instance._meta.object_name, self.related.get_accessor_name()))
        elif value is not None and not isinstance(value, self.related.model):
            raise ValueError('Cannot assign "%r": "%s.%s" must be a "%s" instance.' %
                                (value, instance._meta.object_name,
                                 self.related.get_accessor_name(), self.related.opts.object_name))
        elif value is not None:
            # Propagate database state between the two instances, or verify
            # the router allows relating objects on different databases.
            if instance._state.db is None:
                instance._state.db = router.db_for_write(instance.__class__, instance=value)
            elif value._state.db is None:
                value._state.db = router.db_for_write(value.__class__, instance=instance)
            elif value._state.db is not None and instance._state.db is not None:
                if not router.allow_relation(value, instance):
                    raise ValueError('Cannot assign "%r": instance is on database "%s", value is on database "%s"' %
                                        (value, instance._state.db, value._state.db))
        # Set the value of the related field to the value of the related object's related field
        setattr(value, self.related.field.attname, getattr(instance, self.related.field.rel.get_related_field().attname))
        # Since we already know what the related object is, seed the related
        # object caches now, too. This avoids another db hit if you get the
        # object you just set.
        setattr(instance, self.cache_name, value)
        setattr(value, self.related.field.get_cache_name(), instance)
class ReverseSingleRelatedObjectDescriptor(object):
    # This class provides the functionality that makes the related-object
    # managers available as attributes on a model class, for fields that have
    # a single "remote" value, on the class that defines the related field.
    # In the example "choice.poll", the poll attribute is a
    # ReverseSingleRelatedObjectDescriptor instance.
    def __init__(self, field_with_rel):
        self.field = field_with_rel
        self.cache_name = self.field.get_cache_name()
    def is_cached(self, instance):
        # True if __get__/__set__ has already stored the related object.
        return hasattr(instance, self.cache_name)
    def get_query_set(self, **db_hints):
        db = router.db_for_read(self.field.rel.to, **db_hints)
        rel_mgr = self.field.rel.to._default_manager
        # If the related manager indicates that it should be used for
        # related fields, respect that.
        if getattr(rel_mgr, 'use_for_related_fields', False):
            return rel_mgr.using(db)
        else:
            return QuerySet(self.field.rel.to).using(db)
    def get_prefetch_query_set(self, instances):
        # Returns the 5-tuple prefetch_related() expects:
        # (queryset, rel-obj key fn, instance key fn, single?, cache name).
        vals = set(getattr(instance, self.field.attname) for instance in instances)
        other_field = self.field.rel.get_related_field()
        if other_field.rel:
            params = {'%s__pk__in' % self.field.rel.field_name: vals}
        else:
            params = {'%s__in' % self.field.rel.field_name: vals}
        return (self.get_query_set(instance=instances[0]).filter(**params),
                attrgetter(self.field.rel.field_name),
                attrgetter(self.field.attname),
                True,
                self.cache_name)
    def __get__(self, instance, instance_type=None):
        # Accessed on the class itself: return the descriptor.
        if instance is None:
            return self
        try:
            rel_obj = getattr(instance, self.cache_name)
        except AttributeError:
            val = getattr(instance, self.field.attname)
            if val is None:
                rel_obj = None
            else:
                other_field = self.field.rel.get_related_field()
                if other_field.rel:
                    params = {'%s__pk' % self.field.rel.field_name: val}
                else:
                    params = {'%s__exact' % self.field.rel.field_name: val}
                qs = self.get_query_set(instance=instance)
                # Assuming the database enforces foreign keys, this won't fail.
                rel_obj = qs.get(**params)
                if not self.field.rel.multiple:
                    # One-to-one: seed the reverse cache as well.
                    setattr(rel_obj, self.field.related.get_cache_name(), instance)
            setattr(instance, self.cache_name, rel_obj)
        if rel_obj is None and not self.field.null:
            raise self.field.rel.to.DoesNotExist
        else:
            return rel_obj
    def __set__(self, instance, value):
        if instance is None:
            raise AttributeError("%s must be accessed via instance" % self.field.name)
        # If null=True, we can assign null here, but otherwise the value needs
        # to be an instance of the related class.
        if value is None and self.field.null == False:
            raise ValueError('Cannot assign None: "%s.%s" does not allow null values.' %
                                (instance._meta.object_name, self.field.name))
        elif value is not None and not isinstance(value, self.field.rel.to):
            raise ValueError('Cannot assign "%r": "%s.%s" must be a "%s" instance.' %
                                (value, instance._meta.object_name,
                                 self.field.name, self.field.rel.to._meta.object_name))
        elif value is not None:
            # Propagate database state between the two instances, or verify
            # the router allows relating objects on different databases.
            if instance._state.db is None:
                instance._state.db = router.db_for_write(instance.__class__, instance=value)
            elif value._state.db is None:
                value._state.db = router.db_for_write(value.__class__, instance=instance)
            elif value._state.db is not None and instance._state.db is not None:
                if not router.allow_relation(value, instance):
                    raise ValueError('Cannot assign "%r": instance is on database "%s", value is on database "%s"' %
                                        (value, instance._state.db, value._state.db))
        # If we're setting the value of a OneToOneField to None, we need to clear
        # out the cache on any old related object. Otherwise, deleting the
        # previously-related object will also cause this object to be deleted,
        # which is wrong.
        if value is None:
            # Look up the previously-related object, which may still be available
            # since we've not yet cleared out the related field.
            # Use the cache directly, instead of the accessor; if we haven't
            # populated the cache, then we don't care - we're only accessing
            # the object to invalidate the accessor cache, so there's no
            # need to populate the cache just to expire it again.
            related = getattr(instance, self.cache_name, None)
            # If we've got an old related object, we need to clear out its
            # cache. This cache also might not exist if the related object
            # hasn't been accessed yet.
            if related is not None:
                setattr(related, self.field.related.get_cache_name(), None)
        # Set the value of the related field
        try:
            val = getattr(value, self.field.rel.get_related_field().attname)
        except AttributeError:
            val = None
        setattr(instance, self.field.attname, val)
        # Since we already know what the related object is, seed the related
        # object caches now, too. This avoids another db hit if you get the
        # object you just set.
        setattr(instance, self.cache_name, value)
        if value is not None and not self.field.rel.multiple:
            setattr(value, self.field.related.get_cache_name(), instance)
class ForeignRelatedObjectsDescriptor(object):
    # This class provides the functionality that makes the related-object
    # managers available as attributes on a model class, for fields that have
    # multiple "remote" values and have a ForeignKey pointed at them by
    # some other model. In the example "poll.choice_set", the choice_set
    # attribute is a ForeignRelatedObjectsDescriptor instance.
    def __init__(self, related):
        self.related = related   # RelatedObject instance
    def __get__(self, instance, instance_type=None):
        # Accessed on the class itself: return the descriptor.
        if instance is None:
            return self
        return self.related_manager_cls(instance)
    def __set__(self, instance, value):
        if instance is None:
            raise AttributeError("Manager must be accessed via instance")
        manager = self.__get__(instance)
        # If the foreign key can support nulls, then completely clear the related set.
        # Otherwise, just move the named objects into the set.
        if self.related.field.null:
            manager.clear()
        manager.add(*value)
    @cached_property
    def related_manager_cls(self):
        # Dynamically create a class that subclasses the related model's default
        # manager. Built once per descriptor thanks to @cached_property.
        superclass = self.related.model._default_manager.__class__
        rel_field = self.related.field
        rel_model = self.related.model
        attname = rel_field.rel.get_related_field().attname
        class RelatedManager(superclass):
            def __init__(self, instance):
                super(RelatedManager, self).__init__()
                self.instance = instance
                # Restrict every query to rows pointing at `instance`.
                self.core_filters = {
                    '%s__%s' % (rel_field.name, attname): getattr(instance, attname)
                }
                self.model = rel_model
            def get_query_set(self):
                try:
                    # Serve from prefetch_related() results when available.
                    return self.instance._prefetched_objects_cache[rel_field.related_query_name()]
                except (AttributeError, KeyError):
                    db = self._db or router.db_for_read(self.model, instance=self.instance)
                    return super(RelatedManager, self).get_query_set().using(db).filter(**self.core_filters)
            def get_prefetch_query_set(self, instances):
                db = self._db or router.db_for_read(self.model, instance=instances[0])
                query = {'%s__%s__in' % (rel_field.name, attname):
                             set(getattr(obj, attname) for obj in instances)}
                qs = super(RelatedManager, self).get_query_set().using(db).filter(**query)
                return (qs,
                        attrgetter(rel_field.get_attname()),
                        attrgetter(attname),
                        False,
                        rel_field.related_query_name())
            def add(self, *objs):
                # Point each object's FK at our instance and save it.
                for obj in objs:
                    if not isinstance(obj, self.model):
                        raise TypeError("'%s' instance expected, got %r" % (self.model._meta.object_name, obj))
                    setattr(obj, rel_field.name, self.instance)
                    obj.save()
            add.alters_data = True
            def create(self, **kwargs):
                kwargs[rel_field.name] = self.instance
                db = router.db_for_write(self.model, instance=self.instance)
                return super(RelatedManager, self.db_manager(db)).create(**kwargs)
            create.alters_data = True
            def get_or_create(self, **kwargs):
                # Update kwargs with the related object that this
                # ForeignRelatedObjectsDescriptor knows about.
                kwargs[rel_field.name] = self.instance
                db = router.db_for_write(self.model, instance=self.instance)
                return super(RelatedManager, self.db_manager(db)).get_or_create(**kwargs)
            get_or_create.alters_data = True
            # remove() and clear() are only provided if the ForeignKey can have a value of null.
            if rel_field.null:
                def remove(self, *objs):
                    val = getattr(self.instance, attname)
                    for obj in objs:
                        # Is obj actually part of this descriptor set?
                        if getattr(obj, rel_field.attname) == val:
                            setattr(obj, rel_field.name, None)
                            obj.save()
                        else:
                            raise rel_field.rel.to.DoesNotExist("%r is not related to %r." % (obj, self.instance))
                remove.alters_data = True
                def clear(self):
                    # Bulk-null the FK column for the whole related set.
                    self.update(**{rel_field.name: None})
                clear.alters_data = True
        return RelatedManager
def create_many_related_manager(superclass, rel):
    """Creates a manager that subclasses 'superclass' (which is a Manager)
    and adds behavior for many-to-many related objects.

    `superclass` is the related model's default manager class; `rel` is the
    ManyToManyRel describing the relation (used only to decide whether
    add/remove/clear should exist, based on rel.through._meta.auto_created).
    """
    class ManyRelatedManager(superclass):
        def __init__(self, model=None, query_field_name=None, instance=None, symmetrical=None,
                     source_field_name=None, target_field_name=None, reverse=False,
                     through=None, prefetch_cache_name=None):
            super(ManyRelatedManager, self).__init__()
            self.model = model
            self.query_field_name = query_field_name
            # Every queryset produced by this manager is restricted to rows
            # related to this specific instance via the m2m join.
            self.core_filters = {'%s__pk' % query_field_name: instance._get_pk_val()}
            self.instance = instance
            self.symmetrical = symmetrical
            self.source_field_name = source_field_name
            self.target_field_name = target_field_name
            self.reverse = reverse
            self.through = through
            self.prefetch_cache_name = prefetch_cache_name
            self._pk_val = self.instance.pk
            # The join table stores the instance's pk, so a relation cannot
            # exist (or be queried) before the instance has been saved.
            if self._pk_val is None:
                raise ValueError("%r instance needs to have a primary key value before a many-to-many relationship can be used." % instance.__class__.__name__)

        def get_query_set(self):
            # Serve results cached by prefetch_related() when available;
            # otherwise hit the database chosen by the router.
            try:
                return self.instance._prefetched_objects_cache[self.prefetch_cache_name]
            except (AttributeError, KeyError):
                db = self._db or router.db_for_read(self.instance.__class__, instance=self.instance)
                return super(ManyRelatedManager, self).get_query_set().using(db)._next_is_sticky().filter(**self.core_filters)

        def get_prefetch_query_set(self, instances):
            # Build one queryset covering all `instances` for prefetch_related.
            instance = instances[0]
            from django.db import connections
            db = self._db or router.db_for_read(instance.__class__, instance=instance)
            query = {'%s__pk__in' % self.query_field_name:
                         set(obj._get_pk_val() for obj in instances)}
            qs = super(ManyRelatedManager, self).get_query_set().using(db)._next_is_sticky().filter(**query)

            # M2M: need to annotate the query in order to get the primary model
            # that the secondary model was actually related to. We know that
            # there will already be a join on the join table, so we can just add
            # the select.

            # For non-autocreated 'through' models, can't assume we are
            # dealing with PK values.
            fk = self.through._meta.get_field(self.source_field_name)
            source_col = fk.column
            join_table = self.through._meta.db_table
            connection = connections[db]
            qn = connection.ops.quote_name
            qs = qs.extra(select={'_prefetch_related_val':
                                      '%s.%s' % (qn(join_table), qn(source_col))})
            select_attname = fk.rel.get_related_field().get_attname()
            # Tuple contract: (queryset, fn mapping a fetched object to its
            # "owner" key, fn mapping an instance to the same key, single?,
            # cache name).
            return (qs,
                    attrgetter('_prefetch_related_val'),
                    attrgetter(select_attname),
                    False,
                    self.prefetch_cache_name)

        # If the ManyToMany relation has an intermediary model,
        # the add and remove methods do not exist.
        if rel.through._meta.auto_created:
            def add(self, *objs):
                self._add_items(self.source_field_name, self.target_field_name, *objs)

                # If this is a symmetrical m2m relation to self, add the mirror entry in the m2m table
                if self.symmetrical:
                    self._add_items(self.target_field_name, self.source_field_name, *objs)
            add.alters_data = True

            def remove(self, *objs):
                self._remove_items(self.source_field_name, self.target_field_name, *objs)

                # If this is a symmetrical m2m relation to self, remove the mirror entry in the m2m table
                if self.symmetrical:
                    self._remove_items(self.target_field_name, self.source_field_name, *objs)
            remove.alters_data = True

        def clear(self):
            self._clear_items(self.source_field_name)

            # If this is a symmetrical m2m relation to self, clear the mirror entry in the m2m table
            if self.symmetrical:
                self._clear_items(self.target_field_name)
        clear.alters_data = True

        def create(self, **kwargs):
            # This check needs to be done here, since we can't later remove this
            # from the method lookup table, as we do with add and remove.
            if not self.through._meta.auto_created:
                opts = self.through._meta
                raise AttributeError("Cannot use create() on a ManyToManyField which specifies an intermediary model. Use %s.%s's Manager instead." % (opts.app_label, opts.object_name))
            db = router.db_for_write(self.instance.__class__, instance=self.instance)
            new_obj = super(ManyRelatedManager, self.db_manager(db)).create(**kwargs)
            self.add(new_obj)
            return new_obj
        create.alters_data = True

        def get_or_create(self, **kwargs):
            db = router.db_for_write(self.instance.__class__, instance=self.instance)
            obj, created = \
                super(ManyRelatedManager, self.db_manager(db)).get_or_create(**kwargs)
            # We only need to add() if created because if we got an object back
            # from get() then the relationship already exists.
            if created:
                self.add(obj)
            return obj, created
        get_or_create.alters_data = True

        def _add_items(self, source_field_name, target_field_name, *objs):
            # source_field_name: the PK fieldname in join table for the source object
            # target_field_name: the PK fieldname in join table for the target object
            # *objs - objects to add. Either object instances, or primary keys of object instances.

            # If there aren't any objects, there is nothing to do.
            from django.db.models import Model
            if objs:
                new_ids = set()
                for obj in objs:
                    if isinstance(obj, self.model):
                        # Cross-database relations are not supported.
                        if not router.allow_relation(obj, self.instance):
                           raise ValueError('Cannot add "%r": instance is on database "%s", value is on database "%s"' %
                                               (obj, self.instance._state.db, obj._state.db))
                        new_ids.add(obj.pk)
                    elif isinstance(obj, Model):
                        raise TypeError("'%s' instance expected, got %r" % (self.model._meta.object_name, obj))
                    else:
                        # Assume a raw primary-key value was passed.
                        new_ids.add(obj)
                db = router.db_for_write(self.through, instance=self.instance)
                # Filter out ids already present in the join table so we never
                # insert duplicate rows.
                vals = self.through._default_manager.using(db).values_list(target_field_name, flat=True)
                vals = vals.filter(**{
                    source_field_name: self._pk_val,
                    '%s__in' % target_field_name: new_ids,
                })
                new_ids = new_ids - set(vals)

                if self.reverse or source_field_name == self.source_field_name:
                    # Don't send the signal when we are inserting the
                    # duplicate data row for symmetrical reverse entries.
                    signals.m2m_changed.send(sender=self.through, action='pre_add',
                        instance=self.instance, reverse=self.reverse,
                        model=self.model, pk_set=new_ids, using=db)
                # Add the ones that aren't there already
                self.through._default_manager.using(db).bulk_create([
                    self.through(**{
                        '%s_id' % source_field_name: self._pk_val,
                        '%s_id' % target_field_name: obj_id,
                    })
                    for obj_id in new_ids
                ])

                if self.reverse or source_field_name == self.source_field_name:
                    # Don't send the signal when we are inserting the
                    # duplicate data row for symmetrical reverse entries.
                    signals.m2m_changed.send(sender=self.through, action='post_add',
                        instance=self.instance, reverse=self.reverse,
                        model=self.model, pk_set=new_ids, using=db)

        def _remove_items(self, source_field_name, target_field_name, *objs):
            # source_field_name: the PK colname in join table for the source object
            # target_field_name: the PK colname in join table for the target object
            # *objs - objects to remove

            # If there aren't any objects, there is nothing to do.
            if objs:
                # Check that all the objects are of the right type
                old_ids = set()
                for obj in objs:
                    if isinstance(obj, self.model):
                        old_ids.add(obj.pk)
                    else:
                        old_ids.add(obj)
                # Work out what DB we're operating on
                db = router.db_for_write(self.through, instance=self.instance)
                # Send a signal to the other end if need be.
                if self.reverse or source_field_name == self.source_field_name:
                    # Don't send the signal when we are deleting the
                    # duplicate data row for symmetrical reverse entries.
                    signals.m2m_changed.send(sender=self.through, action="pre_remove",
                        instance=self.instance, reverse=self.reverse,
                        model=self.model, pk_set=old_ids, using=db)
                # Remove the specified objects from the join table
                self.through._default_manager.using(db).filter(**{
                    source_field_name: self._pk_val,
                    '%s__in' % target_field_name: old_ids
                }).delete()
                if self.reverse or source_field_name == self.source_field_name:
                    # Don't send the signal when we are deleting the
                    # duplicate data row for symmetrical reverse entries.
                    signals.m2m_changed.send(sender=self.through, action="post_remove",
                        instance=self.instance, reverse=self.reverse,
                        model=self.model, pk_set=old_ids, using=db)

        def _clear_items(self, source_field_name):
            db = router.db_for_write(self.through, instance=self.instance)
            # source_field_name: the PK colname in join table for the source object
            if self.reverse or source_field_name == self.source_field_name:
                # Don't send the signal when we are clearing the
                # duplicate data rows for symmetrical reverse entries.
                signals.m2m_changed.send(sender=self.through, action="pre_clear",
                    instance=self.instance, reverse=self.reverse,
                    model=self.model, pk_set=None, using=db)
            # Delete every join row for this instance in one query.
            self.through._default_manager.using(db).filter(**{
                source_field_name: self._pk_val
            }).delete()
            if self.reverse or source_field_name == self.source_field_name:
                # Don't send the signal when we are clearing the
                # duplicate data rows for symmetrical reverse entries.
                signals.m2m_changed.send(sender=self.through, action="post_clear",
                    instance=self.instance, reverse=self.reverse,
                    model=self.model, pk_set=None, using=db)

    return ManyRelatedManager
class ManyRelatedObjectsDescriptor(object):
    """
    Accessor for the "reverse" side of a ManyToManyField: exposes a
    related-object manager as an attribute on the model that is *pointed at*
    by a ManyToManyField defined on some other model. In the example
    ``publication.article_set``, the ``article_set`` attribute is a
    ManyRelatedObjectsDescriptor instance.
    """

    def __init__(self, related):
        # `related` is a RelatedObject instance describing the relation.
        self.related = related

    @cached_property
    def related_manager_cls(self):
        """Build (once) a manager class subclassing the related model's default manager."""
        base_manager_cls = self.related.model._default_manager.__class__
        return create_many_related_manager(base_manager_cls, self.related.field.rel)

    def __get__(self, instance, instance_type=None):
        # Class-level access returns the descriptor itself.
        if instance is None:
            return self
        field = self.related.field
        # Source/target field names are swapped relative to the forward
        # descriptor because the relation is traversed in reverse here.
        return self.related_manager_cls(
            model=self.related.model,
            query_field_name=field.name,
            prefetch_cache_name=field.related_query_name(),
            instance=instance,
            symmetrical=False,
            source_field_name=field.m2m_reverse_field_name(),
            target_field_name=field.m2m_field_name(),
            reverse=True,
            through=field.rel.through,
        )

    def __set__(self, instance, value):
        """Replace the whole related set with the objects in `value`."""
        if instance is None:
            raise AttributeError("Manager must be accessed via instance")

        through = self.related.field.rel.through
        if not through._meta.auto_created:
            opts = through._meta
            raise AttributeError("Cannot set values on a ManyToManyField which specifies an intermediary model. Use %s.%s's Manager instead." % (opts.app_label, opts.object_name))

        manager = self.__get__(instance)
        manager.clear()
        manager.add(*value)
class ReverseManyRelatedObjectsDescriptor(object):
    """
    Accessor for the "forward" side of a ManyToManyField: exposes a
    related-object manager as an attribute on the model that *defines* the
    ManyToManyField. In the example ``article.publications``, the
    ``publications`` attribute is a ReverseManyRelatedObjectsDescriptor
    instance.
    """

    def __init__(self, m2m_field):
        self.field = m2m_field

    @property
    def through(self):
        # Exposed so the intermediate model is reachable as
        # ``Model.field.through`` (for inlines, etc.). A property is used so
        # the fully resolved (non-lazy) model is always returned.
        return self.field.rel.through

    @cached_property
    def related_manager_cls(self):
        """Build (once) a manager class subclassing the target model's default manager."""
        target_model = self.field.rel.to
        return create_many_related_manager(
            target_model._default_manager.__class__,
            self.field.rel,
        )

    def __get__(self, instance, instance_type=None):
        # Class-level access returns the descriptor itself.
        if instance is None:
            return self
        rel = self.field.rel
        return self.related_manager_cls(
            model=rel.to,
            query_field_name=self.field.related_query_name(),
            prefetch_cache_name=self.field.name,
            instance=instance,
            symmetrical=rel.symmetrical,
            source_field_name=self.field.m2m_field_name(),
            target_field_name=self.field.m2m_reverse_field_name(),
            reverse=False,
            through=rel.through,
        )

    def __set__(self, instance, value):
        """Replace the whole related set with the objects in `value`."""
        if instance is None:
            raise AttributeError("Manager must be accessed via instance")

        through = self.field.rel.through
        if not through._meta.auto_created:
            opts = through._meta
            raise AttributeError("Cannot set values on a ManyToManyField which specifies an intermediary model. Use %s.%s's Manager instead." % (opts.app_label, opts.object_name))

        manager = self.__get__(instance)
        manager.clear()
        manager.add(*value)
class ManyToOneRel(object):
    """Relation metadata for a ForeignKey (the field's ``rel`` object)."""

    def __init__(self, to, field_name, related_name=None, limit_choices_to=None,
                 parent_link=False, on_delete=None):
        try:
            to._meta
        except AttributeError:
            # No _meta attribute: `to` must be a lazy reference — a model
            # name string or RECURSIVE_RELATIONSHIP_CONSTANT ('self').
            assert isinstance(to, basestring), "'to' must be either a model, a model name or the string %r" % RECURSIVE_RELATIONSHIP_CONSTANT
        self.to = to
        self.field_name = field_name
        self.related_name = related_name
        self.limit_choices_to = {} if limit_choices_to is None else limit_choices_to
        # A foreign key relates the target to potentially many sources.
        self.multiple = True
        self.parent_link = parent_link
        self.on_delete = on_delete

    def is_hidden(self):
        """Should the related object be hidden? (related_name ending in '+')"""
        return self.related_name and self.related_name[-1] == '+'

    def get_related_field(self):
        """
        Returns the Field in the 'to' object to which this relationship is
        tied.
        """
        info = self.to._meta.get_field_by_name(self.field_name)
        # info[2] is the "direct" flag; a non-direct hit means the name does
        # not identify a real local field on the target model.
        if not info[2]:
            raise FieldDoesNotExist("No related field named '%s'" %
                    self.field_name)
        return info[0]
class OneToOneRel(ManyToOneRel):
    """Relation metadata for a OneToOneField: a ManyToOneRel with multiple=False."""

    def __init__(self, to, field_name, related_name=None, limit_choices_to=None,
                 parent_link=False, on_delete=None):
        super(OneToOneRel, self).__init__(
            to,
            field_name,
            related_name=related_name,
            limit_choices_to=limit_choices_to,
            parent_link=parent_link,
            on_delete=on_delete,
        )
        # A one-to-one relation points at a single object, not a set.
        self.multiple = False
class ManyToManyRel(object):
    """Relation metadata for a ManyToManyField (the field's ``rel`` object)."""

    def __init__(self, to, related_name=None, limit_choices_to=None,
                 symmetrical=True, through=None):
        self.to = to
        self.related_name = related_name
        self.limit_choices_to = {} if limit_choices_to is None else limit_choices_to
        self.symmetrical = symmetrical
        self.multiple = True
        # `through` is the intermediary model (None until one is assigned or
        # auto-created).
        self.through = through

    def is_hidden(self):
        """Should the related object be hidden? (related_name ending in '+')"""
        return self.related_name and self.related_name[-1] == '+'

    def get_related_field(self):
        """
        Returns the field in the 'to' object to which this relationship is
        tied (this is always the primary key on the target model). Provided
        for symmetry with ManyToOneRel.
        """
        return self.to._meta.pk
class ForeignKey(RelatedField, Field):
    """Many-to-one relation: each instance holds a reference to one related object."""
    # An empty string is never a valid FK value; it is normalized to NULL
    # in get_db_prep_save().
    empty_strings_allowed = False
    default_error_messages = {
        'invalid': _('Model %(model)s with pk %(pk)r does not exist.')
    }
    description = _("Foreign Key (type determined by related field)")

    def __init__(self, to, to_field=None, rel_class=ManyToOneRel, **kwargs):
        # `to` may be a model class, a model-name string, or 'self';
        # `rel_class` lets subclasses (OneToOneField) substitute their own
        # rel type.
        try:
            to_name = to._meta.object_name.lower()
        except AttributeError: # to._meta doesn't exist, so it must be RECURSIVE_RELATIONSHIP_CONSTANT
            assert isinstance(to, basestring), "%s(%r) is invalid. First parameter to ForeignKey must be either a model, a model name, or the string %r" % (self.__class__.__name__, to, RECURSIVE_RELATIONSHIP_CONSTANT)
        else:
            assert not to._meta.abstract, "%s cannot define a relation with abstract class %s" % (self.__class__.__name__, to._meta.object_name)
            # For backwards compatibility purposes, we need to *try* and set
            # the to_field during FK construction. It won't be guaranteed to
            # be correct until contribute_to_class is called. Refs #12190.
            to_field = to_field or (to._meta.pk and to._meta.pk.name)
        kwargs['verbose_name'] = kwargs.get('verbose_name', None)

        # FK columns are indexed by default unless explicitly disabled.
        if 'db_index' not in kwargs:
            kwargs['db_index'] = True

        kwargs['rel'] = rel_class(to, to_field,
            related_name=kwargs.pop('related_name', None),
            limit_choices_to=kwargs.pop('limit_choices_to', None),
            parent_link=kwargs.pop('parent_link', False),
            on_delete=kwargs.pop('on_delete', CASCADE),
        )
        Field.__init__(self, **kwargs)

    def validate(self, value, model_instance):
        """Check that `value` identifies an existing row in the related table.

        Raises ValidationError (messages key 'invalid') when it does not.
        """
        # Parent links (MTI) are managed internally; skip validation.
        if self.rel.parent_link:
            return
        super(ForeignKey, self).validate(value, model_instance)
        if value is None:
            return

        using = router.db_for_read(model_instance.__class__, instance=model_instance)
        qs = self.rel.to._default_manager.using(using).filter(
                **{self.rel.field_name: value}
             )
        # Respect limit_choices_to so disallowed targets fail validation too.
        qs = qs.complex_filter(self.rel.limit_choices_to)
        if not qs.exists():
            raise exceptions.ValidationError(self.error_messages['invalid'] % {
                'model': self.rel.to._meta.verbose_name, 'pk': value})

    def get_attname(self):
        # The raw value is stored on "<name>_id".
        return '%s_id' % self.name

    def get_validator_unique_lookup_type(self):
        return '%s__%s__exact' % (self.name, self.rel.get_related_field().name)

    def get_default(self):
        "Here we check if the default value is an object and return the to_field if so."
        field_default = super(ForeignKey, self).get_default()
        if isinstance(field_default, self.rel.to):
            return getattr(field_default, self.rel.get_related_field().attname)
        return field_default

    def get_db_prep_save(self, value, connection):
        # Empty string / None both mean "no relation" and are stored as NULL;
        # otherwise defer to the related field's own DB preparation.
        if value == '' or value == None:
            return None
        else:
            return self.rel.get_related_field().get_db_prep_save(value,
                connection=connection)

    def value_to_string(self, obj):
        if not obj:
            # In required many-to-one fields with only one available choice,
            # select that one available choice. Note: For SelectFields
            # we have to check that the length of choices is *2*, not 1,
            # because SelectFields always have an initial "blank" value.
            if not self.blank and self.choices:
                choice_list = self.get_choices_default()
                if len(choice_list) == 2:
                    return smart_unicode(choice_list[1][0])
        return Field.value_to_string(self, obj)

    def contribute_to_class(self, cls, name):
        super(ForeignKey, self).contribute_to_class(cls, name)
        # Install the forward accessor (article.publication).
        setattr(cls, self.name, ReverseSingleRelatedObjectDescriptor(self))
        if isinstance(self.rel.to, basestring):
            # Lazy reference: record the name; it resolves later.
            target = self.rel.to
        else:
            target = self.rel.to._meta.db_table
        cls._meta.duplicate_targets[self.column] = (target, "o2m")

    def contribute_to_related_class(self, cls, related):
        # Internal FK's - i.e., those with a related name ending with '+' -
        # don't get a related descriptor.
        if not self.rel.is_hidden():
            setattr(cls, related.get_accessor_name(), ForeignRelatedObjectsDescriptor(related))
            if self.rel.limit_choices_to:
                cls._meta.related_fkey_lookups.append(self.rel.limit_choices_to)
        # Now that the target class exists, fill in the deferred to_field
        # (see the note in __init__ about #12190).
        if self.rel.field_name is None:
            self.rel.field_name = cls._meta.pk.name

    def formfield(self, **kwargs):
        """Return a ModelChoiceField limited by limit_choices_to.

        Raises ValueError if the related model is still an unresolved lazy
        reference (string).
        """
        db = kwargs.pop('using', None)
        if isinstance(self.rel.to, basestring):
            raise ValueError("Cannot create form field for %r yet, because "
                             "its related model %r has not been loaded yet" %
                             (self.name, self.rel.to))
        defaults = {
            'form_class': forms.ModelChoiceField,
            'queryset': self.rel.to._default_manager.using(db).complex_filter(self.rel.limit_choices_to),
            'to_field_name': self.rel.field_name,
        }
        defaults.update(kwargs)
        return super(ForeignKey, self).formfield(**defaults)

    def db_type(self, connection):
        # The database column type of a ForeignKey is the column type
        # of the field to which it points. An exception is if the ForeignKey
        # points to an AutoField/PositiveIntegerField/PositiveSmallIntegerField,
        # in which case the column type is simply that of an IntegerField.
        # If the database needs similar types for key fields however, the only
        # thing we can do is making AutoField an IntegerField.
        rel_field = self.rel.get_related_field()
        if (isinstance(rel_field, AutoField) or
                (not connection.features.related_fields_match_type and
                isinstance(rel_field, (PositiveIntegerField,
                                       PositiveSmallIntegerField)))):
            return IntegerField().db_type(connection=connection)
        return rel_field.db_type(connection=connection)
class OneToOneField(ForeignKey):
    """
    A OneToOneField is essentially the same as a ForeignKey, with the
    exception that it always carries a "unique" constraint with it and the
    reverse relation always returns the object pointed to (since there will
    only ever be one), rather than returning a list.
    """
    description = _("One-to-one relationship")

    def __init__(self, to, to_field=None, **kwargs):
        # The unique constraint is what makes the relation one-to-one.
        kwargs['unique'] = True
        super(OneToOneField, self).__init__(to, to_field, OneToOneRel, **kwargs)

    def contribute_to_related_class(self, cls, related):
        # Unlike a plain FK, the reverse accessor yields a single object.
        accessor = SingleRelatedObjectDescriptor(related)
        setattr(cls, related.get_accessor_name(), accessor)

    def formfield(self, **kwargs):
        if self.rel.parent_link:
            # Parent links (multi-table inheritance) are managed internally
            # and never get a form field.
            return None
        return super(OneToOneField, self).formfield(**kwargs)

    def save_form_data(self, instance, data):
        if isinstance(data, self.rel.to):
            # A model instance: assign through the descriptor attribute.
            setattr(instance, self.name, data)
        else:
            # A raw pk value: assign to the underlying "<name>_id" attribute.
            setattr(instance, self.attname, data)
def create_many_to_many_intermediary_model(field, klass):
    """Dynamically build the auto-created 'through' model for an m2m field.

    `field` is the ManyToManyField; `klass` is the model class defining it.
    Returns a new Model subclass with two ForeignKeys (one to each side) and
    a unique_together constraint over them.
    """
    from django.db import models
    managed = True
    if isinstance(field.rel.to, basestring) and field.rel.to != RECURSIVE_RELATIONSHIP_CONSTANT:
        # Lazy reference to another model by name: resolve 'managed' later,
        # once the target class actually exists.
        to_model = field.rel.to
        to = to_model.split('.')[-1]

        def set_managed(field, model, cls):
            field.rel.through._meta.managed = model._meta.managed or cls._meta.managed
        add_lazy_relation(klass, field, to_model, set_managed)
    elif isinstance(field.rel.to, basestring):
        # 'self'-referential relation: the target is the defining class.
        to = klass._meta.object_name
        to_model = klass
        managed = klass._meta.managed
    else:
        to = field.rel.to._meta.object_name
        to_model = field.rel.to
        managed = klass._meta.managed or to_model._meta.managed
    name = '%s_%s' % (klass._meta.object_name, field.name)
    if field.rel.to == RECURSIVE_RELATIONSHIP_CONSTANT or to == klass._meta.object_name:
        # Both FKs would otherwise get the same accessor name; disambiguate
        # self-referential join columns as from_<model>/to_<model>.
        from_ = 'from_%s' % to.lower()
        to = 'to_%s' % to.lower()
    else:
        from_ = klass._meta.object_name.lower()
        to = to.lower()
    meta = type('Meta', (object,), {
        'db_table': field._get_m2m_db_table(klass._meta),
        'managed': managed,
        # auto_created marks this as an implicit through model, which is what
        # enables add()/remove()/clear() on the related managers.
        'auto_created': klass,
        'app_label': klass._meta.app_label,
        'db_tablespace': klass._meta.db_tablespace,
        'unique_together': (from_, to),
        'verbose_name': '%(from)s-%(to)s relationship' % {'from': from_, 'to': to},
        'verbose_name_plural': '%(from)s-%(to)s relationships' % {'from': from_, 'to': to},
    })
    # Construct and return the new class.
    return type(name, (models.Model,), {
        'Meta': meta,
        '__module__': klass.__module__,
        # The '+' suffix hides the reverse accessors of the two FKs.
        from_: models.ForeignKey(klass, related_name='%s+' % name, db_tablespace=field.db_tablespace),
        to: models.ForeignKey(to_model, related_name='%s+' % name, db_tablespace=field.db_tablespace)
    })
class ManyToManyField(RelatedField, Field):
    """Many-to-many relation, stored via an intermediary ('through') model."""
    description = _("Many-to-many relationship")

    def __init__(self, to, **kwargs):
        try:
            assert not to._meta.abstract, "%s cannot define a relation with abstract class %s" % (self.__class__.__name__, to._meta.object_name)
        except AttributeError: # to._meta doesn't exist, so it must be RECURSIVE_RELATIONSHIP_CONSTANT
            assert isinstance(to, basestring), "%s(%r) is invalid. First parameter to ManyToManyField must be either a model, a model name, or the string %r" % (self.__class__.__name__, to, RECURSIVE_RELATIONSHIP_CONSTANT)
            # Python 2.6 and earlier require dictionary keys to be of str type,
            # not unicode and class names must be ASCII (in Python 2.x), so we
            # forcibly coerce it here (breaks early if there's a problem).
            to = str(to)

        kwargs['verbose_name'] = kwargs.get('verbose_name', None)
        # Symmetry only makes sense for self-referential relations, hence the
        # default depends on whether `to` is 'self'.
        kwargs['rel'] = ManyToManyRel(to,
            related_name=kwargs.pop('related_name', None),
            limit_choices_to=kwargs.pop('limit_choices_to', None),
            symmetrical=kwargs.pop('symmetrical', to==RECURSIVE_RELATIONSHIP_CONSTANT),
            through=kwargs.pop('through', None))

        self.db_table = kwargs.pop('db_table', None)
        if kwargs['rel'].through is not None:
            # The explicit through model owns its own table name.
            assert self.db_table is None, "Cannot specify a db_table if an intermediary model is used."

        Field.__init__(self, **kwargs)

        msg = _('Hold down "Control", or "Command" on a Mac, to select more than one.')
        self.help_text = string_concat(self.help_text, ' ', msg)

    def get_choices_default(self):
        return Field.get_choices(self, include_blank=False)

    def _get_m2m_db_table(self, opts):
        "Function that can be curried to provide the m2m table name for this relation"
        if self.rel.through is not None:
            return self.rel.through._meta.db_table
        elif self.db_table:
            return self.db_table
        else:
            # Auto-generated name, truncated to the backend's limit.
            return util.truncate_name('%s_%s' % (opts.db_table, self.name),
                                      connection.ops.max_name_length())

    def _get_m2m_attr(self, related, attr):
        "Function that can be curried to provide the source accessor or DB column name for the m2m table"
        # Result is cached per-attr on the field instance.
        cache_attr = '_m2m_%s_cache' % attr
        if hasattr(self, cache_attr):
            return getattr(self, cache_attr)
        # The source FK is the one on the through model pointing back at the
        # model that defines this m2m field.
        for f in self.rel.through._meta.fields:
            if hasattr(f,'rel') and f.rel and f.rel.to == related.model:
                setattr(self, cache_attr, getattr(f, attr))
                return getattr(self, cache_attr)

    def _get_m2m_reverse_attr(self, related, attr):
        "Function that can be curried to provide the related accessor or DB column name for the m2m table"
        cache_attr = '_m2m_reverse_%s_cache' % attr
        if hasattr(self, cache_attr):
            return getattr(self, cache_attr)
        found = False
        for f in self.rel.through._meta.fields:
            if hasattr(f,'rel') and f.rel and f.rel.to == related.parent_model:
                if related.model == related.parent_model:
                    # If this is an m2m-intermediate to self,
                    # the first foreign key you find will be
                    # the source column. Keep searching for
                    # the second foreign key.
                    if found:
                        setattr(self, cache_attr, getattr(f, attr))
                        break
                    else:
                        found = True
                else:
                    setattr(self, cache_attr, getattr(f, attr))
                    break
        return getattr(self, cache_attr)

    def value_to_string(self, obj):
        # Serializes as the list of related primary keys.
        data = ''
        if obj:
            qs = getattr(obj, self.name).all()
            data = [instance._get_pk_val() for instance in qs]
        else:
            # In required many-to-many fields with only one available choice,
            # select that one available choice.
            if not self.blank:
                choices_list = self.get_choices_default()
                if len(choices_list) == 1:
                    data = [choices_list[0][0]]
        return smart_unicode(data)

    def contribute_to_class(self, cls, name):
        # To support multiple relations to self, it's useful to have a non-None
        # related name on symmetrical relations for internal reasons. The
        # concept doesn't make a lot of sense externally ("you want me to
        # specify *what* on my non-reversible relation?!"), so we set it up
        # automatically. The funky name reduces the chance of an accidental
        # clash.
        if self.rel.symmetrical and (self.rel.to == "self" or self.rel.to == cls._meta.object_name):
            self.rel.related_name = "%s_rel_+" % name

        super(ManyToManyField, self).contribute_to_class(cls, name)

        # The intermediate m2m model is not auto created if:
        #  1) There is a manually specified intermediate, or
        #  2) The class owning the m2m field is abstract.
        if not self.rel.through and not cls._meta.abstract:
            self.rel.through = create_many_to_many_intermediary_model(self, cls)

        # Add the descriptor for the m2m relation
        setattr(cls, self.name, ReverseManyRelatedObjectsDescriptor(self))

        # Set up the accessor for the m2m table name for the relation
        self.m2m_db_table = curry(self._get_m2m_db_table, cls._meta)

        # Populate some necessary rel arguments so that cross-app relations
        # work correctly.
        if isinstance(self.rel.through, basestring):
            # Lazy through reference: swap in the real model once it exists.
            def resolve_through_model(field, model, cls):
                field.rel.through = model
            add_lazy_relation(cls, self, self.rel.through, resolve_through_model)

        if isinstance(self.rel.to, basestring):
            target = self.rel.to
        else:
            target = self.rel.to._meta.db_table
        cls._meta.duplicate_targets[self.column] = (target, "m2m")

    def contribute_to_related_class(self, cls, related):
        # Internal M2Ms (i.e., those with a related name ending with '+')
        # don't get a related descriptor.
        if not self.rel.is_hidden():
            setattr(cls, related.get_accessor_name(), ManyRelatedObjectsDescriptor(related))

        # Set up the accessors for the column names on the m2m table.
        # These are curried so they are only resolved (and cached) on first
        # use, after the through model is fully set up.
        self.m2m_column_name = curry(self._get_m2m_attr, related, 'column')
        self.m2m_reverse_name = curry(self._get_m2m_reverse_attr, related, 'column')

        self.m2m_field_name = curry(self._get_m2m_attr, related, 'name')
        self.m2m_reverse_field_name = curry(self._get_m2m_reverse_attr, related, 'name')

        get_m2m_rel = curry(self._get_m2m_attr, related, 'rel')
        self.m2m_target_field_name = lambda: get_m2m_rel().field_name
        get_m2m_reverse_rel = curry(self._get_m2m_reverse_attr, related, 'rel')
        self.m2m_reverse_target_field_name = lambda: get_m2m_reverse_rel().field_name

    def set_attributes_from_rel(self):
        # Intentionally a no-op: overrides inherited behavior (base class not
        # visible here) — an m2m field has no single related column whose
        # name/verbose_name it should adopt.
        pass

    def value_from_object(self, obj):
        "Returns the value of this field in the given model instance."
        return getattr(obj, self.attname).all()

    def save_form_data(self, instance, data):
        setattr(instance, self.attname, data)

    def formfield(self, **kwargs):
        db = kwargs.pop('using', None)
        defaults = {
            'form_class': forms.ModelMultipleChoiceField,
            'queryset': self.rel.to._default_manager.using(db).complex_filter(self.rel.limit_choices_to)
        }
        defaults.update(kwargs)
        # If initial is passed in, it's a list of related objects, but the
        # MultipleChoiceField takes a list of IDs.
        if defaults.get('initial') is not None:
            initial = defaults['initial']
            if callable(initial):
                initial = initial()
            defaults['initial'] = [i._get_pk_val() for i in initial]
        return super(ManyToManyField, self).formfield(**defaults)

    def db_type(self, connection):
        # A ManyToManyField is not represented by a single column,
        # so return None.
        return None
| bsd-3-clause |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.