from datetime import date
import os
def today():
return str(date.today())
def check_dir(directory):
if not os.path.exists(directory):
os.makedirs(directory)
def write_list_to_text(data, filename):
with open(filename, 'w') as f:
f.write('\n'.join(str(d) for d in data))
print "Written to %s" % filename
|
from basic import Basic
from singleton import Singleton, S
from evalf import EvalfMixin
from numbers import Float, Integer
from sympify import _sympify, sympify, SympifyError
from sympy.mpmath import mpi, mpf
from containers import Tuple
class Set(Basic):
"""
Represents any kind of set.
Real intervals are represented by the Interval class and unions of sets
by the Union class. The empty set is represented by the EmptySet class
and available as a singleton as S.EmptySet.
"""
def union(self, other):
"""
Returns the union of 'self' and 'other'. As a shortcut it is possible
to use the '+' operator:
>>> from sympy import Interval, FiniteSet
>>> Interval(0, 1).union(Interval(2, 3))
[0, 1] U [2, 3]
>>> Interval(0, 1) + Interval(2, 3)
[0, 1] U [2, 3]
>>> Interval(1, 2, True, True) + FiniteSet(2, 3)
(1, 2] U {3}
Similarly it is possible to use the '-' operator for set
differences:
>>> Interval(0, 2) - Interval(0, 1)
(1, 2]
>>> Interval(1, 3) - FiniteSet(2)
[1, 2) U (2, 3]
"""
return Union(self, other)
def intersect(self, other):
"""
Returns the intersection of 'self' and 'other'.
>>> from sympy import Interval
>>> Interval(1, 3).intersect(Interval(1, 2))
[1, 2]
"""
return self._intersect(other)
def _intersect(self, other):
raise NotImplementedError("(%s)._intersect(%s)" % (self, other))
@property
def complement(self):
"""
The complement of 'self'.
As a shortcut it is possible to use the '~' or '-' operators:
>>> from sympy import Interval
>>> Interval(0, 1).complement
(-oo, 0) U (1, oo)
>>> ~Interval(0, 1)
(-oo, 0) U (1, oo)
>>> -Interval(0, 1)
(-oo, 0) U (1, oo)
"""
return self._complement
@property
def _complement(self):
raise NotImplementedError("(%s)._complement" % self)
@property
def inf(self):
"""
The infimum of 'self'.
>>> from sympy import Interval, Union
>>> Interval(0, 1).inf
0
>>> Union(Interval(0, 1), Interval(2, 3)).inf
0
"""
return self._inf
@property
def _inf(self):
raise NotImplementedError("(%s)._inf" % self)
@property
def sup(self):
""" The supremum of 'self'.
>>> from sympy import Interval, Union
>>> Interval(0, 1).sup
1
>>> Union(Interval(0, 1), Interval(2, 3)).sup
3
"""
return self._sup
@property
def _sup(self):
raise NotImplementedError("(%s)._sup" % self)
def contains(self, other):
"""
Returns True if 'other' is contained in 'self' as an element.
As a shortcut it is possible to use the 'in' operator:
>>> from sympy import Interval
>>> Interval(0, 1).contains(0.5)
True
>>> 0.5 in Interval(0, 1)
True
"""
return self._contains(other)
def _contains(self, other):
raise NotImplementedError("(%s)._contains(%s)" % (self, other))
def subset(self, other):
"""
Returns True if 'other' is a subset of 'self'.
>>> from sympy import Interval
        >>> Interval(0, 2).subset(Interval(0, 1))
        True
        >>> Interval(0, 1).subset(Interval(0, 2))
        False
"""
if isinstance(other, Set):
return self.intersect(other) == other
else:
raise ValueError("Unknown argument '%s'" % other)
@property
def measure(self):
"""
The (Lebesgue) measure of 'self'.
>>> from sympy import Interval, Union
>>> Interval(0, 1).measure
1
>>> Union(Interval(0, 1), Interval(2, 3)).measure
2
"""
return self._measure
@property
def _measure(self):
raise NotImplementedError("(%s)._measure" % self)
def __add__(self, other):
return self.union(other)
def __or__(self, other):
return self.union(other)
def __and__(self, other):
return self.intersect(other)
def __mul__(self, other):
return ProductSet(self, other)
def __pow__(self, exp):
        if not (sympify(exp).is_Integer and exp >= 0):
            raise ValueError("%s: Exponent must be a nonnegative Integer" % exp)
return ProductSet([self]*exp)
def __sub__(self, other):
return self.intersect(other.complement)
def __neg__(self):
return self.complement
def __invert__(self):
return self.complement
def __contains__(self, other):
result = self.contains(other)
if not isinstance(result, bool):
raise TypeError('contains did not evaluate to a bool: %r' % result)
return result
def _eval_subs(self, old, new):
if self == old:
return new
new_args = []
for arg in self.args:
if arg == old:
new_args.append(new)
elif isinstance(arg, Basic):
new_args.append(arg._eval_subs(old, new))
else:
new_args.append(arg)
return self.__class__(*new_args)
@property
def is_number(self):
return False
@property
def is_real(self):
return False
@property
def is_iterable(self):
return False
@property
def is_interval(self):
return False
@property
def is_FiniteSet(self):
return False
@property
def is_Interval(self):
return False
@property
def is_ProductSet(self):
return False
@property
def is_Union(self):
return False
class RealSet(Set, EvalfMixin):
"""
A set of real values
"""
@property
def is_real(self):
return True
class ProductSet(Set):
"""
Represents a Cartesian Product of Sets.
Usage:
Returns a cartesian product given several sets as either an iterable
or individual arguments.
Can use '*' operator on any sets for convenient shorthand.
Examples:
>>> from sympy import Interval, FiniteSet, ProductSet
>>> I = Interval(0, 5); S = FiniteSet(1, 2, 3)
>>> ProductSet(I, S)
[0, 5] x {1, 2, 3}
>>> (2, 2) in ProductSet(I, S)
True
>>> Interval(0, 1) * Interval(0, 1) # The unit square
[0, 1] x [0, 1]
>>> coin = FiniteSet('H','T')
>>> for pair in coin**2: print pair
(H, H)
(H, T)
(T, H)
(T, T)
Notes:
- Passes most operations down to the argument sets
- Flattens Products of ProductSets
"""
def __new__(cls, *sets, **assumptions):
def flatten(arg):
if isinstance(arg, Set):
if arg.is_ProductSet:
return sum(map(flatten, arg.args), [])
else:
return [arg]
elif is_flattenable(arg):
return sum(map(flatten, arg), [])
raise TypeError("Input must be Sets or iterables of Sets")
sets = flatten(list(sets))
if EmptySet() in sets or len(sets)==0:
return EmptySet()
return Basic.__new__(cls, *sets, **assumptions)
def _contains(self, element):
"""
in operator for ProductSets
>>> from sympy import Interval
>>> (2, 3) in Interval(0, 5) * Interval(0, 5)
True
>>> (10, 10) in Interval(0, 5) * Interval(0, 5)
False
        Passes the operation on to the constituent sets
"""
        if len(element) != len(self.args):
return False
from sympy.logic.boolalg import And
return And(*[set.contains(item) for set,item in zip(self.sets,element)])
def _intersect(self, other):
if other.is_Union:
return Union(self.intersect(set) for set in other.args)
if not other.is_ProductSet:
raise TypeError("%s is not a Product Set."%str(other))
if len(other.args) != len(self.args):
raise ValueError("Sets not the same size Left: %d, Right: %d"
%(len(self.args), len(other.args)))
return ProductSet(a.intersect(b)
for a, b in zip(self.sets, other.sets))
@property
def sets(self):
return self.args
@property
def _complement(self):
        # For each set, consider it or its complement
# We need at least one of the sets to be complemented
# Consider all 2^n combinations.
# We can conveniently represent these options easily using a ProductSet
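        # e.g. for A x B the switch sets are {A, A'} x {B, B'}, yielding the four
        # candidate products (A,B), (A,B'), (A',B), (A',B'); the union below keeps
        # every combination except the original (A,B).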
switch_sets = ProductSet(FiniteSet(set, set.complement)
for set in self.sets)
product_sets = (ProductSet(*set) for set in switch_sets)
# Union of all combinations but this one
return Union(p for p in product_sets if p != self)
@property
def is_real(self):
return all(set.is_real for set in self.sets)
@property
def is_iterable(self):
return all(set.is_iterable for set in self.sets)
def __iter__(self):
if self.is_iterable:
from sympy.core.compatibility import product
return product(*self.sets)
else:
raise TypeError("Not all constituent sets are iterable")
@property
def _measure(self):
measure = 1
for set in self.sets:
measure *= set.measure
return measure
@property
def is_ProductSet(self):
return True
class CountableSet(Set):
"""
Represents a set of countable numbers such as {1, 2, 3, 4} or {1, 2, 3, ...}
"""
@property
def _measure(self):
return 0
@property
def is_iterable(self):
return True
def __iter__(self):
raise NotImplementedError("Iteration not yet implemented")
class Interval(RealSet):
"""
Represents a real interval as a Set.
Usage:
Returns an interval with end points "start" and "end".
For left_open=True (default left_open is False) the interval
will be open on the left. Similarly, for right_open=True the interval
will be open on the right.
Examples:
>>> from sympy import Symbol, Interval, sets
>>> Interval(0, 1)
[0, 1]
>>> Interval(0, 1, False, True)
[0, 1)
>>> a = Symbol('a', real=True)
>>> Interval(0, a)
[0, a]
Notes:
- Only real end points are supported
- Interval(a, b) with a > b will return the empty set
- Use the evalf() method to turn an Interval into an mpmath
'mpi' interval instance
"""
def __new__(cls, start, end, left_open=False, right_open=False):
start = _sympify(start)
end = _sympify(end)
# Only allow real intervals (use symbols with 'is_real=True').
if not start.is_real or not end.is_real:
raise ValueError("Only real intervals are supported")
# Make sure that the created interval will be valid.
if end.is_comparable and start.is_comparable:
if end < start:
return S.EmptySet
if end == start and (left_open or right_open):
return S.EmptySet
if end == start and not (left_open or right_open):
return FiniteSet(end)
# Make sure infinite interval end points are open.
if start == S.NegativeInfinity:
left_open = True
if end == S.Infinity:
right_open = True
return Basic.__new__(cls, start, end, left_open, right_open)
@property
def start(self):
"""
The left end point of 'self'. This property takes the same value as the
'inf' property.
>>> from sympy import Interval
>>> Interval(0, 1).start
0
"""
return self._args[0]
_inf = left = start
@property
def end(self):
"""
The right end point of 'self'. This property takes the same value as the
'sup' property.
>>> from sympy import Interval
>>> Interval(0, 1).end
1
"""
return self._args[1]
_sup = right = end
@property
def left_open(self):
"""
True if 'self' is left-open.
>>> from sympy import Interval
>>> Interval(0, 1, left_open=True).left_open
True
>>> Interval(0, 1, left_open=False).left_open
False
"""
return self._args[2]
@property
def right_open(self):
"""
True if 'self' is right-open.
>>> from sympy import Interval
>>> Interval(0, 1, right_open=True).right_open
True
>>> Interval(0, 1, right_open=False).right_open
False
"""
return self._args[3]
def _intersect(self, other):
if not isinstance(other, Interval):
return other.intersect(self)
if not self._is_comparable(other):
raise NotImplementedError("Intersection of intervals with symbolic "
"end points is not yet implemented")
empty = False
if self.start <= other.end and other.start <= self.end:
# Get topology right.
if self.start < other.start:
start = other.start
left_open = other.left_open
elif self.start > other.start:
start = self.start
left_open = self.left_open
else:
start = self.start
left_open = self.left_open or other.left_open
if self.end < other.end:
end = self.end
right_open = self.right_open
elif self.end > other.end:
end = other.end
right_open = other.right_open
else:
end = self.end
right_open = self.right_open or other.right_open
if end - start == 0 and (left_open or right_open):
empty = True
else:
empty = True
if empty:
return S.EmptySet
return self.__class__(start, end, left_open, right_open)
@property
def _complement(self):
a = Interval(S.NegativeInfinity, self.start, True, not self.left_open)
b = Interval(self.end, S.Infinity, not self.right_open, True)
return Union(a, b)
def _contains(self, other):
# We use the logic module here so that this method is meaningful
# when used with symbolic end points.
from sympy.logic.boolalg import And
try:
other = _sympify(other)
except SympifyError:
return False
if self.left_open:
expr = other > self.start
else:
expr = other >= self.start
if self.right_open:
expr = And(expr, other < self.end)
else:
expr = And(expr, other <= self.end)
return expr
@property
def _measure(self):
return self.end - self.start
def to_mpi(self, prec=53):
return mpi(mpf(self.start.evalf(prec)), mpf(self.end.evalf(prec)))
def _eval_evalf(self, prec):
return Interval(self.left.evalf(), self.right.evalf(),
left_open=self.left_open, right_open=self.right_open)
def _is_comparable(self, other):
is_comparable = self.start.is_comparable
is_comparable &= self.end.is_comparable
is_comparable &= other.start.is_comparable
is_comparable &= other.end.is_comparable
return is_comparable
@property
def is_Interval(self):
return True
@property
def is_left_unbounded(self):
"""Return ``True`` if the left endpoint is negative infinity. """
return self.left is S.NegativeInfinity or self.left == Float("-inf")
@property
def is_right_unbounded(self):
"""Return ``True`` if the right endpoint is positive infinity. """
return self.right is S.Infinity or self.right == Float("+inf")
def as_relational(self, symbol):
"""Rewrite an interval in terms of inequalities and logic operators. """
from sympy.core.relational import Eq, Lt, Le
from sympy.logic.boolalg import And
if not self.is_left_unbounded:
if self.left_open:
left = Lt(self.start, symbol)
else:
left = Le(self.start, symbol)
if not self.is_right_unbounded:
if self.right_open:
right = Lt(symbol, self.right)
else:
right = Le(symbol, self.right)
if self.is_left_unbounded and self.is_right_unbounded:
return True # XXX: Contained(symbol, Floats)
elif self.is_left_unbounded:
return right
elif self.is_right_unbounded:
return left
else:
return And(left, right)
class Union(Set):
"""
Represents a union of sets as a Set.
Examples:
>>> from sympy import Union, Interval
>>> Union(Interval(1, 2), Interval(3, 4))
[1, 2] U [3, 4]
The Union constructor will always try to merge overlapping intervals,
if possible. For example:
>>> Union(Interval(1, 2), Interval(2, 3))
[1, 3]
"""
def __new__(cls, *args):
# Flatten out Iterators and Unions to form one list of sets
args = list(args)
def flatten(arg):
if arg == S.EmptySet:
return []
if isinstance(arg, Set):
if arg.is_Union:
return sum(map(flatten, arg.args), [])
else:
return [arg]
if is_flattenable(arg): # and not isinstance(arg, Set) (implicit)
return sum(map(flatten, arg), [])
raise TypeError("Input must be Sets or iterables of Sets")
args = flatten(args)
if len(args) == 0:
return S.EmptySet
# Only real parts? Return a RealUnion
if all(arg.is_real for arg in args):
return RealUnion(args)
# Lets find and merge real elements if we have them
# Separate into finite, real and other sets
finite_set = sum([s for s in args if s.is_FiniteSet], S.EmptySet)
real_sets = [s for s in args if s.is_real]
other_sets = [s for s in args if not s.is_FiniteSet and not s.is_real]
# Separate finite_set into real and other part
real_finite = RealFiniteSet(i for i in finite_set if i.is_real)
other_finite = FiniteSet(i for i in finite_set if not i.is_real)
# Merge real part of set
real_union = RealUnion(real_sets+[real_finite])
if not real_union: # Real part was empty
sets = other_sets + [other_finite]
elif real_union.is_FiniteSet: # Real part was just a FiniteSet
sets = other_sets + [real_union+other_finite]
elif real_union.is_Interval: # Real part was just an Interval
sets = [real_union] + other_sets + [other_finite]
# If is_RealUnion then separate
elif real_union.is_Union and real_union.is_real:
intervals = [s for s in real_union.args if s.is_Interval]
finite_set = sum([s for s in real_union.args if s.is_FiniteSet] +
[other_finite], S.EmptySet) # Join FiniteSet back together
sets = intervals + [finite_set] + other_sets
# Clear out Empty Sets
sets = [set for set in sets if set != S.EmptySet]
# If a single set is left over, don't create a new Union object but
# rather return the single set.
if len(sets) == 1:
return sets[0]
return Basic.__new__(cls, *sets)
@property
def _inf(self):
# We use Min so that sup is meaningful in combination with symbolic
# interval end points.
from sympy.functions.elementary.miscellaneous import Min
return Min(*[set.inf for set in self.args])
@property
def _sup(self):
# We use Max so that sup is meaningful in combination with symbolic
# end points.
from sympy.functions.elementary.miscellaneous import Max
return Max(*[set.sup for set in self.args])
def _intersect(self, other):
# Distributivity.
if other.is_Interval:
intersections = []
for interval in self.args:
intersections.append(interval.intersect(other))
return self.__class__(*intersections)
if other.is_FiniteSet:
return other._intersect(self)
elif other.is_Union:
intersections = []
for s in other.args:
intersections.append(self.intersect(s))
return self.__class__(*intersections)
else:
return other.intersect(self)
@property
def _complement(self):
# De Morgan's formula.
complement = self.args[0].complement
for set in self.args[1:]:
complement = complement.intersect(set.complement)
return complement
def _contains(self, other):
from sympy.logic.boolalg import Or
or_args = [the_set.contains(other) for the_set in self.args]
return Or(*or_args)
@property
def _measure(self):
# Measure of a union is the sum of the measures of the sets minus
# the sum of their pairwise intersections plus the sum of their
# triple-wise intersections minus ... etc...
        # Sets is a collection of intersections and a set of elementary
        # sets which made up those intersections (called "sos" for set of sets)
        # An example element of this list might be:
        #    ( {A,B,C}, A.intersect(B).intersect(C) )
# Start with just elementary sets ( ({A}, A), ({B}, B), ... )
# Then get and subtract ( ({A,B}, (A int B), ... ) while non-zero
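        # For two overlapping intervals this reduces to the inclusion-exclusion
        # identity measure(A U B) = measure(A) + measure(B) - measure(A int B), so
        # Union(Interval(0, 2), Interval(1, 3)).measure evaluates to 2 + 2 - 1 = 3.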
sets = [(FiniteSet(s), s) for s in self.args]
measure = 0
parity = 1
while sets:
# Add up the measure of these sets and add or subtract it to total
measure += parity * sum(inter.measure for sos, inter in sets)
# For each intersection in sets, compute the intersection with every
# other set not already part of the intersection.
sets = ((sos + FiniteSet(newset), newset.intersect(intersection))
for sos, intersection in sets for newset in self.args
if newset not in sos)
# Clear out sets with no measure
sets = [(sos, inter) for sos, inter in sets if inter.measure != 0]
# Clear out duplicates
sos_list = []
sets_list = []
for set in sets:
if set[0] in sos_list:
continue
else:
sos_list.append(set[0])
sets_list.append(set)
sets = sets_list
# Flip Parity - next time subtract/add if we added/subtracted here
parity *= -1
return measure
def as_relational(self, symbol):
"""Rewrite a Union in terms of equalities and logic operators.
"""
from sympy.logic.boolalg import Or
return Or(*[set.as_relational(symbol) for set in self.args])
@property
def is_iterable(self):
return all(arg.is_iterable for arg in self.args)
@property
def is_Union(self):
return True
class RealUnion(Union, RealSet):
"""
Represents a union of Real Sets (Intervals, RealFiniteSets)
This class should only be used internally.
Please make unions with Union class.
See Union for details
"""
def __new__(cls, *args):
intervals, finite_sets, other_sets = [], [], []
args = list(args)
for arg in args:
if isinstance(arg, Set):
if arg == S.EmptySet:
continue
elif arg.is_Union:
args += arg.args
elif arg.is_FiniteSet:
finite_sets.append(arg)
elif arg.is_Interval:
intervals.append(arg)
else:
other_sets.append(arg)
elif is_flattenable(arg):
args += arg
else:
raise TypeError("%s: Not a set or iterable of sets"%arg)
# Sort intervals according to their infimum
intervals.sort(key=lambda i: i.start)
# Merge comparable overlapping intervals
i = 0
while i < len(intervals) - 1:
cur = intervals[i]
next = intervals[i + 1]
merge = False
if cur._is_comparable(next):
if next.start < cur.end:
merge = True
elif next.start == cur.end:
# Must be careful with boundaries.
merge = not(next.left_open and cur.right_open)
if merge:
if cur.start == next.start:
left_open = cur.left_open and next.left_open
else:
left_open = cur.left_open
if cur.end < next.end:
right_open = next.right_open
end = next.end
elif cur.end > next.end:
right_open = cur.right_open
end = cur.end
else:
right_open = cur.right_open and next.right_open
end = cur.end
intervals[i] = Interval(cur.start, end, left_open, right_open)
del intervals[i + 1]
else:
i += 1
# Collect all elements in the finite sets not in any interval
if finite_sets:
# Merge Finite Sets
finite_set = sum(finite_sets, S.EmptySet)
# Close open intervals if boundary is in finite_set
for num, i in enumerate(intervals):
closeLeft = i.start in finite_set if i.left_open else False
closeRight = i.end in finite_set if i.right_open else False
if ((closeLeft and i.left_open)
or (closeRight and i.right_open)):
intervals[num] = Interval(i.start, i.end,
not closeLeft, not closeRight)
# All elements in finite_set not in any interval
finite_complement = FiniteSet(
el for el in finite_set
if not el.is_number
or not any(el in i for i in intervals))
if len(finite_complement)>0: # Anything left?
other_sets.append(finite_complement)
# Clear out empty sets
sets = [set for set in (intervals + other_sets) if set]
# If nothing is there then return the empty set
if not sets:
return S.EmptySet
# If a single set is left over, don't create a new Union object but
# rather return the single set.
if len(sets) == 1:
return sets[0]
return Basic.__new__(cls, *sets)
def _eval_evalf(self, prec):
return RealUnion(set.evalf() for set in self.args)
def __iter__(self):
import itertools
if all(set.is_iterable for set in self.args):
return itertools.chain(*(iter(arg) for arg in self.args))
else:
raise TypeError("Not all constituent sets are iterable")
class EmptySet(Set):
"""
Represents the empty set. The empty set is available as a singleton
as S.EmptySet.
Examples:
>>> from sympy import S, Interval
>>> S.EmptySet
EmptySet()
>>> Interval(1, 2).intersect(S.EmptySet)
EmptySet()
"""
__metaclass__ = Singleton
def _intersect(self, other):
return S.EmptySet
@property
def _complement(self):
return Interval(S.NegativeInfinity, S.Infinity)
@property
def _measure(self):
return 0
def _contains(self, other):
return False
def as_relational(self, symbol):
return False
def __len__(self):
return 0
def union(self, other):
return other
def __iter__(self):
return iter([])
class FiniteSet(CountableSet):
"""
Represents a finite set of discrete numbers
Examples:
>>> from sympy import Symbol, FiniteSet, sets
>>> FiniteSet(1, 2, 3, 4)
{1, 2, 3, 4}
>>> 3 in FiniteSet(1, 2, 3, 4)
True
"""
def __new__(cls, *args):
def flatten(arg):
if is_flattenable(arg):
return sum(map(flatten, arg), [])
return [arg]
args = flatten(list(args))
# Sympify Arguments
args = map(sympify, args)
# Turn tuples into Tuples
args = [Tuple(*arg) if arg.__class__ is tuple else arg for arg in args]
if len(args) == 0:
return EmptySet()
if all([arg.is_real and arg.is_number for arg in args]):
cls = RealFiniteSet
elements = frozenset(map(sympify, args))
obj = Basic.__new__(cls, *elements)
obj.elements = elements
return obj
def __iter__(self):
return self.elements.__iter__()
def _intersect(self, other):
if isinstance(other, self.__class__):
return self.__class__(*(self.elements & other.elements))
return self.__class__(el for el in self if el in other)
def union(self, other):
"""
Returns the union of 'self' and 'other'. As a shortcut it is possible
to use the '+' operator:
>>> from sympy import FiniteSet, Interval, Symbol
>>> FiniteSet(0, 1).union(FiniteSet(2, 3))
{0, 1, 2, 3}
>>> FiniteSet(Symbol('x'), 1, 2) + FiniteSet(2, 3)
{1, 2, 3, x}
>>> Interval(1, 2, True, True) + FiniteSet(2, 3)
(1, 2] U {3}
Similarly it is possible to use the '-' operator for set
differences:
>>> FiniteSet(Symbol('x'), 1, 2) - FiniteSet(2, 3)
{1, x}
>>> Interval(1, 2) - FiniteSet(2, 3)
[1, 2)
"""
if other == S.EmptySet:
return self
if other.is_FiniteSet:
return FiniteSet(*(self.elements | other.elements))
return Union(self, other) # Resort to default
def _contains(self, other):
"""
Tests whether an element, other, is in the set.
Relies on Python's set class. This tests for object equality
All inputs are sympified
>>> from sympy import FiniteSet
>>> 1 in FiniteSet(1, 2)
True
>>> 5 in FiniteSet(1, 2)
False
"""
return sympify(other) in self.elements
@property
def _inf(self):
from sympy.functions.elementary.miscellaneous import Min
return Min(*self)
@property
def _sup(self):
from sympy.functions.elementary.miscellaneous import Max
return Max(*self)
def __len__(self):
return len(self.elements)
def __sub__(self, other):
return FiniteSet(el for el in self if el not in other)
def as_relational(self, symbol):
"""Rewrite a FiniteSet in terms of equalities and logic operators.
"""
from sympy.core.relational import Eq
from sympy.logic.boolalg import Or
return Or(*[Eq(symbol, elem) for elem in self])
@property
def is_FiniteSet(self):
return True
@property
def is_real(self):
return all(el.is_real for el in self)
class RealFiniteSet(FiniteSet, RealSet):
"""
A FiniteSet with all elements Real Numbers.
Allows for good integration with Intervals
This class for internal use only. Use FiniteSet to create a RealFiniteSet
See FiniteSet for more details
"""
def _eval_evalf(self, prec):
return RealFiniteSet(elem.evalf(prec) for elem in self)
@property
def _complement(self):
"""
The complement of a real finite set is the Union of open Intervals
between the elements of the set.
>>> from sympy import FiniteSet
>>> FiniteSet(1, 2, 3).complement
(-oo, 1) U (1, 2) U (2, 3) U (3, oo)
"""
if not all(elem.is_number for elem in self.elements):
raise ValueError("%s: Complement not defined for symbolic inputs"
%self)
sorted_elements = sorted(list(self.elements))
intervals = [] # Build up a list of intervals between the elements
intervals += [Interval(S.NegativeInfinity,sorted_elements[0],True,True)]
for a, b in zip(sorted_elements[0:-1], sorted_elements[1:]):
intervals.append(Interval(a, b, True, True)) # open intervals
intervals.append(Interval(sorted_elements[-1], S.Infinity, True, True))
return Union(*intervals)
def as_relational(self, symbol):
"""Rewrite a FiniteSet in terms of equalities and logic operators.
"""
from sympy.core.relational import Eq
from sympy.logic.boolalg import Or
return Or(*[Eq(symbol, elem) for elem in self])
def _eval_evalf(self, prec):
return FiniteSet(elem.evalf(prec) for elem in self)
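# Capture the concrete generator type (equivalent to types.GeneratorType) so that
# is_flattenable() below can recognize generator expressions passed to Set constructors.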
genclass = (1 for i in xrange(2)).__class__
def is_flattenable(obj):
"""
Checks that an argument to a Set constructor should be flattened
"""
return obj.__class__ in [list, set, genclass]
|
from git_repo import GitRepo, MultiGitRepo
import json;
import os;
import re;
import subprocess;
import sys;
import pandas as pd
import requests
import fnmatch
from IPython.nbformat import current as nbformat
from IPython.nbconvert import PythonExporter
import networkx as nx
import compiler
from compiler.ast import From
from compiler.ast import Import
from config.config import CONFIG
repoLocation = CONFIG.repo_path
examplesLocation = CONFIG.urls_path
nameRegex = re.compile('([^/]*)(\\.git$)')
fileRegex = re.compile('.*\/(.*)')
def repo_already_exists(filepath):
return os.path.exists(filepath);
"""
Converts a github url (e.g. https://github.com/sbenthall/bigbang.git) to
a human-readable name (bigbang) by looking at the word between the last "/" and ".git".
"""
def url_to_name(url):
url = url.replace("\n", "");
name = nameRegex.search(url).group(1);
return name;
"""
Converts a name of a repo to its filepath.
Currently, these go to ../archives/sample_git_repos/{name}/
"""
def name_to_filepath(name):
newLoc = repoLocation + name
return newLoc
"""
Converts a filepath (../archives/sample_git_repos/{name}) to a name.
Note that this will fail if the filepath ends in a "/". It must end
in the name of the folder.
Thus, it should be ../archives/sample_git_repos/{name} not
../archives/sample_git_repos/{name}/
"""
def filepath_to_name(filepath):
name = fileRegex.search(filepath).group(1);
return name;
"""
Converts a dictionary of dependencies into a NetworkX DiGraph.
"""
def create_graph(dic):
G = nx.DiGraph()
for f in dic:
for dependency in dic[f]:
G.add_edge(f, dependency)
return G
"""
Returns a list of the Python files in a directory, and
converts IPython notebooks into Python source code and
includes them with the Python files.
"""
def get_files(filepath):
os.chdir(filepath)
files = []
for root, dirnames, filenames in os.walk("."):
for filename in fnmatch.filter(filenames, '*.py'):
files.append(os.path.join(root, filename))
for filename in fnmatch.filter(filenames, '*.ipynb'):
            try:
                notebook_path = os.path.join(root, filename)
                with open(notebook_path) as fh:
                    nb = nbformat.reads_json(fh.read())
                export_path = notebook_path.replace(".ipynb", ".py")
                exporter = PythonExporter()
                source, meta = exporter.from_notebook_node(nb)
                with open(export_path, 'w+') as fh:
                    fh.writelines(source)
                files.append(export_path)
except: #may have issues with JSON encoding
pass
return files
"""
Given a directory, collects all Python and IPython files and
uses the Python AST to create a dictionary of dependencies from them.
Returns the dependencies converted into a NetworkX graph.
"""
def get_dependency_network(filepath):
files = get_files(filepath)
dependencies = {}
for file in set(files):
ast = compiler.parseFile(file)
for node in ast.getChildren()[1].nodes:
if isinstance(node, Import):
if file in dependencies:
dependencies[file].append(node.names[0][0])
else:
dependencies[file] = [node.names[0][0]]
            elif isinstance(node, From):
                if file in dependencies:
                    dependencies[file].append(node.modname + "/" + node.names[0][0])
                else:
                    dependencies[file] = [node.modname + "/" + node.names[0][0]]
return create_graph(dependencies)
"""
Takes three different options for type:
'remote' : basically a git url
'name' (default): a name like 'scipy' which the method can expand to a url
'local' : a filepath to a file on the local system (basically an existing git directory on this computer)
This returns an initialized GitRepo object with its data and name already loaded.
"""
def get_repo(repo_in, in_type='name', update = False):
# Input is name
if in_type == 'name':
filepath = name_to_filepath(repo_in)
ans = None;
if not update:
ans = get_cache(repo_in);
if ans is not None:
return ans;
print("Checking for " + str(repo_in) + " at " + str(filepath));
ans = get_repo(filepath, 'local', update);
if isinstance(ans, GitRepo):
            ans.commit_data.to_csv(cache_path(repo_in), sep='\t', encoding='utf-8') # Cache the commit data for later calls
else:
print("We failed to find a local copy of this repo")
return ans;
# Input is a local file
if in_type == 'local':
if repo_already_exists(repo_in):
name = filepath_to_name(repo_in);
return GitRepo(url=repo_in, name=name);
else:
print("Invalid filepath: " + repo_in);
return None;
if in_type == 'remote':
name = url_to_name(repo_in);
filepath = name_to_filepath(name);
if not repo_already_exists(filepath):
print("Gloning the repo from remote")
fetch_repo(repo_in);
return get_repo(name, 'name', update);
else:
print("Invalid input") # TODO: Clarify this error
return None
"""
Takes in a git url and uses shell commands
to clone the git repo into sample_git_repos/
TODO: We shouldn't use this with shell=True because of security concerns.
"""
def fetch_repo(url):
# TODO: We are repeatedly calculating name and filepath
url = url.replace("\n", "");
name = url_to_name(url);
newLoc = name_to_filepath(name);
command = ["git " + "clone " + url + " " + newLoc];
subprocess.call(command, shell = True);
"""
Takes in a name (bigbang)
Returns where its cached file should be (../sample_git_repos/bigbang_backup.csv)
"""
def cache_path(name):
return repoLocation + str(name) + "_backup.csv"
"""
Takes in a name (bigbang)
Returns a GitRepo object containing the cache data if the cache exists
Returns None otherwise.
"""
def get_cache(name):
filepath = cache_path(name);
if os.path.exists(filepath):
c = pd.read_csv(filepath, sep='\t', encoding='utf-8');
fp = name_to_filepath(name);
ans = GitRepo(name=name, url=fp, cache=c);
return ans;
return None;
"""
As of now, this only accepts names/repos, not local urls
TODO: This could be optimized
"""
def get_multi_repo(repo_names=None, repos=None):
if repos is None:
repos = list()
for name in repo_names:
repo = get_repo(name, in_type = "name")
repos.append(repo);
for repo in repos:
repo.commit_data["Repo Name"] = repo.name;
ans = MultiGitRepo(repos);
return ans
"""
fetches a list of all repos in an organization from github
and gathers their URL's (of the form *.git)
It dumps these into ../examples/{org_name}_urls.txt
"""
def load_org_repos(org_name):
github_url = "https://api.github.com/orgs/" + org_name + "/repos"
r = requests.get(github_url)
data = r.json()
urls = []
for repo in data:
if "git_url" in repo:
urls.append(repo["git_url"])
if len(urls) == 0:
print("Found no repos in group: " + str(org_name))
return None
else:
addr = examplesLocation + str(org_name) + "_urls.txt"
f = open(addr, 'w')
f.write("\n".join(urls))
print("Wrote git urls to " + addr)
return urls
"""
Checks to see if we have the urls for a given org
If we don't, it fetches them.
Once we do, it returns a list of GitRepo objects from the urls.
"""
def get_org_repos(org_name):
addr = examplesLocation + str(org_name) + "_urls.txt"
urls = None
if not os.path.isfile(addr):
urls = load_org_repos(org_name);
else:
urls = open(addr, "r")
ans = list()
for url in urls:
ans.append(get_repo(url, "remote"))
return ans;
def get_org_multirepo(org_name):
repos = get_org_repos(org_name)
ans = get_multi_repo(repos=repos)
return ans
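# A minimal usage sketch of the helpers above (illustrative repo name; assumes the
# CONFIG paths point at valid locations and the repo is cloned). Guarded so nothing
# runs on import.
if __name__ == '__main__':
    repo = get_repo("bigbang", in_type="name")                    # load one repo, using the CSV cache if present
    graph = get_dependency_network(name_to_filepath("bigbang"))   # import-dependency DiGraph
    print("%d files in the dependency graph" % graph.number_of_nodes())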
|
import project_scrum_work_type
|
_api_key = None
_application_key = None
_api_version = 'v1'
_api_host = None
_host_name = None
_cacert = True
_proxies = None
_timeout = 3
_max_timeouts = 3
_max_retries = 3
_backoff_period = 300
_mute = True
from datadog.api.comments import Comment
from datadog.api.downtimes import Downtime
from datadog.api.timeboards import Timeboard
from datadog.api.events import Event
from datadog.api.infrastructure import Infrastructure
from datadog.api.metrics import Metric
from datadog.api.monitors import Monitor
from datadog.api.screenboards import Screenboard
from datadog.api.graphs import Graph, Embed
from datadog.api.hosts import Host
from datadog.api.service_checks import ServiceCheck
from datadog.api.tags import Tag
from datadog.api.users import User
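# A sketch of typical client-side usage (assumed, not exercised by this module): the
# top-level datadog.initialize() call populates the _api_key / _application_key module
# globals above before any of the resource classes imported above are used.
#   from datadog import initialize, api
#   initialize(api_key='<api key>', app_key='<application key>')
#   api.Event.create(title='deploy finished', text='version 1.2 shipped', tags=['env:prod'])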
|
""" Copyright (C) 2007 Collabora Limited
This library is free software; you can redistribute it and/or
modify it under the terms of the GNU Lesser General Public
License as published by the Free Software Foundation; either
version 2.1 of the License, or (at your option) any later version.
This library is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
Library General Public License for more details.
You should have received a copy of the GNU Lesser General Public
License along with this library; if not, write to the Free Software
Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
"""
import dbus.service
class ConnectionInterfaceMailNotification(dbus.service.Interface):
"""\
    An interface to support receiving notifications about an e-mail
account associated with this connection.
In protocols where this is possible, this interface also allows the
connection manager to provide the necessary information for clients
to open a web-based mail client without having to re-authenticate.
To use this interface, a client MUST first subscribe by passing the
name of this interface to the Connection.AddClientInterest method. The subscription
mechanic aims at reducing network traffic and memory footprint in the
    situation where nobody is currently interested in the provided
information. When done with this interface, clients SHOULD call
Connection.RemoveClientInterest to allow the CM to
release resources.
Protocols have various different levels of Mail Notification support.
To describe the level of support, the interface provides a property
called MailNotificationFlags.
Not all combinations are valid; protocols can be divided into four
categories as follows.
Connections to the most capable protocols, such as Google's XMPP Mail
Notification extension, have the Supports_Unread_Mails flag (this
implies that they must also have Supports_Unread_Mail_Count, but not
Emits_Mails_Received). On these connections, clients
requiring change notification MUST monitor the
UnreadMailsChanged signal, and
either recover the initial state from the
UnreadMails property (if they require
details other than the number of mails) or the
UnreadMailCount property (if they
are only interested in the number of unread mails). The
MailsReceived signal is never emitted
on these connections, so clients that will display a short-term
notification for each new mail MUST do so in response to emission of
the UnreadMailsChanged signal.
The most common situation, seen in protocols like MSN and Yahoo, is
that the number of unread mails is provided and kept up-to-date,
and a separate notification is emitted with some details of each new
mail. This is a combination of the following two features, and clients
SHOULD implement one or both as appropriate for their requirements.
On protocols that have the Emits_Mails_Received flag (which implies
that they do not have Supports_Unread_Mails), the CM does not keep
track of any mails; it simply emits a notification whenever new mail
arrives. Those events may be used for short term display (like a
notification popup) to inform the user. No protocol is known to support
    only this feature, but it is useful for integration with libraries
    that do not implement tracking of the number of mails. Clients
requiring these notifications MUST monitor the
MailsReceived signal on any connections
with this flag.
On protocols that have the Supports_Unread_Mail_Count flag but not
the Supports_Unread_Mails flag, clients cannot display complete
details of unread email, but can display an up-to-date count of the
number of unread mails. To do this, they must monitor the
UnreadMailsChanged signal, and
retrieve the initial state from the
UnreadMailCount property.
Orthogonal features described by the
MailNotificationFlags property include the
RequestSomethingURL methods, which are used to obtain URLs allowing
clients to open a webmail client. Connections SHOULD support as many
of these methods as possible.
"""
def __init__(self):
self._interfaces.add('org.freedesktop.Telepathy.Connection.Interface.MailNotification')
@dbus.service.method('org.freedesktop.Telepathy.Connection.Interface.MailNotification', in_signature='', out_signature='(sua(ss))')
def RequestInboxURL(self):
"""
This method creates and returns a URL and an optional POST data that
allow opening the Inbox folder of a webmail account. This URL MAY
contain tokens with a short lifetime, so clients SHOULD request a new
URL for each visit to the webmail interface. This method is implemented
only if the Supports_Request_Inbox_URL flag is set in
MailNotificationFlags.
We are not using properties here because the tokens are unsuitable
for sharing between clients, and network round-trips may be required
        to obtain the information that leads to authentication-free webmail
access.
"""
raise NotImplementedError
@dbus.service.method('org.freedesktop.Telepathy.Connection.Interface.MailNotification', in_signature='sv', out_signature='(sua(ss))')
def RequestMailURL(self, ID, URL_Data):
"""
This method creates and returns a URL and optional POST data that
allow opening a specific mail in a webmail interface. This
method is implemented only if Supports_Request_Mail_URL flag
is set in MailNotificationFlags.
See RequestInboxURL for design
rationale.
"""
raise NotImplementedError
@dbus.service.signal('org.freedesktop.Telepathy.Connection.Interface.MailNotification', signature='aa{sv}')
def MailsReceived(self, Mails):
"""
        Emitted when new e-mail messages arrive in the inbox associated with
this connection. This signal is used for protocols that are not able
to maintain the UnreadMails list, but
do provide real-time notification about newly arrived e-mails. It MUST
NOT be emitted unless Emits_Mails_Received is set in
MailNotificationFlags.
"""
pass
@dbus.service.signal('org.freedesktop.Telepathy.Connection.Interface.MailNotification', signature='uaa{sv}as')
def UnreadMailsChanged(self, Count, Mails_Added, Mails_Removed):
"""
Emitted when UnreadMails or
UnreadMailCount have changed. It MUST
        NOT be emitted if the Supports_Unread_Mail_Count flag is not set
in MailNotificationFlags.
Mails_Added and
Mails_Removed MUST be empty if the
Supports_Unread_Mails flag is not set.
"""
pass
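# Client-side sketch of the subscription flow described in the interface docstring
# (illustrative only; assumes a Telepathy Connection proxy and its Properties
# interface have already been obtained over D-Bus):
#   MAIL_IFACE = 'org.freedesktop.Telepathy.Connection.Interface.MailNotification'
#   connection.AddClientInterest([MAIL_IFACE])          # subscribe before reading state
#   count = props.Get(MAIL_IFACE, 'UnreadMailCount')    # recover the initial unread count
#   ...                                                 # react to UnreadMailsChanged signals
#   connection.RemoveClientInterest([MAIL_IFACE])       # let the CM release resources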
|
from spack import *
class DtDiamondLeft(Package):
"""This package has an indirect diamond dependency on dt-diamond-bottom"""
homepage = "http://www.example.com"
url = "http://www.example.com/dt-diamond-left-1.0.tar.gz"
version('1.0', '0123456789abcdef0123456789abcdef')
depends_on('dt-diamond-bottom', type='build')
def install(self, spec, prefix):
pass
|
from __future__ import print_function
import os
import shutil
import itertools
import tempfile
import subprocess
from distutils.spawn import find_executable
import numpy as np
import mdtraj as md
from mdtraj.testing import get_fn, eq, skipif
HAVE_DSSP = find_executable('mkdssp')
DSSP_MSG = "This tests required mkdssp to be installed, from http://swift.cmbi.ru.nl/gv/dssp/"
tmpdir = None
def setup():
global tmpdir
tmpdir = tempfile.mkdtemp()
def teardown():
shutil.rmtree(tmpdir)
def call_dssp(traj, frame=0):
inp = os.path.join(tmpdir, 'temp.pdb')
out = os.path.join(tmpdir, 'temp.pdb.dssp')
traj[frame].save(inp)
cmd = ['mkdssp', '-i', inp, '-o', out]
subprocess.check_output(' '.join(cmd), shell=True)
KEY_LINE = ' # RESIDUE AA STRUCTURE BP1 BP2 ACC N-H-->O O-->H-N N-H-->O O-->H-N TCO KAPPA ALPHA PHI PSI X-CA Y-CA Z-CA'
with open(out) as f:
        # exhaust the header lines that precede the per-residue records
max(itertools.takewhile(lambda l: not l.startswith(KEY_LINE), f))
return np.array([line[16] for line in f if line[13] != '!'])
def assert_(a, b):
try:
assert np.all(a == b)
except AssertionError:
if len(a) != len(b):
print('Not the same length: %d vs %s' % (len(a), len(b)))
raise
for i, (aa, bb) in enumerate(zip(a, b)):
if aa == bb:
print("%3d: '%s' '%s'" % (i, aa, bb))
else:
print("%3d: '%s' '%s' <-" % (i, aa, bb))
raise
@skipif(not HAVE_DSSP, DSSP_MSG)
def test_1():
for fn in ['1bpi.pdb', '1vii.pdb', '4K6Q.pdb', '1am7_protein.pdb']:
t = md.load_pdb(get_fn(fn))
t = t.atom_slice(t.top.select_atom_indices('minimal'))
f = lambda : assert_(call_dssp(t), md.compute_dssp(t, simplified=False)[0])
f.description = 'test_1: %s' % fn
yield f
@skipif(not HAVE_DSSP, DSSP_MSG)
def test_2():
t = md.load(get_fn('2EQQ.pdb'))
for i in range(len(t)):
yield lambda: assert_(call_dssp(t[i]), md.compute_dssp(t[i], simplified=False)[0])
@skipif(not HAVE_DSSP, DSSP_MSG)
def test_3():
# 1COY gives a small error, due to a broken chain.
pdbids = ['1GAI', '6gsv', '2AAC']
for pdbid in pdbids:
t = md.load_pdb('http://www.rcsb.org/pdb/files/%s.pdb' % pdbid)
t = t.atom_slice(t.top.select_atom_indices('minimal'))
f = lambda : assert_(call_dssp(t), md.compute_dssp(t, simplified=False)[0])
        f.description = 'test_3: %s' % pdbid
yield f
def test_4():
t = md.load_pdb(get_fn('1am7_protein.pdb'))
a = md.compute_dssp(t, simplified=True)
b = md.compute_dssp(t, simplified=False)
assert len(a) == len(b)
assert len(a[0]) == len(b[0])
assert list(np.unique(a[0])) == ['C', 'E', 'H']
def test_5():
t = md.load(get_fn('4waters.pdb'))
a = md.compute_dssp(t, simplified=True)
b = md.compute_dssp(t, simplified=False)
ref = np.array([['NA', 'NA', 'NA', 'NA']])
np.testing.assert_array_equal(a, ref)
np.testing.assert_array_equal(b, ref)
def test_6():
t = md.load(get_fn('alanine-dipeptide-explicit.pdb'))
a = md.compute_dssp(t, simplified=True)
protein_residues = np.array([set(a.name for a in r.atoms).issuperset(('C', 'N', 'O', 'CA')) for r in t.topology.residues])
assert np.unique(a[:, protein_residues]) == "C"
assert np.unique(a[:, np.logical_not(protein_residues)]) == 'NA'
|
from django.test import TestCase
from app.detective.models import Topic
from app.detective.utils import topic_cache, get_leafs_and_edges
import json
class TopicCachierTestCase(TestCase):
fixtures = ['app/detective/fixtures/default_topics.json',]
def setUp(self):
ontology_str = """
[
{
"name": "Person",
"fields": [
{ "name": "first_name","type": "string","verbose_name":"First Name" },
{ "name": "name","type": "string","verbose_name":"Last Name" },
{ "name": "employed_by", "type": "Relationship", "related_model": "Company" }
]
},
{
"name": "Company",
"fields": [
{ "name": "name", "type": "string" },
{"name": "status", "type": "string"}
]
}
]
"""
self.ontology = json.loads(ontology_str)
def create_topic(self, args=None):
default_kwargs = {
'title': 'Test investigation',
'ontology_as_json': self.ontology,
'slug': 'test-investigation-fake'
}
        if args is None:
kwargs = default_kwargs
else:
kwargs = args
try:
return Topic.objects.create(**kwargs)
except:
return Topic.objects.get(
slug=kwargs.get('slug', default_kwargs.get('slug'))
)
def test_topic_creation(self):
topic = self.create_topic()
rev = topic_cache.version(topic)
        self.assertIsNotNone(rev)
def test_topic_update(self):
# if we update a topic, its revision number should be incremented
topic = self.create_topic()
rev = topic_cache.version(topic)
topic.title = "New title"
topic.save()
self.assertEqual(topic_cache.version(topic), rev+1)
def test_topic_delete(self):
topic = self.create_topic()
rev = topic_cache.version(topic)
topic.delete()
self.assertEqual(topic_cache.version(topic), rev+1)
def test_topic_model_create(self):
topic = self.create_topic()
rev_origin = topic_cache.version(topic)
Person = topic.get_models_module().Person
p = Person.objects.create(first_name='Pierre', name='Bellon')
rev_target = topic_cache.version(topic)
self.assertEqual(rev_target, rev_origin + 1)
    def test_topic_model_update(self):
topic = self.create_topic()
Person = topic.get_models_module().Person
p = Person.objects.create(first_name='Pierre', name='Bellon')
rev_origin = topic_cache.version(topic)
p = Person.objects.get(first_name='Pierre', name='Bellon')
p.first_name = 'Matthieu'
p.save()
rev_target = topic_cache.version(topic)
self.assertEqual(rev_target, rev_origin + 1)
def test_cache_get(self):
topic = self.create_topic()
random_data = {
'such': 'data'
}
topic_cache.set(topic, 'random_key', random_data, 3000)
stored_data = topic_cache.get(topic, 'random_key')
self.assertEqual(stored_data, random_data)
def test_cache_delete(self):
topic = self.create_topic()
random_data = {
'such': 'data'
}
topic_cache.set(topic, 'such_random', random_data, 3000)
topic_cache.delete(topic, 'such_random')
self.assertIsNone(topic_cache.get(topic, 'such_random'))
def test_get_leafs_and_edges(self):
topic = self.create_topic()
models = topic.get_models_module()
Person = models.Person
Company = models.Company
        # be sure we don't have anybody
[ p.delete() for p in Person.objects.all() ]
c1 = Company.objects.create(name='random', status='random')
p1 = Person.objects.create(first_name='test', name='test')
p1.employed_by.add(c1)
p1.save()
p2 = Person.objects.create(first_name='test', name='test')
p2.employed_by.add(c1)
p2.save()
leafs = get_leafs_and_edges(topic=topic, depth=3)
p3 = Person.objects.create(first_name='test', name='test', employed_by=c1)
new_leafs = get_leafs_and_edges(topic=topic, depth=3)
cached_leafs = topic_cache.get(topic, 'leafs_and_nodes_%s_%s' % (3, '0'))
self.assertEqual(new_leafs, cached_leafs)
self.assertGreater(len(new_leafs[1]), len(leafs[1]))
|
"""Testing support (tools to test IPython itself).
"""
def test():
"""Run the entire IPython test suite.
For fine-grained control, you should use the :file:`iptest` script supplied
with the IPython installation."""
# Do the import internally, so that this function doesn't increase total
# import time
from iptest import run_iptestall
run_iptestall()
test.__test__ = False
|
import os
import sys
import re
from setuptools import setup, find_packages
from setuptools.command.test import test as TestCommand
class PyTest(TestCommand):
def finalize_options(self):
TestCommand.finalize_options(self)
self.test_args = []
self.test_suite = True
def run_tests(self):
# import here, cause outside the eggs aren't loaded
import pytest
errno = pytest.main(self.test_args)
sys.exit(errno)
ROOT = os.path.dirname(__file__)
kwargs = {}
if sys.version_info[0] == 3:
kwargs['encoding'] = 'utf-8'
with open(os.path.join(ROOT, 'pygal', '__init__.py'), **kwargs) as fd:
__version__ = re.search("__version__ = '([^']+)'", fd.read()).group(1)
setup(
name="pygal",
version=__version__,
description="A python svg graph plotting library",
author="Kozea",
url="http://pygal.org/",
author_email="florian.mounier@kozea.fr",
license="GNU LGPL v3+",
platforms="Any",
packages=find_packages(),
provides=['pygal'],
scripts=["pygal_gen.py"],
keywords=[
"svg", "chart", "graph", "diagram", "plot", "histogram", "kiviat"],
tests_require=["pytest", "pyquery", "flask", "cairosvg"],
cmdclass={'test': PyTest},
package_data={'pygal': ['css/*', 'graph/maps/*.svg']},
extras_require={
'lxml': ['lxml'],
'png': ['cairosvg']
},
classifiers=[
"Development Status :: 4 - Beta",
"Environment :: Console",
"Intended Audience :: End Users/Desktop",
"License :: OSI Approved :: "
"GNU Lesser General Public License v3 or later (LGPLv3+)",
"Operating System :: OS Independent",
"Programming Language :: Python :: 2",
"Programming Language :: Python :: 3",
"Topic :: Multimedia :: Graphics :: Presentation"])
|
"""
Show how to display two scales on the left and right y axis -- Fahrenheit and Celsius
"""
import matplotlib.pyplot as plt
fig, ax1 = plt.subplots() # ax1 is the Fahrenheit scale
ax2 = ax1.twinx() # ax2 is the Celsius scale
def Tc(Tf):
return (5./9.)*(Tf-32)
def update_ax2(ax1):
y1, y2 = ax1.get_ylim()
ax2.set_ylim(Tc(y1), Tc(y2))
ax2.figure.canvas.draw()
ax1.callbacks.connect("ylim_changed", update_ax2)
ax1.plot([78, 79, 79, 77])
ax1.set_title('Two scales: Fahrenheit and Celsius')
ax1.set_ylabel('Fahrenheit')
ax2.set_ylabel('Celsius')
plt.show()
|
from __future__ import unicode_literals
import re
from .common import InfoExtractor
from ..compat import (
compat_etree_fromstring,
compat_urllib_request,
)
from ..utils import (
ExtractorError,
int_or_none,
)
class VevoIE(InfoExtractor):
"""
Accepts urls from vevo.com or in the format 'vevo:{id}'
(currently used by MTVIE and MySpaceIE)
"""
_VALID_URL = r'''(?x)
(?:https?://www\.vevo\.com/watch/(?:[^/]+/(?:[^/]+/)?)?|
https?://cache\.vevo\.com/m/html/embed\.html\?video=|
https?://videoplayer\.vevo\.com/embed/embedded\?videoId=|
vevo:)
(?P<id>[^&?#]+)'''
_TESTS = [{
'url': 'http://www.vevo.com/watch/hurts/somebody-to-die-for/GB1101300280',
"md5": "95ee28ee45e70130e3ab02b0f579ae23",
'info_dict': {
'id': 'GB1101300280',
'ext': 'mp4',
"upload_date": "20130624",
"uploader": "Hurts",
"title": "Somebody to Die For",
"duration": 230.12,
"width": 1920,
"height": 1080,
# timestamp and upload_date are often incorrect; seem to change randomly
'timestamp': int,
}
}, {
'note': 'v3 SMIL format',
'url': 'http://www.vevo.com/watch/cassadee-pope/i-wish-i-could-break-your-heart/USUV71302923',
'md5': 'f6ab09b034f8c22969020b042e5ac7fc',
'info_dict': {
'id': 'USUV71302923',
'ext': 'mp4',
'upload_date': '20140219',
'uploader': 'Cassadee Pope',
'title': 'I Wish I Could Break Your Heart',
'duration': 226.101,
'age_limit': 0,
'timestamp': int,
}
}, {
'note': 'Age-limited video',
'url': 'https://www.vevo.com/watch/justin-timberlake/tunnel-vision-explicit/USRV81300282',
'info_dict': {
'id': 'USRV81300282',
'ext': 'mp4',
'age_limit': 18,
'title': 'Tunnel Vision (Explicit)',
'uploader': 'Justin Timberlake',
'upload_date': 're:2013070[34]',
'timestamp': int,
},
'params': {
'skip_download': 'true',
}
}]
_SMIL_BASE_URL = 'http://smil.lvl3.vevo.com/'
def _real_initialize(self):
req = compat_urllib_request.Request(
'http://www.vevo.com/auth', data=b'')
webpage = self._download_webpage(
req, None,
note='Retrieving oauth token',
errnote='Unable to retrieve oauth token',
fatal=False)
if webpage is False:
self._oauth_token = None
else:
self._oauth_token = self._search_regex(
r'access_token":\s*"([^"]+)"',
webpage, 'access token', fatal=False)
def _formats_from_json(self, video_info):
last_version = {'version': -1}
for version in video_info['videoVersions']:
# These are the HTTP downloads, other types are for different manifests
if version['sourceType'] == 2:
if version['version'] > last_version['version']:
last_version = version
if last_version['version'] == -1:
raise ExtractorError('Unable to extract last version of the video')
renditions = compat_etree_fromstring(last_version['data'])
formats = []
# Already sorted from worst to best quality
for rend in renditions.findall('rendition'):
attr = rend.attrib
format_note = '%(videoCodec)s@%(videoBitrate)4sk, %(audioCodec)s@%(audioBitrate)3sk' % attr
formats.append({
'url': attr['url'],
'format_id': attr['name'],
'format_note': format_note,
'height': int(attr['frameheight']),
'width': int(attr['frameWidth']),
})
return formats
def _formats_from_smil(self, smil_xml):
formats = []
smil_doc = compat_etree_fromstring(smil_xml.encode('utf-8'))
els = smil_doc.findall('.//{http://www.w3.org/2001/SMIL20/Language}video')
for el in els:
src = el.attrib['src']
m = re.match(r'''(?xi)
(?P<ext>[a-z0-9]+):
(?P<path>
[/a-z0-9]+ # The directory and main part of the URL
_(?P<cbr>[0-9]+)k
_(?P<width>[0-9]+)x(?P<height>[0-9]+)
_(?P<vcodec>[a-z0-9]+)
_(?P<vbr>[0-9]+)
_(?P<acodec>[a-z0-9]+)
_(?P<abr>[0-9]+)
\.[a-z0-9]+ # File extension
)''', src)
if not m:
continue
format_url = self._SMIL_BASE_URL + m.group('path')
formats.append({
'url': format_url,
'format_id': 'SMIL_' + m.group('cbr'),
'vcodec': m.group('vcodec'),
'acodec': m.group('acodec'),
'vbr': int(m.group('vbr')),
'abr': int(m.group('abr')),
'ext': m.group('ext'),
'width': int(m.group('width')),
'height': int(m.group('height')),
})
return formats
def _download_api_formats(self, video_id):
if not self._oauth_token:
self._downloader.report_warning(
'No oauth token available, skipping API HLS download')
return []
api_url = 'https://apiv2.vevo.com/video/%s/streams/hls?token=%s' % (
video_id, self._oauth_token)
api_data = self._download_json(
api_url, video_id,
note='Downloading HLS formats',
errnote='Failed to download HLS format list', fatal=False)
if api_data is None:
return []
m3u8_url = api_data[0]['url']
return self._extract_m3u8_formats(
m3u8_url, video_id, entry_protocol='m3u8_native', ext='mp4',
preference=0)
def _real_extract(self, url):
mobj = re.match(self._VALID_URL, url)
video_id = mobj.group('id')
json_url = 'http://videoplayer.vevo.com/VideoService/AuthenticateVideo?isrc=%s' % video_id
response = self._download_json(json_url, video_id)
video_info = response['video']
if not video_info:
if 'statusMessage' in response:
raise ExtractorError('%s said: %s' % (self.IE_NAME, response['statusMessage']), expected=True)
raise ExtractorError('Unable to extract videos')
formats = self._formats_from_json(video_info)
is_explicit = video_info.get('isExplicit')
if is_explicit is True:
age_limit = 18
elif is_explicit is False:
age_limit = 0
else:
age_limit = None
# Download via HLS API
formats.extend(self._download_api_formats(video_id))
# Download SMIL
smil_blocks = sorted((
f for f in video_info['videoVersions']
if f['sourceType'] == 13),
key=lambda f: f['version'])
smil_url = '%s/Video/V2/VFILE/%s/%sr.smil' % (
self._SMIL_BASE_URL, video_id, video_id.lower())
if smil_blocks:
smil_url_m = self._search_regex(
r'url="([^"]+)"', smil_blocks[-1]['data'], 'SMIL URL',
default=None)
if smil_url_m is not None:
smil_url = smil_url_m
if smil_url:
smil_xml = self._download_webpage(
smil_url, video_id, 'Downloading SMIL info', fatal=False)
if smil_xml:
formats.extend(self._formats_from_smil(smil_xml))
self._sort_formats(formats)
timestamp_ms = int_or_none(self._search_regex(
r'/Date\((\d+)\)/',
video_info['launchDate'], 'launch date', fatal=False))
return {
'id': video_id,
'title': video_info['title'],
'formats': formats,
'thumbnail': video_info['imageUrl'],
'timestamp': timestamp_ms // 1000,
'uploader': video_info['mainArtists'][0]['artistName'],
'duration': video_info['duration'],
'age_limit': age_limit,
}
|
import abc
from typing import Union, Any
from py4j.java_gateway import JavaObject
from pyflink.java_gateway import get_gateway
class Function(abc.ABC):
"""
The base class for all user-defined functions.
"""
pass
class MapFunction(Function):
"""
Base class for Map functions. Map functions take elements and transform them, element wise. A
Map function always produces a single result element for each input element. Typical
applications are parsing elements, converting data types, or projecting out fields. Operations
that produce multiple result elements from a single input element can be implemented using the
FlatMapFunction.
The basic syntax for using a MapFunction is as follows:
::
>>> ds = ...
>>> new_ds = ds.map(MyMapFunction())
"""
@abc.abstractmethod
def map(self, value):
"""
The mapping method. Takes an element from the input data and transforms it into exactly one
element.
:param value: The input value.
:return: The transformed value.
"""
pass
class CoMapFunction(Function):
"""
A CoMapFunction implements a map() transformation over two connected streams.
The same instance of the transformation function is used to transform both of
the connected streams. That way, the stream transformations can share state.
The basic syntax for using a CoMapFunction is as follows:
::
>>> ds1 = ...
>>> ds2 = ...
>>> new_ds = ds1.connect(ds2).map(MyCoMapFunction())
"""
@abc.abstractmethod
def map1(self, value):
"""
This method is called for each element in the first of the connected streams.
:param value: The stream element
:return: The resulting element
"""
pass
@abc.abstractmethod
def map2(self, value):
"""
This method is called for each element in the second of the connected streams.
:param value: The stream element
:return: The resulting element
"""
pass
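# A minimal sketch of the MyCoMapFunction referenced in the docstring above; it
# normalizes elements from two connected streams (assumed here to be of two
# different types, purely for illustration) to a common string form:
class MyCoMapFunction(CoMapFunction):

    def map1(self, value):
        # elements of the first connected stream
        return str(value)

    def map2(self, value):
        # elements of the second connected stream
        return str(value)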
class FlatMapFunction(Function):
"""
Base class for flatMap functions. FlatMap functions take elements and transform them into zero,
one, or more elements. Typical applications are splitting elements, or unnesting lists and
arrays. Operations that produce strictly one result element per input element can also
use the MapFunction.
The basic syntax for using a FlatMapFunction is as follows:
::
>>> ds = ...
>>> new_ds = ds.flat_map(MyFlatMapFunction())
"""
@abc.abstractmethod
def flat_map(self, value):
"""
The core method of the FlatMapFunction. Takes an element from the input data and transforms
it into zero, one, or more elements.
A basic implementation of flat map is as follows:
::
>>> class MyFlatMapFunction(FlatMapFunction):
>>> def flat_map(self, value):
>>> for i in range(value):
>>> yield i
:param value: The input value.
:return: A generator
"""
pass
class CoFlatMapFunction(Function):
"""
A CoFlatMapFunction implements a flat-map transformation over two connected streams.
The same instance of the transformation function is used to transform both of the
connected streams. That way, the stream transformations can share state.
An example for the use of connected streams would be to apply rules that change over time
onto elements of a stream. One of the connected streams has the rules, the other stream the
elements to apply the rules to. The operation on the connected stream maintains the
current set of rules in the state. It may receive either a rule update (from the first stream)
and update the state, or a data element (from the second stream) and apply the rules in the
state to the element. The result of applying the rules would be emitted.
The basic syntax for using a CoFlatMapFunction is as follows:
::
>>> ds1 = ...
>>> ds2 = ...
>>> class MyCoFlatMapFunction(CoFlatMapFunction):
>>> def flat_map1(self, value):
>>> for i in range(value):
>>> yield i
>>> def flat_map2(self, value):
>>> for i in range(value):
>>> yield i
>>> new_ds = ds1.connect(ds2).flat_map(MyCoFlatMapFunction())
"""
@abc.abstractmethod
def flat_map1(self, value):
"""
This method is called for each element in the first of the connected streams.
:param value: The input value.
:return: A generator
"""
pass
@abc.abstractmethod
def flat_map2(self, value):
"""
This method is called for each element in the second of the connected streams.
:param value: The input value.
:return: A generator
"""
pass
class ReduceFunction(Function):
"""
Base interface for Reduce functions. Reduce functions combine groups of elements to a single
value, by always taking two elements and combining them into one. Reduce functions may be
used on entire data sets, or on grouped data sets. In the latter case, each group is reduced
individually.
The basic syntax for using a ReduceFunction is as follows:
::
>>> ds = ...
>>> new_ds = ds.key_by(lambda x: x[1]).reduce(MyReduceFunction())
"""
@abc.abstractmethod
def reduce(self, value1, value2):
"""
The core method of ReduceFunction, combining two values into one value of the same type.
The reduce function is consecutively applied to all values of a group until only a single
value remains.
:param value1: The first value to combine.
:param value2: The second value to combine.
:return: The combined value of both input values.
"""
pass
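# A minimal sketch of the MyReduceFunction referenced in the docstring above;
# assuming (count, key) tuples keyed by the second field, it sums the counts
# while keeping the key (the tuple layout is an assumption for illustration):
class MyReduceFunction(ReduceFunction):

    def reduce(self, value1, value2):
        return value1[0] + value2[0], value1[1]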
class KeySelector(Function):
"""
The KeySelector allows the use of deterministic objects for operations such as reduce, reduceGroup,
join, coGroup, etc. If invoked multiple times on the same object, the returned key must be the
same. The extractor takes an object and returns the deterministic key for that object.
"""
@abc.abstractmethod
def get_key(self, value):
"""
User-defined function that deterministically extracts the key from an object.
:param value: The object to get the key from.
:return: The extracted key.
"""
pass
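# A minimal illustrative KeySelector that always keys a record by its first
# field, so repeated calls on the same object return the same key:
class MyKeySelector(KeySelector):

    def get_key(self, value):
        return value[0]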
class FilterFunction(Function):
"""
A filter function is a predicate applied individually to each record. The predicate decides
whether to keep the element, or to discard it.
The basic syntax for using a FilterFunction is as follows:
::
>>> ds = ...
>>> result = ds.filter(MyFilterFunction())
Note that the system assumes that the function does not modify the elements on which the
predicate is applied. Violating this assumption can lead to incorrect results.
"""
@abc.abstractmethod
def filter(self, value):
"""
The filter function that evaluates the predicate.
:param value: The value to be filtered.
:return: True for values that should be retained, false for values to be filtered out.
"""
pass
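# A minimal sketch of the MyFilterFunction referenced in the docstring above;
# the non-negative predicate is an arbitrary example and, as required, does not
# modify the element it inspects:
class MyFilterFunction(FilterFunction):

    def filter(self, value):
        return value >= 0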
class Partitioner(Function):
"""
Function to implement a custom partition assignment for keys.
"""
@abc.abstractmethod
def partition(self, key: Any, num_partitions: int) -> int:
"""
Computes the partition for the given key.
:param key: The key.
:param num_partitions: The number of partitions to partition into.
:return: The partition index.
"""
pass
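# A minimal illustrative Partitioner that hashes the key onto the available
# partitions; any deterministic mapping into [0, num_partitions) would do:
class MyPartitioner(Partitioner):

    def partition(self, key: Any, num_partitions: int) -> int:
        return hash(key) % num_partitions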
class FunctionWrapper(object):
"""
A basic wrapper class for user-defined functions.
"""
def __init__(self, func):
self._func = func
class MapFunctionWrapper(FunctionWrapper):
"""
A wrapper class for MapFunction. It's used to wrap a user-defined function in a
MapFunction when the user does not implement a MapFunction but directly passes a function
object or a lambda function to the map() function.
"""
def __init__(self, func):
"""
The constructor of MapFunctionWrapper.
:param func: user defined function object.
"""
super(MapFunctionWrapper, self).__init__(func)
def map(self, value):
"""
A delegated map function to invoke user defined function.
:param value: The input value.
:return: the return value of user defined map function.
"""
return self._func(value)
class FlatMapFunctionWrapper(FunctionWrapper):
"""
A wrapper class for FlatMapFunction. It's used to wrap a user-defined function in a
FlatMapFunction when the user does not implement a FlatMapFunction but directly passes a
function object or a lambda function to the flat_map() function.
"""
def __init__(self, func):
"""
The constructor of FlatMapFunctionWrapper.
:param func: user defined function object.
"""
super(FlatMapFunctionWrapper, self).__init__(func)
def flat_map(self, value):
"""
A delegated flat_map function to invoke user defined function.
:param value: The input value.
:return: the return value of user defined flat_map function.
"""
return self._func(value)
class FilterFunctionWrapper(FunctionWrapper):
"""
A wrapper class for FilterFunction. It's used to wrap a user-defined function in a
FilterFunction when the user does not implement a FilterFunction but directly passes a
function object or a lambda function to the filter() function.
"""
def __init__(self, func):
super(FilterFunctionWrapper, self).__init__(func)
def filter(self, value):
return self._func(value)
class ReduceFunctionWrapper(FunctionWrapper):
"""
A wrapper class for ReduceFunction. It's used to wrap a user-defined function in a
ReduceFunction when the user does not implement a ReduceFunction but directly passes a
function object or a lambda function to the reduce() function.
"""
def __init__(self, func):
"""
The constructor of ReduceFunctionWrapper.
:param func: user defined function object.
"""
super(ReduceFunctionWrapper, self).__init__(func)
def reduce(self, value1, value2):
"""
A delegated reduce function to invoke user defined function.
:param value1: The first value to combine.
:param value2: The second value to combine.
:return: The combined value of both input values.
"""
return self._func(value1, value2)
class KeySelectorFunctionWrapper(FunctionWrapper):
"""
A wrapper class for KeySelector. It's used to wrap a user-defined function in a
KeySelector when the user does not implement a KeySelector but directly passes a
function object or a lambda function to the key_by() function.
"""
def __init__(self, func):
"""
The constructor of KeySelectorFunctionWrapper.
:param func: user defined function object.
"""
super(KeySelectorFunctionWrapper, self).__init__(func)
def get_key(self, value):
"""
A delegated get_key function to invoke user defined function.
:param value: The input value.
:return: the return value of user defined get_key function.
"""
return self._func(value)
class PartitionerFunctionWrapper(FunctionWrapper):
"""
A wrapper class for Partitioner. It's used to wrap a user-defined function in a
Partitioner when the user does not implement a Partitioner but directly passes a
function object or a lambda function to the partition_custom() function.
"""
def __init__(self, func):
"""
The constructor of PartitionerFunctionWrapper.
:param func: user defined function object.
"""
super(PartitionerFunctionWrapper, self).__init__(func)
def partition(self, key: Any, num_partitions: int) -> int:
"""
A delegated partition function to invoke user defined function.
:param key: The key.
:param num_partitions: The number of partitions to partition into.
:return: The partition index.
"""
return self._func(key, num_partitions)
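# A small, never-invoked sketch of how the wrapper classes above delegate to
# plain callables; in practice they are created internally when a lambda is
# passed to map()/flat_map()/filter()/reduce()/key_by()/partition_custom():
def _wrapper_usage_sketch():
    doubler = MapFunctionWrapper(lambda x: x * 2)
    assert doubler.map(21) == 42
    evens = FilterFunctionWrapper(lambda x: x % 2 == 0)
    assert evens.filter(4) and not evens.filter(5)
    summer = ReduceFunctionWrapper(lambda a, b: a + b)
    assert summer.reduce(1, 2) == 3
    by_first_field = KeySelectorFunctionWrapper(lambda record: record[0])
    assert by_first_field.get_key(('key', 1)) == 'key'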
def _get_python_env():
"""
A utility function to get a Python user-defined function execution environment.
"""
gateway = get_gateway()
exec_type = gateway.jvm.org.apache.flink.table.functions.python.PythonEnv.ExecType.PROCESS
return gateway.jvm.org.apache.flink.table.functions.python.PythonEnv(exec_type)
class JavaFunctionWrapper(object):
"""
A wrapper class that maintains a Function implemented in Java.
"""
def __init__(self, j_function: Union[str, JavaObject]):
if isinstance(j_function, str):
j_func_class = get_gateway().jvm.__getattr__(j_function)
j_function = j_func_class()
self._j_function = j_function
def get_java_function(self):
return self._j_function
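# A minimal sketch (as a comment, since it requires a running Flink JVM
# gateway) of constructing a JavaFunctionWrapper from a fully qualified class
# name; the class name below is only an example of the string form accepted:
#
#     wrapper = JavaFunctionWrapper('org.apache.flink.api.java.functions.NullByteKeySelector')
#     j_key_selector = wrapper.get_java_function()  # a py4j JavaObject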
class SourceFunction(JavaFunctionWrapper):
"""
Base class for all stream data sources in Flink.
"""
def __init__(self, source_func: Union[str, JavaObject]):
"""
Constructor of SourceFunction.
:param source_func: The java SourceFunction object.
"""
super(SourceFunction, self).__init__(source_func)
class SinkFunction(JavaFunctionWrapper):
"""
The base class for SinkFunctions.
"""
def __init__(self, sink_func: Union[str, JavaObject]):
"""
Constructor of SinkFunction.
:param sink_func: The java SinkFunction object or the full name of the SinkFunction class.
"""
super(SinkFunction, self).__init__(sink_func)
|
from .UIElement import UIElement, Root
|
from lib.actions import YammerAction
__all__ = [
'GetUserByIdAction'
]
class GetUserByIdAction(YammerAction):
def run(self, id=None):
yammer = self.authenticate()
user = yammer.users.find(id)
return user
|
"""Framework utilities.
@@assert_same_float_dtype
@@assert_scalar_int
@@convert_to_tensor_or_sparse_tensor
@@get_graph_from_inputs
@@is_numeric_tensor
@@is_non_decreasing
@@is_strictly_increasing
@@reduce_sum_n
@@safe_embedding_lookup_sparse
@@with_shape
@@with_same_shape
@@arg_scope
@@add_arg_scope
@@has_arg_scope
@@arg_scoped_arguments
@@add_model_variable
@@assert_global_step
@@assert_or_get_global_step
@@create_global_step
@@get_global_step
@@get_or_create_global_step
@@get_local_variables
@@get_model_variables
@@get_unique_variable
@@get_variables_by_name
@@get_variables_by_suffix
@@get_variables_to_restore
@@get_variables
@@local_variable
@@model_variable
@@variable
@@VariableDeviceChooser
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import sys
from tensorflow.contrib.framework.python.framework import *
from tensorflow.contrib.framework.python.ops import *
from tensorflow.python.util.all_util import make_all
__all__ = make_all(__name__)
|
from st2common.runners.base_action import Action
class IncreaseIndexAndCheckCondition(Action):
def run(self, index, pagesize, input):
if pagesize and pagesize != "":
if len(input) < int(pagesize):
return (False, "Breaking out of the loop")
else:
pagesize = 0
if not index or index == "":
index = 1
return (True, int(index) + 1)
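# Illustrative semantics of the action above (values are made up): with
# pagesize=50, a full page keeps the loop going and advances the index,
# while a short page breaks out of the loop.
#
#     run(index=1, pagesize=50, input=[...50 items...])  # -> (True, 2)
#     run(index=2, pagesize=50, input=[...10 items...])  # -> (False, "Breaking out of the loop")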
|
import filecmp
import json
import os
import os.path
import re
import subprocess
import sys
import time
import tempfile
import traceback
import uuid
import shutil
from Utils import HandlerUtil
from Common import CommonVariables, CryptItem
from ExtensionParameter import ExtensionParameter
from DiskUtil import DiskUtil
from ResourceDiskUtil import ResourceDiskUtil
from BackupLogger import BackupLogger
from KeyVaultUtil import KeyVaultUtil
from EncryptionConfig import EncryptionConfig
from patch import GetDistroPatcher
from BekUtil import BekUtil
from check_util import CheckUtil
from DecryptionMarkConfig import DecryptionMarkConfig
from EncryptionMarkConfig import EncryptionMarkConfig
from EncryptionEnvironment import EncryptionEnvironment
from OnGoingItemConfig import OnGoingItemConfig
from ProcessLock import ProcessLock
from CommandExecutor import CommandExecutor, ProcessCommunicator
from __builtin__ import int
def install():
hutil.do_parse_context('Install')
hutil.do_exit(0, 'Install', CommonVariables.extension_success_status, str(CommonVariables.success), 'Install Succeeded')
def disable():
hutil.do_parse_context('Disable')
# Archive configs at disable to make them available to new extension version prior to update
# The extension update handshake is [old:disable][new:update][old:uninstall][new:install]
hutil.archive_old_configs()
hutil.do_exit(0, 'Disable', CommonVariables.extension_success_status, '0', 'Disable succeeded')
def uninstall():
hutil.do_parse_context('Uninstall')
hutil.do_exit(0, 'Uninstall', CommonVariables.extension_success_status, '0', 'Uninstall succeeded')
def disable_encryption():
hutil.do_parse_context('DisableEncryption')
logger.log('Disabling encryption')
decryption_marker = DecryptionMarkConfig(logger, encryption_environment)
if decryption_marker.config_file_exists():
logger.log(msg="decryption is marked, starting daemon.", level=CommonVariables.InfoLevel)
start_daemon('DisableEncryption')
hutil.do_exit(exit_code=0,
operation='DisableEncryption',
status=CommonVariables.extension_success_status,
code=str(CommonVariables.success),
message='Decryption started')
exit_status = {
'operation': 'DisableEncryption',
'status': CommonVariables.extension_success_status,
'status_code': str(CommonVariables.success),
'message': 'Decryption completed'
}
hutil.exit_if_same_seq(exit_status)
hutil.save_seq()
try:
extension_parameter = ExtensionParameter(hutil, logger, DistroPatcher, encryption_environment, get_protected_settings(), get_public_settings())
disk_util = DiskUtil(hutil=hutil, patching=DistroPatcher, logger=logger, encryption_environment=encryption_environment)
encryption_status = json.loads(disk_util.get_encryption_status())
if encryption_status["os"] != "NotEncrypted":
raise Exception("Disabling encryption is not supported when OS volume is encrypted")
bek_util = BekUtil(disk_util, logger)
encryption_config = EncryptionConfig(encryption_environment, logger)
bek_passphrase_file = bek_util.get_bek_passphrase_file(encryption_config)
crypt_items = disk_util.get_crypt_items()
logger.log('Found {0} items to decrypt'.format(len(crypt_items)))
for crypt_item in crypt_items:
disk_util.create_cleartext_key(crypt_item.mapper_name)
add_result = disk_util.luks_add_cleartext_key(bek_passphrase_file,
crypt_item.dev_path,
crypt_item.mapper_name,
crypt_item.luks_header_path)
if add_result != CommonVariables.process_success:
if disk_util.is_luks_device(crypt_item.dev_path, crypt_item.luks_header_path):
raise Exception("luksAdd failed with return code {0}".format(add_result))
else:
logger.log("luksAdd failed with return code {0}".format(add_result))
logger.log("Ignoring for now, as device ({0}) does not seem to be a luks device".format(crypt_item.dev_path))
continue
if crypt_item.dev_path.startswith("/dev/sd"):
logger.log('Updating crypt item entry to use mapper name')
logger.log('Device name before update: {0}'.format(crypt_item.dev_path))
crypt_item.dev_path = disk_util.get_persistent_path_by_sdx_path(crypt_item.dev_path)
logger.log('Device name after update: {0}'.format(crypt_item.dev_path))
crypt_item.uses_cleartext_key = True
disk_util.update_crypt_item(crypt_item, None)
logger.log('Added cleartext key for {0}'.format(crypt_item))
decryption_marker.command = extension_parameter.command
decryption_marker.volume_type = extension_parameter.VolumeType
decryption_marker.commit()
hutil.do_exit(exit_code=0,
operation='DisableEncryption',
status=CommonVariables.extension_success_status,
code=str(CommonVariables.success),
message='Decryption started')
except Exception as e:
message = "Failed to disable the extension with error: {0}, stack trace: {1}".format(e, traceback.format_exc())
logger.log(msg=message, level=CommonVariables.ErrorLevel)
hutil.do_exit(exit_code=CommonVariables.unknown_error,
operation='DisableEncryption',
status=CommonVariables.extension_error_status,
code=str(CommonVariables.unknown_error),
message=message)
def get_public_settings():
public_settings_str = hutil._context._config['runtimeSettings'][0]['handlerSettings'].get('publicSettings')
if isinstance(public_settings_str, basestring):
return json.loads(public_settings_str)
else:
return public_settings_str
def get_protected_settings():
protected_settings_str = hutil._context._config['runtimeSettings'][0]['handlerSettings'].get('protectedSettings')
if isinstance(protected_settings_str, basestring):
return json.loads(protected_settings_str)
else:
return protected_settings_str
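# For reference, a sketch of the handler settings structure that the two
# getters above index into; field values are placeholders, and publicSettings/
# protectedSettings may arrive either as parsed objects or as JSON strings,
# which is why both getters handle the string case:
#
#     {
#         "runtimeSettings": [{
#             "handlerSettings": {
#                 "publicSettings": {"EncryptionOperation": "EnableEncryption", ...},
#                 "protectedSettings": "..."
#             }
#         }]
#     }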
def update_encryption_settings():
hutil.do_parse_context('UpdateEncryptionSettings')
logger.log('Updating encryption settings')
# re-install extra packages like cryptsetup if no longer on system from earlier enable
try:
DistroPatcher.install_extras()
except Exception as e:
message = "Failed to update encryption settings with error: {0}, stack trace: {1}".format(e, traceback.format_exc())
hutil.do_exit(exit_code=CommonVariables.missing_dependency,
operation='UpdateEncryptionSettings',
status=CommonVariables.extension_error_status,
code=str(CommonVariables.missing_dependency),
message=message)
encryption_config = EncryptionConfig(encryption_environment, logger)
config_secret_seq = encryption_config.get_secret_seq_num()
current_secret_seq_num = int(config_secret_seq if config_secret_seq else -1)
update_call_seq_num = hutil.get_current_seq()
logger.log("Current secret was created in operation #{0}".format(current_secret_seq_num))
logger.log("The update call is operation #{0}".format(update_call_seq_num))
executor = CommandExecutor(logger)
executor.Execute("mount /boot")
try:
disk_util = DiskUtil(hutil=hutil, patching=DistroPatcher, logger=logger, encryption_environment=encryption_environment)
bek_util = BekUtil(disk_util, logger)
extension_parameter = ExtensionParameter(hutil, logger, DistroPatcher, encryption_environment, get_protected_settings(), get_public_settings())
existing_passphrase_file = bek_util.get_bek_passphrase_file(encryption_config)
if current_secret_seq_num < update_call_seq_num:
if extension_parameter.passphrase is None or extension_parameter.passphrase == "":
extension_parameter.passphrase = bek_util.generate_passphrase(extension_parameter.KeyEncryptionAlgorithm)
logger.log('Recreating secret to store in the KeyVault')
keyVaultUtil = KeyVaultUtil(logger)
temp_keyfile = tempfile.NamedTemporaryFile(delete=False)
temp_keyfile.write(extension_parameter.passphrase)
temp_keyfile.close()
for crypt_item in disk_util.get_crypt_items():
if not crypt_item:
continue
before_keyslots = disk_util.luks_dump_keyslots(crypt_item.dev_path, crypt_item.luks_header_path)
logger.log("Before key addition, keyslots for {0}: {1}".format(crypt_item.dev_path, before_keyslots))
logger.log("Adding new key for {0}".format(crypt_item.dev_path))
luks_add_result = disk_util.luks_add_key(passphrase_file=existing_passphrase_file,
dev_path=crypt_item.dev_path,
mapper_name=crypt_item.mapper_name,
header_file=crypt_item.luks_header_path,
new_key_path=temp_keyfile.name)
logger.log("luks add result is {0}".format(luks_add_result))
after_keyslots = disk_util.luks_dump_keyslots(crypt_item.dev_path, crypt_item.luks_header_path)
logger.log("After key addition, keyslots for {0}: {1}".format(crypt_item.dev_path, after_keyslots))
new_keyslot = list(map(lambda x: x[0] != x[1], zip(before_keyslots, after_keyslots))).index(True)
logger.log("New key was added in keyslot {0}".format(new_keyslot))
# crypt_item.current_luks_slot = new_keyslot
# disk_util.update_crypt_item(crypt_item)
logger.log("New key successfully added to all encrypted devices")
if DistroPatcher.distro_info[0] == "Ubuntu":
logger.log("Updating initrd image with new osluksheader.")
executor.Execute("update-initramfs -u -k all", True)
if DistroPatcher.distro_info[0] == "redhat" or DistroPatcher.distro_info[0] == "centos":
distro_version = DistroPatcher.distro_info[1]
if distro_version.startswith('7.'):
logger.log("Updating initrd image with new osluksheader.")
executor.ExecuteInBash("/usr/sbin/dracut -f -v --kver `grubby --default-kernel | sed 's|/boot/vmlinuz-||g'`", True)
os.unlink(temp_keyfile.name)
# install Python ADAL support if using client certificate authentication
if extension_parameter.AADClientCertThumbprint:
DistroPatcher.install_adal()
kek_secret_id_created = keyVaultUtil.create_kek_secret(Passphrase=extension_parameter.passphrase,
KeyVaultURL=extension_parameter.KeyVaultURL,
KeyEncryptionKeyURL=extension_parameter.KeyEncryptionKeyURL,
AADClientID=extension_parameter.AADClientID,
AADClientCertThumbprint=extension_parameter.AADClientCertThumbprint,
KeyEncryptionAlgorithm=extension_parameter.KeyEncryptionAlgorithm,
AADClientSecret=extension_parameter.AADClientSecret,
DiskEncryptionKeyFileName=extension_parameter.DiskEncryptionKeyFileName)
if kek_secret_id_created is None:
hutil.do_exit(exit_code=CommonVariables.create_encryption_secret_failed,
operation='UpdateEncryptionSettings',
status=CommonVariables.extension_error_status,
code=str(CommonVariables.create_encryption_secret_failed),
message='UpdateEncryptionSettings failed.')
else:
encryption_config.passphrase_file_name = extension_parameter.DiskEncryptionKeyFileName
encryption_config.secret_id = kek_secret_id_created
encryption_config.secret_seq_num = hutil.get_current_seq()
encryption_config.commit()
shutil.copy(existing_passphrase_file, encryption_environment.bek_backup_path)
logger.log("Backed up BEK at {0}".format(encryption_environment.bek_backup_path))
hutil.do_exit(exit_code=0,
operation='UpdateEncryptionSettings',
status=CommonVariables.extension_success_status,
code=str(CommonVariables.success),
message=str(kek_secret_id_created))
else:
logger.log('Secret has already been updated')
mount_encrypted_disks(disk_util, bek_util, existing_passphrase_file, encryption_config)
disk_util.log_lsblk_output()
hutil.exit_if_same_seq()
# remount bek volume
existing_passphrase_file = bek_util.get_bek_passphrase_file(encryption_config)
if extension_parameter.passphrase and extension_parameter.passphrase != file(existing_passphrase_file).read():
logger.log("The new passphrase has not been placed in BEK volume yet")
logger.log("Skipping removal of old passphrase")
exit_without_status_report()
logger.log('Removing old passphrase')
for crypt_item in disk_util.get_crypt_items():
if not crypt_item:
continue
if filecmp.cmp(existing_passphrase_file, encryption_environment.bek_backup_path):
logger.log('Current BEK and backup are the same, skipping removal')
continue
logger.log('Removing old passphrase from {0}'.format(crypt_item.dev_path))
keyslots = disk_util.luks_dump_keyslots(crypt_item.dev_path, crypt_item.luks_header_path)
logger.log("Keyslots before removal: {0}".format(keyslots))
luks_remove_result = disk_util.luks_remove_key(passphrase_file=encryption_environment.bek_backup_path,
dev_path=crypt_item.dev_path,
header_file=crypt_item.luks_header_path)
logger.log("luks remove result is {0}".format(luks_remove_result))
keyslots = disk_util.luks_dump_keyslots(crypt_item.dev_path, crypt_item.luks_header_path)
logger.log("Keyslots after removal: {0}".format(keyslots))
logger.log("Old key successfully removed from all encrypted devices")
if DistroPatcher.distro_info[0] == "Ubuntu":
logger.log("Updating initrd image with new osluksheader.")
executor.Execute("update-initramfs -u -k all", True)
if DistroPatcher.distro_info[0] == "redhat" or DistroPatcher.distro_info[0] == "centos":
distro_version = DistroPatcher.distro_info[1]
if distro_version.startswith('7.'):
logger.log("Updating initrd image with new osluksheader.")
executor.ExecuteInBash("/usr/sbin/dracut -f -v --kver `grubby --default-kernel | sed 's|/boot/vmlinuz-||g'`", True)
hutil.save_seq()
extension_parameter.commit()
os.unlink(encryption_environment.bek_backup_path)
hutil.do_exit(exit_code=0,
operation='UpdateEncryptionSettings',
status=CommonVariables.extension_success_status,
code=str(CommonVariables.success),
message='Encryption settings updated')
except Exception as e:
message = "Failed to update encryption settings with error: {0}, stack trace: {1}".format(e, traceback.format_exc())
logger.log(msg=message, level=CommonVariables.ErrorLevel)
hutil.do_exit(exit_code=CommonVariables.unknown_error,
operation='UpdateEncryptionSettings',
status=CommonVariables.extension_error_status,
code=str(CommonVariables.unknown_error),
message=message)
def update():
# The extension update handshake is [old:disable][new:update][old:uninstall][new:install]
# this method is called when updating an older version of the extension to a newer version
hutil.do_parse_context('Update')
logger.log("Installing pre-requisites")
DistroPatcher.install_extras()
DistroPatcher.update_prereq()
hutil.do_exit(0, 'Update', CommonVariables.extension_success_status, '0', 'Update Succeeded')
def exit_without_status_report():
sys.exit(0)
def not_support_header_option_distro(patching):
if patching.distro_info[0].lower() == "centos" and patching.distro_info[1].startswith('6.'):
return True
if patching.distro_info[0].lower() == "redhat" and patching.distro_info[1].startswith('6.'):
return True
if patching.distro_info[0].lower() == "suse" and patching.distro_info[1].startswith('11'):
return True
return False
def none_or_empty(obj):
if obj is None or obj == "":
return True
else:
return False
def toggle_se_linux_for_centos7(disable):
if DistroPatcher.distro_info[0].lower() == 'centos' and DistroPatcher.distro_info[1].startswith('7.0'):
if disable:
se_linux_status = encryption_environment.get_se_linux()
if se_linux_status.lower() == 'enforcing':
encryption_environment.disable_se_linux()
return True
else:
encryption_environment.enable_se_linux()
return False
def mount_encrypted_disks(disk_util, bek_util, passphrase_file, encryption_config):
# mount encrypted resource disk
volume_type = encryption_config.get_volume_type().lower()
if volume_type == CommonVariables.VolumeTypeData.lower() or volume_type == CommonVariables.VolumeTypeAll.lower():
resource_disk_util = ResourceDiskUtil(logger, disk_util, passphrase_file, get_public_settings(), DistroPatcher.distro_info)
resource_disk_util.automount()
logger.log("mounted encrypted resource disk")
# add workaround for CentOS 7.0
se_linux_status = None
if DistroPatcher.distro_info[0].lower() == 'centos' and DistroPatcher.distro_info[1].startswith('7.0'):
se_linux_status = encryption_environment.get_se_linux()
if se_linux_status.lower() == 'enforcing':
encryption_environment.disable_se_linux()
# mount any data disks - make sure the azure disk config path exists.
for crypt_item in disk_util.get_crypt_items():
if not crypt_item:
continue
if not os.path.exists(os.path.join(CommonVariables.dev_mapper_root, crypt_item.mapper_name)):
luks_open_result = disk_util.luks_open(passphrase_file=passphrase_file,
dev_path=crypt_item.dev_path,
mapper_name=crypt_item.mapper_name,
header_file=crypt_item.luks_header_path,
uses_cleartext_key=crypt_item.uses_cleartext_key)
logger.log("luks open result is {0}".format(luks_open_result))
disk_util.mount_crypt_item(crypt_item, passphrase_file)
if DistroPatcher.distro_info[0].lower() == 'centos' and DistroPatcher.distro_info[1].startswith('7.0'):
if se_linux_status is not None and se_linux_status.lower() == 'enforcing':
encryption_environment.enable_se_linux()
def main():
global hutil, DistroPatcher, logger, encryption_environment
HandlerUtil.LoggerInit('/var/log/waagent.log', '/dev/stdout')
HandlerUtil.waagent.Log("{0} started to handle.".format(CommonVariables.extension_name))
hutil = HandlerUtil.HandlerUtility(HandlerUtil.waagent.Log, HandlerUtil.waagent.Error, CommonVariables.extension_name)
logger = BackupLogger(hutil)
DistroPatcher = GetDistroPatcher(logger)
hutil.patching = DistroPatcher
encryption_environment = EncryptionEnvironment(patching=DistroPatcher, logger=logger)
disk_util = DiskUtil(hutil=hutil, patching=DistroPatcher, logger=logger, encryption_environment=encryption_environment)
hutil.disk_util = disk_util
if DistroPatcher is None:
hutil.do_exit(exit_code=CommonVariables.os_not_supported,
operation='Enable',
status=CommonVariables.extension_error_status,
code=(CommonVariables.os_not_supported),
message='Enable failed: the os is not supported')
for a in sys.argv[1:]:
if re.match("^([-/]*)(disable)", a):
disable()
elif re.match("^([-/]*)(uninstall)", a):
uninstall()
elif re.match("^([-/]*)(install)", a):
install()
elif re.match("^([-/]*)(enable)", a):
enable()
elif re.match("^([-/]*)(update)", a):
update()
elif re.match("^([-/]*)(daemon)", a):
daemon()
def mark_encryption(command, volume_type, disk_format_query):
encryption_marker = EncryptionMarkConfig(logger, encryption_environment)
encryption_marker.command = command
encryption_marker.volume_type = volume_type
encryption_marker.diskFormatQuery = disk_format_query
encryption_marker.commit()
return encryption_marker
def is_daemon_running():
handler_path = os.path.join(os.getcwd(), __file__)
daemon_arg = "-daemon"
psproc = subprocess.Popen(['ps', 'aux'], stdout=subprocess.PIPE)
pslist, _ = psproc.communicate()
for line in pslist.split("\n"):
if handler_path in line and daemon_arg in line:
return True
return False
def enable():
while True:
hutil.do_parse_context('Enable')
logger.log('Enabling extension')
public_settings = get_public_settings()
logger.log('Public settings:\n{0}'.format(json.dumps(public_settings, sort_keys=True, indent=4)))
cutil = CheckUtil(logger)
# Mount already encrypted disks before running fatal prechecks
disk_util = DiskUtil(hutil=hutil, patching=DistroPatcher, logger=logger, encryption_environment=encryption_environment)
bek_util = BekUtil(disk_util, logger)
existing_passphrase_file = None
encryption_config = EncryptionConfig(encryption_environment=encryption_environment, logger=logger)
existing_passphrase_file = bek_util.get_bek_passphrase_file(encryption_config)
if existing_passphrase_file is not None:
mount_encrypted_disks(disk_util=disk_util,
bek_util=bek_util,
encryption_config=encryption_config,
passphrase_file=existing_passphrase_file)
# Migrate to early unlock if using crypt mount
if disk_util.should_use_azure_crypt_mount():
disk_util.migrate_crypt_items(existing_passphrase_file)
encryption_status = json.loads(disk_util.get_encryption_status())
# run fatal prechecks, report error if exceptions are caught
try:
cutil.precheck_for_fatal_failures(public_settings, encryption_status, DistroPatcher)
except Exception as e:
logger.log("PRECHECK: Fatal Exception thrown during precheck")
logger.log(traceback.format_exc())
msg = e.message
hutil.do_exit(exit_code=CommonVariables.configuration_error,
operation='Enable',
status=CommonVariables.extension_error_status,
code=(CommonVariables.configuration_error),
message=msg)
hutil.disk_util.log_lsblk_output()
# run prechecks and log any failures detected
try:
if cutil.is_non_fatal_precheck_failure():
logger.log("PRECHECK: Precheck failure, incompatible environment suspected")
else:
logger.log("PRECHECK: Prechecks successful")
except Exception:
logger.log("PRECHECK: Exception thrown during precheck")
logger.log(traceback.format_exc())
encryption_operation = public_settings.get(CommonVariables.EncryptionEncryptionOperationKey)
if encryption_operation in [CommonVariables.EnableEncryption, CommonVariables.EnableEncryptionFormat, CommonVariables.EnableEncryptionFormatAll]:
logger.log("handle.py found enable encryption operation")
extension_parameter = ExtensionParameter(hutil, logger, DistroPatcher, encryption_environment, get_protected_settings(), public_settings)
if os.path.exists(encryption_environment.bek_backup_path) or (extension_parameter.config_file_exists() and extension_parameter.config_changed()):
logger.log("Config has changed, updating encryption settings")
update_encryption_settings()
extension_parameter.commit()
else:
logger.log("Config did not change or first call, enabling encryption")
enable_encryption()
elif encryption_operation == CommonVariables.DisableEncryption:
logger.log("handle.py found disable encryption operation")
disable_encryption()
elif encryption_operation == CommonVariables.QueryEncryptionStatus:
logger.log("handle.py found query operation")
encryption_marker = EncryptionMarkConfig(logger, encryption_environment)
if is_daemon_running() or (encryption_marker and not encryption_marker.config_file_exists()):
logger.log("A daemon is already running or no operation in progress, exiting without status report")
hutil.redo_last_status()
exit_without_status_report()
else:
logger.log("No daemon found, trying to find the last non-query operation")
hutil.find_last_nonquery_operation = True
else:
msg = "Encryption operation {0} is not supported".format(encryption_operation)
logger.log(msg)
hutil.do_exit(exit_code=CommonVariables.configuration_error,
operation='Enable',
status=CommonVariables.extension_error_status,
code=(CommonVariables.configuration_error),
message=msg)
def enable_encryption():
hutil.do_parse_context('EnableEncryption')
# we need to start another subprocess to do it, because the initial process
# would be killed by the waagent (wala) in 5 minutes.
logger.log('Enabling encryption')
"""
trying to mount the crypted items.
"""
disk_util = DiskUtil(hutil=hutil, patching=DistroPatcher, logger=logger, encryption_environment=encryption_environment)
bek_util = BekUtil(disk_util, logger)
existing_passphrase_file = None
encryption_config = EncryptionConfig(encryption_environment=encryption_environment, logger=logger)
config_path_result = disk_util.make_sure_path_exists(encryption_environment.encryption_config_path)
if config_path_result != CommonVariables.process_success:
logger.log(msg="azure encryption path creation failed.",
level=CommonVariables.ErrorLevel)
if encryption_config.config_file_exists():
existing_passphrase_file = bek_util.get_bek_passphrase_file(encryption_config)
if existing_passphrase_file is not None:
mount_encrypted_disks(disk_util=disk_util,
bek_util=bek_util,
encryption_config=encryption_config,
passphrase_file=existing_passphrase_file)
else:
logger.log(msg="EncryptionConfig is present, but could not get the BEK file.",
level=CommonVariables.WarningLevel)
hutil.redo_last_status()
exit_without_status_report()
ps = subprocess.Popen(["ps", "aux"], stdout=subprocess.PIPE, stderr=subprocess.PIPE)
ps_stdout, ps_stderr = ps.communicate()
if re.search(r"dd.*of=/dev/mapper/osencrypt", ps_stdout):
logger.log(msg="OS disk encryption already in progress, exiting",
level=CommonVariables.WarningLevel)
exit_without_status_report()
# handle the re-call scenario: the re-call resumes the operation
# if an encryption mark exists for the next reboot.
encryption_marker = EncryptionMarkConfig(logger, encryption_environment)
try:
protected_settings_str = hutil._context._config['runtimeSettings'][0]['handlerSettings'].get('protectedSettings')
public_settings_str = hutil._context._config['runtimeSettings'][0]['handlerSettings'].get('publicSettings')
if isinstance(public_settings_str, basestring):
public_settings = json.loads(public_settings_str)
else:
public_settings = public_settings_str
if isinstance(protected_settings_str, basestring):
protected_settings = json.loads(protected_settings_str)
else:
protected_settings = protected_settings_str
extension_parameter = ExtensionParameter(hutil, logger, DistroPatcher, encryption_environment, protected_settings, public_settings)
kek_secret_id_created = None
encryption_marker = EncryptionMarkConfig(logger, encryption_environment)
if encryption_marker.config_file_exists():
# verify the encryption mark
logger.log(msg="encryption mark is there, starting daemon.", level=CommonVariables.InfoLevel)
start_daemon('EnableEncryption')
else:
encryption_config = EncryptionConfig(encryption_environment, logger)
exit_status = None
if encryption_config.config_file_exists():
exit_status = {
'operation': 'EnableEncryption',
'status': CommonVariables.extension_success_status,
'status_code': str(CommonVariables.success),
'message': encryption_config.get_secret_id()
}
hutil.exit_if_same_seq(exit_status)
hutil.save_seq()
encryption_config.volume_type = extension_parameter.VolumeType
encryption_config.commit()
if encryption_config.config_file_exists() and existing_passphrase_file is not None:
logger.log(msg="config file exists and passphrase file exists.", level=CommonVariables.WarningLevel)
encryption_marker = mark_encryption(command=extension_parameter.command,
volume_type=extension_parameter.VolumeType,
disk_format_query=extension_parameter.DiskFormatQuery)
start_daemon('EnableEncryption')
else:
"""
creating the secret, the secret would be transferred to a bek volume after the updatevm called in powershell.
"""
# store the luks passphrase in the secret.
keyVaultUtil = KeyVaultUtil(logger)
"""
validate the parameters
"""
if(extension_parameter.VolumeType is None or
not any([extension_parameter.VolumeType.lower() == vt.lower() for vt in CommonVariables.SupportedVolumeTypes])):
if encryption_config.config_file_exists():
existing_passphrase_file = bek_util.get_bek_passphrase_file(encryption_config)
if existing_passphrase_file is None:
logger.log("Unsupported volume type specified and BEK volume does not exist, clearing encryption config")
encryption_config.clear_config()
hutil.do_exit(exit_code=CommonVariables.configuration_error,
operation='EnableEncryption',
status=CommonVariables.extension_error_status,
code=str(CommonVariables.configuration_error),
message='VolumeType "{0}" is not supported'.format(extension_parameter.VolumeType))
if extension_parameter.command not in [CommonVariables.EnableEncryption, CommonVariables.EnableEncryptionFormat, CommonVariables.EnableEncryptionFormatAll]:
hutil.do_exit(exit_code=CommonVariables.configuration_error,
operation='EnableEncryption',
status=CommonVariables.extension_error_status,
code=str(CommonVariables.configuration_error),
message='Command "{0}" is not supported'.format(extension_parameter.command))
"""
this is the fresh call case
"""
# handle the passphrase related
if existing_passphrase_file is None:
if extension_parameter.passphrase is None or extension_parameter.passphrase == "":
extension_parameter.passphrase = bek_util.generate_passphrase(extension_parameter.KeyEncryptionAlgorithm)
else:
logger.log(msg="the extension_parameter.passphrase is already defined")
# install Python ADAL support if using client certificate authentication
if extension_parameter.AADClientCertThumbprint:
DistroPatcher.install_adal()
kek_secret_id_created = keyVaultUtil.create_kek_secret(Passphrase=extension_parameter.passphrase,
KeyVaultURL=extension_parameter.KeyVaultURL,
KeyEncryptionKeyURL=extension_parameter.KeyEncryptionKeyURL,
AADClientID=extension_parameter.AADClientID,
AADClientCertThumbprint=extension_parameter.AADClientCertThumbprint,
KeyEncryptionAlgorithm=extension_parameter.KeyEncryptionAlgorithm,
AADClientSecret=extension_parameter.AADClientSecret,
DiskEncryptionKeyFileName=extension_parameter.DiskEncryptionKeyFileName)
if kek_secret_id_created is None:
encryption_config.clear_config()
hutil.do_exit(exit_code=CommonVariables.create_encryption_secret_failed,
operation='EnableEncryption',
status=CommonVariables.extension_error_status,
code=str(CommonVariables.create_encryption_secret_failed),
message='Enable failed.')
else:
encryption_config.passphrase_file_name = extension_parameter.DiskEncryptionKeyFileName
encryption_config.volume_type = extension_parameter.VolumeType
encryption_config.secret_id = kek_secret_id_created
encryption_config.secret_seq_num = hutil.get_current_seq()
encryption_config.commit()
extension_parameter.commit()
encryption_marker = mark_encryption(command=extension_parameter.command,
volume_type=extension_parameter.VolumeType,
disk_format_query=extension_parameter.DiskFormatQuery)
if kek_secret_id_created:
hutil.do_exit(exit_code=0,
operation='EnableEncryption',
status=CommonVariables.extension_success_status,
code=str(CommonVariables.success),
message=str(kek_secret_id_created))
else:
"""
the enabling called again. the passphrase would be re-used.
"""
hutil.do_exit(exit_code=0,
operation='EnableEncryption',
status=CommonVariables.extension_success_status,
code=str(CommonVariables.encrypttion_already_enabled),
message=str(kek_secret_id_created))
except Exception as e:
message = "Failed to enable the extension with error: {0}, stack trace: {1}".format(e, traceback.format_exc())
logger.log(msg=message, level=CommonVariables.ErrorLevel)
hutil.do_exit(exit_code=CommonVariables.unknown_error,
operation='EnableEncryption',
status=CommonVariables.extension_error_status,
code=str(CommonVariables.unknown_error),
message=message)
def enable_encryption_format(passphrase, disk_format_query, disk_util, force=False):
logger.log('enable_encryption_format')
logger.log('disk format query is {0}'.format(disk_format_query))
json_parsed = json.loads(disk_format_query)
if type(json_parsed) is dict:
encryption_format_items = [json_parsed, ]
elif type(json_parsed) is list:
encryption_format_items = json_parsed
else:
raise Exception("JSON parse error. Input: {0}".format(disk_format_query))
for encryption_item in encryption_format_items:
dev_path_in_query = None
if "scsi" in encryption_item and encryption_item["scsi"] != '':
dev_path_in_query = disk_util.query_dev_sdx_path_by_scsi_id(encryption_item["scsi"])
if "dev_path" in encryption_item and encryption_item["dev_path"] != '':
dev_path_in_query = encryption_item["dev_path"]
if not dev_path_in_query:
raise Exception("Could not find a device path for Encryption Item: {0}".format(json.dumps(encryption_item)))
devices = disk_util.get_device_items(dev_path_in_query)
if len(devices) != 1:
logger.log(msg=("the device with this path {0} have more than one sub device. so skip it.".format(dev_path_in_query)), level=CommonVariables.WarningLevel)
continue
else:
device_item = devices[0]
if device_item.file_system is None or device_item.file_system == "" or force:
if device_item.mount_point:
disk_util.swapoff()
disk_util.umount(device_item.mount_point)
mapper_name = str(uuid.uuid4())
logger.log("encrypting " + str(device_item))
encrypted_device_path = os.path.join(CommonVariables.dev_mapper_root, mapper_name)
try:
se_linux_status = None
if DistroPatcher.distro_info[0].lower() == 'centos' and DistroPatcher.distro_info[1].startswith('7.0'):
se_linux_status = encryption_environment.get_se_linux()
if se_linux_status.lower() == 'enforcing':
encryption_environment.disable_se_linux()
encrypt_result = disk_util.encrypt_disk(dev_path=dev_path_in_query, passphrase_file=passphrase, mapper_name=mapper_name, header_file=None)
finally:
if DistroPatcher.distro_info[0].lower() == 'centos' and DistroPatcher.distro_info[1].startswith('7.0'):
if se_linux_status is not None and se_linux_status.lower() == 'enforcing':
encryption_environment.enable_se_linux()
if encrypt_result == CommonVariables.process_success:
# TODO: let customer specify the default file system in the
# parameter
file_system = None
if "file_system" in encryption_item and encryption_item["file_system"] != "":
file_system = encryption_item["file_system"]
else:
file_system = CommonVariables.default_file_system
format_disk_result = disk_util.format_disk(dev_path=encrypted_device_path, file_system=file_system)
if format_disk_result != CommonVariables.process_success:
logger.log(msg=("format of disk {0} failed with result: {1}".format(encrypted_device_path, format_disk_result)), level=CommonVariables.ErrorLevel)
crypt_item_to_update = CryptItem()
crypt_item_to_update.mapper_name = mapper_name
crypt_item_to_update.dev_path = dev_path_in_query
crypt_item_to_update.luks_header_path = None
crypt_item_to_update.file_system = file_system
crypt_item_to_update.uses_cleartext_key = False
crypt_item_to_update.current_luks_slot = 0
if "name" in encryption_item and encryption_item["name"] != "":
crypt_item_to_update.mount_point = os.path.join("/mnt/", str(encryption_item["name"]))
else:
crypt_item_to_update.mount_point = os.path.join("/mnt/", mapper_name)
# allow override through the new full_mount_point field
if "full_mount_point" in encryption_item and encryption_item["full_mount_point"] != "":
crypt_item_to_update.mount_point = os.path.join(str(encryption_item["full_mount_point"]))
logger.log(msg="modifying/removing the entry for unencrypted drive in fstab", level=CommonVariables.InfoLevel)
disk_util.modify_fstab_entry_encrypt(crypt_item_to_update.mount_point, os.path.join(CommonVariables.dev_mapper_root, mapper_name))
disk_util.make_sure_path_exists(crypt_item_to_update.mount_point)
update_crypt_item_result = disk_util.add_crypt_item(crypt_item_to_update, passphrase)
if not update_crypt_item_result:
logger.log(msg="update crypt item failed", level=CommonVariables.ErrorLevel)
mount_result = disk_util.mount_filesystem(dev_path=encrypted_device_path, mount_point=crypt_item_to_update.mount_point)
logger.log(msg=("mount result is {0}".format(mount_result)))
else:
logger.log(msg="encryption failed with code {0}".format(encrypt_result), level=CommonVariables.ErrorLevel)
else:
logger.log(msg=("the item fstype is not empty {0}".format(device_item.file_system)))
def encrypt_inplace_without_seperate_header_file(passphrase_file,
device_item,
disk_util,
bek_util,
status_prefix='',
ongoing_item_config=None):
"""
if ongoing_item_config is not None, then this is a resume case.
this function will return the phase
"""
logger.log("encrypt_inplace_without_seperate_header_file")
current_phase = CommonVariables.EncryptionPhaseBackupHeader
if ongoing_item_config is None:
ongoing_item_config = OnGoingItemConfig(encryption_environment=encryption_environment, logger=logger)
ongoing_item_config.current_block_size = CommonVariables.default_block_size
ongoing_item_config.current_slice_index = 0
ongoing_item_config.device_size = device_item.size
ongoing_item_config.file_system = device_item.file_system
ongoing_item_config.luks_header_file_path = None
ongoing_item_config.mapper_name = str(uuid.uuid4())
ongoing_item_config.mount_point = device_item.mount_point
if os.path.exists(os.path.join('/dev/', device_item.name)):
ongoing_item_config.original_dev_name_path = os.path.join('/dev/', device_item.name)
ongoing_item_config.original_dev_path = os.path.join('/dev/', device_item.name)
else:
ongoing_item_config.original_dev_name_path = os.path.join('/dev/mapper/', device_item.name)
ongoing_item_config.original_dev_path = os.path.join('/dev/mapper/', device_item.name)
ongoing_item_config.phase = CommonVariables.EncryptionPhaseBackupHeader
ongoing_item_config.commit()
else:
logger.log(msg="ongoing item config is not none, this is resuming, info: {0}".format(ongoing_item_config),
level=CommonVariables.WarningLevel)
logger.log(msg=("encrypting device item: {0}".format(ongoing_item_config.get_original_dev_path())))
# we only support ext file systems.
current_phase = ongoing_item_config.get_phase()
original_dev_path = ongoing_item_config.get_original_dev_path()
mapper_name = ongoing_item_config.get_mapper_name()
device_size = ongoing_item_config.get_device_size()
luks_header_size = CommonVariables.luks_header_size
size_shrink_to = (device_size - luks_header_size) / CommonVariables.sector_size
while current_phase != CommonVariables.EncryptionPhaseDone:
if current_phase == CommonVariables.EncryptionPhaseBackupHeader:
logger.log(msg="the current phase is " + str(CommonVariables.EncryptionPhaseBackupHeader),
level=CommonVariables.InfoLevel)
# log an appropriate warning if the file system type is not supported
device_fs = ongoing_item_config.get_file_system().lower()
if not device_fs in CommonVariables.inplace_supported_file_systems:
if device_fs in CommonVariables.format_supported_file_systems:
msg = "Encrypting {0} file system is not supported for data-preserving encryption. Consider using the encrypt-format-all option.".format(device_fs)
else:
msg = "AzureDiskEncryption does not support the {0} file system".format(device_fs)
logger.log(msg=msg, level=CommonVariables.WarningLevel)
ongoing_item_config.clear_config()
return current_phase
chk_shrink_result = disk_util.check_shrink_fs(dev_path=original_dev_path, size_shrink_to=size_shrink_to)
if chk_shrink_result != CommonVariables.process_success:
logger.log(msg="check shrink fs failed with code {0} for {1}".format(chk_shrink_result, original_dev_path),
level=CommonVariables.ErrorLevel)
logger.log(msg="your file system may not have enough space to do the encryption.")
# remove the ongoing item.
ongoing_item_config.clear_config()
return current_phase
else:
ongoing_item_config.current_slice_index = 0
ongoing_item_config.current_source_path = original_dev_path
ongoing_item_config.current_destination = encryption_environment.copy_header_slice_file_path
ongoing_item_config.current_total_copy_size = CommonVariables.default_block_size
ongoing_item_config.from_end = False
ongoing_item_config.header_slice_file_path = encryption_environment.copy_header_slice_file_path
ongoing_item_config.original_dev_path = original_dev_path
ongoing_item_config.commit()
if os.path.exists(encryption_environment.copy_header_slice_file_path):
logger.log(msg="the header slice file is there, remove it.", level=CommonVariables.WarningLevel)
os.remove(encryption_environment.copy_header_slice_file_path)
copy_result = disk_util.copy(ongoing_item_config=ongoing_item_config, status_prefix=status_prefix)
if copy_result != CommonVariables.process_success:
logger.log(msg="copy the header block failed, return code is: {0}".format(copy_result),
level=CommonVariables.ErrorLevel)
return current_phase
else:
ongoing_item_config.current_slice_index = 0
ongoing_item_config.phase = CommonVariables.EncryptionPhaseEncryptDevice
ongoing_item_config.commit()
current_phase = CommonVariables.EncryptionPhaseEncryptDevice
elif current_phase == CommonVariables.EncryptionPhaseEncryptDevice:
logger.log(msg="the current phase is {0}".format(CommonVariables.EncryptionPhaseEncryptDevice),
level=CommonVariables.InfoLevel)
encrypt_result = disk_util.encrypt_disk(dev_path=original_dev_path,
passphrase_file=passphrase_file,
mapper_name=mapper_name,
header_file=None)
# after encrypt_disk without a separate header, the uuid
# of the device would change.
if encrypt_result != CommonVariables.process_success:
logger.log(msg="encrypt file system failed.", level=CommonVariables.ErrorLevel)
return current_phase
else:
ongoing_item_config.current_slice_index = 0
ongoing_item_config.phase = CommonVariables.EncryptionPhaseCopyData
ongoing_item_config.commit()
current_phase = CommonVariables.EncryptionPhaseCopyData
elif current_phase == CommonVariables.EncryptionPhaseCopyData:
logger.log(msg="the current phase is {0}".format(CommonVariables.EncryptionPhaseCopyData),
level=CommonVariables.InfoLevel)
device_mapper_path = os.path.join(CommonVariables.dev_mapper_root, mapper_name)
ongoing_item_config.current_destination = device_mapper_path
ongoing_item_config.current_source_path = original_dev_path
ongoing_item_config.current_total_copy_size = (device_size - luks_header_size)
ongoing_item_config.from_end = True
ongoing_item_config.phase = CommonVariables.EncryptionPhaseCopyData
ongoing_item_config.commit()
copy_result = disk_util.copy(ongoing_item_config=ongoing_item_config, status_prefix=status_prefix)
if copy_result != CommonVariables.process_success:
logger.log(msg="copy the main content block failed, return code is: {0}".format(copy_result),
level=CommonVariables.ErrorLevel)
return current_phase
else:
ongoing_item_config.phase = CommonVariables.EncryptionPhaseRecoverHeader
ongoing_item_config.commit()
current_phase = CommonVariables.EncryptionPhaseRecoverHeader
elif current_phase == CommonVariables.EncryptionPhaseRecoverHeader:
logger.log(msg="the current phase is " + str(CommonVariables.EncryptionPhaseRecoverHeader),
level=CommonVariables.InfoLevel)
ongoing_item_config.from_end = False
backed_up_header_slice_file_path = ongoing_item_config.get_header_slice_file_path()
ongoing_item_config.current_slice_index = 0
ongoing_item_config.current_source_path = backed_up_header_slice_file_path
device_mapper_path = os.path.join(CommonVariables.dev_mapper_root, mapper_name)
ongoing_item_config.current_destination = device_mapper_path
ongoing_item_config.current_total_copy_size = CommonVariables.default_block_size
ongoing_item_config.commit()
copy_result = disk_util.copy(ongoing_item_config=ongoing_item_config, status_prefix=status_prefix)
if copy_result == CommonVariables.process_success:
crypt_item_to_update = CryptItem()
crypt_item_to_update.mapper_name = mapper_name
original_dev_name_path = ongoing_item_config.get_original_dev_name_path()
crypt_item_to_update.dev_path = disk_util.get_persistent_path_by_sdx_path(original_dev_name_path)
crypt_item_to_update.luks_header_path = "None"
crypt_item_to_update.file_system = ongoing_item_config.get_file_system()
crypt_item_to_update.uses_cleartext_key = False
crypt_item_to_update.current_luks_slot = 0
# if the original mountpoint is empty, then leave
# it as None
mount_point = ongoing_item_config.get_mount_point()
if mount_point == "" or mount_point is None:
crypt_item_to_update.mount_point = "None"
else:
crypt_item_to_update.mount_point = mount_point
update_crypt_item_result = disk_util.add_crypt_item(crypt_item_to_update, passphrase_file)
if not update_crypt_item_result:
logger.log(msg="update crypt item failed", level=CommonVariables.ErrorLevel)
if mount_point:
logger.log(msg="removing entry for unencrypted drive from fstab",
level=CommonVariables.InfoLevel)
disk_util.modify_fstab_entry_encrypt(mount_point, os.path.join(CommonVariables.dev_mapper_root, mapper_name))
else:
logger.log(msg=original_dev_name_path + " is not defined in fstab, no need to update",
level=CommonVariables.InfoLevel)
if os.path.exists(encryption_environment.copy_header_slice_file_path):
os.remove(encryption_environment.copy_header_slice_file_path)
current_phase = CommonVariables.EncryptionPhaseDone
ongoing_item_config.phase = current_phase
ongoing_item_config.commit()
expand_fs_result = disk_util.expand_fs(dev_path=device_mapper_path)
if crypt_item_to_update.mount_point != "None":
disk_util.mount_filesystem(device_mapper_path, ongoing_item_config.get_mount_point())
else:
logger.log("the crypt_item_to_update.mount_point is None, so we do not mount it.")
ongoing_item_config.clear_config()
if expand_fs_result != CommonVariables.process_success:
logger.log(msg="expand fs result is: {0}".format(expand_fs_result),
level=CommonVariables.ErrorLevel)
return current_phase
else:
logger.log(msg="recover header failed result is: {0}".format(copy_result),
level=CommonVariables.ErrorLevel)
return current_phase
def encrypt_inplace_with_seperate_header_file(passphrase_file,
device_item,
disk_util,
bek_util,
status_prefix='',
ongoing_item_config=None):
"""
if ongoing_item_config is not None, then this is a resume case.
"""
logger.log("encrypt_inplace_with_seperate_header_file")
current_phase = CommonVariables.EncryptionPhaseEncryptDevice
if ongoing_item_config is None:
ongoing_item_config = OnGoingItemConfig(encryption_environment=encryption_environment,
logger=logger)
mapper_name = str(uuid.uuid4())
ongoing_item_config.current_block_size = CommonVariables.default_block_size
ongoing_item_config.current_slice_index = 0
ongoing_item_config.device_size = device_item.size
ongoing_item_config.file_system = device_item.file_system
ongoing_item_config.mapper_name = mapper_name
ongoing_item_config.mount_point = device_item.mount_point
# TODO improve this.
if os.path.exists(os.path.join('/dev/', device_item.name)):
ongoing_item_config.original_dev_name_path = os.path.join('/dev/', device_item.name)
else:
ongoing_item_config.original_dev_name_path = os.path.join('/dev/mapper/', device_item.name)
ongoing_item_config.original_dev_path = os.path.join('/dev/disk/by-uuid', device_item.uuid)
luks_header_file_path = disk_util.create_luks_header(mapper_name=mapper_name)
if luks_header_file_path is None:
logger.log(msg="create header file failed", level=CommonVariables.ErrorLevel)
return current_phase
else:
ongoing_item_config.luks_header_file_path = luks_header_file_path
ongoing_item_config.phase = CommonVariables.EncryptionPhaseEncryptDevice
ongoing_item_config.commit()
else:
logger.log(msg="ongoing item config is not none, this is resuming: {0}".format(ongoing_item_config),
level=CommonVariables.WarningLevel)
current_phase = ongoing_item_config.get_phase()
while current_phase != CommonVariables.EncryptionPhaseDone:
if current_phase == CommonVariables.EncryptionPhaseEncryptDevice:
try:
mapper_name = ongoing_item_config.get_mapper_name()
original_dev_path = ongoing_item_config.get_original_dev_path()
luks_header_file_path = ongoing_item_config.get_header_file_path()
toggle_se_linux_for_centos7(True)
encrypt_result = disk_util.encrypt_disk(dev_path=original_dev_path,
passphrase_file=passphrase_file,
mapper_name=mapper_name,
header_file=luks_header_file_path)
if encrypt_result != CommonVariables.process_success:
logger.log(msg="the encrypton for {0} failed".format(original_dev_path),
level=CommonVariables.ErrorLevel)
return current_phase
else:
ongoing_item_config.phase = CommonVariables.EncryptionPhaseCopyData
ongoing_item_config.commit()
current_phase = CommonVariables.EncryptionPhaseCopyData
finally:
toggle_se_linux_for_centos7(False)
elif current_phase == CommonVariables.EncryptionPhaseCopyData:
try:
mapper_name = ongoing_item_config.get_mapper_name()
original_dev_path = ongoing_item_config.get_original_dev_path()
luks_header_file_path = ongoing_item_config.get_header_file_path()
toggle_se_linux_for_centos7(True)
device_mapper_path = os.path.join("/dev/mapper", mapper_name)
if not os.path.exists(device_mapper_path):
open_result = disk_util.luks_open(passphrase_file=passphrase_file,
dev_path=original_dev_path,
mapper_name=mapper_name,
header_file=luks_header_file_path,
uses_cleartext_key=False)
if open_result != CommonVariables.process_success:
logger.log(msg="the luks open for {0} failed.".format(original_dev_path),
level=CommonVariables.ErrorLevel)
return current_phase
else:
logger.log(msg="the device mapper path existed, so skip the luks open.",
level=CommonVariables.InfoLevel)
device_size = ongoing_item_config.get_device_size()
current_slice_index = ongoing_item_config.get_current_slice_index()
if current_slice_index is None:
ongoing_item_config.current_slice_index = 0
ongoing_item_config.current_source_path = original_dev_path
ongoing_item_config.current_destination = device_mapper_path
ongoing_item_config.current_total_copy_size = device_size
ongoing_item_config.from_end = True
ongoing_item_config.commit()
copy_result = disk_util.copy(ongoing_item_config=ongoing_item_config, status_prefix=status_prefix)
if copy_result != CommonVariables.process_success:
error_message = "the copy result is {0}, so skipping the mount".format(copy_result)
logger.log(msg=error_message, level=CommonVariables.ErrorLevel)
return current_phase
else:
crypt_item_to_update = CryptItem()
crypt_item_to_update.mapper_name = mapper_name
original_dev_name_path = ongoing_item_config.get_original_dev_name_path()
crypt_item_to_update.dev_path = disk_util.get_persistent_path_by_sdx_path(original_dev_name_path)
crypt_item_to_update.luks_header_path = luks_header_file_path
crypt_item_to_update.file_system = ongoing_item_config.get_file_system()
crypt_item_to_update.uses_cleartext_key = False
crypt_item_to_update.current_luks_slot = 0
# if the original mount point is empty, record the
# mount point as the string "None"
mount_point = ongoing_item_config.get_mount_point()
if mount_point is None or mount_point == "":
crypt_item_to_update.mount_point = "None"
else:
crypt_item_to_update.mount_point = mount_point
update_crypt_item_result = disk_util.add_crypt_item(crypt_item_to_update, passphrase_file)
if not update_crypt_item_result:
logger.log(msg="update crypt item failed", level=CommonVariables.ErrorLevel)
if crypt_item_to_update.mount_point != "None":
disk_util.mount_filesystem(device_mapper_path, mount_point)
else:
logger.log("the crypt_item_to_update.mount_point is None, so we do not mount it.")
if mount_point:
logger.log(msg="removing entry for unencrypted drive from fstab",
level=CommonVariables.InfoLevel)
disk_util.modify_fstab_entry_encrypt(mount_point, os.path.join(CommonVariables.dev_mapper_root, mapper_name))
else:
logger.log(msg=original_dev_name_path + " is not defined in fstab, no need to update",
level=CommonVariables.InfoLevel)
current_phase = CommonVariables.EncryptionPhaseDone
ongoing_item_config.phase = current_phase
ongoing_item_config.commit()
ongoing_item_config.clear_config()
return current_phase
finally:
toggle_se_linux_for_centos7(False)
def decrypt_inplace_copy_data(passphrase_file,
crypt_item,
raw_device_item,
mapper_device_item,
disk_util,
status_prefix='',
ongoing_item_config=None):
logger.log(msg="decrypt_inplace_copy_data")
if ongoing_item_config:
logger.log(msg="ongoing item config is not none, resuming decryption, info: {0}".format(ongoing_item_config),
level=CommonVariables.WarningLevel)
else:
logger.log(msg="starting decryption of {0}".format(crypt_item))
ongoing_item_config = OnGoingItemConfig(encryption_environment=encryption_environment, logger=logger)
ongoing_item_config.current_destination = crypt_item.dev_path
ongoing_item_config.current_source_path = os.path.join(CommonVariables.dev_mapper_root,
crypt_item.mapper_name)
ongoing_item_config.current_total_copy_size = mapper_device_item.size
ongoing_item_config.from_end = True
ongoing_item_config.phase = CommonVariables.DecryptionPhaseCopyData
ongoing_item_config.current_slice_index = 0
ongoing_item_config.current_block_size = CommonVariables.default_block_size
ongoing_item_config.mount_point = crypt_item.mount_point
ongoing_item_config.commit()
current_phase = ongoing_item_config.get_phase()
while current_phase != CommonVariables.DecryptionPhaseDone:
logger.log(msg=("the current phase is {0}".format(CommonVariables.EncryptionPhaseBackupHeader)),
level=CommonVariables.InfoLevel)
if current_phase == CommonVariables.DecryptionPhaseCopyData:
copy_result = disk_util.copy(ongoing_item_config=ongoing_item_config, status_prefix=status_prefix)
if copy_result == CommonVariables.process_success:
mount_point = ongoing_item_config.get_mount_point()
if mount_point and mount_point != "None":
logger.log(msg="restoring entry for unencrypted drive from fstab", level=CommonVariables.InfoLevel)
disk_util.restore_mount_info(ongoing_item_config.get_mount_point())
elif crypt_item.mapper_name:
disk_util.restore_mount_info(crypt_item.mapper_name)
else:
logger.log(msg=crypt_item.dev_path + " was not in fstab when encryption was enabled, no need to restore",
level=CommonVariables.InfoLevel)
ongoing_item_config.phase = CommonVariables.DecryptionPhaseDone
ongoing_item_config.commit()
current_phase = CommonVariables.DecryptionPhaseDone
else:
logger.log(msg="decryption: block copy failed, result: {0}".format(copy_result),
level=CommonVariables.ErrorLevel)
return current_phase
ongoing_item_config.clear_config()
return current_phase
def decrypt_inplace_without_separate_header_file(passphrase_file,
crypt_item,
raw_device_item,
mapper_device_item,
disk_util,
status_prefix='',
ongoing_item_config=None):
logger.log(msg="decrypt_inplace_without_separate_header_file")
proc_comm = ProcessCommunicator()
executor = CommandExecutor(logger)
executor.Execute(DistroPatcher.cryptsetup_path + " luksDump " + crypt_item.dev_path, communicator=proc_comm)
luks_header_size = int(re.findall(r"Payload.*?(\d+)", proc_comm.stdout)[0]) * CommonVariables.sector_size
if raw_device_item.size - mapper_device_item.size != luks_header_size:
logger.log(msg="mismatch between raw and mapper device found for crypt_item {0}".format(crypt_item),
level=CommonVariables.ErrorLevel)
logger.log(msg="raw_device_item: {0}".format(raw_device_item),
level=CommonVariables.ErrorLevel)
logger.log(msg="mapper_device_item {0}".format(mapper_device_item),
level=CommonVariables.ErrorLevel)
return None
return decrypt_inplace_copy_data(passphrase_file,
crypt_item,
raw_device_item,
mapper_device_item,
disk_util,
status_prefix,
ongoing_item_config)
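# Worked example (illustrative, not part of the original handler): the size check
# above derives the LUKS header length from the "Payload offset" that
# `cryptsetup luksDump` reports in sectors. Assuming CommonVariables.sector_size
# is 512 bytes, a typical LUKS1 payload offset of 4096 sectors gives
#     luks_header_size = 4096 * 512 = 2097152 bytes (2 MiB)
# which should equal raw_device_item.size - mapper_device_item.size when the
# header is stored on the device itself.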
def decrypt_inplace_with_separate_header_file(passphrase_file,
crypt_item,
raw_device_item,
mapper_device_item,
disk_util,
status_prefix='',
ongoing_item_config=None):
logger.log(msg="decrypt_inplace_with_separate_header_file")
if raw_device_item.size != mapper_device_item.size:
logger.log(msg="mismatch between raw and mapper device found for crypt_item {0}".format(crypt_item),
level=CommonVariables.ErrorLevel)
logger.log(msg="raw_device_item: {0}".format(raw_device_item),
level=CommonVariables.ErrorLevel)
logger.log(msg="mapper_device_item {0}".format(mapper_device_item),
level=CommonVariables.ErrorLevel)
return None
return decrypt_inplace_copy_data(passphrase_file,
crypt_item,
raw_device_item,
mapper_device_item,
disk_util,
status_prefix,
ongoing_item_config)
def enable_encryption_all_format(passphrase_file, encryption_marker, disk_util, bek_util):
"""
In case of success return None, otherwise return the device item which failed.
"""
logger.log(msg="executing the enable_encryption_all_format command")
device_items = find_all_devices_to_encrypt(encryption_marker, disk_util, bek_util)
# Don't encrypt partitions that are not even mounted
# (materialize as lists so that len() below also works on Python 3)
device_items_to_encrypt = [di for di in device_items if di.mount_point is not None and di.mount_point != ""]
dev_path_reference_table = disk_util.get_block_device_to_azure_udev_table()
device_items_to_encrypt = [di for di in device_items_to_encrypt if os.path.join('/dev/', di.name) in dev_path_reference_table]
msg = 'Encrypting and formatting {0} data volumes'.format(len(device_items_to_encrypt))
logger.log(msg)
hutil.do_status_report(operation='EnableEncryptionFormatAll',
status=CommonVariables.extension_success_status,
status_code=str(CommonVariables.success),
message=msg)
return encrypt_format_device_items(passphrase_file, device_items_to_encrypt, disk_util, True)
def encrypt_format_device_items(passphrase, device_items, disk_util, force=False):
"""
Formats and encrypts the block devices represented by the supplied device_items.
This is done by constructing a disk format query based on the supplied device items
and passing it on to the enable_encryption_format method.
Returns None if all items are successfully format-encrypted.
Otherwise returns the device item which failed.
"""
# use the new udev names for formatting and later on for cryptmounting
dev_path_reference_table = disk_util.get_block_device_to_azure_udev_table()
def single_device_item_to_format_query_dict(device_item):
"""
Converts a single device_item into a dictionary that will later be serialized to JSON.
"""
format_query_element = {}
dev_path = os.path.join('/dev/', device_item.name)
if dev_path in dev_path_reference_table:
format_query_element["dev_path"] = dev_path_reference_table[dev_path]
else:
format_query_element["dev_path"] = dev_path
# introduce a new "full_mount_point" field below to avoid the /mnt/ prefix that automatically gets appended
format_query_element["full_mount_point"] = str(device_item.mount_point)
format_query_element["file_system"] = str(device_item.file_system)
return format_query_element
disk_format_query = json.dumps([single_device_item_to_format_query_dict(di) for di in device_items])
return enable_encryption_format(passphrase, disk_format_query, disk_util, force)
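# Illustrative sketch (not part of the original handler): the disk_format_query
# built above is a JSON list with one entry per device item, for example:
#
#   [{"dev_path": "/dev/disk/azure/scsi1/lun0",   # Azure udev name when available
#     "full_mount_point": "/data0",               # hypothetical mount point
#     "file_system": "ext4"},
#    {"dev_path": "/dev/sdd",                     # raw path when no udev mapping exists
#     "full_mount_point": "/data1",
#     "file_system": "xfs"}]
#
# enable_encryption_format then formats and encrypts each listed device.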
def find_all_devices_to_encrypt(encryption_marker, disk_util, bek_util):
device_items = disk_util.get_device_items(None)
device_items_to_encrypt = []
special_azure_devices_to_skip = disk_util.get_azure_devices()
for device_item in device_items:
logger.log("device_item == " + str(device_item))
should_skip = disk_util.should_skip_for_inplace_encryption(device_item, special_azure_devices_to_skip, encryption_marker.get_volume_type())
if not should_skip and \
not any(di.name == device_item.name for di in device_items_to_encrypt):
device_items_to_encrypt.append(device_item)
return device_items_to_encrypt
def enable_encryption_all_in_place(passphrase_file, encryption_marker, disk_util, bek_util):
"""
Returns None on success; otherwise returns the device item which failed.
"""
logger.log(msg="executing the enable_encryption_all_in_place command.")
device_items_to_encrypt = find_all_devices_to_encrypt(encryption_marker, disk_util, bek_util)
msg = 'Encrypting {0} data volumes'.format(len(device_items_to_encrypt))
logger.log(msg)
hutil.do_status_report(operation='EnableEncryption',
status=CommonVariables.extension_success_status,
status_code=str(CommonVariables.success),
message=msg)
for device_num, device_item in enumerate(device_items_to_encrypt):
umount_status_code = CommonVariables.success
if device_item.mount_point is not None and device_item.mount_point != "":
umount_status_code = disk_util.umount(device_item.mount_point)
if umount_status_code != CommonVariables.success:
logger.log("error occured when do the umount for: {0} with code: {1}".format(device_item.mount_point, umount_status_code))
else:
logger.log(msg=("encrypting: {0}".format(device_item)))
no_header_file_support = not_support_header_option_distro(DistroPatcher)
status_prefix = "Encrypting data volume {0}/{1}".format(device_num + 1,
len(device_items_to_encrypt))
# TODO check the file system before encrypting it.
if no_header_file_support:
logger.log(msg="this is the centos 6 or redhat 6 or sles 11 series, need to resize data drive",
level=CommonVariables.WarningLevel)
encryption_result_phase = encrypt_inplace_without_seperate_header_file(passphrase_file=passphrase_file,
device_item=device_item,
disk_util=disk_util,
bek_util=bek_util,
status_prefix=status_prefix)
else:
encryption_result_phase = encrypt_inplace_with_seperate_header_file(passphrase_file=passphrase_file,
device_item=device_item,
disk_util=disk_util,
bek_util=bek_util,
status_prefix=status_prefix)
if encryption_result_phase == CommonVariables.EncryptionPhaseDone:
continue
else:
# stop this round and return the failed device item to the caller
return device_item
return None
def disable_encryption_all_in_place(passphrase_file, decryption_marker, disk_util):
"""
On success, returns None. Otherwise returns the crypt item for which decryption failed.
"""
logger.log(msg="executing disable_encryption_all_in_place")
device_items = disk_util.get_device_items(None)
crypt_items = disk_util.get_crypt_items()
msg = 'Decrypting {0} data volumes'.format(len(crypt_items))
logger.log(msg)
hutil.do_status_report(operation='DisableEncryption',
status=CommonVariables.extension_success_status,
status_code=str(CommonVariables.success),
message=msg)
for crypt_item_num, crypt_item in enumerate(crypt_items):
logger.log("processing crypt_item: " + str(crypt_item))
def raw_device_item_match(device_item):
sdx_device_name = os.path.join("/dev/", device_item.name)
if crypt_item.dev_path.startswith(CommonVariables.disk_by_id_root):
return crypt_item.dev_path == disk_util.query_dev_id_path_by_sdx_path(sdx_device_name)
else:
return crypt_item.dev_path == sdx_device_name
def mapped_device_item_match(device_item):
return crypt_item.mapper_name == device_item.name
raw_device_item = next((d for d in device_items if raw_device_item_match(d)), None)
mapper_device_item = next((d for d in device_items if mapped_device_item_match(d)), None)
if not raw_device_item:
logger.log("raw device not found for crypt_item {0}".format(crypt_item), level='Warn')
logger.log("Skipping device", level='Warn')
continue
if not mapper_device_item:
logger.log("mapper device not found for crypt_item {0}".format(crypt_item))
if disk_util.is_luks_device(crypt_item.dev_path, crypt_item.luks_header_path):
logger.log("Found a luks device for this device item, yet couldn't open mapper: {0}".format(crypt_item))
logger.log("Failing".format(crypt_item))
return crypt_item
else:
continue
decryption_result_phase = None
status_prefix = "Decrypting data volume {0}/{1}".format(crypt_item_num + 1,
len(crypt_items))
if crypt_item.luks_header_path:
decryption_result_phase = decrypt_inplace_with_separate_header_file(passphrase_file=passphrase_file,
crypt_item=crypt_item,
raw_device_item=raw_device_item,
mapper_device_item=mapper_device_item,
disk_util=disk_util,
status_prefix=status_prefix)
else:
decryption_result_phase = decrypt_inplace_without_separate_header_file(passphrase_file=passphrase_file,
crypt_item=crypt_item,
raw_device_item=raw_device_item,
mapper_device_item=mapper_device_item,
disk_util=disk_util,
status_prefix=status_prefix)
if decryption_result_phase == CommonVariables.DecryptionPhaseDone:
disk_util.luks_close(crypt_item.mapper_name)
disk_util.remove_crypt_item(crypt_item)
#disk_util.mount_all()
continue
else:
# decryption failed for a crypt_item, return the failed item to caller
return crypt_item
disk_util.mount_all()
return None
def daemon_encrypt():
# Ensure the same configuration is executed only once
# If the previous enable failed, we do not have retry logic here.
# TODO Remount all
encryption_marker = EncryptionMarkConfig(logger, encryption_environment)
if encryption_marker.config_file_exists():
logger.log("encryption is marked.")
"""
search for the bek volume, then mount it:)
"""
disk_util = DiskUtil(hutil, DistroPatcher, logger, encryption_environment)
encryption_config = EncryptionConfig(encryption_environment, logger)
bek_passphrase_file = None
"""
try to find the attached BEK volume and use its passphrase file to mount the encrypted volumes;
if the passphrase file is found, it will be re-used later on.
"""
bek_util = BekUtil(disk_util, logger)
if encryption_config.config_file_exists():
bek_passphrase_file = bek_util.get_bek_passphrase_file(encryption_config)
if bek_passphrase_file is None:
hutil.do_exit(exit_code=CommonVariables.passphrase_file_not_found,
operation='EnableEncryption',
status=CommonVariables.extension_error_status,
code=CommonVariables.passphrase_file_not_found,
message='Passphrase file not found.')
executor = CommandExecutor(logger)
is_not_in_stripped_os = bool(executor.Execute("mountpoint /oldroot"))
volume_type = encryption_config.get_volume_type().lower()
if (volume_type == CommonVariables.VolumeTypeData.lower() or volume_type == CommonVariables.VolumeTypeAll.lower()) and \
is_not_in_stripped_os:
try:
while not daemon_encrypt_data_volumes(encryption_marker=encryption_marker,
encryption_config=encryption_config,
disk_util=disk_util,
bek_util=bek_util,
bek_passphrase_file=bek_passphrase_file):
logger.log("Calling daemon_encrypt_data_volumes again")
except Exception as e:
message = "Failed to encrypt data volumes with error: {0}, stack trace: {1}".format(e, traceback.format_exc())
logger.log(msg=message, level=CommonVariables.ErrorLevel)
hutil.do_exit(exit_code=CommonVariables.encryption_failed,
operation='EnableEncryptionDataVolumes',
status=CommonVariables.extension_error_status,
code=CommonVariables.encryption_failed,
message=message)
else:
hutil.do_status_report(operation='EnableEncryptionDataVolumes',
status=CommonVariables.extension_success_status,
status_code=str(CommonVariables.success),
message='Encryption succeeded for data volumes')
disk_util.log_lsblk_output()
mount_encrypted_disks(disk_util, bek_util, bek_passphrase_file, encryption_config)
if volume_type == CommonVariables.VolumeTypeOS.lower() or \
volume_type == CommonVariables.VolumeTypeAll.lower():
# import OSEncryption here instead of at the top because it relies
# on pre-req packages being installed (specifically, python-six on Ubuntu)
distro_name = DistroPatcher.distro_info[0]
distro_version = DistroPatcher.distro_info[1]
os_encryption = None
if (((distro_name == 'redhat' and distro_version == '7.3') or
(distro_name == 'redhat' and distro_version == '7.4') or
(distro_name == 'redhat' and distro_version == '7.5') or
(distro_name == 'redhat' and distro_version == '7.6') or
(distro_name == 'redhat' and distro_version == '7.7')) and
(disk_util.is_os_disk_lvm() or os.path.exists('/volumes.lvm'))):
from oscrypto.rhel_72_lvm import RHEL72LVMEncryptionStateMachine
os_encryption = RHEL72LVMEncryptionStateMachine(hutil=hutil,
distro_patcher=DistroPatcher,
logger=logger,
encryption_environment=encryption_environment)
elif (((distro_name == 'centos' and distro_version == '7.3.1611') or
(distro_name == 'centos' and distro_version.startswith('7.4')) or
(distro_name == 'centos' and distro_version.startswith('7.5')) or
(distro_name == 'centos' and distro_version.startswith('7.6')) or
(distro_name == 'centos' and distro_version.startswith('7.7'))) and
(disk_util.is_os_disk_lvm() or os.path.exists('/volumes.lvm'))):
from oscrypto.rhel_72_lvm import RHEL72LVMEncryptionStateMachine
os_encryption = RHEL72LVMEncryptionStateMachine(hutil=hutil,
distro_patcher=DistroPatcher,
logger=logger,
encryption_environment=encryption_environment)
elif ((distro_name == 'redhat' and distro_version == '7.2') or
(distro_name == 'redhat' and distro_version == '7.3') or
(distro_name == 'redhat' and distro_version == '7.4') or
(distro_name == 'redhat' and distro_version == '7.5') or
(distro_name == 'redhat' and distro_version == '7.6') or
(distro_name == 'redhat' and distro_version == '7.7') or
(distro_name == 'centos' and distro_version.startswith('7.7')) or
(distro_name == 'centos' and distro_version.startswith('7.6')) or
(distro_name == 'centos' and distro_version.startswith('7.5')) or
(distro_name == 'centos' and distro_version.startswith('7.4')) or
(distro_name == 'centos' and distro_version == '7.3.1611') or
(distro_name == 'centos' and distro_version == '7.2.1511')):
from oscrypto.rhel_72 import RHEL72EncryptionStateMachine
os_encryption = RHEL72EncryptionStateMachine(hutil=hutil,
distro_patcher=DistroPatcher,
logger=logger,
encryption_environment=encryption_environment)
elif distro_name == 'redhat' and distro_version == '6.8':
from oscrypto.rhel_68 import RHEL68EncryptionStateMachine
os_encryption = RHEL68EncryptionStateMachine(hutil=hutil,
distro_patcher=DistroPatcher,
logger=logger,
encryption_environment=encryption_environment)
elif distro_name == 'centos' and (distro_version == '6.8' or distro_version == '6.9'):
from oscrypto.centos_68 import CentOS68EncryptionStateMachine
os_encryption = CentOS68EncryptionStateMachine(hutil=hutil,
distro_patcher=DistroPatcher,
logger=logger,
encryption_environment=encryption_environment)
elif distro_name == 'Ubuntu' and distro_version in ['16.04', '18.04']:
from oscrypto.ubuntu_1604 import Ubuntu1604EncryptionStateMachine
os_encryption = Ubuntu1604EncryptionStateMachine(hutil=hutil,
distro_patcher=DistroPatcher,
logger=logger,
encryption_environment=encryption_environment)
elif distro_name == 'Ubuntu' and distro_version == '14.04':
from oscrypto.ubuntu_1404 import Ubuntu1404EncryptionStateMachine
os_encryption = Ubuntu1404EncryptionStateMachine(hutil=hutil,
distro_patcher=DistroPatcher,
logger=logger,
encryption_environment=encryption_environment)
else:
message = "OS volume encryption is not supported on {0} {1}".format(distro_name,
distro_version)
logger.log(msg=message, level=CommonVariables.ErrorLevel)
hutil.do_exit(exit_code=CommonVariables.encryption_failed,
operation='EnableEncryptionOSVolume',
status=CommonVariables.extension_error_status,
code=CommonVariables.encryption_failed,
message=message)
try:
os_encryption.start_encryption()
if not os_encryption.state == 'completed':
raise Exception("did not reach completed state")
else:
encryption_marker.clear_config()
except Exception as e:
message = "Failed to encrypt OS volume with error: {0}, stack trace: {1}, machine state: {2}".format(e,
traceback.format_exc(),
os_encryption.state)
logger.log(msg=message, level=CommonVariables.ErrorLevel)
hutil.do_exit(exit_code=CommonVariables.encryption_failed,
operation='EnableEncryptionOSVolume',
status=CommonVariables.extension_error_status,
code=CommonVariables.encryption_failed,
message=message)
message = ''
if volume_type == CommonVariables.VolumeTypeAll.lower():
message = 'Encryption succeeded for all volumes'
else:
message = 'Encryption succeeded for OS volume'
logger.log(msg=message)
hutil.do_status_report(operation='EnableEncryptionOSVolume',
status=CommonVariables.extension_success_status,
status_code=str(CommonVariables.success),
message=message)
def daemon_encrypt_data_volumes(encryption_marker, encryption_config, disk_util, bek_util, bek_passphrase_file):
try:
"""
check whether there's a scheduled encryption task
"""
mount_all_result = disk_util.mount_all()
if mount_all_result != CommonVariables.process_success:
logger.log(msg="mount all failed with code:{0}".format(mount_all_result),
level=CommonVariables.ErrorLevel)
"""
TODO: resume encryption after an unexpected reboot.
Special handling is needed because a half-encrypted device can be left in an error
state (for example, with its file system header missing), so it must be identified.
"""
ongoing_item_config = OnGoingItemConfig(encryption_environment=encryption_environment, logger=logger)
if ongoing_item_config.config_file_exists():
logger.log("OngoingItemConfig exists.")
ongoing_item_config.load_value_from_file()
header_file_path = ongoing_item_config.get_header_file_path()
mount_point = ongoing_item_config.get_mount_point()
status_prefix = "Resuming encryption after reboot"
if not none_or_empty(mount_point):
logger.log("mount point is not empty {0}, trying to unmount it first.".format(mount_point))
umount_status_code = disk_util.umount(mount_point)
logger.log("unmount return code is {0}".format(umount_status_code))
if none_or_empty(header_file_path):
encryption_result_phase = encrypt_inplace_without_seperate_header_file(passphrase_file=bek_passphrase_file,
device_item=None,
disk_util=disk_util,
bek_util=bek_util,
status_prefix=status_prefix,
ongoing_item_config=ongoing_item_config)
# TODO mount it back when shrink failed
else:
encryption_result_phase = encrypt_inplace_with_seperate_header_file(passphrase_file=bek_passphrase_file,
device_item=None,
disk_util=disk_util,
bek_util=bek_util,
status_prefix=status_prefix,
ongoing_item_config=ongoing_item_config)
"""
if the resuming failed, we should fail.
"""
if encryption_result_phase != CommonVariables.EncryptionPhaseDone:
original_dev_path = ongoing_item_config.get_original_dev_path()
message = 'EnableEncryption: resuming encryption for {0} failed'.format(original_dev_path)
raise Exception(message)
else:
ongoing_item_config.clear_config()
else:
logger.log("OngoingItemConfig does not exist")
failed_item = None
if not encryption_marker.config_file_exists():
logger.log("Data volumes are not marked for encryption")
return True
if encryption_marker.get_current_command() == CommonVariables.EnableEncryption:
failed_item = enable_encryption_all_in_place(passphrase_file=bek_passphrase_file,
encryption_marker=encryption_marker,
disk_util=disk_util,
bek_util=bek_util)
elif encryption_marker.get_current_command() == CommonVariables.EnableEncryptionFormat:
disk_format_query = encryption_marker.get_encryption_disk_format_query()
failed_item = enable_encryption_format(passphrase=bek_passphrase_file,
disk_format_query=disk_format_query,
disk_util=disk_util)
elif encryption_marker.get_current_command() == CommonVariables.EnableEncryptionFormatAll:
failed_item = enable_encryption_all_format(passphrase_file=bek_passphrase_file,
encryption_marker=encryption_marker,
disk_util=disk_util,
bek_util=bek_util)
else:
message = "Command {0} not supported.".format(encryption_marker.get_current_command())
logger.log(msg=message, level=CommonVariables.ErrorLevel)
raise Exception(message)
if failed_item:
message = 'Encryption failed for {0}'.format(failed_item)
raise Exception(message)
else:
return True
except Exception:
raise
def daemon_decrypt():
decryption_marker = DecryptionMarkConfig(logger, encryption_environment)
if not decryption_marker.config_file_exists():
logger.log("decryption is not marked.")
return
logger.log("decryption is marked.")
# mount and then unmount all the encrypted items
# in order to set-up all the mapper devices
# we don't need the BEK since all the drives that need decryption were made cleartext-key unlockable by first call to disable
disk_util = DiskUtil(hutil, DistroPatcher, logger, encryption_environment)
encryption_config = EncryptionConfig(encryption_environment, logger)
mount_encrypted_disks(disk_util=disk_util,
bek_util=None,
encryption_config=encryption_config,
passphrase_file=None)
disk_util.umount_all_crypt_items()
# at this point all the /dev/mapper/* crypt devices should be open
ongoing_item_config = OnGoingItemConfig(encryption_environment=encryption_environment, logger=logger)
if ongoing_item_config.config_file_exists():
logger.log("ongoing item config exists.")
else:
logger.log("ongoing item config does not exist.")
failed_item = None
if decryption_marker.get_current_command() == CommonVariables.DisableEncryption:
failed_item = disable_encryption_all_in_place(passphrase_file=None,
decryption_marker=decryption_marker,
disk_util=disk_util)
else:
raise Exception("command {0} not supported.".format(decryption_marker.get_current_command()))
if failed_item is not None:
hutil.do_exit(exit_code=CommonVariables.encryption_failed,
operation='Disable',
status=CommonVariables.extension_error_status,
code=CommonVariables.encryption_failed,
message='Decryption failed for {0}'.format(failed_item))
else:
encryption_config.clear_config()
logger.log("clearing the decryption mark after successful decryption")
decryption_marker.clear_config()
hutil.do_exit(exit_code=0,
operation='Disable',
status=CommonVariables.extension_success_status,
code=str(CommonVariables.success),
message='Decryption succeeded')
def daemon():
hutil.find_last_nonquery_operation = True
hutil.do_parse_context('Executing')
lock = ProcessLock(logger, encryption_environment.daemon_lock_file_path)
if not lock.try_lock():
logger.log("there's another daemon running, please wait it to exit.", level=CommonVariables.WarningLevel)
return
logger.log("daemon lock acquired sucessfully.")
logger.log("waiting for 2 minutes before continuing the daemon")
time.sleep(120)
logger.log("Installing pre-requisites")
DistroPatcher.install_extras()
# try decrypt, if decryption marker exists
decryption_marker = DecryptionMarkConfig(logger, encryption_environment)
if decryption_marker.config_file_exists():
try:
daemon_decrypt()
except Exception as e:
error_msg = ("Failed to disable the extension with error: {0}, stack trace: {1}".format(e, traceback.format_exc()))
logger.log(msg=error_msg,
level=CommonVariables.ErrorLevel)
hutil.do_exit(exit_code=CommonVariables.encryption_failed,
operation='Disable',
status=CommonVariables.extension_error_status,
code=str(CommonVariables.encryption_failed),
message=error_msg)
finally:
lock.release_lock()
logger.log("returned to daemon")
logger.log("exiting daemon")
return
# try encrypt, in absence of decryption marker
try:
daemon_encrypt()
except Exception as e:
# mount the file systems back.
error_msg = ("Failed to enable the extension with error: {0}, stack trace: {1}".format(e, traceback.format_exc()))
logger.log(msg=error_msg,
level=CommonVariables.ErrorLevel)
hutil.do_exit(exit_code=CommonVariables.encryption_failed,
operation='Enable',
status=CommonVariables.extension_error_status,
code=str(CommonVariables.encryption_failed),
message=error_msg)
else:
encryption_marker = EncryptionMarkConfig(logger, encryption_environment)
# TODO do not remove it; back it up instead.
logger.log("returned to daemon successfully after encryption")
logger.log("clearing the encryption mark.")
encryption_marker.clear_config()
hutil.redo_current_status()
finally:
lock.release_lock()
logger.log("exiting daemon")
def start_daemon(operation):
# This process will start a new background process by calling
# extension_shim.sh -c handle.py -daemon
# to run the script and will exit itself immediately.
shim_path = os.path.join(os.getcwd(), CommonVariables.extension_shim_filename)
shim_opts = '-c ' + os.path.join(os.getcwd(), __file__) + ' -daemon'
args = [shim_path, shim_opts]
logger.log("start_daemon with args: {0}".format(args))
# Redirect stdout and stderr to /dev/null. Otherwise daemon process will
# throw a broken pipe exception when the parent process exits.
devnull = open(os.devnull, 'w')
subprocess.Popen(args, stdout=devnull, stderr=devnull)
encryption_config = EncryptionConfig(encryption_environment, logger)
if encryption_config.config_file_exists():
hutil.do_exit(exit_code=0,
operation=operation,
status=CommonVariables.extension_success_status,
code=str(CommonVariables.success),
message=encryption_config.get_secret_id())
else:
hutil.do_exit(exit_code=CommonVariables.encryption_failed,
operation=operation,
status=CommonVariables.extension_error_status,
code=str(CommonVariables.encryption_failed),
message='Encryption config not found.')
if __name__ == '__main__':
main()
|
"""Kazoo Exceptions"""
from collections import defaultdict
class KazooException(Exception):
"""Base Kazoo exception that all other kazoo library exceptions
inherit from"""
class ZookeeperError(KazooException):
"""Base Zookeeper exception for errors originating from the
Zookeeper server"""
class CancelledError(KazooException):
"""Raised when a process is cancelled by another thread"""
class ConfigurationError(KazooException):
"""Raised if the configuration arguments to an object are
invalid"""
class ZookeeperStoppedError(KazooException):
"""Raised when the kazoo client stopped (and thus not connected)"""
class ConnectionDropped(KazooException):
"""Internal error for jumping out of loops"""
class LockTimeout(KazooException):
"""Raised if failed to acquire a lock.
.. versionadded:: 1.1
"""
class WriterNotClosedException(KazooException):
"""Raised if the writer is unable to stop closing when requested.
.. versionadded:: 1.2
"""
def _invalid_error_code():
raise RuntimeError('Invalid error code')
EXCEPTIONS = defaultdict(_invalid_error_code)
def _zookeeper_exception(code):
def decorator(klass):
def create(*args, **kwargs):
return klass(args, kwargs)
EXCEPTIONS[code] = create
klass.code = code
return klass
return decorator
@_zookeeper_exception(0)
class RolledBackError(ZookeeperError):
pass
@_zookeeper_exception(-1)
class SystemZookeeperError(ZookeeperError):
pass
@_zookeeper_exception(-2)
class RuntimeInconsistency(ZookeeperError):
pass
@_zookeeper_exception(-3)
class DataInconsistency(ZookeeperError):
pass
@_zookeeper_exception(-4)
class ConnectionLoss(ZookeeperError):
pass
@_zookeeper_exception(-5)
class MarshallingError(ZookeeperError):
pass
@_zookeeper_exception(-6)
class UnimplementedError(ZookeeperError):
pass
@_zookeeper_exception(-7)
class OperationTimeoutError(ZookeeperError):
pass
@_zookeeper_exception(-8)
class BadArgumentsError(ZookeeperError):
pass
@_zookeeper_exception(-100)
class APIError(ZookeeperError):
pass
@_zookeeper_exception(-101)
class NoNodeError(ZookeeperError):
pass
@_zookeeper_exception(-102)
class NoAuthError(ZookeeperError):
pass
@_zookeeper_exception(-103)
class BadVersionError(ZookeeperError):
pass
@_zookeeper_exception(-108)
class NoChildrenForEphemeralsError(ZookeeperError):
pass
@_zookeeper_exception(-110)
class NodeExistsError(ZookeeperError):
pass
@_zookeeper_exception(-111)
class NotEmptyError(ZookeeperError):
pass
@_zookeeper_exception(-112)
class SessionExpiredError(ZookeeperError):
pass
@_zookeeper_exception(-113)
class InvalidCallbackError(ZookeeperError):
pass
@_zookeeper_exception(-114)
class InvalidACLError(ZookeeperError):
pass
@_zookeeper_exception(-115)
class AuthFailedError(ZookeeperError):
pass
@_zookeeper_exception(-118)
class SessionMovedError(ZookeeperError):
pass
@_zookeeper_exception(-119)
class NotReadOnlyCallError(ZookeeperError):
"""An API call that is not read-only was used while connected to
a read-only server"""
class ConnectionClosedError(SessionExpiredError):
"""Connection is closed"""
ConnectionLossException = ConnectionLoss
MarshallingErrorException = MarshallingError
SystemErrorException = SystemZookeeperError
RuntimeInconsistencyException = RuntimeInconsistency
DataInconsistencyException = DataInconsistency
UnimplementedException = UnimplementedError
OperationTimeoutException = OperationTimeoutError
BadArgumentsException = BadArgumentsError
ApiErrorException = APIError
NoNodeException = NoNodeError
NoAuthException = NoAuthError
BadVersionException = BadVersionError
NoChildrenForEphemeralsException = NoChildrenForEphemeralsError
NodeExistsException = NodeExistsError
InvalidACLException = InvalidACLError
AuthFailedException = AuthFailedError
NotEmptyException = NotEmptyError
SessionExpiredException = SessionExpiredError
InvalidCallbackException = InvalidCallbackError
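# Illustrative usage sketch (not part of kazoo itself): EXCEPTIONS maps ZooKeeper
# server error codes to factories that build the matching exception class.
def _example_raise_for_code(code=-101):
    """Raise the kazoo exception registered for a ZooKeeper error code.

    Code -101 is registered to NoNodeError above; looking up an unknown code
    triggers _invalid_error_code() and raises RuntimeError instead.
    """
    raise EXCEPTIONS[code]()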
|
import sqlalchemy as sa
from quantum.db import model_base
class NetworkState(model_base.BASEV2):
"""Represents state of vlan_id on physical network"""
__tablename__ = 'network_states'
physical_network = sa.Column(sa.String(64), nullable=False,
primary_key=True)
vlan_id = sa.Column(sa.Integer, nullable=False, primary_key=True,
autoincrement=False)
allocated = sa.Column(sa.Boolean, nullable=False)
def __init__(self, physical_network, vlan_id):
self.physical_network = physical_network
self.vlan_id = vlan_id
self.allocated = False
def __repr__(self):
return "<NetworkState(%s,%d,%s)>" % (self.physical_network,
self.vlan_id, self.allocated)
class NetworkBinding(model_base.BASEV2):
"""Represents binding of virtual network to physical_network and vlan_id"""
__tablename__ = 'network_bindings'
network_id = sa.Column(sa.String(36),
sa.ForeignKey('networks.id', ondelete="CASCADE"),
primary_key=True)
physical_network = sa.Column(sa.String(64))
vlan_id = sa.Column(sa.Integer, nullable=False)
def __init__(self, network_id, physical_network, vlan_id):
self.network_id = network_id
self.physical_network = physical_network
self.vlan_id = vlan_id
def __repr__(self):
return "<NetworkBinding(%s,%s,%d)>" % (self.network_id,
self.physical_network,
self.vlan_id)
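# Minimal usage sketch (illustrative only, not part of the original module); the
# network id below is a hypothetical placeholder, and persisting these objects
# would additionally require a SQLAlchemy session bound to the quantum database.
def _example_models():
    state = NetworkState('physnet1', 100)                            # allocated defaults to False
    binding = NetworkBinding('example-network-id', 'physnet1', 100)
    return state, binding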
|
"""Train a simple TF classifier for MNIST dataset.
This example comes from the cloudml-samples keras demo.
github.com/GoogleCloudPlatform/cloudml-samples/blob/master/census/tf-keras
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os
from six.moves import urllib
import tempfile
import numpy as np
import pandas as pd
import tensorflow.compat.v1 as tf
DATA_DIR = os.path.join(tempfile.gettempdir(), "taxi_data")
DATA_URL = ("https://storage.googleapis.com/cloud-samples-data/ml-engine/chicago_taxi/training/small/")
TRAINING_FILE = "taxi_trips_train.csv"
EVAL_FILE = "taxi_trips_eval.csv"
TRAINING_URL = os.path.join(DATA_URL, TRAINING_FILE)
EVAL_URL = os.path.join(DATA_URL, EVAL_FILE)
_CSV_COLUMNS = [
"tip", "trip_miles", "trip_seconds", "fare", "trip_start_month",
"trip_start_hour", "trip_start_day", "pickup_community_area", "dropoff_community_area",
"pickup_census_tract", "dropoff_census_tract", "pickup_latitude", "pickup_longitude",
"dropoff_latitude", "dropoff_longitude", "payment_type", "company",
]
_LABEL_COLUMN = "tip"
_CATEGORICAL_TYPES = {
"payment_type": pd.api.types.CategoricalDtype(categories=[
'No Charge', 'Credit Card', 'Cash', 'Unknown', 'Dispute'
]),
"company": pd.api.types.CategoricalDtype(categories=[
'Northwest Management LLC', 'Blue Ribbon Taxi Association Inc.',
'Taxi Affiliation Services', 'Dispatch Taxi Affiliation',
'Top Cab Affiliation', 'Choice Taxi Association', '5129 - 87128',
'KOAM Taxi Association', 'Chicago Medallion Leasing INC',
'Chicago Medallion Management', '3201 - C&D Cab Co Inc',
'1247 - 72807 Daniel Ayertey', '5776 - Mekonen Cab Company',
'2092 - 61288 Sbeih company', '0694 - 59280 Chinesco Trans Inc',
'4197 - Royal Star', 'C & D Cab Co Inc', '3591 - 63480 Chuks Cab',
'4053 - Adwar H. Nikola', '3141 - Zip Cab',
'6742 - 83735 Tasha ride inc', '0118 - 42111 Godfrey S.Awir',
'3385 - Eman Cab', '4053 - 40193 Adwar H. Nikola',
'3152 - 97284 Crystal Abernathy', '2823 - 73307 Seung Lee',
'6574 - Babylon Express Inc.', '5724 - 75306 KYVI Cab Inc',
'5074 - 54002 Ahzmi Inc', '2733 - 74600 Benny Jona',
'3253 - 91138 Gaither Cab Co.', '3152 - Crystal Abernathy',
'5437 - Great American Cab Co', '1085 - N and W Cab Co',
'6488 - 83287 Zuha Taxi', '2192 - 73487 Zeymane Corp',
'0118 - Godfrey S.Awir', '4197 - 41842 Royal Star',
'3319 - C&D Cab Company', '4787 - Reny Cab Co',
'1085 - 72312 N and W Cab Co', "3591- 63480 Chuk's Cab",
'6743 - 78771 Luhak Corp', '3623-Arrington Enterprises',
'3623 - 72222 Arrington Enterprises', '3141 - 87803 Zip Cab',
'5074 - Ahzmi Inc', '3897 - Ilie Malec', '2092 - Sbeih company',
'6057 - 24657 Richard Addo', '5006 - 39261 Salifu Bawa',
'3620 - David K. Cab Corp.', '3556 - 36214 RC Andrews Cab',
'2733 - Benny Jona', '4615 - 83503 Tyrone Henderson',
'5129 - 98755 Mengisti Taxi', '5724 - 72965 KYVI Cab Inc',
'585 - 88805 Valley Cab Co', '5997 - 65283 AW Services Inc.',
'2809 - 95474 C & D Cab Co Inc.', '6743 - Luhak Corp',
'5874 - 73628 Sergey Cab Corp.', '3897 - 57856 Ilie Malec',
'3319 - CD Cab Co', '6747 - Mueen Abdalla']),
}
def _download_and_clean_file(filename, url):
"""Downloads data from url, and makes changes to match the CSV format.
The CSVs may use spaces after the comma delimiters (non-standard) or include
rows which do not represent well-formed examples. This function strips out
some of these problems.
Args:
filename: filename to save url to
url: URL of resource to download
"""
temp_file, _ = urllib.request.urlretrieve(url)
with tf.io.gfile.GFile(temp_file, "r") as temp_file_object:
with tf.io.gfile.GFile(filename, "w") as file_object:
for line in temp_file_object:
line = line.strip()
line = line.replace(", ", ",")
if not line or "," not in line:
continue
if line[-1] == ".":
line = line[:-1]
line += "\n"
file_object.write(line)
tf.io.gfile.remove(temp_file)
def download(data_dir):
"""Downloads census data if it is not already present.
Args:
data_dir: directory where we will access/save the census data
Returns:
foo
"""
tf.io.gfile.makedirs(data_dir)
training_file_path = os.path.join(data_dir, TRAINING_FILE)
if not tf.io.gfile.exists(training_file_path):
_download_and_clean_file(training_file_path, TRAINING_URL)
eval_file_path = os.path.join(data_dir, EVAL_FILE)
if not tf.io.gfile.exists(eval_file_path):
_download_and_clean_file(eval_file_path, EVAL_URL)
return training_file_path, eval_file_path
def upload(train_df, eval_df, train_path, eval_path):
train_df.to_csv(os.path.join(os.path.dirname(train_path), TRAINING_FILE),
index=False, header=False)
eval_df.to_csv(os.path.join(os.path.dirname(eval_path), EVAL_FILE),
index=False, header=False)
def preprocess(dataframe):
"""Converts categorical features to numeric. Removes unused columns.
Args:
dataframe: Pandas dataframe with raw data
Returns:
Dataframe with preprocessed data
"""
# Convert integer valued (numeric) columns to floating point
numeric_columns = dataframe.select_dtypes(["int64"]).columns
dataframe[numeric_columns] = dataframe[numeric_columns].astype("float32")
# Convert categorical columns to numeric
cat_columns = dataframe.select_dtypes(["object"]).columns
dataframe[cat_columns] = dataframe[cat_columns].apply(
lambda x: x.astype(_CATEGORICAL_TYPES[x.name]))
dataframe[cat_columns] = dataframe[cat_columns].apply(
lambda x: x.cat.codes)
return dataframe
def standardize(dataframe):
"""Scales numerical columns using their means and standard deviation.
Args:
dataframe: Pandas dataframe
Returns:
Input dataframe with the numerical columns scaled to z-scores
"""
dtypes = list(zip(dataframe.dtypes.index, map(str, dataframe.dtypes)))
for column, dtype in dtypes:
if dtype == "float32":
dataframe[column] -= dataframe[column].mean()
dataframe[column] /= dataframe[column].std()
return dataframe
def load_data(train_path="", eval_path=""):
"""Loads data into preprocessed (train_x, train_y, eval_y, eval_y) dataframes.
Args:
train_path: Local or GCS path to upload the train data to.
eval_path: Local or GCS path to upload the eval data to.
Returns:
A tuple (train_x, train_y, eval_x, eval_y), where train_x and eval_x are
Pandas dataframes with features for training and train_y and eval_y are
numpy arrays with the corresponding labels.
"""
# Download the taxi dataset: training and eval CSV files.
training_file_path, eval_file_path = download(DATA_DIR)
train_df = pd.read_csv(training_file_path)
eval_df = pd.read_csv(eval_file_path)
train_df = preprocess(train_df)
eval_df = preprocess(eval_df)
# Split train and eval data with labels. The pop method copies and removes
# the label column from the dataframe.
train_x, train_y = train_df, train_df.pop(_LABEL_COLUMN)
eval_x, eval_y = eval_df, eval_df.pop(_LABEL_COLUMN)
# Join train_x and eval_x to normalize on overall means and standard
# deviations. Then separate them again.
all_x = pd.concat([train_x, eval_x], keys=["train", "eval"])
all_x = standardize(all_x)
train_x, eval_x = all_x.xs("train"), all_x.xs("eval")
# Rejoin features and labels and upload to GCS.
if train_path and eval_path:
train_df = train_x.copy()
train_df[_LABEL_COLUMN] = train_y
eval_df = eval_x.copy()
eval_df[_LABEL_COLUMN] = eval_y
upload(train_df, eval_df, train_path, eval_path)
# Reshape label columns for use with tf.data.Dataset
train_y = np.asarray(train_y).astype("float32").reshape((-1, 1))
eval_y = np.asarray(eval_y).astype("float32").reshape((-1, 1))
return train_x, train_y, eval_x, eval_y
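# Hedged usage sketch (not part of the original sample): an end-to-end call showing
# what load_data() returns. Calling it downloads the taxi CSVs into DATA_DIR, so it
# is wrapped in a helper rather than executed at import time.
def _example_load():
    train_x, train_y, eval_x, eval_y = load_data()
    # train_x / eval_x are pandas DataFrames whose numeric columns are standardized
    # to z-scores and whose categorical columns are encoded as integer codes;
    # train_y / eval_y are numpy arrays of shape (num_examples, 1).
    return train_x.shape, train_y.shape, eval_x.shape, eval_y.shape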
|
"""Experimental library that exposes XLA operations directly in TensorFlow.
It is sometimes useful to be able to build HLO programs directly from
TensorFlow. This file provides TensorFlow operators that mirror the semantics of
HLO operators as closely as possible.
Note: There is no promise of backward or forward compatibility for operators
defined in this module. This is primarily because the underlying HLO operators
do not promise backward or forward compatibility.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow.compiler.tf2xla.ops import gen_xla_ops
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import ops
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import bitwise_ops
from tensorflow.python.ops import gen_math_ops
from tensorflow.python.ops import gen_random_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import random_ops
_max = max
_min = min
_slice = slice # pylint: disable=invalid-name
constant = constant_op.constant
def _unary_op(fn):
"""Wrapper that restricts `fn` to have the correct signature."""
def unary_op_wrapper(x, name=None):
return fn(x, name=name)
return unary_op_wrapper
abs = _unary_op(math_ops.abs)
conj = _unary_op(math_ops.conj)
cos = _unary_op(math_ops.cos)
ceil = _unary_op(math_ops.ceil)
digamma = _unary_op(math_ops.digamma)
erf = _unary_op(math_ops.erf)
erfc = _unary_op(math_ops.erfc)
erfinv = _unary_op(math_ops.erfinv)
ndtri = _unary_op(math_ops.ndtri)
exp = _unary_op(math_ops.exp)
expm1 = _unary_op(math_ops.expm1)
floor = _unary_op(math_ops.floor)
imag = _unary_op(math_ops.imag)
is_finite = _unary_op(math_ops.is_finite)
lgamma = _unary_op(math_ops.lgamma)
log = _unary_op(math_ops.log)
log1p = _unary_op(math_ops.log1p)
logical_not = _unary_op(math_ops.logical_not)
neg = _unary_op(math_ops.neg)
real = _unary_op(math_ops.real)
round = _unary_op(math_ops.round)
sin = _unary_op(math_ops.sin)
sign = _unary_op(math_ops.sign)
tanh = _unary_op(math_ops.tanh)
bessel_i0e = _unary_op(math_ops.bessel_i0e)
bessel_i1e = _unary_op(math_ops.bessel_i1e)
def _broadcasting_binary_op(fn):
"""Wraps a binary Tensorflow operator and performs XLA-style broadcasting."""
def broadcasting_binary_op_wrapper(x, y, broadcast_dims=None, name=None):
"""Inner wrapper function."""
broadcast_dims = broadcast_dims or []
broadcast_dims = ops.convert_to_tensor(broadcast_dims, dtypes.int64)
# Rather than relying on having static shape information in the TensorFlow
# graph, we use an XlaBroadcastHelper op that can compute the correct shapes
# at JIT compilation time.
x, y = gen_xla_ops.xla_broadcast_helper(x, y, broadcast_dims)
return fn(x, y, name=name)
return broadcasting_binary_op_wrapper
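# Illustrative sketch (not part of the original module): XLA-style broadcasting
# maps each axis of the lower-rank operand onto the axes of the higher-rank
# operand named in broadcast_dims. For example, under XLA compilation:
#
#   x = constant([[1., 2., 3.], [4., 5., 6.]])   # shape [2, 3]
#   b = constant([10., 20.])                     # shape [2]
#   y = add(x, b, broadcast_dims=[0])            # result[i, j] = x[i, j] + b[i]
#
# Without broadcast_dims, the operands must already have compatible shapes.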
_SIGNED_TO_UNSIGNED_TABLE = {
dtypes.int8: dtypes.uint8,
dtypes.int16: dtypes.uint16,
dtypes.int32: dtypes.uint32,
dtypes.int64: dtypes.uint64,
}
_UNSIGNED_TO_SIGNED_TABLE = {
dtypes.uint8: dtypes.int8,
dtypes.uint16: dtypes.int16,
dtypes.uint32: dtypes.int32,
dtypes.uint64: dtypes.int64,
}
def _shift_right_logical_helper(x, y, name=None):
"""Performs an integer right logical shift irrespective of input type."""
assert y.dtype == x.dtype
dtype = x.dtype
signed = dtype in _SIGNED_TO_UNSIGNED_TABLE
if signed:
unsigned_dtype = _SIGNED_TO_UNSIGNED_TABLE[dtype]
x = math_ops.cast(x, unsigned_dtype)
y = math_ops.cast(y, unsigned_dtype)
output = bitwise_ops.right_shift(x, y, name=name)
if signed:
output = math_ops.cast(output, dtype)
return output
def _shift_right_arithmetic_helper(x, y, name=None):
"""Performs an integer right arithmetic shift irrespective of input type."""
assert y.dtype == x.dtype
dtype = x.dtype
unsigned = dtype in _UNSIGNED_TO_SIGNED_TABLE
if unsigned:
signed_dtype = _UNSIGNED_TO_SIGNED_TABLE[dtype]
x = math_ops.cast(x, signed_dtype)
y = math_ops.cast(y, signed_dtype)
output = bitwise_ops.right_shift(x, y, name=name)
if unsigned:
output = math_ops.cast(output, dtype)
return output
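# Worked example (illustrative, not part of the original module): for an int8
# value of -1 (bit pattern 0xff) shifted right by 1:
#   shift_right_arithmetic -> -1   (the sign bit is replicated)
#   shift_right_logical    -> 127  (the value is reinterpreted as uint8 255 first)
# The helpers above achieve this by casting to the opposite signedness before
# calling bitwise_ops.right_shift and casting the result back afterwards.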
add = _broadcasting_binary_op(math_ops.add)
sub = _broadcasting_binary_op(math_ops.sub)
mul = _broadcasting_binary_op(math_ops.mul)
div = _broadcasting_binary_op(math_ops.div)
rem = _broadcasting_binary_op(gen_math_ops.mod)
max = _broadcasting_binary_op(math_ops.maximum)
min = _broadcasting_binary_op(math_ops.minimum)
atan2 = _broadcasting_binary_op(math_ops.atan2)
complex = _broadcasting_binary_op(math_ops.complex)
logical_and = _broadcasting_binary_op(math_ops.logical_and)
logical_or = _broadcasting_binary_op(math_ops.logical_or)
logical_xor = _broadcasting_binary_op(math_ops.logical_xor)
eq = _broadcasting_binary_op(math_ops.equal)
ne = _broadcasting_binary_op(math_ops.not_equal)
ge = _broadcasting_binary_op(math_ops.greater_equal)
gt = _broadcasting_binary_op(math_ops.greater)
le = _broadcasting_binary_op(math_ops.less_equal)
lt = _broadcasting_binary_op(math_ops.less)
pow = _broadcasting_binary_op(math_ops.pow)
shift_left = _broadcasting_binary_op(bitwise_ops.left_shift)
shift_right_logical = _broadcasting_binary_op(_shift_right_logical_helper)
shift_right_arithmetic = _broadcasting_binary_op(_shift_right_arithmetic_helper)
igamma = _broadcasting_binary_op(math_ops.igamma)
igamma_grad_a = _broadcasting_binary_op(gen_math_ops.igamma_grad_a)
random_gamma_grad = _broadcasting_binary_op(gen_random_ops.random_gamma_grad)
igammac = _broadcasting_binary_op(math_ops.igammac)
def _binary_op(fn):
"""Wrapper that restricts `fn` to have the correct signature."""
def binary_op_wrapper(x, y, name=None):
return fn(x, y, name=name)
return binary_op_wrapper
transpose = _binary_op(array_ops.transpose)
rev = _binary_op(array_ops.reverse)
bitcast_convert_type = array_ops.bitcast
def broadcast(x, dims, name=None):
x = ops.convert_to_tensor(x)
shape = array_ops.concat([constant_op.constant(dims),
array_ops.shape(x)],
axis=0)
return array_ops.broadcast_to(x, shape, name=name)
def clamp(a, x, b, name=None):
return min(max(a, x, name=name), b, name=name)
concatenate = array_ops.concat
def conv(lhs,
rhs,
window_strides,
padding,
lhs_dilation,
rhs_dilation,
dimension_numbers,
feature_group_count=1,
precision_config=None,
name=None):
"""Wraps the XLA ConvGeneralDilated operator.
ConvGeneralDilated is the most general form of XLA convolution and is
documented at
https://www.tensorflow.org/performance/xla/operation_semantics#conv_convolution
Args:
lhs: the input tensor
rhs: the kernel tensor
window_strides: the inter-window strides
padding: the padding to apply at the start and end of each input dimension
lhs_dilation: dilation to apply between input elements
rhs_dilation: dilation to apply between kernel elements
dimension_numbers: a `ConvolutionDimensionNumbers` proto.
feature_group_count: number of feature groups for grouped convolution.
precision_config: a `xla.PrecisionConfig` proto.
name: an optional name for the operator
Returns:
A tensor representing the output of the convolution.
"""
precision_config_proto = ""
if precision_config:
precision_config_proto = precision_config.SerializeToString()
return gen_xla_ops.xla_conv(
lhs,
rhs,
window_strides=window_strides,
padding=padding,
lhs_dilation=lhs_dilation,
rhs_dilation=rhs_dilation,
feature_group_count=feature_group_count,
dimension_numbers=dimension_numbers.SerializeToString(),
precision_config=precision_config_proto,
name=name)
convert_element_type = math_ops.cast
def dot(lhs, rhs, name=None):
return math_ops.tensordot(lhs, rhs, axes=1, name=name)
def dot_general(lhs, rhs, dimension_numbers, precision_config=None, name=None):
precision_config_proto = ""
if precision_config:
precision_config_proto = precision_config.SerializeToString()
return gen_xla_ops.xla_dot(
lhs,
rhs,
dimension_numbers=dimension_numbers.SerializeToString(),
precision_config=precision_config_proto,
name=name)
def self_adjoint_eig(a, lower, max_iter, epsilon):
return gen_xla_ops.xla_self_adjoint_eig(a, lower, max_iter, epsilon)
def svd(a, max_iter, epsilon, precision_config=None):
precision_config_proto = ""
if precision_config:
precision_config_proto = precision_config.SerializeToString()
return gen_xla_ops.xla_svd(a, max_iter, epsilon, precision_config_proto)
dynamic_slice = gen_xla_ops.xla_dynamic_slice
dynamic_update_slice = gen_xla_ops.xla_dynamic_update_slice
einsum = gen_xla_ops.xla_einsum
pad = gen_xla_ops.xla_pad
def random_normal(mu, sigma, dims, name=None):
mu = ops.convert_to_tensor(mu)
return random_ops.random_normal(
dims, mean=mu, stddev=sigma, dtype=mu.dtype, name=name)
def random_uniform(minval, maxval, dims, name=None):
minval = ops.convert_to_tensor(minval)
return random_ops.random_uniform(
dims, minval, maxval, dtype=minval.dtype, name=name)
recv = gen_xla_ops.xla_recv
reduce = gen_xla_ops.xla_reduce
def reduce_window(operand,
init,
reducer,
window_dimensions,
window_strides=None,
base_dilations=None,
window_dilations=None,
padding=None,
name=None):
"""Wraps the XLA ReduceWindow operator.
ReduceWindow is documented at
https://www.tensorflow.org/performance/xla/operation_semantics#reducewindow .
Args:
operand: the input tensor
init: a scalar tensor representing the initial value for the reduction
reducer: a reduction function that combines a pair of scalars.
window_dimensions: shape of the window, as a list of integers
window_strides: inter-window strides, as a list of integers. Optional; if
omitted, defaults to strides of 1.
base_dilations: dilations to apply to the input, as a list of integers.
Optional; if omitted, defaults to no dilation.
window_dilations: dilations to apply to the window, as a list of integers.
Optional; if omitted, defaults to no dilation.
padding: padding to apply to 'operand'. List of (low, high) pairs of
integers that specify the padding to apply before and after each
dimension. Optional; if omitted, defaults to no padding.
name: the operator name, or None.
Returns:
A tensor that represents the output of the reduce_window operator.
"""
window_strides = window_strides or [1] * len(window_dimensions)
base_dilations = base_dilations or [1] * len(window_dimensions)
window_dilations = window_dilations or [1] * len(window_dimensions)
padding = padding or [(0, 0)] * len(window_dimensions)
return gen_xla_ops.xla_reduce_window(
input=operand,
init_value=init,
window_dimensions=window_dimensions,
window_strides=window_strides,
base_dilations=base_dilations,
window_dilations=window_dilations,
padding=padding,
computation=reducer,
name=name)
replica_id = gen_xla_ops.xla_replica_id
def reshape(x, new_sizes, dimensions=None, name=None):
if dimensions is not None:
x = array_ops.transpose(x, dimensions)
x = array_ops.reshape(x, new_sizes, name=name)
return x
def select(condition, x, y, name=None):
return array_ops.where(condition, x, y, name)
select_and_scatter = gen_xla_ops.xla_select_and_scatter
send = gen_xla_ops.xla_send
def slice(x, start_dims, limit_dims, strides):
spec = [
_slice(start, limit, stride)
for (start, limit, stride) in zip(start_dims, limit_dims, strides)
]
return x[tuple(spec)]
sharding = gen_xla_ops.xla_sharding
@ops.RegisterGradient("XlaSharding")
def _sharding_grad(op, grad):
del op # Unused
return [grad]
sort = gen_xla_ops.xla_sort
key_value_sort = gen_xla_ops.xla_key_value_sort
while_loop = gen_xla_ops.xla_while
dequantize = gen_xla_ops.xla_dequantize
def gather(operand, start_indices, dimension_numbers, slice_sizes,
indices_are_sorted=False, name=None):
return gen_xla_ops.xla_gather(
operand,
start_indices,
slice_sizes=slice_sizes,
dimension_numbers=dimension_numbers.SerializeToString(),
indices_are_sorted=indices_are_sorted,
name=name)
def scatter(operand, scatter_indices, updates, update_computation,
dimension_numbers, indices_are_sorted=False, name=None):
return gen_xla_ops.xla_scatter(
operand,
scatter_indices,
updates,
update_computation=update_computation,
dimension_numbers=dimension_numbers.SerializeToString(),
indices_are_sorted=indices_are_sorted,
name=name)
|
"""
Unit test for lvm - Linux Volume Manager
"""
import os
import shutil
import tempfile
import unittest
import mock
import treadmill
from treadmill import lvm
class LVMTest(unittest.TestCase):
"""Tests for teadmill.fs."""
def setUp(self):
self.root = tempfile.mkdtemp()
def tearDown(self):
if self.root and os.path.isdir(self.root):
shutil.rmtree(self.root)
@mock.patch('treadmill.subproc.check_call', mock.Mock())
def test_pvcreate(self):
"""Test LVM Physical Volume creation"""
lvm.pvcreate('some_blockdev')
treadmill.subproc.check_call.assert_called_with(
[
'lvm', 'pvcreate',
'--force',
'--yes',
'some_blockdev',
]
)
@mock.patch('treadmill.subproc.check_call', mock.Mock())
def test_vgcreate(self):
"""Test LVM Volume Group creation"""
lvm.vgcreate('some_group', 'some_blockdev')
treadmill.subproc.check_call.assert_called_with(
[
'lvm', 'vgcreate',
'--autobackup', 'n',
'some_group',
'some_blockdev',
]
)
@mock.patch('treadmill.subproc.check_call', mock.Mock())
def test_vgremove(self):
"""Test LVM Volume Group deletion"""
lvm.vgremove('some_group')
treadmill.subproc.check_call.assert_called_with(
[
'lvm', 'vgremove',
'--force',
'some_group',
]
)
@mock.patch('treadmill.subproc.check_call', mock.Mock())
def test_vgactivate(self):
"""Test LVM Volume Group activation"""
lvm.vgactivate('some_group')
treadmill.subproc.check_call.assert_called_with(
[
'lvm', 'vgchange',
'--activate', 'y',
'some_group',
]
)
@mock.patch('treadmill.subproc.check_output', mock.Mock())
def test_vgdisplay(self):
"""Test display of LVM group information.
"""
treadmill.subproc.check_output.return_value = (
' test:r/w:772:-1:0:0:0:-1:0:1:1:98304:4096:'
'24:0:24:Vsj4xA-45Ad-v4Rp-VOOf-XzEf-Gxwr-erL7Zu\n'
)
vg = lvm.vgdisplay('test')
treadmill.subproc.check_output.assert_called_with(
[
'lvm',
'vgdisplay',
'--colon',
'test',
]
)
self.assertEqual(
vg,
{
'access': 'r/w',
'extent_alloc': 0,
'extent_free': 24,
'extent_nb': 24,
'extent_size': 4096,
'lv_cur': 0,
'lv_max': 0,
'lv_open_count': 0,
'max_size': -1,
'name': 'test',
'number': -1,
'pv_actual': 1,
'pv_cur': 1,
'pv_max': 0,
'size': 98304,
'status': '772',
'uuid': 'Vsj4xA-45Ad-v4Rp-VOOf-XzEf-Gxwr-erL7Zu',
},
)
@mock.patch('treadmill.subproc.check_output', mock.Mock())
def test_vgsdisplay(self):
"""Test display of list of LVM groups informations.
"""
treadmill.subproc.check_output.return_value = (
' test:r/w:772:-1:0:0:0:-1:0:1:1:98304:4096:'
'24:0:24:Vsj4xA-45Ad-v4Rp-VOOf-XzEf-Gxwr-erL7Zu\n'
' treadmill:r/w:772:-1:0:5:5:-1:0:1:1:35467264:4096:'
'8659:1711:6948:MXvxzQ-gnXF-BXia-1pVo-KOH1-aJ4m-pIfnY8\n'
)
vgs = lvm.vgsdisplay()
treadmill.subproc.check_output.assert_called_with(
[
'lvm',
'vgdisplay',
'--colon',
]
)
self.assertEqual(
vgs,
[
{
'access': 'r/w',
'extent_alloc': 0,
'extent_free': 24,
'extent_nb': 24,
'extent_size': 4096,
'lv_cur': 0,
'lv_max': 0,
'lv_open_count': 0,
'max_size': -1,
'name': 'test',
'number': -1,
'pv_actual': 1,
'pv_cur': 1,
'pv_max': 0,
'size': 98304,
'status': '772',
'uuid': 'Vsj4xA-45Ad-v4Rp-VOOf-XzEf-Gxwr-erL7Zu',
},
{
'access': 'r/w',
'extent_alloc': 1711,
'extent_free': 6948,
'extent_nb': 8659,
'extent_size': 4096,
'lv_cur': 5,
'lv_max': 0,
'lv_open_count': 5,
'max_size': -1,
'name': 'treadmill',
'number': -1,
'pv_actual': 1,
'pv_cur': 1,
'pv_max': 0,
'size': 35467264,
'status': '772',
'uuid': 'MXvxzQ-gnXF-BXia-1pVo-KOH1-aJ4m-pIfnY8',
},
]
)
@mock.patch('treadmill.subproc.check_call', mock.Mock())
def test_lvcreate(self):
"""Test LVM Logical Volume creation.
"""
lvm.lvcreate('some_volume', '1024', 'some_group')
treadmill.subproc.check_call.assert_called_with(
[
'lvm', 'lvcreate',
'--autobackup', 'n',
'--wipesignatures', 'y',
'--size', '1024B',
'--name', 'some_volume',
'some_group',
]
)
@mock.patch('treadmill.subproc.check_call', mock.Mock())
def test_lvremove(self):
"""Test LVM Logical Volume deletion.
"""
lvm.lvremove('some_volume', 'some_group')
treadmill.subproc.check_call.assert_called_with(
[
'lvm', 'lvremove',
'--autobackup', 'n',
'--force',
'some_group/some_volume',
]
)
@mock.patch('treadmill.subproc.check_output', mock.Mock())
def test_lvdisplay(self):
"""Test display of LVM volume information.
"""
treadmill.subproc.check_output.return_value = (
' /dev/test/test-lv:test:3:1:-1:0:24576:'
'3:-1:0:-1:253:5\n'
)
lv = lvm.lvdisplay('test-lv', 'test')
treadmill.subproc.check_output.assert_called_with(
[
'lvm',
'lvdisplay',
'--colon',
'test/test-lv',
]
)
self.assertEqual(
lv,
{
'block_dev': '/dev/test/test-lv',
'dev_major': 253,
'dev_minor': 5,
'extent_alloc': -1,
'extent_size': 3,
'group': 'test',
'name': 'test-lv',
'open_count': 0,
},
)
@mock.patch('treadmill.subproc.check_output', mock.Mock())
def test_lvsdisplay(self):
"""Test display of list of LVM volumes informations.
"""
treadmill.subproc.check_output.return_value = (
' /dev/test/test-lv:test:3:1:-1:0:24576:'
'3:-1:0:-1:253:5\n'
' /dev/treadmill/oRHxZN5QldMdz:treadmill:3:1:-1:1:10485760:'
'1280:-1:0:-1:253:0\n'
' /dev/treadmill/ESE0g3hyf7nxv:treadmill:3:1:-1:1:2097152:'
'256:-1:0:-1:253:1\n'
' /dev/treadmill/p8my37oRJGcd5:treadmill:3:1:-1:1:204800:'
'25:-1:0:-1:253:2\n'
' /dev/treadmill/njZhRefmf6jQp:treadmill:3:1:-1:1:1024000:'
'125:-1:0:-1:253:3\n'
' /dev/treadmill/yRImNK9cnix2T:treadmill:3:1:-1:1:204800:'
'25:-1:0:-1:253:4\n'
)
lvs = lvm.lvsdisplay()
treadmill.subproc.check_output.assert_called_with(
[
'lvm',
'lvdisplay',
'--colon',
]
)
self.assertEqual(
lvs,
[
{
'block_dev': '/dev/test/test-lv',
'dev_major': 253,
'dev_minor': 5,
'extent_alloc': -1,
'extent_size': 3,
'group': 'test',
'name': 'test-lv',
'open_count': 0,
},
{
'block_dev': '/dev/treadmill/oRHxZN5QldMdz',
'dev_major': 253,
'dev_minor': 0,
'extent_alloc': -1,
'extent_size': 1280,
'group': 'treadmill',
'name': 'oRHxZN5QldMdz',
'open_count': 1,
},
{
'block_dev': '/dev/treadmill/ESE0g3hyf7nxv',
'dev_major': 253,
'dev_minor': 1,
'extent_alloc': -1,
'extent_size': 256,
'group': 'treadmill',
'name': 'ESE0g3hyf7nxv',
'open_count': 1,
},
{
'block_dev': '/dev/treadmill/p8my37oRJGcd5',
'dev_major': 253,
'dev_minor': 2,
'extent_alloc': -1,
'extent_size': 25,
'group': 'treadmill',
'name': 'p8my37oRJGcd5',
'open_count': 1,
},
{
'block_dev': '/dev/treadmill/njZhRefmf6jQp',
'dev_major': 253,
'dev_minor': 3,
'extent_alloc': -1,
'extent_size': 125,
'group': 'treadmill',
'name': 'njZhRefmf6jQp',
'open_count': 1,
},
{
'block_dev': '/dev/treadmill/yRImNK9cnix2T',
'dev_major': 253,
'dev_minor': 4,
'extent_alloc': -1,
'extent_size': 25,
'group': 'treadmill',
'name': 'yRImNK9cnix2T',
'open_count': 1,
},
]
)
if __name__ == '__main__':
unittest.main()
|
"""Interface implementation for cloud client."""
import asyncio
from pathlib import Path
from typing import Any, Dict
import aiohttp
from hass_nabucasa.client import CloudClient as Interface
from homeassistant.core import callback
from homeassistant.components.alexa import smart_home as alexa_sh
from homeassistant.components.google_assistant import (
helpers as ga_h, smart_home as ga)
from homeassistant.const import CLOUD_NEVER_EXPOSED_ENTITIES
from homeassistant.helpers.typing import HomeAssistantType
from homeassistant.helpers.dispatcher import async_dispatcher_send
from homeassistant.util.aiohttp import MockRequest
from . import utils
from .const import (
CONF_ENTITY_CONFIG, CONF_FILTER, DOMAIN, DISPATCHER_REMOTE_UPDATE)
from .prefs import CloudPreferences
class CloudClient(Interface):
"""Interface class for Home Assistant Cloud."""
def __init__(self, hass: HomeAssistantType, prefs: CloudPreferences,
websession: aiohttp.ClientSession,
alexa_config: Dict[str, Any], google_config: Dict[str, Any]):
"""Initialize client interface to Cloud."""
self._hass = hass
self._prefs = prefs
self._websession = websession
self._alexa_user_config = alexa_config
self._google_user_config = google_config
self._alexa_config = None
self._google_config = None
@property
def base_path(self) -> Path:
"""Return path to base dir."""
return Path(self._hass.config.config_dir)
@property
def prefs(self) -> CloudPreferences:
"""Return Cloud preferences."""
return self._prefs
@property
def loop(self) -> asyncio.BaseEventLoop:
"""Return client loop."""
return self._hass.loop
@property
def websession(self) -> aiohttp.ClientSession:
"""Return client session for aiohttp."""
return self._websession
@property
def aiohttp_runner(self) -> aiohttp.web.AppRunner:
"""Return client webinterface aiohttp application."""
return self._hass.http.runner
@property
def cloudhooks(self) -> Dict[str, Dict[str, str]]:
"""Return list of cloudhooks."""
return self._prefs.cloudhooks
@property
def remote_autostart(self) -> bool:
"""Return true if we want start a remote connection."""
return self._prefs.remote_enabled
@property
def alexa_config(self) -> alexa_sh.Config:
"""Return Alexa config."""
if not self._alexa_config:
alexa_conf = self._alexa_user_config
self._alexa_config = alexa_sh.Config(
endpoint=None,
async_get_access_token=None,
should_expose=alexa_conf[CONF_FILTER],
entity_config=alexa_conf.get(CONF_ENTITY_CONFIG),
)
return self._alexa_config
@property
def google_config(self) -> ga_h.Config:
"""Return Google config."""
if not self._google_config:
google_conf = self._google_user_config
def should_expose(entity):
"""If an entity should be exposed."""
if entity.entity_id in CLOUD_NEVER_EXPOSED_ENTITIES:
return False
return google_conf['filter'](entity.entity_id)
username = self._hass.data[DOMAIN].claims["cognito:username"]
self._google_config = ga_h.Config(
should_expose=should_expose,
secure_devices_pin=self._prefs.google_secure_devices_pin,
entity_config=google_conf.get(CONF_ENTITY_CONFIG),
agent_user_id=username,
)
# Set it to the latest.
self._google_config.secure_devices_pin = \
self._prefs.google_secure_devices_pin
return self._google_config
@property
def google_user_config(self) -> Dict[str, Any]:
"""Return google action user config."""
return self._google_user_config
async def cleanups(self) -> None:
"""Cleanup some stuff after logout."""
self._alexa_config = None
self._google_config = None
@callback
def user_message(self, identifier: str, title: str, message: str) -> None:
"""Create a message for user to UI."""
self._hass.components.persistent_notification.async_create(
message, title, identifier
)
@callback
def dispatcher_message(self, identifier: str, data: Any = None) -> None:
"""Match cloud notification to dispatcher."""
if identifier.startswith("remote_"):
async_dispatcher_send(self._hass, DISPATCHER_REMOTE_UPDATE, data)
async def async_alexa_message(
self, payload: Dict[Any, Any]) -> Dict[Any, Any]:
"""Process cloud alexa message to client."""
return await alexa_sh.async_handle_message(
self._hass, self.alexa_config, payload,
enabled=self._prefs.alexa_enabled
)
async def async_google_message(
self, payload: Dict[Any, Any]) -> Dict[Any, Any]:
"""Process cloud google message to client."""
if not self._prefs.google_enabled:
return ga.turned_off_response(payload)
return await ga.async_handle_message(
self._hass, self.google_config, self.prefs.cloud_user, payload
)
async def async_webhook_message(
self, payload: Dict[Any, Any]) -> Dict[Any, Any]:
"""Process cloud webhook message to client."""
cloudhook_id = payload['cloudhook_id']
found = None
for cloudhook in self._prefs.cloudhooks.values():
if cloudhook['cloudhook_id'] == cloudhook_id:
found = cloudhook
break
if found is None:
return {
'status': 200
}
request = MockRequest(
content=payload['body'].encode('utf-8'),
headers=payload['headers'],
method=payload['method'],
query_string=payload['query'],
)
response = await self._hass.components.webhook.async_handle_webhook(
found['webhook_id'], request)
response_dict = utils.aiohttp_serialize_response(response)
body = response_dict.get('body')
return {
'body': body,
'status': response_dict['status'],
'headers': {
'Content-Type': response.content_type
}
}
async def async_cloudhooks_update(
self, data: Dict[str, Dict[str, str]]) -> None:
"""Update local list of cloudhooks."""
await self._prefs.async_update(cloudhooks=data)
|
from nova.api.openstack import common
class ViewBuilder(common.ViewBuilder):
_collection_name = 'os-keypairs'
    # TODO(takashin): After v2 and v2.1 are no longer supported,
    # 'type' can always be included in the response.
_index_params = ('name', 'public_key', 'fingerprint')
_create_params = _index_params + ('user_id',)
_show_params = _create_params + ('created_at', 'deleted', 'deleted_at',
'id', 'updated_at')
_index_params_v2_2 = _index_params + ('type',)
_show_params_v2_2 = _show_params + ('type',)
def get_links(self, request, keypairs):
return self._get_collection_links(request, keypairs,
self._collection_name, 'name')
    # TODO(oomichi): Filter the keypair response with _build_keypair()
    # when using v2.1+microversions, to implement consistent behavior
    # in this keypair resource.
@staticmethod
def _build_keypair(keypair, attrs):
body = {}
for attr in attrs:
body[attr] = keypair[attr]
return body
def create(self, keypair, private_key=False, key_type=False):
params = []
if private_key:
params.append('private_key')
        # TODO(takashin): After v2 and v2.1 are no longer supported,
        # 'type' can always be included in the response.
if key_type:
params.append('type')
params.extend(self._create_params)
return {'keypair': self._build_keypair(keypair, params)}
def index(self, req, key_pairs, key_type=False, links=False):
keypairs_list = [
{'keypair': self._build_keypair(
key_pair,
self._index_params_v2_2 if key_type else self._index_params)}
for key_pair in key_pairs]
keypairs_dict = {'keypairs': keypairs_list}
if links:
keypairs_links = self.get_links(req, key_pairs)
if keypairs_links:
keypairs_dict['keypairs_links'] = keypairs_links
return keypairs_dict
def show(self, keypair, key_type=False):
return {'keypair': self._build_keypair(
keypair, self._show_params_v2_2 if key_type
else self._show_params)}
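# Illustrative only (all values below are made up): with key_type=True,
# index() selects the _index_params_v2_2 fields, so each list entry has the
# shape
#
#   {'keypair': {'name': 'kp1',
#                'public_key': 'ssh-rsa AAAA...',
#                'fingerprint': 'aa:bb:cc',
#                'type': 'ssh'}}
#
# and the whole payload is wrapped as {'keypairs': [...]}, with a
# 'keypairs_links' key added when links are requested and available.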
|
"""Tests for Python ops defined in nn_grad.py."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import dtypes
from tensorflow.python.ops import gradient_checker
from tensorflow.python.ops import gradients_impl
from tensorflow.python.ops import nn_grad
from tensorflow.python.ops import nn_ops
from tensorflow.python.platform import test
class Relu6OpTest(test.TestCase):
def testRelu6GradGrad(self):
inputs = constant_op.constant(
[[-2, -1, 1, 3], [5, 7, 8, 9]], dtype=dtypes.float32)
x_init_value = np.array([[-3.5, -1.5, 2, 4], [4.5, 7.5, 8.5, 11]])
r = nn_ops.relu6(inputs)
r_g = gradients_impl.gradients(r, inputs)[0]
with self.test_session():
error = gradient_checker.compute_gradient_error(
inputs,
inputs.get_shape().as_list(),
r_g,
r_g.get_shape().as_list(),
x_init_value=x_init_value)
self.assertLess(error, 1e-4)
if __name__ == "__main__":
test.main()
|
import os
from setuptools import find_packages
from setuptools import setup
base_dir = os.path.dirname(__file__)
setup(
name='elastalert',
version='0.0.55',
description='Runs custom filters on Elasticsearch and alerts on matches',
author='Quentin Long',
author_email='qlo@yelp.com',
setup_requires='setuptools',
license='Copyright 2014 Yelp',
entry_points={
'console_scripts': ['elastalert-create-index=elastalert.create_index:main',
'elastalert-test-rule=elastalert.test_rule:main',
'elastalert-rule-from-kibana=elastalert.rule_from_kibana:main']},
packages=find_packages(),
package_data={'elastalert': ['schema.yaml']},
install_requires=[
'argparse',
'elasticsearch',
'jira==0.32', # jira.exceptions is missing from later versions
'jsonschema',
'mock',
'python-dateutil',
'PyStaticConfiguration',
'pyyaml',
'simplejson',
]
)
|
"""Support for Dialogflow webhook."""
import logging
import voluptuous as vol
from aiohttp import web
from homeassistant.const import CONF_WEBHOOK_ID
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers import intent, template, config_entry_flow
_LOGGER = logging.getLogger(__name__)
DEPENDENCIES = ['webhook']
DOMAIN = 'dialogflow'
SOURCE = "Home Assistant Dialogflow"
CONFIG_SCHEMA = vol.Schema({
DOMAIN: {}
}, extra=vol.ALLOW_EXTRA)
class DialogFlowError(HomeAssistantError):
"""Raised when a DialogFlow error happens."""
async def async_setup(hass, config):
"""Set up the Dialogflow component."""
return True
async def handle_webhook(hass, webhook_id, request):
"""Handle incoming webhook with Dialogflow requests."""
message = await request.json()
_LOGGER.debug("Received Dialogflow request: %s", message)
try:
response = await async_handle_message(hass, message)
return b'' if response is None else web.json_response(response)
except DialogFlowError as err:
_LOGGER.warning(str(err))
return web.json_response(dialogflow_error_response(message, str(err)))
except intent.UnknownIntent as err:
_LOGGER.warning(str(err))
return web.json_response(
dialogflow_error_response(
message,
"This intent is not yet configured within Home Assistant."
)
)
except intent.InvalidSlotInfo as err:
_LOGGER.warning(str(err))
return web.json_response(
dialogflow_error_response(
message,
"Invalid slot information received for this intent."
)
)
except intent.IntentError as err:
_LOGGER.warning(str(err))
return web.json_response(
dialogflow_error_response(message, "Error handling intent."))
async def async_setup_entry(hass, entry):
"""Configure based on config entry."""
hass.components.webhook.async_register(
DOMAIN, 'DialogFlow', entry.data[CONF_WEBHOOK_ID], handle_webhook)
return True
async def async_unload_entry(hass, entry):
"""Unload a config entry."""
hass.components.webhook.async_unregister(entry.data[CONF_WEBHOOK_ID])
return True
config_entry_flow.register_webhook_flow(
DOMAIN,
'Dialogflow Webhook',
{
'dialogflow_url': 'https://dialogflow.com/docs/fulfillment#webhook',
'docs_url': 'https://www.home-assistant.io/components/dialogflow/'
}
)
def dialogflow_error_response(message, error):
"""Return a response saying the error message."""
dialogflow_response = DialogflowResponse(message['result']['parameters'])
dialogflow_response.add_speech(error)
return dialogflow_response.as_dict()
async def async_handle_message(hass, message):
"""Handle a DialogFlow message."""
req = message.get('result')
action_incomplete = req['actionIncomplete']
if action_incomplete:
return None
action = req.get('action', '')
parameters = req.get('parameters').copy()
parameters["dialogflow_query"] = message
dialogflow_response = DialogflowResponse(parameters)
if action == "":
raise DialogFlowError(
"You have not defined an action in your Dialogflow intent.")
intent_response = await intent.async_handle(
hass, DOMAIN, action,
{key: {'value': value} for key, value
in parameters.items()})
if 'plain' in intent_response.speech:
dialogflow_response.add_speech(
intent_response.speech['plain']['speech'])
return dialogflow_response.as_dict()
class DialogflowResponse:
"""Help generating the response for Dialogflow."""
def __init__(self, parameters):
"""Initialize the Dialogflow response."""
self.speech = None
self.parameters = {}
        # Replace '.' and '-' with '_' in parameter names
for key, value in parameters.items():
underscored_key = key.replace('.', '_').replace('-', '_')
self.parameters[underscored_key] = value
def add_speech(self, text):
"""Add speech to the response."""
assert self.speech is None
if isinstance(text, template.Template):
text = text.async_render(self.parameters)
self.speech = text
def as_dict(self):
"""Return response in a Dialogflow valid dictionary."""
return {
'speech': self.speech,
'displayText': self.speech,
'source': SOURCE,
}
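# Illustrative only (parameter names and speech are made up): the response
# helper normalizes '.' and '-' in parameter names to '_' and mirrors the
# speech into 'displayText'.
#
#   resp = DialogflowResponse({'zone.name': 'kitchen', 'on-off': 'on'})
#   resp.add_speech('Turning on the kitchen lights')
#   resp.as_dict()
#   # -> {'speech': 'Turning on the kitchen lights',
#   #     'displayText': 'Turning on the kitchen lights',
#   #     'source': 'Home Assistant Dialogflow'}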
|
"""High level ssh library.
Usage examples:
Execute command and get output:
ssh = sshclient.SSH("root", "example.com", port=33)
status, stdout, stderr = ssh.execute("ps ax")
if status:
raise Exception("Command failed with non-zero status.")
print stdout.splitlines()
Execute command with huge output:
class PseudoFile(object):
def write(chunk):
if "error" in chunk:
email_admin(chunk)
ssh = sshclient.SSH("root", "example.com")
ssh.run("tail -f /var/log/syslog", stdout=PseudoFile(), timeout=False)
Execute local script on remote side:
ssh = sshclient.SSH("user", "example.com")
status, out, err = ssh.execute("/bin/sh -s arg1 arg2",
stdin=open("~/myscript.sh", "r"))
Upload file:
ssh = sshclient.SSH("user", "example.com")
ssh.run("cat > ~/upload/file.gz", stdin=open("/store/file.gz", "rb"))
Eventlet:
eventlet.monkey_patch(select=True, time=True)
or
eventlet.monkey_patch()
or
    sshclient = eventlet.import_patched("openstack.common.sshclient")
"""
import os
import select
import socket
import time
import paramiko
import six
from heat.openstack.common import log as logging
LOG = logging.getLogger(__name__)
class SSHError(Exception):
pass
class SSHTimeout(SSHError):
pass
class SSH(object):
"""Represent ssh connection."""
def __init__(self, user, host, port=22, pkey=None,
key_filename=None, password=None):
"""Initialize SSH client.
:param user: ssh username
:param host: hostname or ip address of remote ssh server
:param port: remote ssh port
:param pkey: RSA or DSS private key string or file object
:param key_filename: private key filename
:param password: password
"""
self.user = user
self.host = host
self.port = port
self.pkey = self._get_pkey(pkey) if pkey else None
self.password = password
self.key_filename = key_filename
self._client = False
def _get_pkey(self, key):
if isinstance(key, six.string_types):
key = six.moves.StringIO(key)
errors = []
for key_class in (paramiko.rsakey.RSAKey, paramiko.dsskey.DSSKey):
try:
return key_class.from_private_key(key)
except paramiko.SSHException as e:
errors.append(e)
raise SSHError("Invalid pkey: %s" % errors)
def _get_client(self):
if self._client:
return self._client
try:
self._client = paramiko.SSHClient()
self._client.set_missing_host_key_policy(paramiko.AutoAddPolicy())
self._client.connect(self.host, username=self.user,
port=self.port, pkey=self.pkey,
key_filename=self.key_filename,
password=self.password, timeout=5)
return self._client
except Exception as e:
message = ("Exception %(exception_type)s was raised "
"during connect to %(user)s@%(host)s:%(port)s. "
"Exception value is: %(exception)r")
self._client = False
raise SSHError(message % {"exception": e,
"user": self.user,
"host": self.host,
"port": self.port,
"exception_type": type(e)})
def close(self):
if self._client:
self._client.close()
self._client = False
def run(self, cmd, stdin=None, stdout=None, stderr=None,
raise_on_error=True, timeout=3600):
"""Execute specified command on the server.
:param cmd: Command to be executed.
:param stdin: Open file or string to pass to stdin.
:param stdout: Open file to connect to stdout.
:param stderr: Open file to connect to stderr.
        :param raise_on_error: If False then the exit code is returned. If True
                               then an exception is raised on a non-zero exit code.
:param timeout: Timeout in seconds for command execution.
Default 1 hour. No timeout if set to 0.
"""
client = self._get_client()
if isinstance(stdin, six.string_types):
stdin = six.moves.StringIO(stdin)
return self._run(client, cmd, stdin=stdin, stdout=stdout,
stderr=stderr, raise_on_error=raise_on_error,
timeout=timeout)
def _run(self, client, cmd, stdin=None, stdout=None, stderr=None,
raise_on_error=True, timeout=3600):
if isinstance(cmd, (list, tuple)):
cmd = " ".join(six.moves.shlex_quote(str(p)) for p in cmd)
transport = client.get_transport()
session = transport.open_session()
session.exec_command(cmd)
start_time = time.time()
data_to_send = ""
stderr_data = None
# If we have data to be sent to stdin then `select' should also
# check for stdin availability.
if stdin and not stdin.closed:
writes = [session]
else:
writes = []
while True:
# Block until data can be read/write.
r, w, e = select.select([session], writes, [session], 1)
if session.recv_ready():
data = session.recv(4096)
LOG.debug("stdout: %r" % data)
if stdout is not None:
stdout.write(data)
continue
if session.recv_stderr_ready():
stderr_data = session.recv_stderr(4096)
LOG.debug("stderr: %r" % stderr_data)
if stderr is not None:
stderr.write(stderr_data)
continue
if session.send_ready():
if stdin is not None and not stdin.closed:
if not data_to_send:
data_to_send = stdin.read(4096)
if not data_to_send:
stdin.close()
session.shutdown_write()
writes = []
continue
sent_bytes = session.send(data_to_send)
LOG.debug("sent: %s" % data_to_send[:sent_bytes])
data_to_send = data_to_send[sent_bytes:]
if session.exit_status_ready():
break
if timeout and (time.time() - timeout) > start_time:
args = {"cmd": cmd, "host": self.host}
raise SSHTimeout(("Timeout executing command "
"'%(cmd)s' on host %(host)s") % args)
if e:
raise SSHError("Socket error.")
exit_status = session.recv_exit_status()
if 0 != exit_status and raise_on_error:
fmt = ("Command '%(cmd)s' failed with exit_status %(status)d.")
details = fmt % {"cmd": cmd, "status": exit_status}
if stderr_data:
details += (" Last stderr data: '%s'.") % stderr_data
raise SSHError(details)
return exit_status
def execute(self, cmd, stdin=None, timeout=3600):
"""Execute the specified command on the server.
:param cmd: Command to be executed, can be a list.
:param stdin: Open file to be sent on process stdin.
:param timeout: Timeout for execution of the command.
:returns: tuple (exit_status, stdout, stderr)
"""
stdout = six.moves.StringIO()
stderr = six.moves.StringIO()
exit_status = self.run(cmd, stderr=stderr,
stdout=stdout, stdin=stdin,
timeout=timeout, raise_on_error=False)
stdout.seek(0)
stderr.seek(0)
return exit_status, stdout.read(), stderr.read()
def wait(self, timeout=120, interval=1):
"""Wait for the host will be available via ssh."""
start_time = time.time()
while True:
try:
return self.execute("uname")
except (socket.error, SSHError) as e:
LOG.debug("Ssh is still unavailable: %r" % e)
time.sleep(interval)
if time.time() > (start_time + timeout):
raise SSHTimeout(("Timeout waiting for '%s'") % self.host)
def _put_file_sftp(self, localpath, remotepath, mode=None):
client = self._get_client()
sftp = client.open_sftp()
sftp.put(localpath, remotepath)
if mode is None:
mode = 0o777 & os.stat(localpath).st_mode
sftp.chmod(remotepath, mode)
def _put_file_shell(self, localpath, remotepath, mode=None):
cmd = ["cat > %s" % remotepath]
if mode is not None:
cmd.append("chmod 0%o %s" % (mode, remotepath))
with open(localpath, "rb") as localfile:
cmd = "; ".join(cmd)
self.run(cmd, stdin=localfile)
def put_file(self, localpath, remotepath, mode=None):
"""Copy specified local file to the server.
:param localpath: Local filename.
:param remotepath: Remote filename.
:param mode: Permissions to set after upload
"""
try:
self._put_file_sftp(localpath, remotepath, mode=mode)
except paramiko.SSHException:
self._put_file_shell(localpath, remotepath, mode=mode)
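# Illustrative usage of put_file (host, key path and file paths are
# placeholders): the SFTP path is tried first and the shell 'cat' fallback is
# used only when the server rejects the SFTP subsystem.
#
#   ssh = SSH("user", "example.com", key_filename="/home/user/.ssh/id_rsa")
#   ssh.wait(timeout=60)
#   ssh.put_file("/tmp/bootstrap.sh", "/root/bootstrap.sh", mode=0o755)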
|
"""Support for media browsing."""
from homeassistant.components.media_player import BrowseMedia
from homeassistant.components.media_player.const import (
MEDIA_CLASS_APP,
MEDIA_CLASS_DIRECTORY,
MEDIA_TYPE_APP,
MEDIA_TYPE_APPS,
)
def build_app_list(app_list):
"""Create response payload for app list."""
app_list = [
{"app_id": app_id, "title": app_name, "type": MEDIA_TYPE_APP}
for app_name, app_id in app_list.items()
]
return BrowseMedia(
media_class=MEDIA_CLASS_DIRECTORY,
media_content_id=None,
media_content_type=MEDIA_TYPE_APPS,
title="Apps",
can_play=True,
can_expand=False,
children=[item_payload(item) for item in app_list],
children_media_class=MEDIA_CLASS_APP,
)
def item_payload(item):
"""
Create response payload for a single media item.
Used by async_browse_media.
"""
return BrowseMedia(
title=item["title"],
media_class=MEDIA_CLASS_APP,
media_content_type=MEDIA_TYPE_APP,
media_content_id=item["app_id"],
can_play=False,
can_expand=False,
)
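# Illustrative only (app names and ids are made up): build_app_list expects a
# mapping of app name -> app id and returns an "Apps" directory whose children
# are the individual app items.
#
#   root = build_app_list({"YouTube": "837", "Netflix": "12"})
#   root.title                                            # "Apps"
#   [child.media_content_id for child in root.children]   # ["837", "12"]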
|
'''
Grains for junos.
NOTE: this is a little complicated--junos can only be accessed via salt-proxy-minion.
Thus, it makes sense to get some grains from the minion (PYTHONPATH), but not
others (ip_interfaces).
'''
from __future__ import absolute_import
import logging
__proxyenabled__ = ['junos']
__virtualname__ = 'junos'
log = logging.getLogger(__name__)
def __virtual__():
if 'proxy' not in __opts__:
return False
else:
return __virtualname__
def _remove_complex_types(dictionary):
'''
    Some values come back as complex types that are not serializable
    by msgpack. Kill those.
'''
for k, v in dictionary.iteritems():
if isinstance(v, dict):
dictionary[k] = _remove_complex_types(v)
elif hasattr(v, 'to_eng_string'):
dictionary[k] = v.to_eng_string()
return dictionary
def defaults():
return {'os': 'proxy', 'kernel': 'unknown', 'osrelease': 'proxy'}
def facts():
if 'junos.facts' in __proxy__:
facts = __proxy__['junos.facts']()
facts['version_info'] = 'override'
return facts
return None
def os_family():
return {'os_family': 'junos'}
|
"""Generated test for checking pynos based actions
"""
import xml.etree.ElementTree as ET
from st2tests.base import BaseActionTestCase
from interface_private_vlan_type import interface_private_vlan_type
__all__ = [
'TestInterfacePrivateVlanType'
]
class MockCallback(object): # pylint:disable=too-few-public-methods
"""Class to hold mock callback and result
"""
returned_data = None
def callback(self, call, **kwargs): # pylint:disable=unused-argument
"""Mock callback method
"""
xml_result = ET.tostring(call)
self.returned_data = xml_result
class TestInterfacePrivateVlanType(BaseActionTestCase):
"""Test holder class
"""
action_cls = interface_private_vlan_type
def test_action(self):
"""Generated test to check action
"""
action = self.get_action_instance()
mock_callback = MockCallback()
kwargs = {
'username': '',
'name': '10',
'pvlan_type': 'primary',
'ip': '',
'password': '',
'port': '22',
'test': True,
'callback': mock_callback.callback
}
action.run(**kwargs)
expected_xml = (
'<config><interface-vlan xmlns="urn:brocade.com:mgmt:brocade-inter'
'face"><interface><vlan><name>10</name><private-vlan><pvlan-type-l'
'eaf>primary</pvlan-type-leaf></private-vlan></vlan></interface></'
'interface-vlan></config>'
)
        self.assertEqual(expected_xml, mock_callback.returned_data)
|
import webob.exc
from nova.api.openstack import extensions
from nova.api.openstack import wsgi
from nova.api.openstack import xmlutil
from nova import compute
from nova import exception
sa_nsmap = {None: wsgi.XMLNS_V11}
authorize = extensions.extension_authorizer('compute', 'server_action_list')
class ServerActionsTemplate(xmlutil.TemplateBuilder):
def construct(self):
root = xmlutil.TemplateElement('actions')
elem = xmlutil.SubTemplateElement(root, 'action', selector='actions')
elem.set('created_at')
elem.set('action')
elem.set('error')
return xmlutil.MasterTemplate(root, 1, nsmap=sa_nsmap)
class ServerActionListController(object):
@wsgi.serializers(xml=ServerActionsTemplate)
def index(self, req, server_id):
context = req.environ["nova.context"]
authorize(context)
compute_api = compute.API()
try:
instance = compute_api.get(context, server_id)
except exception.NotFound:
raise webob.exc.HTTPNotFound(_("Instance not found"))
items = compute_api.get_actions(context, instance)
def _format_item(item):
return {
'created_at': str(item['created_at']),
'action': item['action'],
'error': item['error'],
}
return {'actions': [_format_item(item) for item in items]}
class Server_action_list(extensions.ExtensionDescriptor):
"""Allow Admins to view pending server actions"""
name = "ServerActionList"
alias = "os-server-action-list"
namespace = "http://docs.openstack.org/compute/ext/" \
"server-actions-list/api/v1.1"
updated = "2011-12-21T00:00:00+00:00"
def get_resources(self):
parent_def = {'member_name': 'server', 'collection_name': 'servers'}
#NOTE(bcwaldon): This should be prefixed with 'os-'
ext = extensions.ResourceExtension('actions',
ServerActionListController(),
parent=parent_def)
return [ext]
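# Illustrative response shape produced by ServerActionListController.index
# (timestamp and values below are made up):
#
#   {'actions': [{'created_at': '2011-12-21 00:00:00',
#                 'action': 'reboot',
#                 'error': None}]}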
|
import sys
import argparse
import json
import datetime
import logging
import logging.handlers
import time
import re
from opserver_util import OpServerUtils
from sandesh_common.vns.ttypes import Module
from sandesh_common.vns.constants import ModuleNames, NodeTypeNames
import sandesh.viz.constants as VizConstants
from pysandesh.gen_py.sandesh.ttypes import SandeshType, SandeshLevel
from pysandesh.sandesh_logger import SandeshLogger
from pysandesh.util import UTCTimestampUsec
import commands
import ast
OBJECT_TYPE_LIST = [table_info.log_query_name for table_info in \
VizConstants._OBJECT_TABLES.values()]
OBJECT_TABLE_MAP = dict((table_info.log_query_name, table_name) for \
(table_name, table_info) in VizConstants._OBJECT_TABLES.items())
output_file_handle = None
class LogQuerier(object):
def __init__(self):
self._args = None
self._slogger = None
# end __init__
# Public functions
def parse_args(self):
"""
Eg. python log.py --analytics-api-ip 127.0.0.1
--analytics-api-port 8081
--source 127.0.0.1
--node-type Control
--module bgp | cfgm | vnswad
--instance-id 0
--message-type UveVirtualMachineConfigTrace
--category xmpp
--level SYS_INFO | SYS_ERROR
--object-type virtual-network | virtual-machine
--object-id name
--object-select-field ObjectLog | SystemLog
--reverse
--verbose
--raw
--trace BgpPeerTraceBuf
[--start-time now-10m --end-time now] | --last 10m
--send-syslog
--syslog-server 127.0.0.1
--syslog-port 514
                          --keywords comma,separated,list
"""
defaults = {
'analytics_api_ip': '127.0.0.1',
'analytics_api_port': '8081',
}
parser = argparse.ArgumentParser(
formatter_class=argparse.ArgumentDefaultsHelpFormatter)
parser.set_defaults(**defaults)
parser.add_argument("--analytics-api-ip", help="IP address of Analytics API Server")
parser.add_argument("--analytics-api-port", help="Port of Analytics API Server")
parser.add_argument(
"--start-time", help="Logs start time (format now-10m, now-1h)")
parser.add_argument("--end-time", help="Logs end time")
parser.add_argument(
"--last", help="Logs from last time period (format 10m, 1d)")
parser.add_argument("--source", help="Logs from source address")
parser.add_argument("--node-type", help="Logs from node type",
choices=NodeTypeNames.values())
parser.add_argument(
"--module", help="Logs from module", choices=ModuleNames.values())
parser.add_argument("--instance-id", help="Logs from module instance")
parser.add_argument("--category", help="Logs of category")
parser.add_argument("--level", help="Logs of level")
parser.add_argument("--message-type", help="Logs of message type")
parser.add_argument("--reverse", action="store_true",
help="Show logs in reverse chronological order")
parser.add_argument(
"--verbose", action="store_true", help="Show internal information")
parser.add_argument(
"--raw", action="store_true", help="Show raw XML messages")
parser.add_argument(
"--object-type", help="Logs of object type", choices=OBJECT_TYPE_LIST)
parser.add_argument("--object-values", action="store_true",
help="Display list of object names")
parser.add_argument("--object-id", help="Logs of object name")
parser.add_argument(
"--object-select-field", help="Select field to filter the log",
choices=[VizConstants.OBJECT_LOG, VizConstants.SYSTEM_LOG])
parser.add_argument("--trace", help="Dump trace buffer")
parser.add_argument("--limit", help="Limit the number of messages")
parser.add_argument("--send-syslog", action="store_true",
help="Send syslog to specified server and port")
parser.add_argument("--syslog-server",
help="IP address of syslog server", default='localhost')
parser.add_argument("--syslog-port", help="Port to send syslog to",
type=int, default=514)
parser.add_argument("--tail","-f", help="Tail logs from now", action="store_true")
parser.add_argument("--keywords", help="comma seperated list of keywords")
parser.add_argument("--message-types", \
help="Display list of message type", action="store_true")
parser.add_argument("--output-file", "-o", help="redirect output to file")
parser.add_argument("--json", help="Dump output as json", action="store_true")
parser.add_argument("--all", action="store_true", help=argparse.SUPPRESS)
self._args = parser.parse_args()
return 0
# end parse_args
# Public functions
def query(self):
if self._args.tail and (self._args.send_syslog or self._args.reverse or
self._args.start_time or self._args.end_time):
invalid_combination = " --tail"
if self._args.send_syslog:
invalid_combination += ", --send-syslog"
if self._args.reverse:
invalid_combination += ", --reverse"
if self._args.start_time:
invalid_combination += ", --start-time"
if self._args.end_time:
invalid_combination += ", --end-time"
print "Combination of options" + invalid_combination + " are not valid."
return -1
global output_file_handle
if self._args.output_file is not None:
if output_file_handle is None:
#Open the file for writing
try:
if self._args.tail:
output_file_handle = open(self._args.output_file, "a")
else:
output_file_handle = open(self._args.output_file, "w")
except Exception as e:
print e
print "Exception occured when creating/opening file %s" % \
self._args.output_file
return -1
start_time, end_time = self._start_time, self._end_time
if self._args.message_types is True:
command_str = ("contrail-stats --table FieldNames.fields" +
" --where name=MessageTable:Messagetype --select name fields.value" +
" --start-time " + str(start_time) +
" --end-time " + str(end_time) +
" --analytics-api-ip " + str(self._args.analytics_api_ip) +
" --analytics-api-port " + str(self._args.analytics_api_port))
res = commands.getoutput(command_str)
res = res.splitlines()
res = res[1:]
for r in res:
print ast.literal_eval(r)['fields.value']
return None
messages_url = OpServerUtils.opserver_query_url(
self._args.analytics_api_ip,
self._args.analytics_api_port)
where_msg = []
where_obj = []
and_filter = []
or_filter = []
if self._args.source is not None:
source_match = OpServerUtils.Match(name=VizConstants.SOURCE,
value=self._args.source,
op=OpServerUtils.MatchOp.EQUAL)
where_msg.append(source_match.__dict__)
if self._args.module is not None:
module_match = OpServerUtils.Match(name=VizConstants.MODULE,
value=self._args.module,
op=OpServerUtils.MatchOp.EQUAL)
where_msg.append(module_match.__dict__)
if self._args.category is not None:
category_match = OpServerUtils.Match(
name=VizConstants.CATEGORY,
value=self._args.category,
op=OpServerUtils.MatchOp.EQUAL)
where_msg.append(category_match.__dict__)
if self._args.message_type is not None:
message_type_match = OpServerUtils.Match(
name=VizConstants.MESSAGE_TYPE,
value=self._args.message_type,
op=OpServerUtils.MatchOp.EQUAL)
where_msg.append(message_type_match.__dict__)
if self._args.level is not None:
level_match = OpServerUtils.Match(
name=VizConstants.LEVEL,
value=SandeshLevel._NAMES_TO_VALUES[self._args.level],
op=OpServerUtils.MatchOp.LEQ)
and_filter.append(level_match.__dict__)
if self._args.node_type is not None:
node_type_match = OpServerUtils.Match(
name=VizConstants.NODE_TYPE,
value=self._args.node_type,
op=OpServerUtils.MatchOp.EQUAL)
and_filter.append(node_type_match.__dict__)
if self._args.instance_id is not None:
instance_id_match = OpServerUtils.Match(
name=VizConstants.INSTANCE_ID,
value=self._args.instance_id,
op=OpServerUtils.MatchOp.EQUAL)
and_filter.append(instance_id_match.__dict__)
# Object logs :
# --object-type <> : All logs for the particular object type
# --object-type <> --object-values : Object-id values for the particular
        # object type
# --object-type <> --object-id <> : All logs matching object-id for
# particular object type
if (self._args.object_type is not None or
self._args.object_id is not None or
self._args.object_select_field is not None or
self._args.object_values is True):
# Validate object-type
if self._args.object_type is not None:
if self._args.object_type in OBJECT_TYPE_LIST:
if self._args.object_type in OBJECT_TABLE_MAP:
table = OBJECT_TABLE_MAP[self._args.object_type]
else:
print 'Table not found for object-type [%s]' % \
(self._args.object_type)
return -1
else:
print 'Unknown object-type [%s]' % (self._args.object_type)
return -1
else:
print 'Object-type required for query'
return -1
# Validate object-id and object-values
if self._args.object_id is not None and \
self._args.object_values is False:
object_id = self._args.object_id
if object_id.endswith("*"):
id_match = OpServerUtils.Match(
name=OpServerUtils.OBJECT_ID,
value=object_id[:-1],
op=OpServerUtils.MatchOp.PREFIX)
else:
id_match = OpServerUtils.Match(
name=OpServerUtils.OBJECT_ID,
value=object_id,
op=OpServerUtils.MatchOp.EQUAL)
where_obj.append(id_match.__dict__)
elif self._args.object_id is not None and \
self._args.object_values is True:
print 'Please specify either object-id or object-values but not both'
return -1
if self._args.object_values is False:
if self._args.object_select_field is not None:
obj_sel_field = self._args.object_select_field
if not isinstance(self._args.object_select_field, list):
obj_sel_field = [self._args.object_select_field]
                    if VizConstants.OBJECT_LOG in obj_sel_field or \
                            VizConstants.SYSTEM_LOG in obj_sel_field:
self._args.object_select_field = obj_sel_field
else:
print 'Invalid object-select-field. '\
'Valid values are "%s" or "%s"' \
% (VizConstants.OBJECT_LOG,
VizConstants.SYSTEM_LOG)
return -1
else:
self._args.object_select_field = obj_sel_field = [
VizConstants.OBJECT_LOG, VizConstants.SYSTEM_LOG]
select_list = [
VizConstants.TIMESTAMP,
VizConstants.SOURCE,
VizConstants.MODULE,
VizConstants.MESSAGE_TYPE,
] + obj_sel_field
else:
if self._args.object_select_field:
print 'Please specify either object-id with ' + \
'object-select-field or only object-values'
return -1
if len(where_msg):
options = [where['name'] for where in where_msg]
print 'Invalid/unsupported where-clause options %s for object-values query' % str(options)
return -1
select_list = [
OpServerUtils.OBJECT_ID
]
if len(where_obj) or len(where_msg):
where = [where_obj + where_msg]
else:
where = None
elif self._args.trace is not None:
table = VizConstants.COLLECTOR_GLOBAL_TABLE
if self._args.source is None:
print 'Source is required for trace buffer dump'
return -1
if self._args.module is None:
print 'Module is required for trace buffer dump'
return -1
trace_buf_match = OpServerUtils.Match(
name=VizConstants.CATEGORY,
value=self._args.trace,
op=OpServerUtils.MatchOp.EQUAL)
where_msg.append(trace_buf_match.__dict__)
where = [where_msg]
select_list = [
VizConstants.TIMESTAMP,
VizConstants.MESSAGE_TYPE,
VizConstants.SEQUENCE_NUM,
VizConstants.DATA,
VizConstants.SANDESH_TYPE
]
sandesh_type_filter = OpServerUtils.Match(
name=VizConstants.SANDESH_TYPE,
value=str(
SandeshType.TRACE),
op=OpServerUtils.MatchOp.EQUAL)
and_filter.append(sandesh_type_filter.__dict__)
else:
# Message Table Query
table = VizConstants.COLLECTOR_GLOBAL_TABLE
if len(where_msg):
where = [where_msg]
else:
where = None
select_list = [
VizConstants.TIMESTAMP,
VizConstants.SOURCE,
VizConstants.MODULE,
VizConstants.CATEGORY,
VizConstants.MESSAGE_TYPE,
VizConstants.SEQUENCE_NUM,
VizConstants.DATA,
VizConstants.SANDESH_TYPE,
VizConstants.LEVEL,
VizConstants.NODE_TYPE,
VizConstants.INSTANCE_ID,
]
filter = None
if len(or_filter):
filter = [and_filter+[filt] for filt in or_filter]
elif len(and_filter):
filter = [and_filter]
if self._args.keywords is not None:
            p = re.compile(r'\s*,\s*|\s+')
if where is None:
where = [[]]
for kwd in p.split(self._args.keywords):
message_type_match = OpServerUtils.Match(
name=VizConstants.KEYWORD,
value=kwd,
op=OpServerUtils.MatchOp.EQUAL)
for wc in where:
wc.append(message_type_match.__dict__)
# Add sort by timestamp for non object value queries
sort_op = None
sort_fields = None
if self._args.object_values is False:
if self._args.reverse:
sort_op = OpServerUtils.SortOp.DESCENDING
else:
sort_op = OpServerUtils.SortOp.ASCENDING
sort_fields = [VizConstants.TIMESTAMP]
if self._args.limit:
limit = int(self._args.limit)
else:
limit = None
messages_query = OpServerUtils.Query(table,
start_time=start_time,
end_time=end_time,
select_fields=select_list,
where=where,
filter=filter,
sort=sort_op,
sort_fields=sort_fields,
limit=limit)
if self._args.verbose:
print 'Performing query: {0}'.format(
json.dumps(messages_query.__dict__))
resp = OpServerUtils.post_url_http(
messages_url, json.dumps(messages_query.__dict__))
result = {}
if resp is not None:
resp = json.loads(resp)
qid = resp['href'].rsplit('/', 1)[1]
result = OpServerUtils.get_query_result(
self._args.analytics_api_ip, self._args.analytics_api_port, qid)
return result
# end query
def _output(self, log_str, sandesh_level):
if self._args.json:
if isinstance(log_str,dict):
#convert to json and dump
log_str=json.dumps(log_str)
if self._args.output_file is not None:
#Append to a file specified
try:
output_file_handle.write(log_str)
output_file_handle.write("\n")
return
except Exception as e:
print e
print "Exception occured when writing file %s" % \
self._args.output_file
return -1
if self._args.send_syslog:
syslog_level = SandeshLogger._SANDESH_LEVEL_TO_LOGGER_LEVEL[
sandesh_level]
self._logger.log(syslog_level, log_str)
else:
print log_str
#end _output
def display(self, result):
if result == [] or result is None:
return
messages_dict_list = result
# Setup logger and syslog handler
if self._args.send_syslog:
logger = logging.getLogger()
logger.setLevel(logging.DEBUG)
syslog_handler = logging.handlers.SysLogHandler(
address = (self._args.syslog_server, self._args.syslog_port))
contrail_formatter = logging.Formatter('contrail: %(message)s')
syslog_handler.setFormatter(contrail_formatter)
logger.addHandler(syslog_handler)
self._logger = logger
for messages_dict in messages_dict_list:
if VizConstants.TIMESTAMP in messages_dict:
message_dt = datetime.datetime.fromtimestamp(
int(messages_dict[VizConstants.TIMESTAMP]) /
OpServerUtils.USECS_IN_SEC)
message_dt += datetime.timedelta(
microseconds=
(int(messages_dict[VizConstants.TIMESTAMP]) %
OpServerUtils.USECS_IN_SEC))
message_ts = message_dt.strftime(OpServerUtils.TIME_FORMAT_STR)
else:
message_ts = 'Time: NA'
if VizConstants.SOURCE in messages_dict:
source = messages_dict[VizConstants.SOURCE]
else:
source = 'Source: NA'
if VizConstants.NODE_TYPE in messages_dict:
node_type = messages_dict[VizConstants.NODE_TYPE]
else:
node_type = ''
if VizConstants.MODULE in messages_dict:
module = messages_dict[VizConstants.MODULE]
else:
module = 'Module: NA'
if VizConstants.INSTANCE_ID in messages_dict:
instance_id = messages_dict[VizConstants.INSTANCE_ID]
else:
instance_id = ''
if VizConstants.MESSAGE_TYPE in messages_dict:
message_type = messages_dict[VizConstants.MESSAGE_TYPE]
else:
message_type = 'Message Type: NA'
if VizConstants.SANDESH_TYPE in messages_dict:
sandesh_type = messages_dict[VizConstants.SANDESH_TYPE]
else:
sandesh_type = SandeshType.INVALID
# By default SYS_DEBUG
sandesh_level = SandeshLevel.SYS_DEBUG
if self._args.object_type is None:
if VizConstants.CATEGORY in messages_dict:
category = messages_dict[VizConstants.CATEGORY]
else:
category = 'Category: NA'
if VizConstants.LEVEL in messages_dict:
sandesh_level = messages_dict[VizConstants.LEVEL]
level = SandeshLevel._VALUES_TO_NAMES[sandesh_level]
else:
level = 'Level: NA'
if VizConstants.SEQUENCE_NUM in messages_dict:
seq_num = messages_dict[VizConstants.SEQUENCE_NUM]
else:
seq_num = 'Sequence Number: NA'
if VizConstants.DATA in messages_dict:
# Convert XML data to dict
if self._args.raw:
data_str = messages_dict[VizConstants.DATA]
else:
OpServerUtils.messages_xml_data_to_dict(
messages_dict, VizConstants.DATA)
if isinstance(messages_dict[VizConstants.DATA], dict):
data_dict = messages_dict[VizConstants.DATA]
data_str = OpServerUtils.messages_data_dict_to_str(
data_dict, message_type, sandesh_type)
else:
data_str = messages_dict[VizConstants.DATA]
else:
data_str = 'Data not present'
if self._args.json:
self._output(messages_dict, sandesh_level)
else:
if self._args.trace is not None:
trace_str = '{0} {1}:{2} {3}'.format(
message_ts, message_type, seq_num, data_str)
self._output(trace_str, sandesh_level)
else:
log_str = \
'{0} {1} [{2}:{3}:{4}:{5}][{6}] : {7}:{8} {9}'.format(
message_ts, source, node_type, module, instance_id,
category, level, message_type, seq_num, data_str)
self._output(log_str, sandesh_level)
else:
if self._args.object_values is True:
if OpServerUtils.OBJECT_ID in messages_dict:
obj_str = messages_dict[OpServerUtils.OBJECT_ID]
print obj_str
continue
for obj_sel_field in self._args.object_select_field:
if obj_sel_field in messages_dict:
if self._args.raw:
data_str = messages_dict[obj_sel_field]
else:
# Convert XML data to dict
OpServerUtils.messages_xml_data_to_dict(
messages_dict, obj_sel_field)
if isinstance(messages_dict[obj_sel_field], dict):
data_dict = messages_dict[obj_sel_field]
data_str =\
OpServerUtils.messages_data_dict_to_str(
data_dict, message_type,
sandesh_type)
else:
data_str = messages_dict[obj_sel_field]
if data_str:
obj_str = '{0} {1} [{2}:{3}:{4}] : {5}: {6}'.format(
message_ts, source, node_type, module,
instance_id, message_type, data_str)
if self._args.json:
self._output(messages_dict[obj_sel_field], sandesh_level)
else:
self._output(obj_str, sandesh_level)
# end display
def main():
try:
querier = LogQuerier()
if querier.parse_args() != 0:
return
if querier._args.tail:
start_time = UTCTimestampUsec() - 10*pow(10,6)
while True:
querier._start_time = start_time
querier._end_time = UTCTimestampUsec()
start_time = querier._end_time + 1
time.sleep(3)
result = querier.query()
if result == -1:
return
querier.display(result)
else:
start_time = querier._args.start_time
end_time = querier._args.end_time
if not querier._args.start_time:
start_time = "now-10m"
if not querier._args.end_time:
end_time = "now"
try:
querier._start_time, querier._end_time = \
OpServerUtils.parse_start_end_time(
start_time = start_time,
end_time = end_time,
last = querier._args.last)
except:
return -1
result = querier.query()
if result == -1:
return
querier.display(result)
except KeyboardInterrupt:
return
if __name__ == "__main__":
main()
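# Example invocations (addresses are placeholders; option values mirror the
# parse_args docstring above):
#
#   python log.py --last 10m --level SYS_ERROR
#   python log.py --object-type virtual-network --object-values
#   python log.py --source 127.0.0.1 --module bgp --trace BgpPeerTraceBuf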
|
A = 1
def f(a):
print("a={}".format(a))
|
from webob import Response as WebobResponse
from functools import update_wrapper
from zope.interface import Interface
from pyramid.interfaces import (
IResponse,
ITraverser,
IResourceURL,
)
from pyramid.config.util import (
action_method,
takes_one_arg,
)
class AdaptersConfiguratorMixin(object):
@action_method
def add_subscriber(self, subscriber, iface=None, **predicates):
"""Add an event :term:`subscriber` for the event stream
implied by the supplied ``iface`` interface.
The ``subscriber`` argument represents a callable object (or a
:term:`dotted Python name` which identifies a callable); it will be
called with a single object ``event`` whenever :app:`Pyramid` emits
an :term:`event` associated with the ``iface``, which may be an
:term:`interface` or a class or a :term:`dotted Python name` to a
global object representing an interface or a class.
Using the default ``iface`` value, ``None`` will cause the subscriber
to be registered for all event types. See :ref:`events_chapter` for
more information about events and subscribers.
Any number of predicate keyword arguments may be passed in
``**predicates``. Each predicate named will narrow the set of
circumstances in which the subscriber will be invoked. Each named
predicate must have been registered via
:meth:`pyramid.config.Configurator.add_subscriber_predicate` before it
can be used. See :ref:`subscriber_predicates` for more information.
.. versionadded:: 1.4
The ``**predicates`` argument.
"""
dotted = self.maybe_dotted
subscriber, iface = dotted(subscriber), dotted(iface)
if iface is None:
iface = (Interface,)
if not isinstance(iface, (tuple, list)):
iface = (iface,)
def register():
predlist = self.get_predlist('subscriber')
order, preds, phash = predlist.make(self, **predicates)
derived_predicates = [ self._derive_predicate(p) for p in preds ]
derived_subscriber = self._derive_subscriber(
subscriber,
derived_predicates,
)
intr.update(
{'phash':phash,
'order':order,
'predicates':preds,
'derived_predicates':derived_predicates,
'derived_subscriber':derived_subscriber,
}
)
self.registry.registerHandler(derived_subscriber, iface)
intr = self.introspectable(
'subscribers',
id(subscriber),
self.object_description(subscriber),
'subscriber'
)
intr['subscriber'] = subscriber
intr['interfaces'] = iface
self.action(None, register, introspectables=(intr,))
return subscriber
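    # Illustrative usage (the handler below is made up): subscribe to every
    # NewRequest event emitted by the application.
    #
    #   from pyramid.events import NewRequest
    #
    #   def log_request(event):
    #       print(event.request.path)
    #
    #   config.add_subscriber(log_request, NewRequest)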
def _derive_predicate(self, predicate):
derived_predicate = predicate
if eventonly(predicate):
def derived_predicate(*arg):
return predicate(arg[0])
# seems pointless to try to fix __doc__, __module__, etc as
# predicate will invariably be an instance
return derived_predicate
def _derive_subscriber(self, subscriber, predicates):
derived_subscriber = subscriber
if eventonly(subscriber):
def derived_subscriber(*arg):
return subscriber(arg[0])
if hasattr(subscriber, '__name__'):
update_wrapper(derived_subscriber, subscriber)
if not predicates:
return derived_subscriber
def subscriber_wrapper(*arg):
# We need to accept *arg and pass it along because zope subscribers
# are designed awkwardly. Notification via
# registry.adapter.subscribers will always call an associated
# subscriber with all of the objects involved in the subscription
# lookup, despite the fact that the event sender always has the
# option to attach those objects to the event object itself, and
# almost always does.
#
# The "eventonly" jazz sprinkled in this function and related
# functions allows users to define subscribers and predicates which
# accept only an event argument without needing to accept the rest
# of the adaptation arguments. Had I been smart enough early on to
# use .subscriptions to find the subscriber functions in order to
# call them manually with a single "event" argument instead of
# relying on .subscribers to both find and call them implicitly
# with all args, the eventonly hack would not have been required.
# At this point, though, using .subscriptions and manual execution
# is not possible without badly breaking backwards compatibility.
if all((predicate(*arg) for predicate in predicates)):
return derived_subscriber(*arg)
if hasattr(subscriber, '__name__'):
update_wrapper(subscriber_wrapper, subscriber)
return subscriber_wrapper
@action_method
def add_subscriber_predicate(self, name, factory, weighs_more_than=None,
weighs_less_than=None):
"""
.. versionadded:: 1.4
Adds a subscriber predicate factory. The associated subscriber
predicate can later be named as a keyword argument to
:meth:`pyramid.config.Configurator.add_subscriber` in the
        ``**predicates`` anonymous keyword argument dictionary.
``name`` should be the name of the predicate. It must be a valid
Python identifier (it will be used as a ``**predicates`` keyword
argument to :meth:`~pyramid.config.Configurator.add_subscriber`).
``factory`` should be a :term:`predicate factory` or :term:`dotted
Python name` which refers to a predicate factory.
See :ref:`subscriber_predicates` for more information.
"""
self._add_predicate(
'subscriber',
name,
factory,
weighs_more_than=weighs_more_than,
weighs_less_than=weighs_less_than
)
@action_method
def add_response_adapter(self, adapter, type_or_iface):
""" When an object of type (or interface) ``type_or_iface`` is
returned from a view callable, Pyramid will use the adapter
``adapter`` to convert it into an object which implements the
:class:`pyramid.interfaces.IResponse` interface. If ``adapter`` is
None, an object returned of type (or interface) ``type_or_iface``
will itself be used as a response object.
``adapter`` and ``type_or_interface`` may be Python objects or
strings representing dotted names to importable Python global
objects.
See :ref:`using_iresponse` for more information."""
adapter = self.maybe_dotted(adapter)
type_or_iface = self.maybe_dotted(type_or_iface)
def register():
reg = self.registry
if adapter is None:
reg.registerSelfAdapter((type_or_iface,), IResponse)
else:
reg.registerAdapter(adapter, (type_or_iface,), IResponse)
discriminator = (IResponse, type_or_iface)
intr = self.introspectable(
'response adapters',
discriminator,
self.object_description(adapter),
'response adapter')
intr['adapter'] = adapter
intr['type'] = type_or_iface
self.action(discriminator, register, introspectables=(intr,))
def add_default_response_adapters(self):
# cope with WebOb response objects that aren't decorated with IResponse
self.add_response_adapter(None, WebobResponse)
@action_method
def add_traverser(self, adapter, iface=None):
"""
The superdefault :term:`traversal` algorithm that :app:`Pyramid` uses
is explained in :ref:`traversal_algorithm`. Though it is rarely
necessary, this default algorithm can be swapped out selectively for
a different traversal pattern via configuration. The section
entitled :ref:`changing_the_traverser` details how to create a
traverser class.
For example, to override the superdefault traverser used by Pyramid,
you might do something like this:
.. code-block:: python
from myapp.traversal import MyCustomTraverser
config.add_traverser(MyCustomTraverser)
This would cause the Pyramid superdefault traverser to never be used;
instead all traversal would be done using your ``MyCustomTraverser``
class, no matter which object was returned by the :term:`root
factory` of this application. Note that we passed no arguments to
the ``iface`` keyword parameter. The default value of ``iface``,
        ``None``, represents that the registered traverser should be used when
no other more specific traverser is available for the object returned
by the root factory.
However, more than one traversal algorithm can be active at the same
time. The traverser used can depend on the result of the :term:`root
factory`. For instance, if your root factory returns more than one
type of object conditionally, you could claim that an alternate
traverser adapter should be used against one particular class or
interface returned by that root factory. When the root factory
returned an object that implemented that class or interface, a custom
traverser would be used. Otherwise, the default traverser would be
used. The ``iface`` argument represents the class of the object that
the root factory might return or an :term:`interface` that the object
might implement.
To use a particular traverser only when the root factory returns a
particular class:
.. code-block:: python
config.add_traverser(MyCustomTraverser, MyRootClass)
When more than one traverser is active, the "most specific" traverser
will be used (the one that matches the class or interface of the
value returned by the root factory most closely).
Note that either ``adapter`` or ``iface`` can be a :term:`dotted
Python name` or a Python object.
See :ref:`changing_the_traverser` for more information.
"""
iface = self.maybe_dotted(iface)
        adapter = self.maybe_dotted(adapter)
def register(iface=iface):
if iface is None:
iface = Interface
self.registry.registerAdapter(adapter, (iface,), ITraverser)
discriminator = ('traverser', iface)
intr = self.introspectable(
'traversers',
discriminator,
'traverser for %r' % iface,
'traverser',
)
intr['adapter'] = adapter
intr['iface'] = iface
self.action(discriminator, register, introspectables=(intr,))
@action_method
def add_resource_url_adapter(self, adapter, resource_iface=None):
"""
.. versionadded:: 1.3
When you add a traverser as described in
:ref:`changing_the_traverser`, it's convenient to continue to use the
:meth:`pyramid.request.Request.resource_url` API. However, since the
way traversal is done may have been modified, the URLs that
``resource_url`` generates by default may be incorrect when resources
are returned by a custom traverser.
If you've added a traverser, you can change how
:meth:`~pyramid.request.Request.resource_url` generates a URL for a
specific type of resource by calling this method.
The ``adapter`` argument represents a class that implements the
:class:`~pyramid.interfaces.IResourceURL` interface. The class
        should accept two arguments in its constructor (the
resource and the request) and the resulting instance should provide
the attributes detailed in that interface (``virtual_path`` and
``physical_path``, in particular).
The ``resource_iface`` argument represents a class or interface that
the resource should possess for this url adapter to be used when
:meth:`pyramid.request.Request.resource_url` looks up a resource url
adapter. If ``resource_iface`` is not passed, or it is passed as
``None``, the url adapter will be used for every type of resource.
See :ref:`changing_resource_url` for more information.
"""
adapter = self.maybe_dotted(adapter)
resource_iface = self.maybe_dotted(resource_iface)
def register(resource_iface=resource_iface):
if resource_iface is None:
resource_iface = Interface
self.registry.registerAdapter(
adapter,
(resource_iface, Interface),
IResourceURL,
)
discriminator = ('resource url adapter', resource_iface)
intr = self.introspectable(
'resource url adapters',
discriminator,
'resource url adapter for resource iface %r' % resource_iface,
'resource url adapter',
)
intr['adapter'] = adapter
intr['resource_iface'] = resource_iface
self.action(discriminator, register, introspectables=(intr,))
def eventonly(callee):
return takes_one_arg(callee, argname='event')
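def _example_response_adapter_usage():
    # Illustrative sketch only, not part of Pyramid itself: it exercises the
    # add_response_adapter() API documented above by teaching Pyramid how to
    # turn a hypothetical ``Greeting`` domain object returned from a view
    # into a Response.  ``Greeting`` and ``greeting_adapter`` are assumptions
    # made just for this example.
    from pyramid.config import Configurator
    from pyramid.response import Response
    class Greeting(object):
        def __init__(self, text):
            self.text = text
    def greeting_adapter(greeting):
        # The adapter receives the view's return value and must produce an
        # object implementing pyramid.interfaces.IResponse.
        return Response(greeting.text)
    config = Configurator()
    config.add_response_adapter(greeting_adapter, Greeting)
    config.commit()
    return config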
|
from django.core.exceptions import ImproperlyConfigured
from django.core.handlers.wsgi import WSGIHandler, WSGIRequest, get_script_name
from django.core.signals import request_finished, request_started
from django.db import close_old_connections, connection
from django.test import (
RequestFactory, SimpleTestCase, TransactionTestCase, override_settings,
)
from django.utils.version import PY37
class HandlerTests(SimpleTestCase):
request_factory = RequestFactory()
def setUp(self):
request_started.disconnect(close_old_connections)
def tearDown(self):
request_started.connect(close_old_connections)
def test_middleware_initialized(self):
handler = WSGIHandler()
self.assertIsNotNone(handler._middleware_chain)
def test_bad_path_info(self):
"""
        A non-UTF-8 path populates PATH_INFO with a URL-encoded path and
produces a 404.
"""
environ = self.request_factory.get('/').environ
environ['PATH_INFO'] = '\xed'
handler = WSGIHandler()
response = handler(environ, lambda *a, **k: None)
# The path of the request will be encoded to '/%ED'.
self.assertEqual(response.status_code, 404)
def test_non_ascii_query_string(self):
"""
Non-ASCII query strings are properly decoded (#20530, #22996).
"""
environ = self.request_factory.get('/').environ
raw_query_strings = [
b'want=caf%C3%A9', # This is the proper way to encode 'café'
b'want=caf\xc3\xa9', # UA forgot to quote bytes
b'want=caf%E9', # UA quoted, but not in UTF-8
b'want=caf\xe9', # UA forgot to convert Latin-1 to UTF-8 and to quote (typical of MSIE)
]
got = []
for raw_query_string in raw_query_strings:
# Simulate http.server.BaseHTTPRequestHandler.parse_request handling of raw request
environ['QUERY_STRING'] = str(raw_query_string, 'iso-8859-1')
request = WSGIRequest(environ)
got.append(request.GET['want'])
# %E9 is converted to the Unicode replacement character by parse_qsl
self.assertEqual(got, ['café', 'café', 'caf\ufffd', 'café'])
def test_non_ascii_cookie(self):
"""Non-ASCII cookies set in JavaScript are properly decoded (#20557)."""
environ = self.request_factory.get('/').environ
raw_cookie = 'want="café"'.encode('utf-8').decode('iso-8859-1')
environ['HTTP_COOKIE'] = raw_cookie
request = WSGIRequest(environ)
self.assertEqual(request.COOKIES['want'], "café")
def test_invalid_unicode_cookie(self):
"""
Invalid cookie content should result in an absent cookie, but not in a
crash while trying to decode it (#23638).
"""
environ = self.request_factory.get('/').environ
environ['HTTP_COOKIE'] = 'x=W\x03c(h]\x8e'
request = WSGIRequest(environ)
# We don't test COOKIES content, as the result might differ between
        # Python versions because parsing invalid content became stricter in
        # recent versions.
self.assertIsInstance(request.COOKIES, dict)
@override_settings(ROOT_URLCONF='handlers.urls')
def test_invalid_multipart_boundary(self):
"""
Invalid boundary string should produce a "Bad Request" response, not a
server error (#23887).
"""
environ = self.request_factory.post('/malformed_post/').environ
environ['CONTENT_TYPE'] = 'multipart/form-data; boundary=WRONG\x07'
handler = WSGIHandler()
response = handler(environ, lambda *a, **k: None)
# Expect "bad request" response
self.assertEqual(response.status_code, 400)
@override_settings(ROOT_URLCONF='handlers.urls', MIDDLEWARE=[])
class TransactionsPerRequestTests(TransactionTestCase):
available_apps = []
def test_no_transaction(self):
response = self.client.get('/in_transaction/')
self.assertContains(response, 'False')
def test_auto_transaction(self):
old_atomic_requests = connection.settings_dict['ATOMIC_REQUESTS']
try:
connection.settings_dict['ATOMIC_REQUESTS'] = True
response = self.client.get('/in_transaction/')
finally:
connection.settings_dict['ATOMIC_REQUESTS'] = old_atomic_requests
self.assertContains(response, 'True')
async def test_auto_transaction_async_view(self):
old_atomic_requests = connection.settings_dict['ATOMIC_REQUESTS']
try:
connection.settings_dict['ATOMIC_REQUESTS'] = True
msg = 'You cannot use ATOMIC_REQUESTS with async views.'
with self.assertRaisesMessage(RuntimeError, msg):
await self.async_client.get('/async_regular/')
finally:
connection.settings_dict['ATOMIC_REQUESTS'] = old_atomic_requests
def test_no_auto_transaction(self):
old_atomic_requests = connection.settings_dict['ATOMIC_REQUESTS']
try:
connection.settings_dict['ATOMIC_REQUESTS'] = True
response = self.client.get('/not_in_transaction/')
finally:
connection.settings_dict['ATOMIC_REQUESTS'] = old_atomic_requests
self.assertContains(response, 'False')
@override_settings(ROOT_URLCONF='handlers.urls')
class SignalsTests(SimpleTestCase):
def setUp(self):
self.signals = []
self.signaled_environ = None
request_started.connect(self.register_started)
request_finished.connect(self.register_finished)
def tearDown(self):
request_started.disconnect(self.register_started)
request_finished.disconnect(self.register_finished)
def register_started(self, **kwargs):
self.signals.append('started')
self.signaled_environ = kwargs.get('environ')
def register_finished(self, **kwargs):
self.signals.append('finished')
def test_request_signals(self):
response = self.client.get('/regular/')
self.assertEqual(self.signals, ['started', 'finished'])
self.assertEqual(response.content, b"regular content")
self.assertEqual(self.signaled_environ, response.wsgi_request.environ)
def test_request_signals_streaming_response(self):
response = self.client.get('/streaming/')
self.assertEqual(self.signals, ['started'])
self.assertEqual(b''.join(response.streaming_content), b"streaming content")
self.assertEqual(self.signals, ['started', 'finished'])
def empty_middleware(get_response):
pass
@override_settings(ROOT_URLCONF='handlers.urls')
class HandlerRequestTests(SimpleTestCase):
request_factory = RequestFactory()
def test_async_view(self):
"""Calling an async view down the normal synchronous path."""
response = self.client.get('/async_regular/')
self.assertEqual(response.status_code, 200)
def test_suspiciousop_in_view_returns_400(self):
response = self.client.get('/suspicious/')
self.assertEqual(response.status_code, 400)
def test_bad_request_in_view_returns_400(self):
response = self.client.get('/bad_request/')
self.assertEqual(response.status_code, 400)
def test_invalid_urls(self):
response = self.client.get('~%A9helloworld')
self.assertEqual(response.status_code, 404)
self.assertEqual(response.context['request_path'], '/~%25A9helloworld' if PY37 else '/%7E%25A9helloworld')
response = self.client.get('d%aao%aaw%aan%aal%aao%aaa%aad%aa/')
self.assertEqual(response.context['request_path'], '/d%25AAo%25AAw%25AAn%25AAl%25AAo%25AAa%25AAd%25AA')
response = self.client.get('/%E2%99%E2%99%A5/')
self.assertEqual(response.context['request_path'], '/%25E2%2599%E2%99%A5/')
response = self.client.get('/%E2%98%8E%E2%A9%E2%99%A5/')
self.assertEqual(response.context['request_path'], '/%E2%98%8E%25E2%25A9%E2%99%A5/')
def test_environ_path_info_type(self):
environ = self.request_factory.get('/%E2%A8%87%87%A5%E2%A8%A0').environ
self.assertIsInstance(environ['PATH_INFO'], str)
def test_handle_accepts_httpstatus_enum_value(self):
def start_response(status, headers):
start_response.status = status
environ = self.request_factory.get('/httpstatus_enum/').environ
WSGIHandler()(environ, start_response)
self.assertEqual(start_response.status, '200 OK')
@override_settings(MIDDLEWARE=['handlers.tests.empty_middleware'])
def test_middleware_returns_none(self):
msg = 'Middleware factory handlers.tests.empty_middleware returned None.'
with self.assertRaisesMessage(ImproperlyConfigured, msg):
self.client.get('/')
def test_no_response(self):
msg = "The view %s didn't return an HttpResponse object. It returned None instead."
tests = (
('/no_response_fbv/', 'handlers.views.no_response'),
('/no_response_cbv/', 'handlers.views.NoResponse.__call__'),
)
for url, view in tests:
with self.subTest(url=url), self.assertRaisesMessage(ValueError, msg % view):
self.client.get(url)
class ScriptNameTests(SimpleTestCase):
def test_get_script_name(self):
# Regression test for #23173
# Test first without PATH_INFO
script_name = get_script_name({'SCRIPT_URL': '/foobar/'})
self.assertEqual(script_name, '/foobar/')
script_name = get_script_name({'SCRIPT_URL': '/foobar/', 'PATH_INFO': '/'})
self.assertEqual(script_name, '/foobar')
def test_get_script_name_double_slashes(self):
"""
        WSGI squashes multiple successive slashes in PATH_INFO; get_script_name
should take that into account when forming SCRIPT_NAME (#17133).
"""
script_name = get_script_name({
'SCRIPT_URL': '/mst/milestones//accounts/login//help',
'PATH_INFO': '/milestones/accounts/login/help',
})
self.assertEqual(script_name, '/mst')
@override_settings(ROOT_URLCONF='handlers.urls')
class AsyncHandlerRequestTests(SimpleTestCase):
"""Async variants of the normal handler request tests."""
async def test_sync_view(self):
"""Calling a sync view down the asynchronous path."""
response = await self.async_client.get('/regular/')
self.assertEqual(response.status_code, 200)
async def test_async_view(self):
"""Calling an async view down the asynchronous path."""
response = await self.async_client.get('/async_regular/')
self.assertEqual(response.status_code, 200)
async def test_suspiciousop_in_view_returns_400(self):
response = await self.async_client.get('/suspicious/')
self.assertEqual(response.status_code, 400)
async def test_bad_request_in_view_returns_400(self):
response = await self.async_client.get('/bad_request/')
self.assertEqual(response.status_code, 400)
async def test_no_response(self):
msg = (
"The view handlers.views.no_response didn't return an "
"HttpResponse object. It returned None instead."
)
with self.assertRaisesMessage(ValueError, msg):
await self.async_client.get('/no_response_fbv/')
async def test_unawaited_response(self):
msg = (
"The view handlers.views.CoroutineClearingView.__call__ didn't"
" return an HttpResponse object. It returned an unawaited"
" coroutine instead. You may need to add an 'await'"
" into your view."
)
with self.assertRaisesMessage(ValueError, msg):
await self.async_client.get('/unawaited/')
|
"""
Common handlers: WebHandler and ApiHandler.
Inherit from these handlers to get torngas features such as middleware support.
"""
import json
import tornado.locale
from tornado.web import RequestHandler
from settings_manager import settings
from mixins.exception import UncaughtExceptionMixin
from exception import HttpBadRequestError, Http404
from torngas.cache import close_caches
class _HandlerPatch(RequestHandler):
def get_user_locale(self):
if settings.TRANSLATIONS_CONF.use_accept_language:
return None
return tornado.locale.get(settings.TRANSLATIONS_CONF.locale_default)
def on_finish(self):
try:
close_caches()
except:
pass
class WebHandler(UncaughtExceptionMixin, _HandlerPatch):
def create_template_loader(self, template_path):
loader = self.application.tmpl
if loader is None:
return super(WebHandler, self).create_template_loader(template_path)
else:
return loader(template_path)
class ApiHandler(UncaughtExceptionMixin, _HandlerPatch):
def get_format(self, params_name="format"):
format = self.get_argument(params_name, None)
if not format:
accept = self.request.headers.get('Accept')
if accept:
if 'javascript' in accept.lower():
format = 'jsonp'
else:
format = 'json'
else:
format = format.lower()
return format or 'json'
def write_api(self, obj=None, nofail=False, ensure_ascii=True, fmt=None):
if not obj:
obj = {}
if not fmt:
fmt = self.get_format()
if fmt == 'json':
self.set_header("Content-Type", "application/json; charset=UTF-8")
self.write(json.dumps(obj, ensure_ascii=ensure_ascii))
elif fmt == 'jsonp':
self.set_header("Content-Type", "application/javascript")
callback = self.get_argument('callback', 'callback')
self.write('%s(%s);' % (callback, json.dumps(obj, ensure_ascii=ensure_ascii)))
elif nofail:
self.write(obj)
else:
            raise HttpBadRequestError('Unknown response format requested: %s' % fmt)
class ErrorHandler(UncaughtExceptionMixin, _HandlerPatch):
def initialize(self, *args, **kwargs):
pass
def prepare(self):
super(ErrorHandler, self).prepare()
raise Http404()
if settings.MIDDLEWARE_CLASSES:
from mixins.miiddleware import MiddlewareHandlerMixin
WebHandler.__bases__ = (MiddlewareHandlerMixin,) + WebHandler.__bases__
ApiHandler.__bases__ = (MiddlewareHandlerMixin,) + ApiHandler.__bases__
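class _ExamplePingHandler(ApiHandler):
    """
    Illustrative sketch only, not part of torngas: a minimal ApiHandler
    subclass that answers with JSON (or JSONP when the client asks for it
    via ?format=jsonp or an Accept: .../javascript header), using the
    get_format()/write_api() helpers defined above.
    """
    def get(self):
        self.write_api({'pong': True})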
|
import csv
import jinja2
from collections import namedtuple
Row = namedtuple('Row', ('id', 'title', 'body'))
with open('submissions.csv') as f:
    f.readline()  # skip the CSV header row
reader = csv.reader(f)
rows = [Row(row[0], row[1].decode('utf-8'), row[2].decode('utf-8')) for row in reader]
template = jinja2.Template(open('submissions.html').read())
rendered = template.render(abstracts=rows)
with open('submissions_rendered.html', 'w') as f:
f.write(rendered.encode('utf-8'))
|
import cgi
import codecs
import logging
import sys
import tempfile
import traceback
import warnings
from asgiref.sync import async_to_sync, sync_to_async
from django import http
from django.conf import settings
from django.core import signals
from django.core.exceptions import RequestDataTooBig
from django.core.handlers import base
from django.http import FileResponse, HttpResponse, HttpResponseServerError
from django.urls import set_script_prefix
from django.utils.functional import cached_property
from channels.exceptions import RequestAborted, RequestTimeout
logger = logging.getLogger("django.request")
class AsgiRequest(http.HttpRequest):
"""
Custom request subclass that decodes from an ASGI-standard request
dict, and wraps request body handling.
"""
# Number of seconds until a Request gives up on trying to read a request
# body and aborts.
body_receive_timeout = 60
def __init__(self, scope, stream):
self.scope = scope
self._content_length = 0
self._post_parse_error = False
self._read_started = False
self.resolver_match = None
self.script_name = self.scope.get("root_path", "")
if self.script_name and scope["path"].startswith(self.script_name):
# TODO: Better is-prefix checking, slash handling?
self.path_info = scope["path"][len(self.script_name) :]
else:
self.path_info = scope["path"]
        # The Django path differs from the ASGI scope path: it must include
        # the script name as a prefix.
if self.script_name:
self.path = "%s/%s" % (
self.script_name.rstrip("/"),
self.path_info.replace("/", "", 1),
)
else:
self.path = scope["path"]
# HTTP basics
self.method = self.scope["method"].upper()
# fix https://github.com/django/channels/issues/622
query_string = self.scope.get("query_string", "")
if isinstance(query_string, bytes):
query_string = query_string.decode("utf-8")
self.META = {
"REQUEST_METHOD": self.method,
"QUERY_STRING": query_string,
"SCRIPT_NAME": self.script_name,
"PATH_INFO": self.path_info,
# Old code will need these for a while
"wsgi.multithread": True,
"wsgi.multiprocess": True,
}
if self.scope.get("client", None):
self.META["REMOTE_ADDR"] = self.scope["client"][0]
self.META["REMOTE_HOST"] = self.META["REMOTE_ADDR"]
self.META["REMOTE_PORT"] = self.scope["client"][1]
if self.scope.get("server", None):
self.META["SERVER_NAME"] = self.scope["server"][0]
self.META["SERVER_PORT"] = str(self.scope["server"][1])
else:
self.META["SERVER_NAME"] = "unknown"
self.META["SERVER_PORT"] = "0"
# Handle old style-headers for a transition period
if "headers" in self.scope and isinstance(self.scope["headers"], dict):
self.scope["headers"] = [
(x.encode("latin1"), y) for x, y in self.scope["headers"].items()
]
# Headers go into META
for name, value in self.scope.get("headers", []):
name = name.decode("latin1")
if name == "content-length":
corrected_name = "CONTENT_LENGTH"
elif name == "content-type":
corrected_name = "CONTENT_TYPE"
else:
corrected_name = "HTTP_%s" % name.upper().replace("-", "_")
            # HTTPbis says only ASCII chars are allowed in headers, but we
# use latin1 just in case
value = value.decode("latin1")
if corrected_name in self.META:
value = self.META[corrected_name] + "," + value
self.META[corrected_name] = value
# Pull out request encoding if we find it
if "CONTENT_TYPE" in self.META:
self.content_type, self.content_params = cgi.parse_header(
self.META["CONTENT_TYPE"]
)
if "charset" in self.content_params:
try:
codecs.lookup(self.content_params["charset"])
except LookupError:
pass
else:
self.encoding = self.content_params["charset"]
else:
self.content_type, self.content_params = "", {}
# Pull out content length info
if self.META.get("CONTENT_LENGTH", None):
try:
self._content_length = int(self.META["CONTENT_LENGTH"])
except (ValueError, TypeError):
pass
# Body handling
self._stream = stream
# Other bits
self.resolver_match = None
@cached_property
def GET(self):
return http.QueryDict(self.scope.get("query_string", ""))
def _get_scheme(self):
return self.scope.get("scheme", "http")
def _get_post(self):
if not hasattr(self, "_post"):
self._load_post_and_files()
return self._post
def _set_post(self, post):
self._post = post
def _get_files(self):
if not hasattr(self, "_files"):
self._load_post_and_files()
return self._files
POST = property(_get_post, _set_post)
FILES = property(_get_files)
@cached_property
def COOKIES(self):
return http.parse_cookie(self.META.get("HTTP_COOKIE", ""))
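def _example_asgi_request():
    # Illustrative sketch only, not part of Channels: builds an AsgiRequest
    # from a minimal ASGI HTTP scope to show how scope fields end up on the
    # request object.  The scope values below are assumptions made just for
    # this example.
    from io import BytesIO
    scope = {
        "type": "http",
        "method": "get",
        "path": "/ping/",
        "root_path": "",
        "query_string": b"q=1",
        "headers": [(b"host", b"testserver")],
    }
    request = AsgiRequest(scope, BytesIO(b""))
    # The method is upper-cased, the path is taken from the scope, and each
    # header is copied into META under an HTTP_* key.
    return request.method, request.path, request.META["HTTP_HOST"]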
class AsgiHandler(base.BaseHandler):
"""
Handler for ASGI requests for the view system only (it will have got here
after traversing the dispatch-by-channel-name system, which decides it's
a HTTP request)
You can also manually construct it with a get_response callback if you
want to run a single Django view yourself. If you do this, though, it will
not do any URL routing or middleware (Channels uses it for staticfiles'
serving code)
"""
request_class = AsgiRequest
# Size to chunk response bodies into for multiple response messages
chunk_size = 512 * 1024
def __init__(self):
msg = (
"Channel's inbuilt http protocol AsgiHandler is deprecated. "
"Use Django's get_asgi_application() instead."
)
warnings.warn(msg, DeprecationWarning, stacklevel=2)
super(AsgiHandler, self).__init__()
self.load_middleware()
async def __call__(self, scope, receive, send):
"""
Async entrypoint - uses the sync_to_async wrapper to run things in a
threadpool.
"""
if scope["type"] != "http":
raise ValueError(
"The AsgiHandler can only handle HTTP connections, not %s"
% scope["type"]
)
# Receive the HTTP request body as a stream object.
try:
body_stream = await self.read_body(receive)
except RequestAborted:
return
# Launch into body handling (and a synchronous subthread).
await self.handle(scope, async_to_sync(send), body_stream)
    async def read_body(self, receive):
        """Reads an HTTP body from an ASGI connection."""
# Use the tempfile that auto rolls-over to a disk file as it fills up.
body_file = tempfile.SpooledTemporaryFile(
max_size=settings.FILE_UPLOAD_MAX_MEMORY_SIZE, mode="w+b"
)
while True:
message = await receive()
if message["type"] == "http.disconnect":
# Early client disconnect.
raise RequestAborted()
# Add a body chunk from the message, if provided.
if "body" in message:
body_file.write(message["body"])
# Quit out if that's the end.
if not message.get("more_body", False):
break
body_file.seek(0)
return body_file
@sync_to_async
def handle(self, scope, send, body):
"""
Synchronous message processing.
"""
# Set script prefix from message root_path, turning None into empty string
script_prefix = scope.get("root_path", "") or ""
if settings.FORCE_SCRIPT_NAME:
script_prefix = settings.FORCE_SCRIPT_NAME
set_script_prefix(script_prefix)
signals.request_started.send(sender=self.__class__, scope=scope)
# Run request through view system
try:
request = self.request_class(scope, body)
except UnicodeDecodeError:
logger.warning(
"Bad Request (UnicodeDecodeError)",
exc_info=sys.exc_info(),
extra={"status_code": 400},
)
response = http.HttpResponseBadRequest()
except RequestTimeout:
# Parsing the request failed, so the response is a Request Timeout error
response = HttpResponse("408 Request Timeout (upload too slow)", status=408)
except RequestAborted:
# Client closed connection on us mid request. Abort!
return
except RequestDataTooBig:
response = HttpResponse("413 Payload too large", status=413)
else:
response = self.get_response(request)
# Fix chunk size on file responses
if isinstance(response, FileResponse):
response.block_size = 1024 * 512
# Transform response into messages, which we yield back to caller
for response_message in self.encode_response(response):
send(response_message)
# Close the response now we're done with it
response.close()
def handle_uncaught_exception(self, request, resolver, exc_info):
"""
Last-chance handler for exceptions.
"""
# There's no WSGI server to catch the exception further up if this fails,
# so translate it into a plain text response.
try:
return super(AsgiHandler, self).handle_uncaught_exception(
request, resolver, exc_info
)
except Exception:
return HttpResponseServerError(
traceback.format_exc() if settings.DEBUG else "Internal Server Error",
content_type="text/plain",
)
def load_middleware(self):
"""
Loads the Django middleware chain and caches it on the class.
"""
# Because we create an AsgiHandler on every HTTP request
# we need to preserve the Django middleware chain once we load it.
if (
hasattr(self.__class__, "_middleware_chain")
and self.__class__._middleware_chain
):
self._middleware_chain = self.__class__._middleware_chain
self._view_middleware = self.__class__._view_middleware
self._template_response_middleware = (
self.__class__._template_response_middleware
)
self._exception_middleware = self.__class__._exception_middleware
else:
super(AsgiHandler, self).load_middleware()
self.__class__._middleware_chain = self._middleware_chain
self.__class__._view_middleware = self._view_middleware
self.__class__._template_response_middleware = (
self._template_response_middleware
)
self.__class__._exception_middleware = self._exception_middleware
@classmethod
def encode_response(cls, response):
"""
Encodes a Django HTTP response into ASGI http.response message(s).
"""
# Collect cookies into headers.
# Note that we have to preserve header case as there are some non-RFC
# compliant clients that want things like Content-Type correct. Ugh.
response_headers = []
for header, value in response.items():
if isinstance(header, str):
header = header.encode("latin")
if isinstance(value, str):
value = value.encode("latin1")
response_headers.append((bytes(header), bytes(value)))
for c in response.cookies.values():
response_headers.append(
(b"Set-Cookie", c.output(header="").encode("latin1").strip())
)
# Make initial response message
yield {
"type": "http.response.start",
"status": response.status_code,
"headers": response_headers,
}
# Streaming responses need to be pinned to their iterator
if response.streaming:
# Access `__iter__` and not `streaming_content` directly in case
# it has been overridden in a subclass.
for part in response:
for chunk, _ in cls.chunk_bytes(part):
yield {
"type": "http.response.body",
"body": chunk,
# We ignore "more" as there may be more parts; instead,
# we use an empty final closing message with False.
"more_body": True,
}
# Final closing message
yield {"type": "http.response.body"}
# Other responses just need chunking
else:
# Yield chunks of response
for chunk, last in cls.chunk_bytes(response.content):
yield {
"type": "http.response.body",
"body": chunk,
"more_body": not last,
}
@classmethod
def chunk_bytes(cls, data):
"""
Chunks some data up so it can be sent in reasonable size messages.
Yields (chunk, last_chunk) tuples.
"""
position = 0
if not data:
yield data, True
return
while position < len(data):
yield (
data[position : position + cls.chunk_size],
(position + cls.chunk_size) >= len(data),
)
position += cls.chunk_size
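def _example_chunk_bytes():
    # Illustrative sketch only, not part of Channels: shows the
    # (chunk, last_chunk) tuples yielded by AsgiHandler.chunk_bytes().
    # The 4-byte chunk size is an assumption made just for this example.
    class TinyChunks(AsgiHandler):
        chunk_size = 4
    return list(TinyChunks.chunk_bytes(b"hello world"))
    # -> [(b'hell', False), (b'o wo', False), (b'rld', True)]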
|
import datetime
import pytz
from unittest import TestCase
from mock import patch, Mock, call
from purchasing.jobs.job_base import JobBase, EmailJobBase
from purchasing_test.factories import JobStatusFactory
import logging
logging.getLogger("factory").setLevel(logging.WARN)
class FakeJobBase(JobBase):
jobs = []
@property
def start_time(self):
return None
@property
def job_status_model(self):
return JobStatusFactory
@FakeJobBase.register
class FakeJob(FakeJobBase):
pass
class PastJob(FakeJobBase):
@property
def start_time(self):
return (datetime.datetime.utcnow() - datetime.timedelta(minutes=1)).replace(tzinfo=pytz.UTC)
class FutureJob(FakeJobBase):
@property
def start_time(self):
return (datetime.datetime.utcnow() + datetime.timedelta(minutes=1)).replace(tzinfo=pytz.UTC)
class FakeEmailJob(EmailJobBase):
@property
def job_status_model(self):
return JobStatusFactory
class TestJobBase(TestCase):
def test_register_job(self):
self.assertEquals(len(FakeJobBase.jobs), 1)
self.assertTrue(FakeJob in FakeJobBase.jobs)
@patch('purchasing.jobs.job_base.get_or_create', return_value=[JobStatusFactory.build(), True])
def test_schedule_timer_no_time(self, get_or_create):
FakeJob().schedule_job()
self.assertTrue(get_or_create.called)
@patch('purchasing.jobs.job_base.get_or_create', return_value=[JobStatusFactory.build(), True])
def test_schedule_timer_past_job(self, get_or_create):
PastJob().schedule_job()
self.assertTrue(get_or_create.called)
@patch('purchasing.jobs.job_base.get_or_create', return_value=[JobStatusFactory.build(), True])
def test_schedule_timer_future_job(self, get_or_create):
FutureJob().schedule_job()
self.assertFalse(get_or_create.called)
class TestEmailJobBase(TestCase):
def setUp(self):
job_mock = Mock()
job_mock.update = Mock()
self.job = job_mock
notification_mock = Mock()
notification_mock.send = Mock()
notification_fail = Mock()
notification_fail.send = Mock(side_effect=Exception('something went wrong!'))
self.success_notification = notification_mock
self.failure_notification = notification_fail
def tearDown(self):
self.job.reset_mock()
def test_all_successful(self):
send_mock = Mock()
send_mock.return_value = [self.success_notification, self.success_notification]
FakeEmailJob.build_notifications = send_mock
expected_updates = [call.update(status='started'), call.update(status='success')]
FakeEmailJob().run_job(self.job)
self.assertEquals(self.job.mock_calls, expected_updates)
def test_some_failures(self):
send_mock = Mock()
send_mock.return_value = [self.success_notification, self.failure_notification]
FakeEmailJob.build_notifications = send_mock
expected_updates = [
call.update(status='started'),
call.update(status='failed', info='something went wrong!')
]
FakeEmailJob().run_job(self.job)
self.assertEquals(self.job.mock_calls, expected_updates)
|
import wx
import wx.html
from ..utils.generic_class import GenericClass
from ..utils.constants import control, dtype
from ..utils.validator import CharValidator
import pkg_resources as p
class GroupAnalysis(wx.html.HtmlWindow):
def __init__(self, parent, counter = 0):
from urllib2 import urlopen
wx.html.HtmlWindow.__init__(self, parent, style= wx.html.HW_SCROLLBAR_AUTO)
self.SetStandardFonts()
self.counter = counter
self.LoadFile(p.resource_filename('CPAC', 'GUI/resources/html/fsl_ga.html'))
def get_counter(self):
return self.counter
class GPASettings(wx.ScrolledWindow):
def __init__(self, parent, counter = 0):
wx.ScrolledWindow.__init__(self, parent)
self.counter = counter
self.page = GenericClass(self, "FSL/FEAT Group Analysis Options")
self.page.add(label="Number of Models to Run Simultaneously ",
control=control.INT_CTRL,
name='numGPAModelsAtOnce',
type=dtype.NUM,
comment="This number depends on computing resources.",
values=1)
self.page.add(label = "Models to Run ",
control = control.LISTBOX_COMBO,
name = 'modelConfigs',
type = dtype.LSTR,
values = "",
comment="Use the + to add FSL model configuration to be run.",
size = (400,100),
combo_type = 3)
self.page.set_sizer()
parent.get_page_list().append(self)
def get_counter(self):
return self.counter
class BASC(wx.html.HtmlWindow):
def __init__(self, parent, counter = 0):
from urllib2 import urlopen
wx.html.HtmlWindow.__init__(self, parent, style= wx.html.HW_SCROLLBAR_AUTO)
self.SetStandardFonts()
self.counter = counter
self.LoadFile(p.resource_filename('CPAC', 'GUI/resources/html/basc.html'))
def get_counter(self):
return self.counter
class BASCSettings(wx.ScrolledWindow):
def __init__(self, parent, counter = 0):
wx.ScrolledWindow.__init__(self, parent)
self.counter = counter
self.page = GenericClass(self, "Bootstrap Analysis of Stable Clusters (BASC)")
self.page.add(label="Run BASC ",
control=control.CHOICE_BOX,
name='runBASC',
type=dtype.LSTR,
comment="Run Bootstrap Analysis of Stable Clusters",
values=["Off","On"],
wkf_switch = True)
self.page.add(label="Mask File ",
control=control.COMBO_BOX,
name='bascROIFile',
type=dtype.STR,
values = "None",
comment="Full path to a mask file to be used when running BASC. Voxels outside this mask will be excluded from analysis.\n\nIf you do not wish to use a mask, set this field to None.\n\nNote: BASC is very computationally intensive, we strongly recommend you limit your analysis to specific brain areas of interest.")
self.page.add(label= "Number of Time Series Bootstraps ",
control=control.INT_CTRL,
name='bascTimeseriesBootstraps',
type=dtype.NUM,
comment="Number of bootstraps to apply to individual time series.",
values=100)
self.page.add(label= "Number of Dataset Bootstraps ",
control=control.INT_CTRL,
name='bascDatasetBootstraps',
type=dtype.NUM,
comment="Number of bootstraps to apply to the original dataset.",
values=100)
self.page.add(label="Correlation Threshold File ",
control=control.COMBO_BOX,
name='bascAffinityThresholdFile',
type=dtype.STR,
values = "",
comment="Path to a text file containing correlation threshold for each subject. These thresholds will be applied to the correlation matrix before clustering.\n\nThis file should contain one value per line, with each line corresponding to the subject on the same line in the group analysis subject list file.\n\nIn most cases, the same threshold can be used for all subjects. Different thresholds are useful when subjects have time series of different lengths.")
self.page.add(label= "Number of Clusters ",
control=control.INT_CTRL,
name='bascClusters',
type=dtype.NUM,
comment="Number of clusters to create during clustering at both the individual and group levels.",
values=6)
self.page.set_sizer()
parent.get_page_list().append(self)
def get_counter(self):
return self.counter
class CWAS(wx.html.HtmlWindow):
def __init__(self, parent, counter = 0):
from urllib2 import urlopen
wx.html.HtmlWindow.__init__(self, parent, style= wx.html.HW_SCROLLBAR_AUTO)
self.SetStandardFonts()
self.counter = counter
self.LoadFile(p.resource_filename('CPAC', 'GUI/resources/html/cwas.html'))
def get_counter(self):
return self.counter
class CWASSettings(wx.ScrolledWindow):
def __init__(self, parent, counter = 0):
wx.ScrolledWindow.__init__(self, parent)
self.counter = counter
self.page = GenericClass(self, "Connectome-wide Association Studies (CWAS)")
self.page.add(label="Run CWAS ",
control=control.CHOICE_BOX,
name='runCWAS',
type=dtype.LSTR,
comment="Run CWAS",
values=["Off","On"],
wkf_switch = True)
self.page.add(label="CWAS ROI File ",
control=control.COMBO_BOX,
name='cwasROIFile',
type=dtype.STR,
values = "None",
comment="Path to a mask file. Voxels outside this mask will be excluded from CWAS.")
self.page.add(label="CWAS Regressor File ",
control=control.COMBO_BOX,
name='cwasRegressorFile',
type=dtype.STR,
values= "None",
comment = "Path to a text file containing phenotypic regressor.")
self.page.add(label= "CWAS FSamples ",
control=control.INT_CTRL,
name='cwasFSamples',
type=dtype.NUM,
                      comment="Number of permutation tests to run on the Pseudo-F statistic.",
values=5000)
self.page.add(label= "CWAS Parallel Nodes ",
control=control.INT_CTRL,
name='cwasParallelNodes',
type=dtype.NUM,
comment="Number of NiPype nodes to be created while computing CWAS.\n"\
"This number depends on computing resources.",
values=10)
self.page.add(label= "Column Number with Regressor of Interest ",
control=control.TEXT_BOX,
name='cwasRegressorCols',
type=dtype.LNUM,
values = "",
size = (300,-1),
validator = CharValidator("no-alpha"),
comment="Column Number with Regressor of Interest.\n\nRemember this is 0 indexed so the 1st column is 0.\n\n"\
"For instance, assuming the 1st column is the intercept, column number with regressor of interest = 1")
self.page.add(label= "CWAS Regressor Strata ",
control=control.TEXT_BOX,
name='cwasRegressorStrata',
type=dtype.STR,
values = "None",
size = (300,-1),
                      comment="A list with length equal to the total number of rows in your regressor file.\n"\
                              "Each element of the list indicates that element's group. Leave it as None\n"\
                              "if you have a between-subject design, and give it a value if not.\n"\
                              "For instance, if you have multiple scans per subject, then you would want to\n"\
                              "do a permutation within-subject between scans. For this to occur, the list\n"\
                              "below could be something like ['s1', 's1', 's2', 's2', 's3', 's3', ...],\n"\
                              "indicating what subject each element/scan is associated with, and permutations\n"\
                              "would only be done between scans within each subject.")
self.page.set_sizer()
parent.get_page_list().append(self)
def get_counter(self):
return self.counter
|
"""Usage: <win-path-to-pdb.pdb>
This tool will take a PDB on the command line, extract the source files that
were used in building the PDB, query SVN for which repository and revision
these files are at, and then finally write this information back into the PDB
in a format that the debugging tools understand. This allows for automatic
source debugging, as all of the information is contained in the PDB, and the
debugger can go out and fetch the source files via SVN.
You most likely want to run these immediately after a build, since the source
input files need to match the generated PDB, and we want the correct SVN
revision information for the exact files that were used for the build.
The following files from a windbg + source server installation are expected
to reside in the same directory as this python script:
dbghelp.dll
pdbstr.exe
srctool.exe
NOTE: Expected to run under a native win32 python, NOT cygwin. All paths are
dealt with as win32 paths, since we have to interact with the Microsoft tools.
"""
import sys
import os
import time
import subprocess
import tempfile
REPO_MAP = {
"svn://chrome-svn/chrome": "http://src.chromium.org/svn",
"svn://chrome-svn.corp.google.com/chrome": "http://src.chromium.org/svn",
"http://v8.googlecode.com/svn": None,
"http://google-breakpad.googlecode.com/svn": None,
"http://googletest.googlecode.com/svn": None,
"http://open-vcdiff.googlecode.com/svn": None,
"http://google-url.googlecode.com/svn": None,
}
def FindFile(filename):
"""Return the full windows path to a file in the same dir as this code."""
thisdir = os.path.dirname(os.path.join(os.path.curdir, __file__))
return os.path.abspath(os.path.join(thisdir, filename))
def ExtractSourceFiles(pdb_filename):
"""Extract a list of local paths of the source files from a PDB."""
srctool = subprocess.Popen([FindFile('srctool.exe'), '-r', pdb_filename],
stdout=subprocess.PIPE, stderr=subprocess.PIPE)
filelist = srctool.stdout.read()
res = srctool.wait()
if res != 0 or filelist.startswith("srctool: "):
    raise Exception("srctool failed: " + filelist)
return [x for x in filelist.split('\r\n') if len(x) != 0]
def ReadSourceStream(pdb_filename):
"""Read the contents of the source information stream from a PDB."""
srctool = subprocess.Popen([FindFile('pdbstr.exe'),
'-r', '-s:srcsrv',
'-p:%s' % pdb_filename],
stdout=subprocess.PIPE, stderr=subprocess.PIPE)
data = srctool.stdout.read()
res = srctool.wait()
if (res != 0 and res != -1) or data.startswith("pdbstr: "):
    raise Exception("pdbstr failed: " + data)
return data
def WriteSourceStream(pdb_filename, data):
"""Write the contents of the source information stream to a PDB."""
# Write out the data to a temporary filename that we can pass to pdbstr.
(f, fname) = tempfile.mkstemp()
f = os.fdopen(f, "wb")
f.write(data)
f.close()
srctool = subprocess.Popen([FindFile('pdbstr.exe'),
'-w', '-s:srcsrv',
'-i:%s' % fname,
'-p:%s' % pdb_filename],
stdout=subprocess.PIPE, stderr=subprocess.PIPE)
data = srctool.stdout.read()
res = srctool.wait()
if (res != 0 and res != -1) or data.startswith("pdbstr: "):
raise "pdbstr failed: " + data
os.unlink(fname)
def ExtractSvnInfo(local_filename):
"""Calls svn info to extract the repository, path, and revision."""
# We call svn.bat to make sure and get the depot tools SVN and not cygwin.
srctool = subprocess.Popen(['svn.bat', 'info', local_filename],
stdout=subprocess.PIPE, stderr=subprocess.PIPE)
info = srctool.stdout.read()
res = srctool.wait()
if res != 0:
return None
# Hack up into a dictionary of the fields printed by svn info.
  vals = dict((y.split(': ', 1) for y in info.split('\r\n') if y))
root = vals['Repository Root']
if not vals['URL'].startswith(root):
    raise Exception("URL is not inside of the repository root?!?")
path = vals['URL'][len(root):]
rev = int(vals['Revision'])
return [root, path, rev]
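def ExampleSourceLine():
  """Illustrative sketch only: shows how UpdatePDB() below composes one
  'source files' line from ExtractSvnInfo()'s [root, path, rev] result.
  The filename, path and revision are made-up example values."""
  filename = 'c:\\src\\chrome\\base\\at_exit.cc'
  root = 'http://src.chromium.org/svn'
  path = '/trunk/src/base/at_exit.cc'
  rev = 12345
  return '%s*%s*%s*%s' % (filename, root, path, rev)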
def UpdatePDB(pdb_filename):
"""Update a pdb file with source information."""
dir_blacklist = { }
# TODO(deanm) look into "compressing" our output, by making use of vars
# and other things, so we don't need to duplicate the repo path and revs.
lines = [
'SRCSRV: ini ------------------------------------------------',
'VERSION=1',
'INDEXVERSION=2',
'VERCTRL=Subversion',
'DATETIME=%s' % time.asctime(),
'SRCSRV: variables ------------------------------------------',
'SVN_EXTRACT_TARGET=%targ%\%fnbksl%(%var3%)\%var4%\%fnfile%(%var1%)',
'SVN_EXTRACT_CMD=cmd /c svn cat "%var2%%var3%@%var4%" --non-interactive > "%svn_extract_target%"',
'SRCSRVTRG=%SVN_extract_target%',
'SRCSRVCMD=%SVN_extract_cmd%',
'SRCSRV: source files ---------------------------------------',
]
if ReadSourceStream(pdb_filename):
    raise Exception("PDB already has source indexing information!")
filelist = ExtractSourceFiles(pdb_filename)
for filename in filelist:
filedir = os.path.dirname(filename)
print "Processing: %s" % filename
# This directory is blacklisted, either because it's not part of the SVN
# repository, or from one we're not interested in indexing.
if dir_blacklist.get(filedir, False):
print " skipping, directory is blacklisted."
continue
info = ExtractSvnInfo(filename)
# Skip the file if it's not under an svn repository. To avoid constantly
# querying SVN for files outside of SVN control (for example, the CRT
# sources), check if the directory is outside of SVN and blacklist it.
if not info:
if not ExtractSvnInfo(filedir):
dir_blacklist[filedir] = True
print " skipping, file is not in an SVN repository"
continue
root = info[0]
path = info[1]
rev = info[2]
# Check if file was from a svn repository we don't know about, or don't
# want to index. Blacklist the entire directory.
if not REPO_MAP.has_key(info[0]):
print " skipping, file is from an unknown SVN repository %s" % root
dir_blacklist[filedir] = True
continue
# We might want to map an internal repository URL to an external repository.
if REPO_MAP[root]:
root = REPO_MAP[root]
lines.append('%s*%s*%s*%s' % (filename, root, path, rev))
print " indexed file."
lines.append('SRCSRV: end ------------------------------------------------')
WriteSourceStream(pdb_filename, '\r\n'.join(lines))
if __name__ == '__main__':
if len(sys.argv) != 2:
print "usage: file.pdb"
sys.exit(1)
UpdatePDB(sys.argv[1])
|
"""Truncated SVD for sparse matrices, aka latent semantic analysis (LSA).
"""
import numpy as np
import scipy.sparse as sp
try:
from scipy.sparse.linalg import svds
except ImportError:
from ..utils.arpack import svds
from ..base import BaseEstimator, TransformerMixin
from ..utils import check_array, as_float_array, check_random_state
from ..utils.extmath import randomized_svd, safe_sparse_dot, svd_flip
from ..utils.sparsefuncs import mean_variance_axis0
__all__ = ["TruncatedSVD"]
class TruncatedSVD(BaseEstimator, TransformerMixin):
"""Dimensionality reduction using truncated SVD (aka LSA).
This transformer performs linear dimensionality reduction by means of
truncated singular value decomposition (SVD). It is very similar to PCA,
but operates on sample vectors directly, instead of on a covariance matrix.
This means it can work with scipy.sparse matrices efficiently.
In particular, truncated SVD works on term count/tf-idf matrices as
returned by the vectorizers in sklearn.feature_extraction.text. In that
context, it is known as latent semantic analysis (LSA).
    This estimator supports two algorithms: a fast randomized SVD solver, and
a "naive" algorithm that uses ARPACK as an eigensolver on (X * X.T) or
(X.T * X), whichever is more efficient.
Parameters
----------
n_components : int, default = 2
Desired dimensionality of output data.
Must be strictly less than the number of features.
The default value is useful for visualisation. For LSA, a value of
100 is recommended.
algorithm : string, default = "randomized"
SVD solver to use. Either "arpack" for the ARPACK wrapper in SciPy
(scipy.sparse.linalg.svds), or "randomized" for the randomized
algorithm due to Halko (2009).
n_iter : int, optional
Number of iterations for randomized SVD solver. Not used by ARPACK.
random_state : int or RandomState, optional
(Seed for) pseudo-random number generator. If not given, the
numpy.random singleton is used.
tol : float, optional
Tolerance for ARPACK. 0 means machine precision. Ignored by randomized
SVD solver.
Attributes
----------
components_ : array, shape (n_components, n_features)
explained_variance_ratio_ : array, [n_components]
Percentage of variance explained by each of the selected components.
explained_variance_ : array, [n_components]
The variance of the training samples transformed by a projection to
each component.
Examples
--------
>>> from sklearn.decomposition import TruncatedSVD
>>> from sklearn.random_projection import sparse_random_matrix
>>> X = sparse_random_matrix(100, 100, density=0.01, random_state=42)
>>> svd = TruncatedSVD(n_components=5, random_state=42)
>>> svd.fit(X) # doctest: +NORMALIZE_WHITESPACE
TruncatedSVD(algorithm='randomized', n_components=5, n_iter=5,
random_state=42, tol=0.0)
>>> print(svd.explained_variance_ratio_) # doctest: +ELLIPSIS
[ 0.07825... 0.05528... 0.05445... 0.04997... 0.04134...]
>>> print(svd.explained_variance_ratio_.sum()) # doctest: +ELLIPSIS
0.27930...
See also
--------
PCA
RandomizedPCA
References
----------
Finding structure with randomness: Stochastic algorithms for constructing
approximate matrix decompositions
    Halko, et al., 2009 (arXiv:0909.4061) http://arxiv.org/pdf/0909.4061
Notes
-----
    SVD suffers from a problem called "sign indeterminacy", which means the
sign of the ``components_`` and the output from transform depend on the
algorithm and random state. To work around this, fit instances of this
class to data once, then keep the instance around to do transformations.
"""
def __init__(self, n_components=2, algorithm="randomized", n_iter=5,
random_state=None, tol=0.):
self.algorithm = algorithm
self.n_components = n_components
self.n_iter = n_iter
self.random_state = random_state
self.tol = tol
def fit(self, X, y=None):
"""Fit LSI model on training data X.
Parameters
----------
X : {array-like, sparse matrix}, shape (n_samples, n_features)
Training data.
Returns
-------
self : object
Returns the transformer object.
"""
self.fit_transform(X)
return self
def fit_transform(self, X, y=None):
"""Fit LSI model to X and perform dimensionality reduction on X.
Parameters
----------
X : {array-like, sparse matrix}, shape (n_samples, n_features)
Training data.
Returns
-------
X_new : array, shape (n_samples, n_components)
Reduced version of X. This will always be a dense array.
"""
X = as_float_array(X, copy=False)
random_state = check_random_state(self.random_state)
# If sparse and not csr or csc, convert to csr
if sp.issparse(X) and X.getformat() not in ["csr", "csc"]:
X = X.tocsr()
if self.algorithm == "arpack":
U, Sigma, VT = svds(X, k=self.n_components, tol=self.tol)
# svds doesn't abide by scipy.linalg.svd/randomized_svd
# conventions, so reverse its outputs.
Sigma = Sigma[::-1]
U, VT = svd_flip(U[:, ::-1], VT[::-1])
elif self.algorithm == "randomized":
k = self.n_components
n_features = X.shape[1]
if k >= n_features:
raise ValueError("n_components must be < n_features;"
" got %d >= %d" % (k, n_features))
U, Sigma, VT = randomized_svd(X, self.n_components,
n_iter=self.n_iter,
random_state=random_state)
else:
raise ValueError("unknown algorithm %r" % self.algorithm)
self.components_ = VT
# Calculate explained variance & explained variance ratio
X_transformed = np.dot(U, np.diag(Sigma))
self.explained_variance_ = exp_var = np.var(X_transformed, axis=0)
if sp.issparse(X):
_, full_var = mean_variance_axis0(X)
full_var = full_var.sum()
else:
full_var = np.var(X, axis=0).sum()
self.explained_variance_ratio_ = exp_var / full_var
return X_transformed
def transform(self, X):
"""Perform dimensionality reduction on X.
Parameters
----------
X : {array-like, sparse matrix}, shape (n_samples, n_features)
New data.
Returns
-------
X_new : array, shape (n_samples, n_components)
Reduced version of X. This will always be a dense array.
"""
X = check_array(X, accept_sparse='csr')
return safe_sparse_dot(X, self.components_.T)
def inverse_transform(self, X):
"""Transform X back to its original space.
Returns an array X_original whose transform would be X.
Parameters
----------
X : array-like, shape (n_samples, n_components)
New data.
Returns
-------
X_original : array, shape (n_samples, n_features)
Note that this is always a dense array.
"""
X = check_array(X)
return np.dot(X, self.components_)
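def _example_truncated_svd():
    # Illustrative sketch only, not part of scikit-learn: fit once and reuse
    # the fitted instance for transform()/inverse_transform(), as the "sign
    # indeterminacy" note in the class docstring recommends.  The random
    # data and shapes are assumptions made just for this example.
    rng = np.random.RandomState(42)
    X = rng.rand(20, 10)
    svd = TruncatedSVD(n_components=3, random_state=42)
    X_reduced = svd.fit_transform(X)             # shape (20, 3)
    X_approx = svd.inverse_transform(X_reduced)  # shape (20, 10)
    return X_reduced.shape, X_approx.shape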
|
import subprocess
import sys
import threading
import time
bundle_id = sys.argv[1]
args = sys.argv[2:]
logp = subprocess.Popen(['idevicesyslog'], stdout=subprocess.PIPE, bufsize=-1)
log = b''  # collected syslog output (bytes)
def collect_log():
global log
while True:
out = logp.stdout.read()
if out:
log = log + out
else:
return
logt = threading.Thread(target=collect_log)
logt.start()
rv = subprocess.call(['idevicedebug', '--debug', 'run', bundle_id] + args)
print('\n\nreturned %d' % rv)
logp.terminate()
print('\n\nreading syslog...')
logt.join()
print('syslog follows')
print(log.decode('utf-8', 'replace'))
exit(rv)
|
'''
Script to generate list of seed nodes for chainparams.cpp.
This script expects two text files in the directory that is passed as an
argument:
nodes_main.txt
nodes_test.txt
These files must consist of lines in the format
<ip>
<ip>:<port>
[<ipv6>]
[<ipv6>]:<port>
<onion>.onion
0xDDBBCCAA (IPv4 little-endian old pnSeeds format)
The output will be two data structures with the peers in binary format:
static SeedSpec6 pnSeed6_main[]={
...
}
static SeedSpec6 pnSeed6_test[]={
...
}
These should be pasted into `src/chainparamsseeds.h`.
'''
from __future__ import print_function, division
from base64 import b32decode
from binascii import a2b_hex
import sys, os
import re
pchIPv4 = bytearray([0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0xff, 0xff])
pchOnionCat = bytearray([0xFD,0x87,0xD8,0x7E,0xEB,0x43])
def name_to_ipv6(addr):
if len(addr)>6 and addr.endswith('.onion'):
vchAddr = b32decode(addr[0:-6], True)
if len(vchAddr) != 16-len(pchOnionCat):
            raise ValueError('Invalid onion %s' % addr)
return pchOnionCat + vchAddr
elif '.' in addr: # IPv4
return pchIPv4 + bytearray((int(x) for x in addr.split('.')))
elif ':' in addr: # IPv6
sub = [[], []] # prefix, suffix
x = 0
addr = addr.split(':')
for i,comp in enumerate(addr):
if comp == '':
if i == 0 or i == (len(addr)-1): # skip empty component at beginning or end
continue
x += 1 # :: skips to suffix
assert(x < 2)
else: # two bytes per component
val = int(comp, 16)
sub[x].append(val >> 8)
sub[x].append(val & 0xff)
nullbytes = 16 - len(sub[0]) - len(sub[1])
assert((x == 0 and nullbytes == 0) or (x == 1 and nullbytes > 0))
return bytearray(sub[0] + ([0] * nullbytes) + sub[1])
elif addr.startswith('0x'): # IPv4-in-little-endian
return pchIPv4 + bytearray(reversed(a2b_hex(addr[2:])))
else:
raise ValueError('Could not parse address %s' % addr)
def parse_spec(s, defaultport):
    match = re.match(r'\[([0-9a-fA-F:]+)\](?::([0-9]+))?$', s)
if match: # ipv6
host = match.group(1)
port = match.group(2)
else:
(host,_,port) = s.partition(':')
if not port:
port = defaultport
else:
port = int(port)
host = name_to_ipv6(host)
return (host,port)
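# Illustrative sketch only (not part of the generator itself): shows how
# parse_spec() maps the accepted input formats onto (16-byte address, port)
# tuples.  The addresses below are made-up examples, not real seed nodes.
def example_parse_spec():
    specs = ['1.2.3.4', '1.2.3.4:8333', '[2001:db8::1]:8333']
    return [parse_spec(s, 59533) for s in specs]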
def process_nodes(g, f, structname, defaultport):
g.write('static SeedSpec6 %s[] = {\n' % structname)
first = True
for line in f:
comment = line.find('#')
if comment != -1:
line = line[0:comment]
line = line.strip()
if not line:
continue
if not first:
g.write(',\n')
first = False
(host,port) = parse_spec(line, defaultport)
hoststr = ','.join(('0x%02x' % b) for b in host)
g.write(' {{%s}, %i}' % (hoststr, port))
g.write('\n};\n')
def main():
if len(sys.argv)<2:
print(('Usage: %s <path_to_nodes_txt>' % sys.argv[0]), file=sys.stderr)
exit(1)
g = sys.stdout
indir = sys.argv[1]
g.write('#ifndef TRUTHCOIN_CHAINPARAMSSEEDS_H\n')
g.write('#define TRUTHCOIN_CHAINPARAMSSEEDS_H\n')
g.write('/**\n')
g.write(' * List of fixed seed nodes for the truthcoin network\n')
g.write(' * AUTOGENERATED by share/seeds/generate-seeds.py\n')
g.write(' *\n')
g.write(' * Each line contains a 16-byte IPv6 address and a port.\n')
g.write(' * IPv4 as well as onion addresses are wrapped inside a IPv6 address accordingly.\n')
g.write(' */\n')
with open(os.path.join(indir,'nodes_main.txt'),'r') as f:
process_nodes(g, f, 'pnSeed6_main', 59533)
g.write('\n')
with open(os.path.join(indir,'nodes_test.txt'),'r') as f:
process_nodes(g, f, 'pnSeed6_test', 64533)
g.write('#endif // TRUTHCOIN_CHAINPARAMSSEEDS_H\n')
if __name__ == '__main__':
main()
|
from __future__ import division, print_function, absolute_import
import petl as etl
table1 = [['foo', 'bar', 'baz'],
[1, 'a', None],
[1, None, .23],
[1, 'b', None],
[2, None, None],
[2, None, .56],
[2, 'c', None],
[None, 'c', .72]]
table2 = etl.filldown(table1)
table2.lookall()
table3 = etl.filldown(table1, 'bar')
table3.lookall()
table4 = etl.filldown(table1, 'bar', 'baz')
table4.lookall()
import petl as etl
table1 = [['foo', 'bar', 'baz'],
[1, 'a', None],
[1, None, .23],
[1, 'b', None],
[2, None, None],
[2, None, .56],
[2, 'c', None],
[None, 'c', .72]]
table2 = etl.fillright(table1)
table2.lookall()
import petl as etl
table1 = [['foo', 'bar', 'baz'],
[1, 'a', None],
[1, None, .23],
[1, 'b', None],
[2, None, None],
[2, None, .56],
[2, 'c', None],
[None, 'c', .72]]
table2 = etl.fillleft(table1)
table2.lookall()
|
"""
homeassistant.components.switch.hikvision
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Support turning on/off motion detection on Hikvision cameras.
Note: Currently works using default https port only.
CGI API Guide: http://bit.ly/1RuyUuF
Configuration:
To use the Hikvision motion detection switch you will need to add something
like the following to your config/configuration.yaml
switch:
platform: hikvisioncam
name: Hikvision Cam 1 Motion Detection
host: 192.168.1.32
username: YOUR_USERNAME
password: YOUR_PASSWORD
Variables:
host
*Required
This is the IP address of your Hikvision camera. Example: 192.168.1.32
username
*Required
Your Hikvision camera username.
password
*Required
Your Hikvision camera password.
name
*Optional
The name to use when displaying this switch instance.
"""
from homeassistant.helpers.entity import ToggleEntity
from homeassistant.const import STATE_ON, STATE_OFF
from homeassistant.const import CONF_HOST, CONF_USERNAME, CONF_PASSWORD
import logging
try:
import hikvision.api
from hikvision.error import HikvisionError, MissingParamError
except ImportError:
    hikvision = None
_LOGGING = logging.getLogger(__name__)
REQUIREMENTS = ['hikvision>=0.4']
def setup_platform(hass, config, add_devices_callback, discovery_info=None):
""" Setup Hikvision Camera config. """
host = config.get(CONF_HOST, None)
port = config.get('port', "80")
name = config.get('name', "Hikvision Camera Motion Detection")
username = config.get(CONF_USERNAME, "admin")
password = config.get(CONF_PASSWORD, "12345")
    if hikvision is None:
_LOGGING.error((
"Failed to import hikvision. Did you maybe not install the "
"'hikvision' dependency?"))
return False
try:
hikvision_cam = hikvision.api.CreateDevice(
host, port=port, username=username,
password=password, is_https=False)
except MissingParamError as param_err:
_LOGGING.error("Missing required param: %s", param_err)
return False
except HikvisionError as conn_err:
_LOGGING.error("Unable to connect: %s", conn_err)
return False
add_devices_callback([
HikvisionMotionSwitch(name, hikvision_cam)
])
class HikvisionMotionSwitch(ToggleEntity):
""" Provides a switch to toggle on/off motion detection. """
def __init__(self, name, hikvision_cam):
self._name = name
self._hikvision_cam = hikvision_cam
self._state = STATE_OFF
@property
def should_poll(self):
""" Poll for status regularly. """
return True
@property
def name(self):
""" Returns the name of the device if any. """
return self._name
@property
def state(self):
""" Returns the state of the device if any. """
return self._state
@property
def is_on(self):
""" True if device is on. """
return self._state == STATE_ON
def turn_on(self, **kwargs):
""" Turn the device on. """
_LOGGING.info("Turning on Motion Detection ")
self._hikvision_cam.enable_motion_detection()
def turn_off(self, **kwargs):
""" Turn the device off. """
_LOGGING.info("Turning off Motion Detection ")
self._hikvision_cam.disable_motion_detection()
def update(self):
""" Update Motion Detection state """
enabled = self._hikvision_cam.is_motion_detection_enabled()
_LOGGING.info('enabled: %s', enabled)
self._state = STATE_ON if enabled else STATE_OFF
|
import codecs
import os
from plasTeX.Base import Command
from plasTeX.Logging import getLogger
log = getLogger()
status = getLogger('status')
class import_sty(Command):
macroName = 'import'
args = 'dir:str file:str'
def invoke(self, tex):
a = self.parse(tex)
path = os.path.join(a['dir'], a['file'])
fullpath = tex.kpsewhich(path)
status.info(' ( %s ' % fullpath)
try:
encoding = self.config['files']['input-encoding']
tex.input(codecs.open(fullpath, 'r', encoding, 'replace'))
except (OSError, IOError):
log.warning('\nProblem opening file "%s"', fullpath)
status.info(' ) ')
return []
|
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
from mock import Mock
from pytest import fixture
from uber_rides.errors import ClientError
from uber_rides.errors import ErrorDetails
from uber_rides.errors import ServerError
from uber_rides.utils import http
@fixture
def simple_401_error():
code = 'unauthorized'
mock_error = Mock(
status_code=http.STATUS_UNAUTHORIZED,
headers=http.DEFAULT_CONTENT_HEADERS,
)
error_response = {
'message': http.ERROR_CODE_DESCRIPTION_DICT[code],
'code': code,
}
mock_error.json = Mock(return_value=error_response)
return mock_error
@fixture
def simple_422_validation_error():
code = 'validation_failed'
mock_error = Mock(
status_code=http.STATUS_UNPROCESSABLE_ENTITY,
headers=http.DEFAULT_CONTENT_HEADERS,
)
error_response = {
'fields': {
'latitude': 'Must be between -90.0 and 90.0',
'longitude': 'Must be between -180.0 and 180.0',
},
'message': http.ERROR_CODE_DESCRIPTION_DICT[code],
'code': code,
}
mock_error.json = Mock(return_value=error_response)
return mock_error
@fixture
def simple_422_distance_exceeded_error():
code = 'distance_exceeded'
mock_error = Mock(
status_code=http.STATUS_UNPROCESSABLE_ENTITY,
headers=http.DEFAULT_CONTENT_HEADERS,
)
error_response = {
'fields': {
'start_longitude': http.ERROR_CODE_DESCRIPTION_DICT[code],
'end_longitude': http.ERROR_CODE_DESCRIPTION_DICT[code],
'start_latitude': http.ERROR_CODE_DESCRIPTION_DICT[code],
'end_latitude': http.ERROR_CODE_DESCRIPTION_DICT[code],
},
'message': http.ERROR_CODE_DESCRIPTION_DICT[code],
'code': code,
}
mock_error.json = Mock(return_value=error_response)
return mock_error
@fixture
def simple_500_error():
code = 'internal_server_error'
mock_error = Mock(
status_code=http.STATUS_INTERNAL_SERVER_ERROR,
headers=http.DEFAULT_CONTENT_HEADERS,
)
error_response = {
'message': http.ERROR_CODE_DESCRIPTION_DICT[code],
'code': code,
}
mock_error.json = Mock(return_value=error_response)
return mock_error
@fixture
def simple_503_error():
code = 'service_unavailable'
mock_error = Mock(
status_code=http.STATUS_SERVICE_UNAVAILABLE,
headers=http.DEFAULT_CONTENT_HEADERS,
)
error_response = {
'message': http.ERROR_CODE_DESCRIPTION_DICT[code],
'code': code,
}
mock_error.json = Mock(return_value=error_response)
return mock_error
@fixture
def complex_409_surge_error():
code = 'surge'
mock_error = Mock(
status_code=http.STATUS_CONFLICT,
headers=http.DEFAULT_CONTENT_HEADERS,
)
error_response = {
'meta': {
'surge_confirmation': {
'href': 'api.uber.com/v1/surge-confirmations/abc',
'surge_confirmation_id': 'abc',
},
},
'errors': [
{
'status': http.STATUS_CONFLICT,
'code': code,
'title': http.ERROR_CODE_DESCRIPTION_DICT[code],
},
],
}
mock_error.json = Mock(return_value=error_response)
return mock_error
@fixture
def complex_422_same_pickup_dropoff_error():
code = 'same_pickup_dropoff'
mock_error = Mock(
status_code=http.STATUS_UNPROCESSABLE_ENTITY,
headers=http.DEFAULT_CONTENT_HEADERS,
)
error_response = {
'meta': {},
'errors': [
{
'status': http.STATUS_UNPROCESSABLE_ENTITY,
'code': code,
'title': http.ERROR_CODE_DESCRIPTION_DICT[code],
},
],
}
mock_error.json = Mock(return_value=error_response)
return mock_error
def test_simple_401_error(simple_401_error):
"""Test Unauthorized Error converted to ClientError correctly."""
client_error = ClientError(simple_401_error, 'msg')
assert client_error.message == 'msg'
assert isinstance(client_error.errors, list)
assert isinstance(client_error.meta, dict)
assert not client_error.meta
error_details = client_error.errors[0]
expected_code = 'unauthorized'
expected_description = http.ERROR_CODE_DESCRIPTION_DICT[expected_code]
assert isinstance(error_details, ErrorDetails)
assert error_details.status == http.STATUS_UNAUTHORIZED
assert error_details.code == expected_code
assert error_details.title == expected_description
def test_simple_422_validation_error(simple_422_validation_error):
"""Test Validation Error converted to ClientError correctly."""
client_error = ClientError(simple_422_validation_error, 'msg')
assert client_error.message == 'msg'
assert isinstance(client_error.errors, list)
assert isinstance(client_error.meta, dict)
error_details = client_error.errors[0]
expected_code = 'validation_failed'
expected_description = http.ERROR_CODE_DESCRIPTION_DICT[expected_code]
assert isinstance(error_details, ErrorDetails)
assert error_details.status == http.STATUS_UNPROCESSABLE_ENTITY
assert error_details.code == expected_code
assert error_details.title == expected_description
meta = client_error.meta
fields = {
'fields': {
'latitude': 'Must be between -90.0 and 90.0',
'longitude': 'Must be between -180.0 and 180.0',
},
}
assert meta == fields
def test_simple_422_distance_exceeded_error(
simple_422_distance_exceeded_error
):
"""Test Distance Exceeded Error converted to ClientError correctly."""
client_error = ClientError(simple_422_distance_exceeded_error, 'msg')
assert client_error.message == 'msg'
assert isinstance(client_error.errors, list)
assert isinstance(client_error.meta, dict)
error_details = client_error.errors[0]
expected_code = 'distance_exceeded'
expected_description = http.ERROR_CODE_DESCRIPTION_DICT[expected_code]
assert isinstance(error_details, ErrorDetails)
assert error_details.status == http.STATUS_UNPROCESSABLE_ENTITY
assert error_details.code == expected_code
assert error_details.title == expected_description
meta = client_error.meta
fields = {
'fields': {
'start_longitude': http.ERROR_CODE_DESCRIPTION_DICT[expected_code],
'end_longitude': http.ERROR_CODE_DESCRIPTION_DICT[expected_code],
'start_latitude': http.ERROR_CODE_DESCRIPTION_DICT[expected_code],
'end_latitude': http.ERROR_CODE_DESCRIPTION_DICT[expected_code],
},
}
assert meta == fields
def test_simple_500_error(simple_500_error):
"""Test Internal Server Error converted to ClientError correctly."""
server_error = ServerError(simple_500_error, 'msg')
assert server_error.message == 'msg'
assert isinstance(server_error.meta, dict)
assert not server_error.meta
error_details = server_error.error # single error instead of array
expected_code = 'internal_server_error'
expected_description = http.ERROR_CODE_DESCRIPTION_DICT[expected_code]
assert isinstance(error_details, ErrorDetails)
assert error_details.status == http.STATUS_INTERNAL_SERVER_ERROR
assert error_details.code == expected_code
assert error_details.title == expected_description
def test_simple_503_error(simple_503_error):
"""Test Service Unavailable Error converted to ClientError correctly."""
server_error = ServerError(simple_503_error, 'msg')
assert server_error.message == 'msg'
assert isinstance(server_error.meta, dict)
assert not server_error.meta
error_details = server_error.error # single error instead of array
expected_code = 'service_unavailable'
expected_description = http.ERROR_CODE_DESCRIPTION_DICT[expected_code]
assert isinstance(error_details, ErrorDetails)
assert error_details.status == http.STATUS_SERVICE_UNAVAILABLE
assert error_details.code == expected_code
assert error_details.title == expected_description
def test_complex_409_surge_error(complex_409_surge_error):
"""Test Surge Error converted to ClientError correctly."""
client_error = ClientError(complex_409_surge_error, 'msg')
assert client_error.message == 'msg'
assert isinstance(client_error.errors, list)
assert isinstance(client_error.meta, dict)
error_details = client_error.errors[0]
expected_code = 'surge'
expected_description = http.ERROR_CODE_DESCRIPTION_DICT[expected_code]
assert isinstance(error_details, ErrorDetails)
assert error_details.status == http.STATUS_CONFLICT
assert error_details.code == expected_code
assert error_details.title == expected_description
surge_meta = client_error.meta['surge_confirmation']
assert surge_meta['surge_confirmation_id'] == 'abc'
assert surge_meta['href'] == 'api.uber.com/v1/surge-confirmations/abc'
def test_complex_422_same_pickup_dropoff_error(
complex_422_same_pickup_dropoff_error
):
"""Test Same Pickup-Dropoff Error converted to ClientError correctly."""
client_error = ClientError(complex_422_same_pickup_dropoff_error, 'msg')
assert client_error.message == 'msg'
assert isinstance(client_error.errors, list)
assert isinstance(client_error.meta, dict)
assert not client_error.meta
error_details = client_error.errors[0]
expected_code = 'same_pickup_dropoff'
expected_description = http.ERROR_CODE_DESCRIPTION_DICT[expected_code]
assert isinstance(error_details, ErrorDetails)
assert error_details.status == http.STATUS_UNPROCESSABLE_ENTITY
assert error_details.code == expected_code
assert error_details.title == expected_description
|
"""A parser for XML, using the derived class as static DTD."""
import re
import string
version = '0.3'
class Error(RuntimeError):
pass
_S = '[ \t\r\n]+' # white space
_opS = '[ \t\r\n]*' # optional white space
_Name = '[a-zA-Z_:][-a-zA-Z0-9._:]*' # valid XML name
_QStr = "(?:'[^']*'|\"[^\"]*\")" # quoted XML string
illegal = re.compile('[^\t\r\n -\176\240-\377]') # illegal chars in content
interesting = re.compile('[]&<]')
amp = re.compile('&')
ref = re.compile('&(' + _Name + '|#[0-9]+|#x[0-9a-fA-F]+)[^-a-zA-Z0-9._:]')
entityref = re.compile('&(?P<name>' + _Name + ')[^-a-zA-Z0-9._:]')
charref = re.compile('&#(?P<char>[0-9]+[^0-9]|x[0-9a-fA-F]+[^0-9a-fA-F])')
space = re.compile(_S + '$')
newline = re.compile('\n')
attrfind = re.compile(
_S + '(?P<name>' + _Name + ')'
'(' + _opS + '=' + _opS +
'(?P<value>'+_QStr+'|[-a-zA-Z0-9.:+*%?!\(\)_#=~]+))?')
starttagopen = re.compile('<' + _Name)
starttagend = re.compile(_opS + '(?P<slash>/?)>')
starttagmatch = re.compile('<(?P<tagname>'+_Name+')'
'(?P<attrs>(?:'+attrfind.pattern+')*)'+
starttagend.pattern)
endtagopen = re.compile('</')
endbracket = re.compile(_opS + '>')
endbracketfind = re.compile('(?:[^>\'"]|'+_QStr+')*>')
tagfind = re.compile(_Name)
cdataopen = re.compile(r'<!\[CDATA\[')
cdataclose = re.compile(r'\]\]>')
_SystemLiteral = '(?P<%s>'+_QStr+')'
_PublicLiteral = '(?P<%s>"[-\'\(\)+,./:=?;!*#@$_%% \n\ra-zA-Z0-9]*"|' \
"'[-\(\)+,./:=?;!*#@$_%% \n\ra-zA-Z0-9]*')"
_ExternalId = '(?:SYSTEM|' \
'PUBLIC'+_S+_PublicLiteral%'pubid'+ \
')'+_S+_SystemLiteral%'syslit'
doctype = re.compile('<!DOCTYPE'+_S+'(?P<name>'+_Name+')'
'(?:'+_S+_ExternalId+')?'+_opS)
xmldecl = re.compile('<\?xml'+_S+
'version'+_opS+'='+_opS+'(?P<version>'+_QStr+')'+
'(?:'+_S+'encoding'+_opS+'='+_opS+
"(?P<encoding>'[A-Za-z][-A-Za-z0-9._]*'|"
'"[A-Za-z][-A-Za-z0-9._]*"))?'
'(?:'+_S+'standalone'+_opS+'='+_opS+
'(?P<standalone>\'(?:yes|no)\'|"(?:yes|no)"))?'+
_opS+'\?>')
procopen = re.compile(r'<\?(?P<proc>' + _Name + ')' + _opS)
procclose = re.compile(_opS + r'\?>')
commentopen = re.compile('<!--')
commentclose = re.compile('-->')
doubledash = re.compile('--')
attrtrans = string.maketrans(' \r\n\t', '    ')
_NCName = '[a-zA-Z_][-a-zA-Z0-9._]*' # XML Name, minus the ":"
ncname = re.compile(_NCName + '$')
qname = re.compile('(?:(?P<prefix>' + _NCName + '):)?' # optional prefix
'(?P<local>' + _NCName + ')$')
xmlns = re.compile('xmlns(?::(?P<ncname>'+_NCName+'))?$')
class XMLParser:
attributes = {} # default, to be overridden
elements = {} # default, to be overridden
# parsing options, settable using keyword args in __init__
__accept_unquoted_attributes = 0
__accept_missing_endtag_name = 0
__map_case = 0
__accept_utf8 = 0
__translate_attribute_references = 1
# Interface -- initialize and reset this instance
def __init__(self, **kw):
self.__fixed = 0
if kw.has_key('accept_unquoted_attributes'):
self.__accept_unquoted_attributes = kw['accept_unquoted_attributes']
if kw.has_key('accept_missing_endtag_name'):
self.__accept_missing_endtag_name = kw['accept_missing_endtag_name']
if kw.has_key('map_case'):
self.__map_case = kw['map_case']
if kw.has_key('accept_utf8'):
self.__accept_utf8 = kw['accept_utf8']
if kw.has_key('translate_attribute_references'):
self.__translate_attribute_references = kw['translate_attribute_references']
self.reset()
def __fixelements(self):
self.__fixed = 1
self.elements = {}
self.__fixdict(self.__dict__)
self.__fixclass(self.__class__)
def __fixclass(self, kl):
self.__fixdict(kl.__dict__)
for k in kl.__bases__:
self.__fixclass(k)
def __fixdict(self, dict):
for key in dict.keys():
if key[:6] == 'start_':
tag = key[6:]
start, end = self.elements.get(tag, (None, None))
if start is None:
self.elements[tag] = getattr(self, key), end
elif key[:4] == 'end_':
tag = key[4:]
start, end = self.elements.get(tag, (None, None))
if end is None:
self.elements[tag] = start, getattr(self, key)
# Interface -- reset this instance. Loses all unprocessed data
def reset(self):
self.rawdata = ''
self.stack = []
self.nomoretags = 0
self.literal = 0
self.lineno = 1
self.__at_start = 1
self.__seen_doctype = None
self.__seen_starttag = 0
self.__use_namespaces = 0
self.__namespaces = {'xml':None} # xml is implicitly declared
# backward compatibility hack: if elements not overridden,
# fill it in ourselves
if self.elements is XMLParser.elements:
self.__fixelements()
# For derived classes only -- enter literal mode (CDATA) till EOF
def setnomoretags(self):
self.nomoretags = self.literal = 1
# For derived classes only -- enter literal mode (CDATA)
def setliteral(self, *args):
self.literal = 1
# Interface -- feed some data to the parser. Call this as
# often as you want, with as little or as much text as you
# want (may include '\n'). (This just saves the text, all the
# processing is done by goahead().)
def feed(self, data):
self.rawdata = self.rawdata + data
self.goahead(0)
# Interface -- handle the remaining data
def close(self):
self.goahead(1)
if self.__fixed:
self.__fixed = 0
# remove self.elements so that we don't leak
del self.elements
# Interface -- translate references
def translate_references(self, data, all = 1):
if not self.__translate_attribute_references:
return data
i = 0
while 1:
res = amp.search(data, i)
if res is None:
return data
s = res.start(0)
res = ref.match(data, s)
if res is None:
self.syntax_error("bogus `&'")
i = s+1
continue
i = res.end(0)
str = res.group(1)
rescan = 0
if str[0] == '#':
if str[1] == 'x':
str = chr(int(str[2:], 16))
else:
str = chr(int(str[1:]))
if data[i - 1] != ';':
self.syntax_error("`;' missing after char reference")
i = i-1
elif all:
if self.entitydefs.has_key(str):
str = self.entitydefs[str]
rescan = 1
elif data[i - 1] != ';':
self.syntax_error("bogus `&'")
i = s + 1 # just past the &
continue
else:
self.syntax_error("reference to unknown entity `&%s;'" % str)
str = '&' + str + ';'
elif data[i - 1] != ';':
self.syntax_error("bogus `&'")
i = s + 1 # just past the &
continue
# when we get here, str contains the translated text and i points
# to the end of the string that is to be replaced
data = data[:s] + str + data[i:]
if rescan:
i = s
else:
i = s + len(str)
# Interface - return a dictionary of all namespaces currently valid
def getnamespace(self):
nsdict = {}
for t, d, nst in self.stack:
nsdict.update(d)
return nsdict
# Internal -- handle data as far as reasonable. May leave state
# and data to be processed by a subsequent call. If 'end' is
# true, force handling all data as if followed by EOF marker.
def goahead(self, end):
rawdata = self.rawdata
i = 0
n = len(rawdata)
while i < n:
if i > 0:
self.__at_start = 0
if self.nomoretags:
data = rawdata[i:n]
self.handle_data(data)
self.lineno = self.lineno + data.count('\n')
i = n
break
res = interesting.search(rawdata, i)
if res:
j = res.start(0)
else:
j = n
if i < j:
data = rawdata[i:j]
if self.__at_start and space.match(data) is None:
self.syntax_error('illegal data at start of file')
self.__at_start = 0
if not self.stack and space.match(data) is None:
self.syntax_error('data not in content')
if not self.__accept_utf8 and illegal.search(data):
self.syntax_error('illegal character in content')
self.handle_data(data)
self.lineno = self.lineno + data.count('\n')
i = j
if i == n: break
if rawdata[i] == '<':
if starttagopen.match(rawdata, i):
if self.literal:
data = rawdata[i]
self.handle_data(data)
self.lineno = self.lineno + data.count('\n')
i = i+1
continue
k = self.parse_starttag(i)
if k < 0: break
self.__seen_starttag = 1
self.lineno = self.lineno + rawdata[i:k].count('\n')
i = k
continue
if endtagopen.match(rawdata, i):
k = self.parse_endtag(i)
if k < 0: break
self.lineno = self.lineno + rawdata[i:k].count('\n')
i = k
continue
if commentopen.match(rawdata, i):
if self.literal:
data = rawdata[i]
self.handle_data(data)
self.lineno = self.lineno + data.count('\n')
i = i+1
continue
k = self.parse_comment(i)
if k < 0: break
self.lineno = self.lineno + rawdata[i:k].count('\n')
i = k
continue
if cdataopen.match(rawdata, i):
k = self.parse_cdata(i)
if k < 0: break
self.lineno = self.lineno + rawdata[i:k].count('\n')
i = k
continue
res = xmldecl.match(rawdata, i)
if res:
if not self.__at_start:
self.syntax_error("<?xml?> declaration not at start of document")
version, encoding, standalone = res.group('version',
'encoding',
'standalone')
if version[1:-1] != '1.0':
raise Error('only XML version 1.0 supported')
if encoding: encoding = encoding[1:-1]
if standalone: standalone = standalone[1:-1]
self.handle_xml(encoding, standalone)
i = res.end(0)
continue
res = procopen.match(rawdata, i)
if res:
k = self.parse_proc(i)
if k < 0: break
self.lineno = self.lineno + rawdata[i:k].count('\n')
i = k
continue
res = doctype.match(rawdata, i)
if res:
if self.literal:
data = rawdata[i]
self.handle_data(data)
self.lineno = self.lineno + data.count('\n')
i = i+1
continue
if self.__seen_doctype:
self.syntax_error('multiple DOCTYPE elements')
if self.__seen_starttag:
self.syntax_error('DOCTYPE not at beginning of document')
k = self.parse_doctype(res)
if k < 0: break
self.__seen_doctype = res.group('name')
if self.__map_case:
self.__seen_doctype = self.__seen_doctype.lower()
self.lineno = self.lineno + rawdata[i:k].count('\n')
i = k
continue
elif rawdata[i] == '&':
if self.literal:
data = rawdata[i]
self.handle_data(data)
i = i+1
continue
res = charref.match(rawdata, i)
if res is not None:
i = res.end(0)
if rawdata[i-1] != ';':
self.syntax_error("`;' missing in charref")
i = i-1
if not self.stack:
self.syntax_error('data not in content')
self.handle_charref(res.group('char')[:-1])
self.lineno = self.lineno + res.group(0).count('\n')
continue
res = entityref.match(rawdata, i)
if res is not None:
i = res.end(0)
if rawdata[i-1] != ';':
self.syntax_error("`;' missing in entityref")
i = i-1
name = res.group('name')
if self.__map_case:
name = name.lower()
if self.entitydefs.has_key(name):
self.rawdata = rawdata = rawdata[:res.start(0)] + self.entitydefs[name] + rawdata[i:]
n = len(rawdata)
i = res.start(0)
else:
self.unknown_entityref(name)
self.lineno = self.lineno + res.group(0).count('\n')
continue
elif rawdata[i] == ']':
if self.literal:
data = rawdata[i]
self.handle_data(data)
i = i+1
continue
if n-i < 3:
break
if cdataclose.match(rawdata, i):
self.syntax_error("bogus `]]>'")
self.handle_data(rawdata[i])
i = i+1
continue
else:
raise Error('neither < nor & ??')
# We get here only if incomplete matches but
# nothing else
break
# end while
if i > 0:
self.__at_start = 0
if end and i < n:
data = rawdata[i]
self.syntax_error("bogus `%s'" % data)
if not self.__accept_utf8 and illegal.search(data):
self.syntax_error('illegal character in content')
self.handle_data(data)
self.lineno = self.lineno + data.count('\n')
self.rawdata = rawdata[i+1:]
return self.goahead(end)
self.rawdata = rawdata[i:]
if end:
if not self.__seen_starttag:
self.syntax_error('no elements in file')
if self.stack:
self.syntax_error('missing end tags')
while self.stack:
self.finish_endtag(self.stack[-1][0])
# Internal -- parse comment, return length or -1 if not terminated
def parse_comment(self, i):
rawdata = self.rawdata
if rawdata[i:i+4] != '<!--':
raise Error('unexpected call to handle_comment')
res = commentclose.search(rawdata, i+4)
if res is None:
return -1
if doubledash.search(rawdata, i+4, res.start(0)):
self.syntax_error("`--' inside comment")
if rawdata[res.start(0)-1] == '-':
self.syntax_error('comment cannot end in three dashes')
if not self.__accept_utf8 and \
illegal.search(rawdata, i+4, res.start(0)):
self.syntax_error('illegal character in comment')
self.handle_comment(rawdata[i+4: res.start(0)])
return res.end(0)
# Internal -- handle DOCTYPE tag, return length or -1 if not terminated
def parse_doctype(self, res):
rawdata = self.rawdata
n = len(rawdata)
name = res.group('name')
if self.__map_case:
name = name.lower()
pubid, syslit = res.group('pubid', 'syslit')
if pubid is not None:
pubid = pubid[1:-1] # remove quotes
pubid = ' '.join(pubid.split()) # normalize
if syslit is not None: syslit = syslit[1:-1] # remove quotes
j = k = res.end(0)
if k >= n:
return -1
if rawdata[k] == '[':
level = 0
k = k+1
dq = sq = 0
while k < n:
c = rawdata[k]
if not sq and c == '"':
dq = not dq
elif not dq and c == "'":
sq = not sq
elif sq or dq:
pass
elif level <= 0 and c == ']':
res = endbracket.match(rawdata, k+1)
if res is None:
return -1
self.handle_doctype(name, pubid, syslit, rawdata[j+1:k])
return res.end(0)
elif c == '<':
level = level + 1
elif c == '>':
level = level - 1
if level < 0:
self.syntax_error("bogus `>' in DOCTYPE")
k = k+1
res = endbracketfind.match(rawdata, k)
if res is None:
return -1
if endbracket.match(rawdata, k) is None:
self.syntax_error('garbage in DOCTYPE')
self.handle_doctype(name, pubid, syslit, None)
return res.end(0)
# Internal -- handle CDATA tag, return length or -1 if not terminated
def parse_cdata(self, i):
rawdata = self.rawdata
if rawdata[i:i+9] != '<![CDATA[':
raise Error('unexpected call to parse_cdata')
res = cdataclose.search(rawdata, i+9)
if res is None:
return -1
if not self.__accept_utf8 and \
illegal.search(rawdata, i+9, res.start(0)):
self.syntax_error('illegal character in CDATA')
if not self.stack:
self.syntax_error('CDATA not in content')
self.handle_cdata(rawdata[i+9:res.start(0)])
return res.end(0)
__xml_namespace_attributes = {'ns':None, 'src':None, 'prefix':None}
# Internal -- handle a processing instruction tag
def parse_proc(self, i):
rawdata = self.rawdata
end = procclose.search(rawdata, i)
if end is None:
return -1
j = end.start(0)
if not self.__accept_utf8 and illegal.search(rawdata, i+2, j):
self.syntax_error('illegal character in processing instruction')
res = tagfind.match(rawdata, i+2)
if res is None:
raise Error('unexpected call to parse_proc')
k = res.end(0)
name = res.group(0)
if self.__map_case:
name = name.lower()
if name == 'xml:namespace':
self.syntax_error('old-fashioned namespace declaration')
self.__use_namespaces = -1
# namespace declaration
# this must come after the <?xml?> declaration (if any)
# and before the <!DOCTYPE> (if any).
if self.__seen_doctype or self.__seen_starttag:
self.syntax_error('xml:namespace declaration too late in document')
attrdict, namespace, k = self.parse_attributes(name, k, j)
if namespace:
self.syntax_error('namespace declaration inside namespace declaration')
for attrname in attrdict.keys():
if not self.__xml_namespace_attributes.has_key(attrname):
self.syntax_error("unknown attribute `%s' in xml:namespace tag" % attrname)
if not attrdict.has_key('ns') or not attrdict.has_key('prefix'):
self.syntax_error('xml:namespace without required attributes')
prefix = attrdict.get('prefix')
if ncname.match(prefix) is None:
self.syntax_error('xml:namespace illegal prefix value')
return end.end(0)
if self.__namespaces.has_key(prefix):
self.syntax_error('xml:namespace prefix not unique')
self.__namespaces[prefix] = attrdict['ns']
else:
if name.lower() == 'xml':
self.syntax_error('illegal processing instruction target name')
self.handle_proc(name, rawdata[k:j])
return end.end(0)
# Internal -- parse attributes between i and j
def parse_attributes(self, tag, i, j):
rawdata = self.rawdata
attrdict = {}
namespace = {}
while i < j:
res = attrfind.match(rawdata, i)
if res is None:
break
attrname, attrvalue = res.group('name', 'value')
if self.__map_case:
attrname = attrname.lower()
i = res.end(0)
if attrvalue is None:
self.syntax_error("no value specified for attribute `%s'" % attrname)
attrvalue = attrname
elif attrvalue[:1] == "'" == attrvalue[-1:] or \
attrvalue[:1] == '"' == attrvalue[-1:]:
attrvalue = attrvalue[1:-1]
elif not self.__accept_unquoted_attributes:
self.syntax_error("attribute `%s' value not quoted" % attrname)
res = xmlns.match(attrname)
if res is not None:
# namespace declaration
ncname = res.group('ncname')
namespace[ncname or ''] = attrvalue or None
if not self.__use_namespaces:
self.__use_namespaces = len(self.stack)+1
continue
if '<' in attrvalue:
self.syntax_error("`<' illegal in attribute value")
if attrdict.has_key(attrname):
self.syntax_error("attribute `%s' specified twice" % attrname)
attrvalue = attrvalue.translate(attrtrans)
attrdict[attrname] = self.translate_references(attrvalue)
return attrdict, namespace, i
# Internal -- handle starttag, return length or -1 if not terminated
def parse_starttag(self, i):
rawdata = self.rawdata
# i points to start of tag
end = endbracketfind.match(rawdata, i+1)
if end is None:
return -1
tag = starttagmatch.match(rawdata, i)
if tag is None or tag.end(0) != end.end(0):
self.syntax_error('garbage in starttag')
return end.end(0)
nstag = tagname = tag.group('tagname')
if self.__map_case:
nstag = tagname = nstag.lower()
if not self.__seen_starttag and self.__seen_doctype and \
tagname != self.__seen_doctype:
self.syntax_error('starttag does not match DOCTYPE')
if self.__seen_starttag and not self.stack:
self.syntax_error('multiple elements on top level')
k, j = tag.span('attrs')
attrdict, nsdict, k = self.parse_attributes(tagname, k, j)
self.stack.append((tagname, nsdict, nstag))
if self.__use_namespaces:
res = qname.match(tagname)
else:
res = None
if res is not None:
prefix, nstag = res.group('prefix', 'local')
if prefix is None:
prefix = ''
ns = None
for t, d, nst in self.stack:
if d.has_key(prefix):
ns = d[prefix]
if ns is None and prefix != '':
ns = self.__namespaces.get(prefix)
if ns is not None:
nstag = ns + ' ' + nstag
elif prefix != '':
nstag = prefix + ':' + nstag # undo split
self.stack[-1] = tagname, nsdict, nstag
# translate namespace of attributes
attrnamemap = {} # map from new name to old name (used for error reporting)
for key in attrdict.keys():
attrnamemap[key] = key
if self.__use_namespaces:
nattrdict = {}
for key, val in attrdict.items():
okey = key
res = qname.match(key)
if res is not None:
aprefix, key = res.group('prefix', 'local')
if self.__map_case:
key = key.lower()
if aprefix is None:
aprefix = ''
ans = None
for t, d, nst in self.stack:
if d.has_key(aprefix):
ans = d[aprefix]
if ans is None and aprefix != '':
ans = self.__namespaces.get(aprefix)
if ans is not None:
key = ans + ' ' + key
elif aprefix != '':
key = aprefix + ':' + key
elif ns is not None:
key = ns + ' ' + key
nattrdict[key] = val
attrnamemap[key] = okey
attrdict = nattrdict
attributes = self.attributes.get(nstag)
if attributes is not None:
for key in attrdict.keys():
if not attributes.has_key(key):
self.syntax_error("unknown attribute `%s' in tag `%s'" % (attrnamemap[key], tagname))
for key, val in attributes.items():
if val is not None and not attrdict.has_key(key):
attrdict[key] = val
method = self.elements.get(nstag, (None, None))[0]
self.finish_starttag(nstag, attrdict, method)
if tag.group('slash') == '/':
self.finish_endtag(tagname)
return tag.end(0)
# Internal -- parse endtag
def parse_endtag(self, i):
rawdata = self.rawdata
end = endbracketfind.match(rawdata, i+1)
if end is None:
return -1
res = tagfind.match(rawdata, i+2)
if res is None:
if self.literal:
self.handle_data(rawdata[i])
return i+1
if not self.__accept_missing_endtag_name:
self.syntax_error('no name specified in end tag')
tag = self.stack[-1][0]
k = i+2
else:
tag = res.group(0)
if self.__map_case:
tag = tag.lower()
if self.literal:
if not self.stack or tag != self.stack[-1][0]:
self.handle_data(rawdata[i])
return i+1
k = res.end(0)
if endbracket.match(rawdata, k) is None:
self.syntax_error('garbage in end tag')
self.finish_endtag(tag)
return end.end(0)
# Internal -- finish processing of start tag
def finish_starttag(self, tagname, attrdict, method):
if method is not None:
self.handle_starttag(tagname, method, attrdict)
else:
self.unknown_starttag(tagname, attrdict)
# Internal -- finish processing of end tag
def finish_endtag(self, tag):
self.literal = 0
if not tag:
self.syntax_error('name-less end tag')
found = len(self.stack) - 1
if found < 0:
self.unknown_endtag(tag)
return
else:
found = -1
for i in range(len(self.stack)):
if tag == self.stack[i][0]:
found = i
if found == -1:
self.syntax_error('unopened end tag')
return
while len(self.stack) > found:
if found < len(self.stack) - 1:
self.syntax_error('missing close tag for %s' % self.stack[-1][2])
nstag = self.stack[-1][2]
method = self.elements.get(nstag, (None, None))[1]
if method is not None:
self.handle_endtag(nstag, method)
else:
self.unknown_endtag(nstag)
if self.__use_namespaces == len(self.stack):
self.__use_namespaces = 0
del self.stack[-1]
# Overridable -- handle xml processing instruction
def handle_xml(self, encoding, standalone):
pass
# Overridable -- handle DOCTYPE
def handle_doctype(self, tag, pubid, syslit, data):
pass
# Overridable -- handle start tag
def handle_starttag(self, tag, method, attrs):
method(attrs)
# Overridable -- handle end tag
def handle_endtag(self, tag, method):
method()
# Example -- handle character reference, no need to override
def handle_charref(self, name):
try:
if name[0] == 'x':
n = int(name[1:], 16)
else:
n = int(name)
except ValueError:
self.unknown_charref(name)
return
if not 0 <= n <= 255:
self.unknown_charref(name)
return
self.handle_data(chr(n))
# Definition of entities -- derived classes may override
    entitydefs = {'lt': '&#60;',        # must use charref
                  'gt': '&#62;',
                  'amp': '&#38;',       # must use charref
                  'quot': '&#34;',
                  'apos': '&#39;',
                  }
# Example -- handle data, should be overridden
def handle_data(self, data):
pass
# Example -- handle cdata, could be overridden
def handle_cdata(self, data):
pass
# Example -- handle comment, could be overridden
def handle_comment(self, data):
pass
# Example -- handle processing instructions, could be overridden
def handle_proc(self, name, data):
pass
# Example -- handle relatively harmless syntax errors, could be overridden
def syntax_error(self, message):
raise Error('Syntax error at line %d: %s' % (self.lineno, message))
# To be overridden -- handlers for unknown objects
def unknown_starttag(self, tag, attrs): pass
def unknown_endtag(self, tag): pass
def unknown_charref(self, ref): pass
def unknown_entityref(self, name):
self.syntax_error("reference to unknown entity `&%s;'" % name)
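# Illustrative usage sketch (hypothetical subclass, not part of the original
# module): the "static DTD" mentioned in the module docstring is simply the
# set of start_<tag>/end_<tag> methods defined on a derived class. reset()
# collects them into self.elements; start handlers receive the attribute
# dict, end handlers take no arguments.
class _TitleEcho(XMLParser):
    def start_title(self, attrs):
        self.in_title = 1
    def end_title(self):
        self.in_title = 0
    def handle_data(self, data):
        if getattr(self, 'in_title', 0):
            print 'title text:', data
# e.g. p = _TitleEcho(); p.feed('<doc><title>hi</title></doc>'); p.close()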
class TestXMLParser(XMLParser):
def __init__(self, **kw):
self.testdata = ""
apply(XMLParser.__init__, (self,), kw)
def handle_xml(self, encoding, standalone):
self.flush()
print 'xml: encoding =',encoding,'standalone =',standalone
def handle_doctype(self, tag, pubid, syslit, data):
self.flush()
print 'DOCTYPE:',tag, `data`
def handle_data(self, data):
self.testdata = self.testdata + data
if len(`self.testdata`) >= 70:
self.flush()
def flush(self):
data = self.testdata
if data:
self.testdata = ""
print 'data:', `data`
def handle_cdata(self, data):
self.flush()
print 'cdata:', `data`
def handle_proc(self, name, data):
self.flush()
print 'processing:',name,`data`
def handle_comment(self, data):
self.flush()
r = `data`
if len(r) > 68:
r = r[:32] + '...' + r[-32:]
print 'comment:', r
def syntax_error(self, message):
print 'error at line %d:' % self.lineno, message
def unknown_starttag(self, tag, attrs):
self.flush()
if not attrs:
print 'start tag: <' + tag + '>'
else:
print 'start tag: <' + tag,
for name, value in attrs.items():
print name + '=' + '"' + value + '"',
print '>'
def unknown_endtag(self, tag):
self.flush()
print 'end tag: </' + tag + '>'
def unknown_entityref(self, ref):
self.flush()
print '*** unknown entity ref: &' + ref + ';'
def unknown_charref(self, ref):
self.flush()
print '*** unknown char ref: &#' + ref + ';'
def close(self):
XMLParser.close(self)
self.flush()
def test(args = None):
import sys, getopt
from time import time
if not args:
args = sys.argv[1:]
opts, args = getopt.getopt(args, 'st')
klass = TestXMLParser
do_time = 0
for o, a in opts:
if o == '-s':
klass = XMLParser
elif o == '-t':
do_time = 1
if args:
file = args[0]
else:
file = 'test.xml'
if file == '-':
f = sys.stdin
else:
try:
f = open(file, 'r')
except IOError, msg:
print file, ":", msg
sys.exit(1)
data = f.read()
if f is not sys.stdin:
f.close()
x = klass()
t0 = time()
try:
if do_time:
x.feed(data)
x.close()
else:
for c in data:
x.feed(c)
x.close()
except Error, msg:
t1 = time()
print msg
if do_time:
print 'total time: %g' % (t1-t0)
sys.exit(1)
t1 = time()
if do_time:
print 'total time: %g' % (t1-t0)
if __name__ == '__main__':
test()
|
import errno
import operator
import os
import shutil
import site
from optparse import SUPPRESS_HELP, Values
from typing import Iterable, List, Optional
from pip._vendor.packaging.utils import canonicalize_name
from pip._internal.cache import WheelCache
from pip._internal.cli import cmdoptions
from pip._internal.cli.cmdoptions import make_target_python
from pip._internal.cli.req_command import (
RequirementCommand,
warn_if_run_as_root,
with_cleanup,
)
from pip._internal.cli.status_codes import ERROR, SUCCESS
from pip._internal.exceptions import CommandError, InstallationError
from pip._internal.locations import get_scheme
from pip._internal.metadata import get_environment
from pip._internal.models.format_control import FormatControl
from pip._internal.operations.check import ConflictDetails, check_install_conflicts
from pip._internal.req import install_given_reqs
from pip._internal.req.req_install import InstallRequirement
from pip._internal.req.req_tracker import get_requirement_tracker
from pip._internal.utils.compat import WINDOWS
from pip._internal.utils.distutils_args import parse_distutils_args
from pip._internal.utils.filesystem import test_writable_dir
from pip._internal.utils.logging import getLogger
from pip._internal.utils.misc import (
ensure_dir,
get_pip_version,
protect_pip_from_modification_on_windows,
write_output,
)
from pip._internal.utils.temp_dir import TempDirectory
from pip._internal.utils.virtualenv import (
running_under_virtualenv,
virtualenv_no_global,
)
from pip._internal.wheel_builder import (
BinaryAllowedPredicate,
build,
should_build_for_install_command,
)
logger = getLogger(__name__)
def get_check_binary_allowed(format_control: FormatControl) -> BinaryAllowedPredicate:
def check_binary_allowed(req: InstallRequirement) -> bool:
canonical_name = canonicalize_name(req.name or "")
allowed_formats = format_control.get_allowed_formats(canonical_name)
return "binary" in allowed_formats
return check_binary_allowed
class InstallCommand(RequirementCommand):
"""
Install packages from:
- PyPI (and other indexes) using requirement specifiers.
- VCS project urls.
- Local project directories.
- Local or remote source archives.
pip also supports installing from "requirements files", which provide
an easy way to specify a whole environment to be installed.
"""
usage = """
%prog [options] <requirement specifier> [package-index-options] ...
%prog [options] -r <requirements file> [package-index-options] ...
%prog [options] [-e] <vcs project url> ...
%prog [options] [-e] <local project path> ...
%prog [options] <archive url/path> ..."""
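    # A requirements file is plain text with one requirement specifier per
    # line; '#' comments, editable/local paths and per-requirement options
    # are also accepted. Hypothetical example contents:
    #
    #     requests==2.26.0
    #     -e ./local/project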
def add_options(self) -> None:
self.cmd_opts.add_option(cmdoptions.requirements())
self.cmd_opts.add_option(cmdoptions.constraints())
self.cmd_opts.add_option(cmdoptions.no_deps())
self.cmd_opts.add_option(cmdoptions.pre())
self.cmd_opts.add_option(cmdoptions.editable())
self.cmd_opts.add_option(
"-t",
"--target",
dest="target_dir",
metavar="dir",
default=None,
help=(
"Install packages into <dir>. "
"By default this will not replace existing files/folders in "
"<dir>. Use --upgrade to replace existing packages in <dir> "
"with new versions."
),
)
cmdoptions.add_target_python_options(self.cmd_opts)
self.cmd_opts.add_option(
"--user",
dest="use_user_site",
action="store_true",
help=(
"Install to the Python user install directory for your "
"platform. Typically ~/.local/, or %APPDATA%\\Python on "
"Windows. (See the Python documentation for site.USER_BASE "
"for full details.)"
),
)
self.cmd_opts.add_option(
"--no-user",
dest="use_user_site",
action="store_false",
help=SUPPRESS_HELP,
)
self.cmd_opts.add_option(
"--root",
dest="root_path",
metavar="dir",
default=None,
help="Install everything relative to this alternate root directory.",
)
self.cmd_opts.add_option(
"--prefix",
dest="prefix_path",
metavar="dir",
default=None,
help=(
"Installation prefix where lib, bin and other top-level "
"folders are placed"
),
)
self.cmd_opts.add_option(cmdoptions.src())
self.cmd_opts.add_option(
"-U",
"--upgrade",
dest="upgrade",
action="store_true",
help=(
"Upgrade all specified packages to the newest available "
"version. The handling of dependencies depends on the "
"upgrade-strategy used."
),
)
self.cmd_opts.add_option(
"--upgrade-strategy",
dest="upgrade_strategy",
default="only-if-needed",
choices=["only-if-needed", "eager"],
            help=(
                "Determines how dependency upgrading should be handled "
                "[default: %default]. "
                '"eager" - dependencies are upgraded regardless of '
                "whether the currently installed version satisfies the "
                "requirements of the upgraded package(s). "
                '"only-if-needed" - dependencies are upgraded only when they '
                "do not satisfy the requirements of the upgraded package(s)."
            ),
)
self.cmd_opts.add_option(
"--force-reinstall",
dest="force_reinstall",
action="store_true",
help="Reinstall all packages even if they are already up-to-date.",
)
self.cmd_opts.add_option(
"-I",
"--ignore-installed",
dest="ignore_installed",
action="store_true",
help=(
"Ignore the installed packages, overwriting them. "
"This can break your system if the existing package "
"is of a different version or was installed "
"with a different package manager!"
),
)
self.cmd_opts.add_option(cmdoptions.ignore_requires_python())
self.cmd_opts.add_option(cmdoptions.no_build_isolation())
self.cmd_opts.add_option(cmdoptions.use_pep517())
self.cmd_opts.add_option(cmdoptions.no_use_pep517())
self.cmd_opts.add_option(cmdoptions.install_options())
self.cmd_opts.add_option(cmdoptions.global_options())
self.cmd_opts.add_option(
"--compile",
action="store_true",
dest="compile",
default=True,
help="Compile Python source files to bytecode",
)
self.cmd_opts.add_option(
"--no-compile",
action="store_false",
dest="compile",
help="Do not compile Python source files to bytecode",
)
self.cmd_opts.add_option(
"--no-warn-script-location",
action="store_false",
dest="warn_script_location",
default=True,
help="Do not warn when installing scripts outside PATH",
)
self.cmd_opts.add_option(
"--no-warn-conflicts",
action="store_false",
dest="warn_about_conflicts",
default=True,
help="Do not warn about broken dependencies",
)
self.cmd_opts.add_option(cmdoptions.no_binary())
self.cmd_opts.add_option(cmdoptions.only_binary())
self.cmd_opts.add_option(cmdoptions.prefer_binary())
self.cmd_opts.add_option(cmdoptions.require_hashes())
self.cmd_opts.add_option(cmdoptions.progress_bar())
index_opts = cmdoptions.make_option_group(
cmdoptions.index_group,
self.parser,
)
self.parser.insert_option_group(0, index_opts)
self.parser.insert_option_group(0, self.cmd_opts)
@with_cleanup
def run(self, options: Values, args: List[str]) -> int:
if options.use_user_site and options.target_dir is not None:
raise CommandError("Can not combine '--user' and '--target'")
cmdoptions.check_install_build_global(options)
upgrade_strategy = "to-satisfy-only"
if options.upgrade:
upgrade_strategy = options.upgrade_strategy
cmdoptions.check_dist_restriction(options, check_target=True)
install_options = options.install_options or []
logger.verbose("Using %s", get_pip_version())
options.use_user_site = decide_user_install(
options.use_user_site,
prefix_path=options.prefix_path,
target_dir=options.target_dir,
root_path=options.root_path,
isolated_mode=options.isolated_mode,
)
target_temp_dir: Optional[TempDirectory] = None
target_temp_dir_path: Optional[str] = None
if options.target_dir:
options.ignore_installed = True
options.target_dir = os.path.abspath(options.target_dir)
if (
# fmt: off
os.path.exists(options.target_dir) and
not os.path.isdir(options.target_dir)
# fmt: on
):
raise CommandError(
"Target path exists but is not a directory, will not continue."
)
# Create a target directory for using with the target option
target_temp_dir = TempDirectory(kind="target")
target_temp_dir_path = target_temp_dir.path
self.enter_context(target_temp_dir)
global_options = options.global_options or []
session = self.get_default_session(options)
target_python = make_target_python(options)
finder = self._build_package_finder(
options=options,
session=session,
target_python=target_python,
ignore_requires_python=options.ignore_requires_python,
)
wheel_cache = WheelCache(options.cache_dir, options.format_control)
req_tracker = self.enter_context(get_requirement_tracker())
directory = TempDirectory(
delete=not options.no_clean,
kind="install",
globally_managed=True,
)
try:
reqs = self.get_requirements(args, options, finder, session)
# Only when installing is it permitted to use PEP 660.
# In other circumstances (pip wheel, pip download) we generate
# regular (i.e. non editable) metadata and wheels.
for req in reqs:
req.permit_editable_wheels = True
reject_location_related_install_options(reqs, options.install_options)
preparer = self.make_requirement_preparer(
temp_build_dir=directory,
options=options,
req_tracker=req_tracker,
session=session,
finder=finder,
use_user_site=options.use_user_site,
verbosity=self.verbosity,
)
resolver = self.make_resolver(
preparer=preparer,
finder=finder,
options=options,
wheel_cache=wheel_cache,
use_user_site=options.use_user_site,
ignore_installed=options.ignore_installed,
ignore_requires_python=options.ignore_requires_python,
force_reinstall=options.force_reinstall,
upgrade_strategy=upgrade_strategy,
use_pep517=options.use_pep517,
)
self.trace_basic_info(finder)
requirement_set = resolver.resolve(
reqs, check_supported_wheels=not options.target_dir
)
try:
pip_req = requirement_set.get_requirement("pip")
except KeyError:
modifying_pip = False
else:
# If we're not replacing an already installed pip,
# we're not modifying it.
modifying_pip = pip_req.satisfied_by is None
protect_pip_from_modification_on_windows(modifying_pip=modifying_pip)
check_binary_allowed = get_check_binary_allowed(finder.format_control)
reqs_to_build = [
r
for r in requirement_set.requirements.values()
if should_build_for_install_command(r, check_binary_allowed)
]
_, build_failures = build(
reqs_to_build,
wheel_cache=wheel_cache,
verify=True,
build_options=[],
global_options=[],
)
# If we're using PEP 517, we cannot do a legacy setup.py install
# so we fail here.
pep517_build_failure_names: List[str] = [
r.name for r in build_failures if r.use_pep517 # type: ignore
]
if pep517_build_failure_names:
raise InstallationError(
"Could not build wheels for {}, which is required to "
"install pyproject.toml-based projects".format(
", ".join(pep517_build_failure_names)
)
)
# For now, we just warn about failures building legacy
# requirements, as we'll fall through to a setup.py install for
# those.
for r in build_failures:
if not r.use_pep517:
r.legacy_install_reason = 8368
to_install = resolver.get_installation_order(requirement_set)
# Check for conflicts in the package set we're installing.
conflicts: Optional[ConflictDetails] = None
should_warn_about_conflicts = (
not options.ignore_dependencies and options.warn_about_conflicts
)
if should_warn_about_conflicts:
conflicts = self._determine_conflicts(to_install)
# Don't warn about script install locations if
# --target or --prefix has been specified
warn_script_location = options.warn_script_location
if options.target_dir or options.prefix_path:
warn_script_location = False
installed = install_given_reqs(
to_install,
install_options,
global_options,
root=options.root_path,
home=target_temp_dir_path,
prefix=options.prefix_path,
warn_script_location=warn_script_location,
use_user_site=options.use_user_site,
pycompile=options.compile,
)
lib_locations = get_lib_location_guesses(
user=options.use_user_site,
home=target_temp_dir_path,
root=options.root_path,
prefix=options.prefix_path,
isolated=options.isolated_mode,
)
env = get_environment(lib_locations)
installed.sort(key=operator.attrgetter("name"))
items = []
for result in installed:
item = result.name
try:
installed_dist = env.get_distribution(item)
if installed_dist is not None:
item = f"{item}-{installed_dist.version}"
except Exception:
pass
items.append(item)
if conflicts is not None:
self._warn_about_conflicts(
conflicts,
resolver_variant=self.determine_resolver_variant(options),
)
installed_desc = " ".join(items)
if installed_desc:
write_output(
"Successfully installed %s",
installed_desc,
)
except OSError as error:
show_traceback = self.verbosity >= 1
message = create_os_error_message(
error,
show_traceback,
options.use_user_site,
)
logger.error(message, exc_info=show_traceback) # noqa
return ERROR
if options.target_dir:
assert target_temp_dir
self._handle_target_dir(
options.target_dir, target_temp_dir, options.upgrade
)
warn_if_run_as_root()
return SUCCESS
def _handle_target_dir(
self, target_dir: str, target_temp_dir: TempDirectory, upgrade: bool
) -> None:
ensure_dir(target_dir)
# Checking both purelib and platlib directories for installed
# packages to be moved to target directory
lib_dir_list = []
scheme = get_scheme("", home=target_temp_dir.path)
purelib_dir = scheme.purelib
platlib_dir = scheme.platlib
data_dir = scheme.data
if os.path.exists(purelib_dir):
lib_dir_list.append(purelib_dir)
if os.path.exists(platlib_dir) and platlib_dir != purelib_dir:
lib_dir_list.append(platlib_dir)
if os.path.exists(data_dir):
lib_dir_list.append(data_dir)
for lib_dir in lib_dir_list:
for item in os.listdir(lib_dir):
if lib_dir == data_dir:
ddir = os.path.join(data_dir, item)
if any(s.startswith(ddir) for s in lib_dir_list[:-1]):
continue
target_item_dir = os.path.join(target_dir, item)
if os.path.exists(target_item_dir):
if not upgrade:
logger.warning(
"Target directory %s already exists. Specify "
"--upgrade to force replacement.",
target_item_dir,
)
continue
if os.path.islink(target_item_dir):
logger.warning(
"Target directory %s already exists and is "
"a link. pip will not automatically replace "
"links, please remove if replacement is "
"desired.",
target_item_dir,
)
continue
if os.path.isdir(target_item_dir):
shutil.rmtree(target_item_dir)
else:
os.remove(target_item_dir)
shutil.move(os.path.join(lib_dir, item), target_item_dir)
def _determine_conflicts(
self, to_install: List[InstallRequirement]
) -> Optional[ConflictDetails]:
try:
return check_install_conflicts(to_install)
except Exception:
logger.exception(
"Error while checking for conflicts. Please file an issue on "
"pip's issue tracker: https://github.com/pypa/pip/issues/new"
)
return None
def _warn_about_conflicts(
self, conflict_details: ConflictDetails, resolver_variant: str
) -> None:
package_set, (missing, conflicting) = conflict_details
if not missing and not conflicting:
return
parts: List[str] = []
if resolver_variant == "legacy":
parts.append(
"pip's legacy dependency resolver does not consider dependency "
"conflicts when selecting packages. This behaviour is the "
"source of the following dependency conflicts."
)
else:
assert resolver_variant == "2020-resolver"
parts.append(
"pip's dependency resolver does not currently take into account "
"all the packages that are installed. This behaviour is the "
"source of the following dependency conflicts."
)
# NOTE: There is some duplication here, with commands/check.py
for project_name in missing:
version = package_set[project_name][0]
for dependency in missing[project_name]:
message = (
"{name} {version} requires {requirement}, "
"which is not installed."
).format(
name=project_name,
version=version,
requirement=dependency[1],
)
parts.append(message)
for project_name in conflicting:
version = package_set[project_name][0]
for dep_name, dep_version, req in conflicting[project_name]:
message = (
"{name} {version} requires {requirement}, but {you} have "
"{dep_name} {dep_version} which is incompatible."
).format(
name=project_name,
version=version,
requirement=req,
dep_name=dep_name,
dep_version=dep_version,
you=("you" if resolver_variant == "2020-resolver" else "you'll"),
)
parts.append(message)
logger.critical("\n".join(parts))
def get_lib_location_guesses(
user: bool = False,
home: Optional[str] = None,
root: Optional[str] = None,
isolated: bool = False,
prefix: Optional[str] = None,
) -> List[str]:
scheme = get_scheme(
"",
user=user,
home=home,
root=root,
isolated=isolated,
prefix=prefix,
)
return [scheme.purelib, scheme.platlib]
def site_packages_writable(root: Optional[str], isolated: bool) -> bool:
return all(
test_writable_dir(d)
for d in set(get_lib_location_guesses(root=root, isolated=isolated))
)
def decide_user_install(
use_user_site: Optional[bool],
prefix_path: Optional[str] = None,
target_dir: Optional[str] = None,
root_path: Optional[str] = None,
isolated_mode: bool = False,
) -> bool:
"""Determine whether to do a user install based on the input options.
If use_user_site is False, no additional checks are done.
If use_user_site is True, it is checked for compatibility with other
options.
If use_user_site is None, the default behaviour depends on the environment,
which is provided by the other arguments.
"""
# In some cases (config from tox), use_user_site can be set to an integer
# rather than a bool, which 'use_user_site is False' wouldn't catch.
if (use_user_site is not None) and (not use_user_site):
logger.debug("Non-user install by explicit request")
return False
if use_user_site:
if prefix_path:
raise CommandError(
"Can not combine '--user' and '--prefix' as they imply "
"different installation locations"
)
if virtualenv_no_global():
raise InstallationError(
"Can not perform a '--user' install. User site-packages "
"are not visible in this virtualenv."
)
logger.debug("User install by explicit request")
return True
# If we are here, user installs have not been explicitly requested/avoided
assert use_user_site is None
# user install incompatible with --prefix/--target
if prefix_path or target_dir:
logger.debug("Non-user install due to --prefix or --target option")
return False
# If user installs are not enabled, choose a non-user install
if not site.ENABLE_USER_SITE:
logger.debug("Non-user install because user site-packages disabled")
return False
# If we have permission for a non-user install, do that,
# otherwise do a user install.
if site_packages_writable(root=root_path, isolated=isolated_mode):
logger.debug("Non-user install because site-packages writeable")
return False
logger.info(
"Defaulting to user installation because normal site-packages "
"is not writeable"
)
return True
def reject_location_related_install_options(
requirements: List[InstallRequirement], options: Optional[List[str]]
) -> None:
"""If any location-changing --install-option arguments were passed for
    requirements or on the command-line, raise a CommandError, since these
    options are unsupported.
"""
def format_options(option_names: Iterable[str]) -> List[str]:
return ["--{}".format(name.replace("_", "-")) for name in option_names]
offenders = []
for requirement in requirements:
install_options = requirement.install_options
location_options = parse_distutils_args(install_options)
if location_options:
offenders.append(
"{!r} from {}".format(
format_options(location_options.keys()), requirement
)
)
if options:
location_options = parse_distutils_args(options)
if location_options:
offenders.append(
"{!r} from command line".format(format_options(location_options.keys()))
)
if not offenders:
return
raise CommandError(
"Location-changing options found in --install-option: {}."
" This is unsupported, use pip-level options like --user,"
" --prefix, --root, and --target instead.".format("; ".join(offenders))
)
def create_os_error_message(
error: OSError, show_traceback: bool, using_user_site: bool
) -> str:
"""Format an error message for an OSError
It may occur anytime during the execution of the install command.
"""
parts = []
# Mention the error if we are not going to show a traceback
parts.append("Could not install packages due to an OSError")
if not show_traceback:
parts.append(": ")
parts.append(str(error))
else:
parts.append(".")
    # Split the error indication from a helper message (if any)
parts[-1] += "\n"
# Suggest useful actions to the user:
# (1) using user site-packages or (2) verifying the permissions
if error.errno == errno.EACCES:
user_option_part = "Consider using the `--user` option"
permissions_part = "Check the permissions"
if not running_under_virtualenv() and not using_user_site:
parts.extend(
[
user_option_part,
" or ",
permissions_part.lower(),
]
)
else:
parts.append(permissions_part)
parts.append(".\n")
# Suggest the user to enable Long Paths if path length is
# more than 260
if (
WINDOWS
and error.errno == errno.ENOENT
and error.filename
and len(error.filename) > 260
):
parts.append(
"HINT: This error might have occurred since "
"this system does not have Windows Long Path "
"support enabled. You can find information on "
"how to enable this at "
"https://pip.pypa.io/warnings/enable-long-paths\n"
)
return "".join(parts).strip() + "\n"
|
"""
Support for a local MQTT broker.
For more details about this component, please refer to the documentation at
https://home-assistant.io/components/mqtt/#use-the-embedded-broker
"""
import logging
import tempfile
from homeassistant.core import callback
from homeassistant.components.mqtt import PROTOCOL_311
from homeassistant.const import EVENT_HOMEASSISTANT_STOP
from homeassistant.util.async import run_coroutine_threadsafe
REQUIREMENTS = ['hbmqtt==0.7.1']
DEPENDENCIES = ['http']
def start(hass, server_config):
"""Initialize MQTT Server."""
from hbmqtt.broker import Broker, BrokerException
try:
passwd = tempfile.NamedTemporaryFile()
if server_config is None:
server_config, client_config = generate_config(hass, passwd)
else:
client_config = None
broker = Broker(server_config, hass.loop)
run_coroutine_threadsafe(broker.start(), hass.loop).result()
except BrokerException:
logging.getLogger(__name__).exception('Error initializing MQTT server')
return False, None
finally:
passwd.close()
@callback
def shutdown_mqtt_server(event):
"""Shut down the MQTT server."""
hass.async_add_job(broker.shutdown())
hass.bus.listen_once(EVENT_HOMEASSISTANT_STOP, shutdown_mqtt_server)
return True, client_config
def generate_config(hass, passwd):
"""Generate a configuration based on current Home Assistant instance."""
config = {
'listeners': {
'default': {
'max-connections': 50000,
'bind': '0.0.0.0:1883',
'type': 'tcp',
},
'ws-1': {
'bind': '0.0.0.0:8080',
'type': 'ws',
},
},
'auth': {
'allow-anonymous': hass.config.api.api_password is None
},
'plugins': ['auth_anonymous'],
}
if hass.config.api.api_password:
username = 'homeassistant'
password = hass.config.api.api_password
# Encrypt with what hbmqtt uses to verify
from passlib.apps import custom_app_context
passwd.write(
'homeassistant:{}\n'.format(
custom_app_context.encrypt(
hass.config.api.api_password)).encode('utf-8'))
passwd.flush()
config['auth']['password-file'] = passwd.name
config['plugins'].append('auth_file')
else:
username = None
password = None
client_config = ('localhost', 1883, username, password, None, PROTOCOL_311)
return config, client_config
|
import os
import sys
from unittest2 import TestLoader, TextTestRunner
if __name__ == '__main__':
tests_dir = os.path.dirname(os.path.abspath(__file__))
authorize_dir = os.path.join(tests_dir, os.path.pardir)
sys.path.append(authorize_dir)
suite = TestLoader().discover(tests_dir)
runner = TextTestRunner(verbosity=1)
result = runner.run(suite)
sys.exit(not result.wasSuccessful())
|
import os
from argparse import ArgumentParser
import list_white
import list_ip
def parse_args():
parser = ArgumentParser()
    parser.add_argument('-i', '--input', dest='input',
                        default=os.path.join('data', 'whitelist.pac'),
                        help='path to the pac template')
parser.add_argument('-o', '--output', dest='output', default='whitelist.pac',
help='path to output pac', metavar='PAC')
    parser.add_argument('-p', '--proxy', dest='proxy',
                        default='"SOCKS5 127.0.0.1:1080; SOCKS 127.0.0.1:1080;"',
                        help='the proxy parameter in the pac file, '
                             'for example, "127.0.0.1:1080;"',
                        metavar='SOCKS5')
return parser.parse_args()
def get_file_data(filename):
content = ''
with open(filename, 'r') as file_obj:
content = file_obj.read()
return content
def writefile(input_file, proxy, output_file):
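    # The input template is expected to contain the placeholders __PROXY__,
    # __DOMAINS__, __IP_LIST__, __IP16_LIST__ and __FAKE_IP_LIST__; each is
    # substituted below to produce the final PAC file.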
ip_content = list_ip.final_list()
ip16_content = list_ip.center_list()
fake_ip_content = list_ip.fake_list()
domains_content = list_white.final_list()
proxy_content = get_file_data(input_file)
proxy_content = proxy_content.replace('__PROXY__', proxy)
proxy_content = proxy_content.replace('__DOMAINS__', domains_content)
proxy_content = proxy_content.replace('__IP_LIST__', ip_content)
proxy_content = proxy_content.replace('__IP16_LIST__', ip16_content)
proxy_content = proxy_content.replace('__FAKE_IP_LIST__', fake_ip_content)
with open(output_file, 'w') as file_obj:
file_obj.write(proxy_content)
def main():
args = parse_args()
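    # Normalize quoting: strip any surrounding double quotes from the CLI
    # value, then re-add exactly one pair so the template always receives a
    # quoted proxy string.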
writefile(args.input, '"' + args.proxy.strip('"') + '"', args.output)
if __name__ == '__main__':
main()
|
import os
from twisted.internet import reactor
from twisted.python import filepath, util
from nevow import athena, inevow, loaders, tags, static
from twisted.web import server, resource
from zope.interface import implements, Interface
import coherence.extern.louie as louie
from coherence import log
class IWeb(Interface):
def goingLive(self):
pass
class Web(object):
def __init__(self, coherence):
super(Web, self).__init__()
self.coherence = coherence
class MenuFragment(athena.LiveElement, log.Loggable):
logCategory = 'webui_menu_fragment'
jsClass = u'Coherence.Base'
fragmentName = 'coherence-menu'
docFactory = loaders.stan(
tags.div(render=tags.directive('liveElement'))[
tags.div(id="coherence_menu_box", class_="coherence_menu_box")[""],
]
)
def __init__(self, page):
super(MenuFragment, self).__init__()
self.setFragmentParent(page)
self.page = page
self.coherence = page.coherence
self.tabs = []
def going_live(self):
self.info("add a view to the MenuFragment")
d = self.page.notifyOnDisconnect()
d.addCallback(self.remove_me)
d.addErrback(self.remove_me)
if len(self.tabs):
return self.tabs
else:
return {}
athena.expose(going_live)
def add_tab(self, title, active, id):
self.info("add tab %s to the MenuFragment", title)
new_tab = {u'title': unicode(title),
u'active': unicode(active),
u'athenaid': u'athenaid:%d' % id}
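        # Tab titles act as unique keys: a tab whose title is already
        # registered is silently skipped.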
for t in self.tabs:
if t[u'title'] == new_tab[u'title']:
return
self.tabs.append(new_tab)
self.callRemote('addTab', new_tab)
def remove_me(self, result):
self.info("remove view from MenuFragment")
class DevicesFragment(athena.LiveElement, log.Loggable):
logCategory = 'webui_device_fragment'
jsClass = u'Coherence.Devices'
fragmentName = 'coherence-devices'
docFactory = loaders.stan(
tags.div(render=tags.directive('liveElement'))[
tags.div(id="Devices-container", class_="coherence_container")[""],
]
)
def __init__(self, page, active):
super(DevicesFragment, self).__init__()
self.setFragmentParent(page)
self.page = page
self.coherence = page.coherence
self.active = active
def going_live(self):
self.info("add a view to the DevicesFragment %s", self._athenaID)
self.page.menu.add_tab('Devices', self.active, self._athenaID)
d = self.page.notifyOnDisconnect()
d.addCallback(self.remove_me)
d.addErrback(self.remove_me)
devices = []
for device in self.coherence.get_devices():
if device is not None:
devices.append({u'name': device.get_markup_name(),
u'usn': unicode(device.get_usn())})
louie.connect(self.add_device,
'Coherence.UPnP.Device.detection_completed', louie.Any)
louie.connect(self.remove_device,
'Coherence.UPnP.Device.removed', louie.Any)
return devices
athena.expose(going_live)
def remove_me(self, result):
self.info("remove view from the DevicesFragment")
def add_device(self, device):
self.info("DevicesFragment found device %s %s of type %s",
device.get_usn(),
device.get_friendly_name(),
device.get_device_type())
self.callRemote('addDevice',
{u'name': device.get_markup_name(),
u'usn': unicode(device.get_usn())})
def remove_device(self, usn):
self.info("DevicesFragment remove device %s", usn)
self.callRemote('removeDevice', unicode(usn))
def render_devices(self, ctx, data):
cl = []
self.info('children: %s', self.coherence.children)
for child in self.coherence.children:
device = self.coherence.get_device_with_id(child)
if device is not None:
                cl.append(tags.li[tags.a(href='/' + child)[
                    device.get_friendly_device_type(), ':',
                    device.get_device_type_version(), ' ',
                    device.get_friendly_name()]])
else:
cl.append(tags.li[child])
return ctx.tag[tags.ul[cl]]
class LoggingFragment(athena.LiveElement, log.Loggable):
logCategory = 'webui_logging_fragment'
jsClass = u'Coherence.Logging'
fragmentName = 'coherence-logging'
docFactory = loaders.stan(
tags.div(render=tags.directive('liveElement'))[
tags.div(id="Logging-container", class_="coherence_container")[""],
]
)
def __init__(self, page, active):
super(LoggingFragment, self).__init__()
self.setFragmentParent(page)
self.page = page
self.coherence = page.coherence
self.active = active
def going_live(self):
self.info("add a view to the LoggingFragment %s", self._athenaID)
self.page.menu.add_tab('Logging', self.active, self._athenaID)
d = self.page.notifyOnDisconnect()
d.addCallback(self.remove_me)
d.addErrback(self.remove_me)
return {}
athena.expose(going_live)
def remove_me(self, result):
self.info("remove view from the LoggingFragment")
class WebUI(athena.LivePage, log.Loggable):
"""
"""
logCategory = 'webui'
jsClass = u'Coherence'
addSlash = True
docFactory = loaders.xmlstr("""\
<html xmlns:nevow="http://nevow.com/ns/nevow/0.1">
<head>
<nevow:invisible nevow:render="liveglue" />
<link rel="stylesheet" type="text/css" href="static/main.css" />
</head>
<body>
<div id="coherence_header"><div class="coherence_title">Coherence</div><div nevow:render="menu"></div></div>
<div id="coherence_body">
<div nevow:render="devices" />
<div nevow:render="logging" />
</div>
</body>
</html>
""")
def __init__(self, *a, **kw):
super(WebUI, self).__init__(*a, **kw)
self.coherence = self.rootObject.coherence
self.jsModules.mapping.update({
'MochiKit': filepath.FilePath(__file__).parent().child('static').child('MochiKit.js').path})
self.jsModules.mapping.update({
'Coherence': filepath.FilePath(__file__).parent().child('static').child('Coherence.js').path})
self.jsModules.mapping.update({
'Coherence.Base': filepath.FilePath(__file__).parent().child('static').child('Coherence.Base.js').path})
self.jsModules.mapping.update({
'Coherence.Devices': filepath.FilePath(__file__).parent().child('static').child('Coherence.Devices.js').path})
self.jsModules.mapping.update({
'Coherence.Logging': filepath.FilePath(__file__).parent().child('static').child('Coherence.Logging.js').path})
self.menu = MenuFragment(self)
def childFactory(self, ctx, name):
self.info('WebUI childFactory: %s', name)
try:
return self.rootObject.coherence.children[name]
        except Exception:
            # Not a registered Coherence child; fall back to Athena's lookup
            # and finally to static files living next to this module.
ch = super(WebUI, self).childFactory(ctx, name)
if ch is None:
p = util.sibpath(__file__, name)
self.info('looking for file %s', p)
if os.path.exists(p):
ch = static.File(p)
return ch
def render_listmenu(self, ctx, data):
l = []
l.append(tags.div(id="t", class_="coherence_menu_item")[tags.a(href='/' + 'devices', class_="coherence_menu_link")['Devices']])
l.append(tags.div(id="t", class_="coherence_menu_item")[tags.a(href='/' + 'logging', class_="coherence_menu_link")['Logging']])
return ctx.tag[l]
def render_menu(self, ctx, data):
self.info('render_menu')
return self.menu
def render_devices(self, ctx, data):
self.info('render_devices')
f = DevicesFragment(self, 'yes')
return f
def render_logging(self, ctx, data):
self.info('render_logging')
f = LoggingFragment(self, 'no')
return f
|
import unittest
import os
import time
import re
import glob
import logging
try:
import autotest.common as common
except ImportError:
import common
from autotest.client.shared.test_utils import mock
from autotest.client.shared import boottool
from autotest.client import kernel, job, utils, kernelexpand
from autotest.client import kernel_config, os_dep
class TestAddKernelToBootLoader(unittest.TestCase):
def add_to_bootloader(self, base_args, args, bootloader_args, tag='image',
image='image', initrd='initrd'):
god = mock.mock_god()
bootloader = god.create_mock_class(boottool.boottool, "boottool")
# record
bootloader.remove_kernel.expect_call(tag)
bootloader.add_kernel.expect_call(path=image, title=tag, initrd=initrd,
args=bootloader_args)
# run and check
kernel._add_kernel_to_bootloader(bootloader, base_args, tag, args,
image, initrd)
god.check_playback()
def test_add_kernel_to_bootloader(self):
self.add_to_bootloader(base_args='baseargs', args='',
bootloader_args='baseargs')
self.add_to_bootloader(base_args='arg1 arg2', args='arg3',
bootloader_args='arg1 arg2 arg3')
class TestBootableKernel(unittest.TestCase):
def setUp(self):
self.god = mock.mock_god()
self.god.stub_function(time, "time")
self.god.stub_function(utils, "system")
self.god.stub_function(kernel, "_add_kernel_to_bootloader")
job_ = self.god.create_mock_class(job.job, "job")
self.kernel = kernel.BootableKernel(job_)
self.kernel.job.bootloader = self.god.create_mock_class(
boottool.boottool, "boottool")
def tearDown(self):
# note: time.time() can only be unstubbed via tearDown()
self.god.unstub_all()
def boot_kernel(self, ident_check):
notes = "applied_patches"
when = 1
args = ''
base_args = 'base_args'
tag = 'ident'
subdir = 'subdir'
self.kernel.image = 'image'
self.kernel.initrd = 'initrd'
self.kernel.installed_as = tag
# record
args_ = args
if ident_check:
time.time.expect_call().and_return(when)
args_ += " IDENT=%d" % when
status = ["job.end_reboot_and_verify", when, tag, subdir, notes]
else:
status = ["job.end_reboot", subdir, tag, notes]
self.kernel.job.next_step_prepend.expect_call(status)
self.kernel.job.config_get.expect_call(
'boot.default_args').and_return(base_args)
kernel._add_kernel_to_bootloader.expect_call(
self.kernel.job.bootloader, base_args, tag,
args_, self.kernel.image, self.kernel.initrd)
utils.system.expect_call('touch /fastboot')
self.kernel.job.start_reboot.expect_call()
self.kernel.job.reboot.expect_call(tag=tag)
# run and check
self.kernel._boot_kernel(args=args, ident_check=ident_check,
expected_ident=tag, subdir=subdir, notes=notes)
self.god.check_playback()
def test_boot_kernel(self):
self.boot_kernel(ident_check=False)
self.boot_kernel(ident_check=True)
class TestKernel(unittest.TestCase):
def setUp(self):
self.god = mock.mock_god()
logging.disable(logging.CRITICAL)
self.god.stub_function(time, "time")
self.god.stub_function(os, "mkdir")
self.god.stub_function(os, "chdir")
self.god.stub_function(os, "symlink")
self.god.stub_function(os, "remove")
self.god.stub_function(os.path, "isdir")
self.god.stub_function(os.path, "exists")
self.god.stub_function(os.path, "isfile")
self.god.stub_function(os_dep, "commands")
self.god.stub_function(kernel, "open")
self.god.stub_function(utils, "system")
self.god.stub_function(utils, "system_output")
self.god.stub_function(utils, "get_file")
self.god.stub_function(utils, "grep")
self.god.stub_function(utils, "get_current_kernel_arch")
self.god.stub_function(utils, "cat_file_to_cmd")
self.god.stub_function(utils, "force_copy")
self.god.stub_function(utils, "extract_tarball_to_dir")
self.god.stub_function(utils, "count_cpus")
self.god.stub_function(utils, "get_os_vendor")
self.god.stub_function(kernelexpand, "expand_classic")
self.god.stub_function(kernel_config, "modules_needed")
self.god.stub_function(glob, "glob")
def dummy_mark(filename, msg):
pass
self.god.stub_with(kernel, '_mark', dummy_mark)
self.job = self.god.create_mock_class(job.job, "job")
self.job.bootloader = self.god.create_mock_class(boottool.boottool,
"boottool")
class DummyLoggingManager(object):
def tee_redirect_debug_dir(self, *args, **kwargs):
pass
def restore(self, *args, **kwargs):
pass
self.job.logging = DummyLoggingManager()
self.job.autodir = "autodir"
self.base_tree = "2.6.24"
self.tmp_dir = "tmpdir"
self.subdir = "subdir"
def tearDown(self):
self.god.unstub_all()
def construct_kernel(self):
self.kernel = kernel.kernel.__new__(kernel.kernel)
self.god.stub_function(self.kernel, "extract")
# setup
self.src_dir = os.path.join(self.tmp_dir, 'src')
self.build_dir = os.path.join(self.tmp_dir, "build_dir")
self.config_dir = os.path.join(self.subdir, 'config')
self.log_dir = os.path.join(self.subdir, 'debug')
self.results_dir = os.path.join(self.subdir, 'results')
# record
os.path.isdir.expect_call(self.src_dir).and_return(True)
utils.system.expect_call('rm -rf ' + self.src_dir)
os.path.isdir.expect_call(self.build_dir).and_return(True)
utils.system.expect_call('rm -rf ' + self.build_dir)
os.path.exists.expect_call(self.src_dir).and_return(False)
os.mkdir.expect_call(self.src_dir)
for path in [self.config_dir, self.log_dir, self.results_dir]:
os.path.exists.expect_call(path).and_return(True)
utils.system.expect_call('rm -rf ' + path)
os.mkdir.expect_call(path)
logpath = os.path.join(self.log_dir, 'build_log')
self.logfile = self.god.create_mock_class(file, "file")
kernel.open.expect_call(logpath, 'w+').and_return(self.logfile)
utils.get_current_kernel_arch.expect_call().and_return('ia64')
self.logfile.write.expect_call('BASE: %s\n' % self.base_tree)
self.kernel.extract.expect_call(self.base_tree)
# finish creation of kernel object and test (and unstub extract)
self.kernel.__init__(self.job, self.base_tree, self.subdir,
self.tmp_dir, "build_dir")
self.god.check_playback()
self.god.unstub(self.kernel, "extract")
def test_constructor(self):
self.construct_kernel()
def test_kernelexpand1(self):
self.construct_kernel()
ret_val = self.kernel.kernelexpand("/path/to/kernel")
self.assertEquals(ret_val, ["/path/to/kernel"])
self.god.check_playback()
def test_kernel_expand2(self):
self.construct_kernel()
kernel = "kernel.tar.gz"
# record
self.job.config_get.expect_call('mirror.mirrors').and_return('mirror')
kernelexpand.expand_classic.expect_call(kernel,
'mirror').and_return('patches')
# run
self.assertEquals(self.kernel.kernelexpand(kernel), 'patches')
self.god.check_playback()
def test_kernel_expand3(self):
self.construct_kernel()
kernel = "kernel.tar.gz"
# record
self.job.config_get.expect_call('mirror.mirrors')
self.job.config_get.expect_call(
'mirror.ftp_kernel_org').and_return('mirror')
korg = 'http://www.kernel.org/pub/linux/kernel'
mirrors = [
[korg + '/v2.6', 'mirror' + '/v2.6'],
[korg + '/people/akpm/patches/2.6', 'mirror' + '/akpm'],
[korg + '/people/mbligh', 'mirror' + '/mbligh'],
]
kernelexpand.expand_classic.expect_call(kernel,
mirrors).and_return('patches')
# run
self.assertEquals(self.kernel.kernelexpand(kernel), 'patches')
self.god.check_playback()
def test_extract1(self):
self.construct_kernel()
# setup
self.god.stub_function(self.kernel, "get_kernel_tree")
# record
os.path.exists.expect_call(self.base_tree).and_return(True)
self.kernel.get_kernel_tree.expect_call(self.base_tree)
self.job.record.expect_call('GOOD', self.subdir, 'kernel.extract')
# run
self.kernel.extract(self.base_tree)
self.god.check_playback()
self.god.unstub(self.kernel, "get_kernel_tree")
def test_extract2(self):
self.construct_kernel()
# setup
self.god.stub_function(self.kernel, "kernelexpand")
self.god.stub_function(self.kernel, "get_kernel_tree")
self.god.stub_function(self.kernel, "patch")
# record
os.path.exists.expect_call(self.base_tree).and_return(False)
components = ["component0", "component1"]
self.kernel.kernelexpand.expect_call(self.base_tree).and_return(
components)
self.kernel.get_kernel_tree.expect_call(components[0])
self.kernel.patch.expect_call(components[1])
self.job.record.expect_call('GOOD', self.subdir, 'kernel.extract')
# run
self.kernel.extract(self.base_tree)
self.god.check_playback()
self.god.unstub(self.kernel, "kernelexpand")
self.god.unstub(self.kernel, "get_kernel_tree")
self.god.unstub(self.kernel, "patch")
def test_patch1(self):
self.construct_kernel()
patches = ('patch1', 'patch2')
self.god.stub_function(self.kernel, "apply_patches")
self.god.stub_function(self.kernel, "get_patches")
# record
self.kernel.get_patches.expect_call(patches).and_return(patches)
self.kernel.apply_patches.expect_call(patches)
self.job.record.expect_call('GOOD', self.subdir, 'kernel.patch')
# run
self.kernel.patch(*patches)
self.god.check_playback()
self.god.unstub(self.kernel, "apply_patches")
self.god.unstub(self.kernel, "get_patches")
def test_patch2(self):
self.construct_kernel()
patches = []
# record
self.job.record.expect_call('GOOD', self.subdir, 'kernel.patch')
# run
self.kernel.patch(*patches)
self.god.check_playback()
def test_config(self):
self.construct_kernel()
# setup
self.god.stub_function(self.kernel, "set_cross_cc")
self.god.stub_class(kernel_config, "kernel_config")
kernel_config.kernel_config.build_config = "/tmp/config"
# record
self.kernel.set_cross_cc.expect_call()
kernel_config.kernel_config.expect_new(self.job, self.build_dir,
self.config_dir, '', None,
False, self.base_tree, None)
utils.grep.expect_call('^CONFIG_DEFAULT_UIMAGE=y', '/tmp/config')
self.job.record.expect_call('GOOD', self.subdir, 'kernel.config')
# run
self.kernel.config()
self.god.check_playback()
self.god.unstub(self.kernel, "set_cross_cc")
def test_get_patches(self):
self.construct_kernel()
# setup
patches = ['patch1', 'patch2', 'patch3']
local_patches = []
# record
for patch in patches:
dest = os.path.join(self.src_dir, os.path.basename(patch))
utils.get_file.expect_call(patch, dest)
utils.system_output.expect_call(
'md5sum ' + dest).and_return('md5sum')
local_patches.append((patch, dest, 'md5sum'))
# run and check
self.assertEquals(self.kernel.get_patches(patches), local_patches)
self.god.check_playback()
def test_apply_patches(self):
self.construct_kernel()
# setup
patches = []
patches.append(('patch1', 'patch1.gz', 'md5sum1'))
patches.append(('patch2', 'patch2.bz2', 'md5sum2'))
patches.append(('patch3', 'patch3', 'md5sum3'))
applied_patches = []
# record
os.chdir.expect_call(self.build_dir)
patch_id = "%s %s %s" % ('patch1', 'patch1', 'md5sum1')
log = "PATCH: " + patch_id + "\n"
utils.cat_file_to_cmd.expect_call('patch1.gz',
'patch -p1 > /dev/null')
self.logfile.write.expect_call(log)
applied_patches.append(patch_id)
patch_id = "%s %s %s" % ('patch2', 'patch2', 'md5sum2')
log = "PATCH: " + patch_id + "\n"
utils.cat_file_to_cmd.expect_call('patch2.bz2',
'patch -p1 > /dev/null')
self.logfile.write.expect_call(log)
applied_patches.append(patch_id)
utils.force_copy.expect_call('patch3',
self.results_dir).and_return('local_patch3')
self.job.relative_path.expect_call('local_patch3').and_return(
'rel_local_patch3')
patch_id = "%s %s %s" % ('patch3', 'rel_local_patch3', 'md5sum3')
log = "PATCH: " + patch_id + "\n"
utils.cat_file_to_cmd.expect_call('patch3',
'patch -p1 > /dev/null')
self.logfile.write.expect_call(log)
applied_patches.append(patch_id)
# run and test
self.kernel.apply_patches(patches)
self.assertEquals(self.kernel.applied_patches, applied_patches)
self.god.check_playback()
def test_get_kernel_tree1(self):
self.construct_kernel()
# record
os.path.isdir.expect_call(self.base_tree).and_return(True)
os.symlink.expect_call(self.base_tree, self.build_dir)
# run and check
self.kernel.get_kernel_tree(self.base_tree)
self.god.check_playback()
def test_get_kernel_tree2(self):
self.construct_kernel()
# record
os.path.isdir.expect_call(self.base_tree).and_return(False)
os.chdir.expect_call(os.path.dirname(self.src_dir))
tarball = os.path.join(self.src_dir, os.path.basename(self.base_tree))
utils.get_file.expect_call(self.base_tree, tarball)
utils.extract_tarball_to_dir.expect_call(tarball,
self.build_dir)
# run and check
self.kernel.get_kernel_tree(self.base_tree)
self.god.check_playback()
def test_extraversion(self):
self.construct_kernel()
tag = "tag"
# setup
self.god.stub_function(self.kernel, "config")
# record
os.chdir.expect_call(self.build_dir)
extraversion_sub = r's/^CONFIG_LOCALVERSION=\s*"\(.*\)"/CONFIG_LOCALVERSION='
cfg = self.build_dir + '/.config'
p = extraversion_sub + '"\\1-%s"/' % tag
os.path.exists.expect_call(cfg).and_return(True)
utils.system.expect_call('mv %s %s.old' % (cfg, cfg))
utils.system.expect_call("sed '%s' < %s.old > %s" % (p, cfg, cfg))
self.kernel.config.expect_call(make='oldconfig')
# run and check
self.kernel.extraversion(tag)
self.god.check_playback()
def test_extraversion_nocfg(self):
self.construct_kernel()
tag = "tag"
# setup
self.god.stub_function(self.kernel, "config")
# record
os.chdir.expect_call(self.build_dir)
extraversion_sub = r's/^CONFIG_LOCALVERSION=\s*"\(.*\)"/CONFIG_LOCALVERSION='
cfg = self.build_dir + '/.config'
p = extraversion_sub + '"\\1-%s"/' % tag
os.path.exists.expect_call(cfg).and_return(False)
self.kernel.config.expect_call()
# run and check
self.kernel.extraversion(tag)
self.god.check_playback()
def test_build(self):
self.construct_kernel()
self.god.stub_function(self.kernel, "extraversion")
self.god.stub_function(self.kernel, "set_cross_cc")
self.god.stub_function(self.kernel, "get_kernel_build_ver")
self.kernel.build_target = 'build_target'
# record
os_dep.commands.expect_call('gcc', 'make')
logfile = os.path.join(self.log_dir, 'kernel_build')
os.chdir.expect_call(self.build_dir)
self.kernel.extraversion.expect_call('autotest')
self.kernel.set_cross_cc.expect_call()
utils.system.expect_call('make dep', ignore_status=True)
utils.count_cpus.expect_call().and_return(4)
threads = 2 * 4
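        # encodes the expected heuristic of two make jobs per CPU
        # (count_cpus() was stubbed above to return 4)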
build_string = 'make -j %d %s %s' % (threads, '', 'build_target')
utils.system.expect_call(build_string)
kernel_config.modules_needed.expect_call('.config').and_return(True)
utils.system.expect_call('make -j %d %s modules' % (threads, ''))
self.kernel.get_kernel_build_ver.expect_call().and_return('2.6.24')
kernel_version = re.sub('-autotest', '', '2.6.24')
self.logfile.write.expect_call('BUILD VERSION: %s\n' % kernel_version)
utils.force_copy.expect_call(self.build_dir + '/System.map',
self.results_dir)
self.job.record.expect_call('GOOD', self.subdir, 'kernel.build')
# run and check
self.kernel.build()
self.god.check_playback()
def test_build_timed(self):
self.construct_kernel()
self.god.stub_function(self.kernel, "set_cross_cc")
self.god.stub_function(self.kernel, "clean")
# record
os.chdir.expect_call(self.build_dir)
self.kernel.set_cross_cc.expect_call()
self.kernel.clean.expect_call()
build_string = "/usr/bin/time -o /dev/null make -j 8 vmlinux"
build_string += ' > /dev/null 2>&1'
utils.system.expect_call(build_string)
os.path.isfile.expect_call('vmlinux').and_return(True)
# run and check
self.kernel.build_timed(threads=8)
self.god.check_playback()
def test_clean(self):
self.construct_kernel()
# record
os.chdir.expect_call(self.build_dir)
utils.system.expect_call('make clean > /dev/null 2> /dev/null')
self.job.record.expect_call('GOOD', self.subdir, 'kernel.clean')
# run and check
self.kernel.clean()
self.god.check_playback()
def test_mkinitrd(self):
self.construct_kernel()
# record
utils.get_os_vendor.expect_call().and_return('Ubuntu')
os.path.isfile.expect_call('initrd').and_return(True)
os.remove.expect_call('initrd')
self.job.config_get.expect_call(
'kernel.mkinitrd_extra_args').and_return(None)
args = ''
glob.glob.expect_call('/lib/modules/2.6.24*').and_return(['2.6.24'])
os.path.isfile.expect_call('/usr/sbin/mkinitrd').and_return(True)
cmd = '/usr/sbin/mkinitrd'
utils.system.expect_call('%s %s -o initrd 2.6.24' % (cmd, args))
self.job.record.expect_call('GOOD', self.subdir, 'kernel.mkinitrd')
# run and check
self.kernel.mkinitrd(version="2.6.24", image="image",
system_map="system_map", initrd="initrd")
self.god.check_playback()
def test_install(self):
self.construct_kernel()
tag = 'autotest'
prefix = '/'
self.kernel.build_image = None
self.kernel.build_target = 'build_target'
self.god.stub_function(self.kernel, "get_kernel_build_ver")
self.god.stub_function(self.kernel, "mkinitrd")
# record
os.chdir.expect_call(self.build_dir)
os.path.isdir.expect_call(prefix).and_return(False)
os.mkdir.expect_call(prefix)
boot_dir = os.path.join(prefix, 'boot')
os.path.isdir.expect_call(boot_dir).and_return(False)
os.mkdir.expect_call(boot_dir)
glob.glob.expect_call(
'arch/*/boot/' + 'build_target').and_return('')
build_image = self.kernel.build_target
utils.force_copy.expect_call('vmlinux',
'/boot/vmlinux-autotest')
utils.force_copy.expect_call('build_target',
'/boot/vmlinuz-autotest')
utils.force_copy.expect_call('System.map',
'/boot/System.map-autotest')
utils.force_copy.expect_call('.config',
'/boot/config-autotest')
kernel_config.modules_needed.expect_call('.config').and_return(True)
utils.system.expect_call('make modules_install INSTALL_MOD_PATH=%s'
% prefix)
initrd = boot_dir + '/initrd-' + tag
self.kernel.get_kernel_build_ver.expect_call().and_return('2.6.24')
self.kernel.mkinitrd.expect_call('2.6.24', '/boot/vmlinuz-autotest',
'/boot/System.map-autotest', '/boot/initrd-autotest')
self.job.record.expect_call('GOOD', self.subdir, 'kernel.install')
# run and check
self.kernel.install()
self.god.check_playback()
def test_get_kernel_build_arch1(self):
self.construct_kernel()
# record
utils.get_current_kernel_arch.expect_call().and_return("i386")
# run and check
self.assertEquals(self.kernel.get_kernel_build_arch(), "i386")
self.god.check_playback()
def test_get_kernel_build_arch2(self):
self.construct_kernel()
# run and check
self.assertEquals(self.kernel.get_kernel_build_arch('i586'), "i386")
self.god.check_playback()
def test_get_kernel_build_release(self):
self.construct_kernel()
mock_file = self.god.create_mock_class(file, "file")
# record
for f in [self.build_dir + "/include/linux/version.h",
self.build_dir + "/include/linux/utsrelease.h"]:
os.path.exists.expect_call(f).and_return(True)
kernel.open.expect_call(f, 'r').and_return(mock_file)
mock_file.readlines.expect_call().and_return("Some lines")
mock_file.close.expect_call()
for f in [self.build_dir + "/include/linux/compile.h",
self.build_dir + "/include/generated/utsrelease.h",
self.build_dir + "/include/generated/compile.h"]:
os.path.exists.expect_call(f).and_return(False)
# run and test
self.kernel.get_kernel_build_release()
self.god.check_playback()
def test_get_kernel_build_ident(self):
self.construct_kernel()
self.god.stub_function(self.kernel, "get_kernel_build_release")
# record
self.kernel.get_kernel_build_release.expect_call().and_return(
("AwesomeRelease", "1.0"))
# run and check
self.assertEquals(self.kernel.get_kernel_build_ident(),
"AwesomeRelease::1.0")
self.god.check_playback()
def test_boot(self):
self.construct_kernel()
self.god.stub_function(self.kernel, "get_kernel_build_ident")
self.god.stub_function(self.kernel, "install")
self.god.stub_function(self.kernel, "_boot_kernel")
self.kernel.applied_patches = "applied_patches"
self.kernel.installed_as = None
args = ''
expected_ident = 'ident'
ident = True
# record
self.kernel.install.expect_call()
self.kernel.get_kernel_build_ident.expect_call(
).and_return(expected_ident)
self.kernel._boot_kernel.expect_call(
args, ident, expected_ident,
self.subdir, self.kernel.applied_patches)
# run and check
self.kernel.boot(args=args, ident=ident)
self.god.check_playback()
if __name__ == "__main__":
unittest.main()
|
import os
import shutil
from cStringIO import StringIO
from tempfile import mkstemp
from tests import TestCase, add
import warnings
warnings.simplefilter("ignore", DeprecationWarning)
from mutagen.m4a import M4A, Atom, Atoms, M4ATags, M4AInfo, \
delete, M4ACover, M4AMetadataError
try: from os.path import devnull
except ImportError: devnull = "/dev/null"
class TAtom(TestCase):
uses_mmap = False
def test_no_children(self):
fileobj = StringIO("\x00\x00\x00\x08atom")
atom = Atom(fileobj)
self.failUnlessRaises(KeyError, atom.__getitem__, "test")
def test_length_1(self):
fileobj = StringIO("\x00\x00\x00\x01atom" + "\x00" * 8)
self.failUnlessRaises(IOError, Atom, fileobj)
def test_render_too_big(self):
class TooBig(str):
def __len__(self):
return 1L << 32
data = TooBig("test")
try: len(data)
except OverflowError:
# Py_ssize_t is still only 32 bits on this system.
self.failUnlessRaises(OverflowError, Atom.render, "data", data)
else:
data = Atom.render("data", data)
self.failUnlessEqual(len(data), 4 + 4 + 8 + 4)
def test_length_0(self):
fileobj = StringIO("\x00\x00\x00\x00atom")
Atom(fileobj)
self.failUnlessEqual(fileobj.tell(), 8)
add(TAtom)
class TAtoms(TestCase):
uses_mmap = False
filename = os.path.join("tests", "data", "has-tags.m4a")
def setUp(self):
self.atoms = Atoms(open(self.filename, "rb"))
def test___contains__(self):
self.failUnless(self.atoms["moov"])
self.failUnless(self.atoms["moov.udta"])
self.failUnlessRaises(KeyError, self.atoms.__getitem__, "whee")
def test_name(self):
self.failUnlessEqual(self.atoms.atoms[0].name, "ftyp")
def test_children(self):
self.failUnless(self.atoms.atoms[2].children)
def test_no_children(self):
self.failUnless(self.atoms.atoms[0].children is None)
def test_repr(self):
repr(self.atoms)
add(TAtoms)
class TM4AInfo(TestCase):
uses_mmap = False
def test_no_soun(self):
self.failUnlessRaises(
IOError, self.test_mdhd_version_1, "no so und data here")
def test_mdhd_version_1(self, soun="soun"):
mdhd = Atom.render("mdhd", ("\x01\x00\x00\x00" + "\x00" * 16 +
"\x00\x00\x00\x02" + # 2 Hz
"\x00\x00\x00\x00\x00\x00\x00\x10"))
hdlr = Atom.render("hdlr", soun)
mdia = Atom.render("mdia", mdhd + hdlr)
trak = Atom.render("trak", mdia)
moov = Atom.render("moov", trak)
fileobj = StringIO(moov)
atoms = Atoms(fileobj)
info = M4AInfo(atoms, fileobj)
self.failUnlessEqual(info.length, 8)
add(TM4AInfo)
class TM4ATags(TestCase):
uses_mmap = False
def wrap_ilst(self, data):
ilst = Atom.render("ilst", data)
meta = Atom.render("meta", "\x00" * 4 + ilst)
data = Atom.render("moov", Atom.render("udta", meta))
fileobj = StringIO(data)
return M4ATags(Atoms(fileobj), fileobj)
def test_bad_freeform(self):
mean = Atom.render("mean", "net.sacredchao.Mutagen")
name = Atom.render("name", "empty test key")
bad_freeform = Atom.render("----", "\x00" * 4 + mean + name)
self.failIf(self.wrap_ilst(bad_freeform))
def test_genre(self):
data = Atom.render("data", "\x00" * 8 + "\x00\x01")
genre = Atom.render("gnre", data)
tags = self.wrap_ilst(genre)
self.failIf("gnre" in tags)
self.failUnlessEqual(tags.get("\xa9gen"), "Blues")
def test_empty_cpil(self):
cpil = Atom.render("cpil", Atom.render("data", "\x00" * 8))
tags = self.wrap_ilst(cpil)
self.failUnless("cpil" in tags)
self.failIf(tags["cpil"])
def test_genre_too_big(self):
data = Atom.render("data", "\x00" * 8 + "\x01\x00")
genre = Atom.render("gnre", data)
tags = self.wrap_ilst(genre)
self.failIf("gnre" in tags)
self.failIf("\xa9gen" in tags)
def test_strips_unknown_types(self):
data = Atom.render("data", "\x00" * 8 + "whee")
foob = Atom.render("foob", data)
tags = self.wrap_ilst(foob)
self.failIf(tags)
def test_bad_covr(self):
data = Atom.render("foob", "\x00\x00\x00\x0E" + "\x00" * 4 + "whee")
covr = Atom.render("covr", data)
self.failUnlessRaises(M4AMetadataError, self.wrap_ilst, covr)
add(TM4ATags)
class TM4A(TestCase):
def setUp(self):
fd, self.filename = mkstemp(suffix='m4a')
os.close(fd)
shutil.copy(self.original, self.filename)
self.audio = M4A(self.filename)
def faad(self):
if not have_faad: return
value = os.system(
"faad %s -o %s > %s 2> %s" % (
self.filename, devnull, devnull, devnull))
self.failIf(value and value != NOTFOUND)
def test_bitrate(self):
self.failUnlessEqual(self.audio.info.bitrate, 2914)
def test_length(self):
self.failUnlessAlmostEqual(3.7, self.audio.info.length, 1)
def set_key(self, key, value):
self.audio[key] = value
self.audio.save()
audio = M4A(self.audio.filename)
self.failUnless(key in audio)
self.failUnlessEqual(audio[key], value)
self.faad()
def test_save_text(self):
self.set_key('\xa9nam', u"Some test name")
def test_freeform(self):
self.set_key('----:net.sacredchao.Mutagen:test key', "whee")
def test_tracknumber(self):
self.set_key('trkn', (1, 10))
def test_disk(self):
self.set_key('disk', (18, 0))
def test_tracknumber_too_small(self):
self.failUnlessRaises(ValueError, self.set_key, 'trkn', (-1, 0))
self.failUnlessRaises(ValueError, self.set_key, 'trkn', (2**18, 1))
def test_disk_too_small(self):
self.failUnlessRaises(ValueError, self.set_key, 'disk', (-1, 0))
self.failUnlessRaises(ValueError, self.set_key, 'disk', (2**18, 1))
def test_tracknumber_wrong_size(self):
self.failUnlessRaises(ValueError, self.set_key, 'trkn', (1,))
self.failUnlessRaises(ValueError, self.set_key, 'trkn', (1, 2, 3,))
def test_disk_wrong_size(self):
self.failUnlessRaises(ValueError, self.set_key, 'disk', (1,))
self.failUnlessRaises(ValueError, self.set_key, 'disk', (1, 2, 3,))
def test_tempo(self):
self.set_key('tmpo', 150)
def test_tempo_invalid(self):
self.failUnlessRaises(ValueError, self.set_key, 'tmpo', 100000)
def test_compilation(self):
self.set_key('cpil', True)
def test_compilation_false(self):
self.set_key('cpil', False)
def test_cover(self):
self.set_key('covr', 'woooo')
def test_cover_png(self):
self.set_key('covr', M4ACover('woooo', M4ACover.FORMAT_PNG))
def test_cover_jpeg(self):
self.set_key('covr', M4ACover('hoooo', M4ACover.FORMAT_JPEG))
def test_pprint(self):
self.audio.pprint()
def test_pprint_binary(self):
self.audio["covr"] = "\x00\xa9\garbage"
self.audio.pprint()
def test_delete(self):
self.audio.delete()
audio = M4A(self.audio.filename)
self.failIf(audio.tags)
self.faad()
def test_module_delete(self):
delete(self.filename)
audio = M4A(self.audio.filename)
self.failIf(audio.tags)
self.faad()
def test_reads_unknown_text(self):
self.set_key("foob", u"A test")
def test_mime(self):
self.failUnless("audio/mp4" in self.audio.mime)
def tearDown(self):
os.unlink(self.filename)
class TM4AHasTags(TM4A):
original = os.path.join("tests", "data", "has-tags.m4a")
def test_save_simple(self):
self.audio.save()
self.faad()
def test_shrink(self):
map(self.audio.__delitem__, self.audio.keys())
self.audio.save()
audio = M4A(self.audio.filename)
        self.failIf(audio.tags)
def test_has_tags(self):
self.failUnless(self.audio.tags)
def test_has_covr(self):
self.failUnless('covr' in self.audio.tags)
covr = self.audio.tags['covr']
self.failUnlessEqual(covr.imageformat, M4ACover.FORMAT_PNG)
def test_not_my_file(self):
self.failUnlessRaises(
IOError, M4A, os.path.join("tests", "data", "empty.ogg"))
add(TM4AHasTags)
class TM4ANoTags(TM4A):
original = os.path.join("tests", "data", "no-tags.m4a")
def test_no_tags(self):
self.failUnless(self.audio.tags is None)
add(TM4ANoTags)
NOTFOUND = os.system("tools/notarealprogram 2> %s" % devnull)
have_faad = True
if os.system("faad 2> %s > %s" % (devnull, devnull)) == NOTFOUND:
have_faad = False
print "WARNING: Skipping FAAD reference tests."
|
import os
import re
from avocado import Test
from avocado.utils import process, build, archive
from avocado.utils.software_manager import SoftwareManager
class Flail(Test):
'''
    Flail is a system call fuzzer.
:avocado: tags=fs
'''
def setUp(self):
'''
Setup Flail
'''
smm = SoftwareManager()
for package in ['gcc', 'make']:
if not smm.check_installed(package) and not smm.install(package):
self.cancel(package + ' is needed for the test to be run')
self.fs_type = self.params.get('fstype', default='xfs')
archive.extract(self.get_data("flail-0.2.0.tar.gz"), self.workdir)
self.build_dir = os.path.join(self.workdir, 'flail-0.2.0')
os.chdir(self.build_dir)
fin = open("Makefile", "rt")
data = fin.read()
data = data.replace('-lm -o $@ flail.c', '-o $@ flail.c -lm')
fin.close()
fin = open("Makefile", "wt")
fin.write(data)
fin.close()
build.make(self.build_dir)
def test(self):
'''
Runs flail with the appropriate parameters.
        :param fstype: filesystem type on which the user wants to run flail
'''
self.clear_dmesg()
os.chdir(self.build_dir)
process.system('./flail %s 1' % self.fs_type, ignore_status=True)
dmesg = process.system_output('dmesg')
match = re.search(br'Call Trace:', dmesg, re.M | re.I)
if match:
self.fail("some call traces seen please check")
def clear_dmesg(self):
process.run("dmesg -C ", sudo=True)
|
import re
import aexpect
from avocado.utils import process
from . import utils_misc
from . import error_context
class QemuIOParamError(Exception):
"""
Parameter Error for qemu-io command
"""
pass
class QemuIO(object):
"""
    A class for executing qemu-io commands
"""
def __init__(self, test, params, image_name, blkdebug_cfg="",
prompt=r"qemu-io>\s*$", log_filename=None, io_options="",
log_func=None):
self.type = ""
if log_filename:
log_filename += "-" + utils_misc.generate_random_string(4)
self.output_func = utils_misc.log_line
self.output_params = (log_filename,)
else:
self.output_func = None
self.output_params = ()
self.output_prefix = ""
self.prompt = prompt
self.blkdebug_cfg = blkdebug_cfg
self.qemu_io_cmd = utils_misc.get_qemu_io_binary(params)
self.io_options = io_options
self.run_command = False
self.image_name = image_name
self.blkdebug_cfg = blkdebug_cfg
self.log_func = log_func
def get_cmd_line(self, ignore_option=[], essential_option=[],
forbid_option=[]):
"""
Generate the command line for qemu-io from the parameters
        :param ignore_option: options to silently drop from the command line
        :param essential_option: options of which at least one must be present
        :param forbid_option: options that must not appear (QemuIOParamError is raised)
:return: qemu-io command line
"""
essential_flag = False
qemu_io_cmd = self.qemu_io_cmd
if self.io_options:
for io_option in re.split(",", self.io_options):
if io_option in ignore_option:
pass
elif io_option in forbid_option:
raise QemuIOParamError
else:
if not essential_flag and io_option in essential_option:
essential_flag = True
if len(io_option) == 1:
qemu_io_cmd += " -%s" % io_option
else:
qemu_io_cmd += " --%s" % io_option
if essential_option and not essential_flag:
raise QemuIOParamError
if self.image_name:
qemu_io_cmd += " "
if self.blkdebug_cfg:
qemu_io_cmd += "blkdebug:%s:" % self.blkdebug_cfg
qemu_io_cmd += self.image_name
return qemu_io_cmd
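    # Illustrative example (not from the original source): with
    # qemu_io_cmd='qemu-io', io_options='n,s' and image_name='disk.img',
    # get_cmd_line() returns "qemu-io -n -s disk.img"; single-character
    # options get one dash, longer ones two, and a blkdebug config
    # prefixes the image as "blkdebug:<cfg>:disk.img".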
def cmd_output(self, command):
"""
Run a command in qemu-io
"""
pass
def close(self):
"""
Clean up
"""
pass
class QemuIOShellSession(QemuIO):
"""
    Use a shell session to execute qemu-io commands
"""
def __init__(self, test, params, image_name, blkdebug_cfg="",
prompt=r"qemu+-io>\s*$", log_filename=None, io_options="",
log_func=None):
QemuIO.__init__(self, test, params, image_name, blkdebug_cfg, prompt,
log_filename, io_options, log_func)
self.type = "shell"
forbid_option = ["h", "help", "V", "version", "c", "cmd"]
self.qemu_io_cmd = self.get_cmd_line(forbid_option=forbid_option)
self.create_session = True
self.session = None
@error_context.context_aware
def cmd_output(self, command, timeout=60):
"""
Get output from shell session. If the create flag is True, init the
shell session and set the create flag to False.
:param command: command to execute in qemu-io
:param timeout: timeout for execute the command
"""
qemu_io_cmd = self.qemu_io_cmd
prompt = self.prompt
output_func = self.output_func
output_params = self.output_params
output_prefix = self.output_prefix
if self.create_session:
error_context.context(
"Running command: %s" % qemu_io_cmd, self.log_func)
self.session = aexpect.ShellSession(qemu_io_cmd, echo=True,
prompt=prompt,
output_func=output_func,
output_params=output_params,
output_prefix=output_prefix)
# Record the command line in log file
if self.output_func:
params = self.output_params + (qemu_io_cmd, )
self.output_func(*params)
self.create_session = False
# Get the reaction from session
self.session.cmd_output("\n")
error_context.context("Executing command: %s" % command, self.log_func)
return self.session.cmd_output(command, timeout=timeout)
def close(self):
"""
Close the shell session for qemu-io
"""
if not self.create_session:
self.session.close()
class QemuIOSystem(QemuIO):
"""
Run qemu-io with a command line which will return immediately
"""
def __init__(self, test, params, image_name, blkdebug_cfg="",
prompt=r"qemu-io>\s*$", log_filename=None, io_options="",
log_func=None):
QemuIO.__init__(self, test, params, image_name, blkdebug_cfg, prompt,
log_filename, io_options, log_func)
ignore_option = ["c", "cmd"]
essential_option = ["h", "help", "V", "version", "c", "cmd"]
self.qemu_io_cmd = self.get_cmd_line(ignore_option=ignore_option,
essential_option=essential_option)
@error_context.context_aware
def cmd_output(self, command, timeout=60):
"""
Get output from system_output. Add the command to the qemu-io command
line with -c and record the output in the log file.
:param command: command to execute in qemu-io
:param timeout: timeout for execute the command
"""
qemu_io_cmd = self.qemu_io_cmd
if command:
qemu_io_cmd += " -c '%s'" % command
error_context.context(
"Running command: %s" % qemu_io_cmd, self.log_func)
output = process.system_output(qemu_io_cmd, timeout=timeout)
# Record command line in log file
if self.output_func:
params = self.output_params + (qemu_io_cmd,)
self.output_func(*params)
params = self.output_params + (output,)
self.output_func(*params)
return output
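    # Example (for illustration): with the command line built above for
    # image 'disk.img', cmd_output("read 0 512") executes:
    #     qemu-io [options] disk.img -c 'read 0 512'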
def close(self):
"""
        To keep the same interface as QemuIOShellSession
"""
pass
|
"""
***************************************************************************
ExtentSelectionPanel.py
---------------------
Date : August 2012
Copyright : (C) 2012 by Victor Olaya
Email : volayaf at gmail dot com
***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************
"""
__author__ = 'Victor Olaya'
__date__ = 'August 2012'
__copyright__ = '(C) 2012, Victor Olaya'
__revision__ = '$Format:%H$'
from qgis.core import *
from PyQt4 import QtGui, QtCore
from PyQt4.QtCore import *
from PyQt4.QtGui import *
from processing import interface
from processing.gui.RectangleMapTool import RectangleMapTool
from processing.parameters.ParameterRaster import ParameterRaster
from processing.parameters.ParameterVector import ParameterVector
from processing.parameters.ParameterMultipleInput import ParameterMultipleInput
from processing.tools import dataobjects
class ExtentSelectionPanel(QtGui.QWidget):
def __init__(self, dialog, alg, default):
super(ExtentSelectionPanel, self).__init__(None)
self.dialog = dialog
self.params = alg.parameters
self.horizontalLayout = QtGui.QHBoxLayout(self)
self.horizontalLayout.setSpacing(2)
self.horizontalLayout.setMargin(0)
self.text = QtGui.QLineEdit()
self.text.setSizePolicy(QtGui.QSizePolicy.Expanding,
QtGui.QSizePolicy.Expanding)
if self.canUseAutoExtent():
if hasattr(self.text, 'setPlaceholderText'):
self.text.setPlaceholderText(
'[Leave blank to use min covering extent]')
self.horizontalLayout.addWidget(self.text)
self.pushButton = QtGui.QPushButton()
self.pushButton.setText('...')
self.pushButton.clicked.connect(self.buttonPushed)
self.horizontalLayout.addWidget(self.pushButton)
self.setLayout(self.horizontalLayout)
canvas = interface.iface.mapCanvas()
self.prevMapTool = canvas.mapTool()
self.tool = RectangleMapTool(canvas)
self.connect(self.tool, SIGNAL('rectangleCreated()'), self.fillCoords)
def canUseAutoExtent(self):
for param in self.params:
if isinstance(param, (ParameterRaster, ParameterVector)):
return True
if isinstance(param, ParameterMultipleInput):
return True
return False
def buttonPushed(self):
popupmenu = QMenu()
useLayerExtentAction = QtGui.QAction('Use layer/canvas extent',
self.pushButton)
useLayerExtentAction.triggered.connect(self.useLayerExtent)
popupmenu.addAction(useLayerExtentAction)
selectOnCanvasAction = QtGui.QAction('Select extent on canvas',
self.pushButton)
selectOnCanvasAction.triggered.connect(self.selectOnCanvas)
popupmenu.addAction(selectOnCanvasAction)
if self.canUseAutoExtent():
useMincoveringExtentAction = \
                QtGui.QAction('Use min covering extent from input layers',
self.pushButton)
useMincoveringExtentAction.triggered.connect(
self.useMinCoveringExtent)
popupmenu.addAction(useMincoveringExtentAction)
popupmenu.exec_(QtGui.QCursor.pos())
def useMinCoveringExtent(self):
self.text.setText('')
def getMinCoveringExtent(self):
first = True
found = False
for param in self.params:
if param.value:
if isinstance(param, (ParameterRaster, ParameterVector)):
if isinstance(param.value, (QgsRasterLayer,
QgsVectorLayer)):
layer = param.value
else:
layer = dataobjects.getObjectFromUri(param.value)
if layer:
found = True
self.addToRegion(layer, first)
first = False
elif isinstance(param, ParameterMultipleInput):
layers = param.value.split(';')
for layername in layers:
layer = dataobjects.getObjectFromUri(layername, first)
if layer:
found = True
self.addToRegion(layer, first)
first = False
if found:
return str(self.xmin) + ',' + str(self.xmax) + ',' \
+ str(self.ymin) + ',' + str(self.ymax)
else:
return None
def useNewAlg(self, alg):
self.params = alg.parameters
def addToRegion(self, layer, first):
if first:
self.xmin = layer.extent().xMinimum()
self.xmax = layer.extent().xMaximum()
self.ymin = layer.extent().yMinimum()
self.ymax = layer.extent().yMaximum()
else:
self.xmin = min(self.xmin, layer.extent().xMinimum())
self.xmax = max(self.xmax, layer.extent().xMaximum())
self.ymin = min(self.ymin, layer.extent().yMinimum())
self.ymax = max(self.ymax, layer.extent().yMaximum())
def useLayerExtent(self):
CANVAS_KEY = 'Use canvas extent'
extentsDict = {}
extentsDict[CANVAS_KEY] = interface.iface.mapCanvas().extent()
extents = [CANVAS_KEY]
layers = dataobjects.getAllLayers()
for layer in layers:
extents.append(layer.name())
extentsDict[layer.name()] = layer.extent()
(item, ok) = QtGui.QInputDialog.getItem(self, 'Select extent',
'Use extent from', extents, False)
if ok:
self.setValueFromRect(extentsDict[item])
def selectOnCanvas(self):
canvas = interface.iface.mapCanvas()
canvas.setMapTool(self.tool)
self.dialog.showMinimized()
def fillCoords(self):
r = self.tool.rectangle()
self.setValueFromRect(r)
def setValueFromRect(self, r):
s = str(r.xMinimum()) + ',' + str(r.xMaximum()) + ',' \
+ str(r.yMinimum()) + ',' + str(r.yMaximum())
self.text.setText(s)
self.tool.reset()
canvas = interface.iface.mapCanvas()
canvas.setMapTool(self.prevMapTool)
self.dialog.showNormal()
self.dialog.raise_()
self.dialog.activateWindow()
def getValue(self):
if str(self.text.text()).strip() != '':
return str(self.text.text())
else:
return self.getMinCoveringExtent()
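# A minimal standalone sketch (not part of the original class) of the
# "min covering extent" union logic implemented by addToRegion() above: the
# combined extent is the element-wise min of the minima and max of the
# maxima over all layer extents.
def _union_extents(extents):
    """extents: iterable of (xmin, xmax, ymin, ymax) tuples."""
    xmins, xmaxs, ymins, ymaxs = zip(*extents)
    return min(xmins), max(xmaxs), min(ymins), max(ymaxs)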
|
"""
Unit tests for module_generator.py.
@author: Toon Willems (Ghent University)
@author: Kenneth Hoste (Ghent University)
"""
import os
import shutil
import sys
import tempfile
from test.framework.utilities import EnhancedTestCase, find_full_path, init_config
from unittest import TestLoader, TestSuite, TextTestRunner, main
from vsc.utils.fancylogger import setLogLevelDebug, logToScreen
from vsc.utils.missing import get_subclasses
import easybuild.tools.module_generator
from easybuild.framework.easyconfig.tools import process_easyconfig
from easybuild.tools import config
from easybuild.tools.module_generator import ModuleGeneratorLua, ModuleGeneratorTcl
from easybuild.tools.module_naming_scheme.utilities import is_valid_module_name
from easybuild.framework.easyblock import EasyBlock
from easybuild.framework.easyconfig.easyconfig import EasyConfig, ActiveMNS
from easybuild.tools.build_log import EasyBuildError
from easybuild.tools.utilities import quote_str
class ModuleGeneratorTest(EnhancedTestCase):
"""Tests for module_generator module."""
MODULE_GENERATOR_CLASS = None
def setUp(self):
"""Test setup."""
super(ModuleGeneratorTest, self).setUp()
# find .eb file
eb_path = os.path.join(os.path.join(os.path.dirname(__file__), 'easyconfigs'), 'gzip-1.4.eb')
eb_full_path = find_full_path(eb_path)
self.assertTrue(eb_full_path)
ec = EasyConfig(eb_full_path)
self.eb = EasyBlock(ec)
self.modgen = self.MODULE_GENERATOR_CLASS(self.eb)
self.modgen.app.installdir = tempfile.mkdtemp(prefix='easybuild-modgen-test-')
self.orig_module_naming_scheme = config.get_module_naming_scheme()
def test_descr(self):
"""Test generation of module description (which includes '#%Module' header)."""
gzip_txt = "gzip (GNU zip) is a popular data compression program as a replacement for compress "
gzip_txt += "- Homepage: http://www.gzip.org/"
if self.MODULE_GENERATOR_CLASS == ModuleGeneratorTcl:
expected = '\n'.join([
"proc ModulesHelp { } {",
" puts stderr { %s" % gzip_txt,
" }",
"}",
'',
"module-whatis {%s}" % gzip_txt,
'',
"set root %s" % self.modgen.app.installdir,
'',
"conflict gzip",
'',
])
else:
expected = '\n'.join([
'help([[%s]])' % gzip_txt,
'',
"whatis([[%s]])" % gzip_txt,
'',
'local root = "%s"' % self.modgen.app.installdir,
'',
'conflict("gzip")',
'',
])
desc = self.modgen.get_description()
self.assertEqual(desc, expected)
# Test description with list of 'whatis' strings
self.eb.cfg['whatis'] = ['foo', 'bar']
if self.MODULE_GENERATOR_CLASS == ModuleGeneratorTcl:
expected = '\n'.join([
"proc ModulesHelp { } {",
" puts stderr { %s" % gzip_txt,
" }",
"}",
'',
"module-whatis {foo}",
"module-whatis {bar}",
'',
"set root %s" % self.modgen.app.installdir,
'',
"conflict gzip",
'',
])
else:
expected = '\n'.join([
'help([[%s]])' % gzip_txt,
'',
"whatis([[foo]])",
"whatis([[bar]])",
'',
'local root = "%s"' % self.modgen.app.installdir,
'',
'conflict("gzip")',
'',
])
desc = self.modgen.get_description()
self.assertEqual(desc, expected)
def test_load(self):
"""Test load part in generated module file."""
if self.MODULE_GENERATOR_CLASS == ModuleGeneratorTcl:
# default: guarded module load (which implies no recursive unloading)
expected = [
'',
"if { ![ is-loaded mod_name ] } {",
" module load mod_name",
"}",
'',
]
self.assertEqual('\n'.join(expected), self.modgen.load_module("mod_name"))
# with recursive unloading: no if is-loaded guard
expected = [
'',
"module load mod_name",
'',
]
self.assertEqual('\n'.join(expected), self.modgen.load_module("mod_name", recursive_unload=True))
init_config(build_options={'recursive_mod_unload': True})
self.assertEqual('\n'.join(expected), self.modgen.load_module("mod_name"))
else:
# default: guarded module load (which implies no recursive unloading)
expected = '\n'.join([
'',
'if not isloaded("mod_name") then',
' load("mod_name")',
'end',
'',
])
            self.assertEqual(expected, self.modgen.load_module("mod_name"))
# with recursive unloading: no if isloaded guard
expected = '\n'.join([
'',
'load("mod_name")',
'',
])
self.assertEqual(expected, self.modgen.load_module("mod_name", recursive_unload=True))
init_config(build_options={'recursive_mod_unload': True})
            self.assertEqual(expected, self.modgen.load_module("mod_name"))
def test_unload(self):
"""Test unload part in generated module file."""
if self.MODULE_GENERATOR_CLASS == ModuleGeneratorTcl:
expected = '\n'.join([
'',
"module unload mod_name",
])
self.assertEqual(expected, self.modgen.unload_module("mod_name"))
else:
expected = '\n'.join([
'',
'unload("mod_name")',
])
self.assertEqual(expected, self.modgen.unload_module("mod_name"))
def test_prepend_paths(self):
"""Test generating prepend-paths statements."""
# test prepend_paths
if self.MODULE_GENERATOR_CLASS == ModuleGeneratorTcl:
expected = ''.join([
"prepend-path\tkey\t\t$root/path1\n",
"prepend-path\tkey\t\t$root/path2\n",
"prepend-path\tkey\t\t$root\n",
])
paths = ['path1', 'path2', '']
self.assertEqual(expected, self.modgen.prepend_paths("key", paths))
# 2nd call should still give same result, no side-effects like manipulating passed list 'paths'!
self.assertEqual(expected, self.modgen.prepend_paths("key", paths))
expected = "prepend-path\tbar\t\t$root/foo\n"
self.assertEqual(expected, self.modgen.prepend_paths("bar", "foo"))
res = self.modgen.prepend_paths("key", ["/abs/path"], allow_abs=True)
self.assertEqual("prepend-path\tkey\t\t/abs/path\n", res)
res = self.modgen.prepend_paths('key', ['1234@example.com'], expand_relpaths=False)
self.assertEqual("prepend-path\tkey\t\t1234@example.com\n", res)
else:
expected = ''.join([
'prepend_path("key", pathJoin(root, "path1"))\n',
'prepend_path("key", pathJoin(root, "path2"))\n',
'prepend_path("key", root)\n',
])
paths = ['path1', 'path2', '']
self.assertEqual(expected, self.modgen.prepend_paths("key", paths))
# 2nd call should still give same result, no side-effects like manipulating passed list 'paths'!
self.assertEqual(expected, self.modgen.prepend_paths("key", paths))
expected = 'prepend_path("bar", pathJoin(root, "foo"))\n'
self.assertEqual(expected, self.modgen.prepend_paths("bar", "foo"))
expected = 'prepend_path("key", "/abs/path")\n'
self.assertEqual(expected, self.modgen.prepend_paths("key", ["/abs/path"], allow_abs=True))
res = self.modgen.prepend_paths('key', ['1234@example.com'], expand_relpaths=False)
self.assertEqual('prepend_path("key", "1234@example.com")\n', res)
self.assertErrorRegex(EasyBuildError, "Absolute path %s/foo passed to prepend_paths " \
"which only expects relative paths." % self.modgen.app.installdir,
self.modgen.prepend_paths, "key2", ["bar", "%s/foo" % self.modgen.app.installdir])
def test_use(self):
"""Test generating module use statements."""
if self.MODULE_GENERATOR_CLASS == ModuleGeneratorTcl:
# Test regular 'module use' statements
expected = ''.join([
'module use "/some/path"\n',
'module use "/foo/bar/baz"\n',
])
self.assertEqual(self.modgen.use(["/some/path", "/foo/bar/baz"]), expected)
# Test guarded 'module use' statements using prefix
expected = ''.join([
'if { [ file isdirectory [ file join "/foo" "/some/path" ] ] } {\n',
' module use [ file join "/foo" "/some/path" ]\n',
'}\n',
])
self.assertEqual(self.modgen.use(["/some/path"], prefix=quote_str("/foo"), guarded=True), expected)
else:
# Test regular 'module use' statements
expected = ''.join([
'prepend_path("MODULEPATH", "/some/path")\n',
'prepend_path("MODULEPATH", "/foo/bar/baz")\n',
])
self.assertEqual(self.modgen.use(["/some/path", "/foo/bar/baz"]), expected)
# Test guarded 'module use' statements using prefix
expected = ''.join([
'if isDir(pathJoin("/foo", "/some/path")) then\n',
' prepend_path("MODULEPATH", pathJoin("/foo", "/some/path"))\n',
'end\n',
])
self.assertEqual(self.modgen.use(["/some/path"], prefix=quote_str("/foo"), guarded=True), expected)
def test_env(self):
"""Test setting of environment variables."""
# test set_environment
if self.MODULE_GENERATOR_CLASS == ModuleGeneratorTcl:
self.assertEqual('setenv\tkey\t\t"value"\n', self.modgen.set_environment("key", "value"))
self.assertEqual("setenv\tkey\t\t'va\"lue'\n", self.modgen.set_environment("key", 'va"lue'))
self.assertEqual('setenv\tkey\t\t"va\'lue"\n', self.modgen.set_environment("key", "va'lue"))
self.assertEqual('setenv\tkey\t\t"""va"l\'ue"""\n', self.modgen.set_environment("key", """va"l'ue"""))
else:
self.assertEqual('setenv("key", "value")\n', self.modgen.set_environment("key", "value"))
def test_getenv_cmd(self):
"""Test getting value of environment variable."""
if self.MODULE_GENERATOR_CLASS == ModuleGeneratorTcl:
self.assertEqual('$env(HOSTNAME)', self.modgen.getenv_cmd('HOSTNAME'))
self.assertEqual('$env(HOME)', self.modgen.getenv_cmd('HOME'))
else:
self.assertEqual('os.getenv("HOSTNAME")', self.modgen.getenv_cmd('HOSTNAME'))
self.assertEqual('os.getenv("HOME")', self.modgen.getenv_cmd('HOME'))
def test_alias(self):
"""Test setting of alias in modulefiles."""
if self.MODULE_GENERATOR_CLASS == ModuleGeneratorTcl:
# test set_alias
self.assertEqual('set-alias\tkey\t\t"value"\n', self.modgen.set_alias("key", "value"))
self.assertEqual("set-alias\tkey\t\t'va\"lue'\n", self.modgen.set_alias("key", 'va"lue'))
self.assertEqual('set-alias\tkey\t\t"va\'lue"\n', self.modgen.set_alias("key", "va'lue"))
self.assertEqual('set-alias\tkey\t\t"""va"l\'ue"""\n', self.modgen.set_alias("key", """va"l'ue"""))
else:
self.assertEqual('set_alias("key", "value")\n', self.modgen.set_alias("key", "value"))
def test_conditional_statement(self):
"""Test formatting of conditional statements."""
if self.MODULE_GENERATOR_CLASS == ModuleGeneratorTcl:
simple_cond = self.modgen.conditional_statement("is-loaded foo", "module load bar")
expected = '\n'.join([
"if { [ is-loaded foo ] } {",
" module load bar",
'}',
'',
])
self.assertEqual(simple_cond, expected)
neg_cond = self.modgen.conditional_statement("is-loaded foo", "module load bar", negative=True)
expected = '\n'.join([
"if { ![ is-loaded foo ] } {",
" module load bar",
'}',
'',
])
self.assertEqual(neg_cond, expected)
if_else_cond = self.modgen.conditional_statement("is-loaded foo", "module load bar", else_body='puts "foo"')
expected = '\n'.join([
"if { [ is-loaded foo ] } {",
" module load bar",
"} else {",
' puts "foo"',
'}',
'',
])
self.assertEqual(if_else_cond, expected)
elif self.MODULE_GENERATOR_CLASS == ModuleGeneratorLua:
simple_cond = self.modgen.conditional_statement('isloaded("foo")', 'load("bar")')
expected = '\n'.join([
'if isloaded("foo") then',
' load("bar")',
'end',
'',
])
self.assertEqual(simple_cond, expected)
neg_cond = self.modgen.conditional_statement('isloaded("foo")', 'load("bar")', negative=True)
expected = '\n'.join([
'if not isloaded("foo") then',
' load("bar")',
'end',
'',
])
self.assertEqual(neg_cond, expected)
if_else_cond = self.modgen.conditional_statement('isloaded("foo")', 'load("bar")', else_body='load("bleh")')
expected = '\n'.join([
'if isloaded("foo") then',
' load("bar")',
'else',
' load("bleh")',
'end',
'',
])
self.assertEqual(if_else_cond, expected)
else:
            self.fail("Unknown module syntax")
def test_load_msg(self):
"""Test including a load message in the module file."""
if self.MODULE_GENERATOR_CLASS == ModuleGeneratorTcl:
tcl_load_msg = '\n'.join([
'',
"if { [ module-info mode load ] } {",
" puts stderr \"test \\$test \\$test",
" test \\$foo \\$bar\"",
"}",
'',
])
self.assertEqual(tcl_load_msg, self.modgen.msg_on_load('test $test \\$test\ntest $foo \\$bar'))
else:
pass
def test_module_naming_scheme(self):
"""Test using default module naming scheme."""
all_stops = [x[0] for x in EasyBlock.get_steps()]
init_config(build_options={'valid_stops': all_stops})
ecs_dir = os.path.join(os.path.dirname(__file__), 'easyconfigs')
ec_files = [os.path.join(subdir, fil) for (subdir, _, files) in os.walk(ecs_dir) for fil in files]
# TODO FIXME: drop this once 2.0/.yeb support works
ec_files = [fil for fil in ec_files if not ('v2.0/' in fil or 'yeb/' in fil)]
build_options = {
'check_osdeps': False,
'external_modules_metadata': {},
'robot_path': [ecs_dir],
'valid_stops': all_stops,
'validate': False,
}
init_config(build_options=build_options)
def test_mns():
"""Test default module naming scheme."""
# test default naming scheme
            for ec_file in [f for f in ec_files if 'broken' not in os.path.basename(f)]:
ec_path = os.path.abspath(ec_file)
ecs = process_easyconfig(ec_path, validate=False)
# derive module name directly from easyconfig file name
ec_fn = os.path.basename(ec_file)
if ec_fn in ec2mod_map:
# only check first, ignore any others (occurs when blocks are used (format v1.0 only))
self.assertEqual(ec2mod_map[ec_fn], ActiveMNS().det_full_module_name(ecs[0]['ec']))
# test default module naming scheme
default_ec2mod_map = {
'GCC-4.6.3.eb': 'GCC/4.6.3',
'gzip-1.4.eb': 'gzip/1.4',
'gzip-1.4-GCC-4.6.3.eb': 'gzip/1.4-GCC-4.6.3',
'gzip-1.5-goolf-1.4.10.eb': 'gzip/1.5-goolf-1.4.10',
'gzip-1.5-ictce-4.1.13.eb': 'gzip/1.5-ictce-4.1.13',
'toy-0.0.eb': 'toy/0.0',
'toy-0.0-multiple.eb': 'toy/0.0-somesuffix', # first block sets versionsuffix to '-somesuffix'
}
ec2mod_map = default_ec2mod_map
test_mns()
# generating module name from non-parsed easyconfig works fine
non_parsed = {
'name': 'foo',
'version': '1.2.3',
'versionsuffix': '-bar',
'toolchain': {
'name': 't00ls',
'version': '6.6.6',
},
}
self.assertEqual('foo/1.2.3-t00ls-6.6.6-bar', ActiveMNS().det_full_module_name(non_parsed))
# install custom module naming scheme dynamically
test_mns_parent_dir = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'sandbox')
sys.path.append(test_mns_parent_dir)
reload(easybuild)
reload(easybuild.tools)
reload(easybuild.tools.module_naming_scheme)
# make sure test module naming schemes are available
mns_mods = ['broken_module_naming_scheme', 'test_module_naming_scheme', 'test_module_naming_scheme_more']
for test_mns_mod in mns_mods:
mns_path = "easybuild.tools.module_naming_scheme.%s" % test_mns_mod
__import__(mns_path, globals(), locals(), [''])
init_config(build_options=build_options)
# verify that key errors in module naming scheme are reported properly
os.environ['EASYBUILD_MODULE_NAMING_SCHEME'] = 'BrokenModuleNamingScheme'
init_config(build_options=build_options)
err_pattern = 'nosucheasyconfigparameteravailable'
self.assertErrorRegex(EasyBuildError, err_pattern, EasyConfig, os.path.join(ecs_dir, 'gzip-1.5-goolf-1.4.10.eb'))
# test simple custom module naming scheme
os.environ['EASYBUILD_MODULE_NAMING_SCHEME'] = 'TestModuleNamingScheme'
init_config(build_options=build_options)
ec2mod_map = {
'GCC-4.6.3.eb': 'GCC/4.6.3',
'gzip-1.4.eb': 'gzip/1.4',
'gzip-1.4-GCC-4.6.3.eb': 'gnu/gzip/1.4',
'gzip-1.5-goolf-1.4.10.eb': 'gnu/openmpi/gzip/1.5',
'gzip-1.5-ictce-4.1.13.eb': 'intel/intelmpi/gzip/1.5',
'toy-0.0.eb': 'toy/0.0',
'toy-0.0-multiple.eb': 'toy/0.0', # test module naming scheme ignores version suffixes
}
test_mns()
ec = EasyConfig(os.path.join(ecs_dir, 'gzip-1.5-goolf-1.4.10.eb'))
self.assertEqual(ec.toolchain.det_short_module_name(), 'goolf/1.4.10')
# test module naming scheme using all available easyconfig parameters
os.environ['EASYBUILD_MODULE_NAMING_SCHEME'] = 'TestModuleNamingSchemeMore'
init_config(build_options=build_options)
# note: these checksums will change if another easyconfig parameter is added
ec2mod_map = {
'GCC-4.6.3.eb': 'GCC/9e9ab5a1e978f0843b5aedb63ac4f14c51efb859',
'gzip-1.4.eb': 'gzip/53d5c13e85cb6945bd43a58d1c8d4a4c02f3462d',
'gzip-1.4-GCC-4.6.3.eb': 'gzip/585eba598f33c64ef01c6fa47af0fc37f3751311',
'gzip-1.5-goolf-1.4.10.eb': 'gzip/fceb41e04c26b540b7276c4246d1ecdd1e8251c9',
'gzip-1.5-ictce-4.1.13.eb': 'gzip/ae16b3a0a330d4323987b360c0d024f244ac4498',
'toy-0.0.eb': 'toy/44a206d9e8c14130cc9f79e061468303c6e91b53',
'toy-0.0-multiple.eb': 'toy/44a206d9e8c14130cc9f79e061468303c6e91b53',
}
test_mns()
# test determining module name for dependencies (i.e. non-parsed easyconfigs)
# using a module naming scheme that requires all easyconfig parameters
ec2mod_map['gzip-1.5-goolf-1.4.10.eb'] = 'gzip/.fceb41e04c26b540b7276c4246d1ecdd1e8251c9'
for dep_ec, dep_spec in [
('GCC-4.6.3.eb', {
'name': 'GCC',
'version': '4.6.3',
'versionsuffix': '',
'toolchain': {'name': 'dummy', 'version': 'dummy'},
'hidden': False,
}),
('gzip-1.5-goolf-1.4.10.eb', {
'name': 'gzip',
'version': '1.5',
'versionsuffix': '',
'toolchain': {'name': 'goolf', 'version': '1.4.10'},
'hidden': True,
}),
('toy-0.0-multiple.eb', {
'name': 'toy',
'version': '0.0',
'versionsuffix': '-multiple',
'toolchain': {'name': 'dummy', 'version': 'dummy'},
'hidden': False,
}),
]:
# determine full module name
self.assertEqual(ActiveMNS().det_full_module_name(dep_spec), ec2mod_map[dep_ec])
ec = EasyConfig(os.path.join(ecs_dir, 'gzip-1.5-goolf-1.4.10.eb'), hidden=True)
self.assertEqual(ec.full_mod_name, ec2mod_map['gzip-1.5-goolf-1.4.10.eb'])
self.assertEqual(ec.toolchain.det_short_module_name(), 'goolf/a86eb41d8f9c1d6f2d3d61cdb8f420cc2a21cada')
# restore default module naming scheme, and retest
os.environ['EASYBUILD_MODULE_NAMING_SCHEME'] = self.orig_module_naming_scheme
init_config(build_options=build_options)
ec2mod_map = default_ec2mod_map
test_mns()
def test_mod_name_validation(self):
"""Test module naming validation."""
# module name must be a string
self.assertTrue(not is_valid_module_name(('foo', 'bar')))
self.assertTrue(not is_valid_module_name(['foo', 'bar']))
self.assertTrue(not is_valid_module_name(123))
# module name must be relative
self.assertTrue(not is_valid_module_name('/foo/bar'))
# module name must only contain valid characters
self.assertTrue(not is_valid_module_name('foo\x0bbar'))
self.assertTrue(not is_valid_module_name('foo\x0cbar'))
self.assertTrue(not is_valid_module_name('foo\rbar'))
self.assertTrue(not is_valid_module_name('foo\0bar'))
# valid module name must be accepted
self.assertTrue(is_valid_module_name('gzip/goolf-1.4.10-suffix'))
self.assertTrue(is_valid_module_name('GCC/4.7.2'))
self.assertTrue(is_valid_module_name('foo-bar/1.2.3'))
self.assertTrue(is_valid_module_name('ictce'))
def test_is_short_modname_for(self):
"""Test is_short_modname_for method of module naming schemes."""
test_cases = [
('GCC/4.7.2', 'GCC', True),
('gzip/1.6-gompi-1.4.10', 'gzip', True),
('OpenMPI/1.6.4-GCC-4.7.2-no-OFED', 'OpenMPI', True),
('BLACS/1.1-gompi-1.1.0-no-OFED', 'BLACS', True),
('ScaLAPACK/1.8.0-gompi-1.1.0-no-OFED-ATLAS-3.8.4-LAPACK-3.4.0-BLACS-1.1', 'ScaLAPACK', True),
('netCDF-C++/4.2-goolf-1.4.10', 'netCDF-C++', True),
('gcc/4.7.2', 'GCC', False),
('ScaLAPACK/1.8.0-gompi-1.1.0-no-OFED-ATLAS-3.8.4-LAPACK-3.4.0-BLACS-1.1', 'BLACS', False),
('apps/blacs/1.1', 'BLACS', False),
('lib/math/BLACS-stable/1.1', 'BLACS', False),
]
for modname, softname, res in test_cases:
if res:
                errormsg = "%s should be recognised as a module for '%s'" % (modname, softname)
else:
                errormsg = "%s should NOT be recognised as a module for '%s'" % (modname, softname)
self.assertEqual(ActiveMNS().is_short_modname_for(modname, softname), res, errormsg)
def test_hierarchical_mns(self):
"""Test hierarchical module naming scheme."""
moduleclasses = ['base', 'compiler', 'mpi', 'numlib', 'system', 'toolchain']
ecs_dir = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'easyconfigs')
all_stops = [x[0] for x in EasyBlock.get_steps()]
build_options = {
'check_osdeps': False,
'robot_path': [ecs_dir],
'valid_stops': all_stops,
'validate': False,
'valid_module_classes': moduleclasses,
}
def test_ec(ecfile, short_modname, mod_subdir, modpath_exts, user_modpath_exts, init_modpaths):
"""Test whether active module naming scheme returns expected values."""
ec = EasyConfig(os.path.join(ecs_dir, ecfile))
self.assertEqual(ActiveMNS().det_full_module_name(ec), os.path.join(mod_subdir, short_modname))
self.assertEqual(ActiveMNS().det_short_module_name(ec), short_modname)
self.assertEqual(ActiveMNS().det_module_subdir(ec), mod_subdir)
self.assertEqual(ActiveMNS().det_modpath_extensions(ec), modpath_exts)
self.assertEqual(ActiveMNS().det_user_modpath_extensions(ec), user_modpath_exts)
self.assertEqual(ActiveMNS().det_init_modulepaths(ec), init_modpaths)
os.environ['EASYBUILD_MODULE_NAMING_SCHEME'] = 'HierarchicalMNS'
init_config(build_options=build_options)
# format: easyconfig_file: (short_mod_name, mod_subdir, modpath_exts, user_modpath_exts, init_modpaths)
iccver = '2013.5.192-GCC-4.8.3'
impi_ec = 'impi-4.1.3.049-iccifort-2013.5.192-GCC-4.8.3.eb'
imkl_ec = 'imkl-11.1.2.144-iimpi-5.5.3-GCC-4.8.3.eb'
test_ecs = {
'GCC-4.7.2.eb': ('GCC/4.7.2', 'Core', ['Compiler/GCC/4.7.2'],
['Compiler/GCC/4.7.2'], ['Core']),
'OpenMPI-1.6.4-GCC-4.7.2.eb': ('OpenMPI/1.6.4', 'Compiler/GCC/4.7.2', ['MPI/GCC/4.7.2/OpenMPI/1.6.4'],
['MPI/GCC/4.7.2/OpenMPI/1.6.4'], ['Core']),
'gzip-1.5-goolf-1.4.10.eb': ('gzip/1.5', 'MPI/GCC/4.7.2/OpenMPI/1.6.4', [],
[], ['Core']),
'goolf-1.4.10.eb': ('goolf/1.4.10', 'Core', [],
[], ['Core']),
'icc-2013.5.192-GCC-4.8.3.eb': ('icc/%s' % iccver, 'Core', ['Compiler/intel/%s' % iccver],
['Compiler/intel/%s' % iccver], ['Core']),
'ifort-2013.3.163.eb': ('ifort/2013.3.163', 'Core', ['Compiler/intel/2013.3.163'],
['Compiler/intel/2013.3.163'], ['Core']),
'CUDA-5.5.22-GCC-4.8.2.eb': ('CUDA/5.5.22', 'Compiler/GCC/4.8.2', ['Compiler/GCC-CUDA/4.8.2-5.5.22'],
['Compiler/GCC-CUDA/4.8.2-5.5.22'], ['Core']),
impi_ec: ('impi/4.1.3.049', 'Compiler/intel/%s' % iccver, ['MPI/intel/%s/impi/4.1.3.049' % iccver],
['MPI/intel/%s/impi/4.1.3.049' % iccver], ['Core']),
imkl_ec: ('imkl/11.1.2.144', 'MPI/intel/%s/impi/4.1.3.049' % iccver, [],
[], ['Core']),
}
for ecfile, mns_vals in test_ecs.items():
test_ec(ecfile, *mns_vals)
# impi with dummy toolchain, which doesn't make sense in a hierarchical context
ec = EasyConfig(os.path.join(ecs_dir, 'impi-4.1.3.049.eb'))
self.assertErrorRegex(EasyBuildError, 'No compiler available.*MPI lib', ActiveMNS().det_modpath_extensions, ec)
os.environ['EASYBUILD_MODULE_NAMING_SCHEME'] = 'CategorizedHMNS'
init_config(build_options=build_options)
# format: easyconfig_file: (short_mod_name, mod_subdir, modpath_exts, user_modpath_exts)
test_ecs = {
'GCC-4.7.2.eb': ('GCC/4.7.2', 'Core/compiler',
['Compiler/GCC/4.7.2/%s' % c for c in moduleclasses],
['Compiler/GCC/4.7.2']),
'OpenMPI-1.6.4-GCC-4.7.2.eb': ('OpenMPI/1.6.4', 'Compiler/GCC/4.7.2/mpi',
['MPI/GCC/4.7.2/OpenMPI/1.6.4/%s' % c for c in moduleclasses],
['MPI/GCC/4.7.2/OpenMPI/1.6.4']),
'gzip-1.5-goolf-1.4.10.eb': ('gzip/1.5', 'MPI/GCC/4.7.2/OpenMPI/1.6.4/tools',
[], []),
'goolf-1.4.10.eb': ('goolf/1.4.10', 'Core/toolchain',
[], []),
'icc-2013.5.192-GCC-4.8.3.eb': ('icc/%s' % iccver, 'Core/compiler',
['Compiler/intel/%s/%s' % (iccver, c) for c in moduleclasses],
['Compiler/intel/%s' % iccver]),
'ifort-2013.3.163.eb': ('ifort/2013.3.163', 'Core/compiler',
['Compiler/intel/2013.3.163/%s' % c for c in moduleclasses],
['Compiler/intel/2013.3.163']),
'CUDA-5.5.22-GCC-4.8.2.eb': ('CUDA/5.5.22', 'Compiler/GCC/4.8.2/system',
['Compiler/GCC-CUDA/4.8.2-5.5.22/%s' % c for c in moduleclasses],
['Compiler/GCC-CUDA/4.8.2-5.5.22']),
impi_ec: ('impi/4.1.3.049', 'Compiler/intel/%s/mpi' % iccver,
['MPI/intel/%s/impi/4.1.3.049/%s' % (iccver, c) for c in moduleclasses],
['MPI/intel/%s/impi/4.1.3.049' % iccver]),
imkl_ec: ('imkl/11.1.2.144', 'MPI/intel/%s/impi/4.1.3.049/numlib' % iccver,
[], []),
}
for ecfile, mns_vals in test_ecs.items():
            test_ec(ecfile, *mns_vals, init_modpaths=['Core/%s' % c for c in moduleclasses])
# impi with dummy toolchain, which doesn't make sense in a hierarchical context
ec = EasyConfig(os.path.join(ecs_dir, 'impi-4.1.3.049.eb'))
self.assertErrorRegex(EasyBuildError, 'No compiler available.*MPI lib', ActiveMNS().det_modpath_extensions, ec)
os.environ['EASYBUILD_MODULE_NAMING_SCHEME'] = self.orig_module_naming_scheme
init_config(build_options=build_options)
test_ecs = {
'GCC-4.7.2.eb': ('GCC/4.7.2', '', [], [], []),
'OpenMPI-1.6.4-GCC-4.7.2.eb': ('OpenMPI/1.6.4-GCC-4.7.2', '', [], [], []),
'gzip-1.5-goolf-1.4.10.eb': ('gzip/1.5-goolf-1.4.10', '', [], [], []),
'goolf-1.4.10.eb': ('goolf/1.4.10', '', [], [], []),
'impi-4.1.3.049.eb': ('impi/4.1.3.049', '', [], [], []),
}
for ecfile, mns_vals in test_ecs.items():
test_ec(ecfile, *mns_vals)
class TclModuleGeneratorTest(ModuleGeneratorTest):
"""Test for module_generator module for Tcl syntax."""
MODULE_GENERATOR_CLASS = ModuleGeneratorTcl
class LuaModuleGeneratorTest(ModuleGeneratorTest):
"""Test for module_generator module for Tcl syntax."""
MODULE_GENERATOR_CLASS = ModuleGeneratorLua
def suite():
""" returns all the testcases in this module """
suite = TestSuite()
suite.addTests(TestLoader().loadTestsFromTestCase(TclModuleGeneratorTest))
suite.addTests(TestLoader().loadTestsFromTestCase(LuaModuleGeneratorTest))
return suite
if __name__ == '__main__':
#logToScreen(enable=True)
#setLogLevelDebug()
TextTestRunner().run(suite())
|
ANSIBLE_METADATA = {'status': ['preview'],
'supported_by': 'community',
'metadata_version': '1.0'}
DOCUMENTATION = '''
---
module: data_pipeline
version_added: "2.4"
author:
- Raghu Udiyar <raghusiddarth@gmail.com> (@raags)
- Sloane Hertel <shertel@redhat.com>
requirements: [ "boto3" ]
short_description: Create and manage AWS Datapipelines
extends_documentation_fragment:
- aws
- ec2
description:
- Create and manage AWS Datapipelines. Creation is not idempotent in AWS,
so the uniqueId is created by hashing the options (minus objects) given to the datapipeline.
The pipeline definition must be in the format given here
U(http://docs.aws.amazon.com/datapipeline/latest/APIReference/API_PutPipelineDefinition.html#API_PutPipelineDefinition_RequestSyntax).
Also operations will wait for a configurable amount
of time to ensure the pipeline is in the requested state.
options:
name:
description:
- The name of the Datapipeline to create/modify/delete.
required: true
description:
description:
- An optional description for the pipeline being created.
default: ''
objects:
description:
- A list of pipeline object definitions, each of which is a dict that takes the keys C(id), C(name) and C(fields).
suboptions:
id:
description:
- The ID of the object.
name:
description:
- The name of the object.
fields:
description:
- A list of dicts that take the keys C(key) and C(stringValue)/C(refValue).
The value is specified as a reference to another object C(refValue) or as a string value C(stringValue)
but not as both.
parameters:
description:
- A list of parameter objects (dicts) in the pipeline definition.
suboptions:
id:
description:
- The ID of the parameter object.
attributes:
description:
- A list of attributes (dicts) of the parameter object. Each attribute takes the keys C(key) and C(stringValue) both
of which are strings.
values:
description:
- A list of parameter values (dicts) in the pipeline definition. Each dict takes the keys C(id) and C(stringValue) both
of which are strings.
timeout:
description:
- Time in seconds to wait for the pipeline to transition to the requested state, fail otherwise.
default: 300
state:
description:
- The requested state of the pipeline.
choices: ['present', 'absent', 'active', 'inactive']
default: present
tags:
description:
- A dict of key:value pair(s) to add to the pipeline.
default: null
'''
EXAMPLES = '''
- data_pipeline:
name: test-dp
region: us-west-2
objects: "{{pipelineObjects}}"
parameters: "{{pipelineParameters}}"
values: "{{pipelineValues}}"
tags:
key1: val1
key2: val2
state: present
- data_pipeline:
name: test-dp
objects:
- "id": "DefaultSchedule"
"name": "Every 1 day"
"fields":
- "key": "period"
"stringValue": "1 days"
- "key": "type"
"stringValue": "Schedule"
- "key": "startAt"
"stringValue": "FIRST_ACTIVATION_DATE_TIME"
- "id": "Default"
"name": "Default"
"fields": [ { "key": "resourceRole", "stringValue": "my_resource_role" },
{ "key": "role", "stringValue": "DataPipelineDefaultRole" },
{ "key": "pipelineLogUri", "stringValue": "s3://my_s3_log.txt" },
{ "key": "scheduleType", "stringValue": "cron" },
{ "key": "schedule", "refValue": "DefaultSchedule" },
{ "key": "failureAndRerunMode", "stringValue": "CASCADE" } ]
state: active
- data_pipeline:
name: test-dp
region: us-west-2
state: active
- data_pipeline:
name: test-dp
region: us-west-2
state: absent
'''
RETURN = '''
changed:
description: whether the data pipeline has been modified
type: bool
returned: always
sample:
changed: true
result:
description:
- Contains the data pipeline data (data_pipeline) and a return message (msg).
If the data pipeline exists data_pipeline will contain the keys description, name,
pipeline_id, state, tags, and unique_id. If the data pipeline does not exist then
data_pipeline will be an empty dict. The msg describes the status of the operation.
returned: always
type: dict
'''
import hashlib
import traceback
import re
import json
import time
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.ec2 import ec2_argument_spec, get_aws_connection_info, boto3_conn, camel_dict_to_snake_dict
from ansible.module_utils._text import to_bytes, to_text
try:
import boto3
from botocore.exceptions import ClientError
HAS_BOTO3 = True
except ImportError:
HAS_BOTO3 = False
DP_ACTIVE_STATES = ['ACTIVE', 'SCHEDULED']
DP_INACTIVE_STATES = ['INACTIVE', 'PENDING', 'FINISHED', 'DELETING']
DP_ACTIVATING_STATE = 'ACTIVATING'
DP_DEACTIVATING_STATE = 'DEACTIVATING'
PIPELINE_DOESNT_EXIST = '^.*Pipeline with id: {0} does not exist$'
class DataPipelineNotFound(Exception):
pass
class TimeOutException(Exception):
pass
def pipeline_id(client, name):
"""Return pipeline id for the given pipeline name
:param object client: boto3 datapipeline client
:param string name: pipeline name
:returns: pipeline id
:raises: DataPipelineNotFound
"""
pipelines = client.list_pipelines()
for dp in pipelines['pipelineIdList']:
if dp['name'] == name:
return dp['id']
else:
raise DataPipelineNotFound
def pipeline_description(client, dp_id):
"""Return pipeline description list
    :param object client: boto3 datapipeline client
    :param string dp_id: pipeline id
:returns: pipeline description dictionary
:raises: DataPipelineNotFound
"""
try:
return client.describe_pipelines(pipelineIds=[dp_id])
except ClientError as e:
raise DataPipelineNotFound
def pipeline_field(client, dp_id, field):
"""Return a pipeline field from the pipeline description.
The available fields are listed in describe_pipelines output.
:param object client: boto3 datapipeline client
:param string dp_id: pipeline id
:param string field: pipeline description field
:returns: pipeline field information
"""
dp_description = pipeline_description(client, dp_id)
for field_key in dp_description['pipelineDescriptionList'][0]['fields']:
if field_key['key'] == field:
return field_key['stringValue']
else:
raise KeyError("Field key {0} not found!".format(field))
def run_with_timeout(timeout, func, *func_args, **func_kwargs):
"""Run func with the provided args and kwargs, and wait utill
timeout for truthy return value
:param int timeout: time to wait for status
:param function func: function to run, should return True or False
:param args func_args: function args to pass to func
:param kwargs func_kwargs: function key word args
:returns: True if func returns truthy within timeout
:raises: TimeOutException
"""
for _ in range(timeout // 10):
if func(*func_args, **func_kwargs):
return True
else:
# check every 10s
time.sleep(10)
raise TimeOutException
def check_dp_exists(client, dp_id):
"""Check if datapipeline exists
:param object client: boto3 datapipeline client
:param string dp_id: pipeline id
:returns: True or False
"""
try:
# pipeline_description raises DataPipelineNotFound
if pipeline_description(client, dp_id):
return True
else:
return False
except DataPipelineNotFound:
return False
def check_dp_status(client, dp_id, status):
"""Checks if datapipeline matches states in status list
:param object client: boto3 datapipeline client
:param string dp_id: pipeline id
:param list status: list of states to check against
:returns: True or False
"""
assert isinstance(status, list)
    return pipeline_field(client, dp_id, field="@pipelineState") in status
def pipeline_status_timeout(client, dp_id, status, timeout):
args = (client, dp_id, status)
return run_with_timeout(timeout, check_dp_status, *args)
def pipeline_exists_timeout(client, dp_id, timeout):
args = (client, dp_id)
return run_with_timeout(timeout, check_dp_exists, *args)
def activate_pipeline(client, module):
"""Activates pipeline
"""
dp_name = module.params.get('name')
timeout = module.params.get('timeout')
try:
dp_id = pipeline_id(client, dp_name)
except DataPipelineNotFound:
module.fail_json(msg='Data Pipeline {0} not found'.format(dp_name))
if pipeline_field(client, dp_id, field="@pipelineState") in DP_ACTIVE_STATES:
changed = False
else:
try:
client.activate_pipeline(pipelineId=dp_id)
except ClientError as e:
if e.response["Error"]["Code"] == "InvalidRequestException":
module.fail_json(msg="You need to populate your pipeline before activation.")
try:
pipeline_status_timeout(client, dp_id, status=DP_ACTIVE_STATES,
timeout=timeout)
except TimeOutException:
if pipeline_field(client, dp_id, field="@pipelineState") == "FINISHED":
# activated but completed more rapidly than it was checked
pass
else:
module.fail_json(msg=('Data Pipeline {0} failed to activate '
'within timeout {1} seconds').format(dp_name, timeout))
changed = True
data_pipeline = get_result(client, dp_id)
result = {'data_pipeline': data_pipeline,
'msg': 'Data Pipeline {0} activated.'.format(dp_name)}
return (changed, result)
def deactivate_pipeline(client, module):
"""Deactivates pipeline
"""
dp_name = module.params.get('name')
timeout = module.params.get('timeout')
try:
dp_id = pipeline_id(client, dp_name)
except DataPipelineNotFound:
module.fail_json(msg='Data Pipeline {0} not found'.format(dp_name))
if pipeline_field(client, dp_id, field="@pipelineState") in DP_INACTIVE_STATES:
changed = False
else:
client.deactivate_pipeline(pipelineId=dp_id)
try:
pipeline_status_timeout(client, dp_id, status=DP_INACTIVE_STATES,
timeout=timeout)
except TimeOutException:
            module.fail_json(msg=('Data Pipeline {0} failed to deactivate '
'within timeout {1} seconds').format(dp_name, timeout))
changed = True
data_pipeline = get_result(client, dp_id)
result = {'data_pipeline': data_pipeline,
'msg': 'Data Pipeline {0} deactivated.'.format(dp_name)}
return (changed, result)
def _delete_dp_with_check(dp_id, client, timeout):
client.delete_pipeline(pipelineId=dp_id)
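    # Note: the raw (unformatted) PIPELINE_DOESNT_EXIST pattern passed as "status" below is never
    # expected to match an actual pipeline state. Once the pipeline is gone, describe_pipelines()
    # fails, pipeline_description() raises DataPipelineNotFound, and that exception (caught below)
    # is what signals that the deletion completed.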
try:
pipeline_status_timeout(client=client, dp_id=dp_id, status=[PIPELINE_DOESNT_EXIST], timeout=timeout)
except DataPipelineNotFound:
return True
def delete_pipeline(client, module):
"""Deletes pipeline
"""
dp_name = module.params.get('name')
timeout = module.params.get('timeout')
try:
dp_id = pipeline_id(client, dp_name)
_delete_dp_with_check(dp_id, client, timeout)
changed = True
except DataPipelineNotFound:
changed = False
except TimeOutException:
        module.fail_json(msg=('Data Pipeline {0} failed to delete '
'within timeout {1} seconds').format(dp_name, timeout))
result = {'data_pipeline': {},
'msg': 'Data Pipeline {0} deleted'.format(dp_name)}
return (changed, result)
def build_unique_id(module):
data = dict(module.params)
    # removing objects from the unique id so we can update objects or populate the pipeline
    # after creation without needing to make a new pipeline
    for key in ('objects', 'timeout'):
        data.pop(key, None)
json_data = json.dumps(data, sort_keys=True).encode("utf-8")
hashed_data = hashlib.md5(json_data).hexdigest()
return hashed_data
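# Illustrative sketch of the idempotency trick above (hypothetical values, nothing here is part of
# the module API): two runs whose params differ only in 'objects' and/or 'timeout', e.g.
#   {'name': 'test-dp', 'state': 'present', 'timeout': 300, 'objects': [...]}
#   {'name': 'test-dp', 'state': 'present', 'timeout': 600, 'objects': []}
# hash the same remaining dict, so build_unique_id() returns the same MD5 digest for both and
# create_pipeline() treats them as the same pipeline.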
def format_tags(tags):
""" Reformats tags
:param dict tags: dict of data pipeline tags (e.g. {key1: val1, key2: val2, key3: val3})
:returns: list of dicts (e.g. [{key: key1, value: val1}, {key: key2, value: val2}, {key: key3, value: val3}])
"""
return [dict(key=k, value=v) for k, v in tags.items()]
def get_result(client, dp_id):
""" Get the current state of the data pipeline and reformat it to snake_case for exit_json
:param object client: boto3 datapipeline client
:param string dp_id: pipeline id
:returns: reformatted dict of pipeline description
"""
# pipeline_description returns a pipelineDescriptionList of length 1
# dp is a dict with keys "description" (str), "fields" (list), "name" (str), "pipelineId" (str), "tags" (dict)
dp = pipeline_description(client, dp_id)['pipelineDescriptionList'][0]
# Get uniqueId and pipelineState in fields to add to the exit_json result
dp["unique_id"] = pipeline_field(client, dp_id, field="uniqueId")
dp["pipeline_state"] = pipeline_field(client, dp_id, field="@pipelineState")
# Remove fields; can't make a list snake_case and most of the data is redundant
del dp["fields"]
# Note: tags is already formatted fine so we don't need to do anything with it
# Reformat data pipeline and add reformatted fields back
dp = camel_dict_to_snake_dict(dp)
return dp
def diff_pipeline(client, module, objects, unique_id, dp_name):
"""Check if there's another pipeline with the same unique_id and if so, checks if the object needs to be updated
"""
result = {}
changed = False
create_dp = False
# See if there is already a pipeline with the same unique_id
unique_id = build_unique_id(module)
try:
dp_id = pipeline_id(client, dp_name)
dp_unique_id = to_text(pipeline_field(client, dp_id, field="uniqueId"))
if dp_unique_id != unique_id:
# A change is expected but not determined. Updated to a bool in create_pipeline().
changed = "NEW_VERSION"
create_dp = True
# Unique ids are the same - check if pipeline needs modification
else:
dp_objects = client.get_pipeline_definition(pipelineId=dp_id)['pipelineObjects']
# Definition needs to be updated
if dp_objects != objects:
changed, msg = define_pipeline(client, module, objects, dp_id)
# No changes
else:
msg = 'Data Pipeline {0} is present'.format(dp_name)
data_pipeline = get_result(client, dp_id)
result = {'data_pipeline': data_pipeline,
'msg': msg}
except DataPipelineNotFound:
create_dp = True
return create_dp, changed, result
def define_pipeline(client, module, objects, dp_id):
"""Puts pipeline definition
"""
dp_name = module.params.get('name')
if pipeline_field(client, dp_id, field="@pipelineState") == "FINISHED":
msg = 'Data Pipeline {0} is unable to be updated while in state FINISHED.'.format(dp_name)
changed = False
elif objects:
parameters = module.params.get('parameters')
values = module.params.get('values')
try:
client.put_pipeline_definition(pipelineId=dp_id,
pipelineObjects=objects,
parameterObjects=parameters,
parameterValues=values)
msg = 'Data Pipeline {0} has been updated.'.format(dp_name)
changed = True
except ClientError as e:
module.fail_json(msg="Failed to put the definition for pipeline {0}. Check that string/reference fields"
"are not empty and that the number of objects in the pipeline does not exceed maximum allowed"
"objects".format(dp_name), exception=traceback.format_exc())
else:
changed = False
msg = ""
return changed, msg
def create_pipeline(client, module):
"""Creates datapipeline. Uses uniqueId to achieve idempotency.
"""
dp_name = module.params.get('name')
objects = module.params.get('objects', None)
description = module.params.get('description', '')
tags = module.params.get('tags')
timeout = module.params.get('timeout')
unique_id = build_unique_id(module)
create_dp, changed, result = diff_pipeline(client, module, objects, unique_id, dp_name)
if changed == "NEW_VERSION":
# delete old version
changed, _ = delete_pipeline(client, module)
# There isn't a pipeline or it has different parameters than the pipeline in existence.
if create_dp:
# Make pipeline
try:
tags = format_tags(tags)
dp = client.create_pipeline(name=dp_name,
uniqueId=unique_id,
description=description,
tags=tags)
dp_id = dp['pipelineId']
pipeline_exists_timeout(client, dp_id, timeout)
except ClientError as e:
module.fail_json(msg="Failed to create the data pipeline {0}.".format(dp_name), exception=traceback.format_exc())
except TimeOutException:
            module.fail_json(msg=('Data Pipeline {0} failed to create '
'within timeout {1} seconds').format(dp_name, timeout))
# Put pipeline definition
_, msg = define_pipeline(client, module, objects, dp_id)
changed = True
data_pipeline = get_result(client, dp_id)
result = {'data_pipeline': data_pipeline,
'msg': 'Data Pipeline {0} created.'.format(dp_name) + msg}
return (changed, result)
def main():
argument_spec = ec2_argument_spec()
argument_spec.update(
dict(
name=dict(required=True),
version=dict(required=False),
description=dict(required=False, default=''),
objects=dict(required=False, type='list', default=[]),
parameters=dict(required=False, type='list', default=[]),
timeout=dict(required=False, type='int', default=300),
state=dict(default='present', choices=['present', 'absent',
'active', 'inactive']),
tags=dict(required=False, type='dict')
)
)
module = AnsibleModule(argument_spec, supports_check_mode=False)
if not HAS_BOTO3:
module.fail_json(msg='boto3 is required for the datapipeline module!')
try:
region, ec2_url, aws_connect_kwargs = get_aws_connection_info(module, boto3=True)
if not region:
module.fail_json(msg="Region must be specified as a parameter, in EC2_REGION or AWS_REGION environment variables or in boto configuration file")
client = boto3_conn(module, conn_type='client',
resource='datapipeline', region=region,
endpoint=ec2_url, **aws_connect_kwargs)
except ClientError as e:
module.fail_json(msg="Can't authorize connection - " + str(e))
state = module.params.get('state')
if state == 'present':
changed, result = create_pipeline(client, module)
elif state == 'absent':
changed, result = delete_pipeline(client, module)
elif state == 'active':
changed, result = activate_pipeline(client, module)
elif state == 'inactive':
changed, result = deactivate_pipeline(client, module)
module.exit_json(result=result, changed=changed)
if __name__ == '__main__':
main()
|
"""Spellcheck using google"""
from urlparse import urljoin
import re
from madcow.util import Module, strip_html
from madcow.util.text import *
from madcow.util.http import getsoup
class Main(Module):
pattern = re.compile(r'^\s*spell(?:\s*check)?\s+(.+?)\s*$', re.I)
help = u'spellcheck <word> - use google to spellcheck'
google_url = 'http://www.google.com/'
google_search = urljoin(google_url, '/search')
error = 'I had trouble with that'
def response(self, nick, args, kwargs):
opts = {'hl': 'en', 'safe': 'off', 'q': args[0]}
soup = getsoup(self.google_search, opts, referer=self.google_url)
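        # Google's "Did you mean" suggestion (if present) appears to be rendered as a /search link
        # with spell=1 in its href; the text of that link is used as the corrected spelling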
correct = soup.body.find('a', href=re.compile(r'^/search.*spell=1'))
if correct:
res = strip_html(decode(correct.renderContents(), 'utf-8'))
else:
res = u'spelled correctly. probably.'
return u'%s: %s' % (nick, res)
|
import _surface
import chimera
try:
import chimera.runCommand
except:
pass
from VolumePath import markerset as ms
try:
from VolumePath import Marker_Set, Link
new_marker_set=Marker_Set
except:
from VolumePath import volume_path_dialog
d= volume_path_dialog(True)
new_marker_set= d.new_marker_set
marker_sets={}
surf_sets={}
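# Auto-generated marker placement follows: each "particle_N geometry" marker set receives one
# marker placed at an (x, y, z) position with an (r, g, b) colour and a radius.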
if "particle_0 geometry" not in marker_sets:
s=new_marker_set('particle_0 geometry')
marker_sets["particle_0 geometry"]=s
s= marker_sets["particle_0 geometry"]
mark=s.place_marker((10146, 13515.6, 4363.58), (0.7, 0.7, 0.7), 507.685)
if "particle_1 geometry" not in marker_sets:
s=new_marker_set('particle_1 geometry')
marker_sets["particle_1 geometry"]=s
s= marker_sets["particle_1 geometry"]
mark=s.place_marker((11104.6, 13539.8, 4035.16), (0.7, 0.7, 0.7), 479.978)
if "particle_2 geometry" not in marker_sets:
s=new_marker_set('particle_2 geometry')
marker_sets["particle_2 geometry"]=s
s= marker_sets["particle_2 geometry"]
mark=s.place_marker((10326.3, 12307.8, 5273.41), (0.7, 0.7, 0.7), 681.834)
if "particle_3 geometry" not in marker_sets:
s=new_marker_set('particle_3 geometry')
marker_sets["particle_3 geometry"]=s
s= marker_sets["particle_3 geometry"]
mark=s.place_marker((9285.01, 10896.6, 6786.74), (0.7, 0.7, 0.7), 522.532)
if "particle_4 geometry" not in marker_sets:
s=new_marker_set('particle_4 geometry')
marker_sets["particle_4 geometry"]=s
s= marker_sets["particle_4 geometry"]
mark=s.place_marker((8908.66, 10394.7, 7216.7), (0, 1, 0), 751.925)
if "particle_5 geometry" not in marker_sets:
s=new_marker_set('particle_5 geometry')
marker_sets["particle_5 geometry"]=s
s= marker_sets["particle_5 geometry"]
mark=s.place_marker((10890.5, 10394.7, 8010.45), (0.7, 0.7, 0.7), 437.001)
if "particle_6 geometry" not in marker_sets:
s=new_marker_set('particle_6 geometry')
marker_sets["particle_6 geometry"]=s
s= marker_sets["particle_6 geometry"]
mark=s.place_marker((10207, 8701.11, 8626.38), (0.7, 0.7, 0.7), 710.767)
if "particle_7 geometry" not in marker_sets:
s=new_marker_set('particle_7 geometry')
marker_sets["particle_7 geometry"]=s
s= marker_sets["particle_7 geometry"]
mark=s.place_marker((10938.7, 7892.91, 9965.87), (0.7, 0.7, 0.7), 762.077)
if "particle_8 geometry" not in marker_sets:
s=new_marker_set('particle_8 geometry')
marker_sets["particle_8 geometry"]=s
s= marker_sets["particle_8 geometry"]
mark=s.place_marker((10271.4, 7026.65, 11099.1), (0.7, 0.7, 0.7), 726.799)
if "particle_9 geometry" not in marker_sets:
s=new_marker_set('particle_9 geometry')
marker_sets["particle_9 geometry"]=s
s= marker_sets["particle_9 geometry"]
mark=s.place_marker((9939.14, 5364.84, 11737.5), (0.7, 0.7, 0.7), 885.508)
if "particle_10 geometry" not in marker_sets:
s=new_marker_set('particle_10 geometry')
marker_sets["particle_10 geometry"]=s
s= marker_sets["particle_10 geometry"]
mark=s.place_marker((8614.78, 5530.37, 12611.1), (0.7, 0.7, 0.7), 778.489)
if "particle_11 geometry" not in marker_sets:
s=new_marker_set('particle_11 geometry')
marker_sets["particle_11 geometry"]=s
s= marker_sets["particle_11 geometry"]
mark=s.place_marker((9172.43, 5526.22, 14651.1), (0.7, 0.7, 0.7), 790.333)
if "particle_12 geometry" not in marker_sets:
s=new_marker_set('particle_12 geometry')
marker_sets["particle_12 geometry"]=s
s= marker_sets["particle_12 geometry"]
mark=s.place_marker((9822.57, 5611.86, 16602.3), (0.7, 0.7, 0.7), 707.721)
if "particle_13 geometry" not in marker_sets:
s=new_marker_set('particle_13 geometry')
marker_sets["particle_13 geometry"]=s
s= marker_sets["particle_13 geometry"]
mark=s.place_marker((10384.1, 6810.37, 15647.5), (0.7, 0.7, 0.7), 651.166)
if "particle_14 geometry" not in marker_sets:
s=new_marker_set('particle_14 geometry')
marker_sets["particle_14 geometry"]=s
s= marker_sets["particle_14 geometry"]
mark=s.place_marker((10208.6, 5176.85, 15390.4), (0.7, 0.7, 0.7), 708.61)
if "particle_15 geometry" not in marker_sets:
s=new_marker_set('particle_15 geometry')
marker_sets["particle_15 geometry"]=s
s= marker_sets["particle_15 geometry"]
mark=s.place_marker((9807.75, 4289.62, 14157.3), (0.7, 0.7, 0.7), 490.595)
if "particle_16 geometry" not in marker_sets:
s=new_marker_set('particle_16 geometry')
marker_sets["particle_16 geometry"]=s
s= marker_sets["particle_16 geometry"]
mark=s.place_marker((9848.64, 4545.62, 12749), (0.7, 0.7, 0.7), 591.565)
if "particle_17 geometry" not in marker_sets:
s=new_marker_set('particle_17 geometry')
marker_sets["particle_17 geometry"]=s
s= marker_sets["particle_17 geometry"]
mark=s.place_marker((9797.57, 4615.05, 11206.4), (0.7, 0.7, 0.7), 581.287)
if "particle_18 geometry" not in marker_sets:
s=new_marker_set('particle_18 geometry')
marker_sets["particle_18 geometry"]=s
s= marker_sets["particle_18 geometry"]
mark=s.place_marker((11363.4, 5312.87, 10774.3), (0.7, 0.7, 0.7), 789.529)
if "particle_19 geometry" not in marker_sets:
s=new_marker_set('particle_19 geometry')
marker_sets["particle_19 geometry"]=s
s= marker_sets["particle_19 geometry"]
mark=s.place_marker((11601.2, 4593.37, 9468.05), (0.7, 0.7, 0.7), 623.587)
if "particle_20 geometry" not in marker_sets:
s=new_marker_set('particle_20 geometry')
marker_sets["particle_20 geometry"]=s
s= marker_sets["particle_20 geometry"]
mark=s.place_marker((11903.5, 3247.46, 8302.99), (0.7, 0.7, 0.7), 1083.56)
if "particle_21 geometry" not in marker_sets:
s=new_marker_set('particle_21 geometry')
marker_sets["particle_21 geometry"]=s
s= marker_sets["particle_21 geometry"]
mark=s.place_marker((12834.5, 1988.44, 7649.14), (0.7, 0.7, 0.7), 504.258)
if "particle_22 geometry" not in marker_sets:
s=new_marker_set('particle_22 geometry')
marker_sets["particle_22 geometry"]=s
s= marker_sets["particle_22 geometry"]
mark=s.place_marker((11546.8, 2650.22, 7526.41), (0.7, 0.7, 0.7), 805.519)
if "particle_23 geometry" not in marker_sets:
s=new_marker_set('particle_23 geometry')
marker_sets["particle_23 geometry"]=s
s= marker_sets["particle_23 geometry"]
mark=s.place_marker((9471.79, 3008.5, 8093.8), (0.7, 0.7, 0.7), 631.708)
if "particle_24 geometry" not in marker_sets:
s=new_marker_set('particle_24 geometry')
marker_sets["particle_24 geometry"]=s
s= marker_sets["particle_24 geometry"]
mark=s.place_marker((7681.77, 2781.95, 9087.17), (0.7, 0.7, 0.7), 805.942)
if "particle_25 geometry" not in marker_sets:
s=new_marker_set('particle_25 geometry')
marker_sets["particle_25 geometry"]=s
s= marker_sets["particle_25 geometry"]
mark=s.place_marker((6792.9, 2398.4, 9651.68), (1, 0.7, 0), 672.697)
if "particle_26 geometry" not in marker_sets:
s=new_marker_set('particle_26 geometry')
marker_sets["particle_26 geometry"]=s
s= marker_sets["particle_26 geometry"]
mark=s.place_marker((5402.46, 3537.02, 7561.23), (0.7, 0.7, 0.7), 797.863)
if "particle_27 geometry" not in marker_sets:
s=new_marker_set('particle_27 geometry')
marker_sets["particle_27 geometry"]=s
s= marker_sets["particle_27 geometry"]
mark=s.place_marker((3958.13, 3552.34, 6422.12), (1, 0.7, 0), 735.682)
if "particle_28 geometry" not in marker_sets:
s=new_marker_set('particle_28 geometry')
marker_sets["particle_28 geometry"]=s
s= marker_sets["particle_28 geometry"]
mark=s.place_marker((4476.17, 3535.25, 5258.57), (0.7, 0.7, 0.7), 602.14)
if "particle_29 geometry" not in marker_sets:
s=new_marker_set('particle_29 geometry')
marker_sets["particle_29 geometry"]=s
s= marker_sets["particle_29 geometry"]
mark=s.place_marker((5277.59, 2822.21, 3201.05), (0.7, 0.7, 0.7), 954.796)
if "particle_30 geometry" not in marker_sets:
s=new_marker_set('particle_30 geometry')
marker_sets["particle_30 geometry"]=s
s= marker_sets["particle_30 geometry"]
mark=s.place_marker((5327.93, 3717.51, 3776.75), (0.7, 0.7, 0.7), 1021.88)
if "particle_31 geometry" not in marker_sets:
s=new_marker_set('particle_31 geometry')
marker_sets["particle_31 geometry"]=s
s= marker_sets["particle_31 geometry"]
mark=s.place_marker((3855.32, 3615.48, 3245.5), (0.7, 0.7, 0.7), 909.323)
if "particle_32 geometry" not in marker_sets:
s=new_marker_set('particle_32 geometry')
marker_sets["particle_32 geometry"]=s
s= marker_sets["particle_32 geometry"]
mark=s.place_marker((2687.22, 4203.25, 1387.64), (0.7, 0.7, 0.7), 621.049)
if "particle_33 geometry" not in marker_sets:
s=new_marker_set('particle_33 geometry')
marker_sets["particle_33 geometry"]=s
s= marker_sets["particle_33 geometry"]
mark=s.place_marker((2368.54, 5623.3, 1410.12), (0.7, 0.7, 0.7), 525.154)
if "particle_34 geometry" not in marker_sets:
s=new_marker_set('particle_34 geometry')
marker_sets["particle_34 geometry"]=s
s= marker_sets["particle_34 geometry"]
mark=s.place_marker((2954.52, 7022.02, 1346.88), (0.7, 0.7, 0.7), 890.246)
if "particle_35 geometry" not in marker_sets:
s=new_marker_set('particle_35 geometry')
marker_sets["particle_35 geometry"]=s
s= marker_sets["particle_35 geometry"]
mark=s.place_marker((3179.99, 8503.03, 384.664), (0.7, 0.7, 0.7), 671.216)
if "particle_36 geometry" not in marker_sets:
s=new_marker_set('particle_36 geometry')
marker_sets["particle_36 geometry"]=s
s= marker_sets["particle_36 geometry"]
mark=s.place_marker((4387.97, 9076.06, -644.18), (0.7, 0.7, 0.7), 662.672)
if "particle_37 geometry" not in marker_sets:
s=new_marker_set('particle_37 geometry')
marker_sets["particle_37 geometry"]=s
s= marker_sets["particle_37 geometry"]
mark=s.place_marker((5128.67, 7608.24, -454.952), (0.7, 0.7, 0.7), 646.682)
if "particle_38 geometry" not in marker_sets:
s=new_marker_set('particle_38 geometry')
marker_sets["particle_38 geometry"]=s
s= marker_sets["particle_38 geometry"]
mark=s.place_marker((3886.95, 6767.78, -673.432), (0.7, 0.7, 0.7), 769.945)
if "particle_39 geometry" not in marker_sets:
s=new_marker_set('particle_39 geometry')
marker_sets["particle_39 geometry"]=s
s= marker_sets["particle_39 geometry"]
mark=s.place_marker((3114.82, 6297.21, 1095.93), (0.7, 0.7, 0.7), 606.92)
if "particle_40 geometry" not in marker_sets:
s=new_marker_set('particle_40 geometry')
marker_sets["particle_40 geometry"]=s
s= marker_sets["particle_40 geometry"]
mark=s.place_marker((1957.53, 6806.16, 995.04), (0.7, 0.7, 0.7), 622.571)
if "particle_41 geometry" not in marker_sets:
s=new_marker_set('particle_41 geometry')
marker_sets["particle_41 geometry"]=s
s= marker_sets["particle_41 geometry"]
mark=s.place_marker((2962, 6517.27, 1895.81), (0.7, 0.7, 0.7), 466.865)
if "particle_42 geometry" not in marker_sets:
s=new_marker_set('particle_42 geometry')
marker_sets["particle_42 geometry"]=s
s= marker_sets["particle_42 geometry"]
mark=s.place_marker((3366.39, 7293.35, 1918.37), (0.7, 0.7, 0.7), 682.933)
if "particle_43 geometry" not in marker_sets:
s=new_marker_set('particle_43 geometry')
marker_sets["particle_43 geometry"]=s
s= marker_sets["particle_43 geometry"]
mark=s.place_marker((2953.08, 6646.79, 1752.87), (0.7, 0.7, 0.7), 809.326)
if "particle_44 geometry" not in marker_sets:
s=new_marker_set('particle_44 geometry')
marker_sets["particle_44 geometry"]=s
s= marker_sets["particle_44 geometry"]
mark=s.place_marker((2889.18, 4921.92, 2385.61), (0.7, 0.7, 0.7), 796.72)
if "particle_45 geometry" not in marker_sets:
s=new_marker_set('particle_45 geometry')
marker_sets["particle_45 geometry"]=s
s= marker_sets["particle_45 geometry"]
mark=s.place_marker((2962.36, 4563.77, 5254.82), (0.7, 0.7, 0.7), 870.026)
if "particle_46 geometry" not in marker_sets:
s=new_marker_set('particle_46 geometry')
marker_sets["particle_46 geometry"]=s
s= marker_sets["particle_46 geometry"]
mark=s.place_marker((1960.76, 5096.73, 6749.88), (0.7, 0.7, 0.7), 909.577)
if "particle_47 geometry" not in marker_sets:
s=new_marker_set('particle_47 geometry')
marker_sets["particle_47 geometry"]=s
s= marker_sets["particle_47 geometry"]
mark=s.place_marker((1506.4, 6030.12, 7303.72), (0, 1, 0), 500.536)
if "particle_48 geometry" not in marker_sets:
s=new_marker_set('particle_48 geometry')
marker_sets["particle_48 geometry"]=s
s= marker_sets["particle_48 geometry"]
mark=s.place_marker((-197.183, 6985, 6954), (0.7, 0.7, 0.7), 725.276)
if "particle_49 geometry" not in marker_sets:
s=new_marker_set('particle_49 geometry')
marker_sets["particle_49 geometry"]=s
s= marker_sets["particle_49 geometry"]
mark=s.place_marker((-2741.64, 7469.42, 6397.24), (0.7, 0.7, 0.7), 570.331)
if "particle_50 geometry" not in marker_sets:
s=new_marker_set('particle_50 geometry')
marker_sets["particle_50 geometry"]=s
s= marker_sets["particle_50 geometry"]
mark=s.place_marker((-2460.74, 7121.29, 4775.62), (0.7, 0.7, 0.7), 492.203)
if "particle_51 geometry" not in marker_sets:
s=new_marker_set('particle_51 geometry')
marker_sets["particle_51 geometry"]=s
s= marker_sets["particle_51 geometry"]
mark=s.place_marker((-988.925, 4617.39, 5134.54), (0, 1, 0), 547.7)
if "particle_52 geometry" not in marker_sets:
s=new_marker_set('particle_52 geometry')
marker_sets["particle_52 geometry"]=s
s= marker_sets["particle_52 geometry"]
mark=s.place_marker((-489.456, 5176.36, 4893.83), (0.7, 0.7, 0.7), 581.921)
if "particle_53 geometry" not in marker_sets:
s=new_marker_set('particle_53 geometry')
marker_sets["particle_53 geometry"]=s
s= marker_sets["particle_53 geometry"]
mark=s.place_marker((-336.837, 6128.5, 3219.78), (0.7, 0.7, 0.7), 555.314)
if "particle_54 geometry" not in marker_sets:
s=new_marker_set('particle_54 geometry')
marker_sets["particle_54 geometry"]=s
s= marker_sets["particle_54 geometry"]
mark=s.place_marker((269.955, 7208.66, 2320.19), (0.7, 0.7, 0.7), 404.219)
if "particle_55 geometry" not in marker_sets:
s=new_marker_set('particle_55 geometry')
marker_sets["particle_55 geometry"]=s
s= marker_sets["particle_55 geometry"]
mark=s.place_marker((1881.91, 7814.42, 3030.95), (0.7, 0.7, 0.7), 764.234)
for k in surf_sets.keys():
chimera.openModels.add([surf_sets[k]])
|
from django.contrib.auth.decorators import login_required
from django.core.exceptions import PermissionDenied
from django.core.urlresolvers import reverse_lazy
from django.utils.decorators import method_decorator
from django.views.generic import (DetailView,
ListView)
from django.views.generic.edit import (CreateView,
DeleteView)
from .forms import CredentialsForm
from .models import Credentials
class LoginRequiredMixin(object):
"""
View mixin which requires that the user is authenticated.
"""
@method_decorator(login_required)
def dispatch(self, *args, **kwargs):
return super(LoginRequiredMixin, self).dispatch(*args, **kwargs)
class CredentialsList(LoginRequiredMixin, ListView):
model = Credentials
def get_queryset(self):
return Credentials.objects.filter(owner=self.request.user).order_by('-created')
class CredentialsCreate(LoginRequiredMixin, CreateView):
model = Credentials
form_class = CredentialsForm
success_url = reverse_lazy('credentials-list')
def form_valid(self, form):
form.instance.owner = self.request.user
return super(CredentialsCreate, self).form_valid(form)
class CredentialsDetail(LoginRequiredMixin, DetailView):
model = Credentials
def get_queryset(self):
return Credentials.objects.filter(owner=self.request.user)
class CredentialsDelete(LoginRequiredMixin, DeleteView):
model = Credentials
success_url = reverse_lazy('credentials-list')
def get_object(self, queryset=None):
obj = super(CredentialsDelete, self).get_object(queryset=queryset)
        # Only the owner of the credentials (or a superuser) may delete them.
        if obj.owner != self.request.user and not self.request.user.is_superuser:
raise PermissionDenied
return obj
|
from django.core import mail
from bedrock.mozorg.tests import TestCase
from funfactory.urlresolvers import reverse
from nose.tools import eq_
class PrivacyFormTest(TestCase):
def setUp(self):
self.contact = 'foo@bar.com'
with self.activate('en-US'):
self.url = reverse('privacy')
self.data = {
'sender': self.contact,
'comments': 'It works!',
}
self.bad_data = {
'sender': '',
'comments': 'Forgot your email!'
}
mail.outbox = []
def tearDown(self):
mail.outbox = []
def test_send_privacy_contact(self):
self.client.post(self.url, self.data)
# Test that message has been sent.
eq_(len(mail.outbox), 1)
outbox = mail.outbox[0]
# Verify that it has the correct subject
eq_(outbox.subject, 'Message sent from Privacy Hub')
# Verify sender
eq_(outbox.from_email, self.contact)
# Verify recipient
eq_(outbox.to, ['yourprivacyis#1@mozilla.com'])
def test_send_privacy_contact_invalid_data(self):
response = self.client.post(reverse('privacy'), self.bad_data)
eq_(response.status_code, 200)
self.assertIn('This field is required, please enter your email address.', response.content)
# Test that message was not sent.
eq_(len(mail.outbox), 0)
def test_honeypot_existence(self):
res = self.client.get(self.url)
self.assertIn('office_fax', res.content)
def test_send_privacy_contact_with_honeypot(self):
hp_data = self.data.copy()
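        # 'office_fax' is the hidden honeypot field; a submission that fills it in is treated as spam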
hp_data['office_fax'] = 'spammer'
res = self.client.post(self.url, hp_data)
self.assertIn("Your request could not be completed. Please try again.", res.content)
|
__author__ = 'kszalai'
|
from openerp.osv import fields, orm
class mgmtsystem_nonconformity_type(orm.Model):
"""Claim Type: Nonconformity, Good Practice, Improvement Opportunity,
Observation, ...
"""
_name = "mgmtsystem.nonconformity.type"
_description = "Claim Type"
_columns = {
'name': fields.char('Title', size=50, required=True, translate=True),
'description': fields.text('Description', translate=True),
'active': fields.boolean('Active?'),
}
_defaults = {
'active': True,
}
|
from odoo.addons.account.tests.common import AccountTestInvoicingCommon
from odoo.tests.common import Form
from odoo.tests import tagged
from odoo import fields
@tagged('post_install', '-at_install')
class TestAccountMoveInRefundOnchanges(AccountTestInvoicingCommon):
@classmethod
def setUpClass(cls, chart_template_ref=None):
super().setUpClass(chart_template_ref=chart_template_ref)
cls.invoice = cls.init_invoice('in_refund', products=cls.product_a+cls.product_b)
cls.product_line_vals_1 = {
'name': cls.product_a.name,
'product_id': cls.product_a.id,
'account_id': cls.product_a.property_account_expense_id.id,
'partner_id': cls.partner_a.id,
'product_uom_id': cls.product_a.uom_id.id,
'quantity': 1.0,
'discount': 0.0,
'price_unit': 800.0,
'price_subtotal': 800.0,
'price_total': 920.0,
'tax_ids': cls.product_a.supplier_taxes_id.ids,
'tax_line_id': False,
'currency_id': cls.company_data['currency'].id,
'amount_currency': -800.0,
'debit': 0.0,
'credit': 800.0,
'date_maturity': False,
'tax_exigible': True,
}
cls.product_line_vals_2 = {
'name': cls.product_b.name,
'product_id': cls.product_b.id,
'account_id': cls.product_b.property_account_expense_id.id,
'partner_id': cls.partner_a.id,
'product_uom_id': cls.product_b.uom_id.id,
'quantity': 1.0,
'discount': 0.0,
'price_unit': 160.0,
'price_subtotal': 160.0,
'price_total': 208.0,
'tax_ids': cls.product_b.supplier_taxes_id.ids,
'tax_line_id': False,
'currency_id': cls.company_data['currency'].id,
'amount_currency': -160.0,
'debit': 0.0,
'credit': 160.0,
'date_maturity': False,
'tax_exigible': True,
}
cls.tax_line_vals_1 = {
'name': cls.tax_purchase_a.name,
'product_id': False,
'account_id': cls.company_data['default_account_tax_purchase'].id,
'partner_id': cls.partner_a.id,
'product_uom_id': False,
'quantity': 1.0,
'discount': 0.0,
'price_unit': 144.0,
'price_subtotal': 144.0,
'price_total': 144.0,
'tax_ids': [],
'tax_line_id': cls.tax_purchase_a.id,
'currency_id': cls.company_data['currency'].id,
'amount_currency': -144.0,
'debit': 0.0,
'credit': 144.0,
'date_maturity': False,
'tax_exigible': True,
}
cls.tax_line_vals_2 = {
'name': cls.tax_purchase_b.name,
'product_id': False,
'account_id': cls.company_data['default_account_tax_purchase'].id,
'partner_id': cls.partner_a.id,
'product_uom_id': False,
'quantity': 1.0,
'discount': 0.0,
'price_unit': 24.0,
'price_subtotal': 24.0,
'price_total': 24.0,
'tax_ids': [],
'tax_line_id': cls.tax_purchase_b.id,
'currency_id': cls.company_data['currency'].id,
'amount_currency': -24.0,
'debit': 0.0,
'credit': 24.0,
'date_maturity': False,
'tax_exigible': True,
}
cls.term_line_vals_1 = {
'name': '',
'product_id': False,
'account_id': cls.company_data['default_account_payable'].id,
'partner_id': cls.partner_a.id,
'product_uom_id': False,
'quantity': 1.0,
'discount': 0.0,
'price_unit': -1128.0,
'price_subtotal': -1128.0,
'price_total': -1128.0,
'tax_ids': [],
'tax_line_id': False,
'currency_id': cls.company_data['currency'].id,
'amount_currency': 1128.0,
'debit': 1128.0,
'credit': 0.0,
'date_maturity': fields.Date.from_string('2019-01-01'),
'tax_exigible': True,
}
cls.move_vals = {
'partner_id': cls.partner_a.id,
'currency_id': cls.company_data['currency'].id,
'journal_id': cls.company_data['default_journal_purchase'].id,
'date': fields.Date.from_string('2019-01-01'),
'fiscal_position_id': False,
'payment_reference': '',
'invoice_payment_term_id': cls.pay_terms_a.id,
'amount_untaxed': 960.0,
'amount_tax': 168.0,
'amount_total': 1128.0,
}
def setUp(self):
super(TestAccountMoveInRefundOnchanges, self).setUp()
self.assertInvoiceValues(self.invoice, [
self.product_line_vals_1,
self.product_line_vals_2,
self.tax_line_vals_1,
self.tax_line_vals_2,
self.term_line_vals_1,
], self.move_vals)
def test_in_refund_line_onchange_product_1(self):
move_form = Form(self.invoice)
with move_form.invoice_line_ids.edit(0) as line_form:
line_form.product_id = self.product_b
move_form.save()
self.assertInvoiceValues(self.invoice, [
{
**self.product_line_vals_1,
'name': self.product_b.name,
'product_id': self.product_b.id,
'product_uom_id': self.product_b.uom_id.id,
'account_id': self.product_b.property_account_expense_id.id,
'price_unit': 160.0,
'price_subtotal': 160.0,
'price_total': 208.0,
'tax_ids': self.product_b.supplier_taxes_id.ids,
'amount_currency': -160.0,
'credit': 160.0,
},
self.product_line_vals_2,
{
**self.tax_line_vals_1,
'price_unit': 48.0,
'price_subtotal': 48.0,
'price_total': 48.0,
'amount_currency': -48.0,
'credit': 48.0,
},
{
**self.tax_line_vals_2,
'price_unit': 48.0,
'price_subtotal': 48.0,
'price_total': 48.0,
'amount_currency': -48.0,
'credit': 48.0,
},
{
**self.term_line_vals_1,
'price_unit': -416.0,
'price_subtotal': -416.0,
'price_total': -416.0,
'amount_currency': 416.0,
'debit': 416.0,
},
], {
**self.move_vals,
'amount_untaxed': 320.0,
'amount_tax': 96.0,
'amount_total': 416.0,
})
def test_in_refund_line_onchange_business_fields_1(self):
move_form = Form(self.invoice)
with move_form.invoice_line_ids.edit(0) as line_form:
# Current price_unit is 800.
# We set quantity = 4, discount = 50%, price_unit = 400. The debit/credit fields don't change because (4 * 400) * 0.5 = 800.
line_form.quantity = 4
line_form.discount = 50
line_form.price_unit = 400
move_form.save()
self.assertInvoiceValues(self.invoice, [
{
**self.product_line_vals_1,
'quantity': 4,
'discount': 50.0,
'price_unit': 400.0,
},
self.product_line_vals_2,
self.tax_line_vals_1,
self.tax_line_vals_2,
self.term_line_vals_1,
], self.move_vals)
move_form = Form(self.invoice)
with move_form.line_ids.edit(2) as line_form:
# Reset field except the discount that becomes 100%.
# /!\ The modification is made on the accounting tab.
line_form.quantity = 1
line_form.discount = 100
line_form.price_unit = 800
move_form.save()
self.assertInvoiceValues(self.invoice, [
{
**self.product_line_vals_1,
'discount': 100.0,
'price_subtotal': 0.0,
'price_total': 0.0,
'amount_currency': 0.0,
'credit': 0.0,
},
self.product_line_vals_2,
{
**self.tax_line_vals_1,
'price_unit': 24.0,
'price_subtotal': 24.0,
'price_total': 24.0,
'amount_currency': -24.0,
'credit': 24.0,
},
self.tax_line_vals_2,
{
**self.term_line_vals_1,
'price_unit': -208.0,
'price_subtotal': -208.0,
'price_total': -208.0,
'amount_currency': 208.0,
'debit': 208.0,
},
], {
**self.move_vals,
'amount_untaxed': 160.0,
'amount_tax': 48.0,
'amount_total': 208.0,
})
def test_in_refund_line_onchange_accounting_fields_1(self):
move_form = Form(self.invoice)
with move_form.line_ids.edit(2) as line_form:
# Custom credit on the first product line.
line_form.credit = 3000
with move_form.line_ids.edit(3) as line_form:
# Custom debit on the second product line. Credit should be reset by onchange.
# /!\ It's a negative line.
line_form.debit = 500
with move_form.line_ids.edit(0) as line_form:
# Custom credit on the first tax line.
line_form.credit = 800
with move_form.line_ids.edit(4) as line_form:
# Custom credit on the second tax line.
line_form.credit = 250
move_form.save()
self.assertInvoiceValues(self.invoice, [
{
**self.product_line_vals_1,
'price_unit': 3000.0,
'price_subtotal': 3000.0,
'price_total': 3450.0,
'amount_currency': -3000.0,
'credit': 3000.0,
},
{
**self.product_line_vals_2,
'price_unit': -500.0,
'price_subtotal': -500.0,
'price_total': -650.0,
'amount_currency': 500.0,
'debit': 500.0,
'credit': 0.0,
},
{
**self.tax_line_vals_1,
'price_unit': 800.0,
'price_subtotal': 800.0,
'price_total': 800.0,
'amount_currency': -800.0,
'credit': 800.0,
},
{
**self.tax_line_vals_2,
'price_unit': 250.0,
'price_subtotal': 250.0,
'price_total': 250.0,
'amount_currency': -250.0,
'credit': 250.0,
},
{
**self.term_line_vals_1,
'price_unit': -3550.0,
'price_subtotal': -3550.0,
'price_total': -3550.0,
'amount_currency': 3550.0,
'debit': 3550.0,
},
], {
**self.move_vals,
'amount_untaxed': 2500.0,
'amount_tax': 1050.0,
'amount_total': 3550.0,
})
def test_in_refund_line_onchange_partner_1(self):
move_form = Form(self.invoice)
move_form.partner_id = self.partner_b
move_form.payment_reference = 'turlututu'
move_form.save()
self.assertInvoiceValues(self.invoice, [
{
**self.product_line_vals_1,
'partner_id': self.partner_b.id,
},
{
**self.product_line_vals_2,
'partner_id': self.partner_b.id,
},
{
**self.tax_line_vals_1,
'partner_id': self.partner_b.id,
},
{
**self.tax_line_vals_2,
'partner_id': self.partner_b.id,
},
{
**self.term_line_vals_1,
'name': 'turlututu',
'partner_id': self.partner_b.id,
'account_id': self.partner_b.property_account_payable_id.id,
'price_unit': -338.4,
'price_subtotal': -338.4,
'price_total': -338.4,
'amount_currency': 338.4,
'debit': 338.4,
},
{
**self.term_line_vals_1,
'name': 'turlututu',
'partner_id': self.partner_b.id,
'account_id': self.partner_b.property_account_payable_id.id,
'price_unit': -789.6,
'price_subtotal': -789.6,
'price_total': -789.6,
'amount_currency': 789.6,
'debit': 789.6,
'date_maturity': fields.Date.from_string('2019-02-28'),
},
], {
**self.move_vals,
'partner_id': self.partner_b.id,
'payment_reference': 'turlututu',
'fiscal_position_id': self.fiscal_pos_a.id,
'invoice_payment_term_id': self.pay_terms_b.id,
'amount_untaxed': 960.0,
'amount_tax': 168.0,
'amount_total': 1128.0,
})
# Remove lines and recreate them to apply the fiscal position.
move_form = Form(self.invoice)
move_form.invoice_line_ids.remove(0)
move_form.invoice_line_ids.remove(0)
with move_form.invoice_line_ids.new() as line_form:
line_form.product_id = self.product_a
with move_form.invoice_line_ids.new() as line_form:
line_form.product_id = self.product_b
move_form.save()
self.assertInvoiceValues(self.invoice, [
{
**self.product_line_vals_1,
'account_id': self.product_b.property_account_expense_id.id,
'partner_id': self.partner_b.id,
'tax_ids': self.tax_purchase_b.ids,
},
{
**self.product_line_vals_2,
'partner_id': self.partner_b.id,
'price_total': 184.0,
'tax_ids': self.tax_purchase_b.ids,
},
{
**self.tax_line_vals_1,
'name': self.tax_purchase_b.name,
'partner_id': self.partner_b.id,
'tax_line_id': self.tax_purchase_b.id,
},
{
**self.term_line_vals_1,
'name': 'turlututu',
'account_id': self.partner_b.property_account_payable_id.id,
'partner_id': self.partner_b.id,
'price_unit': -331.2,
'price_subtotal': -331.2,
'price_total': -331.2,
'amount_currency': 331.2,
'debit': 331.2,
},
{
**self.term_line_vals_1,
'name': 'turlututu',
'account_id': self.partner_b.property_account_payable_id.id,
'partner_id': self.partner_b.id,
'price_unit': -772.8,
'price_subtotal': -772.8,
'price_total': -772.8,
'amount_currency': 772.8,
'debit': 772.8,
'date_maturity': fields.Date.from_string('2019-02-28'),
},
], {
**self.move_vals,
'partner_id': self.partner_b.id,
'payment_reference': 'turlututu',
'fiscal_position_id': self.fiscal_pos_a.id,
'invoice_payment_term_id': self.pay_terms_b.id,
'amount_untaxed': 960.0,
'amount_tax': 144.0,
'amount_total': 1104.0,
})
def test_in_refund_line_onchange_taxes_1(self):
move_form = Form(self.invoice)
with move_form.invoice_line_ids.edit(0) as line_form:
line_form.price_unit = 960
line_form.tax_ids.add(self.tax_armageddon)
move_form.save()
child_tax_1 = self.tax_armageddon.children_tax_ids[0]
child_tax_2 = self.tax_armageddon.children_tax_ids[1]
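        # tax_armageddon is used here as a group tax: the two children fetched
        # above are what generate the extra journal items asserted below
        # (including the line on child_tax_2's cash-basis transition account).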
self.assertInvoiceValues(self.invoice, [
{
**self.product_line_vals_1,
'price_unit': 960.0,
'price_subtotal': 800.0,
'price_total': 1176.0,
'tax_ids': (self.tax_purchase_a + self.tax_armageddon).ids,
'tax_exigible': False,
},
self.product_line_vals_2,
self.tax_line_vals_1,
self.tax_line_vals_2,
{
'name': child_tax_1.name,
'product_id': False,
'account_id': self.company_data['default_account_expense'].id,
'partner_id': self.partner_a.id,
'product_uom_id': False,
'quantity': 1.0,
'discount': 0.0,
'price_unit': 96.0,
'price_subtotal': 96.0,
'price_total': 105.6,
'tax_ids': child_tax_2.ids,
'tax_line_id': child_tax_1.id,
'currency_id': self.company_data['currency'].id,
'amount_currency': -96.0,
'debit': 0.0,
'credit': 96.0,
'date_maturity': False,
'tax_exigible': False,
},
{
'name': child_tax_1.name,
'product_id': False,
'account_id': self.company_data['default_account_tax_sale'].id,
'partner_id': self.partner_a.id,
'product_uom_id': False,
'quantity': 1.0,
'discount': 0.0,
'price_unit': 64.0,
'price_subtotal': 64.0,
'price_total': 70.4,
'tax_ids': child_tax_2.ids,
'tax_line_id': child_tax_1.id,
'currency_id': self.company_data['currency'].id,
'amount_currency': -64.0,
'debit': 0.0,
'credit': 64.0,
'date_maturity': False,
'tax_exigible': False,
},
{
'name': child_tax_2.name,
'product_id': False,
'account_id': child_tax_2.cash_basis_transition_account_id.id,
'partner_id': self.partner_a.id,
'product_uom_id': False,
'quantity': 1.0,
'discount': 0.0,
'price_unit': 96.0,
'price_subtotal': 96.0,
'price_total': 96.0,
'tax_ids': [],
'tax_line_id': child_tax_2.id,
'currency_id': self.company_data['currency'].id,
'amount_currency': -96.0,
'debit': 0.0,
'credit': 96.0,
'date_maturity': False,
'tax_exigible': False,
},
{
**self.term_line_vals_1,
'price_unit': -1384.0,
'price_subtotal': -1384.0,
'price_total': -1384.0,
'amount_currency': 1384.0,
'debit': 1384.0,
},
], {
**self.move_vals,
'amount_untaxed': 960.0,
'amount_tax': 424.0,
'amount_total': 1384.0,
})
def test_in_refund_line_onchange_cash_rounding_1(self):
move_form = Form(self.invoice)
# Add a cash rounding having 'add_invoice_line'.
move_form.invoice_cash_rounding_id = self.cash_rounding_a
move_form.save()
# The cash rounding does nothing as the total is already rounded.
self.assertInvoiceValues(self.invoice, [
self.product_line_vals_1,
self.product_line_vals_2,
self.tax_line_vals_1,
self.tax_line_vals_2,
self.term_line_vals_1,
], self.move_vals)
move_form = Form(self.invoice)
with move_form.invoice_line_ids.edit(0) as line_form:
line_form.price_unit = 799.99
move_form.save()
self.assertInvoiceValues(self.invoice, [
{
'name': 'add_invoice_line',
'product_id': False,
'account_id': self.cash_rounding_a.profit_account_id.id,
'partner_id': self.partner_a.id,
'product_uom_id': False,
'quantity': 1.0,
'discount': 0.0,
'price_unit': 0.01,
'price_subtotal': 0.01,
'price_total': 0.01,
'tax_ids': [],
'tax_line_id': False,
'currency_id': self.company_data['currency'].id,
'amount_currency': -0.01,
'debit': 0.0,
'credit': 0.01,
'date_maturity': False,
'tax_exigible': True,
},
{
**self.product_line_vals_1,
'price_unit': 799.99,
'price_subtotal': 799.99,
'price_total': 919.99,
'amount_currency': -799.99,
'credit': 799.99,
},
self.product_line_vals_2,
self.tax_line_vals_1,
self.tax_line_vals_2,
self.term_line_vals_1,
], self.move_vals)
move_form = Form(self.invoice)
# Change the cash rounding to one having 'biggest_tax'.
move_form.invoice_cash_rounding_id = self.cash_rounding_b
move_form.save()
self.assertInvoiceValues(self.invoice, [
{
**self.product_line_vals_1,
'price_unit': 799.99,
'price_subtotal': 799.99,
'price_total': 919.99,
'amount_currency': -799.99,
'credit': 799.99,
},
self.product_line_vals_2,
self.tax_line_vals_1,
self.tax_line_vals_2,
{
'name': '%s (rounding)' % self.tax_purchase_a.name,
'product_id': False,
'account_id': self.company_data['default_account_tax_purchase'].id,
'partner_id': self.partner_a.id,
'product_uom_id': False,
'quantity': 1.0,
'discount': 0.0,
'price_unit': -0.04,
'price_subtotal': -0.04,
'price_total': -0.04,
'tax_ids': [],
'tax_line_id': self.tax_purchase_a.id,
'currency_id': self.company_data['currency'].id,
'amount_currency': 0.04,
'debit': 0.04,
'credit': 0.0,
'date_maturity': False,
'tax_exigible': True,
},
{
**self.term_line_vals_1,
'price_unit': -1127.95,
'price_subtotal': -1127.95,
'price_total': -1127.95,
'amount_currency': 1127.95,
'debit': 1127.95,
},
], {
**self.move_vals,
'amount_untaxed': 959.99,
'amount_tax': 167.96,
'amount_total': 1127.95,
})
def test_in_refund_line_onchange_currency_1(self):
move_form = Form(self.invoice)
move_form.currency_id = self.currency_data['currency']
move_form.save()
self.assertInvoiceValues(self.invoice, [
{
**self.product_line_vals_1,
'currency_id': self.currency_data['currency'].id,
'amount_currency': -800.0,
'credit': 400.0,
},
{
**self.product_line_vals_2,
'currency_id': self.currency_data['currency'].id,
'amount_currency': -160.0,
'credit': 80.0,
},
{
**self.tax_line_vals_1,
'currency_id': self.currency_data['currency'].id,
'amount_currency': -144.0,
'credit': 72.0,
},
{
**self.tax_line_vals_2,
'currency_id': self.currency_data['currency'].id,
'amount_currency': -24.0,
'credit': 12.0,
},
{
**self.term_line_vals_1,
'currency_id': self.currency_data['currency'].id,
'amount_currency': 1128.0,
'debit': 564.0,
},
], {
**self.move_vals,
'currency_id': self.currency_data['currency'].id,
})
move_form = Form(self.invoice)
# Change the date to get another rate: 1/3 instead of 1/2.
move_form.date = fields.Date.from_string('2016-01-01')
move_form.save()
self.assertInvoiceValues(self.invoice, [
{
**self.product_line_vals_1,
'currency_id': self.currency_data['currency'].id,
'amount_currency': -800.0,
'credit': 266.67,
},
{
**self.product_line_vals_2,
'currency_id': self.currency_data['currency'].id,
'amount_currency': -160.0,
'credit': 53.33,
},
{
**self.tax_line_vals_1,
'currency_id': self.currency_data['currency'].id,
'amount_currency': -144.0,
'credit': 48.0,
},
{
**self.tax_line_vals_2,
'currency_id': self.currency_data['currency'].id,
'amount_currency': -24.0,
'credit': 8.0,
},
{
**self.term_line_vals_1,
'currency_id': self.currency_data['currency'].id,
'amount_currency': 1128.0,
'debit': 376.0,
},
], {
**self.move_vals,
'currency_id': self.currency_data['currency'].id,
'date': fields.Date.from_string('2016-01-01'),
})
move_form = Form(self.invoice)
with move_form.invoice_line_ids.edit(0) as line_form:
# 0.045 * 0.1 = 0.0045. As the foreign currency has a 0.001 rounding,
# the result should be 0.005 after rounding.
line_form.quantity = 0.1
line_form.price_unit = 0.045
move_form.save()
self.assertInvoiceValues(self.invoice, [
{
**self.product_line_vals_1,
'quantity': 0.1,
'price_unit': 0.05,
'price_subtotal': 0.005,
'price_total': 0.006,
'currency_id': self.currency_data['currency'].id,
'amount_currency': -0.005,
'credit': 0.0,
},
{
**self.product_line_vals_2,
'currency_id': self.currency_data['currency'].id,
'amount_currency': -160.0,
'credit': 53.33,
},
{
**self.tax_line_vals_1,
'price_unit': 24.0,
'price_subtotal': 24.001,
'price_total': 24.001,
'currency_id': self.currency_data['currency'].id,
'amount_currency': -24.001,
'credit': 8.0,
},
{
**self.tax_line_vals_2,
'currency_id': self.currency_data['currency'].id,
'amount_currency': -24.0,
'credit': 8.0,
},
{
**self.term_line_vals_1,
'currency_id': self.currency_data['currency'].id,
'price_unit': -208.01,
'price_subtotal': -208.006,
'price_total': -208.006,
'amount_currency': 208.006,
'debit': 69.33,
},
], {
**self.move_vals,
'currency_id': self.currency_data['currency'].id,
'date': fields.Date.from_string('2016-01-01'),
'amount_untaxed': 160.005,
'amount_tax': 48.001,
'amount_total': 208.006,
})
        # Switch back to the company currency (leave the multi-currency setup).
move_form = Form(self.invoice)
move_form.currency_id = self.company_data['currency']
move_form.save()
self.assertInvoiceValues(self.invoice, [
{
**self.product_line_vals_1,
'quantity': 0.1,
'price_unit': 0.05,
'price_subtotal': 0.01,
'price_total': 0.01,
'amount_currency': -0.01,
'credit': 0.01,
},
self.product_line_vals_2,
{
**self.tax_line_vals_1,
'price_unit': 24.0,
'price_subtotal': 24.0,
'price_total': 24.0,
'amount_currency': -24.0,
'credit': 24.0,
},
self.tax_line_vals_2,
{
**self.term_line_vals_1,
'price_unit': -208.01,
'price_subtotal': -208.01,
'price_total': -208.01,
'amount_currency': 208.01,
'debit': 208.01,
},
], {
**self.move_vals,
'currency_id': self.company_data['currency'].id,
'date': fields.Date.from_string('2016-01-01'),
'amount_untaxed': 160.01,
'amount_tax': 48.0,
'amount_total': 208.01,
})
def test_in_refund_onchange_past_invoice_1(self):
copy_invoice = self.invoice.copy()
move_form = Form(self.invoice)
move_form.invoice_line_ids.remove(0)
move_form.invoice_line_ids.remove(0)
move_form.invoice_vendor_bill_id = copy_invoice
move_form.save()
self.assertInvoiceValues(self.invoice, [
self.product_line_vals_1,
self.product_line_vals_2,
self.tax_line_vals_1,
self.tax_line_vals_2,
self.term_line_vals_1,
], self.move_vals)
def test_in_refund_create_1(self):
# Test creating an account_move with the least information.
move = self.env['account.move'].create({
'move_type': 'in_refund',
'partner_id': self.partner_a.id,
'invoice_date': fields.Date.from_string('2019-01-01'),
'currency_id': self.currency_data['currency'].id,
'invoice_payment_term_id': self.pay_terms_a.id,
'invoice_line_ids': [
(0, None, self.product_line_vals_1),
(0, None, self.product_line_vals_2),
]
})
self.assertInvoiceValues(move, [
{
**self.product_line_vals_1,
'currency_id': self.currency_data['currency'].id,
'amount_currency': -800.0,
'credit': 400.0,
},
{
**self.product_line_vals_2,
'currency_id': self.currency_data['currency'].id,
'amount_currency': -160.0,
'credit': 80.0,
},
{
**self.tax_line_vals_1,
'currency_id': self.currency_data['currency'].id,
'amount_currency': -144.0,
'credit': 72.0,
},
{
**self.tax_line_vals_2,
'currency_id': self.currency_data['currency'].id,
'amount_currency': -24.0,
'credit': 12.0,
},
{
**self.term_line_vals_1,
'currency_id': self.currency_data['currency'].id,
'amount_currency': 1128.0,
'debit': 564.0,
},
], {
**self.move_vals,
'currency_id': self.currency_data['currency'].id,
})
def test_in_refund_write_1(self):
        # Test creating an account_move with the least information, then writing an extra invoice line on it.
move = self.env['account.move'].create({
'move_type': 'in_refund',
'partner_id': self.partner_a.id,
'invoice_date': fields.Date.from_string('2019-01-01'),
'currency_id': self.currency_data['currency'].id,
'invoice_payment_term_id': self.pay_terms_a.id,
'invoice_line_ids': [
(0, None, self.product_line_vals_1),
]
})
move.write({
'invoice_line_ids': [
(0, None, self.product_line_vals_2),
]
})
self.assertInvoiceValues(move, [
{
**self.product_line_vals_1,
'currency_id': self.currency_data['currency'].id,
'amount_currency': -800.0,
'credit': 400.0,
},
{
**self.product_line_vals_2,
'currency_id': self.currency_data['currency'].id,
'amount_currency': -160.0,
'credit': 80.0,
},
{
**self.tax_line_vals_1,
'currency_id': self.currency_data['currency'].id,
'amount_currency': -144.0,
'credit': 72.0,
},
{
**self.tax_line_vals_2,
'currency_id': self.currency_data['currency'].id,
'amount_currency': -24.0,
'credit': 12.0,
},
{
**self.term_line_vals_1,
'currency_id': self.currency_data['currency'].id,
'amount_currency': 1128.0,
'debit': 564.0,
},
], {
**self.move_vals,
'currency_id': self.currency_data['currency'].id,
})
|
from openerp.osv import fields, osv
class ple_3_22 (osv.Model):
_name = "l10n_pe.ple_3_22"
_inherit = "l10n_pe.ple"
    _columns = {
'lines_ids': fields.one2many ('l10n_pe.ple_line_3_22', 'ple_3_22_id', 'Lines', readonly=True, states={'draft':[('readonly',False)],}),
}
def action_reload (self, cr, uid, ids, context=None):
#TODO
return True
def action_report (self, cr, uid, ids, context=None):
assert ids and len(ids)==1
return {
'type': 'ir.actions.report.xml',
'report_name': 'l10n_pe.sunat_3_22',
'datas': {},
}
def get_output_lines (self, cr, uid, ids, context=None):
assert ids and len(ids)==1
ple = self.browse (cr, uid, ids[0], context=context)
res = []
for r in ple.lines_ids:
elements = [
"%s%s00" % (ple.period_id.date_start[0:4], ple.period_id.date_start[5:7]),
self.convert_str(r.catalog_code_2),
self.convert_str(r.account_code_3),
                self.convert_str(r.account_description_4),
self.convert_str(r.operation_state_5),
]
res.append('|'.join(elements))
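            # Joining with '|' yields one pipe-delimited PLE record, e.g.
            # (hypothetical values): "20160100|1|101|Caja y Bancos|1"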
return res
def get_output_filename (self, cr, uid, ids, context=None):
return "sunat_3_22.txt"
class ple_line_3_22 (osv.Model):
_name = 'l10n_pe.ple_line_3_22'
_inherit = 'l10n_pe.ple_line'
def _get_catalog_code_selection (self, cr, uid, context=None):
bt_obj = self.pool.get('base.element')
return bt_obj.get_as_selection(cr, uid, 'PE.SUNAT.TABLA_22', context=context)
_columns = {
        'ple_3_22_id': fields.many2one('l10n_pe.ple_3_22', 'PLE', ondelete='cascade'),
'state': fields.related ('ple_3_22_id', 'state', string='State', type="char"),
'company_id': fields.related ('ple_3_22_id', 'company_id', string='Company', type="many2one", relation="res.company"),
'catalog_code_2': fields.selection(_get_catalog_code_selection, "Catalog code", required=True, size=1, help="Código del catálogo"),
'account_code_3': fields.char("Account code", size=3, required=True, help="Código de la Cuenta Contable y/o Partida de los Estados Financieros"),
'account_description_4': fields.char("Account description", size=100, required=True, help="Descripción de la Cuenta Contable y/o Partida de los Estados Financieros"),
'operation_state_5': fields.selection ([
('1', '1'),
('8', '8'),
('9', '9'),
], 'Operation state', required=True, help="""
Registrar '1' cuando la operación corresponde al periodo,
'8' cuando la operación corresponde a un periodo anterior y NO ha sido anotada en dicho periodo o
'9' cuando la operación corresponde a un periodo anterior y SI ha sido anotada en dicho periodo."""),
}
_order = 'sequence'
_defaults = {
'operation_state_5': '1',
}
def onchange_account_id (self, cr, uid, ids, account_id, context=None):
vals = {}
        if account_id:
account_obj = self.pool.get('account.account')
account = account_obj.browse (cr, uid, account_id, context=context)
vals['account_code_3'] = account.code
vals['account_description_4'] = account.name
return {'value': vals}
else:
return False
class ple_configuration (osv.osv):
_inherit = "l10n_pe.ple_configuration"
def get_report_type (self, cr, uid, context=None):
rep_types = super(ple_configuration, self).get_report_type(cr, uid, context=context)
rep_types.append (('3_22', '3.22 Cuentas y partidas de los estados financieros'))
return sorted(rep_types, key=lambda e: e[0])
|
"""
| Database of molecules that are challenging to optimize.
| Geometries from Baker J. Comput. Chem. 14 1085 (1993), as reported
in Bakken and Helgaker, J. Chem. Phys. 117, 9160 (2002), with a few
further corrections.
| No reference energies defined.
- **cp** ``'off'``
- **rlxd** ``'off'``
- **subset**
- ``'small'``
- ``'large'``
"""
import re
import qcdb
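# Typical access is through the qcdb/Psi4 database driver; a hypothetical call
# (exact wrapper signature depends on the driver version) might look like
#   database('scf', 'BAKERJCC93', subset='small')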
dbse = 'BAKERJCC93'
isOS = 'true'
HRXN = ['1_3_5_trifluorobenzene', '1_3_5_trisilacyclohexane',
'1_3_difluorobenzene', '1_5_difluoronaphthalene',
'2_hydroxybicyclopentane', 'ACANIL01', 'acetone', 'acetylene', 'ACHTAR10',
'allene', 'ammonia', 'benzaldehyde', 'benzene', 'benzidine', 'caffeine',
'difuropyrazine', 'dimethylpentane', 'disilyl_ether', 'ethane', 'ethanol',
'furan', 'histidine', 'hydroxysulphane', 'menthone', 'mesityl_oxide',
'methylamine', 'naphthalene', 'neopentane', 'pterin', 'water', ]
HRXN_SM = ['1_3_5_trisilacyclohexane', '2_hydroxybicyclopentane',
'acetone', 'acetylene', 'allene', 'ammonia', 'benzene', 'disilyl_ether',
'ethane', 'ethanol', 'furan', 'hydroxysulphane', 'methylamine',
'neopentane', 'water']
HRXN_LG = ['1_3_difluorobenzene', '1_3_5_trifluorobenzene',
'1_5_difluoronaphthalene', 'ACANIL01', 'ACHTAR10', 'benzaldehyde',
'benzidine', 'caffeine', 'difuropyrazine', 'dimethylpentane', 'histidine',
'menthone', 'mesityl_oxide', 'naphthalene', 'pterin']
RXNM = {} # reaction matrix of reagent contributions per reaction
ACTV = {} # order of active reagents per reaction
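# Each single-molecule "reaction" lists its lone reagent with weight +1, e.g.
#   ACTV['BAKERJCC93-water'] == ['BAKERJCC93-water-reagent']
#   RXNM['BAKERJCC93-water'] == {'BAKERJCC93-water-reagent': +1}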
ACTV['%s-%s' % (dbse, '1_3_5_trifluorobenzene' )] = ['%s-%s-reagent' % (dbse, '1_3_5_trifluorobenzene')]
RXNM['%s-%s' % (dbse, '1_3_5_trifluorobenzene' )] = dict(zip(ACTV['%s-%s' % (dbse, '1_3_5_trifluorobenzene')], [+1]))
ACTV['%s-%s' % (dbse, '1_3_5_trisilacyclohexane' )] = ['%s-%s-reagent' % (dbse, '1_3_5_trisilacyclohexane')]
RXNM['%s-%s' % (dbse, '1_3_5_trisilacyclohexane' )] = dict(zip(ACTV['%s-%s' % (dbse, '1_3_5_trisilacyclohexane')], [+1]))
ACTV['%s-%s' % (dbse, '1_3_difluorobenzene' )] = ['%s-%s-reagent' % (dbse, '1_3_difluorobenzene')]
RXNM['%s-%s' % (dbse, '1_3_difluorobenzene' )] = dict(zip(ACTV['%s-%s' % (dbse, '1_3_difluorobenzene')], [+1]))
ACTV['%s-%s' % (dbse, '1_5_difluoronaphthalene' )] = ['%s-%s-reagent' % (dbse, '1_5_difluoronaphthalene')]
RXNM['%s-%s' % (dbse, '1_5_difluoronaphthalene' )] = dict(zip(ACTV['%s-%s' % (dbse, '1_5_difluoronaphthalene')], [+1]))
ACTV['%s-%s' % (dbse, '2_hydroxybicyclopentane' )] = ['%s-%s-reagent' % (dbse, '2_hydroxybicyclopentane')]
RXNM['%s-%s' % (dbse, '2_hydroxybicyclopentane' )] = dict(zip(ACTV['%s-%s' % (dbse, '2_hydroxybicyclopentane')], [+1]))
ACTV['%s-%s' % (dbse, 'ACANIL01' )] = ['%s-%s-reagent' % (dbse, 'ACANIL01')]
RXNM['%s-%s' % (dbse, 'ACANIL01' )] = dict(zip(ACTV['%s-%s' % (dbse, 'ACANIL01')], [+1]))
ACTV['%s-%s' % (dbse, 'acetone' )] = ['%s-%s-reagent' % (dbse, 'acetone')]
RXNM['%s-%s' % (dbse, 'acetone' )] = dict(zip(ACTV['%s-%s' % (dbse, 'acetone')], [+1]))
ACTV['%s-%s' % (dbse, 'acetylene' )] = ['%s-%s-reagent' % (dbse, 'acetylene')]
RXNM['%s-%s' % (dbse, 'acetylene' )] = dict(zip(ACTV['%s-%s' % (dbse, 'acetylene')], [+1]))
ACTV['%s-%s' % (dbse, 'ACHTAR10' )] = ['%s-%s-reagent' % (dbse, 'ACHTAR10')]
RXNM['%s-%s' % (dbse, 'ACHTAR10' )] = dict(zip(ACTV['%s-%s' % (dbse, 'ACHTAR10')], [+1]))
ACTV['%s-%s' % (dbse, 'allene' )] = ['%s-%s-reagent' % (dbse, 'allene')]
RXNM['%s-%s' % (dbse, 'allene' )] = dict(zip(ACTV['%s-%s' % (dbse, 'allene')], [+1]))
ACTV['%s-%s' % (dbse, 'ammonia' )] = ['%s-%s-reagent' % (dbse, 'ammonia')]
RXNM['%s-%s' % (dbse, 'ammonia' )] = dict(zip(ACTV['%s-%s' % (dbse, 'ammonia')], [+1]))
ACTV['%s-%s' % (dbse, 'benzaldehyde' )] = ['%s-%s-reagent' % (dbse, 'benzaldehyde')]
RXNM['%s-%s' % (dbse, 'benzaldehyde' )] = dict(zip(ACTV['%s-%s' % (dbse, 'benzaldehyde')], [+1]))
ACTV['%s-%s' % (dbse, 'benzene' )] = ['%s-%s-reagent' % (dbse, 'benzene')]
RXNM['%s-%s' % (dbse, 'benzene' )] = dict(zip(ACTV['%s-%s' % (dbse, 'benzene')], [+1]))
ACTV['%s-%s' % (dbse, 'benzidine' )] = ['%s-%s-reagent' % (dbse, 'benzidine')]
RXNM['%s-%s' % (dbse, 'benzidine' )] = dict(zip(ACTV['%s-%s' % (dbse, 'benzidine')], [+1]))
ACTV['%s-%s' % (dbse, 'caffeine' )] = ['%s-%s-reagent' % (dbse, 'caffeine')]
RXNM['%s-%s' % (dbse, 'caffeine' )] = dict(zip(ACTV['%s-%s' % (dbse, 'caffeine')], [+1]))
ACTV['%s-%s' % (dbse, 'difuropyrazine' )] = ['%s-%s-reagent' % (dbse, 'difuropyrazine')]
RXNM['%s-%s' % (dbse, 'difuropyrazine' )] = dict(zip(ACTV['%s-%s' % (dbse, 'difuropyrazine')], [+1]))
ACTV['%s-%s' % (dbse, 'dimethylpentane' )] = ['%s-%s-reagent' % (dbse, 'dimethylpentane')]
RXNM['%s-%s' % (dbse, 'dimethylpentane' )] = dict(zip(ACTV['%s-%s' % (dbse, 'dimethylpentane')], [+1]))
ACTV['%s-%s' % (dbse, 'disilyl_ether' )] = ['%s-%s-reagent' % (dbse, 'disilyl_ether')]
RXNM['%s-%s' % (dbse, 'disilyl_ether' )] = dict(zip(ACTV['%s-%s' % (dbse, 'disilyl_ether')], [+1]))
ACTV['%s-%s' % (dbse, 'ethane' )] = ['%s-%s-reagent' % (dbse, 'ethane')]
RXNM['%s-%s' % (dbse, 'ethane' )] = dict(zip(ACTV['%s-%s' % (dbse, 'ethane')], [+1]))
ACTV['%s-%s' % (dbse, 'ethanol' )] = ['%s-%s-reagent' % (dbse, 'ethanol')]
RXNM['%s-%s' % (dbse, 'ethanol' )] = dict(zip(ACTV['%s-%s' % (dbse, 'ethanol')], [+1]))
ACTV['%s-%s' % (dbse, 'furan' )] = ['%s-%s-reagent' % (dbse, 'furan')]
RXNM['%s-%s' % (dbse, 'furan' )] = dict(zip(ACTV['%s-%s' % (dbse, 'furan')], [+1]))
ACTV['%s-%s' % (dbse, 'histidine' )] = ['%s-%s-reagent' % (dbse, 'histidine')]
RXNM['%s-%s' % (dbse, 'histidine' )] = dict(zip(ACTV['%s-%s' % (dbse, 'histidine')], [+1]))
ACTV['%s-%s' % (dbse, 'hydroxysulphane' )] = ['%s-%s-reagent' % (dbse, 'hydroxysulphane')]
RXNM['%s-%s' % (dbse, 'hydroxysulphane' )] = dict(zip(ACTV['%s-%s' % (dbse, 'hydroxysulphane')], [+1]))
ACTV['%s-%s' % (dbse, 'menthone' )] = ['%s-%s-reagent' % (dbse, 'menthone')]
RXNM['%s-%s' % (dbse, 'menthone' )] = dict(zip(ACTV['%s-%s' % (dbse, 'menthone')], [+1]))
ACTV['%s-%s' % (dbse, 'mesityl_oxide' )] = ['%s-%s-reagent' % (dbse, 'mesityl_oxide')]
RXNM['%s-%s' % (dbse, 'mesityl_oxide' )] = dict(zip(ACTV['%s-%s' % (dbse, 'mesityl_oxide')], [+1]))
ACTV['%s-%s' % (dbse, 'methylamine' )] = ['%s-%s-reagent' % (dbse, 'methylamine')]
RXNM['%s-%s' % (dbse, 'methylamine' )] = dict(zip(ACTV['%s-%s' % (dbse, 'methylamine')], [+1]))
ACTV['%s-%s' % (dbse, 'naphthalene' )] = ['%s-%s-reagent' % (dbse, 'naphthalene')]
RXNM['%s-%s' % (dbse, 'naphthalene' )] = dict(zip(ACTV['%s-%s' % (dbse, 'naphthalene')], [+1]))
ACTV['%s-%s' % (dbse, 'neopentane' )] = ['%s-%s-reagent' % (dbse, 'neopentane')]
RXNM['%s-%s' % (dbse, 'neopentane' )] = dict(zip(ACTV['%s-%s' % (dbse, 'neopentane')], [+1]))
ACTV['%s-%s' % (dbse, 'pterin' )] = ['%s-%s-reagent' % (dbse, 'pterin')]
RXNM['%s-%s' % (dbse, 'pterin' )] = dict(zip(ACTV['%s-%s' % (dbse, 'pterin')], [+1]))
ACTV['%s-%s' % (dbse, 'water' )] = ['%s-%s-reagent' % (dbse, 'water')]
RXNM['%s-%s' % (dbse, 'water' )] = dict(zip(ACTV['%s-%s' % (dbse, 'water')], [+1]))
BIND = {}
BIND['%s-%s' % (dbse, '1_3_5_trifluorobenzene' )] = 0.000
BIND['%s-%s' % (dbse, '1_3_5_trisilacyclohexane' )] = 0.000
BIND['%s-%s' % (dbse, '1_3_difluorobenzene' )] = 0.000
BIND['%s-%s' % (dbse, '1_5_difluoronaphthalene' )] = 0.000
BIND['%s-%s' % (dbse, '2_hydroxybicyclopentane' )] = 0.000
BIND['%s-%s' % (dbse, 'ACANIL01' )] = 0.000
BIND['%s-%s' % (dbse, 'acetone' )] = 0.000
BIND['%s-%s' % (dbse, 'acetylene' )] = 0.000
BIND['%s-%s' % (dbse, 'ACHTAR10' )] = 0.000
BIND['%s-%s' % (dbse, 'allene' )] = 0.000
BIND['%s-%s' % (dbse, 'ammonia' )] = 0.000
BIND['%s-%s' % (dbse, 'benzaldehyde' )] = 0.000
BIND['%s-%s' % (dbse, 'benzene' )] = 0.000
BIND['%s-%s' % (dbse, 'benzidine' )] = 0.000
BIND['%s-%s' % (dbse, 'caffeine' )] = 0.000
BIND['%s-%s' % (dbse, 'difuropyrazine' )] = 0.000
BIND['%s-%s' % (dbse, 'dimethylpentane' )] = 0.000
BIND['%s-%s' % (dbse, 'disilyl_ether' )] = 0.000
BIND['%s-%s' % (dbse, 'ethane' )] = 0.000
BIND['%s-%s' % (dbse, 'ethanol' )] = 0.000
BIND['%s-%s' % (dbse, 'furan' )] = 0.000
BIND['%s-%s' % (dbse, 'histidine' )] = 0.000
BIND['%s-%s' % (dbse, 'hydroxysulphane' )] = 0.000
BIND['%s-%s' % (dbse, 'menthone' )] = 0.000
BIND['%s-%s' % (dbse, 'mesityl_oxide' )] = 0.000
BIND['%s-%s' % (dbse, 'methylamine' )] = 0.000
BIND['%s-%s' % (dbse, 'naphthalene' )] = 0.000
BIND['%s-%s' % (dbse, 'neopentane' )] = 0.000
BIND['%s-%s' % (dbse, 'pterin' )] = 0.000
BIND['%s-%s' % (dbse, 'water' )] = 0.000
TAGL = {}
TAGL['%s-%s' % (dbse, '1_3_5_trifluorobenzene' )] = ''
TAGL['%s-%s-reagent' % (dbse, '1_3_5_trifluorobenzene' )] = ''
TAGL['%s-%s' % (dbse, '1_3_5_trisilacyclohexane' )] = ''
TAGL['%s-%s-reagent' % (dbse, '1_3_5_trisilacyclohexane' )] = ''
TAGL['%s-%s' % (dbse, '1_3_difluorobenzene' )] = ''
TAGL['%s-%s-reagent' % (dbse, '1_3_difluorobenzene' )] = ''
TAGL['%s-%s' % (dbse, '1_5_difluoronaphthalene' )] = ''
TAGL['%s-%s-reagent' % (dbse, '1_5_difluoronaphthalene' )] = ''
TAGL['%s-%s' % (dbse, '2_hydroxybicyclopentane' )] = ''
TAGL['%s-%s-reagent' % (dbse, '2_hydroxybicyclopentane' )] = ''
TAGL['%s-%s' % (dbse, 'ACANIL01' )] = ''
TAGL['%s-%s-reagent' % (dbse, 'ACANIL01' )] = ''
TAGL['%s-%s' % (dbse, 'acetone' )] = ''
TAGL['%s-%s-reagent' % (dbse, 'acetone' )] = ''
TAGL['%s-%s' % (dbse, 'acetylene' )] = ''
TAGL['%s-%s-reagent' % (dbse, 'acetylene' )] = ''
TAGL['%s-%s' % (dbse, 'ACHTAR10' )] = ''
TAGL['%s-%s-reagent' % (dbse, 'ACHTAR10' )] = ''
TAGL['%s-%s' % (dbse, 'allene' )] = ''
TAGL['%s-%s-reagent' % (dbse, 'allene' )] = ''
TAGL['%s-%s' % (dbse, 'ammonia' )] = ''
TAGL['%s-%s-reagent' % (dbse, 'ammonia' )] = ''
TAGL['%s-%s' % (dbse, 'benzaldehyde' )] = ''
TAGL['%s-%s-reagent' % (dbse, 'benzaldehyde' )] = ''
TAGL['%s-%s' % (dbse, 'benzene' )] = ''
TAGL['%s-%s-reagent' % (dbse, 'benzene' )] = ''
TAGL['%s-%s' % (dbse, 'benzidine' )] = ''
TAGL['%s-%s-reagent' % (dbse, 'benzidine' )] = ''
TAGL['%s-%s' % (dbse, 'caffeine' )] = ''
TAGL['%s-%s-reagent' % (dbse, 'caffeine' )] = ''
TAGL['%s-%s' % (dbse, 'difuropyrazine' )] = ''
TAGL['%s-%s-reagent' % (dbse, 'difuropyrazine' )] = ''
TAGL['%s-%s' % (dbse, 'dimethylpentane' )] = ''
TAGL['%s-%s-reagent' % (dbse, 'dimethylpentane' )] = ''
TAGL['%s-%s' % (dbse, 'disilyl_ether' )] = ''
TAGL['%s-%s-reagent' % (dbse, 'disilyl_ether' )] = ''
TAGL['%s-%s' % (dbse, 'ethane' )] = ''
TAGL['%s-%s-reagent' % (dbse, 'ethane' )] = ''
TAGL['%s-%s' % (dbse, 'ethanol' )] = ''
TAGL['%s-%s-reagent' % (dbse, 'ethanol' )] = ''
TAGL['%s-%s' % (dbse, 'furan' )] = ''
TAGL['%s-%s-reagent' % (dbse, 'furan' )] = ''
TAGL['%s-%s' % (dbse, 'histidine' )] = ''
TAGL['%s-%s-reagent' % (dbse, 'histidine' )] = ''
TAGL['%s-%s' % (dbse, 'hydroxysulphane' )] = ''
TAGL['%s-%s-reagent' % (dbse, 'hydroxysulphane' )] = ''
TAGL['%s-%s' % (dbse, 'menthone' )] = ''
TAGL['%s-%s-reagent' % (dbse, 'menthone' )] = ''
TAGL['%s-%s' % (dbse, 'mesityl_oxide' )] = ''
TAGL['%s-%s-reagent' % (dbse, 'mesityl_oxide' )] = ''
TAGL['%s-%s' % (dbse, 'methylamine' )] = ''
TAGL['%s-%s-reagent' % (dbse, 'methylamine' )] = ''
TAGL['%s-%s' % (dbse, 'naphthalene' )] = ''
TAGL['%s-%s-reagent' % (dbse, 'naphthalene' )] = ''
TAGL['%s-%s' % (dbse, 'neopentane' )] = ''
TAGL['%s-%s-reagent' % (dbse, 'neopentane' )] = ''
TAGL['%s-%s' % (dbse, 'pterin' )] = ''
TAGL['%s-%s-reagent' % (dbse, 'pterin' )] = ''
TAGL['%s-%s' % (dbse, 'water' )] = ''
TAGL['%s-%s-reagent' % (dbse, 'water' )] = ''
GEOS = {}
GEOS['%s-%s-reagent' % (dbse, '1_3_5_trifluorobenzene')] = qcdb.Molecule("""
0 1
F 4.45124771 2.56992907 0.00000000
F -4.45124771 2.56992907 0.00000000
F 0.00000000 -5.13985813 0.00000000
C 2.27501122 1.31348 0.00000000
C -2.27501122 1.31348 0.00000000
C 0.00000000 -2.62695668 0.00000000
C 2.27446593 -1.31316352 0.00000000
C -2.27446593 -1.31316352 0.00000000
C 0.00000000 2.62632703 0.00000000
H 4.04176646 -2.33351496 0.00000000
H -4.04176646 -2.33351496 0.00000000
H 0.00000000 4.66702991 0.00000000
units bohr
""")
GEOS['%s-%s-reagent' % (dbse, '1_3_5_trisilacyclohexane')] = qcdb.Molecule("""
0 1
Si 2.87562701 1.66024403 0.50009833
Si -2.87562701 1.66024403 0.50009833
Si 0.00000000 -3.32048805 0.50009833
C 0.00000000 3.31617083 -0.65645952
C 2.87188818 -1.65808542 -0.65645952
C -2.87188818 -1.65808542 -0.65645952
H 0.00000000 5.25402682 0.04550787
H 4.55012070 -2.62701341 0.04550787
H -4.55012070 -2.62701341 0.04550787
H 0.00000000 3.33620321 -2.71676085
H 2.88923673 -1.66810160 -2.71676085
H -2.88923673 -1.66810160 -2.71676085
H 5.14953250 2.97308398 -0.46837999
H -5.14953250 2.97308398 -0.46837999
H 2.91112385 1.68073814 3.29599415
H -2.91112385 1.68073814 3.29599415
H 0.00000000 -3.36147627 3.29599415
H 0.00000000 -5.94616795 -0.46837999
units bohr
""")
GEOS['%s-%s-reagent' % (dbse, '1_3_difluorobenzene')] = qcdb.Molecule("""
0 1
F 4.45098629 2.53075455 0.00000000
F -4.45098629 2.53075455 0.00000000
C 2.27459315 -1.35284979 0.00000000
C -2.27459315 -1.35284979 0.00000000
C 2.27465109 1.27385640 0.00000000
C -2.27465109 1.27385640 0.00000000
C 0.00000000 2.58727941 0.00000000
C 0.00000000 -2.66641919 0.00000000
H 4.04232694 -2.37256182 0.00000000
H -4.04232694 -2.37256182 0.00000000
H 0.00000000 4.62804882 0.00000000
H 0.00000000 -4.70730774 0.00000000
units bohr
""")
GEOS['%s-%s-reagent' % (dbse, '1_5_difluoronaphthalene')] = qcdb.Molecule("""
0 1
F 5.77442810 0.00000000 0.00000000
F -5.77442810 0.00000000 0.00000000
C 0.72785457 -4.70254512 0.00000000
C -0.72785457 4.70254512 0.00000000
C 3.11062174 -3.60249243 0.00000000
C -3.11062174 3.60249243 0.00000000
C 3.38479931 -0.98799287 0.00000000
C -3.38479931 0.98799287 0.00000000
C 1.23776851 0.57124055 0.00000000
C -1.23776851 -0.57124055 0.00000000
C 1.43014268 3.20907701 0.00000000
C -1.43014268 -3.20907701 0.00000000
H 0.55204008 -6.73646406 0.00000000
H -0.55204008 6.73646406 0.00000000
H 4.76445952 -4.80069021 0.00000000
H -4.76445952 4.80069021 0.00000000
H 3.24999844 4.13948522 0.00000000
H -3.24999844 -4.13948522 0.00000000
units bohr
""")
GEOS['%s-%s-reagent' % (dbse, '2_hydroxybicyclopentane')] = qcdb.Molecule("""
0 1
O 0.00000000 0.00000000 3.97630549
C 0.61275612 1.71787828 -0.25674160
C -1.25240609 0.75430367 1.72991074
C -1.89991796 -1.49181590 0.06218311
C 2.64764592 0.00000000 -1.36190849
C -0.10732099 -0.53140328 -1.99092676
H -2.85601026 2.05561017 2.08353099
H 0.13348920 3.49094037 -1.26103342
H 3.57368102 -1.34558615 -0.05933199
H 3.80698111 0.79833379 -2.90613711
H -1.33579202 -3.34159783 0.85888891
H -3.90122993 -1.54049270 -0.53915310
H -0.93405780 0.26002983 -3.74579160
H 1.51218168 -0.82620025 3.41020482
units bohr
""")
GEOS['%s-%s-reagent' % (dbse, 'ACANIL01')] = qcdb.Molecule("""
0 1
O 6.74334167 0.00000000 0.00000000
N 2.75125398 -0.91996681 0.00000000
C -3.75958919 -3.62046813 0.00000000
C -1.13660145 -3.38720984 0.00000000
C 0.00427371 -1.00318363 0.00000000
C -1.53985353 1.15387105 0.00000000
C -4.16293704 0.91831969 0.00000000
C -5.26811078 -1.46724271 0.00000000
C 4.57389611 0.80511522 0.00000000
C 4.13207020 3.64054919 0.00000000
H -4.62306754 -5.47176436 0.00000000
H -0.00377805 -5.08765397 0.00000000
H -0.76505606 3.03326534 0.00000000
H -5.34041651 2.58758240 0.00000000
H -7.30266577 -1.64802574 0.00000000
H 3.58082506 -2.66479209 0.00000000
H 5.95212032 4.66178191 0.00000000
H 3.08214744 4.23491124 1.70076220
H 3.08214744 4.23491124 -1.70076220
units bohr
""")
GEOS['%s-%s-reagent' % (dbse, 'acetone')] = qcdb.Molecule("""
0 1
O 0.00000000 3.46695757 0.00000000
C 0.00000000 1.14032594 0.00000000
C 0.00000000 -0.29542841 2.50138172
C 0.00000000 -0.29542841 -2.50138172
H 0.00000000 1.00440652 4.13754069
H 0.00000000 1.00440652 -4.13754069
H 1.69360304 -1.50630994 2.66984804
H -1.69360304 -1.50630994 2.66984804
H 1.69360304 -1.50630994 -2.66984804
H -1.69360304 -1.50630994 -2.66984804
units bohr
""")
GEOS['%s-%s-reagent' % (dbse, 'acetylene')] = qcdb.Molecule("""
0 1
C 0.00000000 0.00000000 1.13383600
C 0.00000000 0.00000000 -1.13383600
H 0.00000000 0.00000000 3.02356266
H 0.00000000 0.00000000 -3.02356266
units bohr
""")
GEOS['%s-%s-reagent' % (dbse, 'ACHTAR10')] = qcdb.Molecule("""
0 1
O 0.00000000 0.00000000 3.93735249
O 1.79875939 0.00000000 -0.09531034
N -4.40589519 1.32037243 -3.31810156
C -2.43021636 -0.18962157 -2.05696026
C -0.22185404 1.49597798 -1.20775357
C 1.69726730 -0.59259412 2.46067577
C 3.97685548 -2.11479138 3.27934906
H -3.68043380 2.27933244 -4.84082518
H -5.10144333 2.68085421 -2.12147722
H -3.24985392 -1.18842676 -0.41051393
H -1.74547418 -1.68142667 -3.35347133
H 0.55351430 2.51912058 -2.85842920
H -0.88071695 2.99188292 0.10524925
H 5.73529679 -1.04410557 2.94759034
H 4.08562680 -3.90736002 2.21955987
H 3.86856770 -2.56921447 5.31306580
units bohr
""")
GEOS['%s-%s-reagent' % (dbse, 'allene')] = qcdb.Molecule("""
0 1
C 0.00000000 0.00000000 0.00000000
C 0.00000000 2.49419295 0.00000000
C 0.00000000 -2.49419295 0.00000000
H 1.76772016 -3.51503166 0.00000000
H -1.76772016 -3.51503166 0.00000000
H 0.00000000 3.51503166 1.76772016
H 0.00000000 3.51503166 -1.76772016
units bohr
""")
GEOS['%s-%s-reagent' % (dbse, 'ammonia')] = qcdb.Molecule("""
0 1
N 0.00000000 0.00000000 0.47690250
H 1.55848945 0.89979432 -0.15896750
H -1.55848945 0.89979432 -0.15896750
H 0.00000000 -1.79958864 -0.15896750
units bohr
""")
GEOS['%s-%s-reagent' % (dbse, 'benzaldehyde')] = qcdb.Molecule("""
0 1
O 6.11695944 0.00000000 0.00000000
C -0.42811838 -2.25953622 0.00000000
C -2.92869352 -1.43478712 0.00000000
C -3.46561640 1.14118082 0.00000000
C -1.50611491 2.89722764 0.00000000
C 0.99614123 2.07851844 0.00000000
C 1.55290207 -0.51034434 0.00000000
C 4.31002394 -1.46969818 0.00000000
H 4.69277313 -3.52434043 0.00000000
H -0.04838912 -4.26733408 0.00000000
H -4.45167820 -2.79426839 0.00000000
H -5.40516702 1.77808408 0.00000000
H -1.92653663 4.89495992 0.00000000
H 2.49151439 3.47033786 0.00000000
units bohr
""")
GEOS['%s-%s-reagent' % (dbse, 'benzene')] = qcdb.Molecule("""
0 1
C 0.00000000 2.63452745 0.00000000
C 0.00000000 -2.63452745 0.00000000
C 2.28156770 1.31726373 0.00000000
C -2.28156770 1.31726373 0.00000000
C 2.28156770 -1.31726373 0.00000000
C -2.28156770 -1.31726373 0.00000000
H 0.00000000 4.67589156 0.00000000
H 0.00000000 -4.67589156 0.00000000
H 4.04944088 2.33794578 0.00000000
H -4.04944088 2.33794578 0.00000000
H 4.04944088 -2.33794578 0.00000000
H -4.04944088 -2.33794578 0.00000000
units bohr
""")
GEOS['%s-%s-reagent' % (dbse, 'benzidine')] = qcdb.Molecule("""
0 1
N 0.00000000 0.00000000 9.17973038
N 0.00000000 0.00000000 -9.17973038
C -2.20388942 0.56488223 5.36955702
C 2.20388942 -0.56488223 5.36955702
C -2.20388942 -0.56488223 -5.36955702
C 2.20388942 0.56488223 -5.36955702
C -2.20706622 0.56349235 2.73912945
C 2.20706622 -0.56349235 2.73912945
C -2.20706622 -0.56349235 -2.73912945
C 2.20706622 0.56349235 -2.73912945
C 0.00000000 0.00000000 1.32948630
C 0.00000000 0.00000000 -1.32948630
C 0.00000000 0.00000000 6.67931977
C 0.00000000 0.00000000 -6.67931977
H -3.93022673 1.02227253 6.36283467
H 3.93022673 -1.02227253 6.36283467
H -3.93022673 -1.02227253 -6.36283467
H 3.93022673 1.02227253 -6.36283467
H -3.95573979 1.07384957 1.81596643
H 3.95573979 -1.07384957 1.81596643
H -3.95573979 -1.07384957 -1.81596643
H 3.95573979 1.07384957 -1.81596643
H 1.67837252 -0.43031314 10.04483176
H -1.67837252 0.43031314 10.04483176
H 1.67837252 0.43031314 -10.04483176
H -1.67837252 -0.43031314 -10.04483176
units bohr
""")
GEOS['%s-%s-reagent' % (dbse, 'caffeine')] = qcdb.Molecule("""
0 1
O -1.35796495 -4.55968346 0.00000000
O 6.00359465 0.00000000 0.00000000
N -4.34699530 0.40790868 0.00000000
N -2.02147868 4.34704366 0.00000000
N 2.40166495 2.29891253 0.00000000
N 2.38963107 -2.32861610 0.00000000
C -1.73514100 0.00806819 0.00000000
C -0.39656652 2.28913440 0.00000000
C -4.28628286 3.03178701 0.00000000
C -0.23380597 -2.51268919 0.00000000
C 3.66630626 -0.01011647 0.00000000
C 3.91233427 -4.71649369 0.00000000
C 3.86899427 4.70507045 0.00000000
C -6.50871497 -1.38799138 0.00000000
H -6.04146873 4.08346573 0.00000000
H 5.15026261 -4.82916673 1.68145730
H 5.15026261 -4.82916673 -1.68145730
H 2.75182289 -6.45823288 0.00000000
H 5.10374160 4.83609896 1.68332379
H 5.10374160 4.83609896 -1.68332379
H 2.65878836 6.40983420 0.00000000
H -8.34003564 -0.38023773 0.00000000
H -6.44634525 -2.62051420 1.68782779
H -6.44634525 -2.62051420 -1.68782779
units bohr
""")
GEOS['%s-%s-reagent' % (dbse, 'difuropyrazine')] = qcdb.Molecule("""
0 1
O 5.24048162 0.00000000 0.00000000
O -5.24048162 0.00000000 0.00000000
N 1.15705376 -2.55150608 0.00000000
N -1.15705376 2.55150608 0.00000000
C 1.53596834 2.15160317 0.00000000
C -1.53596834 -2.15160317 0.00000000
C 2.65703648 -0.27471770 0.00000000
C -2.65703648 0.27471770 0.00000000
C 5.62186670 2.62201493 0.00000000
C -5.62186670 -2.62201493 0.00000000
C 3.54881353 4.07756019 0.00000000
C -3.54881353 -4.07756019 0.00000000
H 7.52697788 3.41239127 0.00000000
H -7.52697788 -3.41239127 0.00000000
H 3.34537092 6.12657010 0.00000000
H -3.34537092 -6.12657010 0.00000000
units bohr
""")
GEOS['%s-%s-reagent' % (dbse, 'dimethylpentane')] = qcdb.Molecule("""
0 1
C -1.90302142 1.79989214 -3.12819161
C 0.68098191 1.17008149 -1.92744962
C 0.57347759 -0.44273007 0.55332222
C -0.57536860 1.07092655 2.79511667
C 0.00000000 0.00000000 5.44078830
C 2.40130119 0.00000000 -3.96848713
C -0.75740445 -3.03396450 0.24401702
H -3.17069973 2.76026122 -1.77509550
H -2.89812692 0.08656618 -3.79174516
H -1.70835535 3.07391957 -4.77327789
H 1.57127154 2.99847536 -1.42186024
H 2.56484657 -0.84063638 1.06972922
H -2.64484782 1.25561061 2.54633021
H 0.13997457 3.03676459 2.74695911
H -0.82887852 -1.89737304 5.71865173
H -0.77969219 1.22647944 6.94230752
H 2.05566062 -0.15968979 5.78408339
H 1.63584569 -1.79524917 -4.71467976
H 2.65159319 1.28124788 -5.60071709
H 4.31235662 -0.40611038 -3.22785050
H -0.60282164 -4.20392129 1.96624261
H -2.79329149 -2.82201336 -0.17320237
H 0.07519864 -4.15853702 -1.30499111
units bohr
""")
GEOS['%s-%s-reagent' % (dbse, 'disilyl_ether')] = qcdb.Molecule("""
0 1
Si 0.00000000 -0.06571048 3.03636189
Si 0.00000000 -0.06571048 -3.03636189
O 0.00000000 -0.88817346 0.00000000
H 0.00000000 -2.19565412 4.54756839
H 2.12290049 1.35272566 3.58475023
H -2.12290049 1.35272566 3.58475023
H 0.00000000 -2.19565412 -4.54756839
H 2.12290049 1.35272566 -3.58475023
H -2.12290049 1.35272566 -3.58475023
units bohr
""")
GEOS['%s-%s-reagent' % (dbse, 'ethane')] = qcdb.Molecule("""
0 1
C 0.00000000 0.00000000 1.45478763
C 0.00000000 0.00000000 -1.45478763
H 1.68084455 0.97043609 2.14455455
H 1.68084455 -0.97043609 -2.14455455
H -1.68084455 0.97043609 2.14455455
H -1.68084455 -0.97043609 -2.14455455
H 0.00000000 -1.94087219 2.14455455
H 0.00000000 1.94087219 -2.14455455
units bohr
""")
GEOS['%s-%s-reagent' % (dbse, 'ethanol')] = qcdb.Molecule("""
0 1
O 2.94951269 0.00000000 0.00000000
C 0.42864361 0.89070972 0.00000000
C -1.47274991 -1.22612707 0.00000000
H 4.05795769 1.50458064 0.00000000
H 0.07017562 2.06834349 1.69306899
H 0.07017562 2.06834349 -1.69306899
H -1.36184741 -2.46035674 1.67199009
H -1.36184741 -2.46035674 -1.67199009
H -3.38002050 -0.38513679 0.00000000
units bohr
""")
GEOS['%s-%s-reagent' % (dbse, 'furan')] = qcdb.Molecule("""
0 1
O 0.00000000 -2.71155703 0.00000000
C 1.30409645 1.35600277 0.00000000
C -1.30409645 1.35600277 0.00000000
C 2.07680908 -1.14870311 0.00000000
C -2.07680908 -1.14870311 0.00000000
H 2.51639050 2.99782755 0.00000000
H -2.51639050 2.99782755 0.00000000
H 3.99399875 -1.84934869 0.00000000
H -3.99399875 -1.84934869 0.00000000
units bohr
""")
GEOS['%s-%s-reagent' % (dbse, 'histidine')] = qcdb.Molecule("""
0 1
O 3.93683911 0.00000000 5.02858545
O 0.00000000 0.00000000 6.75548572
N -1.62714005 -0.17063169 -6.38981145
N -1.55525882 2.75691585 -2.98858739
N -0.06519044 -3.43699076 1.78152280
C 0.00313112 -0.96382673 -4.49205004
C 0.04394056 0.76526971 -2.47860156
C -2.44306081 2.04255765 -5.31626413
C 1.61712938 0.55012280 -0.07100455
C 0.25915307 -0.68070217 2.22272340
C 1.63605981 -0.20456172 4.77295177
H 1.09061008 -2.68966721 -4.55294959
H -1.91653466 4.35407321 -1.95395961
H -3.77917584 3.21725531 -6.32817718
H 2.24417172 2.47631265 0.44479551
H 3.39934986 -0.46804317 -0.47537143
H -1.63798632 0.18516799 2.40194434
H -1.09297021 -4.24259361 3.21643716
H -1.09919371 -3.74001070 0.16810497
H 0.98612615 0.24935257 8.25422581
units bohr
""")
GEOS['%s-%s-reagent' % (dbse, 'hydroxysulphane')] = qcdb.Molecule("""
0 1
S 0.00000000 0.00000000 1.64344454
O 1.55643788 0.00000000 -0.78417924
H 0.70878977 -0.98889634 -2.04698233
H -2.26522765 0.98889634 1.18771703
units bohr
""")
GEOS['%s-%s-reagent' % (dbse, 'menthone')] = qcdb.Molecule("""
0 1
O 0.00000000 0.00000000 4.83502957
C -5.06597212 -1.27592091 0.49885049
C -3.60348796 -1.49111229 -2.01995066
C -1.13779972 0.12182250 -2.02402508
C 0.69335828 -0.53324847 0.24699141
C -0.81879368 -0.76420189 2.79442766
C -3.41755812 -2.06413311 2.77868746
C 0.08327139 -0.18247958 -4.66184769
C 3.16977849 1.11788425 0.33780916
C 5.23967937 0.00000000 2.05851212
C 2.74820737 3.91648659 1.03692914
H -5.73534045 0.69223903 0.75660810
H -6.80139535 -2.44289264 0.43045930
H -3.15419510 -3.50140339 -2.39715388
H -4.86109777 -0.90264082 -3.58603040
H -1.71463208 2.12647811 -1.82542348
H 1.33530286 -2.48925976 -0.10949068
H -4.41049264 -1.64891601 4.56938165
H -3.10227312 -4.12767303 2.77142958
H -1.27515064 0.19340176 -6.20625544
H 0.83979297 -2.10810531 -4.96157719
H 1.65711962 1.15531285 -4.96195049
H 4.01314574 1.10167735 -1.57473542
H 4.69908810 0.02990650 4.07747056
H 7.03475689 1.05859686 1.90111311
H 5.66887645 -1.98486988 1.56898286
H 4.52277834 5.01677786 0.95132487
H 1.98900684 4.13531008 2.97264568
H 1.40402606 4.85096335 -0.25821233
units bohr
""")
GEOS['%s-%s-reagent' % (dbse, 'mesityl_oxide')] = qcdb.Molecule("""
0 1
O 4.30492455 0.00000000 0.00000000
C 0.05024721 -3.82629843 0.00000000
C -1.35087834 -1.32752917 0.00000000
C -4.20838872 -1.49335398 0.00000000
C -0.19920658 0.94023239 0.00000000
C 2.60618767 1.58735088 0.00000000
C 3.31537901 4.37315331 0.00000000
H 1.28461121 -4.00174347 1.67716810
H 1.28461121 -4.00174347 -1.67716810
H -1.21281465 -5.48980179 0.00000000
H -5.04695592 -0.57944060 1.68053694
H -5.04695592 -0.57944060 -1.68053694
H -4.87911033 -3.47264458 0.00000000
H -1.42668593 2.59614349 0.00000000
H 2.56758840 5.33389639 1.69384111
H 2.56758840 5.33389639 -1.69384111
H 5.38985873 4.60732325 0.00000000
units bohr
""")
GEOS['%s-%s-reagent' % (dbse, 'methylamine')] = qcdb.Molecule("""
0 1
N 1.59169309 0.00000000 0.00000000
C -1.10781247 -0.03073718 0.00000000
H 2.61432616 -1.63020032 0.00000000
H -1.81666320 1.93163906 0.00000000
H 2.57804913 1.64911594 0.00000000
H -1.92979635 -0.95990875 1.69695191
H -1.92979635 -0.95990875 -1.69695191
units bohr
""")
GEOS['%s-%s-reagent' % (dbse, 'naphthalene')] = qcdb.Molecule("""
0 1
C 1.31500993 4.56625993 0.00000000
C -1.31500993 4.56625993 0.00000000
C 1.31500993 -4.56625993 0.00000000
C -1.31500993 -4.56625993 0.00000000
C 2.65095410 2.30121210 0.00000000
C -2.65095410 2.30121210 0.00000000
C 2.65095410 -2.30121210 0.00000000
C -2.65095410 -2.30121210 0.00000000
C 1.35957848 0.00000000 0.00000000
C -1.35957848 0.00000000 0.00000000
H 2.32713807 6.33915590 0.00000000
H -2.32713807 6.33915590 0.00000000
H 2.32713807 -6.33915590 0.00000000
H -2.32713807 -6.33915590 0.00000000
H 4.69449351 2.36375141 0.00000000
H -4.69449351 2.36375141 0.00000000
H 4.69449351 -2.36375141 0.00000000
H -4.69449351 -2.36375141 0.00000000
units bohr
""")
GEOS['%s-%s-reagent' % (dbse, 'neopentane')] = qcdb.Molecule("""
0 1
C 0.00000000 0.00000000 0.00000000
C 1.68781269 -1.68781269 1.68781269
C -1.68781269 1.68781269 1.68781269
C -1.68781269 -1.68781269 -1.68781269
C 1.68781269 1.68781269 -1.68781269
H 2.93275937 -0.55961452 2.93275937
H -2.93275937 0.55961452 2.93275937
H 2.93275937 0.55961452 -2.93275937
H -2.93275937 -0.55961452 -2.93275937
H 0.55961452 -2.93275937 2.93275937
H -0.55961452 2.93275937 2.93275937
H -0.55961452 -2.93275937 -2.93275937
H 0.55961452 2.93275937 -2.93275937
H 2.93275937 -2.93275937 0.55961452
H -2.93275937 2.93275937 0.55961452
H -2.93275937 -2.93275937 -0.55961452
H 2.93275937 2.93275937 -0.55961452
units bohr
""")
GEOS['%s-%s-reagent' % (dbse, 'pterin')] = qcdb.Molecule("""
0 1
O 5.40068710 0.00000000 0.00000000
N 1.67450469 -4.01224809 0.00000000
N -3.29778810 -2.66298586 0.00000000
N 2.41435003 2.98093954 0.00000000
N -2.07868639 1.96577816 0.00000000
N -1.05941931 6.33383022 0.00000000
C 1.04506477 -1.58869528 0.00000000
C -1.57825490 -0.83595727 0.00000000
C -0.10078959 -5.76401042 0.00000000
C -2.66568392 -5.07794600 0.00000000
C 3.13177958 0.52435884 0.00000000
C -0.33744328 3.67065942 0.00000000
H 0.42296241 -7.73663229 0.00000000
H -4.11548210 -6.51452882 0.00000000
H 3.70204141 4.43045951 0.00000000
H -2.96601438 6.68621706 0.00000000
H 0.40817199 7.60076128 0.00000000
units bohr
""")
GEOS['%s-%s-reagent' % (dbse, 'water')] = qcdb.Molecule("""
0 1
O 0.00000000 -0.69801390 0.00000000
H 1.48150016 0.34900695 0.00000000
H -1.48150016 0.34900695 0.00000000
units bohr
""")
DATA = {}
DATA['NUCLEAR REPULSION ENERGY'] = {}
DATA['NUCLEAR REPULSION ENERGY']['BAKERJCC93-1_3_5_trifluorobenzene-reagent' ] = 422.92396136
DATA['NUCLEAR REPULSION ENERGY']['BAKERJCC93-1_3_5_trisilacyclohexane-reagent'] = 458.36587183
DATA['NUCLEAR REPULSION ENERGY']['BAKERJCC93-1_3_difluorobenzene-reagent' ] = 342.91092587
DATA['NUCLEAR REPULSION ENERGY']['BAKERJCC93-1_5_difluoronaphthalene-reagent' ] = 646.43123032
DATA['NUCLEAR REPULSION ENERGY']['BAKERJCC93-2_hydroxybicyclopentane-reagent' ] = 242.19428832
DATA['NUCLEAR REPULSION ENERGY']['BAKERJCC93-ACANIL01-reagent' ] = 482.21477925
DATA['NUCLEAR REPULSION ENERGY']['BAKERJCC93-acetone-reagent' ] = 117.95076939
DATA['NUCLEAR REPULSION ENERGY']['BAKERJCC93-acetylene-reagent' ] = 25.27722466
DATA['NUCLEAR REPULSION ENERGY']['BAKERJCC93-ACHTAR10-reagent' ] = 308.80224696
DATA['NUCLEAR REPULSION ENERGY']['BAKERJCC93-allene-reagent' ] = 58.87417679
DATA['NUCLEAR REPULSION ENERGY']['BAKERJCC93-ammonia-reagent' ] = 11.96515487
DATA['NUCLEAR REPULSION ENERGY']['BAKERJCC93-benzaldehyde-reagent' ] = 318.78609908
DATA['NUCLEAR REPULSION ENERGY']['BAKERJCC93-benzene-reagent' ] = 203.68596051
DATA['NUCLEAR REPULSION ENERGY']['BAKERJCC93-benzidine-reagent' ] = 792.45947768
DATA['NUCLEAR REPULSION ENERGY']['BAKERJCC93-caffeine-reagent' ] = 906.96430213
DATA['NUCLEAR REPULSION ENERGY']['BAKERJCC93-difuropyrazine-reagent' ] = 627.88695998
DATA['NUCLEAR REPULSION ENERGY']['BAKERJCC93-dimethylpentane-reagent' ] = 329.98386705
DATA['NUCLEAR REPULSION ENERGY']['BAKERJCC93-disilyl_ether-reagent' ] = 159.72016132
DATA['NUCLEAR REPULSION ENERGY']['BAKERJCC93-ethane-reagent' ] = 42.23178002
DATA['NUCLEAR REPULSION ENERGY']['BAKERJCC93-ethanol-reagent' ] = 81.36264622
DATA['NUCLEAR REPULSION ENERGY']['BAKERJCC93-furan-reagent' ] = 160.13552808
DATA['NUCLEAR REPULSION ENERGY']['BAKERJCC93-histidine-reagent' ] = 593.28835805
DATA['NUCLEAR REPULSION ENERGY']['BAKERJCC93-hydroxysulphane-reagent' ] = 61.30095938
DATA['NUCLEAR REPULSION ENERGY']['BAKERJCC93-menthone-reagent' ] = 661.81731171
DATA['NUCLEAR REPULSION ENERGY']['BAKERJCC93-mesityl_oxide-reagent' ] = 286.76670258
DATA['NUCLEAR REPULSION ENERGY']['BAKERJCC93-methylamine-reagent' ] = 42.02150992
DATA['NUCLEAR REPULSION ENERGY']['BAKERJCC93-naphthalene-reagent' ] = 460.06217417
DATA['NUCLEAR REPULSION ENERGY']['BAKERJCC93-neopentane-reagent' ] = 196.29453370
DATA['NUCLEAR REPULSION ENERGY']['BAKERJCC93-pterin-reagent' ] = 650.63929481
DATA['NUCLEAR REPULSION ENERGY']['BAKERJCC93-water-reagent' ] = 9.15711319
|
"""Tests for distutils.command.bdist."""
import os
import unittest
from test.support import run_unittest
from distutils.command.bdist import bdist
from distutils.tests import support
class BuildTestCase(support.TempdirManager,
unittest.TestCase):
def test_formats(self):
# let's create a command and make sure
# we can set the format
dist = self.create_dist()[1]
cmd = bdist(dist)
cmd.formats = ['msi']
cmd.ensure_finalized()
self.assertEqual(cmd.formats, ['msi'])
# what formats does bdist offer?
formats = ['bztar', 'gztar', 'msi', 'rpm', 'tar',
'wininst', 'zip', 'ztar']
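        # format_command maps each format name to its implementing command,
        # so its sorted keys are the complete list of supported formats.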
found = sorted(cmd.format_command)
self.assertEqual(found, formats)
def test_skip_build(self):
# bug #10946: bdist --skip-build should trickle down to subcommands
dist = self.create_dist()[1]
cmd = bdist(dist)
cmd.skip_build = 1
cmd.ensure_finalized()
dist.command_obj['bdist'] = cmd
names = ['bdist_dumb', 'bdist_wininst'] # bdist_rpm does not support --skip-build
if os.name == 'nt':
names.append('bdist_msi')
for name in names:
subcmd = cmd.get_finalized_command(name)
self.assertTrue(subcmd.skip_build,
'%s should take --skip-build from bdist' % name)
def test_suite():
return unittest.makeSuite(BuildTestCase)
if __name__ == '__main__':
run_unittest(test_suite())
|
"""check for signs of poor design"""
from collections import defaultdict
from astroid import If, BoolOp
from astroid import decorators
from pylint.interfaces import IAstroidChecker
from pylint.checkers import BaseChecker
from pylint.checkers.utils import check_messages
from pylint import utils
MSGS = {
'R0901': ('Too many ancestors (%s/%s)',
'too-many-ancestors',
'Used when class has too many parent classes, try to reduce \
this to get a simpler (and so easier to use) class.'),
'R0902': ('Too many instance attributes (%s/%s)',
'too-many-instance-attributes',
'Used when class has too many instance attributes, try to reduce \
this to get a simpler (and so easier to use) class.'),
'R0903': ('Too few public methods (%s/%s)',
'too-few-public-methods',
'Used when class has too few public methods, so be sure it\'s \
really worth it.'),
'R0904': ('Too many public methods (%s/%s)',
'too-many-public-methods',
'Used when class has too many public methods, try to reduce \
this to get a simpler (and so easier to use) class.'),
'R0911': ('Too many return statements (%s/%s)',
'too-many-return-statements',
              'Used when a function or method has too many return statements, \
making it hard to follow.'),
'R0912': ('Too many branches (%s/%s)',
'too-many-branches',
'Used when a function or method has too many branches, \
making it hard to follow.'),
'R0913': ('Too many arguments (%s/%s)',
'too-many-arguments',
'Used when a function or method takes too many arguments.'),
'R0914': ('Too many local variables (%s/%s)',
'too-many-locals',
'Used when a function or method has too many local variables.'),
'R0915': ('Too many statements (%s/%s)',
'too-many-statements',
'Used when a function or method has too many statements. You \
should then split it in smaller functions / methods.'),
'R0916': ('Too many boolean expressions in if statement (%s/%s)',
'too-many-boolean-expressions',
              'Used when an if statement contains too many boolean '
'expressions'),
}
def _count_boolean_expressions(bool_op):
"""Counts the number of boolean expressions in BoolOp `bool_op` (recursive)
example: a and (b or c or (d and e)) ==> 5 boolean expressions
"""
nb_bool_expr = 0
for bool_expr in bool_op.get_children():
if isinstance(bool_expr, BoolOp):
nb_bool_expr += _count_boolean_expressions(bool_expr)
else:
nb_bool_expr += 1
return nb_bool_expr
class MisdesignChecker(BaseChecker):
"""checks for sign of poor/misdesign:
* number of methods, attributes, local variables...
* size, complexity of functions, methods
"""
__implements__ = (IAstroidChecker,)
# configuration section name
name = 'design'
# messages
msgs = MSGS
priority = -2
# configuration options
options = (('max-args',
{'default' : 5, 'type' : 'int', 'metavar' : '<int>',
'help': 'Maximum number of arguments for function / method'}
),
('max-locals',
{'default' : 15, 'type' : 'int', 'metavar' : '<int>',
'help': 'Maximum number of locals for function / method body'}
),
('max-returns',
{'default' : 6, 'type' : 'int', 'metavar' : '<int>',
'help': 'Maximum number of return / yield for function / '
'method body'}
),
('max-branches',
{'default' : 12, 'type' : 'int', 'metavar' : '<int>',
                 'help': 'Maximum number of branches for function / method body'}
),
('max-statements',
{'default' : 50, 'type' : 'int', 'metavar' : '<int>',
'help': 'Maximum number of statements in function / method '
'body'}
),
('max-parents',
{'default' : 7,
'type' : 'int',
'metavar' : '<num>',
'help' : 'Maximum number of parents for a class (see R0901).'}
),
('max-attributes',
{'default' : 7,
'type' : 'int',
'metavar' : '<num>',
'help' : 'Maximum number of attributes for a class \
(see R0902).'}
),
('min-public-methods',
{'default' : 2,
'type' : 'int',
'metavar' : '<num>',
'help' : 'Minimum number of public methods for a class \
(see R0903).'}
),
('max-public-methods',
{'default' : 20,
'type' : 'int',
'metavar' : '<num>',
'help' : 'Maximum number of public methods for a class \
(see R0904).'}
),
('max-bool-expr',
{'default': 5,
'type': 'int',
'metavar': '<num>',
                 'help': 'Maximum number of boolean expressions in an if '
'statement'}
),
)
def __init__(self, linter=None):
BaseChecker.__init__(self, linter)
self.stats = None
self._returns = None
self._branches = None
self._stmts = 0
def open(self):
"""initialize visit variables"""
self.stats = self.linter.add_stats()
self._returns = []
self._branches = defaultdict(int)
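        # _branches maps each function/method node to the number of branches
        # counted while visiting its body.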
@decorators.cachedproperty
def _ignored_argument_names(self):
return utils.get_global_option(self, 'ignored-argument-names', default=None)
@check_messages('too-many-ancestors', 'too-many-instance-attributes',
'too-few-public-methods', 'too-many-public-methods')
def visit_classdef(self, node):
"""check size of inheritance hierarchy and number of instance attributes
"""
nb_parents = len(list(node.ancestors()))
if nb_parents > self.config.max_parents:
self.add_message('too-many-ancestors', node=node,
args=(nb_parents, self.config.max_parents))
if len(node.instance_attrs) > self.config.max_attributes:
self.add_message('too-many-instance-attributes', node=node,
args=(len(node.instance_attrs),
self.config.max_attributes))
@check_messages('too-few-public-methods', 'too-many-public-methods')
def leave_classdef(self, node):
"""check number of public methods"""
my_methods = sum(1 for method in node.mymethods()
if not method.name.startswith('_'))
all_methods = sum(1 for method in node.methods()
if not method.name.startswith('_'))
        # Does the class contain more than n public methods?
# This checks only the methods defined in the current class,
# since the user might not have control over the classes
# from the ancestors. It avoids some false positives
# for classes such as unittest.TestCase, which provides
# a lot of assert methods. It doesn't make sense to warn
# when the user subclasses TestCase to add his own tests.
if my_methods > self.config.max_public_methods:
self.add_message('too-many-public-methods', node=node,
args=(my_methods,
self.config.max_public_methods))
# stop here for exception, metaclass and interface classes
if node.type != 'class':
return
        # Does the class contain less than n public methods?
# This checks all the methods defined by ancestors and
# by the current class.
if all_methods < self.config.min_public_methods:
self.add_message('too-few-public-methods', node=node,
args=(all_methods,
self.config.min_public_methods))
@check_messages('too-many-return-statements', 'too-many-branches',
'too-many-arguments', 'too-many-locals',
'too-many-statements')
def visit_functiondef(self, node):
"""check function name, docstring, arguments, redefinition,
variable names, max locals
"""
# init branch and returns counters
self._returns.append(0)
# check number of arguments
args = node.args.args
ignored_argument_names = self._ignored_argument_names
if args is not None:
ignored_args_num = 0
if ignored_argument_names:
ignored_args_num = sum(1 for arg in args if ignored_argument_names.match(arg.name))
argnum = len(args) - ignored_args_num
if argnum > self.config.max_args:
self.add_message('too-many-arguments', node=node,
args=(len(args), self.config.max_args))
else:
ignored_args_num = 0
# check number of local variables
locnum = len(node.locals) - ignored_args_num
if locnum > self.config.max_locals:
self.add_message('too-many-locals', node=node,
args=(locnum, self.config.max_locals))
# init statements counter
self._stmts = 1
visit_asyncfunctiondef = visit_functiondef
@check_messages('too-many-return-statements', 'too-many-branches',
'too-many-arguments', 'too-many-locals',
'too-many-statements')
def leave_functiondef(self, node):
"""most of the work is done here on close:
checks for max returns, branch, return in __init__
"""
returns = self._returns.pop()
if returns > self.config.max_returns:
self.add_message('too-many-return-statements', node=node,
args=(returns, self.config.max_returns))
branches = self._branches[node]
if branches > self.config.max_branches:
self.add_message('too-many-branches', node=node,
args=(branches, self.config.max_branches))
# check number of statements
if self._stmts > self.config.max_statements:
self.add_message('too-many-statements', node=node,
args=(self._stmts, self.config.max_statements))
leave_asyncfunctiondef = leave_functiondef
def visit_return(self, _):
"""count number of returns"""
if not self._returns:
return # return outside function, reported by the base checker
self._returns[-1] += 1
def visit_default(self, node):
"""default visit method -> increments the statements counter if
necessary
"""
if node.is_statement:
self._stmts += 1
def visit_tryexcept(self, node):
"""increments the branches counter"""
branches = len(node.handlers)
if node.orelse:
branches += 1
self._inc_branch(node, branches)
self._stmts += branches
def visit_tryfinally(self, node):
"""increments the branches counter"""
self._inc_branch(node, 2)
self._stmts += 2
@check_messages('too-many-boolean-expressions')
def visit_if(self, node):
"""increments the branches counter and checks boolean expressions"""
self._check_boolean_expressions(node)
branches = 1
# don't double count If nodes coming from some 'elif'
if node.orelse and (len(node.orelse) > 1 or
not isinstance(node.orelse[0], If)):
branches += 1
self._inc_branch(node, branches)
self._stmts += branches
def _check_boolean_expressions(self, node):
"""Go through "if" node `node` and counts its boolean expressions
if the "if" node test is a BoolOp node
"""
condition = node.test
if not isinstance(condition, BoolOp):
return
nb_bool_expr = _count_boolean_expressions(condition)
if nb_bool_expr > self.config.max_bool_expr:
self.add_message('too-many-boolean-expressions', node=condition,
args=(nb_bool_expr, self.config.max_bool_expr))
def visit_while(self, node):
"""increments the branches counter"""
branches = 1
if node.orelse:
branches += 1
self._inc_branch(node, branches)
visit_for = visit_while
def _inc_branch(self, node, branchesnum=1):
"""increments the branches counter"""
self._branches[node.scope()] += branchesnum
def register(linter):
"""required method to auto register this checker """
linter.register_checker(MisdesignChecker(linter))
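# Illustrative configuration note (not part of the original module): since the
# checker's configuration section is named 'design', the options defined above
# are typically tuned in the "[DESIGN]" section of a pylintrc file, e.g.:
#
#   [DESIGN]
#   max-args=5
#   max-locals=15
#   max-branches=12
#   min-public-methods=2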
|
data = (
' @ ', # 0x00
' ... ', # 0x01
', ', # 0x02
'. ', # 0x03
': ', # 0x04
' // ', # 0x05
'', # 0x06
'-', # 0x07
', ', # 0x08
'. ', # 0x09
'', # 0x0a
'', # 0x0b
'', # 0x0c
'', # 0x0d
'', # 0x0e
'[?]', # 0x0f
'0', # 0x10
'1', # 0x11
'2', # 0x12
'3', # 0x13
'4', # 0x14
'5', # 0x15
'6', # 0x16
'7', # 0x17
'8', # 0x18
'9', # 0x19
'[?]', # 0x1a
'[?]', # 0x1b
'[?]', # 0x1c
'[?]', # 0x1d
'[?]', # 0x1e
'[?]', # 0x1f
'a', # 0x20
'e', # 0x21
'i', # 0x22
'o', # 0x23
'u', # 0x24
'O', # 0x25
'U', # 0x26
'ee', # 0x27
'n', # 0x28
'ng', # 0x29
'b', # 0x2a
'p', # 0x2b
'q', # 0x2c
'g', # 0x2d
'm', # 0x2e
'l', # 0x2f
's', # 0x30
'sh', # 0x31
't', # 0x32
'd', # 0x33
'ch', # 0x34
'j', # 0x35
'y', # 0x36
'r', # 0x37
'w', # 0x38
'f', # 0x39
'k', # 0x3a
'kha', # 0x3b
'ts', # 0x3c
'z', # 0x3d
'h', # 0x3e
'zr', # 0x3f
'lh', # 0x40
'zh', # 0x41
'ch', # 0x42
'-', # 0x43
'e', # 0x44
'i', # 0x45
'o', # 0x46
'u', # 0x47
'O', # 0x48
'U', # 0x49
'ng', # 0x4a
'b', # 0x4b
'p', # 0x4c
'q', # 0x4d
'g', # 0x4e
'm', # 0x4f
't', # 0x50
'd', # 0x51
'ch', # 0x52
'j', # 0x53
'ts', # 0x54
'y', # 0x55
'w', # 0x56
'k', # 0x57
'g', # 0x58
'h', # 0x59
'jy', # 0x5a
'ny', # 0x5b
'dz', # 0x5c
'e', # 0x5d
'i', # 0x5e
'iy', # 0x5f
'U', # 0x60
'u', # 0x61
'ng', # 0x62
'k', # 0x63
'g', # 0x64
'h', # 0x65
'p', # 0x66
'sh', # 0x67
't', # 0x68
'd', # 0x69
'j', # 0x6a
'f', # 0x6b
'g', # 0x6c
'h', # 0x6d
'ts', # 0x6e
'z', # 0x6f
'r', # 0x70
'ch', # 0x71
'zh', # 0x72
'i', # 0x73
'k', # 0x74
'r', # 0x75
'f', # 0x76
'zh', # 0x77
'[?]', # 0x78
'[?]', # 0x79
'[?]', # 0x7a
'[?]', # 0x7b
'[?]', # 0x7c
'[?]', # 0x7d
'[?]', # 0x7e
'[?]', # 0x7f
'[?]', # 0x80
'H', # 0x81
'X', # 0x82
'W', # 0x83
'M', # 0x84
' 3 ', # 0x85
' 333 ', # 0x86
'a', # 0x87
'i', # 0x88
'k', # 0x89
'ng', # 0x8a
'c', # 0x8b
'tt', # 0x8c
'tth', # 0x8d
'dd', # 0x8e
'nn', # 0x8f
't', # 0x90
'd', # 0x91
'p', # 0x92
'ph', # 0x93
'ss', # 0x94
'zh', # 0x95
'z', # 0x96
'a', # 0x97
't', # 0x98
'zh', # 0x99
'gh', # 0x9a
'ng', # 0x9b
'c', # 0x9c
'jh', # 0x9d
'tta', # 0x9e
'ddh', # 0x9f
't', # 0xa0
'dh', # 0xa1
'ss', # 0xa2
'cy', # 0xa3
'zh', # 0xa4
'z', # 0xa5
'u', # 0xa6
'y', # 0xa7
'bh', # 0xa8
'\'', # 0xa9
'[?]', # 0xaa
'[?]', # 0xab
'[?]', # 0xac
'[?]', # 0xad
'[?]', # 0xae
'[?]', # 0xaf
'[?]', # 0xb0
'[?]', # 0xb1
'[?]', # 0xb2
'[?]', # 0xb3
'[?]', # 0xb4
'[?]', # 0xb5
'[?]', # 0xb6
'[?]', # 0xb7
'[?]', # 0xb8
'[?]', # 0xb9
'[?]', # 0xba
'[?]', # 0xbb
'[?]', # 0xbc
'[?]', # 0xbd
'[?]', # 0xbe
'[?]', # 0xbf
'[?]', # 0xc0
'[?]', # 0xc1
'[?]', # 0xc2
'[?]', # 0xc3
'[?]', # 0xc4
'[?]', # 0xc5
'[?]', # 0xc6
'[?]', # 0xc7
'[?]', # 0xc8
'[?]', # 0xc9
'[?]', # 0xca
'[?]', # 0xcb
'[?]', # 0xcc
'[?]', # 0xcd
'[?]', # 0xce
'[?]', # 0xcf
'[?]', # 0xd0
'[?]', # 0xd1
'[?]', # 0xd2
'[?]', # 0xd3
'[?]', # 0xd4
'[?]', # 0xd5
'[?]', # 0xd6
'[?]', # 0xd7
'[?]', # 0xd8
'[?]', # 0xd9
'[?]', # 0xda
'[?]', # 0xdb
'[?]', # 0xdc
'[?]', # 0xdd
'[?]', # 0xde
'[?]', # 0xdf
'[?]', # 0xe0
'[?]', # 0xe1
'[?]', # 0xe2
'[?]', # 0xe3
'[?]', # 0xe4
'[?]', # 0xe5
'[?]', # 0xe6
'[?]', # 0xe7
'[?]', # 0xe8
'[?]', # 0xe9
'[?]', # 0xea
'[?]', # 0xeb
'[?]', # 0xec
'[?]', # 0xed
'[?]', # 0xee
'[?]', # 0xef
'[?]', # 0xf0
'[?]', # 0xf1
'[?]', # 0xf2
'[?]', # 0xf3
'[?]', # 0xf4
'[?]', # 0xf5
'[?]', # 0xf6
'[?]', # 0xf7
'[?]', # 0xf8
'[?]', # 0xf9
'[?]', # 0xfa
'[?]', # 0xfb
'[?]', # 0xfc
'[?]', # 0xfd
'[?]', # 0xfe
)
|
from __future__ import division, print_function, absolute_import
from rep.estimators import XGBoostClassifier, XGBoostRegressor
from rep.test.test_estimators import check_classifier, check_regression, generate_classification_data
__author__ = 'Alex Rogozhnikov'
def test_xgboost():
check_classifier(XGBoostClassifier(), n_classes=2)
check_classifier(XGBoostClassifier(), n_classes=4)
check_regression(XGBoostRegressor())
def test_feature_importances():
clf = XGBoostClassifier()
X, y, sample_weight = generate_classification_data()
clf.fit(X, y, sample_weight=sample_weight)
# checking feature importance (three ways)
res_default = clf.xgboost_classifier.get_fscore()
res2 = clf._get_fscore()
res3 = clf.feature_importances_
assert res_default == res2, res_default
for i, val in enumerate(res3):
if val > 0.0:
assert val == res_default['f' + str(i)]
|
from airflow.providers.microsoft.azure.hooks.azure_cosmos import AzureCosmosDBHook
from airflow.sensors.base import BaseSensorOperator
class AzureCosmosDocumentSensor(BaseSensorOperator):
"""
Checks for the existence of a document which
matches the given query in CosmosDB. Example:
.. code-block::
azure_cosmos_sensor = AzureCosmosDocumentSensor(
database_name="somedatabase_name",
collection_name="somecollection_name",
document_id="unique-doc-id",
azure_cosmos_conn_id="azure_cosmos_default",
task_id="azure_cosmos_sensor")
:param database_name: Target CosmosDB database_name.
:type database_name: str
:param collection_name: Target CosmosDB collection_name.
:type collection_name: str
:param document_id: The ID of the target document.
:type document_id: str
:param azure_cosmos_conn_id: Reference to the
:ref:`Azure CosmosDB connection<howto/connection:azure_cosmos>`.
:type azure_cosmos_conn_id: str
"""
template_fields = ('database_name', 'collection_name', 'document_id')
def __init__(
self,
*,
database_name: str,
collection_name: str,
document_id: str,
azure_cosmos_conn_id: str = "azure_cosmos_default",
**kwargs,
) -> None:
super().__init__(**kwargs)
self.azure_cosmos_conn_id = azure_cosmos_conn_id
self.database_name = database_name
self.collection_name = collection_name
self.document_id = document_id
def poke(self, context: dict) -> bool:
self.log.info("*** Intering poke")
hook = AzureCosmosDBHook(self.azure_cosmos_conn_id)
return hook.get_document(self.document_id, self.database_name, self.collection_name) is not None
|
assert hex(unhex(hex(0xcafebabe))) == 'CAFEBABE'
assert unhex(hex(unhex('cafebabe'))) == 0xCAFEBABE
assert hex(unhex(hex(0xdecaf))) == '000DECAF'
assert unhex(hex(unhex('DECAF'))) == 0xDECAF
print('OK')
exit()
|
"""This code example deletes content metadata key hierarchies.
To determine which content metadata key hierarchies exist, run
get_all_content_metadata_key_hierarchies.py.
This feature is only available to DFP video publishers.
The LoadFromStorage method is pulling credentials and properties from a
"googleads.yaml" file. By default, it looks for this file in your home
directory. For more information, see the "Caching authentication information"
section of our README.
Tags: ContentMetadataKeyHierarchyService.getContentMetadataKeyHierarchiesByStatement
      ContentMetadataKeyHierarchyService.performContentMetadataKeyHierarchyAction
"""
__author__ = ('Nicholas Chen',
'Joseph DiLallo')
from googleads import dfp
CONTENT_METADATA_KEY_HIERARCHY_ID = (
'INSERT_CONTENT_METADATA_KEY_HIERARCHY_ID_HERE')
def main(client, content_metadata_key_hierarchy_id):
# Initialize appropriate service.
content_metadata_key_hierarchy_service = client.GetService(
'ContentMetadataKeyHierarchyService', version='v201408')
# Create a query to select a single content metadata key hierarchy.
values = [{
'key': 'id',
'value': {
'xsi_type': 'NumberValue',
'value': content_metadata_key_hierarchy_id
}
}]
query = 'WHERE id = :id ORDER BY id ASC'
statement = dfp.FilterStatement(query, values, 1)
# Get a single content metadata key hierarchy by statement.
response = (content_metadata_key_hierarchy_service
.getContentMetadataKeyHierarchiesByStatement(
statement.ToStatement())[0])
content_metadata_key_hierarchies = (response['results']
if 'results' in response else None)
# Display results.
if content_metadata_key_hierarchies:
for content_metadata_key_hierarchy in content_metadata_key_hierarchies:
print ('Content metadata key hierarchy with ID \'%s\' and name \'%s\' '
'will be deleted.' % (content_metadata_key_hierarchy['id'],
content_metadata_key_hierarchy['name']))
# Perform action.
result = (content_metadata_key_hierarchy_service
.performContentMetadataKeyHierarchyAction(
{'type': 'DeleteContentMetadataKeyHierarchies'},
statement.ToStatement()))[0]
# Display results.
print ('Number of content metadata key hierarchies deleted: %s' %
result['numChanges'])
if __name__ == '__main__':
# Initialize client object.
dfp_client = dfp.DfpClient.LoadFromStorage()
main(dfp_client, CONTENT_METADATA_KEY_HIERARCHY_ID)
|
from unittest import TestCase
from boundary import PluginList
from cli_test import CLITest
class PluginListTest(TestCase):
def setUp(self):
self.cli = PluginList()
def test_cli_description(self):
CLITest.check_description(self, self.cli)
def test_cli_help(self):
CLITest.check_cli_help(self, self.cli)
|
from __future__ import absolute_import, division, print_function, unicode_literals
from c7n.manager import resources
from c7n.query import QueryResourceManager, TypeInfo
@resources.register('waf')
class WAF(QueryResourceManager):
class resource_type(TypeInfo):
service = "waf"
enum_spec = ("list_web_acls", "WebACLs", None)
detail_spec = ("get_web_acl", "WebACLId", "WebACLId", "WebACL")
name = "Name"
id = "WebACLId"
dimension = "WebACL"
config_type = "AWS::WAF::WebACL"
arn_type = "webacl"
@resources.register('waf-regional')
class RegionalWAF(QueryResourceManager):
class resource_type(TypeInfo):
service = "waf-regional"
enum_spec = ("list_web_acls", "WebACLs", None)
detail_spec = ("get_web_acl", "WebACLId", "WebACLId", "WebACL")
name = "Name"
id = "WebACLId"
dimension = "WebACL"
config_type = "AWS::WAFRegional::WebACL"
arn_type = "webacl"
|
import uuid
from tempest_lib import exceptions as lib_exc
from tempest.api.identity import base
from tempest.common.utils import data_utils
from tempest import test
class RolesNegativeTestJSON(base.BaseIdentityV2AdminTest):
def _get_role_params(self):
self.data.setup_test_user()
self.data.setup_test_role()
user = self.get_user_by_name(self.data.test_user)
tenant = self.get_tenant_by_name(self.data.test_tenant)
role = self.get_role_by_name(self.data.test_role)
return (user, tenant, role)
@test.attr(type=['negative'])
@test.idempotent_id('d5d5f1df-f8ca-4de0-b2ef-259c1cc67025')
def test_list_roles_by_unauthorized_user(self):
# Non-administrator user should not be able to list roles
self.assertRaises(lib_exc.Forbidden,
self.non_admin_client.list_roles)
@test.attr(type=['negative'])
@test.idempotent_id('11a3c7da-df6c-40c2-abc2-badd682edf9f')
def test_list_roles_request_without_token(self):
# Request to list roles without a valid token should fail
token = self.client.auth_provider.get_token()
self.client.delete_token(token)
self.assertRaises(lib_exc.Unauthorized, self.client.list_roles)
self.client.auth_provider.clear_auth()
@test.attr(type=['negative'])
@test.idempotent_id('c0b89e56-accc-4c73-85f8-9c0f866104c1')
def test_role_create_blank_name(self):
# Should not be able to create a role with a blank name
self.assertRaises(lib_exc.BadRequest, self.client.create_role, '')
@test.attr(type=['negative'])
@test.idempotent_id('585c8998-a8a4-4641-a5dd-abef7a8ced00')
def test_create_role_by_unauthorized_user(self):
# Non-administrator user should not be able to create role
role_name = data_utils.rand_name(name='role')
self.assertRaises(lib_exc.Forbidden,
self.non_admin_client.create_role, role_name)
@test.attr(type=['negative'])
@test.idempotent_id('a7edd17a-e34a-4aab-8bb7-fa6f498645b8')
def test_create_role_request_without_token(self):
# Request to create role without a valid token should fail
token = self.client.auth_provider.get_token()
self.client.delete_token(token)
role_name = data_utils.rand_name(name='role')
self.assertRaises(lib_exc.Unauthorized,
self.client.create_role, role_name)
self.client.auth_provider.clear_auth()
@test.attr(type=['negative'])
@test.idempotent_id('c0cde2c8-81c1-4bb0-8fe2-cf615a3547a8')
def test_role_create_duplicate(self):
# Role names should be unique
role_name = data_utils.rand_name(name='role-dup')
body = self.client.create_role(role_name)['role']
role1_id = body.get('id')
self.addCleanup(self.client.delete_role, role1_id)
self.assertRaises(lib_exc.Conflict, self.client.create_role,
role_name)
@test.attr(type=['negative'])
@test.idempotent_id('15347635-b5b1-4a87-a280-deb2bd6d865e')
def test_delete_role_by_unauthorized_user(self):
# Non-administrator user should not be able to delete role
role_name = data_utils.rand_name(name='role')
body = self.client.create_role(role_name)['role']
self.data.roles.append(body)
role_id = body.get('id')
self.assertRaises(lib_exc.Forbidden,
self.non_admin_client.delete_role, role_id)
@test.attr(type=['negative'])
@test.idempotent_id('44b60b20-70de-4dac-beaf-a3fc2650a16b')
def test_delete_role_request_without_token(self):
# Request to delete role without a valid token should fail
role_name = data_utils.rand_name(name='role')
body = self.client.create_role(role_name)['role']
self.data.roles.append(body)
role_id = body.get('id')
token = self.client.auth_provider.get_token()
self.client.delete_token(token)
self.assertRaises(lib_exc.Unauthorized,
self.client.delete_role,
role_id)
self.client.auth_provider.clear_auth()
@test.attr(type=['negative'])
@test.idempotent_id('38373691-8551-453a-b074-4260ad8298ef')
def test_delete_role_non_existent(self):
# Attempt to delete a non existent role should fail
non_existent_role = str(uuid.uuid4().hex)
self.assertRaises(lib_exc.NotFound, self.client.delete_role,
non_existent_role)
@test.attr(type=['negative'])
@test.idempotent_id('391df5cf-3ec3-46c9-bbe5-5cb58dd4dc41')
def test_assign_user_role_by_unauthorized_user(self):
# Non-administrator user should not be authorized to
# assign a role to user
(user, tenant, role) = self._get_role_params()
self.assertRaises(lib_exc.Forbidden,
self.non_admin_client.assign_user_role,
tenant['id'], user['id'], role['id'])
@test.attr(type=['negative'])
@test.idempotent_id('f0d2683c-5603-4aee-95d7-21420e87cfd8')
def test_assign_user_role_request_without_token(self):
# Request to assign a role to a user without a valid token
(user, tenant, role) = self._get_role_params()
token = self.client.auth_provider.get_token()
self.client.delete_token(token)
self.assertRaises(lib_exc.Unauthorized,
self.client.assign_user_role, tenant['id'],
user['id'], role['id'])
self.client.auth_provider.clear_auth()
@test.attr(type=['negative'])
@test.idempotent_id('99b297f6-2b5d-47c7-97a9-8b6bb4f91042')
def test_assign_user_role_for_non_existent_role(self):
# Attempt to assign a non existent role to user should fail
(user, tenant, role) = self._get_role_params()
non_existent_role = str(uuid.uuid4().hex)
self.assertRaises(lib_exc.NotFound, self.client.assign_user_role,
tenant['id'], user['id'], non_existent_role)
@test.attr(type=['negative'])
@test.idempotent_id('b2285aaa-9e76-4704-93a9-7a8acd0a6c8f')
def test_assign_user_role_for_non_existent_tenant(self):
# Attempt to assign a role on a non existent tenant should fail
(user, tenant, role) = self._get_role_params()
non_existent_tenant = str(uuid.uuid4().hex)
self.assertRaises(lib_exc.NotFound, self.client.assign_user_role,
non_existent_tenant, user['id'], role['id'])
@test.attr(type=['negative'])
@test.idempotent_id('5c3132cd-c4c8-4402-b5ea-71eb44e97793')
def test_assign_duplicate_user_role(self):
# Duplicate user role should not get assigned
(user, tenant, role) = self._get_role_params()
self.client.assign_user_role(tenant['id'], user['id'], role['id'])
self.assertRaises(lib_exc.Conflict, self.client.assign_user_role,
tenant['id'], user['id'], role['id'])
@test.attr(type=['negative'])
@test.idempotent_id('d0537987-0977-448f-a435-904c15de7298')
def test_remove_user_role_by_unauthorized_user(self):
# Non-administrator user should not be authorized to
# remove a user's role
(user, tenant, role) = self._get_role_params()
self.client.assign_user_role(tenant['id'],
user['id'],
role['id'])
self.assertRaises(lib_exc.Forbidden,
self.non_admin_client.remove_user_role,
tenant['id'], user['id'], role['id'])
@test.attr(type=['negative'])
@test.idempotent_id('cac81cf4-c1d2-47dc-90d3-f2b7eb572286')
def test_remove_user_role_request_without_token(self):
# Request to remove a user's role without a valid token
(user, tenant, role) = self._get_role_params()
self.client.assign_user_role(tenant['id'],
user['id'],
role['id'])
token = self.client.auth_provider.get_token()
self.client.delete_token(token)
self.assertRaises(lib_exc.Unauthorized,
self.client.remove_user_role, tenant['id'],
user['id'], role['id'])
self.client.auth_provider.clear_auth()
@test.attr(type=['negative'])
@test.idempotent_id('ab32d759-cd16-41f1-a86e-44405fa9f6d2')
def test_remove_user_role_non_existent_role(self):
# Attempt to delete a non existent role from a user should fail
(user, tenant, role) = self._get_role_params()
self.client.assign_user_role(tenant['id'],
user['id'],
role['id'])
non_existent_role = str(uuid.uuid4().hex)
self.assertRaises(lib_exc.NotFound, self.client.remove_user_role,
tenant['id'], user['id'], non_existent_role)
@test.attr(type=['negative'])
@test.idempotent_id('67a679ec-03dd-4551-bbfc-d1c93284f023')
def test_remove_user_role_non_existent_tenant(self):
# Attempt to remove a role from a non existent tenant should fail
(user, tenant, role) = self._get_role_params()
self.client.assign_user_role(tenant['id'],
user['id'],
role['id'])
non_existent_tenant = str(uuid.uuid4().hex)
self.assertRaises(lib_exc.NotFound, self.client.remove_user_role,
non_existent_tenant, user['id'], role['id'])
@test.attr(type=['negative'])
@test.idempotent_id('7391ab4c-06f3-477a-a64a-c8e55ce89837')
def test_list_user_roles_by_unauthorized_user(self):
# Non-administrator user should not be authorized to list
# a user's roles
(user, tenant, role) = self._get_role_params()
self.client.assign_user_role(tenant['id'], user['id'], role['id'])
self.assertRaises(lib_exc.Forbidden,
self.non_admin_client.list_user_roles, tenant['id'],
user['id'])
@test.attr(type=['negative'])
@test.idempotent_id('682adfb2-fd5f-4b0a-a9ca-322e9bebb907')
def test_list_user_roles_request_without_token(self):
# Request to list user's roles without a valid token should fail
(user, tenant, role) = self._get_role_params()
token = self.client.auth_provider.get_token()
self.client.delete_token(token)
try:
self.assertRaises(lib_exc.Unauthorized,
self.client.list_user_roles, tenant['id'],
user['id'])
finally:
self.client.auth_provider.clear_auth()
|
"""Tests for the Ambiclimate config flow."""
from unittest.mock import AsyncMock, patch
import ambiclimate
from homeassistant import data_entry_flow
from homeassistant.components.ambiclimate import config_flow
from homeassistant.config import async_process_ha_core_config
from homeassistant.const import CONF_CLIENT_ID, CONF_CLIENT_SECRET
from homeassistant.setup import async_setup_component
from homeassistant.util import aiohttp
async def init_config_flow(hass):
"""Init a configuration flow."""
await async_process_ha_core_config(
hass,
{"external_url": "https://example.com"},
)
await async_setup_component(hass, "http", {})
config_flow.register_flow_implementation(hass, "id", "secret")
flow = config_flow.AmbiclimateFlowHandler()
flow.hass = hass
return flow
async def test_abort_if_no_implementation_registered(hass):
"""Test we abort if no implementation is registered."""
flow = config_flow.AmbiclimateFlowHandler()
flow.hass = hass
result = await flow.async_step_user()
assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT
assert result["reason"] == "missing_configuration"
async def test_abort_if_already_setup(hass):
"""Test we abort if Ambiclimate is already setup."""
flow = await init_config_flow(hass)
with patch.object(hass.config_entries, "async_entries", return_value=[{}]):
result = await flow.async_step_user()
assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT
assert result["reason"] == "already_configured"
with patch.object(hass.config_entries, "async_entries", return_value=[{}]):
result = await flow.async_step_code()
assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT
assert result["reason"] == "already_configured"
async def test_full_flow_implementation(hass):
"""Test registering an implementation and finishing flow works."""
config_flow.register_flow_implementation(hass, None, None)
flow = await init_config_flow(hass)
result = await flow.async_step_user()
assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
assert result["step_id"] == "auth"
assert (
result["description_placeholders"]["cb_url"]
== "https://example.com/api/ambiclimate"
)
url = result["description_placeholders"]["authorization_url"]
assert "https://api.ambiclimate.com/oauth2/authorize" in url
assert "client_id=id" in url
assert "response_type=code" in url
assert "redirect_uri=https%3A%2F%2Fexample.com%2Fapi%2Fambiclimate" in url
with patch("ambiclimate.AmbiclimateOAuth.get_access_token", return_value="test"):
result = await flow.async_step_code("123ABC")
assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY
assert result["title"] == "Ambiclimate"
assert result["data"]["callback_url"] == "https://example.com/api/ambiclimate"
assert result["data"][CONF_CLIENT_SECRET] == "secret"
assert result["data"][CONF_CLIENT_ID] == "id"
with patch("ambiclimate.AmbiclimateOAuth.get_access_token", return_value=None):
result = await flow.async_step_code("123ABC")
assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT
with patch(
"ambiclimate.AmbiclimateOAuth.get_access_token",
side_effect=ambiclimate.AmbiclimateOauthError(),
):
result = await flow.async_step_code("123ABC")
assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT
async def test_abort_invalid_code(hass):
"""Test if no code is given to step_code."""
config_flow.register_flow_implementation(hass, None, None)
flow = await init_config_flow(hass)
with patch("ambiclimate.AmbiclimateOAuth.get_access_token", return_value=None):
result = await flow.async_step_code("invalid")
assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT
assert result["reason"] == "access_token"
async def test_already_setup(hass):
"""Test when already setup."""
config_flow.register_flow_implementation(hass, None, None)
flow = await init_config_flow(hass)
with patch.object(hass.config_entries, "async_entries", return_value=True):
result = await flow.async_step_user()
assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT
assert result["reason"] == "already_configured"
async def test_view(hass):
"""Test view."""
hass.config_entries.flow.async_init = AsyncMock()
request = aiohttp.MockRequest(
b"", query_string="code=test_code", mock_source="test"
)
request.app = {"hass": hass}
view = config_flow.AmbiclimateAuthCallbackView()
assert await view.get(request) == "OK!"
request = aiohttp.MockRequest(b"", query_string="", mock_source="test")
request.app = {"hass": hass}
view = config_flow.AmbiclimateAuthCallbackView()
assert await view.get(request) == "No code"
|
try:
import http.client as httplib
except ImportError:
import httplib
import socket
from contextlib import contextmanager
@contextmanager
def tcpip4_socket(host, port):
"""Open a TCP/IP4 socket to designated host/port."""
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
try:
s.connect((host, port))
yield s
finally:
try:
s.shutdown(socket.SHUT_RDWR)
except socket.error:
pass
except OSError:
pass
finally:
s.close()
@contextmanager
def http_connection(host, port):
"""Open an HTTP connection to designated host/port."""
c = httplib.HTTPConnection(host, port)
try:
yield c
finally:
c.close()
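# Hedged usage sketch (not part of the original module); 'example.com' and
# port 80 are placeholder values.
if __name__ == '__main__':
    with http_connection('example.com', 80) as conn:
        conn.request('GET', '/')
        print(conn.getresponse().status)
    with tcpip4_socket('example.com', 80) as sock:
        sock.sendall(b'GET / HTTP/1.0\r\n\r\n')
        print(sock.recv(4096))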
|
import re
from datetime import datetime
from tower import ugettext_lazy as _
STATUS_NULL = 0 # No review type chosen yet, add-on is incomplete.
STATUS_UNREVIEWED = 1 # Waiting for prelim review.
STATUS_PENDING = 2 # Personas (lightweight themes) waiting for review.
STATUS_NOMINATED = 3 # Waiting for full review.
STATUS_PUBLIC = 4 # Fully reviewed.
STATUS_DISABLED = 5 # Rejected (single files) or disabled by Mozilla (addons).
_STATUS_LISTED = 6 # Deprecated. See bug 616242
STATUS_BETA = 7 # Beta file, only available on fully reviewed add-ons.
STATUS_LITE = 8 # Preliminary reviewed.
STATUS_LITE_AND_NOMINATED = 9 # Preliminary reviewed, waiting for full review.
STATUS_PURGATORY = 10 # A temporary home; bug 614686
STATUS_DELETED = 11 # Add-on has been deleted.
STATUS_REJECTED = 12 # This applies only to rejected personas.
STATUS_REVIEW_PENDING = 14 # Themes queue, reviewed, needs further action.
STATUS_BLOCKED = 15
STATUS_CHOICES_ADDON = {
STATUS_NULL: _(u'Incomplete'),
STATUS_UNREVIEWED: _(u'Awaiting Preliminary Review'),
STATUS_NOMINATED: _(u'Awaiting Full Review'),
STATUS_PUBLIC: _(u'Fully Reviewed'),
STATUS_DISABLED: _(u'Disabled by Mozilla'),
STATUS_LITE: _(u'Preliminarily Reviewed'),
STATUS_LITE_AND_NOMINATED: _(
u'Preliminarily Reviewed and Awaiting Full Review'),
STATUS_DELETED: _(u'Deleted'),
}
STATUS_CHOICES_PERSONA = {
STATUS_NULL: STATUS_CHOICES_ADDON[STATUS_NULL],
STATUS_PENDING: _(u'Pending approval'),
STATUS_PUBLIC: STATUS_CHOICES_ADDON[STATUS_PUBLIC],
STATUS_DISABLED: STATUS_CHOICES_ADDON[STATUS_DISABLED],
STATUS_DELETED: STATUS_CHOICES_ADDON[STATUS_DELETED],
STATUS_REJECTED: _(u'Rejected'),
# Approved, but the developer would like to put it public when they want.
STATUS_REVIEW_PENDING: _(u'Flagged for further review'),
}
STATUS_CHOICES_FILE = {
STATUS_UNREVIEWED: _(u'Awaiting Review'),
STATUS_PUBLIC: _(u'Fully Reviewed'),
STATUS_DISABLED: _(u'Disabled by Mozilla'),
STATUS_BETA: _(u'Beta'),
STATUS_LITE: _(u'Preliminarily Reviewed'),
}
STATUS_CHOICES_API = {
STATUS_NULL: 'incomplete',
STATUS_UNREVIEWED: 'unreviewed',
STATUS_PENDING: 'pending',
STATUS_NOMINATED: 'nominated',
STATUS_PUBLIC: 'public',
STATUS_DISABLED: 'disabled',
STATUS_BETA: 'beta',
STATUS_LITE: 'lite',
STATUS_LITE_AND_NOMINATED: 'lite-nominated',
STATUS_PURGATORY: 'purgatory',
STATUS_DELETED: 'deleted',
STATUS_REJECTED: 'rejected',
STATUS_REVIEW_PENDING: 'review-pending',
STATUS_BLOCKED: 'blocked',
}
STATUS_CHOICES_API_LOOKUP = {
'incomplete': STATUS_NULL,
'unreviewed': STATUS_UNREVIEWED,
'pending': STATUS_PENDING,
'nominated': STATUS_NOMINATED,
'public': STATUS_PUBLIC,
'disabled': STATUS_DISABLED,
'beta': STATUS_BETA,
'lite': STATUS_LITE,
'lite-nominated': STATUS_LITE_AND_NOMINATED,
'purgatory': STATUS_PURGATORY,
'deleted': STATUS_DELETED,
'rejected': STATUS_REJECTED,
'review-pending': STATUS_REVIEW_PENDING,
'blocked': STATUS_BLOCKED,
}
PUBLIC_IMMEDIATELY = None
PUBLIC_WAIT = datetime.max.replace(microsecond=0)
REVIEWED_STATUSES = (STATUS_LITE, STATUS_LITE_AND_NOMINATED, STATUS_PUBLIC)
UNREVIEWED_STATUSES = (STATUS_UNREVIEWED, STATUS_PENDING, STATUS_NOMINATED,
STATUS_PURGATORY)
VALID_STATUSES = (STATUS_UNREVIEWED, STATUS_PENDING, STATUS_NOMINATED,
STATUS_PUBLIC, STATUS_BETA, STATUS_LITE,
STATUS_LITE_AND_NOMINATED, STATUS_PURGATORY)
LISTED_STATUSES = tuple(st for st in VALID_STATUSES if st != STATUS_PENDING)
UNDER_REVIEW_STATUSES = (STATUS_UNREVIEWED, STATUS_NOMINATED,
STATUS_LITE_AND_NOMINATED)
LITE_STATUSES = (STATUS_LITE, STATUS_LITE_AND_NOMINATED)
MIRROR_STATUSES = (STATUS_PUBLIC, STATUS_BETA,
STATUS_LITE, STATUS_LITE_AND_NOMINATED)
FULL_REVIEW_STATUSES = [STATUS_NOMINATED, STATUS_LITE_AND_NOMINATED,
STATUS_PUBLIC]
PRELIM_REVIEW_STATUSES = [STATUS_UNREVIEWED, STATUS_LITE]
PREMIUM_STATUSES = (STATUS_NULL,) + UNDER_REVIEW_STATUSES
ADMIN_REVIEW_FULL = 1
ADMIN_REVIEW_PRELIM = 2
ADMIN_REVIEW_TYPES = {
ADMIN_REVIEW_FULL: _(u'Full'),
ADMIN_REVIEW_PRELIM: _(u'Preliminary'),
}
AUTHOR_ROLE_VIEWER = 1
AUTHOR_ROLE_DEV = 4
AUTHOR_ROLE_OWNER = 5
AUTHOR_ROLE_SUPPORT = 6
AUTHOR_CHOICES = (
(AUTHOR_ROLE_OWNER, _(u'Owner')),
(AUTHOR_ROLE_DEV, _(u'Developer')),
(AUTHOR_ROLE_VIEWER, _(u'Viewer')),
(AUTHOR_ROLE_SUPPORT, _(u'Support')),
)
ADDON_ANY = 0
ADDON_EXTENSION = 1
ADDON_THEME = 2
ADDON_DICT = 3
ADDON_SEARCH = 4
ADDON_LPAPP = 5
ADDON_LPADDON = 6
ADDON_PLUGIN = 7
ADDON_API = 8 # not actually a type but used to identify extensions + themes
ADDON_PERSONA = 9
GROUP_TYPE_ADDON = [ADDON_EXTENSION, ADDON_DICT, ADDON_SEARCH, ADDON_LPAPP,
ADDON_LPADDON, ADDON_PLUGIN, ADDON_API]
GROUP_TYPE_THEME = [ADDON_THEME, ADDON_PERSONA]
ADDON_TYPE = {
ADDON_ANY: _(u'Any'),
ADDON_EXTENSION: _(u'Extension'),
ADDON_THEME: _(u'Complete Theme'),
ADDON_DICT: _(u'Dictionary'),
ADDON_SEARCH: _(u'Search Engine'),
ADDON_LPAPP: _(u'Language Pack (Application)'),
ADDON_LPADDON: _(u'Language Pack (Add-on)'),
ADDON_PLUGIN: _(u'Plugin'),
ADDON_PERSONA: _(u'Theme'),
}
ADDON_TYPES = {
ADDON_ANY: _(u'Any'),
ADDON_EXTENSION: _(u'Extensions'),
ADDON_THEME: _(u'Complete Themes'),
ADDON_DICT: _(u'Dictionaries'),
ADDON_SEARCH: _(u'Search Tools'),
ADDON_LPAPP: _(u'Language Packs (Application)'),
ADDON_LPADDON: _(u'Language Packs (Add-on)'),
ADDON_PLUGIN: _(u'Plugins'),
ADDON_PERSONA: _(u'Themes'),
}
ADDON_SEARCH_TYPES = [
ADDON_ANY,
ADDON_EXTENSION,
ADDON_THEME,
ADDON_DICT,
ADDON_SEARCH,
ADDON_LPAPP,
ADDON_PERSONA,
]
ADDON_ADMIN_SEARCH_TYPES = ADDON_SEARCH_TYPES + [ADDON_PLUGIN]
ADDON_ICONS = {
ADDON_ANY: 'default-addon.png',
ADDON_THEME: 'default-theme.png',
}
ADDON_SLUGS = {
ADDON_EXTENSION: 'extensions',
ADDON_THEME: 'themes',
ADDON_DICT: 'language-tools',
ADDON_LPAPP: 'language-tools',
ADDON_PERSONA: 'personas',
ADDON_SEARCH: 'search-tools',
}
ADDON_SLUGS_UPDATE = {
ADDON_EXTENSION: 'extension',
ADDON_THEME: 'theme',
ADDON_DICT: 'extension',
ADDON_SEARCH: 'search',
ADDON_LPAPP: 'item',
ADDON_LPADDON: 'extension',
ADDON_PERSONA: 'background-theme',
ADDON_PLUGIN: 'plugin',
}
ADDON_SEARCH_SLUGS = {
'any': ADDON_ANY,
'extension': ADDON_EXTENSION,
'theme': ADDON_THEME,
'dictionary': ADDON_DICT,
'search': ADDON_SEARCH,
'language': ADDON_LPAPP,
'persona': ADDON_PERSONA,
}
ADDON_FREE = 0
ADDON_PREMIUM = 1
ADDON_PREMIUM_INAPP = 2
ADDON_FREE_INAPP = 3
ADDON_OTHER_INAPP = 4
ADDON_PREMIUM_TYPES = {
ADDON_FREE: _('Free'),
ADDON_PREMIUM: _('Premium'),
ADDON_PREMIUM_INAPP: _('Premium with in-app payments'),
ADDON_FREE_INAPP: _('Free with in-app payments'),
ADDON_OTHER_INAPP: _("I'll use my own system for in-app payments")
}
ADDON_PREMIUM_API = {
ADDON_FREE: 'free',
ADDON_PREMIUM: 'premium',
ADDON_PREMIUM_INAPP: 'premium-inapp',
ADDON_FREE_INAPP: 'free-inapp',
ADDON_OTHER_INAPP: 'other',
}
ADDON_PREMIUM_API_LOOKUP = dict((v, k) for k, v in ADDON_PREMIUM_API.items())
ADDON_PREMIUMS = (ADDON_PREMIUM, ADDON_PREMIUM_INAPP)
ADDON_FREES = (ADDON_FREE, ADDON_FREE_INAPP, ADDON_OTHER_INAPP)
ADDON_INAPPS = (ADDON_PREMIUM_INAPP, ADDON_FREE_INAPP)
ADDON_BECOME_PREMIUM = (ADDON_EXTENSION, ADDON_THEME, ADDON_DICT, ADDON_LPAPP)
ADDON_HAS_PAYMENTS = (ADDON_FREE_INAPP, ADDON_PREMIUM, ADDON_PREMIUM_INAPP)
MAX_TAGS = 20
MIN_TAG_LENGTH = 2
MAX_CATEGORIES = 2
ADDON_ICON_SIZES = [32, 48, 64, 128, 256, 512]
ADDON_PREVIEW_SIZES = [(200, 150), (700, 525)]
PERSONA_IMAGE_SIZES = {
'header': [(680, 100), (3000, 200)],
'footer': [None, (3000, 100)],
'icon': [None, (32, 32)],
}
IMG_TYPES = ('image/png', 'image/jpeg', 'image/jpg')
VIDEO_TYPES = ('video/webm',)
NO_COMPAT = (ADDON_SEARCH, ADDON_PERSONA)
HAS_COMPAT = dict((t, t not in NO_COMPAT) for t in ADDON_TYPES)
CONTRIB_NONE = 0
CONTRIB_PASSIVE = 1
CONTRIB_AFTER = 2
CONTRIB_ROADBLOCK = 3
CONTRIB_CHOICES = (
(CONTRIB_PASSIVE,
_(u"Only ask on this add-on's page and developer profile")),
(CONTRIB_AFTER, _(u"Ask after users start downloading this add-on")),
(CONTRIB_ROADBLOCK, _(u"Ask before users can download this add-on")),
)
PERSONAS_ADDON_ID = 10900 # Add-on ID of the Personas Plus Add-on
PERSONAS_FIREFOX_MIN = '3.6' # First Firefox version to support Personas
PERSONAS_THUNDERBIRD_MIN = '3.1' # Ditto for Thunderbird
COLLECTION_NORMAL = 0
COLLECTION_SYNCHRONIZED = 1
COLLECTION_FEATURED = 2
COLLECTION_RECOMMENDED = 3
COLLECTION_FAVORITES = 4
COLLECTION_MOBILE = 5
COLLECTION_ANONYMOUS = 6
COLLECTIONS_NO_CONTRIB = (COLLECTION_SYNCHRONIZED, COLLECTION_FAVORITES)
COLLECTION_SPECIAL_SLUGS = {
COLLECTION_MOBILE: 'mobile',
COLLECTION_FAVORITES: 'favorites',
}
COLLECTION_CHOICES = {
COLLECTION_NORMAL: 'Normal',
COLLECTION_SYNCHRONIZED: 'Synchronized',
COLLECTION_FEATURED: 'Featured',
COLLECTION_RECOMMENDED: 'Generated Recommendations',
COLLECTION_FAVORITES: 'Favorites',
COLLECTION_MOBILE: 'Mobile',
COLLECTION_ANONYMOUS: 'Anonymous',
}
COLLECTION_SEARCH_CHOICES = [
COLLECTION_NORMAL,
COLLECTION_FEATURED,
COLLECTION_RECOMMENDED,
COLLECTION_MOBILE,
COLLECTION_ANONYMOUS,
]
COLLECTION_ROLE_PUBLISHER = 0
COLLECTION_ROLE_ADMIN = 1
COLLECTION_AUTHOR_CHOICES = {
COLLECTION_ROLE_PUBLISHER: 'Publisher',
COLLECTION_ROLE_ADMIN: 'Admin',
}
VALIDATOR_SKELETON_RESULTS = {
"errors": 0,
"warnings": 0,
"notices": 0,
"success": True,
"compatibility_summary": {"notices": 0, "errors": 0, "warnings": 0},
"signing_summary": {"high": 0, "medium": 0, "trivial": 0, "low": 0},
"metadata": {"requires_chrome": False, "listed": True},
"messages": [],
"message_tree": {},
"detected_type": "extension",
"ending_tier": 5,
}
VALIDATOR_SKELETON_EXCEPTION = {
"errors": 1,
"warnings": 0,
"notices": 0,
"success": True,
"compatibility_summary": {"notices": 0, "errors": 0, "warnings": 0},
"signing_summary": {"high": 0, "medium": 0, "trivial": 0, "low": 0},
"metadata": {"requires_chrome": False, "listed": True},
"messages": [
{"id": ["validator", "unexpected_exception"],
"message": "An unexpected error has occurred.",
"description": [
"Validation was unable to complete successfully due to an "
"unexpected error.",
"The error has been logged, but please consider filing an issue "
"report here: http://mzl.la/1DG0sFd"],
"type": "error",
"tier": 1,
"for_appversions": None,
"uid": "35432f419340461897aa8362398339c4"}
],
"message_tree": {},
"detected_type": "extension",
"ending_tier": 5,
"passed_auto_validation": False,
}
FOUNDATION_ORG = 1 # The charities.id of the Mozilla Foundation.
VERSION_BETA = re.compile(r"""(a|alpha|b|beta|pre|rc) # Either of these
(([\.-]\d)?\d*) # followed by nothing
$ # or 123 or .123 or -123
""", re.VERBOSE)
VERSION_SEARCH = re.compile(r'\.(\d+)$')
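# Illustrative examples (not part of the original module): VERSION_BETA matches
# trailing pre-release markers such as 'b3' in '1.0b3' or 'rc1' in '2.0rc1',
# while VERSION_SEARCH.search('1.0.2') captures the final numeric component '2'.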
EDITOR_VIEWING_INTERVAL = 8 # How often we ping for "who's watching?"
SITE_EVENT_OTHER = 1
SITE_EVENT_EXCEPTION = 2
SITE_EVENT_RELEASE = 3
SITE_EVENT_CHANGE = 4
SITE_EVENT_CHOICES = {
SITE_EVENT_OTHER: _('Other'),
SITE_EVENT_EXCEPTION: _('Exception'),
SITE_EVENT_RELEASE: _('Release'),
SITE_EVENT_CHANGE: _('Change'),
}
CANNED_RESPONSE_ADDON = 1
CANNED_RESPONSE_APP = 2
CANNED_RESPONSE_PERSONA = 3
CANNED_RESPONSE_CHOICES = {
CANNED_RESPONSE_ADDON: _('Add-on'),
CANNED_RESPONSE_APP: _('App'),
CANNED_RESPONSE_PERSONA: _('Persona'),
}
ADDON_ID = r"""(?P<addon_id>[^/<>"']+)"""
ADDON_UUID = r'(?P<uuid>[\w]{8}-[\w]{4}-[\w]{4}-[\w]{4}-[\w]{12})'
REVIEWED_MANUAL = 0
REVIEWED_ADDON_FULL = 10
REVIEWED_ADDON_PRELIM = 11
REVIEWED_ADDON_UPDATE = 12
REVIEWED_DICT_FULL = 20
REVIEWED_DICT_PRELIM = 21
REVIEWED_DICT_UPDATE = 22
REVIEWED_LP_FULL = 30
REVIEWED_LP_PRELIM = 31
REVIEWED_LP_UPDATE = 32
REVIEWED_OVERDUE_BONUS = 2
REVIEWED_OVERDUE_LIMIT = 7
REVIEWED_PERSONA = 40
REVIEWED_SEARCH_FULL = 50
REVIEWED_SEARCH_PRELIM = 51
REVIEWED_SEARCH_UPDATE = 52
REVIEWED_THEME_FULL = 60
REVIEWED_THEME_PRELIM = 61
REVIEWED_THEME_UPDATE = 62
REVIEWED_ADDON_REVIEW = 80
REVIEWED_ADDON_REVIEW_POORLY = 81
REVIEWED_CHOICES = {
REVIEWED_MANUAL: _('Manual Reviewer Points'),
REVIEWED_ADDON_FULL: _('Full Add-on Review'),
REVIEWED_ADDON_PRELIM: _('Preliminary Add-on Review'),
REVIEWED_ADDON_UPDATE: _('Updated Add-on Review'),
REVIEWED_DICT_FULL: _('Full Dictionary Review'),
REVIEWED_DICT_PRELIM: _('Preliminary Dictionary Review'),
REVIEWED_DICT_UPDATE: _('Updated Dictionary Review'),
REVIEWED_LP_FULL: _('Full Language Pack Review'),
REVIEWED_LP_PRELIM: _('Preliminary Language Pack Review'),
REVIEWED_LP_UPDATE: _('Updated Language Pack Review'),
REVIEWED_OVERDUE_BONUS: _('Bonus for overdue reviews'),
REVIEWED_OVERDUE_LIMIT: _('Days Before Bonus Points Applied'),
REVIEWED_PERSONA: _('Theme Review'),
REVIEWED_SEARCH_FULL: _('Full Search Provider Review'),
REVIEWED_SEARCH_PRELIM: _('Preliminary Search Provider Review'),
REVIEWED_SEARCH_UPDATE: _('Updated Search Provider Review'),
REVIEWED_THEME_FULL: _('Complete Theme Review'),
REVIEWED_THEME_PRELIM: _('Preliminary Complete Theme Review'),
REVIEWED_THEME_UPDATE: _('Updated Complete Theme Review'),
REVIEWED_ADDON_REVIEW: _('Moderated Addon Review'),
REVIEWED_ADDON_REVIEW_POORLY: _('Addon Review Moderation Reverted'),
}
REVIEWED_SCORES = {
REVIEWED_MANUAL: 0,
REVIEWED_ADDON_FULL: 120,
REVIEWED_ADDON_PRELIM: 60,
REVIEWED_ADDON_UPDATE: 80,
REVIEWED_DICT_FULL: 60,
REVIEWED_DICT_PRELIM: 20,
REVIEWED_DICT_UPDATE: 60,
REVIEWED_LP_FULL: 60,
REVIEWED_LP_PRELIM: 20,
REVIEWED_LP_UPDATE: 60,
REVIEWED_OVERDUE_BONUS: 2,
REVIEWED_OVERDUE_LIMIT: 7,
REVIEWED_PERSONA: 5,
REVIEWED_SEARCH_FULL: 30,
REVIEWED_SEARCH_PRELIM: 10,
REVIEWED_SEARCH_UPDATE: 30,
REVIEWED_THEME_FULL: 80,
REVIEWED_THEME_PRELIM: 40,
REVIEWED_THEME_UPDATE: 80,
REVIEWED_ADDON_REVIEW: 1,
REVIEWED_ADDON_REVIEW_POORLY: -1, # -REVIEWED_ADDON_REVIEW
}
REVIEWED_AMO = (
REVIEWED_ADDON_FULL,
REVIEWED_ADDON_PRELIM,
REVIEWED_ADDON_UPDATE,
REVIEWED_DICT_FULL,
REVIEWED_DICT_PRELIM,
REVIEWED_DICT_UPDATE,
REVIEWED_LP_FULL,
REVIEWED_LP_PRELIM,
REVIEWED_LP_UPDATE,
REVIEWED_SEARCH_FULL,
REVIEWED_SEARCH_PRELIM,
REVIEWED_SEARCH_UPDATE,
REVIEWED_THEME_FULL,
REVIEWED_THEME_PRELIM,
REVIEWED_THEME_UPDATE,
REVIEWED_ADDON_REVIEW,
)
REVIEWED_LEVELS = [
{'name': _('Level 1'), 'points': 2160},
{'name': _('Level 2'), 'points': 4320},
{'name': _('Level 3'), 'points': 8700},
{'name': _('Level 4'), 'points': 21000},
{'name': _('Level 5'), 'points': 45000},
{'name': _('Level 6'), 'points': 96000},
{'name': _('Level 7'), 'points': 300000},
{'name': _('Level 8'), 'points': 1200000},
{'name': _('Level 9'), 'points': 3000000},
]
UNSIGNED = 0
SIGNED_FULL = 1
SIGNED_PRELIM = 2
SIGNED_CHOICES = {
UNSIGNED: _(u'Not signed'),
SIGNED_FULL: _(u'Signed for a full review'),
    SIGNED_PRELIM: _(u'Signed for a preliminary review'),
}
|
from __future__ import absolute_import, division, print_function
from .common import Benchmark
import numpy as np
from numpy.random import RandomState
try:
from numpy.random import Generator
except ImportError:
pass
class Random(Benchmark):
params = ['normal', 'uniform', 'weibull 1', 'binomial 10 0.5',
'poisson 10']
def setup(self, name):
items = name.split()
name = items.pop(0)
params = [float(x) for x in items]
self.func = getattr(np.random, name)
self.params = tuple(params) + ((100, 100),)
def time_rng(self, name):
self.func(*self.params)
class Shuffle(Benchmark):
def setup(self):
self.a = np.arange(100000)
def time_100000(self):
np.random.shuffle(self.a)
class Randint(Benchmark):
def time_randint_fast(self):
"""Compare to uint32 below"""
np.random.randint(0, 2**30, size=10**5)
def time_randint_slow(self):
"""Compare to uint32 below"""
np.random.randint(0, 2**30 + 1, size=10**5)
class Randint_dtype(Benchmark):
high = {
'bool': 1,
'uint8': 2**7,
'uint16': 2**15,
'uint32': 2**31,
'uint64': 2**63
}
param_names = ['dtype']
params = ['bool', 'uint8', 'uint16', 'uint32', 'uint64']
def setup(self, name):
from numpy.lib import NumpyVersion
if NumpyVersion(np.__version__) < '1.11.0.dev0':
raise NotImplementedError
def time_randint_fast(self, name):
high = self.high[name]
np.random.randint(0, high, size=10**5, dtype=name)
def time_randint_slow(self, name):
high = self.high[name]
np.random.randint(0, high + 1, size=10**5, dtype=name)
class Permutation(Benchmark):
def setup(self):
self.n = 10000
self.a_1d = np.random.random(self.n)
self.a_2d = np.random.random((self.n, 2))
def time_permutation_1d(self):
np.random.permutation(self.a_1d)
def time_permutation_2d(self):
np.random.permutation(self.a_2d)
def time_permutation_int(self):
np.random.permutation(self.n)
nom_size = 100000
class RNG(Benchmark):
param_names = ['rng']
params = ['PCG64', 'MT19937', 'Philox', 'SFC64', 'numpy']
def setup(self, bitgen):
if bitgen == 'numpy':
self.rg = np.random.RandomState()
else:
self.rg = Generator(getattr(np.random, bitgen)())
self.rg.random()
self.int32info = np.iinfo(np.int32)
self.uint32info = np.iinfo(np.uint32)
self.uint64info = np.iinfo(np.uint64)
def time_raw(self, bitgen):
if bitgen == 'numpy':
self.rg.random_integers(self.int32info.max, size=nom_size)
else:
self.rg.integers(self.int32info.max, size=nom_size, endpoint=True)
def time_32bit(self, bitgen):
min, max = self.uint32info.min, self.uint32info.max
if bitgen == 'numpy':
self.rg.randint(min, max + 1, nom_size, dtype=np.uint32)
else:
self.rg.integers(min, max + 1, nom_size, dtype=np.uint32)
def time_64bit(self, bitgen):
min, max = self.uint64info.min, self.uint64info.max
if bitgen == 'numpy':
self.rg.randint(min, max + 1, nom_size, dtype=np.uint64)
else:
self.rg.integers(min, max + 1, nom_size, dtype=np.uint64)
def time_normal_zig(self, bitgen):
self.rg.standard_normal(nom_size)
class Bounded(Benchmark):
u8 = np.uint8
u16 = np.uint16
u32 = np.uint32
u64 = np.uint64
param_names = ['rng', 'dt_max']
params = [['PCG64', 'MT19937', 'Philox', 'SFC64', 'numpy'],
[[u8, 95],
[u8, 64], # Worst case for legacy
[u8, 127], # Best case for legacy
[u16, 95],
[u16, 1024], # Worst case for legacy
[u16, 1535], # Typ. avg. case for legacy
[u16, 2047], # Best case for legacy
[u32, 1024], # Worst case for legacy
[u32, 1535], # Typ. avg. case for legacy
[u32, 2047], # Best case for legacy
[u64, 95],
[u64, 1024], # Worst case for legacy
[u64, 1535], # Typ. avg. case for legacy
[u64, 2047], # Best case for legacy
]]
def setup(self, bitgen, args):
if bitgen == 'numpy':
self.rg = np.random.RandomState()
else:
self.rg = Generator(getattr(np.random, bitgen)())
self.rg.random()
def time_bounded(self, bitgen, args):
"""
        Timer for bounded integer values.
        Parameters (packed as args)
        ----------
        dt : {uint8, uint16, uint32, uint64}
output dtype
max : int
Upper bound for range. Lower is always 0. Must be <= 2**bits.
"""
dt, max = args
if bitgen == 'numpy':
self.rg.randint(0, max + 1, nom_size, dtype=dt)
else:
self.rg.integers(0, max + 1, nom_size, dtype=dt)
class Choice(Benchmark):
params = [1e3, 1e6, 1e8]
def setup(self, v):
self.a = np.arange(v)
self.rng = np.random.default_rng()
def time_legacy_choice(self, v):
np.random.choice(self.a, 1000, replace=False)
def time_choice(self, v):
self.rng.choice(self.a, 1000, replace=False)
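# Note (illustrative, not part of the original benchmarks): these classes follow
# the airspeed velocity (asv) conventions -- `setup` runs before timing and every
# `time_*` method is timed automatically -- so a single suite can be selected
# with something like `asv run --bench Random`.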
|
"""
This module defines :class:`DataObject`, the abstract base class
used by all :mod:`neo.core` classes that can contain data (i.e. are not container classes).
It contains basic functionality that is shared among all those data objects.
"""
from copy import deepcopy
import warnings
import quantities as pq
import numpy as np
from neo.core.baseneo import BaseNeo, _check_annotations
def _normalize_array_annotations(value, length):
"""Check consistency of array annotations
Recursively check that value is either an array or list containing only "simple" types
(number, string, date/time) or is a dict of those.
Args:
:value: (np.ndarray, list or dict) value to be checked for consistency
:length: (int) required length of the array annotation
Returns:
np.ndarray The array_annotations from value in correct form
Raises:
ValueError: In case value is not accepted as array_annotation(s)
"""
# First stage, resolve dict of annotations into single annotations
if isinstance(value, dict):
for key in value.keys():
if isinstance(value[key], dict):
raise ValueError("Nested dicts are not allowed as array annotations")
value[key] = _normalize_array_annotations(value[key], length)
elif value is None:
raise ValueError("Array annotations must not be None")
# If not array annotation, pass on to regular check and make it a list, that is checked again
# This covers array annotations with length 1
elif not isinstance(value, (list, np.ndarray)) or (
isinstance(value, pq.Quantity) and value.shape == ()):
_check_annotations(value)
value = _normalize_array_annotations(np.array([value]), length)
# If array annotation, check for correct length, only single dimension and allowed data
else:
# Get length that is required for array annotations, which is equal to the length
# of the object's data
own_length = length
# Escape check if empty array or list and just annotate an empty array (length 0)
# This enables the user to easily create dummy array annotations that will be filled
# with data later on
if len(value) == 0:
if not isinstance(value, np.ndarray):
value = np.ndarray((0,))
val_length = own_length
else:
# Note: len(o) also works for np.ndarray, it then uses the first dimension,
# which is exactly the desired behaviour here
val_length = len(value)
if not own_length == val_length:
raise ValueError(
"Incorrect length of array annotation: {} != {}".format(val_length, own_length))
# Local function used to check single elements of a list or an array
# They must not be lists or arrays and fit the usual annotation data types
def _check_single_elem(element):
# Nested array annotations not allowed currently
# If element is a list or a np.ndarray, it's not conform except if it's a quantity of
# length 1
if isinstance(element, list) or (isinstance(element, np.ndarray) and not (
isinstance(element, pq.Quantity) and (
element.shape == () or element.shape == (1,)))):
raise ValueError("Array annotations should only be 1-dimensional")
if isinstance(element, dict):
raise ValueError("Dictionaries are not supported as array annotations")
# Perform regular check for elements of array or list
_check_annotations(element)
# Arrays only need testing of single element to make sure the others are the same
if isinstance(value, np.ndarray):
# Type of first element is representative for all others
# Thus just performing a check on the first element is enough
# Even if it's a pq.Quantity, which can be scalar or array, this is still true
# Because a np.ndarray cannot contain scalars and sequences simultaneously
# If length of data is 0, then nothing needs to be checked
if len(value):
# Perform check on first element
_check_single_elem(value[0])
return value
# In case of list, it needs to be ensured that all data are of the same type
else:
# Conversion to numpy array makes all elements same type
# Converts elements to most general type
try:
value = np.array(value)
# Except when scalar and non-scalar values are mixed, this causes conversion to fail
except ValueError as e:
msg = str(e)
if "setting an array element with a sequence." in msg:
raise ValueError("Scalar values and arrays/lists cannot be "
"combined into a single array annotation")
else:
raise e
# If most specialized data type that possibly fits all elements is object,
# raise an Error with a telling error message, because this means the elements
# are not compatible
if value.dtype == object:
raise ValueError("Cannot convert list of incompatible types into a single"
" array annotation")
# Check the first element for correctness
# If its type is correct for annotations, all others are correct as well
            # Note: Empty lists cannot reach this point
_check_single_elem(value[0])
    return value
class DataObject(BaseNeo, pq.Quantity):
'''
This is the base class from which all objects containing data inherit
It contains common functionality for all those objects and handles array_annotations.
Common functionality that is not included in BaseNeo includes:
- duplicating with new data
- rescaling the object
- copying the object
- returning it as pq.Quantity or np.ndarray
- handling of array_annotations
Array_annotations are a kind of annotation that contains metadata for every data point,
i.e. per timestamp (in SpikeTrain, Event and Epoch) or signal channel (in AnalogSignal
and IrregularlySampledSignal).
They can contain the same data types as regular annotations, but are always represented
as numpy arrays of the same length as the number of data points of the annotated neo object.
Args:
name (str, optional): Name of the Neo object
description (str, optional): Human readable string description of the Neo object
file_origin (str, optional): Origin of the data contained in this Neo object
array_annotations (dict, optional): Dictionary containing arrays / lists which annotate
individual data points of the Neo object.
kwargs: regular annotations stored in a separate annotation dictionary
'''
def __init__(self, name=None, description=None, file_origin=None, array_annotations=None,
**annotations):
"""
This method is called by each data object and initializes the newly created object by
adding array annotations and calling __init__ of the super class, where more annotations
and attributes are processed.
"""
if not hasattr(self, 'array_annotations') or not self.array_annotations:
self.array_annotations = ArrayDict(self._get_arr_ann_length())
if array_annotations is not None:
self.array_annotate(**array_annotations)
BaseNeo.__init__(self, name=name, description=description, file_origin=file_origin,
**annotations)
def array_annotate(self, **array_annotations):
"""
Add array annotations (annotations for individual data points) as arrays to a Neo data
object.
Example:
>>> obj.array_annotate(code=['a', 'b', 'a'], category=[2, 1, 1])
>>> obj.array_annotations['code'][1]
'b'
"""
self.array_annotations.update(array_annotations)
def array_annotations_at_index(self, index):
"""
Return dictionary of array annotations at a given index or list of indices
:param index: int, list, numpy array: The index (indices) from which the annotations
are extracted
:return: dictionary of values or numpy arrays containing all array annotations
for given index/indices
Example:
>>> obj.array_annotate(code=['a', 'b', 'a'], category=[2, 1, 1])
>>> obj.array_annotations_at_index(1)
{code='b', category=1}
"""
# Taking only a part of the array annotations
# Thus not using ArrayDict here, because checks for length are not needed
index_annotations = {}
# Use what is given as an index to determine the corresponding annotations,
# if not possible, numpy raises an Error
for ann in self.array_annotations.keys():
# NO deepcopy, because someone might want to alter the actual object using this
try:
index_annotations[ann] = self.array_annotations[ann][index]
except IndexError as e:
# IndexError caused by 'dummy' array annotations should not result in failure
# Taking a slice from nothing results in nothing
if len(self.array_annotations[ann]) == 0 and not self._get_arr_ann_length() == 0:
index_annotations[ann] = self.array_annotations[ann]
else:
raise e
return index_annotations
def _merge_array_annotations(self, other):
'''
Merges array annotations of 2 different objects.
The merge happens in such a way that the result fits the merged data
In general this means concatenating the arrays from the 2 objects.
If an annotation is only present in one of the objects, it will be omitted
:return Merged array_annotations
'''
merged_array_annotations = {}
omitted_keys_self = []
# Concatenating arrays for each key
for key in self.array_annotations:
try:
value = deepcopy(self.array_annotations[key])
other_value = deepcopy(other.array_annotations[key])
# Quantities need to be rescaled to common unit
if isinstance(value, pq.Quantity):
try:
other_value = other_value.rescale(value.units)
except ValueError:
raise ValueError("Could not merge array annotations "
"due to different units")
merged_array_annotations[key] = np.append(value, other_value) * value.units
else:
merged_array_annotations[key] = np.append(value, other_value)
except KeyError:
# Save the omitted keys to be able to print them
omitted_keys_self.append(key)
continue
# Also save omitted keys from 'other'
omitted_keys_other = [key for key in other.array_annotations if
key not in self.array_annotations]
# Warn if keys were omitted
if omitted_keys_other or omitted_keys_self:
warnings.warn("The following array annotations were omitted, because they were only "
"present in one of the merged objects: {} from the one that was merged "
"into and {} from the one that was merged into the other"
"".format(omitted_keys_self, omitted_keys_other), UserWarning)
# Return the merged array_annotations
return merged_array_annotations
def rescale(self, units):
'''
Return a copy of the object converted to the specified
units
:return: Copy of self with specified units
'''
# Use simpler functionality, if nothing will be changed
dim = pq.quantity.validate_dimensionality(units)
if self.dimensionality == dim:
return self.copy()
# Rescale the object into a new object
obj = self.duplicate_with_new_data(signal=self.view(pq.Quantity).rescale(dim), units=units)
# Expected behavior is deepcopy, so deepcopying array_annotations
obj.array_annotations = deepcopy(self.array_annotations)
obj.segment = self.segment
return obj
# Needed to implement this so array annotations are copied as well, ONLY WHEN copying 1:1
def copy(self, **kwargs):
'''
Returns a shallow copy of the object
:return: Copy of self
'''
obj = super().copy(**kwargs)
obj.array_annotations = self.array_annotations
return obj
def as_array(self, units=None):
"""
Return the object's data as a plain NumPy array.
If `units` is specified, first rescale to those units.
"""
if units:
return self.rescale(units).magnitude
else:
return self.magnitude
def as_quantity(self):
"""
Return the object's data as a quantities array.
"""
return self.view(pq.Quantity)
def _get_arr_ann_length(self):
"""
Return the length of the object's data as required for array annotations
This is the last dimension of every object.
:return Required length of array annotations for this object
"""
        # Number of items is the last dimension of the data object
# This method should be overridden in case this changes
try:
length = self.shape[-1]
# Note: This is because __getitem__[int] returns a scalar Epoch/Event/SpikeTrain
# To be removed if __getitem__[int] is changed
except IndexError:
length = 1
return length
def __deepcopy__(self, memo):
"""
Create a deep copy of the data object.
All attributes and annotations are also deep copied.
References to parent objects are not kept, they are set to None.
:param memo: (dict) Objects that have been deep copied already
:return: (DataObject) Deep copy of the input DataObject
"""
cls = self.__class__
necessary_attrs = {}
# Units need to be specified explicitly for analogsignals/irregularlysampledsignals
for k in self._necessary_attrs + (('units',),):
necessary_attrs[k[0]] = getattr(self, k[0], self)
# Create object using constructor with necessary attributes
new_obj = cls(**necessary_attrs)
# Add all attributes
new_obj.__dict__.update(self.__dict__)
memo[id(self)] = new_obj
for k, v in self.__dict__.items():
# Single parent objects should not be deepcopied, because this is not expected behavior
# and leads to a lot of stuff being copied (e.g. all other children of the parent as well),
# thus creating a lot of overhead
# But keeping the reference to the same parent is not desired either, because this would be unidirectional
# When deepcopying top-down, e.g. a whole block, the links will be handled by the parent
if k in self._parent_attrs:
setattr(new_obj, k, None)
continue
try:
setattr(new_obj, k, deepcopy(v, memo))
except TypeError:
setattr(new_obj, k, v)
return new_obj
class ArrayDict(dict):
"""Dictionary subclass to handle array annotations
When setting `obj.array_annotations[key]=value`, checks for consistency
should not be bypassed.
This class overrides __setitem__ from dict to perform these checks every time.
The method used for these checks is given as an argument for __init__.
"""
def __init__(self, length, check_function=_normalize_array_annotations, *args, **kwargs):
super().__init__(*args, **kwargs)
self.check_function = check_function
self.length = length
def __setitem__(self, key, value):
# Directly call the defined function
# Need to wrap key and value in a dict in order to make sure
# that nested dicts are detected
value = self.check_function({key: value}, self.length)[key]
super().__setitem__(key, value)
# Updating the dict also needs to perform checks, so rerouting this to __setitem__
def update(self, *args, **kwargs):
if args:
if len(args) > 1:
raise TypeError("update expected at most 1 arguments, "
"got %d" % len(args))
other = dict(args[0])
for key in other:
self[key] = other[key]
for key in kwargs:
self[key] = kwargs[key]
def __reduce__(self):
return super().__reduce__()
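# Hedged usage sketch (not part of the library): ArrayDict re-routes every
# assignment through its check function, so a toy stand-in for
# _normalize_array_annotations is enough to illustrate the behaviour.
# `_toy_length_check` below is hypothetical and only mimics the call
# signature check_function(annotations_dict, expected_length).
if __name__ == "__main__":
    def _toy_length_check(ann_dict, length):
        checked = {}
        for key, value in ann_dict.items():
            value = np.asarray(value)
            if value.shape != (length,):
                raise ValueError("Incorrect length of array annotation '%s'" % key)
            checked[key] = value
        return checked

    demo = ArrayDict(3, check_function=_toy_length_check)
    demo['labels'] = ['a', 'b', 'c']   # accepted: length matches
    demo.update(ids=[1, 2, 3])         # update() is routed through __setitem__
    try:
        demo['broken'] = [1, 2]        # rejected by the check function
    except ValueError as exc:
        print("Rejected as expected:", exc)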
|
__version__ = "2.0"
__author__ = ["Richard Jones <richard@cottagelabs.com>"]
__license__ = "bsd"
|
from appengine_wrappers import urlfetch
from future import Future
class _AsyncFetchDelegate(object):
def __init__(self, rpc):
self._rpc = rpc
def Get(self):
return self._rpc.get_result()
class AppEngineUrlFetcher(object):
"""A wrapper around the App Engine urlfetch module that allows for easy
async fetches.
"""
def __init__(self, base_path):
self._base_path = base_path
def Fetch(self, url):
"""Fetches a file synchronously.
"""
if self._base_path is not None:
return urlfetch.fetch(self._base_path + '/' + url,
headers={ 'Cache-Control': 'max-age=0' })
else:
return urlfetch.fetch(url, headers={ 'Cache-Control': 'max-age=0' })
def FetchAsync(self, url):
"""Fetches a file asynchronously, and returns a Future with the result.
"""
rpc = urlfetch.create_rpc()
if self._base_path is not None:
urlfetch.make_fetch_call(rpc,
self._base_path + '/' + url,
headers={ 'Cache-Control': 'max-age=0' })
else:
urlfetch.make_fetch_call(rpc,
url,
headers={ 'Cache-Control': 'max-age=0' })
return Future(delegate=_AsyncFetchDelegate(rpc))
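# Hedged usage sketch (assumes a deployed App Engine app and that the Future
# class exposes a blocking Get() that defers to its delegate; neither is
# verified here):
#
#   fetcher = AppEngineUrlFetcher('http://example.com/_api')
#   first = fetcher.FetchAsync('extensions.json')   # both RPCs start immediately
#   second = fetcher.FetchAsync('apps.json')
#   response_a = first.Get()                        # block only when the result is needed
#   response_b = second.Get()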
|
import numpy as np
import numpy.testing as npt
from nose.tools import (assert_equal, assert_almost_equal, assert_raises,
assert_true)
from skbio.stats import subsample_counts
from skbio.diversity.alpha import lladser_pe, lladser_ci
from skbio.diversity.alpha._lladser import (
_expand_counts, _lladser_point_estimates,
_get_interval_for_r_new_otus, _lladser_ci_series, _lladser_ci_from_r)
def create_fake_observation():
"""Create a subsample with defined property"""
# Create a subsample of a larger sample such that we can compute
# the expected probability of the unseen portion.
# This is used in the tests of lladser_pe and lladser_ci
counts = np.ones(1001, dtype='int64')
counts[0] = 9000
total = counts.sum()
fake_obs = subsample_counts(counts, 1000)
exp_p = 1 - sum([x/total for (x, y) in zip(counts, fake_obs) if y > 0])
return fake_obs, exp_p
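# Note on the helper above: with 9000 of the 10000 individuals in the first
# OTU, that OTU is essentially always observed in the 1000-element subsample,
# so exp_p (the unseen probability) lands near 0.1 minus the mass of whichever
# singletons happen to be drawn.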
def test_lladser_pe():
"""lladser_pe returns point estimates within the expected variance"""
obs = lladser_pe([3], r=4)
assert_true(np.isnan(obs))
np.random.seed(123456789)
fake_obs, exp_p = create_fake_observation()
reps = 100
sum = 0
for i in range(reps):
sum += lladser_pe(fake_obs, r=30)
obs = sum / reps
# Estimator has variance of (1-p)^2/(r-2),
# which for r=30 and p ~= 0.1 is roughly 0.029
assert_almost_equal(obs, exp_p, delta=0.03)
def test_lladser_ci_nan():
"""lladser_ci returns nan if sample is too short to make an estimate"""
obs = lladser_ci([3], r=4)
assert_true(len(obs) == 2 and np.isnan(obs[0]) and np.isnan(obs[1]))
def test_lladser_ci():
"""lladser_ci estimate using defaults contains p with 95% prob"""
np.random.seed(12345678)
reps = 100
sum = 0
for i in range(reps):
fake_obs, exp_p = create_fake_observation()
(low, high) = lladser_ci(fake_obs, r=10)
if (low <= exp_p <= high):
sum += 1
assert_true(sum/reps >= 0.95)
def test_lladser_ci_f3():
"""lladser_ci estimate using f=3 contains p with 95% prob"""
# Test different values of f=3 and r=14, which lie exactly on the
# 95% interval line. For 100 reps using simple cumulative binomial
# probs we expect to have more than 5 misses of the interval in 38%
# of all test runs. To make this test pass reliably we thus have to
# set a defined seed
np.random.seed(12345678)
reps = 100
sum = 0
for i in range(reps):
# re-create the obs for every estimate, such that they are truly
# independent events
fake_obs, exp_p = create_fake_observation()
(low, high) = lladser_ci(fake_obs, r=14, f=3)
if (low <= exp_p <= high):
sum += 1
assert_true(sum/reps >= 0.95)
def test_expand_counts():
arr = np.array([2, 0, 1, 2])
npt.assert_array_equal(_expand_counts(arr), np.array([0, 0, 2, 3, 3]))
def test_lladser_point_estimates():
s = [5, 1, 5, 1, 2, 3, 1, 5, 3, 2, 5, 3]
r = 3
observed = list(_lladser_point_estimates(s, r))
assert_equal(len(observed), 3)
for k in range(3):
x = observed[k]
t = x[2]
assert_equal(x[0], (r - 1) / t)
# Estimator has variance of (1-p)^2/(r-2),
# which for r=7 and p=0.5 is 0.05
seq = "WBWBWBWBWBWBWBWBWBWBWBWBWBWBWBWBWBW"
reps = 1000
sum = 0
for i in range(reps):
p, _, _ = list(_lladser_point_estimates(seq, r=7))[0]
sum += p
assert_true(0.45 < sum / reps and sum / reps < 0.55)
def test_lladser_point_estimates_invalid_r():
with assert_raises(ValueError):
list(_lladser_point_estimates([5, 1, 5, 1, 2, 3, 1, 5, 3, 2, 5, 3], 2))
def test_get_interval_for_r_new_otus():
s = [5, 1, 5, 1, 2, 3, 1, 5, 3, 2, 5]
expected = [(3, set([5]), 4, 0),
(4, set([5, 1]), 6, 1),
(4, set([5, 1, 2]), 9, 4)]
for x, y in zip(_get_interval_for_r_new_otus(s, 2), expected):
assert_equal(x, y)
s = [5, 5, 5, 5, 5]
# never saw new one
assert_equal(list(_get_interval_for_r_new_otus(s, 2)), [])
def test_lladser_ci_series_exact():
# have seen RWB
urn_1 = 'RWBWWBWRRWRYWRPPZ'
results = list(_lladser_ci_series(urn_1, r=4))
assert_equal(len(results), 3)
def test_lladser_ci_series_random():
seq = "WBWBWBWBWBWB"
observations = []
alpha = 0.95
reps = 1000
for i in range(reps):
obs = list(_lladser_ci_series(seq, r=4, alpha=alpha))[0]
observations.append(obs)
tps = list(filter(lambda a_b: a_b[0] < 0.5 and 0.5 < a_b[1], observations))
assert_true(len(tps) >= alpha * reps) # 100%-95%
def test_lladser_ci_from_r():
f = 10
t = 10
r = 4
obs_low, obs_high = _lladser_ci_from_r(r=r, t=t, f=f)
assert_almost_equal(obs_low, 0.0806026244)
assert_almost_equal(obs_high, 0.806026244)
r = 20
t = 100
obs_low, obs_high = _lladser_ci_from_r(r=r, t=t, f=f)
assert_almost_equal(obs_low, 0.02787923964)
assert_almost_equal(obs_high, 0.2787923964)
# make sure we test with each possible alpha
alpha = 0.99
obs_low, obs_high = _lladser_ci_from_r(r=r, t=t, f=f, alpha=alpha)
assert_almost_equal(obs_low, 0.03184536992)
assert_almost_equal(obs_high, 0.3184536992)
alpha = 0.9
r = 3
obs_low, obs_high = _lladser_ci_from_r(r=r, t=t, f=f, alpha=alpha)
assert_almost_equal(obs_low, 0.005635941995)
assert_almost_equal(obs_high, 0.05635941995)
# test other ci_types
ci_type = 'ULCU'
obs_low, obs_high = _lladser_ci_from_r(r=r, t=t, f=f, alpha=alpha,
ci_type=ci_type)
assert_almost_equal(obs_low, 0.01095834700)
assert_almost_equal(obs_high, 0.1095834700)
alpha = 0.95
t = 10
ci_type = 'U'
obs_low, obs_high = _lladser_ci_from_r(r=r, t=t, f=f, alpha=alpha,
ci_type=ci_type)
assert_almost_equal(obs_low, 0)
assert_almost_equal(obs_high, 0.6295793622)
ci_type = 'L'
obs_low, obs_high = _lladser_ci_from_r(r=r, t=t, f=f, alpha=alpha,
ci_type=ci_type)
assert_almost_equal(obs_low, 0.0817691447)
assert_almost_equal(obs_high, 1)
def test_lladser_ci_from_r_invalid_input():
# unsupported alpha for ci_type='U'
with assert_raises(ValueError):
_lladser_ci_from_r(r=3, t=10, f=10, alpha=0.90, ci_type='U')
# unsupported r for ci_type='U'
with assert_raises(ValueError):
_lladser_ci_from_r(r=42, t=10, f=10, alpha=0.95, ci_type='U')
# unsupported alpha for ci_type='L'
with assert_raises(ValueError):
_lladser_ci_from_r(r=3, t=10, f=10, alpha=0.90, ci_type='L')
# unsupported r for ci_type='L'
with assert_raises(ValueError):
_lladser_ci_from_r(r=50, t=10, f=10, alpha=0.95, ci_type='L')
# unknown ci_type
with assert_raises(ValueError):
_lladser_ci_from_r(r=4, t=10, f=10, alpha=0.95, ci_type='brofist')
# requesting CI for not precomputed values
with assert_raises(ValueError):
_lladser_ci_from_r(r=500, t=10, f=10)
if __name__ == '__main__':
import nose
nose.runmodule()
|
from django.utils import translation
import pytest
from unittest.mock import Mock
from olympia import amo
from olympia.activity.models import ActivityLog
from olympia.amo import LOG
from olympia.amo.tests import addon_factory, days_ago, user_factory
from olympia.amo.tests.test_helpers import render
from olympia.devhub.templatetags import jinja_helpers
pytestmark = pytest.mark.django_db
def test_dev_page_title():
translation.activate('en-US')
request = Mock()
addon = Mock()
addon.name = 'name'
ctx = {'request': request, 'addon': addon}
title = 'Oh hai!'
s1 = render('{{ dev_page_title("%s") }}' % title, ctx)
s2 = render('{{ page_title("%s :: Developer Hub") }}' % title, ctx)
assert s1 == s2
s1 = render('{{ dev_page_title() }}', ctx)
s2 = render('{{ page_title("Developer Hub") }}', ctx)
assert s1 == s2
s1 = render('{{ dev_page_title("%s", addon) }}' % title, ctx)
s2 = render(f'{{{{ page_title("{title} :: {addon.name}") }}}}', ctx)
assert s1 == s2
def test_summarize_validation():
v = Mock()
v.errors = 1
v.warnings = 1
assert '1 error, 1 warning' == render(
'{{ summarize_validation(validation) }}', {'validation': v}
)
v.errors = 2
assert '2 errors, 1 warning' == render(
'{{ summarize_validation(validation) }}', {'validation': v}
)
v.warnings = 2
assert '2 errors, 2 warnings' == render(
'{{ summarize_validation(validation) }}', {'validation': v}
)
def test_log_action_class():
v = Mock()
for k, v in amo.LOG_BY_ID.items():
if v.action_class is not None:
cls = 'action-' + v.action_class
else:
cls = ''
assert render('{{ log_action_class(id) }}', {'id': v.id}) == cls
@pytest.mark.parametrize(
'action1,action2,action3,expected_count',
(
# Tests with Developer_Reply
(
LOG.REVIEWER_REPLY_VERSION,
LOG.DEVELOPER_REPLY_VERSION,
LOG.REVIEWER_REPLY_VERSION,
1,
),
(
LOG.REVIEWER_REPLY_VERSION,
LOG.REVIEWER_REPLY_VERSION,
LOG.DEVELOPER_REPLY_VERSION,
0,
),
# Tests with Approval
(
LOG.APPROVE_VERSION,
LOG.REVIEWER_REPLY_VERSION,
LOG.REVIEWER_REPLY_VERSION,
2,
),
(
LOG.REVIEWER_REPLY_VERSION,
LOG.APPROVE_VERSION,
LOG.REVIEWER_REPLY_VERSION,
1,
),
(
LOG.REVIEWER_REPLY_VERSION,
LOG.REVIEWER_REPLY_VERSION,
LOG.APPROVE_VERSION,
0,
),
# Tests with Rejection
(LOG.REJECT_VERSION, LOG.REVIEWER_REPLY_VERSION, LOG.REVIEWER_REPLY_VERSION, 2),
(LOG.REVIEWER_REPLY_VERSION, LOG.REJECT_VERSION, LOG.REVIEWER_REPLY_VERSION, 1),
(LOG.REVIEWER_REPLY_VERSION, LOG.REVIEWER_REPLY_VERSION, LOG.REJECT_VERSION, 0),
),
)
def test_pending_activity_log_count_for_developer(
action1, action2, action3, expected_count
):
user = user_factory()
addon = addon_factory()
version = addon.current_version
ActivityLog.create(action1, addon, version, user=user).update(created=days_ago(2))
ActivityLog.create(action2, addon, version, user=user).update(created=days_ago(1))
ActivityLog.create(action3, addon, version, user=user).update(created=days_ago(0))
count = jinja_helpers.pending_activity_log_count_for_developer(version)
assert count == expected_count
|
"""Command line tool for generating ProtoRPC definitions from descriptors."""
import errno
import logging
import optparse
import os
import sys
from protorpc import descriptor
from protorpc import generate_python
from protorpc import protobuf
from protorpc import registry
from protorpc import transport
from protorpc import util
EXCLUDED_PACKAGES = frozenset(['protorpc.registry',
'protorpc.messages',
'protorpc.descriptor',
'protorpc.message_types',
])
commands = {}
def usage():
"""Print usage help and exit with an error code."""
parser.print_help()
sys.exit(2)
def fatal_error(message):
"""Print fatal error messages exit with an error code.
Args:
message: Message to print to stderr before exit.
"""
sys.stderr.write(message)
sys.exit(1)
def open_input_file(filename):
"""Open file for reading.
Args:
filename: Name of input file to open or None to open stdin.
Returns:
Opened file if string provided, stdin if filename is None.
"""
# TODO(rafek): Detect missing or invalid files, generating user friendly
# error messages.
if filename is None:
return sys.stdin
else:
try:
return open(filename, 'rb')
except IOError as err:
fatal_error(str(err))
def generate_file_descriptor(dest_dir, file_descriptor):
"""Generate a single file descriptor to destination directory.
Will generate a single Python file from a file descriptor under dest_dir.
The sub-directory where the file is generated is determined by the package
name of descriptor.
Descriptors without package names will not be generated.
Descriptors that are part of the ProtoRPC distribution will not be generated.
Args:
dest_dir: Directory under which to generate files.
file_descriptor: FileDescriptor instance to generate source code from.
"""
package = file_descriptor.package
if not package:
# TODO(rafek): Option to cause an error on this condition.
logging.warn('Will not generate descriptor without package name')
return
if package in EXCLUDED_PACKAGES:
logging.warn('Will not generate main ProtoRPC class %s' % package)
return
package_path = package.split('.')
directory = package_path[:-1]
package_file_name = package_path[-1]
directory_name = os.path.join(dest_dir, *directory)
output_file_name = os.path.join(directory_name,
'%s.py' % (package_file_name,))
try:
os.makedirs(directory_name)
except OSError as err:
if err.errno != errno.EEXIST:
raise
output_file = open(output_file_name, 'w')
logging.info('Writing package %s to %s',
file_descriptor.package, output_file_name)
# TODO(rafek): Option to prevent overwriting.
generate_python.format_python_file(file_descriptor, output_file)
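# Worked example (illustrative values): a FileDescriptor whose package is
# 'my.api.users', generated with dest_dir '/out', ends up in
# '/out/my/api/users.py', with the intermediate directories created on demand.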
@util.positional(1)
def command(name, required=(), optional=()):
"""Decorator used for declaring commands used on command line.
Each command of this tool can have any number of sequential required
parameters and optional parameters. The required and optional parameters
will be displayed in the command usage. Arguments passed in to the command
are checked to ensure they have at least the required parameters and not
too many parameters beyond the optional ones. When there are too few
or too many parameters the usage message is generated and the program exits
with an error code.
Functions decorated thus are added to commands by their name.
Resulting decorated functions will have required and optional attributes
assigned to them so that they appear in the usage message.
Args:
name: Name of command that will follow the program name on the command line.
required: List of required parameter names as displayed in the usage
message.
optional: List of optional parameter names as displayed in the usage
message.
"""
def check_params_decorator(function):
def check_params_wrapper(options, *args):
if not (len(required) <= len(args) <= len(required) + len(optional)):
sys.stderr.write("Incorrect usage for command '%s'\n\n" % name)
usage()
function(options, *args)
check_params_wrapper.required = required
check_params_wrapper.optional = optional
commands[name] = check_params_wrapper
return check_params_wrapper
return check_params_decorator
@command('file', optional=['input-filename', 'output-filename'])
def file_command(options, input_filename=None, output_filename=None):
"""Generate a single descriptor file to Python.
Args:
options: Parsed command line options.
input_filename: File to read protobuf FileDescriptor from. If None
will read from stdin.
output_filename: File to write Python source code to. If None will
generate to stdout.
"""
with open_input_file(input_filename) as input_file:
descriptor_content = input_file.read()
if output_filename:
output_file = open(output_filename, 'w')
else:
output_file = sys.stdout
file_descriptor = protobuf.decode_message(descriptor.FileDescriptor,
descriptor_content)
generate_python.format_python_file(file_descriptor, output_file)
@command('fileset', optional=['filename'])
def fileset_command(options, input_filename=None):
"""Generate source directory structure from FileSet.
Args:
options: Parsed command line options.
input_filename: File to read protobuf FileSet from. If None will read from
stdin.
"""
with open_input_file(input_filename) as input_file:
descriptor_content = input_file.read()
dest_dir = os.path.expanduser(options.dest_dir)
if not os.path.isdir(dest_dir) and os.path.exists(dest_dir):
fatal_error("Destination '%s' is not a directory" % dest_dir)
file_set = protobuf.decode_message(descriptor.FileSet,
descriptor_content)
for file_descriptor in file_set.files:
generate_file_descriptor(dest_dir, file_descriptor)
@command('registry',
required=['host'],
optional=['service-name', 'registry-path'])
def registry_command(options,
host,
service_name=None,
registry_path='/protorpc'):
"""Generate source directory structure from remote registry service.
Args:
options: Parsed command line options.
host: Web service host where registry service is located. May include
port.
service_name: Name of specific service to read. Will generate only Python
files that service is dependent on. If None, will generate source code
for all services known by the registry.
registry_path: Path to find registry if not the default 'protorpc'.
"""
dest_dir = os.path.expanduser(options.dest_dir)
url = 'http://%s%s' % (host, registry_path)
reg = registry.RegistryService.Stub(transport.HttpTransport(url))
if service_name is None:
service_names = [service.name for service in reg.services().services]
else:
service_names = [service_name]
file_set = reg.get_file_set(names=service_names).file_set
for file_descriptor in file_set.files:
generate_file_descriptor(dest_dir, file_descriptor)
def make_opt_parser():
"""Create options parser with automatically generated command help.
Will iterate over all functions in commands and generate an appropriate
usage message for them with all their required and optional parameters.
"""
command_descriptions = []
for name in sorted(commands):
command = commands[name]
params = ' '.join(['<%s>' % param for param in command.required] +
['[<%s>]' % param for param in command.optional])
command_descriptions.append('%%prog [options] %s %s' % (name, params))
command_usage = 'usage: %s\n' % '\n '.join(command_descriptions)
parser = optparse.OptionParser(usage=command_usage)
parser.add_option('-d', '--dest_dir',
dest='dest_dir',
default=os.getcwd(),
help='Write generated files to DIR',
metavar='DIR')
return parser
parser = make_opt_parser()
def main():
# TODO(rafek): Customize verbosity.
logging.basicConfig(level=logging.INFO)
options, positional = parser.parse_args()
if not positional:
usage()
command_name = positional[0]
command = commands.get(command_name)
if not command:
sys.stderr.write("Unknown command '%s'\n\n" % command_name)
usage()
parameters = positional[1:]
command(options, *parameters)
if __name__ == '__main__':
main()
|
from ntsecuritycon import *
import win32api, win32security, winerror
def GetDomainName():
try:
tok = win32security.OpenThreadToken(win32api.GetCurrentThread(),
TOKEN_QUERY, 1)
except win32api.error as details:
if details.winerror != winerror.ERROR_NO_TOKEN:
raise
# attempt to open the process token, since no thread token
# exists
tok = win32security.OpenProcessToken(win32api.GetCurrentProcess(),
TOKEN_QUERY)
sid, attr = win32security.GetTokenInformation(tok, TokenUser)
win32api.CloseHandle(tok)
name, dom, typ = win32security.LookupAccountSid(None, sid)
return dom
if __name__=='__main__':
print("Domain name is", GetDomainName())
|
from __future__ import unicode_literals
import frappe
from frappe import _
from frappe.utils import cint
from frappe.model.naming import validate_name
@frappe.whitelist()
def rename_doc(doctype, old, new, force=False, merge=False, ignore_permissions=False):
"""
Renames a doc(dt, old) to doc(dt, new) and
updates all linked fields of type "Link"
"""
if not frappe.db.exists(doctype, old):
return
force = cint(force)
merge = cint(merge)
meta = frappe.get_meta(doctype)
# call before_rename
old_doc = frappe.get_doc(doctype, old)
out = old_doc.run_method("before_rename", old, new, merge) or {}
new = (out.get("new") or new) if isinstance(out, dict) else (out or new)
if doctype != "DocType":
new = validate_rename(doctype, new, meta, merge, force, ignore_permissions)
if not merge:
rename_parent_and_child(doctype, old, new, meta)
# update link fields' values
link_fields = get_link_fields(doctype)
update_link_field_values(link_fields, old, new, doctype)
rename_dynamic_links(doctype, old, new)
if doctype=='DocType':
rename_doctype(doctype, old, new, force)
update_comments(doctype, old, new, force)
update_attachments(doctype, old, new)
if merge:
frappe.delete_doc(doctype, old)
# call after_rename
new_doc = frappe.get_doc(doctype, new)
# copy any flags if required
new_doc._local = getattr(old_doc, "_local", None)
new_doc.run_method("after_rename", old, new, merge)
rename_versions(doctype, old, new)
# update user_permissions
frappe.db.sql("""update tabDefaultValue set defvalue=%s where parenttype='User Permission'
and defkey=%s and defvalue=%s""", (new, doctype, old))
frappe.clear_cache()
return new
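# Illustrative calls (hedged sketch; the doctype and document names are made up):
#   rename_doc("ToDo", "TODO-00001", "TODO-00002")              # plain rename
#   rename_doc("ToDo", "TODO-00001", "TODO-00002", merge=True)  # merge into an existing doc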
def update_attachments(doctype, old, new):
try:
frappe.db.sql("""update `tabFile Data` set attached_to_name=%s
where attached_to_name=%s and attached_to_doctype=%s""", (new, old, doctype))
except Exception as e:
if e.args[0]!=1054: # in patch?
raise
def rename_versions(doctype, old, new):
frappe.db.sql("""update tabVersion set docname=%s where ref_doctype=%s and docname=%s""",
(new, doctype, old))
def rename_parent_and_child(doctype, old, new, meta):
# rename the doc
frappe.db.sql("update `tab%s` set name=%s where name=%s" \
% (doctype, '%s', '%s'), (new, old))
update_child_docs(old, new, meta)
def validate_rename(doctype, new, meta, merge, force, ignore_permissions):
# using for update so that it gets locked and someone else cannot edit it while this rename is going on!
exists = frappe.db.sql("select name from `tab{doctype}` where name=%s for update".format(doctype=doctype), new)
exists = exists[0][0] if exists else None
if merge and not exists:
frappe.msgprint(_("{0} {1} does not exist, select a new target to merge").format(doctype, new), raise_exception=1)
if (not merge) and exists == new:
frappe.msgprint(_("Another {0} with name {1} exists, select another name").format(doctype, new), raise_exception=1)
if not (ignore_permissions or frappe.has_permission(doctype, "write")):
frappe.msgprint(_("You need write permission to rename"), raise_exception=1)
if not force and not meta.allow_rename:
frappe.msgprint(_("{0} not allowed to be renamed").format(_(doctype)), raise_exception=1)
# validate naming like it's done in doc.py
new = validate_name(doctype, new, merge=merge)
return new
def rename_doctype(doctype, old, new, force=False):
# change options for fieldtype Table
update_options_for_fieldtype("Table", old, new)
update_options_for_fieldtype("Link", old, new)
# change options where select options are hardcoded i.e. listed
select_fields = get_select_fields(old, new)
update_link_field_values(select_fields, old, new, doctype)
update_select_field_values(old, new)
# change parenttype for fieldtype Table
update_parenttype_values(old, new)
# rename comments
frappe.db.sql("""update tabComment set comment_doctype=%s where comment_doctype=%s""",
(new, old))
def update_comments(doctype, old, new, force=False):
frappe.db.sql("""update `tabComment` set comment_docname=%s
where comment_doctype=%s and comment_docname=%s""", (new, doctype, old))
def update_child_docs(old, new, meta):
# update "parent"
for df in meta.get_table_fields():
frappe.db.sql("update `tab%s` set parent=%s where parent=%s" \
% (df.options, '%s', '%s'), (new, old))
def update_link_field_values(link_fields, old, new, doctype):
for field in link_fields:
if field['issingle']:
frappe.db.sql("""\
update `tabSingles` set value=%s
where doctype=%s and field=%s and value=%s""",
(new, field['parent'], field['fieldname'], old))
else:
# because the table hasn't been renamed yet!
parent = field['parent'] if field['parent']!=new else old
frappe.db.sql("""\
update `tab%s` set `%s`=%s
where `%s`=%s""" \
% (parent, field['fieldname'], '%s',
field['fieldname'], '%s'),
(new, old))
def get_link_fields(doctype):
# get link fields from tabDocField
link_fields = frappe.db.sql("""\
select parent, fieldname,
(select ifnull(issingle, 0) from tabDocType dt
where dt.name = df.parent) as issingle
from tabDocField df
where
df.options=%s and df.fieldtype='Link'""", (doctype,), as_dict=1)
# get link fields from tabCustom Field
custom_link_fields = frappe.db.sql("""\
select dt as parent, fieldname,
(select ifnull(issingle, 0) from tabDocType dt
where dt.name = df.dt) as issingle
from `tabCustom Field` df
where
df.options=%s and df.fieldtype='Link'""", (doctype,), as_dict=1)
# add custom link fields list to link fields list
link_fields += custom_link_fields
# remove fields whose options have been changed using property setter
property_setter_link_fields = frappe.db.sql("""\
select ps.doc_type as parent, ps.field_name as fieldname,
(select ifnull(issingle, 0) from tabDocType dt
where dt.name = ps.doc_type) as issingle
from `tabProperty Setter` ps
where
ps.property_type='options' and
ps.field_name is not null and
ps.value=%s""", (doctype,), as_dict=1)
link_fields += property_setter_link_fields
return link_fields
def update_options_for_fieldtype(fieldtype, old, new):
frappe.db.sql("""update `tabDocField` set options=%s
where fieldtype=%s and options=%s""", (new, fieldtype, old))
frappe.db.sql("""update `tabCustom Field` set options=%s
where fieldtype=%s and options=%s""", (new, fieldtype, old))
frappe.db.sql("""update `tabProperty Setter` set value=%s
where property='options' and value=%s""", (new, old))
def get_select_fields(old, new):
"""
Get Select-type fields where the doctype's name is hardcoded in the
newline-separated options list.
"""
# get link fields from tabDocField
select_fields = frappe.db.sql("""\
select parent, fieldname,
(select ifnull(issingle, 0) from tabDocType dt
where dt.name = df.parent) as issingle
from tabDocField df
where
df.parent != %s and df.fieldtype = 'Select' and
df.options like "%%%%%s%%%%" """ \
% ('%s', old), (new,), as_dict=1)
# get link fields from tabCustom Field
custom_select_fields = frappe.db.sql("""\
select dt as parent, fieldname,
(select ifnull(issingle, 0) from tabDocType dt
where dt.name = df.dt) as issingle
from `tabCustom Field` df
where
df.dt != %s and df.fieldtype = 'Select' and
df.options like "%%%%%s%%%%" """ \
% ('%s', old), (new,), as_dict=1)
# add custom link fields list to link fields list
select_fields += custom_select_fields
# remove fields whose options have been changed using property setter
property_setter_select_fields = frappe.db.sql("""\
select ps.doc_type as parent, ps.field_name as fieldname,
(select ifnull(issingle, 0) from tabDocType dt
where dt.name = ps.doc_type) as issingle
from `tabProperty Setter` ps
where
ps.doc_type != %s and
ps.property_type='options' and
ps.field_name is not null and
ps.value like "%%%%%s%%%%" """ \
% ('%s', old), (new,), as_dict=1)
select_fields += property_setter_select_fields
return select_fields
def update_select_field_values(old, new):
frappe.db.sql("""\
update `tabDocField` set options=replace(options, %s, %s)
where
parent != %s and fieldtype = 'Select' and
(options like "%%%%\\n%s%%%%" or options like "%%%%%s\\n%%%%")""" % \
('%s', '%s', '%s', old, old), (old, new, new))
frappe.db.sql("""\
update `tabCustom Field` set options=replace(options, %s, %s)
where
dt != %s and fieldtype = 'Select' and
(options like "%%%%\\n%s%%%%" or options like "%%%%%s\\n%%%%")""" % \
('%s', '%s', '%s', old, old), (old, new, new))
frappe.db.sql("""\
update `tabProperty Setter` set value=replace(value, %s, %s)
where
doc_type != %s and field_name is not null and
property='options' and
(value like "%%%%\\n%s%%%%" or value like "%%%%%s\\n%%%%")""" % \
('%s', '%s', '%s', old, old), (old, new, new))
def update_parenttype_values(old, new):
child_doctypes = frappe.db.sql("""\
select options, fieldname from `tabDocField`
where parent=%s and fieldtype='Table'""", (new,), as_dict=1)
custom_child_doctypes = frappe.db.sql("""\
select options, fieldname from `tabCustom Field`
where dt=%s and fieldtype='Table'""", (new,), as_dict=1)
child_doctypes += custom_child_doctypes
fields = [d['fieldname'] for d in child_doctypes]
property_setter_child_doctypes = frappe.db.sql("""\
select value as options from `tabProperty Setter`
where doc_type=%s and property='options' and
field_name in ("%s")""" % ('%s', '", "'.join(fields)),
(new,))
child_doctypes += property_setter_child_doctypes
child_doctypes = (d['options'] for d in child_doctypes)
for doctype in child_doctypes:
frappe.db.sql("""\
update `tab%s` set parenttype=%s
where parenttype=%s""" % (doctype, '%s', '%s'),
(new, old))
dynamic_link_queries = [
"""select parent, fieldname, options from tabDocField where fieldtype='Dynamic Link'""",
"""select dt as parent, fieldname, options from `tabCustom Field` where fieldtype='Dynamic Link'""",
]
def rename_dynamic_links(doctype, old, new):
for query in dynamic_link_queries:
for df in frappe.db.sql(query, as_dict=True):
# dynamic link in single, just one value to check
if frappe.get_meta(df.parent).issingle:
refdoc = frappe.db.get_singles_dict(df.parent)
if refdoc.get(df.options)==doctype and refdoc.get(df.fieldname)==old:
frappe.db.sql("""update tabSingles set value=%s where
field=%s and value=%s and doctype=%s""", (new, df.fieldname, old, df.parent))
else:
# because the table hasn't been renamed yet!
parent = df.parent if df.parent != new else old
# replace for each value where renamed
for to_change in frappe.db.sql_list("""select name from `tab{parent}` where
{options}=%s and {fieldname}=%s""".format(parent=parent, options=df.options,
fieldname=df.fieldname), (doctype, old)):
frappe.db.sql("""update `tab{parent}` set {fieldname}=%s
where name=%s""".format(**df), (new, to_change))
|
"""Test result object"""
import sys
import traceback
import unittest
from StringIO import StringIO
from unittest2 import util
from unittest2.compatibility import wraps
__unittest = True
def failfast(method):
@wraps(method)
def inner(self, *args, **kw):
if getattr(self, 'failfast', False):
self.stop()
return method(self, *args, **kw)
return inner
STDOUT_LINE = '\nStdout:\n%s'
STDERR_LINE = '\nStderr:\n%s'
class TestResult(unittest.TestResult):
"""Holder for test result information.
Test results are automatically managed by the TestCase and TestSuite
classes, and do not need to be explicitly manipulated by writers of tests.
Each instance holds the total number of tests run, and collections of
failures and errors that occurred among those test runs. The collections
contain tuples of (testcase, exceptioninfo), where exceptioninfo is the
formatted traceback of the error that occurred.
"""
_previousTestClass = None
_moduleSetUpFailed = False
def __init__(self):
self.failfast = False
self.failures = []
self.errors = []
self.testsRun = 0
self.skipped = []
self.expectedFailures = []
self.unexpectedSuccesses = []
self.shouldStop = False
self.buffer = False
self._stdout_buffer = None
self._stderr_buffer = None
self._original_stdout = sys.stdout
self._original_stderr = sys.stderr
self._mirrorOutput = False
def startTest(self, test):
"Called when the given test is about to be run"
self.testsRun += 1
self._mirrorOutput = False
if self.buffer:
if self._stderr_buffer is None:
self._stderr_buffer = StringIO()
self._stdout_buffer = StringIO()
sys.stdout = self._stdout_buffer
sys.stderr = self._stderr_buffer
def startTestRun(self):
"""Called once before any tests are executed.
See startTest for a method called before each test.
"""
def stopTest(self, test):
"""Called when the given test has been run"""
if self.buffer:
if self._mirrorOutput:
output = sys.stdout.getvalue()
error = sys.stderr.getvalue()
if output:
if not output.endswith('\n'):
output += '\n'
self._original_stdout.write(STDOUT_LINE % output)
if error:
if not error.endswith('\n'):
error += '\n'
self._original_stderr.write(STDERR_LINE % error)
sys.stdout = self._original_stdout
sys.stderr = self._original_stderr
self._stdout_buffer.seek(0)
self._stdout_buffer.truncate()
self._stderr_buffer.seek(0)
self._stderr_buffer.truncate()
self._mirrorOutput = False
def stopTestRun(self):
"""Called once after all tests are executed.
See stopTest for a method called after each test.
"""
@failfast
def addError(self, test, err):
"""Called when an error has occurred. 'err' is a tuple of values as
returned by sys.exc_info().
"""
self.errors.append((test, self._exc_info_to_string(err, test)))
self._mirrorOutput = True
@failfast
def addFailure(self, test, err):
"""Called when an error has occurred. 'err' is a tuple of values as
returned by sys.exc_info()."""
self.failures.append((test, self._exc_info_to_string(err, test)))
self._mirrorOutput = True
def addSuccess(self, test):
"Called when a test has completed successfully"
pass
def addSkip(self, test, reason):
"""Called when a test is skipped."""
self.skipped.append((test, reason))
def addExpectedFailure(self, test, err):
"""Called when an expected failure/error occured."""
self.expectedFailures.append(
(test, self._exc_info_to_string(err, test)))
@failfast
def addUnexpectedSuccess(self, test):
"""Called when a test was expected to fail, but succeed."""
self.unexpectedSuccesses.append(test)
def wasSuccessful(self):
"Tells whether or not this result was a success"
return (len(self.failures) + len(self.errors) == 0)
def stop(self):
"Indicates that the tests should be aborted"
self.shouldStop = True
def _exc_info_to_string(self, err, test):
"""Converts a sys.exc_info()-style tuple of values into a string."""
exctype, value, tb = err
# Skip test runner traceback levels
while tb and self._is_relevant_tb_level(tb):
tb = tb.tb_next
if exctype is test.failureException:
# Skip assert*() traceback levels
length = self._count_relevant_tb_levels(tb)
msgLines = traceback.format_exception(exctype, value, tb, length)
else:
msgLines = traceback.format_exception(exctype, value, tb)
if self.buffer:
output = sys.stdout.getvalue()
error = sys.stderr.getvalue()
if output:
if not output.endswith('\n'):
output += '\n'
msgLines.append(STDOUT_LINE % output)
if error:
if not error.endswith('\n'):
error += '\n'
msgLines.append(STDERR_LINE % error)
return ''.join(msgLines)
def _is_relevant_tb_level(self, tb):
return '__unittest' in tb.tb_frame.f_globals
def _count_relevant_tb_levels(self, tb):
length = 0
while tb and not self._is_relevant_tb_level(tb):
length += 1
tb = tb.tb_next
return length
def __repr__(self):
return "<%s run=%i errors=%i failures=%i>" % \
(util.strclass(self.__class__), self.testsRun, len(self.errors),
len(self.failures))
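# Hedged usage sketch: a TestResult is normally driven by a runner, but it can
# be exercised directly. The TestCase subclass below is made up purely for
# illustration and is only defined when the module is run as a script.
if __name__ == '__main__':
    class _DemoTest(unittest.TestCase):
        def test_fails(self):
            self.fail('demo failure')

    result = TestResult()
    _DemoTest('test_fails').run(result)   # records one run and one failure
    print(repr(result))                   # e.g. <... run=1 errors=0 failures=1>
    print(result.wasSuccessful())         # False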
|
import inc_const as const
PJSUA = ["--null-audio", # UA0
"--null-audio", # UA1
"--null-audio" # UA2
]
PJSUA_EXPECTS = [
# A calls B
[0, "", "m"],
[0, "", "$PJSUA_URI[1]"],
[0, const.STATE_CALLING, ""],
[1, const.EVENT_INCOMING_CALL, "a"],
[1, "", "200"],
[0, const.STATE_CONFIRMED, ""],
[1, const.STATE_CONFIRMED, ""],
# B holds A
[1, "", "H"],
[0, const.MEDIA_HOLD, ""],
[1, const.MEDIA_HOLD, ""],
# B calls C
[1, "", "m"],
[1, "", "$PJSUA_URI[2]"],
[1, const.STATE_CALLING, ""],
[2, const.EVENT_INCOMING_CALL, "a"],
[2, "", "200"],
[1, const.STATE_CONFIRMED, ""],
[2, const.STATE_CONFIRMED, ""],
# B holds C
[1, "", "]"],
[1, "", "H"],
[2, const.MEDIA_HOLD, ""],
[1, const.MEDIA_HOLD, ""],
[1, "", "]"],
# B transfer A to C
[1, "", "X"],
[1, "", "1"],
[0, "Call .* is being transfered", ""],
[1, "Subscription state .* ACCEPTED", ""],
[0, const.STATE_CALLING, ""],
[2, "Call .* is being replaced", ""],
[1, "call transfered successfully", ""],
[0, const.MEDIA_ACTIVE, ""],
[2, const.MEDIA_ACTIVE, ""],
[1, const.STATE_DISCONNECTED, ""]
]
|
"""
What:
Reproduces Figure 13, third column, of Sanz-Leon P., Knock, S. A., Spiegler, A. and Jirsa V.
Mathematical framework for large-scale brain network modelling in The Virtual Brain.
Neuroimage, 2014, (in review)
Needs:
A working installation of tvb
Run:
python region_deterministic_bnm_g2d_c.py -s True -f True
#Subsequent calls can be made with:
python region_deterministic_bnm_g2d_c.py -f True
.. author:: Paula Sanz-Leon
"""
import numpy
import argparse
from tvb.simulator.lab import *
import matplotlib.pylab as pylab
pylab.rcParams['figure.figsize'] = 10, 7
pylab.rcParams.update({'font.size': 22})
pylab.rcParams.update({'axes.linewidth': 3})
parser = argparse.ArgumentParser(description='Reproduce results of Figure 13 presented in Sanz-Leon et al 2014')
parser.add_argument('-s','--sim', help='Run the simulations', default=False)
parser.add_argument('-f','--fig', help='Plot the figures', default=False)
args = vars(parser.parse_args())
idx = ['c0', 'c1', 'c2']
gcs = [0.0, 0.0042, 0.042]
simulation_length = 512
speed = 4.0
if args['sim']:
for i in range(3):
oscilator = models.Generic2dOscillator(a=0.5, b=0.6, c=-4.)
white_matter = connectivity.Connectivity(load_default=True)
white_matter.speed = numpy.array([speed])
# 0, 0.0042, 0.042
white_matter_coupling = coupling.Linear(a=gcs[i])
#Initialise an Integrator
heunint = integrators.HeunDeterministic(dt=2**-4)
#Initialise some Monitors with period in physical time
momo = monitors.Raw()
mama = monitors.TemporalAverage(period=2**-2)
#Bundle them
what_to_watch = (momo, mama)
#Initialise a Simulator -- Model, Connectivity, Integrator, and Monitors.
sim = simulator.Simulator(model = oscilator, connectivity = white_matter,
coupling = white_matter_coupling,
integrator = heunint, monitors = what_to_watch)
sim.configure()
LOG.info("Starting simulation...")
#Perform the simulation
raw_data = []
raw_time = []
tavg_data = []
tavg_time = []
for raw, tavg in sim(simulation_length=simulation_length):
if raw is not None:
raw_time.append(raw[0])
raw_data.append(raw[1])
if tavg is not None:
tavg_time.append(tavg[0])
tavg_data.append(tavg[1])
LOG.info("Finished simulation.")
#Make the lists numpy.arrays for easier use.
RAW = numpy.asarray(raw_data)
TAVG = numpy.asarray(tavg_data)
numpy.save('region_deterministic_bnm_g2d_raw_' + idx[i] + '.npy', RAW)
numpy.save('region_deterministic_bnm_g2d_tavg_' + idx[i] + '.npy', TAVG)
numpy.save('region_deterministic_bnm_g2d_rawtime_' + idx[i] + '.npy', raw_time)
numpy.save('region_deterministic_bnm_g2d_tavgtime_' + idx[i] + '.npy', tavg_time)
if args['fig']:
for i in range(3):
RAW = numpy.load('region_deterministic_bnm_g2d_raw_' + idx[i] + '.npy')
raw_time = numpy.load('region_deterministic_bnm_g2d_rawtime_' + idx[i] + '.npy')
fig=figure(1)
clf()
ax1 = subplot(1, 2, 1)
plot(raw_time, RAW[:, 0, :, 0],'k', alpha=0.042, linewidth=3)
plot(raw_time, RAW[:, 1, :, 0],'r', alpha=0.042, linewidth=3)
plot(raw_time, RAW[:, 0, :, 0].mean(axis=1), 'k', linewidth=3)
plot(raw_time, RAW[:, 1, :, 0].mean(axis=1), 'r', linewidth=3)
xlabel('time[ms]')
ylabel('[au]')
ylim([-35, 10])
xlim([0, simulation_length])
xticks((0, simulation_length /2. , simulation_length),
('0', str(int(simulation_length //2)), str(simulation_length)))
yticks((-30, 0, 5), ('-30', '0', '5'))
for label in ax1.get_yticklabels():
label.set_fontsize(24)
for label in ax1.get_xticklabels():
label.set_fontsize(24)
ax = subplot(1, 2, 2)
plot(RAW[:, 0, :, 0], RAW[:, 1, :, 0], 'b', alpha=0.042, linewidth=3)
plot(RAW[:, 0, :, 0].mean(axis=1), RAW[:, 1, :, 0].mean(axis=1), 'b', alpha=1., linewidth=3)
plot(RAW[0, 0, :, 0], RAW[0, 1, :, 0], 'bo', alpha=0.15)
ylim([-35, 10])
xlim([-3, 6])
xticks((-3, 1.5, 6), ('-3', '1.5', '6'))
yticks((-30, 0, 5), ('-30', '0', '5'))
for label in ax.get_yticklabels():
label.set_fontsize(24)
for label in ax.get_xticklabels():
label.set_fontsize(24)
ax.yaxis.set_label_position("right")
xlabel(r'$V$')
ylabel(r'$W$')
fig_name = 'G2D_default_speed_' + str(int(speed)) + '-config_gcs-' + idx[i] + '.pdf'
savefig(fig_name)
|
"""Provides :py:class:`SavedSettingsObject` implementing settings-dict
based property storage.
See Also
--------
:py:mod:`libbe.storage.util.properties` : underlying property definitions
"""
import libbe
from properties import Property, doc_property, local_property, \
defaulting_property, checked_property, fn_checked_property, \
cached_property, primed_property, change_hook_property, \
settings_property
if libbe.TESTING == True:
import doctest
import unittest
class _Token (object):
"""`Control' value class for properties.
We want values that only mean something to the `settings_object`
module.
"""
pass
class UNPRIMED (_Token):
"Property has not been primed (loaded)."
pass
class EMPTY (_Token):
"""Property has been primed but has no user-set value, so use
default/generator value.
"""
pass
def prop_save_settings(self, old, new):
"""The default action undertaken when a property changes.
"""
if self.storage != None and self.storage.is_writeable():
self.save_settings()
def prop_load_settings(self):
"""The default action undertaken when an UNPRIMED property is
accessed.
Attempt to run `.load_settings()`, which calls
`._setup_saved_settings()` internally. If `.storage` is
inaccessible, don't do anything.
"""
if self.storage != None and self.storage.is_readable():
self.load_settings()
def setting_name_to_attr_name(self, name):
"""Convert keys to the `.settings` dict into their associated
SavedSettingsObject attribute names.
Examples
--------
>>> print setting_name_to_attr_name(None,"User-id")
user_id
See Also
--------
attr_name_to_setting_name : inverse
"""
return name.lower().replace('-', '_')
def attr_name_to_setting_name(self, name):
"""Convert SavedSettingsObject attribute names to `.settings` dict
keys.
Examples:
>>> print attr_name_to_setting_name(None, "user_id")
User-id
See Also
--------
setting_name_to_attr_name : inverse
"""
return name.capitalize().replace('_', '-')
def versioned_property(name, doc,
default=None, generator=None,
change_hook=prop_save_settings,
mutable=False,
primer=prop_load_settings,
allowed=None, check_fn=None,
settings_properties=[],
required_saved_properties=[],
require_save=False):
"""Combine the common decorators in a single function.
Use zero or one (but not both) of default or generator, since a
working default will keep the generator from functioning. Use the
default if you know what you want the default value to be at
'coding time'. Use the generator if you can write a function to
determine a valid default at run time. If both default and
generator are None, then the property will be a defaulting
property which defaults to None.
allowed and check_fn have a similar relationship, although you can
use both of these if you want. allowed compares the proposed
value against a list determined at 'coding time' and check_fn
allows more flexible comparisons to take place at run time.
Set require_save to True if you want to save the default/generated
value for a property, to protect against future changes. E.g., we
currently expect all comments to be 'text/plain' but in the future
we may want to default to 'text/html'. If we don't want the old
comments to be interpreted as 'text/html', we would require that
the content type be saved.
change_hook, primer, settings_properties, and
required_saved_properties are only options to get their defaults
into our local scope. Don't mess with them.
Set mutable=True if:
* default is a mutable
* your generator function may return mutables
* you set change_hook and might have mutable property values
See the docstrings in `libbe.properties` for details on how each of
these cases are handled.
The value stored in `.settings[name]` will be
* no value (or UNPRIMED) if the property has been neither set,
nor loaded as blank.
* EMPTY if the value has been loaded as blank.
* some value if the property has been either loaded or set.
"""
settings_properties.append(name)
if require_save == True:
required_saved_properties.append(name)
def decorator(funcs):
fulldoc = doc
if default != None or generator == None:
defaulting = defaulting_property(default=default, null=EMPTY,
mutable_default=mutable)
fulldoc += "\n\nThis property defaults to %s." % default
if generator != None:
cached = cached_property(generator=generator, initVal=EMPTY,
mutable=mutable)
fulldoc += "\n\nThis property is generated with %s." % generator
if check_fn != None:
fn_checked = fn_checked_property(value_allowed_fn=check_fn)
fulldoc += "\n\nThis property is checked with %s." % check_fn
if allowed != None:
checked = checked_property(allowed=allowed)
fulldoc += "\n\nThe allowed values for this property are: %s." \
% (', '.join(allowed))
hooked = change_hook_property(hook=change_hook, mutable=mutable,
default=EMPTY)
primed = primed_property(primer=primer, initVal=UNPRIMED,
unprimeableVal=EMPTY)
settings = settings_property(name=name, null=UNPRIMED)
docp = doc_property(doc=fulldoc)
deco = hooked(primed(settings(docp(funcs))))
if default != None or generator == None:
deco = defaulting(deco)
if generator != None:
deco = cached(deco)
if check_fn != None:
deco = fn_checked(deco)
if allowed != None:
deco = checked(deco)
return Property(deco)
return decorator
class SavedSettingsObject(object):
"""Setup a framework for lazy saving and loading of `.settings`
properties.
This is useful for BE objects with saved properties
(e.g. :py:class:`~libbe.bugdir.BugDir`, :py:class:`~libbe.bug.Bug`,
:py:class:`~libbe.comment.Comment`). For example usage, consider the
unittests at the end of the module.
See Also
--------
versioned_property, prop_save_settings, prop_load_settings
setting_name_to_attr_name, attr_name_to_setting_name
"""
# Keep a list of properties that may be stored in the .settings dict.
#settings_properties = []
# A list of properties that we save to disk, even if they were
# never set (in which case we save the default value). This
# protects against future changes in default values.
#required_saved_properties = []
_setting_name_to_attr_name = setting_name_to_attr_name
_attr_name_to_setting_name = attr_name_to_setting_name
def __init__(self):
self.storage = None
self.settings = {}
def load_settings(self):
"""Load the settings from disk."""
# Override. Must call ._setup_saved_settings({}) with
# from-storage settings.
self._setup_saved_settings({})
def _setup_saved_settings(self, settings=None):
"""
Sets up a settings dict loaded from storage. Fills in
all missing settings entries with EMPTY.
"""
if settings == None:
settings = {}
for property in self.settings_properties:
if property not in self.settings \
or self.settings[property] == UNPRIMED:
if property in settings:
self.settings[property] = settings[property]
else:
self.settings[property] = EMPTY
def save_settings(self):
"""Save the settings to disk."""
# Override. Should save the dict output of ._get_saved_settings()
settings = self._get_saved_settings()
pass # write settings to disk....
def _get_saved_settings(self):
"""
In order to avoid overwriting unread on-disk data, make sure
we've loaded anything sitting on the disk. In the current
implementation, all the settings are stored in a single file,
so we need to load _all_ the saved settings. Another approach
would be per-setting saves, in which case you could skip this
step, since any setting changes would have forced that setting
load already.
"""
settings = {}
for k in self.settings_properties: # force full load
if k not in self.settings or self.settings[k] == UNPRIMED:
value = getattr(
self, self._setting_name_to_attr_name(k))
for k in self.settings_properties:
if k in self.settings and self.settings[k] != EMPTY:
settings[k] = self.settings[k]
elif k in self.required_saved_properties:
settings[k] = getattr(
self, self._setting_name_to_attr_name(k))
return settings
def clear_cached_setting(self, setting=None):
"If setting=None, clear *all* cached settings"
if setting != None:
if hasattr(self, "_%s_cached_value" % setting):
delattr(self, "_%s_cached_value" % setting)
else:
for setting in self.settings_properties:
self.clear_cached_setting(setting)
if libbe.TESTING == True:
import copy
class TestStorage (list):
def __init__(self):
list.__init__(self)
self.readable = True
self.writeable = True
def is_readable(self):
return self.readable
def is_writeable(self):
return self.writeable
class TestObject (SavedSettingsObject):
def load_settings(self):
self.load_count += 1
if len(self.storage) == 0:
settings = {}
else:
settings = copy.deepcopy(self.storage[-1])
self._setup_saved_settings(settings)
def save_settings(self):
settings = self._get_saved_settings()
self.storage.append(copy.deepcopy(settings))
def __init__(self):
SavedSettingsObject.__init__(self)
self.load_count = 0
self.storage = TestStorage()
class SavedSettingsObjectTests(unittest.TestCase):
def testSimplePropertyDoc(self):
"""Testing a minimal versioned property docstring"""
class Test (TestObject):
settings_properties = []
required_saved_properties = []
@versioned_property(
name="Content-type",
doc="A test property",
settings_properties=settings_properties,
required_saved_properties=required_saved_properties)
def content_type(): return {}
expected = "A test property\n\nThis property defaults to None."
self.failUnless(Test.content_type.__doc__ == expected,
Test.content_type.__doc__)
def testSimplePropertyFromMemory(self):
"""Testing a minimal versioned property from memory"""
class Test (TestObject):
settings_properties = []
required_saved_properties = []
@versioned_property(
name="Content-type",
doc="A test property",
settings_properties=settings_properties,
required_saved_properties=required_saved_properties)
def content_type(): return {}
t = Test()
self.failUnless(len(t.settings) == 0, len(t.settings))
# accessing t.content_type triggers the priming, but
# t.storage.is_readable() == False, so nothing happens.
t.storage.readable = False
self.failUnless(t.content_type == None, t.content_type)
self.failUnless(t.settings == {}, t.settings)
self.failUnless(len(t.settings) == 0, len(t.settings))
self.failUnless(t.content_type == None, t.content_type)
# accessing t.content_type triggers the priming again, and
# now that t.storage.is_readable() == True, this fills out
# t.settings with EMPTY data. At this point there should
# be one load and no saves.
t.storage.readable = True
self.failUnless(t.content_type == None, t.content_type)
self.failUnless(len(t.settings) == 1, len(t.settings))
self.failUnless(t.settings["Content-type"] == EMPTY,
t.settings["Content-type"])
self.failUnless(t.content_type == None, t.content_type)
self.failUnless(t.load_count == 1, t.load_count)
self.failUnless(len(t.storage) == 0, len(t.storage))
# an explicit call to load settings forces a reload,
# but nothing else changes.
t.load_settings()
self.failUnless(len(t.settings) == 1, len(t.settings))
self.failUnless(t.settings["Content-type"] == EMPTY,
t.settings["Content-type"])
self.failUnless(t.content_type == None, t.content_type)
self.failUnless(t.load_count == 2, t.load_count)
self.failUnless(len(t.storage) == 0, len(t.storage))
# now we set a value
t.content_type = 5
self.failUnless(t.settings["Content-type"] == 5,
t.settings["Content-type"])
self.failUnless(t.load_count == 2, t.load_count)
self.failUnless(len(t.storage) == 1, len(t.storage))
self.failUnless(t.storage == [{'Content-type':5}], t.storage)
# getting its value changes nothing
self.failUnless(t.content_type == 5, t.content_type)
self.failUnless(t.settings["Content-type"] == 5,
t.settings["Content-type"])
self.failUnless(t.load_count == 2, t.load_count)
self.failUnless(len(t.storage) == 1, len(t.storage))
self.failUnless(t.storage == [{'Content-type':5}], t.storage)
# now we set another value
t.content_type = "text/plain"
self.failUnless(t.content_type == "text/plain", t.content_type)
self.failUnless(t.settings["Content-type"] == "text/plain",
t.settings["Content-type"])
self.failUnless(t.load_count == 2, t.load_count)
self.failUnless(len(t.storage) == 2, len(t.storage))
self.failUnless(t.storage == [{'Content-type':5},
{'Content-type':'text/plain'}],
t.storage)
# t._get_saved_settings() returns a dict of required or
# non-default values.
self.failUnless(t._get_saved_settings() == \
{"Content-type":"text/plain"},
t._get_saved_settings())
# now we clear to the post-primed value
t.content_type = EMPTY
self.failUnless(t.settings["Content-type"] == EMPTY,
t.settings["Content-type"])
self.failUnless(t.content_type == None, t.content_type)
self.failUnless(len(t.settings) == 1, len(t.settings))
self.failUnless(t.settings["Content-type"] == EMPTY,
t.settings["Content-type"])
self.failUnless(t._get_saved_settings() == {},
t._get_saved_settings())
self.failUnless(t.storage == [{'Content-type':5},
{'Content-type':'text/plain'},
{}],
t.storage)
def testSimplePropertyFromStorage(self):
"""Testing a minimal versioned property from storage"""
class Test (TestObject):
settings_properties = []
required_saved_properties = []
@versioned_property(
name="prop-a",
doc="A test property",
settings_properties=settings_properties,
required_saved_properties=required_saved_properties)
def prop_a(): return {}
@versioned_property(
name="prop-b",
doc="Another test property",
settings_properties=settings_properties,
required_saved_properties=required_saved_properties)
def prop_b(): return {}
t = Test()
t.storage.append({'prop-a':'saved'})
# setting prop-b forces a load (to check for changes),
# which also pulls in prop-a.
t.prop_b = 'new-b'
settings = {'prop-b':'new-b', 'prop-a':'saved'}
self.failUnless(t.settings == settings, t.settings)
self.failUnless(t._get_saved_settings() == settings,
t._get_saved_settings())
# test that _get_saved_settings() works even when settings
# were _not_ loaded beforehand
t = Test()
t.storage.append({'prop-a':'saved'})
settings ={'prop-a':'saved'}
self.failUnless(t.settings == {}, t.settings)
self.failUnless(t._get_saved_settings() == settings,
t._get_saved_settings())
def testSimplePropertySetStorageSave(self):
"""Set a property, then attach storage and save"""
class Test (TestObject):
settings_properties = []
required_saved_properties = []
@versioned_property(
name="prop-a",
doc="A test property",
settings_properties=settings_properties,
required_saved_properties=required_saved_properties)
def prop_a(): return {}
@versioned_property(
name="prop-b",
doc="Another test property",
settings_properties=settings_properties,
required_saved_properties=required_saved_properties)
def prop_b(): return {}
t = Test()
storage = t.storage
t.storage = None
t.prop_a = 'text/html'
t.storage = storage
t.save_settings()
self.failUnless(t.prop_a == 'text/html', t.prop_a)
self.failUnless(t.settings == {'prop-a':'text/html',
'prop-b':EMPTY},
t.settings)
self.failUnless(t.load_count == 1, t.load_count)
self.failUnless(len(t.storage) == 1, len(t.storage))
self.failUnless(t.storage == [{'prop-a':'text/html'}],
t.storage)
def testDefaultingProperty(self):
"""Testing a defaulting versioned property"""
class Test (TestObject):
settings_properties = []
required_saved_properties = []
@versioned_property(
name="Content-type",
doc="A test property",
default="text/plain",
settings_properties=settings_properties,
required_saved_properties=required_saved_properties)
def content_type(): return {}
t = Test()
self.failUnless(t.settings == {}, t.settings)
self.failUnless(t.content_type == "text/plain", t.content_type)
self.failUnless(t.settings == {"Content-type":EMPTY},
t.settings)
self.failUnless(t.load_count == 1, t.load_count)
self.failUnless(len(t.storage) == 0, len(t.storage))
self.failUnless(t._get_saved_settings() == {},
t._get_saved_settings())
t.content_type = "text/html"
self.failUnless(t.content_type == "text/html",
t.content_type)
self.failUnless(t.settings == {"Content-type":"text/html"},
t.settings)
self.failUnless(t.load_count == 1, t.load_count)
self.failUnless(len(t.storage) == 1, len(t.storage))
self.failUnless(t.storage == [{'Content-type':'text/html'}],
t.storage)
self.failUnless(t._get_saved_settings() == \
{"Content-type":"text/html"},
t._get_saved_settings())
def testRequiredDefaultingProperty(self):
"""Testing a required defaulting versioned property"""
class Test (TestObject):
settings_properties = []
required_saved_properties = []
@versioned_property(
name="Content-type",
doc="A test property",
default="text/plain",
settings_properties=settings_properties,
required_saved_properties=required_saved_properties,
require_save=True)
def content_type(): return {}
t = Test()
self.failUnless(t.settings == {}, t.settings)
self.failUnless(t.content_type == "text/plain", t.content_type)
self.failUnless(t.settings == {"Content-type":EMPTY},
t.settings)
self.failUnless(t.load_count == 1, t.load_count)
self.failUnless(len(t.storage) == 0, len(t.storage))
self.failUnless(t._get_saved_settings() == \
{"Content-type":"text/plain"},
t._get_saved_settings())
t.content_type = "text/html"
self.failUnless(t.content_type == "text/html",
t.content_type)
self.failUnless(t.settings == {"Content-type":"text/html"},
t.settings)
self.failUnless(t.load_count == 1, t.load_count)
self.failUnless(len(t.storage) == 1, len(t.storage))
self.failUnless(t.storage == [{'Content-type':'text/html'}],
t.storage)
self.failUnless(t._get_saved_settings() == \
{"Content-type":"text/html"},
t._get_saved_settings())
def testClassVersionedPropertyDefinition(self):
"""Testing a class-specific _versioned property decorator"""
class Test (TestObject):
settings_properties = []
required_saved_properties = []
def _versioned_property(
settings_properties=settings_properties,
required_saved_properties=required_saved_properties,
**kwargs):
if "settings_properties" not in kwargs:
kwargs["settings_properties"] = settings_properties
if "required_saved_properties" not in kwargs:
kwargs["required_saved_properties"] = \
required_saved_properties
return versioned_property(**kwargs)
@_versioned_property(name="Content-type",
doc="A test property",
default="text/plain",
require_save=True)
def content_type(): return {}
t = Test()
self.failUnless(t._get_saved_settings() == \
{"Content-type":"text/plain"},
t._get_saved_settings())
self.failUnless(t.load_count == 1, t.load_count)
self.failUnless(len(t.storage) == 0, len(t.storage))
t.content_type = "text/html"
self.failUnless(t._get_saved_settings() == \
{"Content-type":"text/html"},
t._get_saved_settings())
self.failUnless(t.load_count == 1, t.load_count)
self.failUnless(len(t.storage) == 1, len(t.storage))
self.failUnless(t.storage == [{'Content-type':'text/html'}],
t.storage)
def testMutableChangeHookedProperty(self):
"""Testing a mutable change-hooked property"""
class Test (TestObject):
settings_properties = []
required_saved_properties = []
@versioned_property(
name="List-type",
doc="A test property",
mutable=True,
change_hook=prop_save_settings,
settings_properties=settings_properties,
required_saved_properties=required_saved_properties)
def list_type(): return {}
t = Test()
self.failUnless(len(t.storage) == 0, len(t.storage))
self.failUnless(t.list_type == None, t.list_type)
self.failUnless(len(t.storage) == 0, len(t.storage))
self.failUnless(t.settings["List-type"]==EMPTY,
t.settings["List-type"])
t.list_type = []
self.failUnless(t.settings["List-type"] == [],
t.settings["List-type"])
self.failUnless(len(t.storage) == 1, len(t.storage))
self.failUnless(t.storage == [{'List-type':[]}],
t.storage)
t.list_type.append(5) # external modification not detected yet
self.failUnless(len(t.storage) == 1, len(t.storage))
self.failUnless(t.storage == [{'List-type':[]}],
t.storage)
self.failUnless(t.settings["List-type"] == [5],
t.settings["List-type"])
        self.failUnless(t.list_type == [5], t.list_type)  # get triggers save
self.failUnless(len(t.storage) == 2, len(t.storage))
self.failUnless(t.storage == [{'List-type':[]},
{'List-type':[5]}],
t.storage)
unitsuite = unittest.TestLoader().loadTestsFromTestCase( \
SavedSettingsObjectTests)
suite = unittest.TestSuite([unitsuite, doctest.DocTestSuite()])
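# Hedged sketch, not part of the original test module: a conventional
# entry point so the combined unit-test / doctest suite above can be run
# directly with "python <thisfile>.py".  Uses the standard library
# TextTestRunner; the verbosity level is an arbitrary choice.
if __name__ == '__main__':
    unittest.TextTestRunner(verbosity=2).run(suite)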
|
"""Various tools used by MIME-reading or MIME-writing programs."""
import os
import rfc822
import tempfile
__all__ = ["Message","choose_boundary","encode","decode","copyliteral",
"copybinary"]
class Message(rfc822.Message):
"""A derived class of rfc822.Message that knows about MIME headers and
contains some hooks for decoding encoded and multipart messages."""
def __init__(self, fp, seekable = 1):
rfc822.Message.__init__(self, fp, seekable)
self.encodingheader = \
self.getheader('content-transfer-encoding')
self.typeheader = \
self.getheader('content-type')
self.parsetype()
self.parseplist()
def parsetype(self):
str = self.typeheader
if str is None:
str = 'text/plain'
if ';' in str:
i = str.index(';')
self.plisttext = str[i:]
str = str[:i]
else:
self.plisttext = ''
fields = str.split('/')
for i in range(len(fields)):
fields[i] = fields[i].strip().lower()
self.type = '/'.join(fields)
self.maintype = fields[0]
self.subtype = '/'.join(fields[1:])
def parseplist(self):
str = self.plisttext
self.plist = []
while str[:1] == ';':
str = str[1:]
if ';' in str:
# XXX Should parse quotes!
end = str.index(';')
else:
end = len(str)
f = str[:end]
if '=' in f:
i = f.index('=')
f = f[:i].strip().lower() + \
'=' + f[i+1:].strip()
self.plist.append(f.strip())
str = str[end:]
def getplist(self):
return self.plist
def getparam(self, name):
name = name.lower() + '='
n = len(name)
for p in self.plist:
if p[:n] == name:
return rfc822.unquote(p[n:])
return None
def getparamnames(self):
result = []
for p in self.plist:
i = p.find('=')
if i >= 0:
result.append(p[:i].lower())
return result
def getencoding(self):
if self.encodingheader is None:
return '7bit'
return self.encodingheader.lower()
def gettype(self):
return self.type
def getmaintype(self):
return self.maintype
def getsubtype(self):
return self.subtype
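# Illustrative sketch, not part of the original module: how the Message
# class above might be fed a header block from an in-memory buffer.  The
# helper name and the sample headers are invented for this example, and
# Python 2's StringIO module is assumed.
def _example_parse_headers():
    from StringIO import StringIO
    fp = StringIO('Content-Type: text/html; charset="utf-8"\n'
                  'Content-Transfer-Encoding: base64\n'
                  '\n')
    m = Message(fp)
    # gettype() -> 'text/html', getparam('charset') -> 'utf-8',
    # getencoding() -> 'base64'
    return m.gettype(), m.getparam('charset'), m.getencoding()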
_prefix = None
def choose_boundary():
"""Return a random string usable as a multipart boundary.
    The method used makes it *very* unlikely that the same
    string of characters will ever occur again in the Universe,
so the caller needn't check the data it is packing for the
occurrence of the boundary.
The boundary contains dots so you have to quote it in the header."""
global _prefix
import time
import random
if _prefix is None:
import socket
import os
hostid = socket.gethostbyname(socket.gethostname())
try:
uid = `os.getuid()`
except:
uid = '1'
try:
pid = `os.getpid()`
except:
pid = '1'
_prefix = hostid + '.' + uid + '.' + pid
timestamp = '%.3f' % time.time()
seed = `random.randint(0, 32767)`
return _prefix + '.' + timestamp + '.' + seed
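# Illustrative sketch, not part of the original module: the boundary
# returned by choose_boundary() contains dots, so (as the docstring notes)
# it must be quoted when placed in a Content-Type header.  The helper
# name is invented for this example.
def _example_multipart_content_type():
    boundary = choose_boundary()
    return 'Content-Type: multipart/mixed; boundary="%s"' % boundary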
def decode(input, output, encoding):
"""Decode common content-transfer-encodings (base64, quopri, uuencode)."""
if encoding == 'base64':
import base64
return base64.decode(input, output)
if encoding == 'quoted-printable':
import quopri
return quopri.decode(input, output)
if encoding in ('uuencode', 'x-uuencode', 'uue', 'x-uue'):
import uu
return uu.decode(input, output)
if encoding in ('7bit', '8bit'):
return output.write(input.read())
if decodetab.has_key(encoding):
pipethrough(input, decodetab[encoding], output)
else:
raise ValueError, \
'unknown Content-Transfer-Encoding: %s' % encoding
def encode(input, output, encoding):
"""Encode common content-transfer-encodings (base64, quopri, uuencode)."""
if encoding == 'base64':
import base64
return base64.encode(input, output)
if encoding == 'quoted-printable':
import quopri
return quopri.encode(input, output, 0)
if encoding in ('uuencode', 'x-uuencode', 'uue', 'x-uue'):
import uu
return uu.encode(input, output)
if encoding in ('7bit', '8bit'):
return output.write(input.read())
if encodetab.has_key(encoding):
pipethrough(input, encodetab[encoding], output)
else:
raise ValueError, \
'unknown Content-Transfer-Encoding: %s' % encoding
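# Illustrative sketch, not part of the original module: a base64 round
# trip through encode() and decode() using in-memory buffers.  Assumes
# Python 2's StringIO; the helper name is invented for this example.
def _example_base64_roundtrip():
    from StringIO import StringIO
    plain, encoded, decoded = StringIO('hello world'), StringIO(), StringIO()
    encode(plain, encoded, 'base64')
    encoded.seek(0)
    decode(encoded, decoded, 'base64')
    return decoded.getvalue()   # 'hello world'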
uudecode_pipe = '''(
TEMP=/tmp/@uu.$$
sed "s%^begin [0-7][0-7]* .*%begin 600 $TEMP%" | uudecode
cat $TEMP
rm $TEMP
)'''
decodetab = {
'uuencode': uudecode_pipe,
'x-uuencode': uudecode_pipe,
'uue': uudecode_pipe,
'x-uue': uudecode_pipe,
'quoted-printable': 'mmencode -u -q',
'base64': 'mmencode -u -b',
}
encodetab = {
'x-uuencode': 'uuencode tempfile',
'uuencode': 'uuencode tempfile',
'x-uue': 'uuencode tempfile',
'uue': 'uuencode tempfile',
'quoted-printable': 'mmencode -q',
'base64': 'mmencode -b',
}
def pipeto(input, command):
pipe = os.popen(command, 'w')
copyliteral(input, pipe)
pipe.close()
def pipethrough(input, command, output):
tempname = tempfile.mktemp()
temp = open(tempname, 'w')
copyliteral(input, temp)
temp.close()
pipe = os.popen(command + ' <' + tempname, 'r')
copybinary(pipe, output)
pipe.close()
os.unlink(tempname)
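# Illustrative sketch, not part of the original module: pipethrough()
# spools its input to a temporary file and streams it through a shell
# command.  'cat' stands in here for the mmencode/uudecode pipelines in
# the tables above; a Unix-like shell is assumed, and the helper name is
# invented for this example.
def _example_pipethrough():
    from StringIO import StringIO
    src, dst = StringIO('pass-through data\n'), StringIO()
    pipethrough(src, 'cat', dst)
    return dst.getvalue()   # 'pass-through data\n'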
def copyliteral(input, output):
while 1:
line = input.readline()
if not line: break
output.write(line)
def copybinary(input, output):
BUFSIZE = 8192
while 1:
line = input.read(BUFSIZE)
if not line: break
output.write(line)
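# Illustrative sketch, not part of the original module: copyliteral()
# copies line by line while copybinary() copies in fixed-size blocks;
# both run until EOF.  Assumes Python 2's StringIO; the helper name is
# invented for this example.
def _example_copy():
    from StringIO import StringIO
    src, dst = StringIO('line one\nline two\n'), StringIO()
    copyliteral(src, dst)
    return dst.getvalue()   # 'line one\nline two\n'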
|