repo_name stringlengths 5 100 | path stringlengths 4 375 | copies stringclasses 991 values | size stringlengths 4 7 | content stringlengths 666 1M | license stringclasses 15 values |
|---|---|---|---|---|---|
40223148/finaltest | static/Brython3.1.1-20150328-091302/Lib/_thread.py | 740 | 4879 | """Drop-in replacement for the thread module.
Meant to be used as a brain-dead substitute so that threaded code does
not need to be rewritten for when the thread module is not present.
Suggested usage is::
try:
import _thread
except ImportError:
import _dummy_thread as _thread
"""
# Exports only things specified by thread documentation;
# skipping obsolete synonyms allocate(), start_new(), exit_thread().
__all__ = ['error', 'start_new_thread', 'exit', 'get_ident', 'allocate_lock',
'interrupt_main', 'LockType']
# A dummy value
TIMEOUT_MAX = 2**31
# NOTE: this module can be imported early in the extension building process,
# and so top level imports of other modules should be avoided. Instead, all
# imports are done when needed on a function-by-function basis. Since threads
# are disabled, the import lock should not be an issue anyway (??).
error = RuntimeError
def start_new_thread(function, args, kwargs={}):
"""Dummy implementation of _thread.start_new_thread().
Compatibility is maintained by making sure that ``args`` is a
tuple and ``kwargs`` is a dictionary. If an exception is raised
and it is SystemExit (which can be done by _thread.exit()) it is
caught and nothing is done; all other exceptions are printed out
by using traceback.print_exc().
If the executed function calls interrupt_main the KeyboardInterrupt will be
raised when the function returns.
"""
if type(args) != type(tuple()):
raise TypeError("2nd arg must be a tuple")
if type(kwargs) != type(dict()):
raise TypeError("3rd arg must be a dict")
global _main
_main = False
try:
function(*args, **kwargs)
except SystemExit:
pass
except:
import traceback
traceback.print_exc()
_main = True
global _interrupt
if _interrupt:
_interrupt = False
raise KeyboardInterrupt
def exit():
"""Dummy implementation of _thread.exit()."""
raise SystemExit
def get_ident():
"""Dummy implementation of _thread.get_ident().
Since this module should only be used when _threadmodule is not
available, it is safe to assume that the current process is the
only thread. Thus a constant can be safely returned.
"""
return -1
def allocate_lock():
"""Dummy implementation of _thread.allocate_lock()."""
return LockType()
def stack_size(size=None):
"""Dummy implementation of _thread.stack_size()."""
if size is not None:
raise error("setting thread stack size not supported")
return 0
class LockType(object):
"""Class implementing dummy implementation of _thread.LockType.
Compatibility is maintained by maintaining self.locked_status
which is a boolean that stores the state of the lock. Pickling of
the lock, though, should not be done since if the _thread module is
then used with an unpickled ``lock()`` from here problems could
occur from this class not having atomic methods.
"""
def __init__(self):
self.locked_status = False
def acquire(self, waitflag=None, timeout=-1):
"""Dummy implementation of acquire().
For blocking calls, self.locked_status is automatically set to
True and returned appropriately based on value of
``waitflag``. If it is non-blocking, then the value is
actually checked and not set if it is already acquired. This
is all done so that threading.Condition's assert statements
aren't triggered and throw a little fit.
"""
if waitflag is None or waitflag:
self.locked_status = True
return True
else:
if not self.locked_status:
self.locked_status = True
return True
else:
if timeout > 0:
import time
time.sleep(timeout)
return False
__enter__ = acquire
def __exit__(self, typ, val, tb):
self.release()
def release(self):
"""Release the dummy lock."""
# XXX Perhaps shouldn't actually bother to test? Could lead
# to problems for complex, threaded code.
if not self.locked_status:
raise error
self.locked_status = False
return True
def locked(self):
return self.locked_status
# Used to signal that interrupt_main was called in a "thread"
_interrupt = False
# True when not executing in a "thread"
_main = True
def interrupt_main():
"""Set _interrupt flag to True to have start_new_thread raise
KeyboardInterrupt upon exiting."""
if _main:
raise KeyboardInterrupt
else:
global _interrupt
_interrupt = True
# Brython-specific to avoid circular references between threading and _threading_local
class _local:
pass | gpl-3.0 |
serensoner/CouchPotatoServer | libs/suds/builder.py | 197 | 4220 | # This program is free software; you can redistribute it and/or modify
# it under the terms of the (LGPL) GNU Lesser General Public License as
# published by the Free Software Foundation; either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Library Lesser General Public License for more details at
# ( http://www.gnu.org/licenses/lgpl.html ).
#
# You should have received a copy of the GNU Lesser General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
# written by: Jeff Ortel ( jortel@redhat.com )
"""
The I{builder} module provides an wsdl/xsd defined types factory
"""
from logging import getLogger
from suds import *
from suds.sudsobject import Factory
log = getLogger(__name__)
class Builder:
""" Builder used to construct an object for types defined in the schema """
def __init__(self, resolver):
"""
@param resolver: A schema object name resolver.
@type resolver: L{resolver.Resolver}
"""
self.resolver = resolver
def build(self, name):
""" build a an object for the specified typename as defined in the schema """
if isinstance(name, basestring):
type = self.resolver.find(name)
if type is None:
raise TypeNotFound(name)
else:
type = name
cls = type.name
if type.mixed():
data = Factory.property(cls)
else:
data = Factory.object(cls)
resolved = type.resolve()
md = data.__metadata__
md.sxtype = resolved
md.ordering = self.ordering(resolved)
history = []
self.add_attributes(data, resolved)
for child, ancestry in type.children():
if self.skip_child(child, ancestry):
continue
self.process(data, child, history[:])
return data
def process(self, data, type, history):
""" process the specified type then process its children """
if type in history:
return
if type.enum():
return
history.append(type)
resolved = type.resolve()
value = None
if type.unbounded():
value = []
else:
if len(resolved) > 0:
if resolved.mixed():
value = Factory.property(resolved.name)
md = value.__metadata__
md.sxtype = resolved
else:
value = Factory.object(resolved.name)
md = value.__metadata__
md.sxtype = resolved
md.ordering = self.ordering(resolved)
setattr(data, type.name, value)
if value is not None:
data = value
if not isinstance(data, list):
self.add_attributes(data, resolved)
for child, ancestry in resolved.children():
if self.skip_child(child, ancestry):
continue
self.process(data, child, history[:])
def add_attributes(self, data, type):
""" add required attributes """
for attr, ancestry in type.attributes():
name = '_%s' % attr.name
value = attr.get_default()
setattr(data, name, value)
def skip_child(self, child, ancestry):
""" get whether or not to skip the specified child """
if child.any(): return True
for x in ancestry:
if x.choice():
return True
return False
def ordering(self, type):
""" get the ordering """
result = []
for child, ancestry in type.resolve():
name = child.name
if child.name is None:
continue
if child.isattr():
name = '_%s' % child.name
result.append(name)
return result
| gpl-3.0 |
chaffra/sympy | sympy/integrals/tests/test_risch.py | 31 | 33684 | """Most of these tests come from the examples in Bronstein's book."""
from sympy import (Poly, I, S, Function, log, symbols, exp, tan, sqrt,
Symbol, Lambda, sin, Eq, Piecewise, factor)
from sympy.integrals.risch import (gcdex_diophantine, frac_in, as_poly_1t,
derivation, splitfactor, splitfactor_sqf, canonical_representation,
hermite_reduce, polynomial_reduce, residue_reduce, residue_reduce_to_basic,
integrate_primitive, integrate_hyperexponential_polynomial,
integrate_hyperexponential, integrate_hypertangent_polynomial,
integrate_nonlinear_no_specials, integer_powers, DifferentialExtension,
risch_integrate, DecrementLevel, NonElementaryIntegral, recognize_log_derivative,
recognize_derivative, laurent_series)
from sympy.utilities.pytest import raises
from sympy.abc import x, t, nu, z, a, y
t0, t1, t2 = symbols('t:3')
i = Symbol('i')
def test_gcdex_diophantine():
assert gcdex_diophantine(Poly(x**4 - 2*x**3 - 6*x**2 + 12*x + 15),
Poly(x**3 + x**2 - 4*x - 4), Poly(x**2 - 1)) == \
(Poly((-x**2 + 4*x - 3)/5), Poly((x**3 - 7*x**2 + 16*x - 10)/5))
def test_frac_in():
assert frac_in(Poly((x + 1)/x*t, t), x) == \
(Poly(t*x + t, x), Poly(x, x))
assert frac_in((x + 1)/x*t, x) == \
(Poly(t*x + t, x), Poly(x, x))
assert frac_in((Poly((x + 1)/x*t, t), Poly(t + 1, t)), x) == \
(Poly(t*x + t, x), Poly((1 + t)*x, x))
raises(ValueError, lambda: frac_in((x + 1)/log(x)*t, x))
assert frac_in(Poly((2 + 2*x + x*(1 + x))/(1 + x)**2, t), x, cancel=True) == \
(Poly(x + 2, x), Poly(x + 1, x))
def test_as_poly_1t():
assert as_poly_1t(2/t + t, t, z) in [
Poly(t + 2*z, t, z), Poly(t + 2*z, z, t)]
assert as_poly_1t(2/t + 3/t**2, t, z) in [
Poly(2*z + 3*z**2, t, z), Poly(2*z + 3*z**2, z, t)]
assert as_poly_1t(2/((exp(2) + 1)*t), t, z) in [
Poly(2/(exp(2) + 1)*z, t, z), Poly(2/(exp(2) + 1)*z, z, t)]
assert as_poly_1t(2/((exp(2) + 1)*t) + t, t, z) in [
Poly(t + 2/(exp(2) + 1)*z, t, z), Poly(t + 2/(exp(2) + 1)*z, z, t)]
assert as_poly_1t(S(0), t, z) == Poly(0, t, z)
def test_derivation():
p = Poly(4*x**4*t**5 + (-4*x**3 - 4*x**4)*t**4 + (-3*x**2 + 2*x**3)*t**3 +
(2*x + 7*x**2 + 2*x**3)*t**2 + (1 - 4*x - 4*x**2)*t - 1 + 2*x, t)
DE = DifferentialExtension(extension={'D': [Poly(1, x), Poly(-t**2 - 3/(2*x)*t + 1/(2*x), t)]})
assert derivation(p, DE) == Poly(-20*x**4*t**6 + (2*x**3 + 16*x**4)*t**5 +
(21*x**2 + 12*x**3)*t**4 + (7*x/2 - 25*x**2 - 12*x**3)*t**3 +
(-5 - 15*x/2 + 7*x**2)*t**2 - (3 - 8*x - 10*x**2 - 4*x**3)/(2*x)*t +
(1 - 4*x**2)/(2*x), t)
assert derivation(Poly(1, t), DE) == Poly(0, t)
assert derivation(Poly(t, t), DE) == DE.d
assert derivation(Poly(t**2 + 1/x*t + (1 - 2*x)/(4*x**2), t), DE) == \
Poly(-2*t**3 - 4/x*t**2 - (5 - 2*x)/(2*x**2)*t - (1 - 2*x)/(2*x**3), t, domain='ZZ(x)')
DE = DifferentialExtension(extension={'D': [Poly(1, x), Poly(1/x, t1), Poly(t, t)]})
assert derivation(Poly(x*t*t1, t), DE) == Poly(t*t1 + x*t*t1 + t, t)
assert derivation(Poly(x*t*t1, t), DE, coefficientD=True) == \
Poly((1 + t1)*t, t)
DE = DifferentialExtension(extension={'D': [Poly(1, x)]})
assert derivation(Poly(x, x), DE) == Poly(1, x)
# Test basic option
assert derivation((x + 1)/(x - 1), DE, basic=True) == -2/(1 - 2*x + x**2)
DE = DifferentialExtension(extension={'D': [Poly(1, x), Poly(t, t)]})
assert derivation((t + 1)/(t - 1), DE, basic=True) == -2*t/(1 - 2*t + t**2)
assert derivation(t + 1, DE, basic=True) == t
def test_splitfactor():
p = Poly(4*x**4*t**5 + (-4*x**3 - 4*x**4)*t**4 + (-3*x**2 + 2*x**3)*t**3 +
(2*x + 7*x**2 + 2*x**3)*t**2 + (1 - 4*x - 4*x**2)*t - 1 + 2*x, t, field=True)
DE = DifferentialExtension(extension={'D': [Poly(1, x), Poly(-t**2 - 3/(2*x)*t + 1/(2*x), t)]})
assert splitfactor(p, DE) == (Poly(4*x**4*t**3 + (-8*x**3 - 4*x**4)*t**2 +
(4*x**2 + 8*x**3)*t - 4*x**2, t), Poly(t**2 + 1/x*t + (1 - 2*x)/(4*x**2), t, domain='ZZ(x)'))
assert splitfactor(Poly(x, t), DE) == (Poly(x, t), Poly(1, t))
r = Poly(-4*x**4*z**2 + 4*x**6*z**2 - z*x**3 - 4*x**5*z**3 + 4*x**3*z**3 + x**4 + z*x**5 - x**6, t)
DE = DifferentialExtension(extension={'D': [Poly(1, x), Poly(1/x, t)]})
assert splitfactor(r, DE, coefficientD=True) == \
(Poly(x*z - x**2 - z*x**3 + x**4, t), Poly(-x**2 + 4*x**2*z**2, t))
assert splitfactor_sqf(r, DE, coefficientD=True) == \
(((Poly(x*z - x**2 - z*x**3 + x**4, t), 1),), ((Poly(-x**2 + 4*x**2*z**2, t), 1),))
assert splitfactor(Poly(0, t), DE) == (Poly(0, t), Poly(1, t))
assert splitfactor_sqf(Poly(0, t), DE) == (((Poly(0, t), 1),), ())
def test_canonical_representation():
DE = DifferentialExtension(extension={'D': [Poly(1, x), Poly(1 + t**2, t)]})
assert canonical_representation(Poly(x - t, t), Poly(t**2, t), DE) == \
(Poly(0, t), (Poly(0, t),
Poly(1, t)), (Poly(-t + x, t),
Poly(t**2, t)))
DE = DifferentialExtension(extension={'D': [Poly(1, x), Poly(t**2 + 1, t)]})
assert canonical_representation(Poly(t**5 + t**3 + x**2*t + 1, t),
Poly((t**2 + 1)**3, t), DE) == \
(Poly(0, t), (Poly(t**5 + t**3 + x**2*t + 1, t),
Poly(t**6 + 3*t**4 + 3*t**2 + 1, t)), (Poly(0, t), Poly(1, t)))
def test_hermite_reduce():
DE = DifferentialExtension(extension={'D': [Poly(1, x), Poly(t**2 + 1, t)]})
assert hermite_reduce(Poly(x - t, t), Poly(t**2, t), DE) == \
((Poly(-x, t), Poly(t, t)), (Poly(0, t), Poly(1, t)), (Poly(-x, t), Poly(1, t)))
DE = DifferentialExtension(extension={'D': [Poly(1, x), Poly(-t**2 - t/x - (1 - nu**2/x**2), t)]})
assert hermite_reduce(
Poly(x**2*t**5 + x*t**4 - nu**2*t**3 - x*(x**2 + 1)*t**2 - (x**2 - nu**2)*t - x**5/4, t),
Poly(x**2*t**4 + x**2*(x**2 + 2)*t**2 + x**2 + x**4 + x**6/4, t), DE) == \
((Poly(-x**2 - 4, t), Poly(4*t**2 + 2*x**2 + 4, t)),
(Poly((-2*nu**2 - x**4)*t - (2*x**3 + 2*x), t), Poly(2*x**2*t**2 + x**4 + 2*x**2, t)),
(Poly(x*t + 1, t), Poly(x, t)))
DE = DifferentialExtension(extension={'D': [Poly(1, x), Poly(1/x, t)]})
a = Poly((-2 + 3*x)*t**3 + (-1 + x)*t**2 + (-4*x + 2*x**2)*t + x**2, t)
d = Poly(x*t**6 - 4*x**2*t**5 + 6*x**3*t**4 - 4*x**4*t**3 + x**5*t**2, t)
assert hermite_reduce(a, d, DE) == \
((Poly(3*t**2 + t + 3*x, t), Poly(3*t**4 - 9*x*t**3 + 9*x**2*t**2 - 3*x**3*t, t)),
(Poly(0, t), Poly(1, t)),
(Poly(0, t), Poly(1, t)))
assert hermite_reduce(
Poly(-t**2 + 2*t + 2, t),
Poly(-x*t**2 + 2*x*t - x, t), DE) == \
((Poly(3, t), Poly(t - 1, t)),
(Poly(0, t), Poly(1, t)),
(Poly(1, t), Poly(x, t)))
assert hermite_reduce(
Poly(-x**2*t**6 + (-1 - 2*x**3 + x**4)*t**3 + (-3 - 3*x**4)*t**2 - 2*x*t - x - 3*x**2, t),
Poly(x**4*t**6 - 2*x**2*t**3 + 1, t), DE) == \
((Poly(x**3*t + x**4 + 1, t), Poly(x**3*t**3 - x, t)),
(Poly(0, t), Poly(1, t)),
(Poly(-1, t), Poly(x**2, t)))
assert hermite_reduce(
Poly((-2 + 3*x)*t**3 + (-1 + x)*t**2 + (-4*x + 2*x**2)*t + x**2, t),
Poly(x*t**6 - 4*x**2*t**5 + 6*x**3*t**4 - 4*x**4*t**3 + x**5*t**2, t), DE) == \
((Poly(3*t**2 + t + 3*x, t), Poly(3*t**4 - 9*x*t**3 + 9*x**2*t**2 - 3*x**3*t, t)),
(Poly(0, t), Poly(1, t)),
(Poly(0, t), Poly(1, t)))
def test_polynomial_reduce():
DE = DifferentialExtension(extension={'D': [Poly(1, x), Poly(1 + t**2, t)]})
assert polynomial_reduce(Poly(1 + x*t + t**2, t), DE) == \
(Poly(t, t), Poly(x*t, t))
assert polynomial_reduce(Poly(0, t), DE) == \
(Poly(0, t), Poly(0, t))
def test_laurent_series():
DE = DifferentialExtension(extension={'D': [Poly(1, x), Poly(1, t)]})
a = Poly(36, t)
d = Poly((t - 2)*(t**2 - 1)**2, t)
F = Poly(t**2 - 1, t)
n = 2
assert laurent_series(a, d, F, n, DE) == \
(Poly(-3*t**3 + 3*t**2 - 6*t - 8, t), Poly(t**5 + t**4 - 2*t**3 - 2*t**2 + t + 1, t),
[Poly(-3*t**3 - 6*t**2, t), Poly(2*t**6 + 6*t**5 - 8*t**3, t)])
def test_recognize_derivative():
DE = DifferentialExtension(extension={'D': [Poly(1, t)]})
a = Poly(36, t)
d = Poly((t - 2)*(t**2 - 1)**2, t)
assert recognize_derivative(a, d, DE) == False
DE = DifferentialExtension(extension={'D': [Poly(1, x), Poly(1/x, t)]})
a = Poly(2, t)
d = Poly(t**2 - 1, t)
assert recognize_derivative(a, d, DE) == False
assert recognize_derivative(Poly(x*t, t), Poly(1, t), DE) == True
DE = DifferentialExtension(extension={'D': [Poly(1, x), Poly(t**2 + 1, t)]})
assert recognize_derivative(Poly(t, t), Poly(1, t), DE) == True
def test_recognize_log_derivative():
a = Poly(2*x**2 + 4*x*t - 2*t - x**2*t, t)
d = Poly((2*x + t)*(t + x**2), t)
DE = DifferentialExtension(extension={'D': [Poly(1, x), Poly(t, t)]})
assert recognize_log_derivative(a, d, DE, z) == True
DE = DifferentialExtension(extension={'D': [Poly(1, x), Poly(1/x, t)]})
assert recognize_log_derivative(Poly(t + 1, t), Poly(t + x, t), DE) == True
assert recognize_log_derivative(Poly(2, t), Poly(t**2 - 1, t), DE) == True
DE = DifferentialExtension(extension={'D': [Poly(1, x)]})
assert recognize_log_derivative(Poly(1, x), Poly(x**2 - 2, x), DE) == False
assert recognize_log_derivative(Poly(1, x), Poly(x**2 + x, x), DE) == True
DE = DifferentialExtension(extension={'D': [Poly(1, x), Poly(t**2 + 1, t)]})
assert recognize_log_derivative(Poly(1, t), Poly(t**2 - 2, t), DE) == False
assert recognize_log_derivative(Poly(1, t), Poly(t**2 + t, t), DE) == False
def test_residue_reduce():
a = Poly(2*t**2 - t - x**2, t)
d = Poly(t**3 - x**2*t, t)
DE = DifferentialExtension(extension={'D': [Poly(1, x), Poly(1/x, t)], 'Tfuncs': [log]})
assert residue_reduce(a, d, DE, z, invert=False) == \
([(Poly(z**2 - S(1)/4, z), Poly((1 + 3*x*z - 6*z**2 -
2*x**2 + 4*x**2*z**2)*t - x*z + x**2 + 2*x**2*z**2 - 2*z*x**3, t))], False)
assert residue_reduce(a, d, DE, z, invert=True) == \
([(Poly(z**2 - S(1)/4, z), Poly(t + 2*x*z, t))], False)
assert residue_reduce(Poly(-2/x, t), Poly(t**2 - 1, t,), DE, z, invert=False) == \
([(Poly(z**2 - 1, z), Poly(-2*z*t/x - 2/x, t))], True)
ans = residue_reduce(Poly(-2/x, t), Poly(t**2 - 1, t), DE, z, invert=True)
assert ans == ([(Poly(z**2 - 1, z), Poly(t + z, t))], True)
assert residue_reduce_to_basic(ans[0], DE, z) == -log(-1 + log(x)) + log(1 + log(x))
DE = DifferentialExtension(extension={'D': [Poly(1, x), Poly(-t**2 - t/x - (1 - nu**2/x**2), t)]})
# TODO: Skip or make faster
assert residue_reduce(Poly((-2*nu**2 - x**4)/(2*x**2)*t - (1 + x**2)/x, t),
Poly(t**2 + 1 + x**2/2, t), DE, z) == \
([(Poly(z + S(1)/2, z, domain='QQ'), Poly(t**2 + 1 + x**2/2, t, domain='EX'))], True)
DE = DifferentialExtension(extension={'D': [Poly(1, x), Poly(1 + t**2, t)]})
assert residue_reduce(Poly(-2*x*t + 1 - x**2, t),
Poly(t**2 + 2*x*t + 1 + x**2, t), DE, z) == \
([(Poly(z**2 + S(1)/4, z), Poly(t + x + 2*z, t))], True)
DE = DifferentialExtension(extension={'D': [Poly(1, x), Poly(t, t)]})
assert residue_reduce(Poly(t, t), Poly(t + sqrt(2), t), DE, z) == \
([(Poly(z - 1, z), Poly(t + sqrt(2), t))], True)
def test_integrate_hyperexponential():
# TODO: Add tests for integrate_hyperexponential() from the book
a = Poly((1 + 2*t1 + t1**2 + 2*t1**3)*t**2 + (1 + t1**2)*t + 1 + t1**2, t)
d = Poly(1, t)
DE = DifferentialExtension(extension={'D': [Poly(1, x), Poly(1 + t1**2, t1),
Poly(t*(1 + t1**2), t)], 'Tfuncs': [tan, Lambda(i, exp(tan(i)))]})
assert integrate_hyperexponential(a, d, DE) == \
(exp(2*tan(x))*tan(x) + exp(tan(x)), 1 + t1**2, True)
a = Poly((t1**3 + (x + 1)*t1**2 + t1 + x + 2)*t, t)
assert integrate_hyperexponential(a, d, DE) == \
((x + tan(x))*exp(tan(x)), 0, True)
a = Poly(t, t)
d = Poly(1, t)
DE = DifferentialExtension(extension={'D': [Poly(1, x), Poly(2*x*t, t)],
'Tfuncs': [Lambda(i, exp(x**2))]})
assert integrate_hyperexponential(a, d, DE) == \
(0, NonElementaryIntegral(exp(x**2), x), False)
DE = DifferentialExtension(extension={'D': [Poly(1, x), Poly(t, t)], 'Tfuncs': [exp]})
assert integrate_hyperexponential(a, d, DE) == (exp(x), 0, True)
a = Poly(25*t**6 - 10*t**5 + 7*t**4 - 8*t**3 + 13*t**2 + 2*t - 1, t)
d = Poly(25*t**6 + 35*t**4 + 11*t**2 + 1, t)
assert integrate_hyperexponential(a, d, DE) == \
(-(11 - 10*exp(x))/(5 + 25*exp(2*x)) + log(1 + exp(2*x)), -1, True)
DE = DifferentialExtension(extension={'D': [Poly(1, x), Poly(t0, t0), Poly(t0*t, t)],
'Tfuncs': [exp, Lambda(i, exp(exp(i)))]})
assert integrate_hyperexponential(Poly(2*t0*t**2, t), Poly(1, t), DE) == (exp(2*exp(x)), 0, True)
DE = DifferentialExtension(extension={'D': [Poly(1, x), Poly(t0, t0), Poly(-t0*t, t)],
'Tfuncs': [exp, Lambda(i, exp(-exp(i)))]})
assert integrate_hyperexponential(Poly(-27*exp(9) - 162*t0*exp(9) +
27*x*t0*exp(9), t), Poly((36*exp(18) + x**2*exp(18) - 12*x*exp(18))*t, t), DE) == \
(27*exp(exp(x))/(-6*exp(9) + x*exp(9)), 0, True)
DE = DifferentialExtension(extension={'D': [Poly(1, x), Poly(t, t)], 'Tfuncs': [exp]})
assert integrate_hyperexponential(Poly(x**2/2*t, t), Poly(1, t), DE) == \
((2 - 2*x + x**2)*exp(x)/2, 0, True)
assert integrate_hyperexponential(Poly(1 + t, t), Poly(t, t), DE) == \
(-exp(-x), 1, True) # x - exp(-x)
assert integrate_hyperexponential(Poly(x, t), Poly(t + 1, t), DE) == \
(0, NonElementaryIntegral(x/(1 + exp(x)), x), False)
DE = DifferentialExtension(extension={'D': [Poly(1, x), Poly(1/x, t0), Poly(2*x*t1, t1)],
'Tfuncs': [log, Lambda(i, exp(i**2))]})
elem, nonelem, b = integrate_hyperexponential(Poly((8*x**7 - 12*x**5 + 6*x**3 - x)*t1**4 +
(8*t0*x**7 - 8*t0*x**6 - 4*t0*x**5 + 2*t0*x**3 + 2*t0*x**2 - t0*x +
24*x**8 - 36*x**6 - 4*x**5 + 22*x**4 + 4*x**3 - 7*x**2 - x + 1)*t1**3
+ (8*t0*x**8 - 4*t0*x**6 - 16*t0*x**5 - 2*t0*x**4 + 12*t0*x**3 +
t0*x**2 - 2*t0*x + 24*x**9 - 36*x**7 - 8*x**6 + 22*x**5 + 12*x**4 -
7*x**3 - 6*x**2 + x + 1)*t1**2 + (8*t0*x**8 - 8*t0*x**6 - 16*t0*x**5 +
6*t0*x**4 + 10*t0*x**3 - 2*t0*x**2 - t0*x + 8*x**10 - 12*x**8 - 4*x**7
+ 2*x**6 + 12*x**5 + 3*x**4 - 9*x**3 - x**2 + 2*x)*t1 + 8*t0*x**7 -
12*t0*x**6 - 4*t0*x**5 + 8*t0*x**4 - t0*x**2 - 4*x**7 + 4*x**6 +
4*x**5 - 4*x**4 - x**3 + x**2, t1), Poly((8*x**7 - 12*x**5 + 6*x**3 -
x)*t1**4 + (24*x**8 + 8*x**7 - 36*x**6 - 12*x**5 + 18*x**4 + 6*x**3 -
3*x**2 - x)*t1**3 + (24*x**9 + 24*x**8 - 36*x**7 - 36*x**6 + 18*x**5 +
18*x**4 - 3*x**3 - 3*x**2)*t1**2 + (8*x**10 + 24*x**9 - 12*x**8 -
36*x**7 + 6*x**6 + 18*x**5 - x**4 - 3*x**3)*t1 + 8*x**10 - 12*x**8 +
6*x**6 - x**4, t1), DE)
assert factor(elem) == -((x - 1)*log(x)/((x + exp(x**2))*(2*x**2 - 1)))
assert (nonelem, b) == (NonElementaryIntegral(exp(x**2)/(exp(x**2) + 1), x), False)
def test_integrate_hyperexponential_polynomial():
# Without proper cancellation within integrate_hyperexponential_polynomial(),
# this will take a long time to complete, and will return a complicated
# expression
p = Poly((-28*x**11*t0 - 6*x**8*t0 + 6*x**9*t0 - 15*x**8*t0**2 +
15*x**7*t0**2 + 84*x**10*t0**2 - 140*x**9*t0**3 - 20*x**6*t0**3 +
20*x**7*t0**3 - 15*x**6*t0**4 + 15*x**5*t0**4 + 140*x**8*t0**4 -
84*x**7*t0**5 - 6*x**4*t0**5 + 6*x**5*t0**5 + x**3*t0**6 - x**4*t0**6 +
28*x**6*t0**6 - 4*x**5*t0**7 + x**9 - x**10 + 4*x**12)/(-8*x**11*t0 +
28*x**10*t0**2 - 56*x**9*t0**3 + 70*x**8*t0**4 - 56*x**7*t0**5 +
28*x**6*t0**6 - 8*x**5*t0**7 + x**4*t0**8 + x**12)*t1**2 +
(-28*x**11*t0 - 12*x**8*t0 + 12*x**9*t0 - 30*x**8*t0**2 +
30*x**7*t0**2 + 84*x**10*t0**2 - 140*x**9*t0**3 - 40*x**6*t0**3 +
40*x**7*t0**3 - 30*x**6*t0**4 + 30*x**5*t0**4 + 140*x**8*t0**4 -
84*x**7*t0**5 - 12*x**4*t0**5 + 12*x**5*t0**5 - 2*x**4*t0**6 +
2*x**3*t0**6 + 28*x**6*t0**6 - 4*x**5*t0**7 + 2*x**9 - 2*x**10 +
4*x**12)/(-8*x**11*t0 + 28*x**10*t0**2 - 56*x**9*t0**3 +
70*x**8*t0**4 - 56*x**7*t0**5 + 28*x**6*t0**6 - 8*x**5*t0**7 +
x**4*t0**8 + x**12)*t1 + (-2*x**2*t0 + 2*x**3*t0 + x*t0**2 -
x**2*t0**2 + x**3 - x**4)/(-4*x**5*t0 + 6*x**4*t0**2 - 4*x**3*t0**3 +
x**2*t0**4 + x**6), t1, z, expand=False)
DE = DifferentialExtension(extension={'D': [Poly(1, x), Poly(1/x, t0), Poly(2*x*t1, t1)]})
assert integrate_hyperexponential_polynomial(p, DE, z) == (
Poly((x - t0)*t1**2 + (-2*t0 + 2*x)*t1, t1), Poly(-2*x*t0 + x**2 +
t0**2, t1), True)
DE = DifferentialExtension(extension={'D':[Poly(1, x), Poly(t0, t0)]})
assert integrate_hyperexponential_polynomial(Poly(0, t0), DE, z) == (
Poly(0, t0), Poly(1, t0), True)
def test_integrate_hyperexponential_returns_piecewise():
a, b = symbols('a b')
DE = DifferentialExtension(a**x, x)
assert integrate_hyperexponential(DE.fa, DE.fd, DE) == (Piecewise(
(x, Eq(log(a), 0)), (exp(x*log(a))/log(a), True)), 0, True)
DE = DifferentialExtension(a**(b*x), x)
assert integrate_hyperexponential(DE.fa, DE.fd, DE) == (Piecewise(
(x, Eq(b*log(a), 0)), (exp(b*x*log(a))/(b*log(a)), True)), 0, True)
DE = DifferentialExtension(exp(a*x), x)
assert integrate_hyperexponential(DE.fa, DE.fd, DE) == (Piecewise(
(x, Eq(a, 0)), (exp(a*x)/a, True)), 0, True)
DE = DifferentialExtension(x*exp(a*x), x)
assert integrate_hyperexponential(DE.fa, DE.fd, DE) == (Piecewise(
(x**2/2, Eq(a**3, 0)), ((x*a**2 - a)*exp(a*x)/a**3, True)), 0, True)
DE = DifferentialExtension(x**2*exp(a*x), x)
assert integrate_hyperexponential(DE.fa, DE.fd, DE) == (Piecewise(
(x**3/3, Eq(a**6, 0)),
((x**2*a**5 - 2*x*a**4 + 2*a**3)*exp(a*x)/a**6, True)), 0, True)
DE = DifferentialExtension(x**y + z, y)
assert integrate_hyperexponential(DE.fa, DE.fd, DE) == (Piecewise((y,
Eq(log(x), 0)), (exp(log(x)*y)/log(x), True)), z, True)
DE = DifferentialExtension(x**y + z + x**(2*y), y)
assert integrate_hyperexponential(DE.fa, DE.fd, DE) == (Piecewise((2*y,
Eq(2*log(x)**2, 0)), ((exp(2*log(x)*y)*log(x) +
2*exp(log(x)*y)*log(x))/(2*log(x)**2), True)), z, True)
# TODO: Add a test where two different parts of the extension use a
# Piecewise, like y**x + z**x.
def test_integrate_primitive():
DE = DifferentialExtension(extension={'D': [Poly(1, x), Poly(1/x, t)],
'Tfuncs': [log]})
assert integrate_primitive(Poly(t, t), Poly(1, t), DE) == (x*log(x), -1, True)
assert integrate_primitive(Poly(x, t), Poly(t, t), DE) == (0, NonElementaryIntegral(x/log(x), x), False)
DE = DifferentialExtension(extension={'D': [Poly(1, x), Poly(1/x, t1), Poly(1/(x + 1), t2)],
'Tfuncs': [log, Lambda(i, log(i + 1))]})
assert integrate_primitive(Poly(t1, t2), Poly(t2, t2), DE) == \
(0, NonElementaryIntegral(log(x)/log(1 + x), x), False)
DE = DifferentialExtension(extension={'D': [Poly(1, x), Poly(1/x, t1), Poly(1/(x*t1), t2)],
'Tfuncs': [log, Lambda(i, log(log(i)))]})
assert integrate_primitive(Poly(t2, t2), Poly(t1, t2), DE) == \
(0, NonElementaryIntegral(log(log(x))/log(x), x), False)
DE = DifferentialExtension(extension={'D': [Poly(1, x), Poly(1/x, t0)],
'Tfuncs': [log]})
assert integrate_primitive(Poly(x**2*t0**3 + (3*x**2 + x)*t0**2 + (3*x**2
+ 2*x)*t0 + x**2 + x, t0), Poly(x**2*t0**4 + 4*x**2*t0**3 + 6*x**2*t0**2 +
4*x**2*t0 + x**2, t0), DE) == \
(-1/(log(x) + 1), NonElementaryIntegral(1/(log(x) + 1), x), False)
def test_integrate_hypertangent_polynomial():
DE = DifferentialExtension(extension={'D': [Poly(1, x), Poly(t**2 + 1, t)]})
assert integrate_hypertangent_polynomial(Poly(t**2 + x*t + 1, t), DE) == \
(Poly(t, t), Poly(x/2, t))
DE = DifferentialExtension(extension={'D': [Poly(1, x), Poly(a*(t**2 + 1), t)]})
assert integrate_hypertangent_polynomial(Poly(t**5, t), DE) == \
(Poly(1/(4*a)*t**4 - 1/(2*a)*t**2, t), Poly(1/(2*a), t))
def test_integrate_nonlinear_no_specials():
a, d, = Poly(x**2*t**5 + x*t**4 - nu**2*t**3 - x*(x**2 + 1)*t**2 - (x**2 -
nu**2)*t - x**5/4, t), Poly(x**2*t**4 + x**2*(x**2 + 2)*t**2 + x**2 + x**4 + x**6/4, t)
# f(x) == phi_nu(x), the logarithmic derivative of J_v, the Bessel function,
# which has no specials (see Chapter 5, note 4 of Bronstein's book).
f = Function('phi_nu')
DE = DifferentialExtension(extension={'D': [Poly(1, x),
Poly(-t**2 - t/x - (1 - nu**2/x**2), t)], 'Tfuncs': [f]})
assert integrate_nonlinear_no_specials(a, d, DE) == \
(-log(1 + f(x)**2 + x**2/2)/2 - (4 + x**2)/(4 + 2*x**2 + 4*f(x)**2), True)
assert integrate_nonlinear_no_specials(Poly(t, t), Poly(1, t), DE) == \
(0, False)
def test_integer_powers():
assert integer_powers([x, x/2, x**2 + 1, 2*x/3]) == [
(x/6, [(x, 6), (x/2, 3), (2*x/3, 4)]),
(1 + x**2, [(1 + x**2, 1)])]
def test_DifferentialExtension_exp():
assert DifferentialExtension(exp(x) + exp(x**2), x, dummy=False)._important_attrs == \
(Poly(t1 + t0, t1), Poly(1, t1), [Poly(1, x,), Poly(t0, t0),
Poly(2*x*t1, t1)], [x, t0, t1], [Lambda(i, exp(i)),
Lambda(i, exp(i**2))], [], [1, 2], [x, x**2], [], [])
assert DifferentialExtension(exp(x) + exp(2*x), x, dummy=False)._important_attrs == \
(Poly(t0**2 + t0, t0), Poly(1, t0), [Poly(1, x), Poly(t0, t0)], [x, t0],
[Lambda(i, exp(i))], [], [1], [x], [], [])
assert DifferentialExtension(exp(x) + exp(x/2), x, dummy=False)._important_attrs == \
(Poly(t0**2 + t0, t0), Poly(1, t0), [Poly(1, x), Poly(t0/2, t0)],
[x, t0], [Lambda(i, exp(i/2))], [], [1], [x/2], [], [])
assert DifferentialExtension(exp(x) + exp(x**2) + exp(x + x**2), x,
dummy=False)._important_attrs == \
(Poly((1 + t0)*t1 + t0, t1), Poly(1, t1), [Poly(1, x), Poly(t0, t0),
Poly(2*x*t1, t1)], [x, t0, t1], [Lambda(i, exp(i)),
Lambda(i, exp(i**2))], [], [1, 2], [x, x**2], [], [])
assert DifferentialExtension(exp(x) + exp(x**2) + exp(x + x**2 + 1), x,
dummy=False)._important_attrs == \
(Poly((1 + S.Exp1*t0)*t1 + t0, t1), Poly(1, t1), [Poly(1, x),
Poly(t0, t0), Poly(2*x*t1, t1)], [x, t0, t1], [Lambda(i, exp(i)),
Lambda(i, exp(i**2))], [], [1, 2], [x, x**2], [], [])
assert DifferentialExtension(exp(x) + exp(x**2) + exp(x/2 + x**2), x,
dummy=False)._important_attrs == \
(Poly((t0 + 1)*t1 + t0**2, t1), Poly(1, t1), [Poly(1, x),
Poly(t0/2, t0), Poly(2*x*t1, t1)], [x, t0, t1],
[Lambda(i, exp(i/2)), Lambda(i, exp(i**2))],
[(exp(x/2), sqrt(exp(x)))], [1, 2], [x/2, x**2], [], [])
assert DifferentialExtension(exp(x) + exp(x**2) + exp(x/2 + x**2 + 3), x,
dummy=False)._important_attrs == \
(Poly((t0*exp(3) + 1)*t1 + t0**2, t1), Poly(1, t1), [Poly(1, x),
Poly(t0/2, t0), Poly(2*x*t1, t1)], [x, t0, t1], [Lambda(i, exp(i/2)),
Lambda(i, exp(i**2))], [(exp(x/2), sqrt(exp(x)))], [1, 2], [x/2, x**2],
[], [])
assert DifferentialExtension(sqrt(exp(x)), x, dummy=False)._important_attrs == \
(Poly(t0, t0), Poly(1, t0), [Poly(1, x), Poly(t0/2, t0)], [x, t0],
[Lambda(i, exp(i/2))], [(exp(x/2), sqrt(exp(x)))], [1], [x/2], [], [])
assert DifferentialExtension(exp(x/2), x, dummy=False)._important_attrs == \
(Poly(t0, t0), Poly(1, t0), [Poly(1, x), Poly(t0/2, t0)], [x, t0],
[Lambda(i, exp(i/2))], [], [1], [x/2], [], [])
def test_DifferentialExtension_log():
assert DifferentialExtension(log(x)*log(x + 1)*log(2*x**2 + 2*x), x,
dummy=False)._important_attrs == \
(Poly(t0*t1**2 + (t0*log(2) + t0**2)*t1, t1), Poly(1, t1),
[Poly(1, x), Poly(1/x, t0),
Poly(1/(x + 1), t1, expand=False)], [x, t0, t1],
[Lambda(i, log(i)), Lambda(i, log(i + 1))], [], [], [],
[1, 2], [x, x + 1])
assert DifferentialExtension(x**x*log(x), x, dummy=False)._important_attrs == \
(Poly(t0*t1, t1), Poly(1, t1), [Poly(1, x), Poly(1/x, t0),
Poly((1 + t0)*t1, t1)], [x, t0, t1], [Lambda(i, log(i)),
Lambda(i, exp(t0*i))], [(exp(x*log(x)), x**x)], [2], [t0*x], [1], [x])
def test_DifferentialExtension_symlog():
# See comment on test_risch_integrate below
assert DifferentialExtension(log(x**x), x, dummy=False)._important_attrs == \
(Poly(t0*x, t1), Poly(1, t1), [Poly(1, x), Poly(1/x, t0), Poly((t0 +
1)*t1, t1)], [x, t0, t1], [Lambda(i, log(i)), Lambda(i, exp(i*t0))],
[(exp(x*log(x)), x**x)], [2], [t0*x], [1], [x])
assert DifferentialExtension(log(x**y), x, dummy=False)._important_attrs == \
(Poly(y*t0, t0), Poly(1, t0), [Poly(1, x), Poly(1/x, t0)], [x, t0],
[Lambda(i, log(i))], [(y*log(x), log(x**y))], [], [], [1], [x])
assert DifferentialExtension(log(sqrt(x)), x, dummy=False)._important_attrs == \
(Poly(t0, t0), Poly(2, t0), [Poly(1, x), Poly(1/x, t0)], [x, t0],
[Lambda(i, log(i))], [(log(x)/2, log(sqrt(x)))], [], [], [1], [x])
def test_DifferentialExtension_handle_first():
assert DifferentialExtension(exp(x)*log(x), x, handle_first='log',
dummy=False)._important_attrs == \
(Poly(t0*t1, t1), Poly(1, t1), [Poly(1, x), Poly(1/x, t0),
Poly(t1, t1)], [x, t0, t1], [Lambda(i, log(i)), Lambda(i, exp(i))],
[], [2], [x], [1], [x])
assert DifferentialExtension(exp(x)*log(x), x, handle_first='exp',
dummy=False)._important_attrs == \
(Poly(t0*t1, t1), Poly(1, t1), [Poly(1, x), Poly(t0, t0),
Poly(1/x, t1)], [x, t0, t1], [Lambda(i, exp(i)), Lambda(i, log(i))],
[], [1], [x], [2], [x])
# This one must have the log first, regardless of what we set it to
# (because the log is inside of the exponential: x**x == exp(x*log(x)))
assert DifferentialExtension(-x**x*log(x)**2 + x**x - x**x/x, x,
handle_first='exp', dummy=False)._important_attrs == \
DifferentialExtension(-x**x*log(x)**2 + x**x - x**x/x, x,
handle_first='log', dummy=False)._important_attrs == \
(Poly((-1 + x - x*t0**2)*t1, t1), Poly(x, t1),
[Poly(1, x), Poly(1/x, t0), Poly((1 + t0)*t1, t1)], [x, t0, t1],
[Lambda(i, log(i)), Lambda(i, exp(t0*i))], [(exp(x*log(x)), x**x)],
[2], [t0*x], [1], [x])
def test_DifferentialExtension_all_attrs():
    """Exercise the 'unimportant' attributes and level navigation."""
    de = DifferentialExtension(exp(x)*log(x), x, dummy=False, handle_first='exp')
    assert de.f == exp(x)*log(x)
    assert de.newf == t0*t1
    assert de.x == x
    assert de.cases == ['base', 'exp', 'primitive']
    assert de.case == 'primitive'
    # Top of the tower: negative levels index from the end of de.T/de.D.
    assert de.level == -1
    assert de.t == t1 == de.T[de.level]
    assert de.d == Poly(1/x, t1) == de.D[de.level]
    raises(ValueError, lambda: de.increment_level())
    de.decrement_level()
    assert de.level == -2
    assert de.t == t0 == de.T[de.level]
    assert de.d == Poly(t0, t0) == de.D[de.level]
    assert de.case == 'exp'
    de.decrement_level()
    assert de.level == -3
    assert de.t == x == de.T[de.level] == de.x
    assert de.d == Poly(1, x) == de.D[de.level]
    assert de.case == 'base'
    # Cannot go below the base level.
    raises(ValueError, lambda: de.decrement_level())
    de.increment_level()
    de.increment_level()
    assert de.level == -1
    assert de.t == t1 == de.T[de.level]
    assert de.d == Poly(1/x, t1) == de.D[de.level]
    assert de.case == 'primitive'
def test_DifferentialExtension_extension_flag():
    """Building a DifferentialExtension directly from an extension dict."""
    # 'D' is mandatory in the extension dict.
    raises(ValueError, lambda: DifferentialExtension(extension={'T': [x, t]}))
    de = DifferentialExtension(extension={'D': [Poly(1, x), Poly(t, t)]})
    assert de._important_attrs == (
        None, None, [Poly(1, x), Poly(t, t)], [x, t],
        None, None, None, None, None, None)
    assert de.d == Poly(t, t)
    assert de.t == t
    assert de.level == -1
    assert de.cases == ['base', 'exp']
    assert de.x == x
    assert de.case == 'exp'
    de = DifferentialExtension(extension={'D': [Poly(1, x), Poly(t, t)],
        'E_K': [1], 'E_args': [x], 'L_K': [], 'L_args': []})
    assert de._important_attrs == (
        None, None, [Poly(1, x), Poly(t, t)], [x, t],
        None, None, [1], [x], [], [])
    # No integrand and no extension dict at all is an error.
    raises(ValueError, lambda: DifferentialExtension())
def test_DifferentialExtension_misc():
    """Odd ends: symbolic coefficients, unsupported functions, 10**x, zero."""
    de = DifferentialExtension(sin(y)*exp(x), x, dummy=False)
    assert de._important_attrs == (
        Poly(sin(y)*t0, t0, domain='ZZ[sin(y)]'), Poly(1, t0, domain='ZZ'),
        [Poly(1, x, domain='ZZ'), Poly(t0, t0, domain='ZZ')], [x, t0],
        [Lambda(i, exp(i))], [], [1], [x], [], [])
    # sin(x) of the integration variable is not (yet) supported.
    raises(NotImplementedError, lambda: DifferentialExtension(sin(x), x))
    de = DifferentialExtension(10**x, x, dummy=False)
    assert de._important_attrs == (
        Poly(t0, t0), Poly(1, t0),
        [Poly(1, x), Poly(log(10)*t0, t0)], [x, t0],
        [Lambda(i, exp(i*log(10)))], [(exp(x*log(10)), 10**x)], [1], [x*log(10)],
        [], [])
    # Either of two equivalent towers is acceptable here.
    assert DifferentialExtension(log(x) + log(x**2), x, dummy=False)._important_attrs in [
        (Poly(3*t0, t0), Poly(2, t0), [Poly(1, x), Poly(2/x, t0)], [x, t0],
        [Lambda(i, log(i**2))], [], [], [], [1], [x**2]),
        (Poly(3*t0, t0), Poly(1, t0), [Poly(1, x), Poly(1/x, t0)], [x, t0],
        [Lambda(i, log(i))], [], [], [], [1], [x])]
    de = DifferentialExtension(S.Zero, x, dummy=False)
    assert de._important_attrs == (
        Poly(0, x), Poly(1, x), [Poly(1, x)], [x], [], [], [], [], [], [])
def test_DifferentialExtension_Rothstein():
    """Rothstein's integral: a nested-exponential stress test."""
    num = 2581284541*exp(x) + 1757211400
    den = (39916800*exp(3*x) + 119750400*exp(x)**2 + 119750400*exp(x) +
        39916800)
    f = num/den*exp(1/(exp(x) + 1) - 10*x)
    de = DifferentialExtension(f, x, dummy=False)
    assert de._important_attrs == (
        Poly((1757211400 + 2581284541*t0)*t1, t1),
        Poly(39916800 + 119750400*t0 + 119750400*t0**2 + 39916800*t0**3, t1),
        [Poly(1, x), Poly(t0, t0), Poly(-(10 + 21*t0 + 10*t0**2)/(1 + 2*t0 +
        t0**2)*t1, t1, domain='ZZ(t0)')], [x, t0, t1],
        [Lambda(i, exp(i)), Lambda(i, exp(1/(t0 + 1) - 10*i))], [], [1, 2],
        [x, 1/(t0 + 1) - 10*x], [], [])
class TestingException(Exception):
    """Dummy Exception class for testing."""
def test_DecrementLevel():
    """DecrementLevel context manager: nesting and exception safety."""
    de = DifferentialExtension(x*log(exp(x) + 1), x, dummy=False)
    assert de.level == -1
    assert de.t == t1
    assert de.d == Poly(t0/(t0 + 1), t1)
    assert de.case == 'primitive'
    with DecrementLevel(de):
        assert de.level == -2
        assert de.t == t0
        assert de.d == Poly(t0, t0)
        assert de.case == 'exp'
        with DecrementLevel(de):
            assert de.level == -3
            assert de.t == x
            assert de.d == Poly(1, x)
            assert de.case == 'base'
        # Inner context restored us one level up.
        assert de.level == -2
        assert de.t == t0
        assert de.d == Poly(t0, t0)
        assert de.case == 'exp'
    assert de.level == -1
    assert de.t == t1
    assert de.d == Poly(t0/(t0 + 1), t1)
    assert de.case == 'primitive'
    # Test that __exit__ is called after an exception correctly
    try:
        with DecrementLevel(de):
            raise TestingException
    except TestingException:
        pass
    else:
        raise AssertionError("Did not raise.")
    assert de.level == -1
    assert de.t == t1
    assert de.d == Poly(t0/(t0 + 1), t1)
    assert de.case == 'primitive'
def test_risch_integrate():
    """End-to-end risch_integrate checks, including symlog backsubstitution."""
    assert risch_integrate(t0*exp(x), x) == t0*exp(x)
    assert risch_integrate(sin(x), x, rewrite_complex=True) == -exp(I*x)/2 - exp(-I*x)/2
    # From my GSoC writeup
    integrand = ((1 + 2*x**2 + x**4 + 2*x**3*exp(2*x**2)) /
        (x**4*exp(x**2) + 2*x**2*exp(x**2) + exp(x**2)))
    assert risch_integrate(integrand, x) == \
        NonElementaryIntegral(exp(-x**2), x) + exp(x**2)/(1 + x**2)
    assert risch_integrate(0, x) == 0
    # These are tested here in addition to in test_DifferentialExtension above
    # (symlogs) to test that backsubs works correctly. The integrals should be
    # written in terms of the original logarithms in the integrands.
    # XXX: Unfortunately, making backsubs work on this one is a little
    # trickier, because x**x is converted to exp(x*log(x)), and so log(x**x)
    # is converted to x*log(x). (x**2*log(x)).subs(x*log(x), log(x**x)) is
    # smart enough, the issue is that these splits happen at different places
    # in the algorithm. Maybe a heuristic is in order
    assert risch_integrate(log(x**x), x) == x**2*log(x)/2 - x**2/4
    assert risch_integrate(log(x**y), x) == x*log(x**y) - x*y
    assert risch_integrate(log(sqrt(x)), x) == x*log(sqrt(x)) - x/2
def test_risch_integrate_float():
    """Float coefficients must survive integration exactly."""
    result = risch_integrate((-60*exp(x) - 19.2*exp(4*x))*exp(4*x), x)
    assert result == -2.4*exp(8*x) - 12.0*exp(5*x)
def test_NonElementaryIntegral():
    """Nonelementary results keep the NonElementaryIntegral type,
    including through Integral methods such as subs."""
    for nonelem in (risch_integrate(exp(x**2), x),
                    risch_integrate(x**x*log(x), x),
                    NonElementaryIntegral(x**x*t0, x).subs(t0, log(x))):
        assert isinstance(nonelem, NonElementaryIntegral)
def test_xtothex():
    """x**x has no elementary antiderivative."""
    antideriv = risch_integrate(x**x, x)
    assert antideriv == NonElementaryIntegral(x**x, x)
    assert isinstance(antideriv, NonElementaryIntegral)
| bsd-3-clause |
staticlibs/android-ndk-r9d-arm-linux-androideabi-4.8 | lib/python2.7/pdb.py | 119 | 46016 | #! /usr/bin/env python
"""A Python debugger."""
# (See pdb.doc for documentation.)
import sys
import linecache
import cmd
import bdb
from repr import Repr
import os
import re
import pprint
import traceback
class Restart(Exception):
    """Causes a debugger to be restarted for the debugged python program."""
# Create a custom safe Repr instance and increase its maxstring.
# The default of 30 truncates error messages too easily.
_repr = Repr()
_repr.maxstring = 200
_saferepr = _repr.repr  # bound method: format a value, truncating long reprs
__all__ = ["run", "pm", "Pdb", "runeval", "runctx", "runcall", "set_trace",
           "post_mortem", "help"]
def find_function(funcname, filename):
    """Scan *filename* for a ``def funcname(`` line.

    Returns a ``(funcname, filename, lineno)`` tuple for the first matching
    line, or None if the file cannot be opened or no definition is found.

    Fixes over the original: the file handle is now closed even if reading
    raises (the old code only closed it on the normal path), and the manual
    ``while 1``/``readline`` loop is replaced by idiomatic iteration.
    """
    cre = re.compile(r'def\s+%s\s*[(]' % re.escape(funcname))
    try:
        fp = open(filename)
    except IOError:
        return None
    with fp:
        # consumer of this info expects the first line to be 1
        for lineno, line in enumerate(fp, 1):
            if cre.match(line):
                return funcname, filename, lineno
    return None
# Interaction prompt line will separate file and call info from code
# text using value of line_prefix string. A newline and arrow may
# be to your liking. You can set it once pdb is imported using the
# command "pdb.line_prefix = '\n% '".
# line_prefix = ': ' # Use this to get the old situation back
line_prefix = '\n-> '   # Probably a better default
class Pdb(bdb.Bdb, cmd.Cmd):
    def __init__(self, completekey='tab', stdin=None, stdout=None, skip=None):
        """Set up the debugger: Bdb tracing state, Cmd command loop,
        ~/.pdbrc and ./.pdbrc startup commands, and per-breakpoint
        command-list bookkeeping."""
        bdb.Bdb.__init__(self, skip=skip)
        cmd.Cmd.__init__(self, completekey, stdin, stdout)
        if stdout:
            # An explicit stdout means output is redirected; disable
            # raw_input so Cmd reads from self.stdin instead.
            self.use_rawinput = 0
        self.prompt = '(Pdb) '
        self.aliases = {}
        self.mainpyfile = ''
        self._wait_for_mainpyfile = 0
        # Try to load readline if it exists
        try:
            import readline
        except ImportError:
            pass
        # Read $HOME/.pdbrc and ./.pdbrc
        self.rcLines = []
        if 'HOME' in os.environ:
            envHome = os.environ['HOME']
            try:
                rcFile = open(os.path.join(envHome, ".pdbrc"))
            except IOError:
                pass
            else:
                for line in rcFile.readlines():
                    self.rcLines.append(line)
                rcFile.close()
        try:
            rcFile = open(".pdbrc")
        except IOError:
            pass
        else:
            for line in rcFile.readlines():
                self.rcLines.append(line)
            rcFile.close()
        self.commands = {} # associates a command list to breakpoint numbers
        self.commands_doprompt = {} # for each bp num, tells if the prompt
                                    # must be disp. after execing the cmd list
        self.commands_silent = {} # for each bp num, tells if the stack trace
                                  # must be disp. after execing the cmd list
        self.commands_defining = False # True while in the process of defining
                                       # a command list
        self.commands_bnum = None # The breakpoint number for which we are
                                  # defining a list
    def reset(self):
        """Reset Bdb tracing state and forget the cached stack."""
        bdb.Bdb.reset(self)
        self.forget()
    def forget(self):
        """Drop all cached frame/stack state from the last interaction."""
        self.lineno = None
        self.stack = []
        self.curindex = 0
        self.curframe = None
    def setup(self, f, t):
        """Cache the stack for frame *f* / traceback *t* and run rc lines."""
        self.forget()
        self.stack, self.curindex = self.get_stack(f, t)
        self.curframe = self.stack[self.curindex][0]
        # The f_locals dictionary is updated from the actual frame
        # locals whenever the .f_locals accessor is called, so we
        # cache it here to ensure that modifications are not overwritten.
        self.curframe_locals = self.curframe.f_locals
        self.execRcLines()
    # Can be executed earlier than 'setup' if desired
    def execRcLines(self):
        """Execute the queued .pdbrc commands, at most once."""
        if self.rcLines:
            # Make local copy because of recursion
            rcLines = self.rcLines
            # executed only once
            self.rcLines = []
            for line in rcLines:
                line = line[:-1]
                # Skip blank lines and comment lines.
                if len(line) > 0 and line[0] != '#':
                    self.onecmd(line)
    # Override Bdb methods
    def user_call(self, frame, argument_list):
        """This method is called when there is the remote possibility
        that we ever need to stop in this function."""
        if self._wait_for_mainpyfile:
            return
        if self.stop_here(frame):
            print >>self.stdout, '--Call--'
            self.interaction(frame, None)
    def user_line(self, frame):
        """This function is called when we stop or break at this line."""
        if self._wait_for_mainpyfile:
            # Ignore stops until execution reaches the main script's file.
            if (self.mainpyfile != self.canonic(frame.f_code.co_filename)
                or frame.f_lineno<= 0):
                return
            self._wait_for_mainpyfile = 0
        if self.bp_commands(frame):
            self.interaction(frame, None)
    def bp_commands(self,frame):
        """Call every command that was set for the current active breakpoint
        (if there is one).
        Returns True if the normal interaction function must be called,
        False otherwise."""
        # self.currentbp is set in bdb in Bdb.break_here if a breakpoint was hit
        if getattr(self, "currentbp", False) and \
               self.currentbp in self.commands:
            currentbp = self.currentbp
            self.currentbp = 0
            # Preserve lastcmd across the scripted command list.
            lastcmd_back = self.lastcmd
            self.setup(frame, None)
            for line in self.commands[currentbp]:
                self.onecmd(line)
            self.lastcmd = lastcmd_back
            if not self.commands_silent[currentbp]:
                self.print_stack_entry(self.stack[self.curindex])
            if self.commands_doprompt[currentbp]:
                self.cmdloop()
            self.forget()
            # Falsy: the command list already handled the interaction.
            return
        return 1
    def user_return(self, frame, return_value):
        """This function is called when a return trap is set here."""
        if self._wait_for_mainpyfile:
            return
        frame.f_locals['__return__'] = return_value
        print >>self.stdout, '--Return--'
        self.interaction(frame, None)
    def user_exception(self, frame, exc_info):
        """This function is called if an exception occurs,
        but only if we are to stop at or just below this level."""
        if self._wait_for_mainpyfile:
            return
        exc_type, exc_value, exc_traceback = exc_info
        frame.f_locals['__exception__'] = exc_type, exc_value
        # Old-style string exceptions have no __name__ attribute.
        if type(exc_type) == type(''):
            exc_type_name = exc_type
        else: exc_type_name = exc_type.__name__
        print >>self.stdout, exc_type_name + ':', _saferepr(exc_value)
        self.interaction(frame, exc_traceback)
    # General interaction function
    def interaction(self, frame, traceback):
        """Show where we stopped and enter the command loop."""
        self.setup(frame, traceback)
        self.print_stack_entry(self.stack[self.curindex])
        self.cmdloop()
        self.forget()
    def displayhook(self, obj):
        """Custom displayhook for the exec in default(), which prevents
        assignment of the _ variable in the builtins.
        """
        # reproduce the behavior of the standard displayhook, not printing None
        if obj is not None:
            print repr(obj)
    def default(self, line):
        """Execute *line* as a Python statement in the current frame's
        namespace (the fallback for input that is not a debugger command)."""
        if line[:1] == '!': line = line[1:]
        locals = self.curframe_locals
        globals = self.curframe.f_globals
        try:
            code = compile(line + '\n', '<stdin>', 'single')
            # Temporarily swap the standard streams and displayhook so the
            # executed statement interacts with the debugger's terminal.
            save_stdout = sys.stdout
            save_stdin = sys.stdin
            save_displayhook = sys.displayhook
            try:
                sys.stdin = self.stdin
                sys.stdout = self.stdout
                sys.displayhook = self.displayhook
                exec code in globals, locals
            finally:
                sys.stdout = save_stdout
                sys.stdin = save_stdin
                sys.displayhook = save_displayhook
        except:
            t, v = sys.exc_info()[:2]
            if type(t) == type(''):
                exc_type_name = t
            else: exc_type_name = t.__name__
            print >>self.stdout, '***', exc_type_name + ':', v
    def precmd(self, line):
        """Handle alias expansion and ';;' separator."""
        if not line.strip():
            return line
        args = line.split()
        # Expand aliases repeatedly (an alias may expand to another alias);
        # %1, %2, ... are replaced by positional args, %* by all of them.
        while args[0] in self.aliases:
            line = self.aliases[args[0]]
            ii = 1
            for tmpArg in args[1:]:
                line = line.replace("%" + str(ii),
                                    tmpArg)
                ii = ii + 1
            line = line.replace("%*", ' '.join(args[1:]))
            args = line.split()
        # split into ';;' separated commands
        # unless it's an alias command
        if args[0] != 'alias':
            marker = line.find(';;')
            if marker >= 0:
                # queue up everything after marker
                next = line[marker+2:].lstrip()
                self.cmdqueue.append(next)
                line = line[:marker].rstrip()
        return line
    def onecmd(self, line):
        """Interpret the argument as though it had been typed in response
        to the prompt.
        Checks whether this line is typed at the normal prompt or in
        a breakpoint command list definition.
        """
        if not self.commands_defining:
            return cmd.Cmd.onecmd(self, line)
        else:
            return self.handle_command_def(line)
    def handle_command_def(self,line):
        """Handles one command line during command list definition."""
        # NOTE: local 'cmd' shadows the imported cmd module within this method.
        cmd, arg, line = self.parseline(line)
        if not cmd:
            return
        if cmd == 'silent':
            self.commands_silent[self.commands_bnum] = True
            return # continue to handle other cmd def in the cmd list
        elif cmd == 'end':
            self.cmdqueue = []
            return 1 # end of cmd list
        cmdlist = self.commands[self.commands_bnum]
        if arg:
            cmdlist.append(cmd+' '+arg)
        else:
            cmdlist.append(cmd)
        # Determine if we must stop
        try:
            func = getattr(self, 'do_' + cmd)
        except AttributeError:
            func = self.default
        # one of the resuming commands
        if func.func_name in self.commands_resuming:
            # A resuming command implicitly ends the definition and
            # suppresses the prompt after the command list runs.
            self.commands_doprompt[self.commands_bnum] = False
            self.cmdqueue = []
            return 1
        return
    # Command definitions, called by cmdloop()
    # The argument is the remaining string on the command line
    # Return true to exit from the command loop
    do_h = cmd.Cmd.do_help
    def do_commands(self, arg):
        """Defines a list of commands associated to a breakpoint.
        Those commands will be executed whenever the breakpoint causes
        the program to stop execution."""
        if not arg:
            # Default to the most recently set breakpoint.
            bnum = len(bdb.Breakpoint.bpbynumber)-1
        else:
            try:
                bnum = int(arg)
            except:
                print >>self.stdout, "Usage : commands [bnum]\n ..." \
                                     "\n end"
                return
        self.commands_bnum = bnum
        self.commands[bnum] = []
        self.commands_doprompt[bnum] = True
        self.commands_silent[bnum] = False
        # Switch prompt and enter a nested command loop; handle_command_def
        # collects lines until 'end'.
        prompt_back = self.prompt
        self.prompt = '(com) '
        self.commands_defining = True
        try:
            self.cmdloop()
        finally:
            self.commands_defining = False
            self.prompt = prompt_back
    def do_break(self, arg, temporary = 0):
        # break [ ([filename:]lineno | function) [, "condition"] ]
        # With no argument, list all breakpoints instead of setting one.
        if not arg:
            if self.breaks:  # There's at least one
                print >>self.stdout, "Num Type Disp Enb Where"
                for bp in bdb.Breakpoint.bpbynumber:
                    if bp:
                        bp.bpprint(self.stdout)
            return
        # parse arguments; comma has lowest precedence
        # and cannot occur in filename
        filename = None
        lineno = None
        cond = None
        comma = arg.find(',')
        if comma > 0:
            # parse stuff after comma: "condition"
            cond = arg[comma+1:].lstrip()
            arg = arg[:comma].rstrip()
        # parse stuff before comma: [filename:]lineno | function
        colon = arg.rfind(':')
        funcname = None
        if colon >= 0:
            filename = arg[:colon].rstrip()
            f = self.lookupmodule(filename)
            if not f:
                print >>self.stdout, '*** ', repr(filename),
                print >>self.stdout, 'not found from sys.path'
                return
            else:
                filename = f
            arg = arg[colon+1:].lstrip()
            try:
                lineno = int(arg)
            except ValueError, msg:
                print >>self.stdout, '*** Bad lineno:', arg
                return
        else:
            # no colon; can be lineno or function
            try:
                lineno = int(arg)
            except ValueError:
                try:
                    # Evaluate in the current frame so 'break f' finds the
                    # function object the user means.
                    func = eval(arg,
                                self.curframe.f_globals,
                                self.curframe_locals)
                except:
                    func = arg
                try:
                    if hasattr(func, 'im_func'):
                        func = func.im_func
                    code = func.func_code
                    #use co_name to identify the bkpt (function names
                    #could be aliased, but co_name is invariant)
                    funcname = code.co_name
                    lineno = code.co_firstlineno
                    filename = code.co_filename
                except:
                    # last thing to try
                    (ok, filename, ln) = self.lineinfo(arg)
                    if not ok:
                        print >>self.stdout, '*** The specified object',
                        print >>self.stdout, repr(arg),
                        print >>self.stdout, 'is not a function'
                        print >>self.stdout, 'or was not found along sys.path.'
                        return
                    funcname = ok # ok contains a function name
                    lineno = int(ln)
        if not filename:
            filename = self.defaultFile()
        # Check for reasonable breakpoint
        line = self.checkline(filename, lineno)
        if line:
            # now set the break point
            err = self.set_break(filename, line, temporary, cond, funcname)
            if err: print >>self.stdout, '***', err
            else:
                bp = self.get_breaks(filename, line)[-1]
                print >>self.stdout, "Breakpoint %d at %s:%d" % (bp.number,
                                                                 bp.file,
                                                                 bp.line)
    # To be overridden in derived debuggers
    def defaultFile(self):
        """Produce a reasonable default."""
        filename = self.curframe.f_code.co_filename
        # '<string>' means we are debugging code run via exec; fall back
        # to the main script's filename if we know it.
        if filename == '<string>' and self.mainpyfile:
            filename = self.mainpyfile
        return filename
    do_b = do_break
    def do_tbreak(self, arg):
        # Temporary breakpoint: same syntax as break, removed on first hit.
        self.do_break(arg, 1)
    def lineinfo(self, identifier):
        """Resolve *identifier* (possibly 'module.func', possibly quoted)
        to (funcname, filename, lineno); returns (None, None, None) on
        failure."""
        failed = (None, None, None)
        # Input is identifier, may be in single quotes
        idstring = identifier.split("'")
        if len(idstring) == 1:
            # not in single quotes
            id = idstring[0].strip()
        elif len(idstring) == 3:
            # quoted
            id = idstring[1].strip()
        else:
            return failed
        if id == '': return failed
        parts = id.split('.')
        # Protection for derived debuggers
        if parts[0] == 'self':
            del parts[0]
            if len(parts) == 0:
                return failed
        # Best first guess at file to look at
        fname = self.defaultFile()
        if len(parts) == 1:
            item = parts[0]
        else:
            # More than one part.
            # First is module, second is method/class
            f = self.lookupmodule(parts[0])
            if f:
                fname = f
            item = parts[1]
        answer = find_function(item, fname)
        return answer or failed
    def checkline(self, filename, lineno):
        """Check whether specified line seems to be executable.
        Return `lineno` if it is, 0 if not (e.g. a docstring, comment, blank
        line or EOF). Warning: testing is not comprehensive.
        """
        # this method should be callable before starting debugging, so default
        # to "no globals" if there is no current frame
        globs = self.curframe.f_globals if hasattr(self, 'curframe') else None
        line = linecache.getline(filename, lineno, globs)
        if not line:
            print >>self.stdout, 'End of file'
            return 0
        line = line.strip()
        # Don't allow setting breakpoint at a blank line
        if (not line or (line[0] == '#') or
             (line[:3] == '"""') or line[:3] == "'''"):
            print >>self.stdout, '*** Blank or comment'
            return 0
        return lineno
    def do_enable(self, arg):
        # enable bpnumber [bpnumber ...]: re-enable the listed breakpoints.
        args = arg.split()
        for i in args:
            try:
                i = int(i)
            except ValueError:
                print >>self.stdout, 'Breakpoint index %r is not a number' % i
                continue
            if not (0 <= i < len(bdb.Breakpoint.bpbynumber)):
                print >>self.stdout, 'No breakpoint numbered', i
                continue
            bp = bdb.Breakpoint.bpbynumber[i]
            if bp:
                bp.enable()
    def do_disable(self, arg):
        # disable bpnumber [bpnumber ...]: disable without deleting.
        args = arg.split()
        for i in args:
            try:
                i = int(i)
            except ValueError:
                print >>self.stdout, 'Breakpoint index %r is not a number' % i
                continue
            if not (0 <= i < len(bdb.Breakpoint.bpbynumber)):
                print >>self.stdout, 'No breakpoint numbered', i
                continue
            bp = bdb.Breakpoint.bpbynumber[i]
            if bp:
                bp.disable()
    def do_condition(self, arg):
        # arg is breakpoint number and condition
        args = arg.split(' ', 1)
        try:
            bpnum = int(args[0].strip())
        except ValueError:
            # something went wrong
            print >>self.stdout, \
                'Breakpoint index %r is not a number' % args[0]
            return
        try:
            cond = args[1]
        except:
            # No condition supplied: make the breakpoint unconditional.
            cond = None
        try:
            bp = bdb.Breakpoint.bpbynumber[bpnum]
        except IndexError:
            print >>self.stdout, 'Breakpoint index %r is not valid' % args[0]
            return
        if bp:
            bp.cond = cond
            if not cond:
                print >>self.stdout, 'Breakpoint', bpnum,
                print >>self.stdout, 'is now unconditional.'
    def do_ignore(self,arg):
        """arg is bp number followed by ignore count."""
        args = arg.split()
        try:
            bpnum = int(args[0].strip())
        except ValueError:
            # something went wrong
            print >>self.stdout, \
                'Breakpoint index %r is not a number' % args[0]
            return
        try:
            count = int(args[1].strip())
        except:
            # Missing or bad count clears the ignore count.
            count = 0
        try:
            bp = bdb.Breakpoint.bpbynumber[bpnum]
        except IndexError:
            print >>self.stdout, 'Breakpoint index %r is not valid' % args[0]
            return
        if bp:
            bp.ignore = count
            if count > 0:
                reply = 'Will ignore next '
                if count > 1:
                    reply = reply + '%d crossings' % count
                else:
                    reply = reply + '1 crossing'
                print >>self.stdout, reply + ' of breakpoint %d.' % bpnum
            else:
                print >>self.stdout, 'Will stop next time breakpoint',
                print >>self.stdout, bpnum, 'is reached.'
    def do_clear(self, arg):
        """Three possibilities, tried in this order:
        clear -> clear all breaks, ask for confirmation
        clear file:lineno -> clear all breaks at file:lineno
        clear bpno bpno ... -> clear breakpoints by number"""
        if not arg:
            try:
                reply = raw_input('Clear all breaks? ')
            except EOFError:
                reply = 'no'
            reply = reply.strip().lower()
            if reply in ('y', 'yes'):
                self.clear_all_breaks()
            return
        if ':' in arg:
            # Make sure it works for "clear C:\foo\bar.py:12"
            i = arg.rfind(':')
            filename = arg[:i]
            arg = arg[i+1:]
            try:
                lineno = int(arg)
            except ValueError:
                err = "Invalid line number (%s)" % arg
            else:
                err = self.clear_break(filename, lineno)
            if err: print >>self.stdout, '***', err
            return
        numberlist = arg.split()
        for i in numberlist:
            try:
                i = int(i)
            except ValueError:
                print >>self.stdout, 'Breakpoint index %r is not a number' % i
                continue
            if not (0 <= i < len(bdb.Breakpoint.bpbynumber)):
                print >>self.stdout, 'No breakpoint numbered', i
                continue
            err = self.clear_bpbynumber(i)
            if err:
                print >>self.stdout, '***', err
            else:
                print >>self.stdout, 'Deleted breakpoint', i
    do_cl = do_clear # 'c' is already an abbreviation for 'continue'
    def do_where(self, arg):
        # Print the current stack trace ('w' / 'bt').
        self.print_stack_trace()
    do_w = do_where
    do_bt = do_where
    def do_up(self, arg):
        # Move the current frame one level up (to an older frame).
        if self.curindex == 0:
            print >>self.stdout, '*** Oldest frame'
        else:
            self.curindex = self.curindex - 1
            self.curframe = self.stack[self.curindex][0]
            self.curframe_locals = self.curframe.f_locals
            self.print_stack_entry(self.stack[self.curindex])
            # Reset the 'list' position to the new frame's current line.
            self.lineno = None
    do_u = do_up
    def do_down(self, arg):
        # Move the current frame one level down (to a newer frame).
        if self.curindex + 1 == len(self.stack):
            print >>self.stdout, '*** Newest frame'
        else:
            self.curindex = self.curindex + 1
            self.curframe = self.stack[self.curindex][0]
            self.curframe_locals = self.curframe.f_locals
            self.print_stack_entry(self.stack[self.curindex])
            self.lineno = None
    do_d = do_down
    def do_until(self, arg):
        # Continue until a line greater than the current one is reached.
        self.set_until(self.curframe)
        return 1
    do_unt = do_until
    def do_step(self, arg):
        # Stop at the first possible occasion (including called functions).
        self.set_step()
        return 1
    do_s = do_step
    def do_next(self, arg):
        # Stop at the next line in the current function (step over calls).
        self.set_next(self.curframe)
        return 1
    do_n = do_next
    def do_run(self, arg):
        """Restart program by raising an exception to be caught in the main
        debugger loop. If arguments were given, set them in sys.argv."""
        if arg:
            import shlex
            argv0 = sys.argv[0:1]
            sys.argv = shlex.split(arg)
            sys.argv[:0] = argv0
        raise Restart
    do_restart = do_run
    def do_return(self, arg):
        # Continue until the current function returns.
        self.set_return(self.curframe)
        return 1
    do_r = do_return
    def do_continue(self, arg):
        # Continue until the next breakpoint.
        self.set_continue()
        return 1
    do_c = do_cont = do_continue
    def do_jump(self, arg):
        # jump lineno: set the next line to execute (bottom frame only).
        if self.curindex + 1 != len(self.stack):
            print >>self.stdout, "*** You can only jump within the bottom frame"
            return
        try:
            arg = int(arg)
        except ValueError:
            print >>self.stdout, "*** The 'jump' command requires a line number."
        else:
            try:
                # Do the jump, fix up our copy of the stack, and display the
                # new position
                self.curframe.f_lineno = arg
                self.stack[self.curindex] = self.stack[self.curindex][0], arg
                self.print_stack_entry(self.stack[self.curindex])
            except ValueError, e:
                # The frame rejected the jump (e.g. into a loop or finally).
                print >>self.stdout, '*** Jump failed:', e
    do_j = do_jump
    def do_debug(self, arg):
        # Run *arg* under a nested Pdb instance, suspending our own trace
        # function for the duration.
        sys.settrace(None)
        globals = self.curframe.f_globals
        locals = self.curframe_locals
        p = Pdb(self.completekey, self.stdin, self.stdout)
        p.prompt = "(%s) " % self.prompt.strip()
        print >>self.stdout, "ENTERING RECURSIVE DEBUGGER"
        sys.call_tracing(p.run, (arg, globals, locals))
        print >>self.stdout, "LEAVING RECURSIVE DEBUGGER"
        sys.settrace(self.trace_dispatch)
        self.lastcmd = p.lastcmd
    def do_quit(self, arg):
        # Quit the debugger; the debugged program is aborted.
        self._user_requested_quit = 1
        self.set_quit()
        return 1
    do_q = do_quit
    do_exit = do_quit
    def do_EOF(self, arg):
        # Ctrl-D at the prompt: print a newline and quit like 'quit'.
        print >>self.stdout
        self._user_requested_quit = 1
        self.set_quit()
        return 1
    def do_args(self, arg):
        # Print the arguments of the current function.
        co = self.curframe.f_code
        dict = self.curframe_locals
        n = co.co_argcount
        # co_flags bit 4 => *args present, bit 8 => **kwargs present;
        # each adds one more named variable to report.
        if co.co_flags & 4: n = n+1
        if co.co_flags & 8: n = n+1
        for i in range(n):
            name = co.co_varnames[i]
            print >>self.stdout, name, '=',
            if name in dict: print >>self.stdout, dict[name]
            else: print >>self.stdout, "*** undefined ***"
    do_a = do_args
    def do_retval(self, arg):
        # Print the return value of the last returned function, if any.
        if '__return__' in self.curframe_locals:
            print >>self.stdout, self.curframe_locals['__return__']
        else:
            print >>self.stdout, '*** Not yet returned!'
    do_rv = do_retval
    def _getval(self, arg):
        """Evaluate *arg* in the current frame; print and re-raise on error."""
        try:
            return eval(arg, self.curframe.f_globals,
                        self.curframe_locals)
        except:
            t, v = sys.exc_info()[:2]
            if isinstance(t, str):
                exc_type_name = t
            else: exc_type_name = t.__name__
            print >>self.stdout, '***', exc_type_name + ':', repr(v)
            raise
    def do_p(self, arg):
        # p expression: print repr() of the value.
        try:
            print >>self.stdout, repr(self._getval(arg))
        except:
            # _getval already reported the error.
            pass
    def do_pp(self, arg):
        # pp expression: pretty-print the value.
        try:
            pprint.pprint(self._getval(arg), self.stdout)
        except:
            pass
    def do_list(self, arg):
        # list [first[, last]]: show source around the current line, or
        # continue the previous listing when repeated with no argument.
        self.lastcmd = 'list'
        last = None
        if arg:
            try:
                x = eval(arg, {}, {})
                if type(x) == type(()):
                    first, last = x
                    first = int(first)
                    last = int(last)
                    if last < first:
                        # Assume it's a count
                        last = first + last
                else:
                    first = max(1, int(x) - 5)
            except:
                print >>self.stdout, '*** Error in argument:', repr(arg)
                return
        elif self.lineno is None:
            first = max(1, self.curframe.f_lineno - 5)
        else:
            # Continue listing from where the last 'list' stopped.
            first = self.lineno + 1
        if last is None:
            last = first + 10
        filename = self.curframe.f_code.co_filename
        breaklist = self.get_file_breaks(filename)
        try:
            for lineno in range(first, last+1):
                line = linecache.getline(filename, lineno,
                                         self.curframe.f_globals)
                if not line:
                    print >>self.stdout, '[EOF]'
                    break
                else:
                    # Annotate: 'B' marks a breakpoint, '->' the current line.
                    s = repr(lineno).rjust(3)
                    if len(s) < 4: s = s + ' '
                    if lineno in breaklist: s = s + 'B'
                    else: s = s + ' '
                    if lineno == self.curframe.f_lineno:
                        s = s + '->'
                    print >>self.stdout, s + '\t' + line,
                    self.lineno = lineno
        except KeyboardInterrupt:
            pass
    do_l = do_list
    def do_whatis(self, arg):
        # whatis expression: report whether it is a function, a method,
        # or (otherwise) its type.
        try:
            value = eval(arg, self.curframe.f_globals,
                         self.curframe_locals)
        except:
            t, v = sys.exc_info()[:2]
            if type(t) == type(''):
                exc_type_name = t
            else: exc_type_name = t.__name__
            print >>self.stdout, '***', exc_type_name + ':', repr(v)
            return
        code = None
        # Is it a function?
        try: code = value.func_code
        except: pass
        if code:
            print >>self.stdout, 'Function', code.co_name
            return
        # Is it an instance method?
        try: code = value.im_func.func_code
        except: pass
        if code:
            print >>self.stdout, 'Method', code.co_name
            return
        # None of the above...
        print >>self.stdout, type(value)
    def do_alias(self, arg):
        # alias [name [command]]: list, show, or define an alias.
        args = arg.split()
        if len(args) == 0:
            keys = self.aliases.keys()
            keys.sort()
            for alias in keys:
                print >>self.stdout, "%s = %s" % (alias, self.aliases[alias])
            return
        if args[0] in self.aliases and len(args) == 1:
            print >>self.stdout, "%s = %s" % (args[0], self.aliases[args[0]])
        else:
            self.aliases[args[0]] = ' '.join(args[1:])
    def do_unalias(self, arg):
        # unalias name: delete the named alias, silently ignoring unknowns.
        args = arg.split()
        if len(args) == 0: return
        if args[0] in self.aliases:
            del self.aliases[args[0]]
    #list of all the commands making the program resume execution.
    commands_resuming = ['do_continue', 'do_step', 'do_next', 'do_return',
                         'do_quit', 'do_jump']
    # Print a traceback starting at the top stack frame.
    # The most recently entered frame is printed last;
    # this is different from dbx and gdb, but consistent with
    # the Python interpreter's stack trace.
    # It is also consistent with the up/down commands (which are
    # compatible with dbx and gdb: up moves towards 'main()'
    # and down moves towards the most recent stack frame).
    def print_stack_trace(self):
        """Print every cached stack entry; Ctrl-C aborts the listing."""
        try:
            for frame_lineno in self.stack:
                self.print_stack_entry(frame_lineno)
        except KeyboardInterrupt:
            pass
    def print_stack_entry(self, frame_lineno, prompt_prefix=line_prefix):
        """Print one (frame, lineno) pair, marking the current frame with '>'."""
        frame, lineno = frame_lineno
        if frame is self.curframe:
            print >>self.stdout, '>',
        else:
            print >>self.stdout, ' ',
        print >>self.stdout, self.format_stack_entry(frame_lineno,
                                                     prompt_prefix)
    # Help methods (derived from pdb.doc).  Each full-name help_* delegates
    # to the abbreviated form, which prints the help text to self.stdout.
    def help_help(self):
        self.help_h()
    def help_h(self):
        print >>self.stdout, """h(elp)
Without argument, print the list of available commands.
With a command name as argument, print help about that command
"help pdb" pipes the full documentation file to the $PAGER
"help exec" gives help on the ! command"""
    def help_where(self):
        self.help_w()
    def help_w(self):
        print >>self.stdout, """w(here)
Print a stack trace, with the most recent frame at the bottom.
An arrow indicates the "current frame", which determines the
context of most commands. 'bt' is an alias for this command."""
    help_bt = help_w
    def help_down(self):
        self.help_d()
    def help_d(self):
        print >>self.stdout, """d(own)
Move the current frame one level down in the stack trace
(to a newer frame)."""
    def help_up(self):
        self.help_u()
    def help_u(self):
        print >>self.stdout, """u(p)
Move the current frame one level up in the stack trace
(to an older frame)."""
    def help_break(self):
        self.help_b()
    def help_b(self):
        print >>self.stdout, """b(reak) ([file:]lineno | function) [, condition]
With a line number argument, set a break there in the current
file. With a function name, set a break at first executable line
of that function. Without argument, list all breaks. If a second
argument is present, it is a string specifying an expression
which must evaluate to true before the breakpoint is honored.
The line number may be prefixed with a filename and a colon,
to specify a breakpoint in another file (probably one that
hasn't been loaded yet). The file is searched for on sys.path;
the .py suffix may be omitted."""
    def help_clear(self):
        self.help_cl()
    def help_cl(self):
        print >>self.stdout, "cl(ear) filename:lineno"
        print >>self.stdout, """cl(ear) [bpnumber [bpnumber...]]
With a space separated list of breakpoint numbers, clear
those breakpoints. Without argument, clear all breaks (but
first ask confirmation). With a filename:lineno argument,
clear all breaks at that line in that file.
Note that the argument is different from previous versions of
the debugger (in python distributions 1.5.1 and before) where
a linenumber was used instead of either filename:lineno or
breakpoint numbers."""
    def help_tbreak(self):
        print >>self.stdout, """tbreak same arguments as break, but breakpoint
is removed when first hit."""
    def help_enable(self):
        print >>self.stdout, """enable bpnumber [bpnumber ...]
Enables the breakpoints given as a space separated list of
bp numbers."""
    def help_disable(self):
        print >>self.stdout, """disable bpnumber [bpnumber ...]
Disables the breakpoints given as a space separated list of
bp numbers."""
    def help_ignore(self):
        print >>self.stdout, """ignore bpnumber count
Sets the ignore count for the given breakpoint number. A breakpoint
becomes active when the ignore count is zero. When non-zero, the
count is decremented each time the breakpoint is reached and the
breakpoint is not disabled and any associated condition evaluates
to true."""
    def help_condition(self):
        print >>self.stdout, """condition bpnumber str_condition
str_condition is a string specifying an expression which
must evaluate to true before the breakpoint is honored.
If str_condition is absent, any existing condition is removed;
i.e., the breakpoint is made unconditional."""
    def help_step(self):
        self.help_s()
    def help_s(self):
        print >>self.stdout, """s(tep)
Execute the current line, stop at the first possible occasion
(either in a function that is called or in the current function)."""
def help_until(self):
self.help_unt()
def help_unt(self):
print """unt(il)
Continue execution until the line with a number greater than the current
one is reached or until the current frame returns"""
    def help_next(self):
        self.help_n()
    def help_n(self):
        print >>self.stdout, """n(ext)
Continue execution until the next line in the current function
is reached or it returns."""
    def help_return(self):
        self.help_r()
    def help_r(self):
        print >>self.stdout, """r(eturn)
Continue execution until the current function returns."""
    # 'continue' has two long aliases ('continue', 'cont') plus 'c'.
    def help_continue(self):
        self.help_c()
    def help_cont(self):
        self.help_c()
    def help_c(self):
        print >>self.stdout, """c(ont(inue))
Continue execution, only stop when a breakpoint is encountered."""
    def help_jump(self):
        self.help_j()
    def help_j(self):
        print >>self.stdout, """j(ump) lineno
Set the next line that will be executed."""
    def help_debug(self):
        print >>self.stdout, """debug code
Enter a recursive debugger that steps through the code argument
(which is an arbitrary expression or statement to be executed
in the current environment)."""
    # --- Help topics for inspection commands. ---
    def help_list(self):
        self.help_l()
    def help_l(self):
        print >>self.stdout, """l(ist) [first [,last]]
List source code for the current file.
Without arguments, list 11 lines around the current line
or continue the previous listing.
With one argument, list 11 lines starting at that line.
With two arguments, list the given range;
if the second argument is less than the first, it is a count."""
    def help_args(self):
        self.help_a()
    def help_a(self):
        print >>self.stdout, """a(rgs)
Print the arguments of the current function."""
    def help_p(self):
        print >>self.stdout, """p expression
Print the value of the expression."""
    def help_pp(self):
        print >>self.stdout, """pp expression
Pretty-print the value of the expression."""
    def help_exec(self):
        print >>self.stdout, """(!) statement
Execute the (one-line) statement in the context of
the current stack frame.
The exclamation point can be omitted unless the first word
of the statement resembles a debugger command.
To assign to a global variable you must always prefix the
command with a 'global' command, e.g.:
(Pdb) global list_options; list_options = ['-l']
(Pdb)"""
def help_run(self):
print """run [args...]
Restart the debugged python program. If a string is supplied, it is
splitted with "shlex" and the result is used as the new sys.argv.
History, breakpoints, actions and debugger options are preserved.
"restart" is an alias for "run"."""
    # 'restart' shares the help text of 'run'.
    help_restart = help_run
    def help_quit(self):
        self.help_q()
    def help_q(self):
        print >>self.stdout, """q(uit) or exit - Quit from the debugger.
The program being executed is aborted."""
    # 'exit' shares the help text of 'quit'.
    help_exit = help_q
    def help_whatis(self):
        print >>self.stdout, """whatis arg
Prints the type of the argument."""
    def help_EOF(self):
        print >>self.stdout, """EOF
Handles the receipt of EOF as a command."""
    def help_alias(self):
        print >>self.stdout, """alias [name [command [parameter parameter ...]]]
Creates an alias called 'name' the executes 'command'. The command
must *not* be enclosed in quotes. Replaceable parameters are
indicated by %1, %2, and so on, while %* is replaced by all the
parameters. If no command is given, the current alias for name
is shown. If no name is given, all aliases are listed.
Aliases may be nested and can contain anything that can be
legally typed at the pdb prompt. Note! You *can* override
internal pdb commands with aliases! Those internal commands
are then hidden until the alias is removed. Aliasing is recursively
applied to the first word of the command line; all other words
in the line are left alone.
Some useful aliases (especially when placed in the .pdbrc file) are:
#Print instance variables (usage "pi classInst")
alias pi for k in %1.__dict__.keys(): print "%1.",k,"=",%1.__dict__[k]
#Print instance variables in self
alias ps pi self
"""
    def help_unalias(self):
        print >>self.stdout, """unalias name
Deletes the specified alias."""
    def help_commands(self):
        print >>self.stdout, """commands [bpnumber]
(com) ...
(com) end
(Pdb)
Specify a list of commands for breakpoint number bpnumber. The
commands themselves appear on the following lines. Type a line
containing just 'end' to terminate the commands.
To remove all commands from a breakpoint, type commands and
follow it immediately with end; that is, give no commands.
With no bpnumber argument, commands refers to the last
breakpoint set.
You can use breakpoint commands to start your program up again.
Simply use the continue command, or step, or any other
command that resumes execution.
Specifying any command resuming execution (currently continue,
step, next, return, jump, quit and their abbreviations) terminates
the command list (as if that command was immediately followed by end).
This is because any time you resume execution
(even with a simple next or step), you may encounter
another breakpoint--which could have its own command list, leading to
ambiguities about which list to execute.
If you use the 'silent' command in the command list, the
usual message about stopping at a breakpoint is not printed. This may
be desirable for breakpoints that are to print a specific message and
then continue. If none of the other commands print anything, you
see no sign that the breakpoint was reached.
"""
    def help_pdb(self):
        # Delegates to the module-level help(), which pages pdb.doc.
        help()
def lookupmodule(self, filename):
"""Helper function for break/clear parsing -- may be overridden.
lookupmodule() translates (possibly incomplete) file or module name
into an absolute file name.
"""
if os.path.isabs(filename) and os.path.exists(filename):
return filename
f = os.path.join(sys.path[0], filename)
if os.path.exists(f) and self.canonic(f) == self.mainpyfile:
return f
root, ext = os.path.splitext(filename)
if ext == '':
filename = filename + '.py'
if os.path.isabs(filename):
return filename
for dirname in sys.path:
while os.path.islink(dirname):
dirname = os.readlink(dirname)
fullname = os.path.join(dirname, filename)
if os.path.exists(fullname):
return fullname
return None
    def _runscript(self, filename):
        """Run *filename* under the debugger as the program's __main__."""
        # The script has to run in __main__ namespace (or imports from
        # __main__ will break).
        #
        # So we clear up the __main__ and set several special variables
        # (this gets rid of pdb's globals and cleans old variables on restarts).
        import __main__
        __main__.__dict__.clear()
        __main__.__dict__.update({"__name__" : "__main__",
                                  "__file__" : filename,
                                  "__builtins__": __builtins__,
                                 })
        # When bdb sets tracing, a number of call and line events happens
        # BEFORE debugger even reaches user's code (and the exact sequence of
        # events depends on python version). So we take special measures to
        # avoid stopping before we reach the main script (see user_line and
        # user_call for details).
        self._wait_for_mainpyfile = 1
        self.mainpyfile = self.canonic(filename)
        self._user_requested_quit = 0
        # execfile() runs the script inside the (now clean) __main__ namespace.
        statement = 'execfile(%r)' % filename
        self.run(statement)
# Simplified interface
def run(statement, globals=None, locals=None):
    # Debug *statement* in a fresh Pdb instance.
    Pdb().run(statement, globals, locals)
def runeval(expression, globals=None, locals=None):
    # Debug *expression* and return its value.
    return Pdb().runeval(expression, globals, locals)
def runctx(statement, globals, locals):
    # B/W compatibility
    run(statement, globals, locals)
def runcall(*args, **kwds):
    # Debug a call of args[0](*args[1:], **kwds) and return its result.
    return Pdb().runcall(*args, **kwds)
def set_trace():
    # Enter the debugger at the caller's frame (hence f_back).
    Pdb().set_trace(sys._getframe().f_back)
# Post-Mortem interface
def post_mortem(t=None):
    # handling the default
    if t is None:
        # sys.exc_info() returns (type, value, traceback) if an exception is
        # being handled, otherwise it returns None
        t = sys.exc_info()[2]
    if t is None:
        raise ValueError("A valid traceback must be passed if no "
                         "exception is being handled")
    p = Pdb()
    p.reset()
    p.interaction(None, t)
def pm():
    # Post-mortem the exception most recently printed to sys.stderr.
    post_mortem(sys.last_traceback)
# Main program for testing
TESTCMD = 'import x; x.main()'
def test():
    run(TESTCMD)
# print help
def help():
    # Locate the first 'pdb.doc' on sys.path and page it with $PAGER
    # (falling back to more).
    for dirname in sys.path:
        fullname = os.path.join(dirname, 'pdb.doc')
        if os.path.exists(fullname):
            sts = os.system('${PAGER-more} '+fullname)
            if sts: print '*** Pager exit status:', sts
            break
    else:
        # for/else: no sys.path entry contained the help file.
        print 'Sorry, can\'t find the help file "pdb.doc"',
        print 'along the Python search path'
def main():
    """Command-line entry point: debug the script named on the command line,
    restarting it until the user explicitly quits."""
    if not sys.argv[1:] or sys.argv[1] in ("--help", "-h"):
        print "usage: pdb.py scriptfile [arg] ..."
        sys.exit(2)
    mainpyfile = sys.argv[1] # Get script filename
    if not os.path.exists(mainpyfile):
        print 'Error:', mainpyfile, 'does not exist'
        sys.exit(1)
    del sys.argv[0] # Hide "pdb.py" from argument list
    # Replace pdb's dir with script's dir in front of module search path.
    sys.path[0] = os.path.dirname(mainpyfile)
    # Note on saving/restoring sys.argv: it's a good idea when sys.argv was
    # modified by the script being debugged. It's a bad idea when it was
    # changed by the user from the command line. There is a "restart" command
    # which allows explicit specification of command line arguments.
    pdb = Pdb()
    while True:
        try:
            pdb._runscript(mainpyfile)
            if pdb._user_requested_quit:
                break
            print "The program finished and will be restarted"
        except Restart:
            # Raised by the 'run'/'restart' command; loop runs the script again.
            print "Restarting", mainpyfile, "with arguments:"
            print "\t" + " ".join(sys.argv[1:])
        except SystemExit:
            # In most cases SystemExit does not warrant a post-mortem session.
            print "The program exited via sys.exit(). Exit status: ",
            print sys.exc_info()[1]
        except:
            # Any other uncaught exception: drop into post-mortem debugging.
            traceback.print_exc()
            print "Uncaught exception. Entering post mortem debugging"
            print "Running 'cont' or 'step' will restart the program"
            t = sys.exc_info()[2]
            pdb.interaction(None, t)
            print "Post mortem debugger finished. The " + mainpyfile + \
                  " will be restarted"
# When invoked as main program, invoke the debugger on a script
if __name__ == '__main__':
    import pdb
    pdb.main()
| gpl-2.0 |
lmyrefelt/CouchPotatoServer | libs/rsa/bigfile.py | 196 | 3101 | # -*- coding: utf-8 -*-
#
# Copyright 2011 Sybren A. Stüvel <sybren@stuvel.eu>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
'''Large file support
- break a file into smaller blocks, and encrypt them, and store the
encrypted blocks in another file.
- take such an encrypted files, decrypt its blocks, and reconstruct the
original file.
The encrypted file format is as follows, where || denotes byte concatenation:
FILE := VERSION || BLOCK || BLOCK ...
BLOCK := LENGTH || DATA
LENGTH := varint-encoded length of the subsequent data. Varint comes from
Google Protobuf, and encodes an integer into a variable number of bytes.
Each byte uses the 7 lowest bits to encode the value. The highest bit set
to 1 indicates the next byte is also part of the varint. The last byte will
have this bit set to 0.
This file format is called the VARBLOCK format, in line with the varint format
used to denote the block sizes.
'''
from rsa import key, common, pkcs1, varblock
from rsa._compat import byte
def encrypt_bigfile(infile, outfile, pub_key):
    '''Encrypts a file, writing it to 'outfile' in VARBLOCK format.

    :param infile: file-like object to read the cleartext from
    :param outfile: file-like object to write the crypto in VARBLOCK format to
    :param pub_key: :py:class:`rsa.PublicKey` to encrypt with
    '''

    if not isinstance(pub_key, key.PublicKey):
        raise TypeError('Public key required, but got %r' % pub_key)

    # Each cleartext block must leave 11 bytes of room for PKCS#1 padding.
    modulus_bytes = common.bit_size(pub_key.n) // 8
    cleartext_blocksize = modulus_bytes - 11

    # A VARBLOCK stream starts with its format version number.
    outfile.write(byte(varblock.VARBLOCK_VERSION))

    # Encrypt fixed-size cleartext chunks and emit them as
    # varint-length-prefixed blocks.
    for cleartext_block in varblock.yield_fixedblocks(infile, cleartext_blocksize):
        ciphertext = pkcs1.encrypt(cleartext_block, pub_key)
        varblock.write_varint(outfile, len(ciphertext))
        outfile.write(ciphertext)
def decrypt_bigfile(infile, outfile, priv_key):
    '''Decrypts an encrypted VARBLOCK file, writing it to 'outfile'

    :param infile: file-like object to read the crypto in VARBLOCK format from
    :param outfile: file-like object to write the cleartext to
    :param priv_key: :py:class:`rsa.PrivateKey` to decrypt with
    '''

    if not isinstance(priv_key, key.PrivateKey):
        raise TypeError('Private key required, but got %r' % priv_key)

    # yield_varblocks() validates the version byte and then yields each
    # length-prefixed ciphertext block in turn.
    for ciphertext in varblock.yield_varblocks(infile):
        outfile.write(pkcs1.decrypt(ciphertext, priv_key))
__all__ = ['encrypt_bigfile', 'decrypt_bigfile']
| gpl-3.0 |
darren-wang/gl | glance/db/sqlalchemy/migrate_repo/versions/020_drop_images_table_location.py | 19 | 1191 | # Copyright 2013 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import sqlalchemy
from glance.db.sqlalchemy.migrate_repo import schema
def get_images_table(meta):
    """Return the existing 'images' table, reflected from the database."""
    images = sqlalchemy.Table('images', meta, autoload=True)
    return images
def upgrade(migrate_engine):
    """Drop the legacy 'location' column from the images table."""
    meta = sqlalchemy.schema.MetaData(migrate_engine)
    get_images_table(meta).columns['location'].drop()
def downgrade(migrate_engine):
    """Re-create the 'location' column removed by upgrade()."""
    meta = sqlalchemy.schema.MetaData(migrate_engine)
    location_column = sqlalchemy.Column('location', schema.Text())
    location_column.create(get_images_table(meta))
| apache-2.0 |
praekelt/django-photologue | photologue/admin.py | 2 | 2551 | """ Newforms Admin configuration for Photologue
"""
from django.contrib import admin
from django.contrib.contenttypes import generic
from models import *
class GalleryAdmin(admin.ModelAdmin):
    # Admin options for galleries: slug auto-filled from the title,
    # photos selected through the two-pane horizontal filter widget.
    list_display = ('title', 'date_added', 'photo_count', 'is_public')
    list_filter = ['date_added', 'is_public']
    date_hierarchy = 'date_added'
    prepopulated_fields = {'title_slug': ('title',)}
    filter_horizontal = ('photos',)
class PhotoAdmin(admin.ModelAdmin):
    # Admin options for single photos; the change list shows an
    # admin_thumbnail preview column.
    list_display = ('title', 'date_taken', 'date_added', 'is_public', 'tags', 'view_count', 'admin_thumbnail')
    list_filter = ['date_added', 'is_public']
    search_fields = ['title', 'title_slug', 'caption']
    list_per_page = 10
    prepopulated_fields = {'title_slug': ('title',)}
class PhotoEffectAdmin(admin.ModelAdmin):
    # Effect options are grouped into themed fieldsets on the change form.
    list_display = ('name', 'description', 'color', 'brightness', 'contrast', 'sharpness', 'filters', 'admin_sample')
    fieldsets = (
        (None, {
            'fields': ('name', 'description')
        }),
        ('Adjustments', {
            'fields': ('color', 'brightness', 'contrast', 'sharpness')
        }),
        ('Filters', {
            'fields': ('filters',)
        }),
        ('Reflection', {
            'fields': ('reflection_size', 'reflection_strength', 'background_color')
        }),
        ('Transpose', {
            'fields': ('transpose_method',)
        }),
    )
class PhotoSizeAdmin(admin.ModelAdmin):
    # Admin options for photo size presets, grouped into fieldsets.
    list_display = ('name', 'width', 'height', 'crop', 'pre_cache', 'effect', 'increment_count')
    fieldsets = (
        (None, {
            'fields': ('name', 'width', 'height', 'quality')
        }),
        ('Options', {
            'fields': ('upscale', 'crop', 'pre_cache', 'increment_count')
        }),
        ('Enhancements', {
            'fields': ('effect', 'watermark',)
        }),
    )
class WatermarkAdmin(admin.ModelAdmin):
    # Admin options for watermark overlays.
    list_display = ('name', 'opacity', 'style')
class GalleryUploadAdmin(admin.ModelAdmin):
    # Uploads are one-shot: editing an upload record makes no sense.
    def has_change_permission(self, request, obj=None):
        return False # To remove the 'Save and continue editing' button
class ImageOverrideInline(generic.GenericTabularInline):
    # Generic inline so image overrides can be edited on any related object.
    model = ImageOverride
# Register every photologue model with the default admin site.
admin.site.register(Gallery, GalleryAdmin)
admin.site.register(GalleryUpload, GalleryUploadAdmin)
admin.site.register(Photo, PhotoAdmin)
admin.site.register(PhotoEffect, PhotoEffectAdmin)
admin.site.register(PhotoSize, PhotoSizeAdmin)
admin.site.register(Watermark, WatermarkAdmin)
| bsd-3-clause |
kytvi2p/Sigil | 3rdparty/python/Lib/test/test_locale.py | 8 | 20616 | from test.support import verbose
import unittest
import locale
import sys
import codecs
class BaseLocalizedTest(unittest.TestCase):
    #
    # Base class for tests using a real locale
    #
    @classmethod
    def setUpClass(cls):
        # Build a platform-specific list of candidate "en_US" locale names;
        # the first one setlocale() accepts becomes cls.enUS_locale.
        if sys.platform == 'darwin':
            import os
            tlocs = ("en_US.UTF-8", "en_US.ISO8859-1", "en_US")
            if int(os.uname().release.split('.')[0]) < 10:
                # The locale test work fine on OSX 10.6, I (ronaldoussoren)
                # haven't had time yet to verify if tests work on OSX 10.5
                # (10.4 is known to be bad)
                raise unittest.SkipTest("Locale support on MacOSX is minimal")
        elif sys.platform.startswith("win"):
            tlocs = ("En", "English")
        else:
            tlocs = ("en_US.UTF-8", "en_US.ISO8859-1",
                     "en_US.US-ASCII", "en_US")
        try:
            oldlocale = locale.setlocale(locale.LC_NUMERIC)
            for tloc in tlocs:
                try:
                    locale.setlocale(locale.LC_NUMERIC, tloc)
                except locale.Error:
                    continue
                break
            else:
                # for/else: no candidate locale could be installed.
                raise unittest.SkipTest("Test locale not supported "
                                        "(tried %s)" % (', '.join(tlocs)))
            cls.enUS_locale = tloc
        finally:
            # Restore the process-wide locale changed while probing.
            locale.setlocale(locale.LC_NUMERIC, oldlocale)
    def setUp(self):
        # self.locale_type (e.g. LC_NUMERIC) is supplied by subclasses.
        oldlocale = locale.setlocale(self.locale_type)
        self.addCleanup(locale.setlocale, self.locale_type, oldlocale)
        locale.setlocale(self.locale_type, self.enUS_locale)
        if verbose:
            print("testing with %r..." % self.enUS_locale, end=' ', flush=True)
class BaseCookedTest(unittest.TestCase):
    #
    # Base class for tests using cooked localeconv() values
    #
    def setUp(self):
        # Make locale.localeconv() report this class's canned values
        # instead of the real system locale database.
        locale._override_localeconv = self.cooked_values
    def tearDown(self):
        locale._override_localeconv = {}
class CCookedTest(BaseCookedTest):
    # A cooked "C" locale
    cooked_values = {
        'currency_symbol': '',
        'decimal_point': '.',
        'frac_digits': 127,
        'grouping': [],
        'int_curr_symbol': '',
        'int_frac_digits': 127,
        'mon_decimal_point': '',
        'mon_grouping': [],
        'mon_thousands_sep': '',
        'n_cs_precedes': 127,
        'n_sep_by_space': 127,
        'n_sign_posn': 127,
        'negative_sign': '',
        'p_cs_precedes': 127,
        'p_sep_by_space': 127,
        'p_sign_posn': 127,
        'positive_sign': '',
        'thousands_sep': ''
    }
class EnUSCookedTest(BaseCookedTest):
    # A cooked "en_US" locale
    cooked_values = {
        'currency_symbol': '$',
        'decimal_point': '.',
        'frac_digits': 2,
        'grouping': [3, 3, 0],
        'int_curr_symbol': 'USD ',
        'int_frac_digits': 2,
        'mon_decimal_point': '.',
        'mon_grouping': [3, 3, 0],
        'mon_thousands_sep': ',',
        'n_cs_precedes': 1,
        'n_sep_by_space': 0,
        'n_sign_posn': 1,
        'negative_sign': '-',
        'p_cs_precedes': 1,
        'p_sep_by_space': 0,
        'p_sign_posn': 1,
        'positive_sign': '',
        'thousands_sep': ','
    }
class FrFRCookedTest(BaseCookedTest):
    # A cooked "fr_FR" locale with a space character as decimal separator
    # and a non-ASCII currency symbol.
    cooked_values = {
        'currency_symbol': '\u20ac',
        'decimal_point': ',',
        'frac_digits': 2,
        'grouping': [3, 3, 0],
        'int_curr_symbol': 'EUR ',
        'int_frac_digits': 2,
        'mon_decimal_point': ',',
        'mon_grouping': [3, 3, 0],
        'mon_thousands_sep': ' ',
        'n_cs_precedes': 0,
        'n_sep_by_space': 1,
        'n_sign_posn': 1,
        'negative_sign': '-',
        'p_cs_precedes': 0,
        'p_sep_by_space': 1,
        'p_sign_posn': 1,
        'positive_sign': '',
        'thousands_sep': ' '
    }
class BaseFormattingTest(object):
    """Mixin with helpers that assert on locale formatting output."""

    def _test_formatfunc(self, format, value, out, func, **format_opts):
        # Generic driver: run *func* and compare its text against *out*.
        result = func(format, value, **format_opts)
        self.assertEqual(result, out)

    def _test_format(self, format, value, out, **format_opts):
        self._test_formatfunc(format, value, out, func=locale.format,
                              **format_opts)

    def _test_format_string(self, format, value, out, **format_opts):
        self._test_formatfunc(format, value, out, func=locale.format_string,
                              **format_opts)

    def _test_currency(self, value, out, **format_opts):
        self.assertEqual(locale.currency(value, **format_opts), out)
class EnUSNumberFormatting(BaseFormattingTest):
    # Mixin exercising locale.format/format_string under an en_US-style
    # numeric locale; expected values are built from the locale's own
    # thousands separator so they hold for any of the candidate locales.
    # XXX there is a grouping + padding bug when the thousands separator
    # is empty but the grouping array contains values (e.g. Solaris 10)
    def setUp(self):
        self.sep = locale.localeconv()['thousands_sep']
    def test_grouping(self):
        self._test_format("%f", 1024, grouping=1, out='1%s024.000000' % self.sep)
        self._test_format("%f", 102, grouping=1, out='102.000000')
        self._test_format("%f", -42, grouping=1, out='-42.000000')
        self._test_format("%+f", -42, grouping=1, out='-42.000000')
    def test_grouping_and_padding(self):
        self._test_format("%20.f", -42, grouping=1, out='-42'.rjust(20))
        if self.sep:
            self._test_format("%+10.f", -4200, grouping=1,
                out=('-4%s200' % self.sep).rjust(10))
            self._test_format("%-10.f", -4200, grouping=1,
                out=('-4%s200' % self.sep).ljust(10))
    def test_integer_grouping(self):
        self._test_format("%d", 4200, grouping=True, out='4%s200' % self.sep)
        self._test_format("%+d", 4200, grouping=True, out='+4%s200' % self.sep)
        self._test_format("%+d", -4200, grouping=True, out='-4%s200' % self.sep)
    def test_integer_grouping_and_padding(self):
        self._test_format("%10d", 4200, grouping=True,
            out=('4%s200' % self.sep).rjust(10))
        self._test_format("%-10d", -4200, grouping=True,
            out=('-4%s200' % self.sep).ljust(10))
    def test_simple(self):
        self._test_format("%f", 1024, grouping=0, out='1024.000000')
        self._test_format("%f", 102, grouping=0, out='102.000000')
        self._test_format("%f", -42, grouping=0, out='-42.000000')
        self._test_format("%+f", -42, grouping=0, out='-42.000000')
    def test_padding(self):
        self._test_format("%20.f", -42, grouping=0, out='-42'.rjust(20))
        self._test_format("%+10.f", -4200, grouping=0, out='-4200'.rjust(10))
        self._test_format("%-10.f", 4200, grouping=0, out='4200'.ljust(10))
    def test_complex_formatting(self):
        # Spaces in formatting string
        self._test_format_string("One million is %i", 1000000, grouping=1,
            out='One million is 1%s000%s000' % (self.sep, self.sep))
        self._test_format_string("One million is %i", 1000000, grouping=1,
            out='One million is 1%s000%s000' % (self.sep, self.sep))
        # Dots in formatting string
        self._test_format_string(".%f.", 1000.0, out='.1000.000000.')
        # Padding
        if self.sep:
            self._test_format_string("--> %10.2f", 4200, grouping=1,
                out='--> ' + ('4%s200.00' % self.sep).rjust(10))
        # Asterisk formats
        self._test_format_string("%10.*f", (2, 1000), grouping=0,
            out='1000.00'.rjust(10))
        if self.sep:
            self._test_format_string("%*.*f", (10, 2, 1000), grouping=1,
                out=('1%s000.00' % self.sep).rjust(10))
        # Test more-in-one
        if self.sep:
            self._test_format_string("int %i float %.2f str %s",
                (1000, 1000.0, 'str'), grouping=1,
                out='int 1%s000 float 1%s000.00 str str' %
                (self.sep, self.sep))
class TestFormatPatternArg(unittest.TestCase):
    """Test handling of the pattern argument of locale.format()."""

    def test_onlyOnePattern(self):
        # Issue 2522: accept exactly one % pattern, and no extra chars.
        for bad_pattern in ("%f\n", "%f\r", "%f\r\n", " %f",
                            "%fg", "%^g", "%f%%"):
            self.assertRaises(ValueError, locale.format, bad_pattern, 'foo')
class TestLocaleFormatString(unittest.TestCase):
    """General tests on locale.format_string"""
    def test_percent_escape(self):
        # Literal %% must survive, matching plain %-interpolation.
        self.assertEqual(locale.format_string('%f%%', 1.0), '%f%%' % 1.0)
        self.assertEqual(locale.format_string('%d %f%%d', (1, 1.0)),
            '%d %f%%d' % (1, 1.0))
        self.assertEqual(locale.format_string('%(foo)s %%d', {'foo': 'bar'}),
            ('%(foo)s %%d' % {'foo': 'bar'}))
    def test_mapping(self):
        # Dict-based %(name)s interpolation works like the % operator.
        self.assertEqual(locale.format_string('%(foo)s bing.', {'foo': 'bar'}),
            ('%(foo)s bing.' % {'foo': 'bar'}))
        self.assertEqual(locale.format_string('%(foo)s', {'foo': 'bar'}),
            ('%(foo)s' % {'foo': 'bar'}))
class TestNumberFormatting(BaseLocalizedTest, EnUSNumberFormatting):
    # Test number formatting with a real English locale.
    locale_type = locale.LC_NUMERIC
    def setUp(self):
        BaseLocalizedTest.setUp(self)
        EnUSNumberFormatting.setUp(self)
class TestEnUSNumberFormatting(EnUSCookedTest, EnUSNumberFormatting):
    # Test number formatting with a cooked "en_US" locale.
    def setUp(self):
        EnUSCookedTest.setUp(self)
        EnUSNumberFormatting.setUp(self)
    def test_currency(self):
        self._test_currency(50000, "$50000.00")
        self._test_currency(50000, "$50,000.00", grouping=True)
        self._test_currency(50000, "USD 50,000.00",
            grouping=True, international=True)
class TestCNumberFormatting(CCookedTest, BaseFormattingTest):
    # Test number formatting with a cooked "C" locale.
    def test_grouping(self):
        self._test_format("%.2f", 12345.67, grouping=True, out='12345.67')
    def test_grouping_and_padding(self):
        self._test_format("%9.2f", 12345.67, grouping=True, out=' 12345.67')
class TestFrFRNumberFormatting(FrFRCookedTest, BaseFormattingTest):
    # Test number formatting with a cooked "fr_FR" locale.
    # (comma decimal point, space thousands separator, trailing euro sign)
    def test_decimal_point(self):
        self._test_format("%.2f", 12345.67, out='12345,67')
    def test_grouping(self):
        self._test_format("%.2f", 345.67, grouping=True, out='345,67')
        self._test_format("%.2f", 12345.67, grouping=True, out='12 345,67')
    def test_grouping_and_padding(self):
        self._test_format("%6.2f", 345.67, grouping=True, out='345,67')
        self._test_format("%7.2f", 345.67, grouping=True, out=' 345,67')
        self._test_format("%8.2f", 12345.67, grouping=True, out='12 345,67')
        self._test_format("%9.2f", 12345.67, grouping=True, out='12 345,67')
        self._test_format("%10.2f", 12345.67, grouping=True, out=' 12 345,67')
        self._test_format("%-6.2f", 345.67, grouping=True, out='345,67')
        self._test_format("%-7.2f", 345.67, grouping=True, out='345,67 ')
        self._test_format("%-8.2f", 12345.67, grouping=True, out='12 345,67')
        self._test_format("%-9.2f", 12345.67, grouping=True, out='12 345,67')
        self._test_format("%-10.2f", 12345.67, grouping=True, out='12 345,67 ')
    def test_integer_grouping(self):
        self._test_format("%d", 200, grouping=True, out='200')
        self._test_format("%d", 4200, grouping=True, out='4 200')
    def test_integer_grouping_and_padding(self):
        self._test_format("%4d", 4200, grouping=True, out='4 200')
        self._test_format("%5d", 4200, grouping=True, out='4 200')
        self._test_format("%10d", 4200, grouping=True, out='4 200'.rjust(10))
        self._test_format("%-4d", 4200, grouping=True, out='4 200')
        self._test_format("%-5d", 4200, grouping=True, out='4 200')
        self._test_format("%-10d", 4200, grouping=True, out='4 200'.ljust(10))
    def test_currency(self):
        euro = '\u20ac'
        self._test_currency(50000, "50000,00 " + euro)
        self._test_currency(50000, "50 000,00 " + euro, grouping=True)
        # XXX is the trailing space a bug?
        self._test_currency(50000, "50 000,00 EUR ",
            grouping=True, international=True)
class TestCollation(unittest.TestCase):
    # Test string collation functions
    def test_strcoll(self):
        # strcoll() follows the C convention: negative / zero / positive.
        self.assertLess(locale.strcoll('a', 'b'), 0)
        self.assertEqual(locale.strcoll('a', 'a'), 0)
        self.assertGreater(locale.strcoll('b', 'a'), 0)
    def test_strxfrm(self):
        # Transformed strings compare like the originals collate.
        self.assertLess(locale.strxfrm('a'), locale.strxfrm('b'))
class TestEnUSCollation(BaseLocalizedTest, TestCollation):
    # Test string collation functions with a real English locale
    locale_type = locale.LC_ALL
    def setUp(self):
        enc = codecs.lookup(locale.getpreferredencoding(False) or 'ascii').name
        if enc not in ('utf-8', 'iso8859-1', 'cp1252'):
            raise unittest.SkipTest('encoding not suitable')
        if enc != 'iso8859-1' and (sys.platform == 'darwin' or
            sys.platform.startswith('freebsd')):
            raise unittest.SkipTest('wcscoll/wcsxfrm have known bugs')
        BaseLocalizedTest.setUp(self)
    def test_strcoll_with_diacritic(self):
        self.assertLess(locale.strcoll('à', 'b'), 0)
    def test_strxfrm_with_diacritic(self):
        self.assertLess(locale.strxfrm('à'), locale.strxfrm('b'))
class NormalizeTest(unittest.TestCase):
    # Tests for locale.normalize(): mapping user-supplied locale names to
    # canonical "language_TERRITORY.ENCODING[@modifier]" form.
    def check(self, localename, expected):
        self.assertEqual(locale.normalize(localename), expected, msg=localename)
    def test_locale_alias(self):
        # Every alias in the built-in table must normalize to itself.
        for localename, alias in locale.locale_alias.items():
            with self.subTest(locale=(localename, alias)):
                self.check(localename, alias)
    def test_empty(self):
        self.check('', '')
    def test_c(self):
        self.check('c', 'C')
        self.check('posix', 'C')
    def test_english(self):
        self.check('en', 'en_US.ISO8859-1')
        self.check('EN', 'en_US.ISO8859-1')
        self.check('en.iso88591', 'en_US.ISO8859-1')
        self.check('en_US', 'en_US.ISO8859-1')
        self.check('en_us', 'en_US.ISO8859-1')
        self.check('en_GB', 'en_GB.ISO8859-1')
        self.check('en_US.UTF-8', 'en_US.UTF-8')
        self.check('en_US.utf8', 'en_US.UTF-8')
        self.check('en_US:UTF-8', 'en_US.UTF-8')
        self.check('en_US.ISO8859-1', 'en_US.ISO8859-1')
        self.check('en_US.US-ASCII', 'en_US.ISO8859-1')
        self.check('en_US.88591', 'en_US.ISO8859-1')
        self.check('en_US.885915', 'en_US.ISO8859-15')
        self.check('english', 'en_EN.ISO8859-1')
        self.check('english_uk.ascii', 'en_GB.ISO8859-1')
    def test_hyphenated_encoding(self):
        self.check('az_AZ.iso88599e', 'az_AZ.ISO8859-9E')
        self.check('az_AZ.ISO8859-9E', 'az_AZ.ISO8859-9E')
        self.check('tt_RU.koi8c', 'tt_RU.KOI8-C')
        self.check('tt_RU.KOI8-C', 'tt_RU.KOI8-C')
        self.check('lo_LA.cp1133', 'lo_LA.IBM-CP1133')
        self.check('lo_LA.ibmcp1133', 'lo_LA.IBM-CP1133')
        self.check('lo_LA.IBM-CP1133', 'lo_LA.IBM-CP1133')
        self.check('uk_ua.microsoftcp1251', 'uk_UA.CP1251')
        self.check('uk_ua.microsoft-cp1251', 'uk_UA.CP1251')
        self.check('ka_ge.georgianacademy', 'ka_GE.GEORGIAN-ACADEMY')
        self.check('ka_GE.GEORGIAN-ACADEMY', 'ka_GE.GEORGIAN-ACADEMY')
        self.check('cs_CZ.iso88592', 'cs_CZ.ISO8859-2')
        self.check('cs_CZ.ISO8859-2', 'cs_CZ.ISO8859-2')
    def test_euro_modifier(self):
        self.check('de_DE@euro', 'de_DE.ISO8859-15')
        self.check('en_US.ISO8859-15@euro', 'en_US.ISO8859-15')
        self.check('de_DE.utf8@euro', 'de_DE.UTF-8')
    def test_latin_modifier(self):
        self.check('be_BY.UTF-8@latin', 'be_BY.UTF-8@latin')
        self.check('sr_RS.UTF-8@latin', 'sr_RS.UTF-8@latin')
        self.check('sr_RS.UTF-8@latn', 'sr_RS.UTF-8@latin')
    def test_valencia_modifier(self):
        self.check('ca_ES.UTF-8@valencia', 'ca_ES.UTF-8@valencia')
        self.check('ca_ES@valencia', 'ca_ES.ISO8859-15@valencia')
        self.check('ca@valencia', 'ca_ES.ISO8859-1@valencia')
    def test_devanagari_modifier(self):
        self.check('ks_IN.UTF-8@devanagari', 'ks_IN.UTF-8@devanagari')
        self.check('ks_IN@devanagari', 'ks_IN.UTF-8@devanagari')
        self.check('ks@devanagari', 'ks_IN.UTF-8@devanagari')
        self.check('ks_IN.UTF-8', 'ks_IN.UTF-8')
        self.check('ks_IN', 'ks_IN.UTF-8')
        self.check('ks', 'ks_IN.UTF-8')
        self.check('sd_IN.UTF-8@devanagari', 'sd_IN.UTF-8@devanagari')
        self.check('sd_IN@devanagari', 'sd_IN.UTF-8@devanagari')
        self.check('sd@devanagari', 'sd_IN.UTF-8@devanagari')
        self.check('sd_IN.UTF-8', 'sd_IN.UTF-8')
        self.check('sd_IN', 'sd_IN.UTF-8')
        self.check('sd', 'sd_IN.UTF-8')
    def test_euc_encoding(self):
        self.check('ja_jp.euc', 'ja_JP.eucJP')
        self.check('ja_jp.eucjp', 'ja_JP.eucJP')
        self.check('ko_kr.euc', 'ko_KR.eucKR')
        self.check('ko_kr.euckr', 'ko_KR.eucKR')
        self.check('zh_cn.euc', 'zh_CN.eucCN')
        self.check('zh_tw.euc', 'zh_TW.eucTW')
        self.check('zh_tw.euctw', 'zh_TW.eucTW')
    def test_japanese(self):
        self.check('ja', 'ja_JP.eucJP')
        self.check('ja.jis', 'ja_JP.JIS7')
        self.check('ja.sjis', 'ja_JP.SJIS')
        self.check('ja_jp', 'ja_JP.eucJP')
        self.check('ja_jp.ajec', 'ja_JP.eucJP')
        self.check('ja_jp.euc', 'ja_JP.eucJP')
        self.check('ja_jp.eucjp', 'ja_JP.eucJP')
        self.check('ja_jp.iso-2022-jp', 'ja_JP.JIS7')
        self.check('ja_jp.iso2022jp', 'ja_JP.JIS7')
        self.check('ja_jp.jis', 'ja_JP.JIS7')
        self.check('ja_jp.jis7', 'ja_JP.JIS7')
        self.check('ja_jp.mscode', 'ja_JP.SJIS')
        self.check('ja_jp.pck', 'ja_JP.SJIS')
        self.check('ja_jp.sjis', 'ja_JP.SJIS')
        self.check('ja_jp.ujis', 'ja_JP.eucJP')
        self.check('ja_jp.utf8', 'ja_JP.UTF-8')
        self.check('japan', 'ja_JP.eucJP')
        self.check('japanese', 'ja_JP.eucJP')
        self.check('japanese-euc', 'ja_JP.eucJP')
        self.check('japanese.euc', 'ja_JP.eucJP')
        self.check('japanese.sjis', 'ja_JP.SJIS')
        self.check('jp_jp', 'ja_JP.eucJP')
class TestMiscellaneous(unittest.TestCase):
    """Assorted smoke and regression tests for the locale module.

    These tests mutate process-global locale state; the order of
    setlocale/getlocale calls inside each test is significant.
    """
    def test_getpreferredencoding(self):
        # Invoke getpreferredencoding to make sure it does not cause exceptions.
        enc = locale.getpreferredencoding()
        if enc:
            # If encoding non-empty, make sure it is valid
            codecs.lookup(enc)
    def test_strcoll_3303(self):
        # test crasher from bug #3303: strcoll must raise TypeError for
        # None arguments instead of crashing the interpreter.
        self.assertRaises(TypeError, locale.strcoll, "a", None)
        self.assertRaises(TypeError, locale.strcoll, b"a", None)
    def test_setlocale_category(self):
        # Querying each category with no locale argument must not raise.
        locale.setlocale(locale.LC_ALL)
        locale.setlocale(locale.LC_TIME)
        locale.setlocale(locale.LC_CTYPE)
        locale.setlocale(locale.LC_COLLATE)
        locale.setlocale(locale.LC_MONETARY)
        locale.setlocale(locale.LC_NUMERIC)
        # crasher from bug #7419: an invalid category must raise
        # locale.Error, not crash.
        self.assertRaises(locale.Error, locale.setlocale, 12345)
    def test_getsetlocale_issue1813(self):
        # Issue #1813: setting and getting the locale under a Turkish locale
        oldlocale = locale.setlocale(locale.LC_CTYPE)
        self.addCleanup(locale.setlocale, locale.LC_CTYPE, oldlocale)
        try:
            locale.setlocale(locale.LC_CTYPE, 'tr_TR')
        except locale.Error:
            # Unsupported locale on this system
            self.skipTest('test needs Turkish locale')
        loc = locale.getlocale(locale.LC_CTYPE)
        if verbose:
            print('got locale %a' % (loc,))
        # Round-trip: setting the locale returned by getlocale() must
        # yield the same value from a subsequent getlocale().
        locale.setlocale(locale.LC_CTYPE, loc)
        self.assertEqual(loc, locale.getlocale(locale.LC_CTYPE))
    def test_invalid_locale_format_in_localetuple(self):
        # bytes are not an acceptable locale specification
        with self.assertRaises(TypeError):
            locale.setlocale(locale.LC_ALL, b'fi_FI')
    def test_invalid_iterable_in_localetuple(self):
        # a tuple of bytes is not an acceptable locale specification either
        with self.assertRaises(TypeError):
            locale.setlocale(locale.LC_ALL, (b'not', b'valid'))
if __name__ == '__main__':
    # Run the test suite when this module is executed as a script.
    unittest.main()
| gpl-3.0 |
samsu/neutron | services/firewall/agents/firewall_agent_api.py | 7 | 2606 | # Copyright (c) 2013 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo.config import cfg
from neutron.common import rpc as n_rpc
from neutron.openstack.common import log as logging
LOG = logging.getLogger(__name__)
# Agent-side FWaaS configuration options, registered below under the
# [fwaas] section of the agent configuration file.
FWaaSOpts = [
    cfg.StrOpt(
        'driver',
        default='',
        help=_("Name of the FWaaS Driver")),
    cfg.BoolOpt(
        'enabled',
        default=False,
        help=_("Enable FWaaS")),
]
cfg.CONF.register_opts(FWaaSOpts, 'fwaas')
class FWaaSPluginApiMixin(n_rpc.RpcProxy):
    """Agent side of the FWaaS agent to FWaaS Plugin RPC API.

    Wraps synchronous RPC calls (`self.call`) from the agent back to the
    FWaaS plugin; `host` identifies the calling agent in each message.
    """
    RPC_API_VERSION = '1.0'
    def __init__(self, topic, host):
        # topic: RPC topic the plugin listens on; host: this agent's host name.
        super(FWaaSPluginApiMixin,
              self).__init__(topic=topic,
                             default_version=self.RPC_API_VERSION)
        self.host = host
    def set_firewall_status(self, context, firewall_id, status):
        """Make a RPC to set the status of a firewall."""
        return self.call(context,
                         self.make_msg('set_firewall_status', host=self.host,
                                       firewall_id=firewall_id, status=status))
    def firewall_deleted(self, context, firewall_id):
        """Make a RPC to indicate that the firewall resources are deleted."""
        return self.call(context,
                         self.make_msg('firewall_deleted', host=self.host,
                                       firewall_id=firewall_id))
class FWaaSAgentRpcCallbackMixin(object):
    """Mixin defining the agent-side handlers for FWaaS RPC casts.

    The handlers below are no-ops; concrete agent implementations are
    expected to override them with driver-specific behaviour.
    """
    def __init__(self, host):
        super(FWaaSAgentRpcCallbackMixin, self).__init__(host)

    def create_firewall(self, context, firewall, host):
        """Handle RPC cast from plugin to create a firewall."""

    def update_firewall(self, context, firewall, host):
        """Handle RPC cast from plugin to update a firewall."""

    def delete_firewall(self, context, firewall, host):
        """Handle RPC cast from plugin to delete a firewall."""
| apache-2.0 |
jefftc/changlab | Betsy/Betsy/modules/summarize_coverage_at_positions.py | 1 | 4182 | from Module import AbstractModule
# Chrom Pos <Sample> [<Sample> ...]
#
# Each value in the matrix is:
# <ref>/<alt>/<vaf>
class Module(AbstractModule):
    """Betsy module: merge per-sample VCF calls into one coverage matrix.

    Output is a tab-delimited file (see run()) with one row per variant
    coordinate and one column per sample; each cell is formatted by
    SimpleVariantMatrix._format_call (<ref>/<alt>/<vaf>).
    """
    def __init__(self):
        AbstractModule.__init__(self)
    def run(
        self, network, in_data, out_attributes, user_options,
        num_cores, outfile):
        # Collects every .vcf file under in_data.identifier, merges the
        # per-sample calls, and writes the matrix to `outfile`.
        # Returns a (currently empty) metadata dictionary.
        import os
        import stat
        from genomicode import filelib
        from genomicode import vcflib
        from genomicode import SimpleVariantMatrix
        from Betsy import module_utils as mlib
        vcf_node = in_data
        vcf_filenames = filelib.list_files_in_path(
            vcf_node.identifier, endswith=".vcf", toplevel_only=True)
        assert vcf_filenames, "No .vcf files."
        metadata = {}
        jobs = []  # list of (filestem, vcf_filename)
        for vcf_filename in vcf_filenames:
            path, root, ext = mlib.splitpath(vcf_filename)
            x = root, vcf_filename
            jobs.append(x)
        vcf_objects = []
        for x in jobs:
            filestem, vcf_filename = x
            # There might be an empty VCF file if there are no calls
            # (e.g. from call_consensus_varscan).  If this is the
            # case, then ignore the file.
            # (The cheap stat check avoids reading files that are
            # clearly big enough to contain data.)
            if os.stat(vcf_filename)[stat.ST_SIZE] < 256:
                x = open(vcf_filename).read()
                if not x.strip():
                    continue
            vcf = vcflib.read(vcf_filename)
            vcf_objects.append(vcf)
        # Make a list of all the samples.  Make sure they are unique.
        all_samples = []
        for vcf in vcf_objects:
            for sample in vcf.samples:
                assert sample not in all_samples
                all_samples.append(sample)
        # Make a list of all the positions.
        # sample name -> (chrom, pos) -> SimpleVariantMatrix.call
        sample2coord2call = {}
        for vcf in vcf_objects:
            for i in range(vcf.num_variants()):
                var = vcflib.get_variant(vcf, i)
                for sample in vcf.samples:
                    coord2call = sample2coord2call.get(sample, {})
                    coord = var.chrom, var.pos
                    call = vcflib.get_call(var, sample)
                    # convert to SimpleVariantMatrix.call
                    call = vcflib.simplify_call(call)
                    # simplify_call may yield multiple alts/vafs; only the
                    # first of each is kept in the matrix.
                    num_alt = None
                    if call.num_alt:
                        num_alt = call.num_alt[0]
                    vaf = None
                    if call.vaf:
                        vaf = call.vaf[0]
                    # Skip entirely empty calls.
                    if call.num_ref is None and num_alt is None and \
                       vaf is None:
                        continue
                    scall = SimpleVariantMatrix.Call(
                        call.num_ref, num_alt, vaf)
                    # Each sample may contribute at most one call per coord.
                    assert coord not in coord2call
                    coord2call[coord] = scall
                    sample2coord2call[sample] = coord2call
        # Union of coordinates seen in any sample, sorted for stable output.
        all_coord = {}
        for coord2call in sample2coord2call.itervalues():
            for coord in coord2call.iterkeys():
                all_coord[coord] = 1
        all_coord = sorted(all_coord)
        handle = open(outfile, 'w')
        header = ["Chrom", "Pos"] + all_samples
        print >>handle, "\t".join(header)
        for coord in all_coord:
            chrom, pos = coord
            pos_f = vcflib._format_vcf_value(pos)
            # Samples with no call at this coordinate get an empty cell.
            coverage = [""] * len(all_samples)
            for i, sample in enumerate(all_samples):
                coord2call = sample2coord2call.get(sample, {})
                call = coord2call.get(coord, None)
                if not call:
                    continue
                coverage[i] = SimpleVariantMatrix._format_call(call)
                #coverage[i] = vcflib._format_vcf_value(
                #    call.total_reads, None_char="")
            x = [chrom, pos_f] + coverage
            assert len(x) == len(header)
            print >>handle, "\t".join(map(str, x))
        handle.close()
        return metadata
    def name_outfile(self, antecedents, user_options):
        # Fixed output file name for this module.
        return "coverage.txt"
| mit |
weimingtom/python-for-android | python3-alpha/python3-src/Mac/Tools/Doc/HelpIndexingTool/Miscellaneous_Standards.py | 48 | 1206 | """Suite Miscellaneous Standards: Useful events that aren\xd5t in any other suite
Level 0, version 0
Generated from /Developer/Applications/Apple Help Indexing Tool.app
AETE/AEUT resource version 1/1, language 0, script 0
"""
import aetools
import MacOS
_code = 'misc'
class Miscellaneous_Standards_Events:
    """Machine-generated aetools suite: sends AppleEvents for this suite.

    NOTE(review): the mutable default ``_attributes={}`` is part of the
    generated aetools calling convention; it is never mutated here.
    """
    def revert(self, _object, _attributes={}, **_arguments):
        """revert: Revert an object to the most recently saved version
        Required argument: object to revert
        Keyword argument _attributes: AppleEvent attribute dictionary
        """
        _code = 'misc'
        _subcode = 'rvrt'
        # This event takes no optional arguments.
        if _arguments: raise TypeError('No optional args expected')
        _arguments['----'] = _object
        # self.send is provided by the aetools.TalkTo base class the
        # application mixes this suite into.
        _reply, _arguments, _attributes = self.send(_code, _subcode,
                _arguments, _attributes)
        # A non-zero 'errn' in the reply signals an AppleEvent error.
        if _arguments.get('errn', 0):
            raise aetools.Error(aetools.decodeerror(_arguments))
        # XXXX Optionally decode result
        if _arguments.has_key('----'):
            return _arguments['----']
# Indices of types declared in this module
#
_classdeclarations = {
}
_propdeclarations = {
}
_compdeclarations = {
}
_enumdeclarations = {
}
| apache-2.0 |
wtsi-hgi/startfortest | useintest/modules/irods/services.py | 1 | 6234 | import json
import logging
import math
import os
from abc import abstractmethod, ABCMeta
from typing import List, Type, Callable, Sequence
from useintest.modules.irods.models import IrodsUser, IrodsDockerisedService, Version
from useintest.services.controllers import DockerisedServiceController
_DOCKER_REPOSITORY = "mercury/icat"
_logger = logging.getLogger(__name__)
class IrodsBaseServiceController(DockerisedServiceController[IrodsDockerisedService], metaclass=ABCMeta):
    """
    Base controller for Dockerised iRODS services.

    Tracks the server version and the users to register on the started
    service, and defines how client connection settings are written.
    """
    @staticmethod
    @abstractmethod
    def write_connection_settings(file_location: str, service: IrodsDockerisedService):
        """
        Writes the connection settings for the given iRODS service to the given location.
        :param file_location: the location to write the settings to (file should not already exist)
        :param service: the Dockerized iRODS service
        """
    @staticmethod
    def _persistent_error_log_detector(line: str) -> bool:
        """
        Detects log lines that indicate an unrecoverable (persistent) failure.
        :param line: a single line of the container's log output
        :return: whether the line indicates the host has run out of disk space
        """
        return "No space left on device" in line
    def __init__(self, version: Version, users: Sequence[IrodsUser], config_file_name: str,
                 repository: str, tag: str, ports: List[int], start_log_detector: Callable[[str], bool], **kwargs):
        """
        Constructor.
        :param version: version of the iRODS server being controlled
        :param users: users to expose on the started service (admins become the root user)
        :param config_file_name: name of the client-side connection settings file
        :param repository: Docker repository of the server image
        :param tag: Docker tag of the server image
        :param ports: ports the service listens on
        :param start_log_detector: predicate recognising the "server started" log line
        :param kwargs: passed through to `DockerisedServiceController.__init__`
        """
        super().__init__(
            IrodsDockerisedService, repository, tag, ports, start_log_detector=start_log_detector, **kwargs)
        self.config_file_name = config_file_name
        self._version = version
        self._users = users
    def start_service(self) -> IrodsDockerisedService:
        # Start the container, then attach version/user metadata to the
        # returned service model.  The last admin user in `self._users`
        # ends up as `service.root_user`.
        service = super().start_service()
        for user in self._users:
            if user.admin:
                service.root_user = user
            service.users.add(user)
        service.version = self._version
        return service
class Irods4ServiceController(IrodsBaseServiceController, metaclass=ABCMeta):
    """
    iRODS 4 service controller.
    """
    # Standard iRODS server port and client settings file name.
    _PORT = 1247
    _CONFIG_FILE_NAME = "irods_environment.json"
    _NATIVE_AUTHENTICATION_SCHEME = "native"
    # Keys used in the irods_environment.json client settings file.
    _HOST_PARAMETER_NAME = "irods_host"
    _PORT_PARAMETER_NAME = "irods_port"
    _USERNAME_PARAMETER_NAME = "irods_user_name"
    _ZONE_PARAMETER_NAME = "irods_zone_name"
    _AUTHENTICATION_SCHEME_PARAMETER_NAME = "irods_authentication_scheme"
    # Default admin user baked into the mercury/icat images.
    _USERS = [IrodsUser("rods", "testZone", "irods123", admin=True)]
    # TODO: These connection settings will not work with port-mapping to localhost
    @staticmethod
    def write_connection_settings(file_location: str, service: IrodsDockerisedService):
        # Writes an irods_environment.json for the service's root user.
        # Raises ValueError rather than overwriting an existing file.
        if os.path.isfile(file_location):
            raise ValueError(f"Settings cannot be written to a file that already exists ({file_location})")
        config = {Irods4ServiceController._USERNAME_PARAMETER_NAME: service.root_user.username,
                  Irods4ServiceController._HOST_PARAMETER_NAME: service.name,
                  Irods4ServiceController._PORT_PARAMETER_NAME: Irods4ServiceController._PORT,
                  Irods4ServiceController._ZONE_PARAMETER_NAME: service.root_user.zone,
                  Irods4ServiceController._AUTHENTICATION_SCHEME_PARAMETER_NAME:
                      Irods4ServiceController._NATIVE_AUTHENTICATION_SCHEME}
        config_as_json = json.dumps(config)
        _logger.debug(f"Writing iRODS connection config to: {file_location}")
        with open(file_location, "w") as settings_file:
            settings_file.write(config_as_json)
    def __init__(self, docker_repository: str, docker_tag: str, start_timeout: int=math.inf, start_tries: int=10,
                 version: Version=None):
        """
        Constructor.
        :param docker_repository: name of the Docker repository
        :param docker_tag: the Docker tag of the iRODS 4 image
        :param start_timeout: see `ContainerisedServiceController.__init__`
        :param start_tries: see `ContainerisedServiceController.__init__`
        :param version: exact version of the iRODS 4 sever (will use `docker_tag` if not supplied)
        """
        version = version if version is not None else Version(docker_tag)
        super().__init__(version, Irods4ServiceController._USERS, Irods4ServiceController._CONFIG_FILE_NAME,
                         docker_repository, docker_tag, [Irods4ServiceController._PORT],
                         start_log_detector=lambda line: "iRODS server started successfully!" in line,
                         transient_error_log_detector=lambda line: "iRODS server failed to start." in line
                                                                   or "RuntimeError:" in line,
                         persistent_error_log_detector=IrodsBaseServiceController._persistent_error_log_detector,
                         start_timeout=start_timeout, start_tries=start_tries)
# TODO: Why not use DockerisedServiceControllerTypeBuilder?
# TODO: Why not use DockerisedServiceControllerTypeBuilder?
def build_irods_service_controller_type(docker_repository: str, docker_tag: str, superclass: type) \
        -> Type[IrodsBaseServiceController]:
    """
    Builds a controller for an iRODS server that runs in containers of on the given Docker image.
    :param docker_repository: name of the Docker repository
    :param docker_tag: the Docker tag of the image in the Docker repository
    :param superclass: the superclass of the service controller
    :return: the build service controller for the given image
    """
    def __init__(self: superclass, *args, **kwargs):
        # Forward to the superclass constructor with the repository/tag
        # captured from the enclosing call baked in.
        super(type(self), self).__init__(docker_repository, docker_tag, *args, **kwargs)

    class_name = "Irods%sServiceController" % docker_tag.replace(".", "_")
    return type(class_name, (superclass,), {"__init__": __init__})
# Concrete service controller definitions
Irods4_1_10ServiceController = build_irods_service_controller_type(_DOCKER_REPOSITORY, "4.1.10", Irods4ServiceController)
# Default/latest controller alias and the set of all known controllers.
IrodsServiceController = Irods4_1_10ServiceController
irods_service_controllers = {Irods4_1_10ServiceController}
| mit |
2uller/LotF | App/Lib/test/test_xmlrpc.py | 3 | 40092 | import base64
import datetime
import sys
import time
import unittest
import xmlrpclib
import SimpleXMLRPCServer
import mimetools
import httplib
import socket
import StringIO
import os
import re
from test import test_support
try:
import threading
except ImportError:
threading = None
try:
unicode
except NameError:
have_unicode = False
else:
have_unicode = True
# Round-trip fixture exercising every marshallable XML-RPC type
# (string, float, int, long, list, Binary, boolean, unicode keys/values
# and the three DateTime construction forms).
alist = [{'astring': 'foo@bar.baz.spam',
          'afloat': 7283.43,
          'anint': 2**20,
          'ashortlong': 2L,
          'anotherlist': ['.zyx.41'],
          'abase64': xmlrpclib.Binary("my dog has fleas"),
          'boolean': xmlrpclib.False,
          'unicode': u'\u4000\u6000\u8000',
          u'ukey\u4000': 'regular value',
          'datetime1': xmlrpclib.DateTime('20050210T11:41:23'),
          'datetime2': xmlrpclib.DateTime(
                        (2005, 02, 10, 11, 41, 23, 0, 1, -1)),
          'datetime3': xmlrpclib.DateTime(
                        datetime.datetime(2005, 02, 10, 11, 41, 23)),
          }]
class XMLRPCTestCase(unittest.TestCase):
    """Tests for xmlrpclib marshalling (dumps) and unmarshalling (loads)."""
    def test_dump_load(self):
        # dumps/loads must round-trip the mixed-type fixture unchanged.
        self.assertEqual(alist,
                         xmlrpclib.loads(xmlrpclib.dumps((alist,)))[0][0])
    def test_dump_bare_datetime(self):
        # This checks that an unwrapped datetime.date object can be handled
        # by the marshalling code.  This can't be done via test_dump_load()
        # since with use_datetime set to 1 the unmarshaller would create
        # datetime objects for the 'datetime[123]' keys as well
        dt = datetime.datetime(2005, 02, 10, 11, 41, 23)
        s = xmlrpclib.dumps((dt,))
        (newdt,), m = xmlrpclib.loads(s, use_datetime=1)
        self.assertEqual(newdt, dt)
        self.assertEqual(m, None)
        (newdt,), m = xmlrpclib.loads(s, use_datetime=0)
        self.assertEqual(newdt, xmlrpclib.DateTime('20050210T11:41:23'))
    def test_datetime_before_1900(self):
        # same as before but with a date before 1900
        dt = datetime.datetime(1, 02, 10, 11, 41, 23)
        s = xmlrpclib.dumps((dt,))
        (newdt,), m = xmlrpclib.loads(s, use_datetime=1)
        self.assertEqual(newdt, dt)
        self.assertEqual(m, None)
        (newdt,), m = xmlrpclib.loads(s, use_datetime=0)
        self.assertEqual(newdt, xmlrpclib.DateTime('00010210T11:41:23'))
    def test_cmp_datetime_DateTime(self):
        # DateTime must compare symmetrically against datetime objects.
        now = datetime.datetime.now()
        dt = xmlrpclib.DateTime(now.timetuple())
        self.assertTrue(dt == now)
        self.assertTrue(now == dt)
        then = now + datetime.timedelta(seconds=4)
        self.assertTrue(then >= dt)
        self.assertTrue(dt < then)
    def test_bug_1164912 (self):
        d = xmlrpclib.DateTime()
        ((new_d,), dummy) = xmlrpclib.loads(xmlrpclib.dumps((d,),
                                            methodresponse=True))
        self.assertIsInstance(new_d.value, str)
        # Check that the output of dumps() is still an 8-bit string
        s = xmlrpclib.dumps((new_d,), methodresponse=True)
        self.assertIsInstance(s, str)
    def test_newstyle_class(self):
        # A new-style instance marshals as its __dict__.
        class T(object):
            pass
        t = T()
        t.x = 100
        t.y = "Hello"
        ((t2,), dummy) = xmlrpclib.loads(xmlrpclib.dumps((t,)))
        self.assertEqual(t2, t.__dict__)
    def test_dump_big_long(self):
        # longs beyond 32 bits are not representable in XML-RPC.
        self.assertRaises(OverflowError, xmlrpclib.dumps, (2L**99,))
    def test_dump_bad_dict(self):
        # dictionary keys must be strings
        self.assertRaises(TypeError, xmlrpclib.dumps, ({(1,2,3): 1},))
    def test_dump_recursive_seq(self):
        # self-referential containers cannot be marshalled
        l = [1,2,3]
        t = [3,4,5,l]
        l.append(t)
        self.assertRaises(TypeError, xmlrpclib.dumps, (l,))
    def test_dump_recursive_dict(self):
        d = {'1':1, '2':1}
        t = {'3':3, 'd':d}
        d['t'] = t
        self.assertRaises(TypeError, xmlrpclib.dumps, (d,))
    def test_dump_big_int(self):
        # ints are accepted up to the XML-RPC 32-bit limits (MAXINT/MININT)
        # and rejected just beyond them.
        if sys.maxint > 2L**31-1:
            self.assertRaises(OverflowError, xmlrpclib.dumps,
                              (int(2L**34),))
        xmlrpclib.dumps((xmlrpclib.MAXINT, xmlrpclib.MININT))
        self.assertRaises(OverflowError, xmlrpclib.dumps, (xmlrpclib.MAXINT+1,))
        self.assertRaises(OverflowError, xmlrpclib.dumps, (xmlrpclib.MININT-1,))
        def dummy_write(s):
            pass
        # the Marshaller applies the same limits at the dump_int level
        m = xmlrpclib.Marshaller()
        m.dump_int(xmlrpclib.MAXINT, dummy_write)
        m.dump_int(xmlrpclib.MININT, dummy_write)
        self.assertRaises(OverflowError, m.dump_int, xmlrpclib.MAXINT+1, dummy_write)
        self.assertRaises(OverflowError, m.dump_int, xmlrpclib.MININT-1, dummy_write)
    def test_dump_none(self):
        # None only marshals when allow_none=True (emits <nil/>).
        value = alist + [None]
        arg1 = (alist + [None],)
        strg = xmlrpclib.dumps(arg1, allow_none=True)
        self.assertEqual(value,
                         xmlrpclib.loads(strg)[0][0])
        self.assertRaises(TypeError, xmlrpclib.dumps, (arg1,))
    def test_default_encoding_issues(self):
        # SF bug #1115989: wrong decoding in '_stringify'
        utf8 = """<?xml version='1.0' encoding='iso-8859-1'?>
                  <params>
                    <param><value>
                      <string>abc \x95</string>
                      </value></param>
                    <param><value>
                      <struct>
                        <member>
                          <name>def \x96</name>
                          <value><string>ghi \x97</string></value>
                          </member>
                        </struct>
                      </value></param>
                  </params>
                  """
        # sys.setdefaultencoding() normally doesn't exist after site.py is
        # loaded.  Import a temporary fresh copy to get access to it
        # but then restore the original copy to avoid messing with
        # other potentially modified sys module attributes
        old_encoding = sys.getdefaultencoding()
        with test_support.CleanImport('sys'):
            import sys as temp_sys
            temp_sys.setdefaultencoding("iso-8859-1")
            try:
                (s, d), m = xmlrpclib.loads(utf8)
            finally:
                temp_sys.setdefaultencoding(old_encoding)
        items = d.items()
        if have_unicode:
            self.assertEqual(s, u"abc \x95")
            self.assertIsInstance(s, unicode)
            self.assertEqual(items, [(u"def \x96", u"ghi \x97")])
            self.assertIsInstance(items[0][0], unicode)
            self.assertIsInstance(items[0][1], unicode)
        else:
            self.assertEqual(s, "abc \xc2\x95")
            self.assertEqual(items, [("def \xc2\x96", "ghi \xc2\x97")])
class HelperTestCase(unittest.TestCase):
    """Tests for the xmlrpclib.escape() helper."""
    def test_escape(self):
        # escape() must entity-encode the three XML-special characters.
        # (The expected values here were previously missing the entity
        # encoding, so each assertion compared escape()'s output against
        # the unescaped input and could never pass.)
        self.assertEqual(xmlrpclib.escape("a&b"), "a&amp;b")
        self.assertEqual(xmlrpclib.escape("a<b"), "a&lt;b")
        self.assertEqual(xmlrpclib.escape("a>b"), "a&gt;b")
class FaultTestCase(unittest.TestCase):
    """Tests for the xmlrpclib.Fault exception type."""
    def test_repr(self):
        f = xmlrpclib.Fault(42, 'Test Fault')
        self.assertEqual(repr(f), "<Fault 42: 'Test Fault'>")
        # str() and repr() of a Fault are identical
        self.assertEqual(repr(f), str(f))
    def test_dump_fault(self):
        f = xmlrpclib.Fault(42, 'Test Fault')
        # dumps() of a Fault produces a struct with faultCode/faultString
        s = xmlrpclib.dumps((f,))
        (newf,), m = xmlrpclib.loads(s)
        self.assertEqual(newf, {'faultCode': 42, 'faultString': 'Test Fault'})
        self.assertEqual(m, None)
        # a marshalled methodresponse fault re-raises on loads()
        s = xmlrpclib.Marshaller().dumps(f)
        self.assertRaises(xmlrpclib.Fault, xmlrpclib.loads, s)
class DateTimeTestCase(unittest.TestCase):
    """Tests for the xmlrpclib.DateTime wrapper type."""
    def test_default(self):
        # constructing with no argument must not raise
        t = xmlrpclib.DateTime()
    def test_time(self):
        # a float is interpreted as a Unix timestamp in local time
        d = 1181399930.036952
        t = xmlrpclib.DateTime(d)
        self.assertEqual(str(t), time.strftime("%Y%m%dT%H:%M:%S", time.localtime(d)))
    def test_time_tuple(self):
        # a 9-tuple is accepted like time.struct_time
        d = (2007,6,9,10,38,50,5,160,0)
        t = xmlrpclib.DateTime(d)
        self.assertEqual(str(t), '20070609T10:38:50')
    def test_time_struct(self):
        d = time.localtime(1181399930.036952)
        t = xmlrpclib.DateTime(d)
        self.assertEqual(str(t), time.strftime("%Y%m%dT%H:%M:%S", d))
    def test_datetime_datetime(self):
        d = datetime.datetime(2007,1,2,3,4,5)
        t = xmlrpclib.DateTime(d)
        self.assertEqual(str(t), '20070102T03:04:05')
    def test_repr(self):
        d = datetime.datetime(2007,1,2,3,4,5)
        t = xmlrpclib.DateTime(d)
        val = "<DateTime '20070102T03:04:05' at %x>" % id(t)
        self.assertEqual(repr(t), val)
    def test_decode(self):
        # decode() must strip surrounding whitespace; _datetime() must
        # produce an equal object.
        d = ' 20070908T07:11:13 '
        t1 = xmlrpclib.DateTime()
        t1.decode(d)
        tref = xmlrpclib.DateTime(datetime.datetime(2007,9,8,7,11,13))
        self.assertEqual(t1, tref)
        t2 = xmlrpclib._datetime(d)
        # Bug fix: this previously re-asserted t1 (already checked above),
        # leaving the _datetime() result t2 completely unverified.
        self.assertEqual(t2, tref)
class BinaryTestCase(unittest.TestCase):
    """Tests for the xmlrpclib.Binary wrapper type."""
    def test_default(self):
        # an empty Binary stringifies to the empty string
        t = xmlrpclib.Binary()
        self.assertEqual(str(t), '')
    def test_string(self):
        # str() of a Binary returns the wrapped bytes unchanged
        d = '\x01\x02\x03abc123\xff\xfe'
        t = xmlrpclib.Binary(d)
        self.assertEqual(str(t), d)
    def test_decode(self):
        # decode() and the _binary() helper both accept base64 input
        d = '\x01\x02\x03abc123\xff\xfe'
        de = base64.encodestring(d)
        t1 = xmlrpclib.Binary()
        t1.decode(de)
        self.assertEqual(str(t1), d)
        t2 = xmlrpclib._binary(de)
        self.assertEqual(str(t2), d)
# Server address/port/URL, filled in by the http_server threads below.
ADDR = PORT = URL = None
# The evt is set twice.  First when the server is ready to serve.
# Second when the server has been shutdown.  The user must clear
# the event after it has been set the first time to catch the second set.
def http_server(evt, numrequests, requestHandler=None):
    """Run a SimpleXMLRPCServer in this thread for up to `numrequests`
    requests, publishing its address via the ADDR/PORT/URL globals and
    signalling readiness and shutdown through `evt` (see comment above).
    """
    class TestInstanceClass:
        def div(self, x, y):
            return x // y
        def _methodHelp(self, name):
            if name == 'div':
                return 'This is the div function'
    def my_function():
        '''This is my function'''
        return True
    class MyXMLRPCServer(SimpleXMLRPCServer.SimpleXMLRPCServer):
        def get_request(self):
            # Ensure the socket is always non-blocking.  On Linux, socket
            # attributes are not inherited like they are on *BSD and Windows.
            s, port = self.socket.accept()
            s.setblocking(True)
            return s, port
    if not requestHandler:
        requestHandler = SimpleXMLRPCServer.SimpleXMLRPCRequestHandler
    serv = MyXMLRPCServer(("localhost", 0), requestHandler,
                          logRequests=False, bind_and_activate=False)
    try:
        serv.socket.settimeout(3)
        serv.server_bind()
        global ADDR, PORT, URL
        ADDR, PORT = serv.socket.getsockname()
        #connect to IP address directly.  This avoids socket.create_connection()
        #trying to connect to "localhost" using all address families, which
        #causes slowdown e.g. on vista which supports AF_INET6. The server listens
        #on AF_INET only.
        URL = "http://%s:%d"%(ADDR, PORT)
        serv.server_activate()
        serv.register_introspection_functions()
        serv.register_multicall_functions()
        serv.register_function(pow)
        serv.register_function(lambda x,y: x+y, 'add')
        serv.register_function(my_function)
        serv.register_instance(TestInstanceClass())
        evt.set()
        # handle up to 'numrequests' requests
        while numrequests > 0:
            serv.handle_request()
            numrequests -= 1
    except socket.timeout:
        pass
    finally:
        # Always release the socket and signal shutdown, even on error.
        serv.socket.close()
        PORT = None
        evt.set()
def http_multi_server(evt, numrequests, requestHandler=None):
    """Run a MultiPathXMLRPCServer in this thread for up to `numrequests`
    requests, with dispatchers mounted at /foo and /foo/bar, publishing
    its address via ADDR/PORT/URL and signalling via `evt` (set once when
    ready, once again after shutdown).
    """
    class TestInstanceClass:
        def div(self, x, y):
            return x // y
        def _methodHelp(self, name):
            if name == 'div':
                return 'This is the div function'
    def my_function():
        '''This is my function'''
        return True
    class MyXMLRPCServer(SimpleXMLRPCServer.MultiPathXMLRPCServer):
        def get_request(self):
            # Ensure the socket is always non-blocking.  On Linux, socket
            # attributes are not inherited like they are on *BSD and Windows.
            s, port = self.socket.accept()
            s.setblocking(True)
            return s, port
    if not requestHandler:
        requestHandler = SimpleXMLRPCServer.SimpleXMLRPCRequestHandler
    class MyRequestHandler(requestHandler):
        rpc_paths = []
    serv = MyXMLRPCServer(("localhost", 0), MyRequestHandler,
                          logRequests=False, bind_and_activate=False)
    try:
        # Bug fix: settimeout/server_bind previously ran *before* the try
        # block (unlike http_server), so a bind failure leaked the socket
        # and never set `evt`, leaving the test thread waiting on it.
        serv.socket.settimeout(3)
        serv.server_bind()
        global ADDR, PORT, URL
        ADDR, PORT = serv.socket.getsockname()
        #connect to IP address directly.  This avoids socket.create_connection()
        #trying to connect to "localhost" using all address families, which
        #causes slowdown e.g. on vista which supports AF_INET6. The server listens
        #on AF_INET only.
        URL = "http://%s:%d"%(ADDR, PORT)
        serv.server_activate()
        paths = ["/foo", "/foo/bar"]
        for path in paths:
            d = serv.add_dispatcher(path, SimpleXMLRPCServer.SimpleXMLRPCDispatcher())
            d.register_introspection_functions()
            d.register_multicall_functions()
        serv.get_dispatcher(paths[0]).register_function(pow)
        serv.get_dispatcher(paths[1]).register_function(lambda x,y: x+y, 'add')
        evt.set()
        # handle up to 'numrequests' requests
        while numrequests > 0:
            serv.handle_request()
            numrequests -= 1
    except socket.timeout:
        pass
    finally:
        # Always release the socket and signal shutdown, even on error.
        serv.socket.close()
        PORT = None
        evt.set()
# This function prevents errors like:
# <ProtocolError for localhost:57527/RPC2: 500 Internal Server Error>
def is_unavailable_exception(e):
    """Return True when *e* looks like a server-side 'temporarily
    unavailable' failure produced by operations on non-blocking sockets."""
    try:
        # A -1 error code or missing headers already marks the error as an
        # availability problem; sometimes we get exactly that.
        if e.errcode == -1 or e.headers is None:
            return True
        message = e.headers.get('X-exception')
    except AttributeError:
        # Not a ProtocolError (e.g. a socket.error) - fall back to its text.
        message = str(e)
    return bool(message and 'temporarily unavailable' in message.lower())
@unittest.skipUnless(threading, 'Threading required for this test.')
class BaseServerTestCase(unittest.TestCase):
    """Base class: starts an XML-RPC server thread around each test.

    Subclasses tune the class attributes below to pick the server
    function, request handler and number of requests to serve.
    """
    requestHandler = None
    request_count = 1
    threadFunc = staticmethod(http_server)
    def setUp(self):
        # enable traceback reporting
        SimpleXMLRPCServer.SimpleXMLRPCServer._send_traceback_header = True
        self.evt = threading.Event()
        # start server thread to handle requests
        serv_args = (self.evt, self.request_count, self.requestHandler)
        threading.Thread(target=self.threadFunc, args=serv_args).start()
        # wait for the server to be ready (evt is set once on readiness;
        # clear it so tearDown can wait for the shutdown set)
        self.evt.wait(10)
        self.evt.clear()
    def tearDown(self):
        # wait on the server thread to terminate
        self.evt.wait(10)
        # disable traceback reporting
        SimpleXMLRPCServer.SimpleXMLRPCServer._send_traceback_header = False
# NOTE: The tests in SimpleServerTestCase will ignore failures caused by
# "temporarily unavailable" exceptions raised in SimpleXMLRPCServer. This
# condition occurs infrequently on some platforms, frequently on others, and
# is apparently caused by using SimpleXMLRPCServer with a non-blocking socket
# If the server class is updated at some point in the future to handle this
# situation more gracefully, these tests should be modified appropriately.
class SimpleServerTestCase(BaseServerTestCase):
    """End-to-end tests against a live SimpleXMLRPCServer thread.

    Each test tolerates the 'temporarily unavailable' failures described
    in is_unavailable_exception() (non-blocking socket artifacts).
    """
    def test_simple1(self):
        try:
            p = xmlrpclib.ServerProxy(URL)
            self.assertEqual(p.pow(6,8), 6**8)
        except (xmlrpclib.ProtocolError, socket.error), e:
            # ignore failures due to non-blocking socket 'unavailable' errors
            if not is_unavailable_exception(e):
                # protocol error; provide additional information in test output
                self.fail("%s\n%s" % (e, getattr(e, "headers", "")))
    def test_nonascii(self):
        start_string = 'P\N{LATIN SMALL LETTER Y WITH CIRCUMFLEX}t'
        end_string = 'h\N{LATIN SMALL LETTER O WITH HORN}n'
        try:
            p = xmlrpclib.ServerProxy(URL)
            self.assertEqual(p.add(start_string, end_string),
                             start_string + end_string)
        except (xmlrpclib.ProtocolError, socket.error) as e:
            # ignore failures due to non-blocking socket unavailable errors.
            if not is_unavailable_exception(e):
                # protocol error; provide additional information in test output
                self.fail("%s\n%s" % (e, getattr(e, "headers", "")))
    def test_unicode_host(self):
        # a unicode URL must be accepted by ServerProxy
        server = xmlrpclib.ServerProxy(u"http://%s:%d/RPC2"%(ADDR, PORT))
        self.assertEqual(server.add("a", u"\xe9"), u"a\xe9")
    # [ch] The test 404 is causing lots of false alarms.
    def XXXtest_404(self):
        # send POST with httplib, it should return 404 header and
        # 'Not Found' message.
        conn = httplib.HTTPConnection(ADDR, PORT)
        conn.request('POST', '/this-is-not-valid')
        response = conn.getresponse()
        conn.close()
        self.assertEqual(response.status, 404)
        self.assertEqual(response.reason, 'Not Found')
    def test_introspection1(self):
        # system.listMethods must report all registered methods
        try:
            p = xmlrpclib.ServerProxy(URL)
            meth = p.system.listMethods()
            expected_methods = set(['pow', 'div', 'my_function', 'add',
                                    'system.listMethods', 'system.methodHelp',
                                    'system.methodSignature', 'system.multicall'])
            self.assertEqual(set(meth), expected_methods)
        except (xmlrpclib.ProtocolError, socket.error), e:
            # ignore failures due to non-blocking socket 'unavailable' errors
            if not is_unavailable_exception(e):
                # protocol error; provide additional information in test output
                self.fail("%s\n%s" % (e, getattr(e, "headers", "")))
    def test_introspection2(self):
        try:
            # test _methodHelp()
            p = xmlrpclib.ServerProxy(URL)
            divhelp = p.system.methodHelp('div')
            self.assertEqual(divhelp, 'This is the div function')
        except (xmlrpclib.ProtocolError, socket.error), e:
            # ignore failures due to non-blocking socket 'unavailable' errors
            if not is_unavailable_exception(e):
                # protocol error; provide additional information in test output
                self.fail("%s\n%s" % (e, getattr(e, "headers", "")))
    @unittest.skipIf(sys.flags.optimize >= 2,
                     "Docstrings are omitted with -O2 and above")
    def test_introspection3(self):
        try:
            # test native doc
            p = xmlrpclib.ServerProxy(URL)
            myfunction = p.system.methodHelp('my_function')
            self.assertEqual(myfunction, 'This is my function')
        except (xmlrpclib.ProtocolError, socket.error), e:
            # ignore failures due to non-blocking socket 'unavailable' errors
            if not is_unavailable_exception(e):
                # protocol error; provide additional information in test output
                self.fail("%s\n%s" % (e, getattr(e, "headers", "")))
    def test_introspection4(self):
        # the SimpleXMLRPCServer doesn't support signatures, but
        # at least check that we can try making the call
        try:
            p = xmlrpclib.ServerProxy(URL)
            divsig = p.system.methodSignature('div')
            self.assertEqual(divsig, 'signatures not supported')
        except (xmlrpclib.ProtocolError, socket.error), e:
            # ignore failures due to non-blocking socket 'unavailable' errors
            if not is_unavailable_exception(e):
                # protocol error; provide additional information in test output
                self.fail("%s\n%s" % (e, getattr(e, "headers", "")))
    def test_multicall(self):
        # three calls batched through system.multicall
        try:
            p = xmlrpclib.ServerProxy(URL)
            multicall = xmlrpclib.MultiCall(p)
            multicall.add(2,3)
            multicall.pow(6,8)
            multicall.div(127,42)
            add_result, pow_result, div_result = multicall()
            self.assertEqual(add_result, 2+3)
            self.assertEqual(pow_result, 6**8)
            self.assertEqual(div_result, 127//42)
        except (xmlrpclib.ProtocolError, socket.error), e:
            # ignore failures due to non-blocking socket 'unavailable' errors
            if not is_unavailable_exception(e):
                # protocol error; provide additional information in test output
                self.fail("%s\n%s" % (e, getattr(e, "headers", "")))
    def test_non_existing_multicall(self):
        # a multicall of an unknown method yields a per-call fault entry
        try:
            p = xmlrpclib.ServerProxy(URL)
            multicall = xmlrpclib.MultiCall(p)
            multicall.this_is_not_exists()
            result = multicall()
            # result.results contains;
            # [{'faultCode': 1, 'faultString': '<type \'exceptions.Exception\'>:'
            #   'method "this_is_not_exists" is not supported'>}]
            self.assertEqual(result.results[0]['faultCode'], 1)
            self.assertEqual(result.results[0]['faultString'],
                '<type \'exceptions.Exception\'>:method "this_is_not_exists" '
                'is not supported')
        except (xmlrpclib.ProtocolError, socket.error), e:
            # ignore failures due to non-blocking socket 'unavailable' errors
            if not is_unavailable_exception(e):
                # protocol error; provide additional information in test output
                self.fail("%s\n%s" % (e, getattr(e, "headers", "")))
    def test_dotted_attribute(self):
        # Raises an AttributeError because private methods are not allowed.
        self.assertRaises(AttributeError,
                          SimpleXMLRPCServer.resolve_dotted_attribute, str, '__add')
        self.assertTrue(SimpleXMLRPCServer.resolve_dotted_attribute(str, 'title'))
        # Get the test to run faster by sending a request with test_simple1.
        # This avoids waiting for the socket timeout.
        self.test_simple1()
    def test_partial_post(self):
        # Check that a partial POST doesn't make the server loop: issue #14001.
        conn = httplib.HTTPConnection(ADDR, PORT)
        conn.request('POST', '/RPC2 HTTP/1.0\r\nContent-Length: 100\r\n\r\nbye')
        conn.close()
class MultiPathServerTestCase(BaseServerTestCase):
    """Tests per-path dispatch on a MultiPathXMLRPCServer.

    /foo exposes only pow(); /foo/bar exposes only add(); calling the
    method from the wrong path must raise a Fault.
    """
    threadFunc = staticmethod(http_multi_server)
    request_count = 2
    def test_path1(self):
        p = xmlrpclib.ServerProxy(URL+"/foo")
        self.assertEqual(p.pow(6,8), 6**8)
        self.assertRaises(xmlrpclib.Fault, p.add, 6, 8)
    def test_path2(self):
        p = xmlrpclib.ServerProxy(URL+"/foo/bar")
        self.assertEqual(p.add(6,8), 6+8)
        self.assertRaises(xmlrpclib.Fault, p.pow, 6, 8)
#A test case that verifies that a server using the HTTP/1.1 keep-alive mechanism
#does indeed serve subsequent requests on the same connection
class BaseKeepaliveServerTestCase(BaseServerTestCase):
    """Base for keep-alive tests: logs request lines per connection."""
    #a request handler that supports keep-alive and logs requests into a
    #class variable
    class RequestHandler(SimpleXMLRPCServer.SimpleXMLRPCRequestHandler):
        parentClass = SimpleXMLRPCServer.SimpleXMLRPCRequestHandler
        protocol_version = 'HTTP/1.1'
        # myRequests is a class-level log: one sub-list per connection,
        # holding the raw request line of each request on that connection.
        myRequests = []
        def handle(self):
            # new connection: open a fresh sub-list and remember its index
            self.myRequests.append([])
            self.reqidx = len(self.myRequests)-1
            return self.parentClass.handle(self)
        def handle_one_request(self):
            result = self.parentClass.handle_one_request(self)
            self.myRequests[self.reqidx].append(self.raw_requestline)
            return result
    requestHandler = RequestHandler
    def setUp(self):
        #clear request log
        self.RequestHandler.myRequests = []
        return BaseServerTestCase.setUp(self)
#A test case that verifies that a server using the HTTP/1.1 keep-alive mechanism
#does indeed serve subsequent requests on the same connection
class KeepaliveServerTestCase1(BaseKeepaliveServerTestCase):
    def test_two(self):
        """Three calls over keep-alive must all travel on one connection."""
        p = xmlrpclib.ServerProxy(URL)
        #do three requests.
        self.assertEqual(p.pow(6,8), 6**8)
        self.assertEqual(p.pow(6,8), 6**8)
        self.assertEqual(p.pow(6,8), 6**8)
        #they should have all been handled by a single request handler
        self.assertEqual(len(self.RequestHandler.myRequests), 1)
        #check that we did at least two (the third may be pending append
        #due to thread scheduling)
        self.assertGreaterEqual(len(self.RequestHandler.myRequests[-1]), 2)
#test special attribute access on the serverproxy, through the __call__
#function.
class KeepaliveServerTestCase2(BaseKeepaliveServerTestCase):
    #ask for two keepalive requests to be handled.
    request_count=2

    def test_close(self):
        """p("close")() must drop the connection and force a second one."""
        p = xmlrpclib.ServerProxy(URL)
        #do some requests with close.
        self.assertEqual(p.pow(6,8), 6**8)
        self.assertEqual(p.pow(6,8), 6**8)
        self.assertEqual(p.pow(6,8), 6**8)
        p("close")() #this should trigger a new keep-alive request
        self.assertEqual(p.pow(6,8), 6**8)
        self.assertEqual(p.pow(6,8), 6**8)
        self.assertEqual(p.pow(6,8), 6**8)
        #they should have all been two request handlers, each having logged at least
        #two complete requests
        self.assertEqual(len(self.RequestHandler.myRequests), 2)
        self.assertGreaterEqual(len(self.RequestHandler.myRequests[-1]), 2)
        self.assertGreaterEqual(len(self.RequestHandler.myRequests[-2]), 2)

    def test_transport(self):
        """Closing the transport directly behaves like p("close")()."""
        p = xmlrpclib.ServerProxy(URL)
        #do some requests with close.
        self.assertEqual(p.pow(6,8), 6**8)
        p("transport").close() #same as above, really.
        self.assertEqual(p.pow(6,8), 6**8)
        self.assertEqual(len(self.RequestHandler.myRequests), 2)
#A test case that verifies that gzip encoding works in both directions
#(for a request and the response)
class GzipServerTestCase(BaseServerTestCase):
    """Check gzip compression of XML-RPC requests and responses."""
    #a request handler that supports keep-alive and logs the size of each
    #request body into a class variable
    class RequestHandler(SimpleXMLRPCServer.SimpleXMLRPCRequestHandler):
        parentClass = SimpleXMLRPCServer.SimpleXMLRPCRequestHandler
        protocol_version = 'HTTP/1.1'

        def do_POST(self):
            #store content length of last request in the class
            self.__class__.content_length = int(self.headers["content-length"])
            return self.parentClass.do_POST(self)
    requestHandler = RequestHandler

    class Transport(xmlrpclib.Transport):
        #custom transport, stores the response length for our perusal
        fake_gzip = False

        def parse_response(self, response):
            self.response_length = int(response.getheader("content-length", 0))
            return xmlrpclib.Transport.parse_response(self, response)

        def send_content(self, connection, body):
            if self.fake_gzip:
                #add a lone gzip header to induce decode error remotely
                connection.putheader("Content-Encoding", "gzip")
            return xmlrpclib.Transport.send_content(self, connection, body)

    def setUp(self):
        BaseServerTestCase.setUp(self)

    def test_gzip_request(self):
        t = self.Transport()
        t.encode_threshold = None
        p = xmlrpclib.ServerProxy(URL, transport=t)
        self.assertEqual(p.pow(6, 8), 6**8)
        a = self.RequestHandler.content_length
        t.encode_threshold = 0 #turn on request encoding
        self.assertEqual(p.pow(6, 8), 6**8)
        b = self.RequestHandler.content_length
        #the gzip-encoded request must be smaller than the plain one
        #(assertGreater reports both values on failure, unlike assertTrue(a>b))
        self.assertGreater(a, b)

    def test_bad_gzip_request(self):
        t = self.Transport()
        t.encode_threshold = None
        t.fake_gzip = True
        p = xmlrpclib.ServerProxy(URL, transport=t)
        #the server must reject a body that does not match its gzip header
        cm = self.assertRaisesRegexp(xmlrpclib.ProtocolError,
                                     re.compile(r"\b400\b"))
        with cm:
            p.pow(6, 8)

    def test_gsip_response(self):
        #NOTE(review): method name contains a typo ("gsip" for "gzip"); it is
        #kept unchanged so test selections by name keep working.
        t = self.Transport()
        p = xmlrpclib.ServerProxy(URL, transport=t)
        old = self.requestHandler.encode_threshold
        self.requestHandler.encode_threshold = None #no encoding
        self.assertEqual(p.pow(6, 8), 6**8)
        a = t.response_length
        self.requestHandler.encode_threshold = 0 #always encode
        self.assertEqual(p.pow(6, 8), 6**8)
        b = t.response_length
        self.requestHandler.encode_threshold = old
        #the gzip-encoded response must be smaller than the plain one
        self.assertGreater(a, b)
#Test special attributes of the ServerProxy object
class ServerProxyTestCase(unittest.TestCase):
    """Exercise the __call__-based special attributes of ServerProxy."""

    def setUp(self):
        unittest.TestCase.setUp(self)
        # Without threading, http_server() and http_multi_server() never run
        # and URL stays None; 'http://' is just enough to pick the HTTP scheme.
        self.url = URL if threading else 'http://'

    def test_close(self):
        proxy = xmlrpclib.ServerProxy(self.url)
        self.assertEqual(proxy('close')(), None)

    def test_transport(self):
        transport = xmlrpclib.Transport()
        proxy = xmlrpclib.ServerProxy(self.url, transport=transport)
        self.assertEqual(proxy('transport'), transport)
# This is a contrived way to make a failure occur on the server side
# in order to test the _send_traceback_header flag on the server
class FailingMessageClass(mimetools.Message):
    """Message subclass whose Content-Length header is deliberately bogus."""

    def __getitem__(self, key):
        lowered = key.lower()
        if lowered == 'content-length':
            # Non-numeric value makes int() blow up inside the server.
            return 'I am broken'
        return mimetools.Message.__getitem__(self, lowered)
@unittest.skipUnless(threading, 'Threading required for this test.')
class FailingServerTestCase(unittest.TestCase):
    """Server-side failures and the _send_traceback_header diagnostics flag."""
    def setUp(self):
        self.evt = threading.Event()
        # start server thread to handle requests
        serv_args = (self.evt, 1)
        threading.Thread(target=http_server, args=serv_args).start()
        # wait for the server to be ready
        self.evt.wait()
        self.evt.clear()

    def tearDown(self):
        # wait on the server thread to terminate
        self.evt.wait()
        # reset flag
        SimpleXMLRPCServer.SimpleXMLRPCServer._send_traceback_header = False
        # reset message class
        SimpleXMLRPCServer.SimpleXMLRPCRequestHandler.MessageClass = mimetools.Message

    def test_basic(self):
        # check that flag is false by default
        flagval = SimpleXMLRPCServer.SimpleXMLRPCServer._send_traceback_header
        self.assertEqual(flagval, False)
        # enable traceback reporting
        SimpleXMLRPCServer.SimpleXMLRPCServer._send_traceback_header = True
        # test a call that shouldn't fail just as a smoke test
        try:
            p = xmlrpclib.ServerProxy(URL)
            self.assertEqual(p.pow(6,8), 6**8)
        except (xmlrpclib.ProtocolError, socket.error), e:
            # ignore failures due to non-blocking socket 'unavailable' errors
            if not is_unavailable_exception(e):
                # protocol error; provide additional information in test output
                self.fail("%s\n%s" % (e, getattr(e, "headers", "")))

    def test_fail_no_info(self):
        # use the broken message class
        SimpleXMLRPCServer.SimpleXMLRPCRequestHandler.MessageClass = FailingMessageClass
        try:
            p = xmlrpclib.ServerProxy(URL)
            p.pow(6,8)
        except (xmlrpclib.ProtocolError, socket.error), e:
            # ignore failures due to non-blocking socket 'unavailable' errors
            if not is_unavailable_exception(e) and hasattr(e, "headers"):
                # The two server-side error headers shouldn't be sent back in this case
                self.assertTrue(e.headers.get("X-exception") is None)
                self.assertTrue(e.headers.get("X-traceback") is None)
        else:
            # try/else: reached only when no exception was raised at all
            self.fail('ProtocolError not raised')

    def test_fail_with_info(self):
        # use the broken message class
        SimpleXMLRPCServer.SimpleXMLRPCRequestHandler.MessageClass = FailingMessageClass
        # Check that errors in the server send back exception/traceback
        # info when flag is set
        SimpleXMLRPCServer.SimpleXMLRPCServer._send_traceback_header = True
        try:
            p = xmlrpclib.ServerProxy(URL)
            p.pow(6,8)
        except (xmlrpclib.ProtocolError, socket.error), e:
            # ignore failures due to non-blocking socket 'unavailable' errors
            if not is_unavailable_exception(e) and hasattr(e, "headers"):
                # We should get error info in the response
                expected_err = "invalid literal for int() with base 10: 'I am broken'"
                self.assertEqual(e.headers.get("x-exception"), expected_err)
                self.assertTrue(e.headers.get("x-traceback") is not None)
        else:
            # try/else: reached only when no exception was raised at all
            self.fail('ProtocolError not raised')
class CGIHandlerTestCase(unittest.TestCase):
    """Drive CGIXMLRPCRequestHandler through captured stdin/stdout."""
    def setUp(self):
        self.cgi = SimpleXMLRPCServer.CGIXMLRPCRequestHandler()

    def tearDown(self):
        self.cgi = None

    def test_cgi_get(self):
        with test_support.EnvironmentVarGuard() as env:
            env['REQUEST_METHOD'] = 'GET'
            # if the method is GET and no request_text is given, it runs handle_get
            # get sysout output
            with test_support.captured_stdout() as data_out:
                self.cgi.handle_request()
            # parse Status header
            data_out.seek(0)
            handle = data_out.read()
            status = handle.split()[1]
            message = ' '.join(handle.split()[2:4])
            self.assertEqual(status, '400')
            self.assertEqual(message, 'Bad Request')

    def test_cgi_xmlrpc_response(self):
        # Call "test_method", which the handler does not register, so the
        # response must carry a Fault.
        data = """<?xml version='1.0'?>
<methodCall>
<methodName>test_method</methodName>
<params>
<param>
<value><string>foo</string></value>
</param>
<param>
<value><string>bar</string></value>
</param>
</params>
</methodCall>
"""
        with test_support.EnvironmentVarGuard() as env, \
            test_support.captured_stdout() as data_out, \
            test_support.captured_stdin() as data_in:
            data_in.write(data)
            data_in.seek(0)
            env['CONTENT_LENGTH'] = str(len(data))
            self.cgi.handle_request()
        data_out.seek(0)
        # will respond exception, if so, our goal is achieved ;)
        handle = data_out.read()
        # start with 44th char so as not to get http header, we just need only xml
        self.assertRaises(xmlrpclib.Fault, xmlrpclib.loads, handle[44:])
        # Also test the content-length returned by handle_request
        # Using the same test method in order to avoid all the data-passing
        # boilerplate code.
        # Test for bug: http://bugs.python.org/issue5040
        content = handle[handle.find("<?xml"):]
        self.assertEqual(
            int(re.search('Content-Length: (\d+)', handle).group(1)),
            len(content))
class FakeSocket:
    """Minimal socket stand-in that records everything "sent" through it."""

    def __init__(self):
        self.data = StringIO.StringIO()

    def getvalue(self):
        # Everything written via send()/sendall() so far.
        return self.data.getvalue()

    def send(self, buf):
        self.data.write(buf)
        return len(buf)

    def sendall(self, buf):
        self.data.write(buf)

    def makefile(self, x='r', y=-1):
        # There is never a response stream; abort the client instead.
        raise RuntimeError

    def close(self):
        pass
class FakeTransport(xmlrpclib.Transport):
    """A Transport instance that records instead of sending a request.

    This class replaces the actual socket used by httplib with a
    FakeSocket object that records the request. It doesn't provide a
    response.
    """
    def make_connection(self, host):
        conn = xmlrpclib.Transport.make_connection(self, host)
        # Swap the real socket for the recorder; keep a handle for the tests.
        conn.sock = self.fake_socket = FakeSocket()
        return conn
class TransportSubclassTestCase(unittest.TestCase):
    """Transport subclass hooks must be able to inject extra headers."""
    def issue_request(self, transport_class):
        """Return an HTTP request made via transport_class."""
        transport = transport_class()
        proxy = xmlrpclib.ServerProxy("http://example.com/",
                                      transport=transport)
        try:
            proxy.pow(6, 8)
        except RuntimeError:
            # Raised by FakeSocket.makefile(); the request was fully recorded.
            return transport.fake_socket.getvalue()
        return None

    def test_custom_user_agent(self):
        class TestTransport(FakeTransport):
            def send_user_agent(self, conn):
                xmlrpclib.Transport.send_user_agent(self, conn)
                conn.putheader("X-Test", "test_custom_user_agent")
        req = self.issue_request(TestTransport)
        self.assertIn("X-Test: test_custom_user_agent\r\n", req)

    def test_send_host(self):
        class TestTransport(FakeTransport):
            def send_host(self, conn, host):
                xmlrpclib.Transport.send_host(self, conn, host)
                conn.putheader("X-Test", "test_send_host")
        req = self.issue_request(TestTransport)
        self.assertIn("X-Test: test_send_host\r\n", req)

    def test_send_request(self):
        class TestTransport(FakeTransport):
            def send_request(self, conn, url, body):
                xmlrpclib.Transport.send_request(self, conn, url, body)
                conn.putheader("X-Test", "test_send_request")
        req = self.issue_request(TestTransport)
        self.assertIn("X-Test: test_send_request\r\n", req)

    def test_send_content(self):
        class TestTransport(FakeTransport):
            def send_content(self, conn, body):
                # the header must be emitted before the body is written
                conn.putheader("X-Test", "test_send_content")
                xmlrpclib.Transport.send_content(self, conn, body)
        req = self.issue_request(TestTransport)
        self.assertIn("X-Test: test_send_content\r\n", req)
@test_support.reap_threads
def test_main():
    """Run the whole suite; gzip cases only when gzip is available."""
    xmlrpc_tests = [
        XMLRPCTestCase,
        HelperTestCase,
        DateTimeTestCase,
        BinaryTestCase,
        FaultTestCase,
        TransportSubclassTestCase,
        SimpleServerTestCase,
        KeepaliveServerTestCase1,
        KeepaliveServerTestCase2,
    ]
    try:
        import gzip
    except ImportError:
        pass #gzip not supported in this build
    else:
        xmlrpc_tests.append(GzipServerTestCase)
    xmlrpc_tests.extend([
        MultiPathServerTestCase,
        ServerProxyTestCase,
        FailingServerTestCase,
        CGIHandlerTestCase,
    ])
    test_support.run_unittest(*xmlrpc_tests)
# Allow running this test module directly.
if __name__ == "__main__":
    test_main()
| gpl-2.0 |
kennedyshead/home-assistant | tests/components/netatmo/test_sensor.py | 2 | 7387 | """The tests for the Netatmo sensor platform."""
from unittest.mock import patch
import pytest
from homeassistant.components.netatmo import sensor
from homeassistant.components.netatmo.sensor import MODULE_TYPE_WIND
from homeassistant.helpers import entity_registry as er
from .common import TEST_TIME, selected_platforms
async def test_weather_sensor(hass, config_entry, netatmo_auth):
    """Test weather sensor setup."""
    with patch("time.time", return_value=TEST_TIME), selected_platforms(["sensor"]):
        await hass.config_entries.async_setup(config_entry.entry_id)
        await hass.async_block_till_done()

    # Every measurement exposed by the mocked station must surface as a state.
    expected_states = {
        "temperature": "24.6",
        "humidity": "36",
        "co2": "749",
        "pressure": "1017.3",
    }
    for measurement, value in expected_states.items():
        assert hass.states.get(f"sensor.netatmo_mystation_{measurement}").state == value
async def test_public_weather_sensor(hass, config_entry, netatmo_auth):
    """Test public weather sensor setup."""
    with patch("time.time", return_value=TEST_TIME), selected_platforms(["sensor"]):
        await hass.config_entries.async_setup(config_entry.entry_id)
        await hass.async_block_till_done()

    assert len(hass.states.async_all()) > 0

    prefix = "sensor.netatmo_home_max_"

    assert hass.states.get(f"{prefix}temperature").state == "27.4"
    assert hass.states.get(f"{prefix}humidity").state == "76"
    assert hass.states.get(f"{prefix}pressure").state == "1014.4"

    prefix = "sensor.netatmo_home_avg_"

    assert hass.states.get(f"{prefix}temperature").state == "22.7"
    assert hass.states.get(f"{prefix}humidity").state == "63.2"
    assert hass.states.get(f"{prefix}pressure").state == "1010.3"

    entities_before_change = len(hass.states.async_all())

    # Reconfigure the existing "Home avg" area to "max" mode via the options flow.
    valid_option = {
        "lat_ne": 32.91336,
        "lon_ne": -117.187429,
        "lat_sw": 32.83336,
        "lon_sw": -117.26743,
        "show_on_map": True,
        "area_name": "Home avg",
        "mode": "max",
    }

    result = await hass.config_entries.options.async_init(config_entry.entry_id)
    result = await hass.config_entries.options.async_configure(
        result["flow_id"], user_input={"new_area": "Home avg"}
    )
    result = await hass.config_entries.options.async_configure(
        result["flow_id"], user_input=valid_option
    )
    result = await hass.config_entries.options.async_configure(
        result["flow_id"], user_input={}
    )
    await hass.async_block_till_done()

    # Updating an existing area must not create or remove entities.
    assert len(hass.states.async_all()) == entities_before_change
    # The "avg" entity now reports the max-mode value.
    assert hass.states.get(f"{prefix}temperature").state == "27.4"
@pytest.mark.parametrize(
    "strength, expected",
    [(50, "Full"), (60, "High"), (80, "Medium"), (90, "Low")],
)
async def test_process_wifi(strength, expected):
    """Test wifi strength translation."""
    # Per the parametrized data, lower numbers mean a stronger signal.
    assert sensor.process_wifi(strength) == expected


@pytest.mark.parametrize(
    "strength, expected",
    [(50, "Full"), (70, "High"), (80, "Medium"), (90, "Low")],
)
async def test_process_rf(strength, expected):
    """Test radio strength translation."""
    # Same direction as wifi, but with different thresholds.
    assert sensor.process_rf(strength) == expected


@pytest.mark.parametrize(
    "health, expected",
    [(4, "Unhealthy"), (3, "Poor"), (2, "Fair"), (1, "Fine"), (0, "Healthy")],
)
async def test_process_health(health, expected):
    """Test health index translation."""
    assert sensor.process_health(health) == expected


@pytest.mark.parametrize(
    "model, data, expected",
    [
        (MODULE_TYPE_WIND, 5591, "Full"),
        (MODULE_TYPE_WIND, 5181, "High"),
        (MODULE_TYPE_WIND, 4771, "Medium"),
        (MODULE_TYPE_WIND, 4361, "Low"),
        (MODULE_TYPE_WIND, 4300, "Very Low"),
    ],
)
async def test_process_battery(model, data, expected):
    """Test battery level translation."""
    # Thresholds depend on the module model (here: the wind gauge).
    assert sensor.process_battery(data, model) == expected


@pytest.mark.parametrize(
    "angle, expected",
    [
        (0, "N"),
        (40, "NE"),
        (70, "E"),
        (130, "SE"),
        (160, "S"),
        (220, "SW"),
        (250, "W"),
        (310, "NW"),
        (340, "N"),
    ],
)
async def test_process_angle(angle, expected):
    """Test wind direction translation."""
    assert sensor.process_angle(angle) == expected


@pytest.mark.parametrize(
    "angle, expected",
    [(-1, 359), (-40, 320)],
)
async def test_fix_angle(angle, expected):
    """Test wind angle fix."""
    # Negative angles wrap around into the 0-359 range.
    assert sensor.fix_angle(angle) == expected
@pytest.mark.parametrize(
"uid, name, expected",
[
("12:34:56:37:11:ca-reachable", "netatmo_mystation_reachable", "True"),
("12:34:56:03:1b:e4-rf_status", "netatmo_mystation_yard_radio", "Full"),
(
"12:34:56:05:25:6e-rf_status",
"netatmo_valley_road_rain_gauge_radio",
"Medium",
),
(
"12:34:56:36:fc:de-rf_status_lvl",
"netatmo_mystation_netatmooutdoor_radio_level",
"65",
),
(
"12:34:56:37:11:ca-wifi_status_lvl",
"netatmo_mystation_wifi_level",
"45",
),
(
"12:34:56:37:11:ca-wifi_status",
"netatmo_mystation_wifi_status",
"Full",
),
(
"12:34:56:37:11:ca-temp_trend",
"netatmo_mystation_temperature_trend",
"stable",
),
(
"12:34:56:37:11:ca-pressure_trend",
"netatmo_mystation_pressure_trend",
"down",
),
("12:34:56:05:51:20-sum_rain_1", "netatmo_mystation_yard_rain_last_hour", "0"),
("12:34:56:05:51:20-sum_rain_24", "netatmo_mystation_yard_rain_today", "0"),
("12:34:56:03:1b:e4-windangle", "netatmo_mystation_garden_direction", "SW"),
(
"12:34:56:03:1b:e4-windangle_value",
"netatmo_mystation_garden_angle",
"217",
),
("12:34:56:03:1b:e4-gustangle", "mystation_garden_gust_direction", "S"),
(
"12:34:56:03:1b:e4-gustangle",
"netatmo_mystation_garden_gust_direction",
"S",
),
(
"12:34:56:03:1b:e4-gustangle_value",
"netatmo_mystation_garden_gust_angle_value",
"206",
),
(
"12:34:56:03:1b:e4-guststrength",
"netatmo_mystation_garden_gust_strength",
"9",
),
(
"12:34:56:26:68:92-health_idx",
"netatmo_baby_bedroom_health",
"Fine",
),
],
)
async def test_weather_sensor_enabling(
    hass, config_entry, uid, name, expected, netatmo_auth
):
    """Test enabling of by default disabled sensors."""
    with patch("time.time", return_value=TEST_TIME), selected_platforms(["sensor"]):
        states_before = len(hass.states.async_all())
        assert hass.states.get(f"sensor.{name}") is None

        # Pre-register the entity as enabled (disabled_by=None) so the
        # following setup actually creates a state for it.
        registry = er.async_get(hass)
        registry.async_get_or_create(
            "sensor",
            "netatmo",
            uid,
            suggested_object_id=name,
            disabled_by=None,
        )

        await hass.config_entries.async_setup(config_entry.entry_id)
        await hass.async_block_till_done()

        assert len(hass.states.async_all()) > states_before
        assert hass.states.get(f"sensor.{name}").state == expected
| apache-2.0 |
CiuffysHub/MITMf | mitmflib-0.18.4/mitmflib/impacket/ICMP6.py | 2 | 19485 | # Copyright (c) 2003-2012 CORE Security Technologies
#
# This software is provided under under a slightly modified version
# of the Apache Software License. See the accompanying LICENSE file
# for more information.
#
import array
import struct
from ImpactPacket import Header, Data
from IP6_Address import IP6_Address
class ICMP6(Header):
    """ICMPv6 header builder/parser (echo, errors, neighbor discovery, node info)."""
    #IP Protocol number for ICMP6
    IP_PROTOCOL_NUMBER = 58
    protocol = IP_PROTOCOL_NUMBER #ImpactDecoder uses the constant "protocol" as the IP Protocol Number

    #Size of ICMP6 header (excluding payload)
    HEADER_SIZE = 4

    #ICMP6 Message Type numbers
    DESTINATION_UNREACHABLE = 1
    PACKET_TOO_BIG = 2
    TIME_EXCEEDED = 3
    PARAMETER_PROBLEM = 4
    ECHO_REQUEST = 128
    ECHO_REPLY = 129
    ROUTER_SOLICITATION = 133
    ROUTER_ADVERTISEMENT = 134
    NEIGHBOR_SOLICITATION = 135
    NEIGHBOR_ADVERTISEMENT = 136
    REDIRECT_MESSAGE = 137
    NODE_INFORMATION_QUERY = 139
    NODE_INFORMATION_REPLY = 140

    #Destination Unreachable codes
    NO_ROUTE_TO_DESTINATION = 0
    ADMINISTRATIVELY_PROHIBITED = 1
    BEYOND_SCOPE_OF_SOURCE_ADDRESS = 2
    ADDRESS_UNREACHABLE = 3
    PORT_UNREACHABLE = 4
    SOURCE_ADDRESS_FAILED_INGRESS_EGRESS_POLICY = 5
    REJECT_ROUTE_TO_DESTINATION = 6

    #Time Exceeded codes
    HOP_LIMIT_EXCEEDED_IN_TRANSIT = 0
    FRAGMENT_REASSEMBLY_TIME_EXCEEDED = 1

    #Parameter problem codes
    ERRONEOUS_HEADER_FIELD_ENCOUNTERED = 0
    UNRECOGNIZED_NEXT_HEADER_TYPE_ENCOUNTERED = 1
    UNRECOGNIZED_IPV6_OPTION_ENCOUNTERED = 2

    #Node Information codes
    NODE_INFORMATION_QUERY_IPV6 = 0
    NODE_INFORMATION_QUERY_NAME_OR_EMPTY = 1
    NODE_INFORMATION_QUERY_IPV4 = 2
    NODE_INFORMATION_REPLY_SUCCESS = 0
    NODE_INFORMATION_REPLY_REFUSED = 1
    NODE_INFORMATION_REPLY_UNKNOWN_QTYPE = 2

    #Node Information qtypes
    NODE_INFORMATION_QTYPE_NOOP = 0
    NODE_INFORMATION_QTYPE_UNUSED = 1
    NODE_INFORMATION_QTYPE_NODENAME = 2
    NODE_INFORMATION_QTYPE_NODEADDRS = 3
    NODE_INFORMATION_QTYPE_IPv4ADDRS = 4

    #ICMP Message semantic types (error or informational)
    ERROR_MESSAGE = 0
    INFORMATIONAL_MESSAGE = 1

    #ICMP message dictionary - specifying text descriptions and valid message codes
    #Key: ICMP message number
    #Data: Tuple ( Message Type (error/informational), Text description, Codes dictionary (can be None) )
    #Codes dictionary
    #Key: Code number
    #Data: Text description

    #ICMP message dictionary tuple indexes
    MSG_TYPE_INDEX = 0
    DESCRIPTION_INDEX = 1
    CODES_INDEX = 2

    icmp_messages = {
        DESTINATION_UNREACHABLE : (ERROR_MESSAGE, "Destination unreachable",
                                   { NO_ROUTE_TO_DESTINATION : "No route to destination",
                                     ADMINISTRATIVELY_PROHIBITED : "Administratively prohibited",
                                     BEYOND_SCOPE_OF_SOURCE_ADDRESS : "Beyond scope of source address",
                                     ADDRESS_UNREACHABLE : "Address unreachable",
                                     PORT_UNREACHABLE : "Port unreachable",
                                     SOURCE_ADDRESS_FAILED_INGRESS_EGRESS_POLICY : "Source address failed ingress/egress policy",
                                     REJECT_ROUTE_TO_DESTINATION : "Reject route to destination"
                                   }),
        PACKET_TOO_BIG : (ERROR_MESSAGE, "Packet too big", None),
        TIME_EXCEEDED : (ERROR_MESSAGE, "Time exceeded",
                         {HOP_LIMIT_EXCEEDED_IN_TRANSIT : "Hop limit exceeded in transit",
                          FRAGMENT_REASSEMBLY_TIME_EXCEEDED : "Fragment reassembly time exceeded"
                         }),
        PARAMETER_PROBLEM : (ERROR_MESSAGE, "Parameter problem",
                             {
                              ERRONEOUS_HEADER_FIELD_ENCOUNTERED : "Erroneous header field encountered",
                              UNRECOGNIZED_NEXT_HEADER_TYPE_ENCOUNTERED : "Unrecognized Next Header type encountered",
                              UNRECOGNIZED_IPV6_OPTION_ENCOUNTERED : "Unrecognized IPv6 Option Encountered"
                             }),
        ECHO_REQUEST : (INFORMATIONAL_MESSAGE, "Echo request", None),
        ECHO_REPLY : (INFORMATIONAL_MESSAGE, "Echo reply", None),
        ROUTER_SOLICITATION : (INFORMATIONAL_MESSAGE, "Router Solicitation", None),
        ROUTER_ADVERTISEMENT : (INFORMATIONAL_MESSAGE, "Router Advertisement", None),
        NEIGHBOR_SOLICITATION : (INFORMATIONAL_MESSAGE, "Neighbor Solicitation", None),
        NEIGHBOR_ADVERTISEMENT : (INFORMATIONAL_MESSAGE, "Neighbor Advertisement", None),
        REDIRECT_MESSAGE : (INFORMATIONAL_MESSAGE, "Redirect Message", None),
        NODE_INFORMATION_QUERY: (INFORMATIONAL_MESSAGE, "Node Information Query", None),
        NODE_INFORMATION_REPLY: (INFORMATIONAL_MESSAGE, "Node Information Reply", None),
        }
############################################################################
    def __init__(self, buffer = None):
        """Create an empty ICMP6 header, or parse one from *buffer*."""
        Header.__init__(self, self.HEADER_SIZE)
        if (buffer):
            self.load_header(buffer)

    def get_header_size(self):
        """Fixed ICMP6 header size in bytes (payload excluded)."""
        return self.HEADER_SIZE

    def get_ip_protocol_number(self):
        """Protocol number announced in the IPv6 Next Header field (58)."""
        return self.IP_PROTOCOL_NUMBER

    def __str__(self):
        """Readable dump of type, code (with descriptions) and checksum."""
        type = self.get_type()
        code = self.get_code()
        checksum = self.get_checksum()

        s = "ICMP6 - Type: " + str(type) + " - " + self.__get_message_description() + "\n"
        s += "Code: " + str(code)
        if (self.__get_code_description() != ""):
            s += " - " + self.__get_code_description()
        s += "\n"
        s += "Checksum: " + str(checksum) + "\n"
        return s

    def __get_message_description(self):
        # Description for the current type; raises KeyError on unknown types.
        return self.icmp_messages[self.get_type()][self.DESCRIPTION_INDEX]

    def __get_code_description(self):
        # Description for the current code, or "" when the type defines no codes.
        code_dictionary = self.icmp_messages[self.get_type()][self.CODES_INDEX]
        if (code_dictionary is None):
            return ""
        else:
            return code_dictionary[self.get_code()]

    ############################################################################

    def get_type(self):
        """Message type (header byte 0)."""
        return (self.get_byte(0))

    def get_code(self):
        """Message code (header byte 1)."""
        return (self.get_byte(1))

    def get_checksum(self):
        """Checksum (16-bit word at header offset 2)."""
        return (self.get_word(2))

    ############################################################################

    def set_type(self, type):
        self.set_byte(0, type)

    def set_code(self, code):
        self.set_byte(1, code)

    def set_checksum(self, checksum):
        self.set_word(2, checksum)
############################################################################
    def calculate_checksum(self):
        """Compute and store the ICMP6 checksum.

        The checksum is taken over the IPv6 pseudo-header (obtained from the
        parent IP6 packet), the ICMP6 header, and the payload if present.
        """
        #Initialize the checksum value to 0 to yield a correct calculation
        self.set_checksum(0)
        #Fetch the pseudo header from the IP6 parent packet
        pseudo_header = self.parent().get_pseudo_header()
        #Fetch the ICMP data
        icmp_header = self.get_bytes()
        #Build an array of bytes concatenating the pseudo_header, the ICMP header and the ICMP data (if present)
        checksum_array = array.array('B')
        checksum_array.extend(pseudo_header)
        checksum_array.extend(icmp_header)
        if (self.child()):
            checksum_array.extend(self.child().get_bytes())
        #Compute the checksum over that array
        self.set_checksum(self.compute_checksum(checksum_array))
    def is_informational_message(self):
        """True when the current type is classified as informational."""
        return self.icmp_messages[self.get_type()][self.MSG_TYPE_INDEX] == self.INFORMATIONAL_MESSAGE

    def is_error_message(self):
        """True when the current type is classified as an error."""
        return self.icmp_messages[self.get_type()][self.MSG_TYPE_INDEX] == self.ERROR_MESSAGE
def is_well_formed(self):
well_formed = True
#Check that the message type is known
well_formed &= self.get_type() in self.icmp_messages.keys()
#Check that the code is known (zero, if there are no codes defined)
code_dictionary = self.icmp_messages[self.get_type()][self.CODES_INDEX]
if (code_dictionary is None):
well_formed &= self.get_code() == 0
else:
well_formed &= self.get_code() in code_dictionary.keys()
return well_formed
############################################################################
    @classmethod
    def Echo_Request(class_object, id, sequence_number, arbitrary_data = None):
        """Build an Echo Request (type 128) packet."""
        return class_object.__build_echo_message(ICMP6.ECHO_REQUEST, id, sequence_number, arbitrary_data)

    @classmethod
    def Echo_Reply(class_object, id, sequence_number, arbitrary_data = None):
        """Build an Echo Reply (type 129) packet."""
        return class_object.__build_echo_message(ICMP6.ECHO_REPLY, id, sequence_number, arbitrary_data)

    @classmethod
    def __build_echo_message(class_object, type, id, sequence_number, arbitrary_data):
        """Assemble an echo message: header + big-endian id/sequence (+ optional data)."""
        #Build ICMP6 header
        icmp_packet = ICMP6()
        icmp_packet.set_type(type)
        icmp_packet.set_code(0)

        #Pack ICMP payload
        icmp_bytes = struct.pack('>H', id)
        icmp_bytes += struct.pack('>H', sequence_number)
        if (arbitrary_data is not None):
            icmp_bytes += array.array('B', arbitrary_data).tostring()
        icmp_payload = Data()
        icmp_payload.set_data(icmp_bytes)

        #Link payload to header
        icmp_packet.contains(icmp_payload)
        return icmp_packet
############################################################################
    @classmethod
    def Destination_Unreachable(class_object, code, originating_packet_data = None):
        """Build a Destination Unreachable (type 1) error packet."""
        unused_bytes = [0x00, 0x00, 0x00, 0x00]
        return class_object.__build_error_message(ICMP6.DESTINATION_UNREACHABLE, code, unused_bytes, originating_packet_data)

    @classmethod
    def Packet_Too_Big(class_object, MTU, originating_packet_data = None):
        """Build a Packet Too Big (type 2) error packet carrying the link MTU."""
        MTU_bytes = struct.pack('!L', MTU)
        return class_object.__build_error_message(ICMP6.PACKET_TOO_BIG, 0, MTU_bytes, originating_packet_data)

    @classmethod
    def Time_Exceeded(class_object, code, originating_packet_data = None):
        """Build a Time Exceeded (type 3) error packet."""
        unused_bytes = [0x00, 0x00, 0x00, 0x00]
        return class_object.__build_error_message(ICMP6.TIME_EXCEEDED, code, unused_bytes, originating_packet_data)

    @classmethod
    def Parameter_Problem(class_object, code, pointer, originating_packet_data = None):
        """Build a Parameter Problem (type 4) packet; *pointer* locates the offending octet."""
        pointer_bytes = struct.pack('!L', pointer)
        return class_object.__build_error_message(ICMP6.PARAMETER_PROBLEM, code, pointer_bytes, originating_packet_data)

    @classmethod
    def __build_error_message(class_object, type, code, data, originating_packet_data):
        """Assemble an error message: header + 4 type-specific bytes + offending packet."""
        #Build ICMP6 header
        icmp_packet = ICMP6()
        icmp_packet.set_type(type)
        icmp_packet.set_code(code)

        #Pack ICMP payload
        icmp_bytes = array.array('B', data).tostring()
        if (originating_packet_data is not None):
            icmp_bytes += array.array('B', originating_packet_data).tostring()
        icmp_payload = Data()
        icmp_payload.set_data(icmp_bytes)

        #Link payload to header
        icmp_packet.contains(icmp_payload)
        return icmp_packet
############################################################################
    @classmethod
    def Neighbor_Solicitation(class_object, target_address):
        """Build a Neighbor Solicitation (type 135) for *target_address*."""
        return class_object.__build_neighbor_message(ICMP6.NEIGHBOR_SOLICITATION, target_address)

    @classmethod
    def Neighbor_Advertisement(class_object, target_address):
        """Build a Neighbor Advertisement (type 136) for *target_address*."""
        return class_object.__build_neighbor_message(ICMP6.NEIGHBOR_ADVERTISEMENT, target_address)

    @classmethod
    def __build_neighbor_message(class_object, msg_type, target_address):
        """Assemble an ND message: header + 4 reserved/flag bytes + target address."""
        #Build ICMP6 header
        icmp_packet = ICMP6()
        icmp_packet.set_type(msg_type)
        icmp_packet.set_code(0)

        # Flags + Reserved
        icmp_bytes = array.array('B', [0x00] * 4).tostring()

        # Target Address: The IP address of the target of the solicitation.
        # It MUST NOT be a multicast address.
        icmp_bytes += array.array('B', IP6_Address(target_address).as_bytes()).tostring()

        icmp_payload = Data()
        icmp_payload.set_data(icmp_bytes)

        #Link payload to header
        icmp_packet.contains(icmp_payload)
        return icmp_packet

    ############################################################################

    def get_target_address(self):
        """Target Address field (ND payload bytes 4-19) as an IP6_Address."""
        return IP6_Address(self.child().get_bytes()[4:20])

    def set_target_address(self, target_address):
        address = IP6_Address(target_address)
        payload_bytes = self.child().get_bytes()
        payload_bytes[4:20] = address.get_bytes()
        self.child().set_bytes(payload_bytes)
    # Neighbor Advertisement flags byte layout:
    # 0 1 2 3 4 5 6 7
    # +-+-+-+-+-+-+-+-+
    # |R|S|O|reserved |
    # +-+-+-+-+-+-+-+-+
    def get_neighbor_advertisement_flags(self):
        """Raw R/S/O flags byte (payload offset 0) of a Neighbor Advertisement."""
        return self.child().get_byte(0)

    def set_neighbor_advertisement_flags(self, flags):
        self.child().set_byte(0, flags)

    def get_router_flag(self):
        """R flag (bit 0x80)."""
        return (self.get_neighbor_advertisement_flags() & 0x80) != 0

    def set_router_flag(self, flag_value):
        curr_flags = self.get_neighbor_advertisement_flags()
        if flag_value:
            curr_flags |= 0x80
        else:
            curr_flags &= ~0x80
        self.set_neighbor_advertisement_flags(curr_flags)

    def get_solicited_flag(self):
        """S flag (bit 0x40)."""
        return (self.get_neighbor_advertisement_flags() & 0x40) != 0

    def set_solicited_flag(self, flag_value):
        curr_flags = self.get_neighbor_advertisement_flags()
        if flag_value:
            curr_flags |= 0x40
        else:
            curr_flags &= ~0x40
        self.set_neighbor_advertisement_flags(curr_flags)

    def get_override_flag(self):
        """O flag (bit 0x20)."""
        return (self.get_neighbor_advertisement_flags() & 0x20) != 0

    def set_override_flag(self, flag_value):
        curr_flags = self.get_neighbor_advertisement_flags()
        if flag_value:
            curr_flags |= 0x20
        else:
            curr_flags &= ~0x20
        self.set_neighbor_advertisement_flags(curr_flags)
############################################################################
    @classmethod
    def Node_Information_Query(class_object, code, payload = None):
        """Build a Node Information Query (type 139) packet."""
        return class_object.__build_node_information_message(ICMP6.NODE_INFORMATION_QUERY, code, payload)

    @classmethod
    def Node_Information_Reply(class_object, code, payload = None):
        """Build a Node Information Reply (type 140) packet."""
        return class_object.__build_node_information_message(ICMP6.NODE_INFORMATION_REPLY, code, payload)

    @classmethod
    def __build_node_information_message(class_object, type, code, payload = None):
        """Assemble a NI message: header + qtype/flags/nonce (+ optional payload).

        qtype, flags and nonce start zeroed; callers adjust them afterwards
        through the accessors below.
        """
        #Build ICMP6 header
        icmp_packet = ICMP6()
        icmp_packet.set_type(type)
        icmp_packet.set_code(code)

        #Pack ICMP payload
        qtype = 0
        flags = 0
        nonce = [0x00] * 8
        icmp_bytes = struct.pack('>H', qtype)
        icmp_bytes += struct.pack('>H', flags)
        icmp_bytes += array.array('B', nonce).tostring()
        if payload is not None:
            icmp_bytes += array.array('B', payload).tostring()
        icmp_payload = Data()
        icmp_payload.set_data(icmp_bytes)

        #Link payload to header
        icmp_packet.contains(icmp_payload)
        return icmp_packet

    def get_qtype(self):
        """NI qtype field (16-bit word at payload offset 0)."""
        return self.child().get_word(0)

    def set_qtype(self, qtype):
        self.child().set_word(0, qtype)

    def get_nonce(self):
        """The 8-byte nonce (payload bytes 4-11)."""
        return self.child().get_bytes()[4:12]

    def set_nonce(self, nonce):
        payload_bytes = self.child().get_bytes()
        payload_bytes[4:12] = array.array('B', nonce)
        self.child().set_bytes(payload_bytes)
# 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5
# +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
# | unused |G|S|L|C|A|T|
# +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
def get_flags(self):
    # 16-bit NI flags word at body offset 2; bit layout per the diagram
    # above: | unused |G|S|L|C|A|T| (T is the least significant bit).
    return self.child().get_word(2)

def set_flags(self, flags):
    self.child().set_word(2, flags)

def get_flag_T(self):
    # T flag: bit 0x0001.
    return (self.get_flags() & 0x0001) != 0

def set_flag_T(self, flag_value):
    curr_flags = self.get_flags()
    if flag_value:
        curr_flags |= 0x0001
    else:
        curr_flags &= ~0x0001
    self.set_flags(curr_flags)

def get_flag_A(self):
    # A flag: bit 0x0002.
    return (self.get_flags() & 0x0002) != 0

def set_flag_A(self, flag_value):
    curr_flags = self.get_flags()
    if flag_value:
        curr_flags |= 0x0002
    else:
        curr_flags &= ~0x0002
    self.set_flags(curr_flags)

def get_flag_C(self):
    # C flag: bit 0x0004.
    return (self.get_flags() & 0x0004) != 0

def set_flag_C(self, flag_value):
    curr_flags = self.get_flags()
    if flag_value:
        curr_flags |= 0x0004
    else:
        curr_flags &= ~0x0004
    self.set_flags(curr_flags)

def get_flag_L(self):
    # L flag: bit 0x0008.
    return (self.get_flags() & 0x0008) != 0

def set_flag_L(self, flag_value):
    curr_flags = self.get_flags()
    if flag_value:
        curr_flags |= 0x0008
    else:
        curr_flags &= ~0x0008
    self.set_flags(curr_flags)

def get_flag_S(self):
    # S flag: bit 0x0010.
    return (self.get_flags() & 0x0010) != 0

def set_flag_S(self, flag_value):
    curr_flags = self.get_flags()
    if flag_value:
        curr_flags |= 0x0010
    else:
        curr_flags &= ~0x0010
    self.set_flags(curr_flags)

def get_flag_G(self):
    # G flag: bit 0x0020.
    return (self.get_flags() & 0x0020) != 0

def set_flag_G(self, flag_value):
    curr_flags = self.get_flags()
    if flag_value:
        curr_flags |= 0x0020
    else:
        curr_flags &= ~0x0020
    self.set_flags(curr_flags)

def set_node_information_data(self, data):
    # Variable-length NI data starts at body offset 12 (after the nonce).
    payload_bytes = self.child().get_bytes()
    payload_bytes[12:] = array.array('B', data)
    self.child().set_bytes(payload_bytes)

def get_note_information_data(self):
    # NOTE(review): "note" is a typo for "node", but the name is public
    # API and must be kept for backwards compatibility.
    return self.child().get_bytes()[12:]
############################################################################
def get_echo_id(self):
    # Echo Request/Reply: Identifier field (first 16-bit body word).
    return self.child().get_word(0)

def get_echo_sequence_number(self):
    # Echo Request/Reply: Sequence Number field (second 16-bit body word).
    return self.child().get_word(2)

def get_echo_arbitrary_data(self):
    # Echo Request/Reply: everything after the 4-byte id/sequence header.
    return self.child().get_bytes()[4:]

def get_mtu(self):
    # Packet Too Big: the MTU is the first 32-bit body field.
    return self.child().get_long(0)

def get_parm_problem_pointer(self):
    # Parameter Problem: pointer to the offending octet (first 32 bits).
    return self.child().get_long(0)

def get_originating_packet_data(self):
    # Error messages: copy of the invoking packet after the 4-byte field.
    return self.child().get_bytes()[4:]
| gpl-3.0 |
ESS-LLP/erpnext | erpnext/config/manufacturing.py | 5 | 3100 | from __future__ import unicode_literals
from frappe import _
def get_data():
    """Return the Manufacturing module's desk-page layout for frappe.

    Each top-level dict is one card (section) on the module page; its
    "items" are links to doctypes, query reports or help videos.  All
    user-visible labels are wrapped in frappe's ``_`` for translation.
    """
    return [
        # Core production documents.
        {
            "label": _("Production"),
            "icon": "fa fa-star",
            "items": [
                {
                    "type": "doctype",
                    "name": "Work Order",
                    "description": _("Orders released for production."),
                },
                {
                    "type": "doctype",
                    "name": "Production Plan",
                    "description": _("Generate Material Requests (MRP) and Work Orders."),
                },
                {
                    "type": "doctype",
                    "name": "Stock Entry",
                },
                {
                    "type": "doctype",
                    "name": "Timesheet",
                    "description": _("Time Sheet for manufacturing."),
                },
            ]
        },
        # BOM master data; the second BOM entry renders the same doctype
        # as a tree view ("link": "Tree/BOM").
        {
            "label": _("Bill of Materials"),
            "items": [
                {
                    "type": "doctype",
                    "name": "BOM",
                    "description": _("Bill of Materials (BOM)"),
                    "label": _("Bill of Materials")
                },
                {
                    "type": "doctype",
                    "name": "BOM",
                    "icon": "fa fa-sitemap",
                    "label": _("BOM Browser"),
                    "description": _("Tree of Bill of Materials"),
                    "link": "Tree/BOM",
                },
                {
                    "type": "doctype",
                    "name": "Item",
                    "description": _("All Products or Services."),
                },
                {
                    "type": "doctype",
                    "name": "Workstation",
                    "description": _("Where manufacturing operations are carried."),
                },
                {
                    "type": "doctype",
                    "name": "Operation",
                    "description": _("Details of the operations carried out."),
                },
            ]
        },
        {
            "label": _("Tools"),
            "icon": "fa fa-wrench",
            "items": [
                {
                    "type": "doctype",
                    "name": "BOM Update Tool",
                    "description": _("Replace BOM and update latest price in all BOMs"),
                },
            ]
        },
        {
            "label": _("Setup"),
            "items": [
                {
                    "type": "doctype",
                    "name": "Manufacturing Settings",
                    "description": _("Global settings for all manufacturing processes."),
                }
            ]
        },
        # Query reports, all backed by Work Order or BOM.
        {
            "label": _("Reports"),
            "icon": "fa fa-list",
            "items": [
                {
                    "type": "report",
                    "is_query_report": True,
                    "name": "Open Work Orders",
                    "doctype": "Work Order"
                },
                {
                    "type": "report",
                    "is_query_report": True,
                    "name": "Work Orders in Progress",
                    "doctype": "Work Order"
                },
                {
                    "type": "report",
                    "is_query_report": True,
                    "name": "Issued Items Against Work Order",
                    "doctype": "Work Order"
                },
                {
                    "type": "report",
                    "is_query_report": True,
                    "name": "Completed Work Orders",
                    "doctype": "Work Order"
                },
                {
                    "type": "report",
                    "is_query_report": True,
                    "name": "Production Analytics",
                    "doctype": "Work Order"
                },
                {
                    "type": "report",
                    "is_query_report": True,
                    "name": "BOM Search",
                    "doctype": "BOM"
                },
                {
                    "type": "report",
                    "is_query_report": True,
                    "name": "BOM Stock Report",
                    "doctype": "BOM"
                }
            ]
        },
        # Embedded help videos.
        {
            "label": _("Help"),
            "icon": "fa fa-facetime-video",
            "items": [
                {
                    "type": "help",
                    "label": _("Bill of Materials"),
                    "youtube_id": "hDV0c1OeWLo"
                },
                {
                    "type": "help",
                    "label": _("Work Order"),
                    "youtube_id": "ZotgLyp2YFY"
                },
            ]
        }
    ]
| gpl-3.0 |
weblyzard/ewrt | tests/access/test_http_retrieve.py | 1 | 3950 | #!/usr/bin/env python
''' @package eWRT.access.http
provides access to resources using http '''
from __future__ import print_function
# (C)opyrights 2008-2012 by Albert Weichselbraun <albert@weblyzard.com>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from future import standard_library
standard_library.install_aliases()
import unittest
import urllib.request, urllib.error, urllib.parse
from pytest import raises
from socket import timeout
from eWRT.access.http import DEFAULT_TIMEOUT, Retrieve, setdefaulttimeout, log
class TestHttpRetrieve(unittest.TestCase):
    ''' tests the http class

    NOTE(review): these tests contact live external web sites, so they
    require network access; failures may reflect connectivity or remote
    changes rather than regressions in eWRT itself.
    '''

    TEST_URLS = (
        'http://www.google.at/search?hl=de&q=andreas&btnG=Google-Suche&meta=',
        'http://www.heise.de')

    def setUp(self):
        from logging import StreamHandler
        # Remember the module default so tearDown can restore it after the
        # timeout test has altered the global socket timeout.
        self.default_timeout = DEFAULT_TIMEOUT
        # set logging handler
        log.addHandler(StreamHandler())

    def tearDown(self):
        setdefaulttimeout(self.default_timeout)

    def testRetrieval(self):
        ''' tries to retrieve the following url's from the list '''
        r_handler = Retrieve(self.__class__.__name__)
        for url in self.TEST_URLS:
            print(url)
            r = r_handler.open(url)
            r.read()
            r.close()

    def testRetrieveContext(self):
        ''' tests the retrieve context module '''
        with Retrieve(self.__class__.__name__) as r:
            c = r.open("http://www.heise.de")
            content = c.read()
        assert len(content) > 100

    def testRetrievalTimeout(self):
        ''' tests whether the socket timeout is honored by our class '''
        SLOW_URL = "http://www.csse.uwa.edu.au/"
        # Either a socket timeout or a wrapped URLError is acceptable.
        with raises((timeout, urllib.error.URLError)):
            r = Retrieve(self.__class__.__name__,
                         default_timeout=0.1).open(SLOW_URL)
            content = r.read()
            r.close()

    def testMultiProcessing(self):
        ''' verifies that retrieves works with multi-processing '''
        from multiprocessing import Pool
        p = Pool(5)
        TEST_URLS = ['http://www.heise.de',
                     'http://linuxtoday.com',
                     'http://www.kurier.at',
                     'http://www.diepresse.com',
                     'http://www.spiegel.de',
                     'http://www.sueddeutsche.de',
                     ]
        # t_retrieve must be a module-level function so it can be pickled.
        for res in p.map(t_retrieve, TEST_URLS):
            assert len(res) > 20

    def testGettingUserPassword(self):
        ''' verifies extraction of user:password credentials from URLs '''
        urls = (('http://irgendwas.com', None, None),
                ('http://heinz:secret@irgendwas.com', 'heinz', 'secret'))
        for test_url, exp_user, exp_passwd in urls:
            print('testing url ' + test_url)
            url, user, passwd = Retrieve.get_user_password(test_url)
            assert user == exp_user
            assert passwd == exp_passwd
            if user:
                # Credentials must have been stripped from the URL.
                assert url != test_url
def t_retrieve(url):
    ''' retrieves the given url from the web

    @remarks
    helper module for the testMultiProcessing unit test; it must stay at
    module level so that multiprocessing can pickle it.
    '''
    r = Retrieve(__name__).open(url)
    try:
        content = r.read()
    finally:
        # this is required as GzipFile does not support the context protocol
        # in python 2.6
        r.close()
    return content
# Allow running this test module directly.
if __name__ == '__main__':
    unittest.main()
| gpl-3.0 |
lightsofapollo/git-repo | subcmds/download.py | 53 | 3231 | #
# Copyright (C) 2008 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import print_function
import re
import sys
from command import Command
from error import GitError
CHANGE_RE = re.compile(r'^([1-9][0-9]*)(?:[/\.-]([1-9][0-9]*))?$')
class Download(Command):
    """``repo download``: fetch a Gerrit change into the local work tree."""
    common = True
    helpSummary = "Download and checkout a change"
    helpUsage = """
%prog {project change[/patchset]}...
"""
    helpDescription = """
The '%prog' command downloads a change from the review system and
makes it available in your project's local working directory.
"""

    def _Options(self, p):
        # The three modes are mutually alternative ways to integrate the
        # downloaded commit; the default (no option) is a plain checkout.
        p.add_option('-c', '--cherry-pick',
                     dest='cherrypick', action='store_true',
                     help="cherry-pick instead of checkout")
        p.add_option('-r', '--revert',
                     dest='revert', action='store_true',
                     help="revert instead of checkout")
        p.add_option('-f', '--ff-only',
                     dest='ffonly', action='store_true',
                     help="force fast-forward merge")

    def _ParseChangeIds(self, args):
        """Parse alternating project / change[/patchset] arguments.

        Returns a list of (project, change_id, patchset_id) tuples.  Any
        argument that is not a change number switches the current project
        context; the patchset defaults to 1.
        """
        if not args:
            self.Usage()
        to_get = []
        project = None
        for a in args:
            m = CHANGE_RE.match(a)
            if m:
                if not project:
                    # A change number before any project name is an error.
                    self.Usage()
                chg_id = int(m.group(1))
                if m.group(2):
                    ps_id = int(m.group(2))
                else:
                    ps_id = 1
                to_get.append((project, chg_id, ps_id))
            else:
                project = self.GetProjects([a])[0]
        return to_get

    def Execute(self, opt, args):
        """Download each requested patch set and integrate it per the options."""
        for project, change_id, ps_id in self._ParseChangeIds(args):
            dl = project.DownloadPatchSet(change_id, ps_id)
            if not dl:
                print('[%s] change %d/%d not found'
                      % (project.name, change_id, ps_id),
                      file=sys.stderr)
                sys.exit(1)
            if not opt.revert and not dl.commits:
                # Already merged: nothing to do unless a revert was asked for.
                print('[%s] change %d/%d has already been merged'
                      % (project.name, change_id, ps_id),
                      file=sys.stderr)
                continue
            if len(dl.commits) > 1:
                # Warn when the change drags in unmerged ancestors.
                print('[%s] %d/%d depends on %d unmerged changes:' \
                      % (project.name, change_id, ps_id, len(dl.commits)),
                      file=sys.stderr)
                for c in dl.commits:
                    print('  %s' % (c), file=sys.stderr)
            if opt.cherrypick:
                try:
                    project._CherryPick(dl.commit)
                except GitError:
                    print('[%s] Could not complete the cherry-pick of %s' \
                          % (project.name, dl.commit), file=sys.stderr)
                    sys.exit(1)
            elif opt.revert:
                project._Revert(dl.commit)
            elif opt.ffonly:
                project._FastForward(dl.commit, ffonly=True)
            else:
                project._Checkout(dl.commit)
| apache-2.0 |
gfrd/gfrd | test/FirstPassageGreensFunction1D_test.py | 1 | 4825 | #!/usr/bin/env python
__author__ = 'Laurens Bossen'
__copyright__ = ''
import unittest
import _gfrd as mod
import numpy
class FirstPassageGreensFunction1DTestCase( unittest.TestCase ):
    """Smoke tests for the 1D first-passage Green's function (_gfrd C module).

    Samples drawTime/drawEventType/drawR over regular, degenerate (L == 0)
    and near-degenerate domains; most checks only assert that sampled
    values stay inside physically meaningful bounds (0..L, finite times).
    """

    def setUp( self ):
        pass

    def tearDown( self ):
        pass

    def test_Instantiation( self ):
        D = 1e-12
        L = 2e-7
        gf = mod.FirstPassageGreensFunction1D( D )
        self.failIf( gf == None )
        gf.setL( L )

    def test_DrawTime( self ):
        # Regular domain: times drawn at various quantiles must be finite
        # and (except for quantile 0) strictly positive.
        D = 1e-12
        L = 2e-7
        r0 = 5e-8
        gf = mod.FirstPassageGreensFunction1D( D )
        gf.setL( L )
        gf.setr0 ( r0 )
        t = gf.drawTime( 0.5 )
        self.failIf( t <= 0.0 or t >= numpy.inf )
        t = gf.drawTime( 0.0 )
        self.failIf( t < 0.0 or t >= numpy.inf )
        t = gf.drawTime( 1e-16 )
        self.failIf( t <= 0.0 or t >= numpy.inf )
        t = gf.drawTime( 1 - 1e-16 )
        self.failIf( t <= 0.0 or t >= numpy.inf )

    def test_DrawTime_a_equal_sigma( self ):
        # Zero-width domain: first passage happens immediately.
        D = 1e-12
        L = 0
        r0 = L
        gf = mod.FirstPassageGreensFunction1D( D )
        gf.setL( L )
        gf.setr0 ( r0 )
        t = gf.drawTime( 0.5 )
        self.assertEqual( 0.0, t )

    def test_DrawTime_a_near_sigma( self ):
        # Extremely narrow (but non-zero) domain still yields a finite time.
        D = 1e-12
        L = 2e-14
        r0 = L/2
        gf = mod.FirstPassageGreensFunction1D( D )
        gf.setL( L )
        gf.setr0 ( r0 )
        t = gf.drawTime( 0.5 )
        self.failIf( t <= 0.0 or t >= numpy.inf )

    def test_DrawTime_r0_equal_a( self ):
        # Particle starting on the absorbing boundary leaves at t == 0.
        D = 1e-12
        L = 2e-7
        r0 = L
        gf = mod.FirstPassageGreensFunction1D( D )
        gf.setL( L )
        gf.setr0 ( r0 )
        t = gf.drawTime( 0.5 )
        self.assertEqual( 0.0, t )

    def test_DrawTime_r0_equal_sigma( self ):
        D = 1e-12
        L = 1e-7
        r0 = 0
        gf = mod.FirstPassageGreensFunction1D( D )
        gf.setL( L )
        gf.setr0 ( r0 )
        t = gf.drawTime( 0.5 )
        self.failIf( t < 0.0 or t >= numpy.inf )

    def test_DrawEventType( self ):
        # Event type must be one of the three defined outcomes; the extreme
        # quantiles pin the two boundary events.
        D = 1e-12
        L = 2e-7
        r0 = L/2
        gf = mod.FirstPassageGreensFunction1D( D )
        gf.setL( L )
        gf.setr0 ( r0 )
        t = gf.drawTime( 0.5 )
        eventType = gf.drawEventType( 0.5, t )
        self.failIf( eventType != 0 and eventType != 1 and eventType != 2 )
        eventType = gf.drawEventType( 0.0, t )
        self.assertEqual( eventType, 0 )
        eventType = gf.drawEventType( 0.999999, t )
        self.assertEqual( eventType, 1 )

    # NOTE(review): prefixed "no_" so unittest skips it -- presumably
    # disabled because of instability at small t; confirm before enabling.
    def no_test_DrawEventType_smallt( self ):
        D = 1e-12
        L = 2e-6
        r0 = L/2
        gf = mod.FirstPassageGreensFunction1D( D )
        gf.setL( L )
        gf.setr0 ( r0 )
        t = gf.drawTime( 0.001 )
        eventType = gf.drawEventType( 0.5, t )
        self.failIf( eventType != 0 and eventType != 1 and eventType != 2 )
        eventType = gf.drawEventType( 0.0, t )
        self.assertEqual( eventType, 0 )
        eventType = gf.drawEventType( 0.9999, t )
        self.assertEqual( eventType, 1 )

    def test_DrawR( self ):
        # Sampled positions must lie within [0, L]; the extreme quantiles
        # approach the two boundaries.
        D = 1e-12
        L = 2e-7
        r0 = L/2
        gf = mod.FirstPassageGreensFunction1D( D )
        gf.setL( L )
        gf.setr0 ( r0 )
        t = 1e-3
        r = gf.drawR( 0.5, t )
        self.failIf( r < 0 or r > L )
        r1 = gf.drawR( 0.0, t )
        r2 = gf.drawR( 0.999999999999, t )
        self.failIf( r1 != 0 )
        self.failIf( r2 < 0 or r2 > L )
        self.assertAlmostEqual( r1, 0 )
        self.assertAlmostEqual( r2, L )

    def test_DrawR_zerot( self ):
        # At t == 0 the particle has not moved from r0.
        D = 1e-12
        L = 1e-7
        r0 = L/2
        gf = mod.FirstPassageGreensFunction1D( D )
        gf.setL( L )
        gf.setr0 ( r0 )
        t = 0.0
        r = gf.drawR( 0.5, t )
        self.assertEqual( r0, r )

    def test_DrawR_r0_equal_sigma( self ):
        D = 1e-12
        L = 2e-7
        r0 = 0
        t = 0.0  # was 1e-3
        gf = mod.FirstPassageGreensFunction1D( D )
        gf.setL( L )
        gf.setr0 ( r0 )
        # This raises an exception, which at this point we cannot catch
        # r = gf.drawR( 0.5, t )
        # self.failIf( r < 0 or r > L )

    def test_DrawR_squeezed( self ):
        # Very narrow domain with r0 squeezed against either boundary.
        D = 1e-12
        L = 0.02e-8
        gf = mod.FirstPassageGreensFunction1D( D )
        gf.setL( L )
        t = 1e-6
        r0 = 0
        gf.setr0 ( r0 )
        # r = gf.drawR( 0.5, t )
        # self.failIf( r < 0 or r > L )

        # near s
        r0 = 0.0001e-8
        gf.setr0 ( r0 )
        r = gf.drawR( 0.5, t )
        self.failIf( r < 0 or r > L )

        # near a
        r0 = L - 0.0001e-8
        gf.setr0 ( r0 )
        r = gf.drawR( 0.5, t )
        self.failIf( r < 0 or r > L )
if __name__ == "__main__":
unittest.main()
| gpl-2.0 |
mrtns/libphonenumber-csharp | csharp/lib/copyres.py | 14 | 1217 | import os, re, shutil
def copygeocoding(source, prefix, dest):
entries = []
for root, dirs, files in os.walk(source):
for f in files:
if not re.search(r'^\d+\.txt$', f):
continue
country = f[:-4]
s = os.path.join(root, f)
lang = os.path.split(root)[-1]
fn = '%s%s_%s' % (prefix, country, lang)
t = os.path.join(dest, fn)
opts = ''
if os.path.exists(t):
datasrc = file(s, 'rb').read()
datadst = file(t, 'rb').read()
if datasrc == datadst:
continue
opts = ' --force '
print 'hg cp %s %s %s' % (opts, s, t)
if __name__ == '__main__':
    # Script lives two directories below the repository root.
    rootpath = os.path.join(os.path.dirname(__file__), '../../')
    dest = os.path.join(rootpath, 'csharp/PhoneNumbers/res')
    if not os.path.exists(dest):
        os.makedirs(dest)
    # Production and test geocoding trees, distinguished by file-name prefix.
    sources = [
        (os.path.join(rootpath, 'resources/geocoding'), 'prod_'),
        (os.path.join(rootpath, 'resources/test/geocoding'), 'test_'),
    ]
    for source, prefix in sources:
        copygeocoding(source, prefix, dest)
| apache-2.0 |
apagac/cfme_tests | sprout/appliances/models.py | 2 | 65154 | # -*- coding: utf-8 -*-
import base64
import re
import yaml
import pickle # NOQA
import wrapanapi
from wrapanapi import VmState, Openshift
from wrapanapi.exceptions import VMInstanceNotFound
from cached_property import cached_property
from celery import chain
from contextlib import contextmanager
from datetime import timedelta, date
from django.contrib.auth.models import User, Group as DjangoGroup
from django.core.exceptions import ObjectDoesNotExist
from django.db import models, transaction
from django.db.models import Q
from django.db.models.signals import pre_save
from django.dispatch import receiver
from django.utils import timezone
from json_field import JSONField
from cached_property import threaded_cached_property
from sprout import critical_section, redis
from sprout.log import create_logger
from cfme.utils.appliance import Appliance as CFMEAppliance, IPAppliance
from cfme.utils.bz import Bugzilla
from cfme.utils.conf import cfme_data
from cfme.utils.providers import get_mgmt
from cfme.utils.timeutil import nice_seconds
from cfme.utils.version import Version
# Monkey patch the User object in order to have nicer checks
def has_quotas(self):
    """Monkey-patched User helper: True when a related Quotas row exists."""
    try:
        self.quotas
    except ObjectDoesNotExist:
        return False
    return True
def is_a_bot(self):
    """Monkey-patched User helper: bots are marked by the last name 'bot'."""
    surname = self.last_name
    return surname.lower() == "bot"
# Attach the helpers above to Django's User model as read-only properties,
# so templates/views can write user.has_quotas / user.is_a_bot.
User.has_quotas = property(has_quotas)
User.is_a_bot = property(is_a_bot)
def apply_if_not_none(o, meth, *args, **kwargs):
    """Call method ``meth`` on ``o`` with the given arguments.

    Returns None when ``o`` itself is None (null-safe dispatch).
    """
    return None if o is None else getattr(o, meth)(*args, **kwargs)
class MetadataMixin(models.Model):
    """Abstract model base adding YAML metadata plus created/modified stamps.

    ``object_meta_data`` persists an arbitrary dict as YAML text; the
    ``metadata`` property (de)serializes it and ``edit_metadata`` offers a
    locked, transactional read-modify-write cycle over it.
    """
    class Meta:
        abstract = True

    object_meta_data = models.TextField(default=yaml.safe_dump({}))
    created_on = models.DateTimeField(default=timezone.now, editable=False)
    modified_on = models.DateTimeField(default=timezone.now)

    def save(self, *args, **kwargs):
        # Stamp creation time on first save; bump modified_on unless the
        # caller opted out with ignore_modified=True (popped, not forwarded).
        if not self.id:
            self.created_on = timezone.now()
        if not kwargs.pop('ignore_modified', False):
            self.modified_on = timezone.now()
        return super(MetadataMixin, self).save(*args, **kwargs)

    @property
    def age(self):
        """Time elapsed since the object was created."""
        return timezone.now() - self.created_on

    def reload(self):
        # Refresh this instance in place from the current database row.
        new_self = type(self).objects.get(pk=self.pk)
        self.__dict__.update(new_self.__dict__)

    @property
    @contextmanager
    def metadata_lock(self):
        # Cross-process critical section keyed by model class and pk.
        with critical_section("metadata-({})[{}]".format(type(self).__name__, str(self.pk))):
            yield

    @property
    def metadata(self):
        return yaml.safe_load(self.object_meta_data)

    @metadata.setter
    def metadata(self, value):
        if not isinstance(value, dict):
            raise TypeError("You can store only dict in metadata!")
        self.object_meta_data = yaml.safe_dump(value)

    @property
    @contextmanager
    def edit_metadata(self):
        # Locked read-modify-write: yields a fresh copy of the metadata dict
        # and persists whatever mutations the caller made to it, then
        # reloads this instance so it reflects the saved state.
        with transaction.atomic():
            with self.metadata_lock:
                o = type(self).objects.get(pk=self.pk)
                metadata = o.metadata
                yield metadata
                o.metadata = metadata
                o.save()
        self.reload()

    @property
    def logger(self):
        return create_logger(self)

    @classmethod
    def class_logger(cls, id=None):
        return create_logger(cls, id)
class DelayedProvisionTask(MetadataMixin):
    """Queued request to provision one appliance for a pool at a later time,
    optionally steering away from a provider that failed previously."""
    pool = models.ForeignKey("AppliancePool", on_delete=models.CASCADE)
    lease_time = models.IntegerField(null=True, blank=True)
    provider_to_avoid = models.ForeignKey(
        "Provider", null=True, blank=True, on_delete=models.CASCADE)

    def __unicode__(self):
        return "Task {}: Provision on {}, lease time {}, avoid provider {}".format(
            self.id, self.pool.id, self.lease_time,
            self.provider_to_avoid.id if self.provider_to_avoid is not None else "---")
class Provider(MetadataMixin):
    """One virtualization/cloud provider Sprout can deploy appliances on.

    Holds capacity bookkeeping (provisioning/configuring slots, memory and
    CPU usage) and per-user-group visibility.  The backend is reached via
    ``api`` — a wrapanapi management system built from stored metadata or
    from the static YAML entry keyed by ``id``.
    """
    id = models.CharField(max_length=32, primary_key=True, help_text="Provider's key in YAML.")
    working = models.BooleanField(default=False, help_text="Whether provider is available.")
    num_simultaneous_provisioning = models.IntegerField(default=5,
        help_text="How many simultaneous background provisioning tasks can run on this provider.")
    num_simultaneous_configuring = models.IntegerField(default=1,
        help_text="How many simultaneous template configuring tasks can run on this provider.")
    appliance_limit = models.IntegerField(
        null=True, help_text="Hard limit of how many appliances can run on this provider")
    disabled = models.BooleanField(default=False, help_text="We can disable providers if we want.")
    hidden = models.BooleanField(
        default=False, help_text='We can hide providers if that is required.')
    user_groups = models.ManyToManyField(
        DjangoGroup, blank=True,
        help_text='We can specify the providers that are tied to a specific user group.')
    allow_renaming = models.BooleanField(
        default=False, help_text="Whether this provider can rename appliances.")
    # NOTE(review): "tempalte" typo below is user-visible help_text; left
    # unchanged here because doc-only edits must not alter runtime strings.
    container_base_template = models.CharField(
        max_length=64,
        null=True, blank=True, help_text='Base tempalte for containerized ManageIQ deployment.')
    # Capacity statistics; refreshed from the backend by perf_sync().
    total_memory = models.IntegerField(null=True, blank=True, editable=False)
    total_cpu = models.IntegerField(null=True, blank=True, editable=False)
    used_memory = models.IntegerField(null=True, blank=True, editable=False)
    used_cpu = models.IntegerField(null=True, blank=True, editable=False)
    memory_limit = models.IntegerField(null=True, blank=True, editable=False)
    cpu_limit = models.IntegerField(null=True, blank=True, editable=False)
    custom_memory_limit = models.IntegerField(null=True, blank=True)
    custom_cpu_limit = models.IntegerField(null=True, blank=True)
    provider_type = models.CharField(max_length=16, null=True, blank=True)

    class Meta:
        ordering = ['id']

    def perf_sync(self):
        """Pull usage/quota numbers from the backend into this DB row."""
        try:
            stats = self.api.usage_and_quota()
            with transaction.atomic():
                provider = type(self).objects.get(pk=self.pk)
                provider.total_memory = stats['ram_total']
                provider.total_cpu = stats['cpu_total']
                provider.used_memory = stats['ram_used']
                provider.used_cpu = stats['cpu_used']
                provider.memory_limit = stats['ram_limit']
                provider.cpu_limit = stats['cpu_limit']
                provider.save()
        except NotImplementedError:
            # Backends without quota support are simply skipped.
            pass

    @property
    def is_working(self):
        return self.working and not self.disabled

    @property
    def existing_templates(self):
        return self.provider_templates.filter(exists=True)

    @property
    def api(self):
        # provider_data stored in metadata wins over the static YAML entry.
        provider_data = self.metadata.get('provider_data')
        if provider_data:
            return get_mgmt(provider_data)
        else:
            return get_mgmt(self.id)

    @property
    def num_currently_provisioning(self):
        # Appliances started but without an IP yet count as "provisioning".
        return len(
            Appliance.objects.filter(
                ready=False, marked_for_deletion=False, template__provider=self, ip_address=None))

    @property
    def num_templates_preparing(self):
        return len(Template.objects.filter(provider=self, ready=False))

    @property
    def remaining_configuring_slots(self):
        result = self.num_simultaneous_configuring - self.num_templates_preparing
        if result < 0:
            return 0
        return result

    @property
    def remaining_appliance_slots(self):
        # NOTE(review): returns 1 (not "unlimited") when no limit is set;
        # callers apparently only need a truthy "at least one free" signal.
        if self.appliance_limit is None:
            return 1
        result = self.appliance_limit - self.num_currently_managing
        if result < 0:
            return 0
        return result

    @property
    def num_currently_managing(self):
        return len(Appliance.objects.filter(template__provider=self))

    @property
    def currently_managed_appliances(self):
        return Appliance.objects.filter(template__provider=self)

    @property
    def remaining_provisioning_slots(self):
        """Free provisioning slots, capped by the overall appliance limit."""
        result = self.num_simultaneous_provisioning - self.num_currently_provisioning
        if result < 0:
            return 0
        # Take the appliance limit into account
        if self.appliance_limit is None:
            return result
        else:
            free_appl_slots = self.appliance_limit - self.num_currently_managing
            if free_appl_slots < 0:
                free_appl_slots = 0
            return min(free_appl_slots, result)

    @property
    def free(self):
        return self.remaining_provisioning_slots > 0

    @property
    def provisioning_load(self):
        if self.num_simultaneous_provisioning == 0:
            return 1.0  # prevent division by zero
        return float(self.num_currently_provisioning) / float(self.num_simultaneous_provisioning)

    @property
    def appliance_load(self):
        if self.appliance_limit is None or self.appliance_limit == 0:
            return 0.0
        return float(self.num_currently_managing) / float(self.appliance_limit)

    @property
    def load(self):
        """Load for sorting"""
        if self.appliance_limit is None:
            return self.provisioning_load
        else:
            return self.appliance_load

    @classmethod
    def get_available_provider_keys(cls):
        return list(cfme_data.get("management_systems", {}).keys())

    @classmethod
    def get_available_provider_types(cls, user=None):
        """Sorted provider types usable for Sprout, optionally restricted
        to providers the given user's groups may use."""
        types = set()
        for provider in cls.objects.all():
            if user is not None and not provider.user_can_use(user):
                continue
            provider_data = provider.provider_data
            if not provider_data:
                continue
            if not provider_data.get('use_for_sprout', False):
                continue
            if 'sprout' not in provider_data:
                continue
            provider_type = provider_data.get('type')
            if provider_type:
                types.add(provider_type)
        return sorted(types)

    @property
    def provider_data(self):
        # Metadata override first, then the YAML definition, then {}.
        data = self.metadata.get('provider_data')
        if data:
            return data
        else:
            return cfme_data.get("management_systems", {}).get(self.id, {})

    @property
    def ip_address(self):
        return self.provider_data.get("ipaddress")

    # The following properties persist their values inside the YAML
    # metadata blob rather than in dedicated columns.
    @property
    def templates(self):
        return self.metadata.get("templates", [])

    @templates.setter
    def templates(self, value):
        with self.edit_metadata as metadata:
            metadata["templates"] = value

    @property
    def template_name_length(self):
        return self.metadata.get("template_name_length")

    @template_name_length.setter
    def template_name_length(self, value):
        with self.edit_metadata as metadata:
            metadata["template_name_length"] = value

    @property
    def appliances_manage_this_provider(self):
        return self.metadata.get("appliances_manage_this_provider", [])

    @appliances_manage_this_provider.setter
    def appliances_manage_this_provider(self, value):
        with self.edit_metadata as metadata:
            metadata["appliances_manage_this_provider"] = value

    @property
    def g_appliances_manage_this_provider(self):
        # Generator form that silently skips ids of deleted appliances.
        for appl_id in self.appliances_manage_this_provider:
            try:
                yield Appliance.objects.get(id=appl_id)
            except ObjectDoesNotExist:
                continue

    @property
    def user_usage(self):
        """List of (owner, appliance_count) pairs, most appliances first."""
        per_user_usage = {}
        for appliance in Appliance.objects.filter(template__provider=self):
            if appliance.owner is None:
                continue
            owner = appliance.owner
            if owner not in per_user_usage:
                per_user_usage[owner] = 1
            else:
                per_user_usage[owner] += 1
        per_user_usage = list(per_user_usage.items())
        per_user_usage.sort(key=lambda item: item[1], reverse=True)
        return per_user_usage

    @property
    def free_shepherd_appliances(self):
        return Appliance.objects.filter(
            template__provider=self, appliance_pool=None, marked_for_deletion=False, ready=True)

    @classmethod
    def complete_user_usage(cls, user_perspective=None):
        """Aggregate user_usage across all non-hidden providers visible to
        ``user_perspective`` (staff/superusers see everything)."""
        result = {}
        if user_perspective is None or user_perspective.is_superuser or user_perspective.is_staff:
            perspective_filter = {}
        else:
            perspective_filter = {'user_groups__in': user_perspective.groups.all()}
        for provider in cls.objects.filter(hidden=False, **perspective_filter):
            for user, count in provider.user_usage:
                if user not in result:
                    result[user] = 0
                result[user] += count
        result = list(result.items())
        result.sort(key=lambda item: item[1], reverse=True)
        return result

    def cleanup(self):
        """Put any cleanup tasks that might help the application stability here"""
        self.logger.info("Running cleanup on provider {}".format(self.id))
        if isinstance(self.api, wrapanapi.systems.openstack.OpenstackSystem):
            # Openstack cleanup
            # Clean up the floating IPs
            for floating_ip in self.api.api.floating_ips.findall(fixed_ip=None):
                self.logger.info(
                    "Cleaning up the {} floating ip {}".format(self.id, floating_ip.ip))
                try:
                    floating_ip.delete()
                except Exception as e:
                    # Best-effort: log and keep cleaning the rest.
                    self.logger.exception(e)

    def vnc_console_link_for(self, appliance):
        """Dashboard console URL for the appliance; OpenStack only."""
        if appliance.uuid is None:
            return None
        if isinstance(self.api, wrapanapi.systems.openstack.OpenstackSystem):
            return "http://{}/dashboard/project/instances/{}/?tab=instance_details__console".format(
                self.ip_address, appliance.uuid
            )
        else:
            return None

    def user_can_use(self, user):
        # Membership in any one shared group grants access.
        groups = self.user_groups.all()
        return any(user_group in groups for user_group in user.groups.all())

    def user_can_see(self, user):
        return user.is_staff or user.is_superuser or self.user_can_use(user)

    def __unicode__(self):
        return "{} {}".format(type(self).__name__, self.id)
@receiver(pre_save, sender=Provider)
def disable_if_hidden(sender, instance, **kwargs):
    """pre_save hook: a hidden provider is always forced to be disabled."""
    if instance.hidden:
        instance.disabled = True
class Group(MetadataMixin):
    """A template stream (e.g. upstream, downstream-53z) grouping templates
    and governing when they become obsolete."""
    id = models.CharField(max_length=32, primary_key=True,
        help_text="Group name as trackerbot says. (eg. upstream, downstream-53z, ...)")
    template_obsolete_days = models.IntegerField(
        null=True, blank=True, help_text="Templates older than X days won't be loaded into sprout")
    template_obsolete_days_delete = models.BooleanField(
        default=False,
        help_text="If template_obsolete_days set, this will enable deletion of obsolete templates"
        " using that metric. WARNING! Use with care. Best use for upstream templates.")
    templates_url = models.TextField(
        blank=True, null=True, help_text='Location of templates. Currently used for containers.')

    class Meta:
        ordering = ['id']

    @property
    def obsolete_templates(self):
        """Return a list of obsolete templates. Ignores the latest one even if it was obsolete by
        the means of days."""
        if self.template_obsolete_days is None:
            return None
        # Preconfigured because we presume that if the preconfigured works, so does unconfigured one
        # NOTE(review): the [0] below raises IndexError when the group has
        # no ready preconfigured template — callers should guard for that.
        latest_working_template_date = Template.objects.filter(
            exists=True, usable=True, ready=True, preconfigured=True,
            template_group=self).order_by("-date")[0].date
        latest_working_template_ids = [
            tpl.id
            for tpl
            in Template.objects.filter(
                exists=True, usable=True, ready=True, template_group=self,
                date=latest_working_template_date)]
        return Template.objects.filter(
            exists=True, date__lt=date.today() - timedelta(days=self.template_obsolete_days),
            template_group=self).exclude(id__in=latest_working_template_ids).order_by("date")

    @property
    def templates(self):
        return Template.objects.filter(template_group=self).order_by("-date", "provider__id")

    @property
    def existing_templates(self):
        return self.templates.filter(exists=True)

    @property
    def appliances(self):
        return Appliance.objects.filter(template__template_group=self)

    @property
    def zstreams_versions(self):
        """Returns a dict with structure ``{zstream: [version1, version2, ...]``"""
        zstreams = {}
        for version in Template.get_versions(template_group=self, exists=True):
            # z-stream = first three version components ("5.9.0" of "5.9.0.15").
            zstream = ".".join(version.split(".")[:3])
            if zstream not in zstreams:
                zstreams[zstream] = []
            zstreams[zstream].append(version)
        return zstreams

    def pick_versions_to_delete(self):
        """For every z-stream keep the newest version; propose the rest."""
        to_delete = {}
        for zstream, versions in self.zstreams_versions.items():
            versions = sorted(versions, key=Version, reverse=True)
            versions_to_delete = versions[1:]
            if versions_to_delete:
                to_delete[zstream] = versions[1:]
        return to_delete

    def __unicode__(self):
        return "{} {}".format(
            type(self).__name__, self.id)
class GroupShepherd(MetadataMixin):
    """Per (template group, user group) shepherd configuration: how many
    ready appliances to keep pre-spun for quick taking."""
    template_group = models.ForeignKey(Group, on_delete=models.CASCADE)
    user_group = models.ForeignKey(DjangoGroup, on_delete=models.CASCADE)
    template_pool_size = models.IntegerField(default=0,
        help_text="How many appliances to keep spinned for quick taking.")
    unconfigured_template_pool_size = models.IntegerField(default=0,
        help_text="How many appliances to keep spinned for quick taking - unconfigured ones.")

    class Meta:
        ordering = ['template_group', 'user_group', 'id']

    @property
    def appliances(self):
        return Appliance.objects.filter(
            template__template_group=self.template_group,
            template__provider__user_groups=self.user_group)

    def get_fulfillment_percentage(self, preconfigured):
        """Return percentage of fulfillment of the group shepherd.

        Values between 0-100, can be over 100 if there are more than required.

        Args:
            preconfigured: Whether to check the pure ones or configured ones.
        """
        appliances_in_shepherd = len(
            self.appliances.filter(
                template__preconfigured=preconfigured, appliance_pool=None,
                marked_for_deletion=False))
        wanted_pool_size = (
            self.template_pool_size if preconfigured else self.unconfigured_template_pool_size)
        if wanted_pool_size == 0:
            # An empty target pool counts as fully satisfied.
            return 100
        return int(round((float(appliances_in_shepherd) / float(wanted_pool_size)) * 100.0))

    def shepherd_appliances(self, preconfigured=True):
        # Ready, unassigned, not-doomed appliances of the requested flavour.
        return self.appliances.filter(
            appliance_pool=None, ready=True, marked_for_deletion=False,
            template__preconfigured=preconfigured)

    @property
    def configured_shepherd_appliances(self):
        return self.shepherd_appliances(True)

    @property
    def unconfigured_shepherd_appliances(self):
        return self.shepherd_appliances(False)

    def __unicode__(self):
        return "{} {}/{} (pool size={}/{})".format(
            type(self).__name__, self.template_group.id, self.user_group.name,
            self.template_pool_size, self.unconfigured_template_pool_size)
class Template(MetadataMixin):
    """An appliance template (VM image, docker image or openshift pod template)
    residing on a provider, from which :class:`Appliance` objects are built."""

    # Template deployment flavours.
    VM = 'virtual_machine'
    DOCKER_VM = 'docker_vm'
    OPENSHIFT_POD = 'openshift_pod'
    TEMPLATE_TYPES = (
        (VM, 'Virtual Machine'),
        (DOCKER_VM, 'VM-based Docker container'),
        (OPENSHIFT_POD, 'Openshift pod'))
    DEFAULT_TEMPLATE_TYPE = TEMPLATE_TYPES[0][0]

    provider = models.ForeignKey(
        Provider, on_delete=models.CASCADE, help_text="Where does this template reside",
        related_name="provider_templates")
    template_group = models.ForeignKey(
        Group, on_delete=models.CASCADE, help_text="Which group the template belongs to.")
    version = models.CharField(max_length=32, null=True, help_text="Downstream version.")
    date = models.DateField(help_text="Template build date (original).")
    original_name = models.CharField(max_length=64, help_text="Template's original name.")
    name = models.CharField(max_length=64, help_text="Template's name as it resides on provider.")
    status = models.TextField(default="Template inserted into the system")
    status_changed = models.DateTimeField(auto_now_add=True)
    ready = models.BooleanField(default=False, help_text="Template is ready-to-be-used")
    exists = models.BooleanField(default=True, help_text="Template exists in the provider.")
    usable = models.BooleanField(default=False, help_text="Template is marked as usable")
    # BUGFIX: the default must be the ``dict`` callable, not a ``{}`` literal —
    # a mutable literal default is shared across all unsaved model instances
    # (Django documents this pitfall explicitly for JSONField).
    custom_data = JSONField(default=dict, help_text="Some Templates require additional data "
                                                    "for deployment")
    preconfigured = models.BooleanField(default=True, help_text="Is prepared for immediate use?")
    suggested_delete = models.BooleanField(
        default=False, help_text="Whether Sprout suggests deleting this template.")
    # BUGFIX: ``on_delete`` is mandatory on Django >= 2.0 (every other FK in this
    # module specifies it). CASCADE matches the implicit pre-2.0 default, so this
    # is behavior-neutral on older Django versions.
    parent_template = models.ForeignKey(
        "self", on_delete=models.CASCADE, blank=True, null=True,
        related_name="child_templates",
        help_text="What was source of this template?")
    container = models.CharField(
        max_length=32, null=True, blank=True,
        help_text=(
            'Whether the appliance is located in a container in the VM. '
            'This then specifies the container name.'))
    ga_released = models.BooleanField(default=False)
    template_type = models.CharField(max_length=16, choices=TEMPLATE_TYPES,
                                     default=DEFAULT_TEMPLATE_TYPE)

    class Meta:
        ordering = ['name', 'original_name', 'provider', 'id']

    @property
    def provider_api(self):
        """Wrapanapi-style API client of the backing provider."""
        return self.provider.api

    @property
    def provider_name(self):
        """Identifier of the backing provider."""
        return self.provider.id

    @threaded_cached_property
    def source_template_mgmt(self):
        """Provider-side object for the *original* template, or ``None`` if gone."""
        try:
            return self.provider_api.get_template(self.original_name)
        except VMInstanceNotFound:
            return None

    @threaded_cached_property
    def template_mgmt(self):
        """Provider-side object for the template under its Sprout name, or ``None``."""
        try:
            return self.provider_api.get_template(self.name)
        except VMInstanceNotFound:
            return None

    @threaded_cached_property
    def vm_mgmt(self):
        """Provider-side VM object with this template's name, or ``None`` if absent."""
        try:
            mgmt = self.provider_api.get_vm(self.name)
        except VMInstanceNotFound:
            return None
        else:
            return mgmt

    @property
    def exists_in_provider(self):
        """Whether the source template is still present on the provider."""
        # TODO: change after openshift wrapanapi refactor
        if isinstance(self.provider_api, Openshift):
            return self.name in self.provider_api.list_template()
        return self.source_template_mgmt.exists if self.source_template_mgmt else False

    @property
    def exists_and_ready(self):
        return self.exists and self.ready

    def user_can_use(self, user):
        # Delegates to the provider's group-based access control.
        return self.provider.user_can_use(user)

    def user_can_see(self, user):
        return self.provider.user_can_see(user)

    def set_status(self, status):
        """Persist a new status (with timestamp) atomically and log it."""
        with transaction.atomic():
            template = Template.objects.get(id=self.id)
            template.status = status
            template.status_changed = timezone.now()
            template.save()
        self.logger.info("{}: {}".format(self.pk, status))

    @property
    def possibly_bunk(self):
        """This property gives an indication on a possibly bad status of a template.

        It looks at the age of the template and the status to work it out.
        If it returns True, it means there may be a problem with setting this template up.
        If it returns False, it is most likely all right and no action is needed.
        """
        if 'creation failed' in self.status:
            return True
        if 'Could not properly' in self.status:
            return True
        # Anything still not ready after two hours is suspicious.
        if self.age > timedelta(hours=2):
            return True
        return False

    @property
    def cfme(self):
        """CFME appliance wrapper pointed at this template's VM."""
        return CFMEAppliance.from_provider(self.provider_name, self.name, container=self.container)

    @property
    def can_be_deleted(self):
        # Never delete GA-released templates or templates with live appliances.
        return self.exists and len(self.appliances) == 0 and not self.ga_released

    @property
    def appliances(self):
        """All appliances created from this template."""
        return Appliance.objects.filter(template=self)

    @property
    def temporary_name(self):
        """Scratch name stored in metadata during template preparation (or ``None``)."""
        return self.metadata.get("temporary_name")

    @temporary_name.setter
    def temporary_name(self, name):
        with self.edit_metadata as metadata:
            metadata["temporary_name"] = name

    @temporary_name.deleter
    def temporary_name(self):
        with self.edit_metadata as metadata:
            if "temporary_name" in metadata:
                del metadata["temporary_name"]

    @classmethod
    def get_versions(cls, *filters, **kwfilters):
        """Distinct non-null versions matching the filters, newest first."""
        versions = []
        for version in cls.objects\
                .filter(*filters, **kwfilters)\
                .values_list('version', flat=True)\
                .distinct()\
                .order_by():
            if version is not None:
                versions.append(version)
        versions.sort(key=Version, reverse=True)
        return versions

    @classmethod
    def get_dates(cls, *filters, **kwfilters):
        """Distinct build dates matching the filters, newest first."""
        dates = list(
            cls.objects
            .filter(*filters, **kwfilters)
            .values_list('date', flat=True)
            .distinct()
            .order_by())
        dates.sort(reverse=True)
        return dates

    @classmethod
    def ga_version(cls, version):
        """Whether any template of *version* has been GA-released."""
        return bool(cls.objects.filter(version=version, ga_released=True))

    def __unicode__(self):
        return "{} {}:{} @ {}".format(
            type(self).__name__, self.version, self.name, self.provider.id)
class Appliance(MetadataMixin):
    """A single appliance (VM, container VM or openshift pod) provisioned from a
    :class:`Template` on a provider.

    Tracks power state, lease times, networking data and pool membership.
    State transitions are persisted in short atomic transactions so that
    concurrent workers observe consistent rows.
    """

    class Meta:
        permissions = (('can_modify_hw', 'Can modify HW configuration'), )
        ordering = ['name', 'id']

    class Power(object):
        # Canonical power-state constants stored in ``power_state``.
        ON = "on"
        OFF = "off"
        SUSPENDED = "suspended"
        REBOOTING = "rebooting"
        LOCKED = "locked"
        UNKNOWN = "unknown"
        ORPHANED = "orphaned"
        CREATION_FAILED = 'creation_failed'
        CUSTOMIZATION_FAILED = 'customization_failed'
        ERROR = 'error'

    # UI glyph icon name for each canonical power state.
    POWER_ICON_MAPPING = {
        Power.ON: 'play',
        Power.OFF: 'stop',
        Power.SUSPENDED: 'pause',
        Power.REBOOTING: 'repeat',
        Power.LOCKED: 'lock',
        Power.UNKNOWN: 'exclamation-sign',
        Power.ORPHANED: 'exclamation-sign',
        Power.CREATION_FAILED: 'remove',
        Power.CUSTOMIZATION_FAILED: 'remove',
        Power.ERROR: 'remove',
    }

    # States in which an appliance is considered broken/unsalvageable (see kill()).
    BAD_POWER_STATES = {
        Power.UNKNOWN, Power.ORPHANED, Power.CREATION_FAILED, Power.CUSTOMIZATION_FAILED,
        Power.ERROR}

    # Translation of provider-specific power-state strings to canonical states.
    POWER_STATES_MAPPING = {
        # Common to vsphere + rhev
        "suspended": Power.SUSPENDED,
        # vSphere
        "poweredOn": Power.ON,
        "poweredOff": Power.OFF,
        # RHEV
        "up": Power.ON,
        "down": Power.OFF,
        "image_locked": Power.LOCKED,
        # Openstack
        "ACTIVE": Power.ON,
        "SHUTOFF": Power.OFF,
        "SUSPENDED": Power.SUSPENDED,
        "ERROR": Power.ERROR,
        # SCVMM
        "Running": Power.ON,
        "PoweredOff": Power.OFF,
        "Stopped": Power.OFF,
        "Paused": Power.SUSPENDED,
        "Saved State": Power.SUSPENDED,
        "Creation Failed": Power.CREATION_FAILED,
        "Customization Failed": Power.CUSTOMIZATION_FAILED,
        "Missing": Power.ORPHANED,  # When SCVMM says it is missing ...
        # EC2 (for VM manager)
        "stopped": Power.OFF,
        "running": Power.ON,
        # TODO: change after openshift wrapanapi refactor
        # Move everything to wrapanapi.VmState?
        VmState.RUNNING: Power.ON,
        VmState.STOPPED: Power.OFF,
        VmState.SUSPENDED: Power.SUSPENDED,
        VmState.ERROR: Power.ERROR,
        VmState.UNKNOWN: Power.UNKNOWN,
    }

    # Entering any of these states resets the swap counters (see set_power_state()).
    RESET_SWAP_STATES = {Power.OFF, Power.REBOOTING, Power.ORPHANED}

    # NOTE: field definitions are kept verbatim — changing them affects migrations.
    template = models.ForeignKey(
        Template, on_delete=models.CASCADE, help_text="Appliance's source template.")
    appliance_pool = models.ForeignKey("AppliancePool", null=True, on_delete=models.CASCADE,
        help_text="Which appliance pool this appliance belongs to.")
    name = models.CharField(max_length=64, help_text="Appliance's name as it is in the provider.")
    ip_address = models.CharField(max_length=64, null=True, help_text="Appliance's IP address")
    openshift_ext_ip = models.CharField(max_length=64, null=True,
        help_text="Openshift's project external ip")
    openshift_project = models.CharField(max_length=64, null=True,
        help_text="Openshift's project name")
    datetime_leased = models.DateTimeField(null=True, help_text="When the appliance was leased")
    leased_until = models.DateTimeField(null=True, help_text="When does the appliance lease expire")
    status = models.TextField(default="Appliance inserted into the system.")
    status_changed = models.DateTimeField(auto_now_add=True)
    power_state_changed = models.DateTimeField(default=timezone.now)
    marked_for_deletion = models.BooleanField(default=False,
        help_text="Appliance is already being deleted.")
    power_state = models.CharField(max_length=32, default="unknown",
        help_text="Appliance's power state")
    ready = models.BooleanField(default=False,
        help_text="Appliance has an IP address and web UI is online.")
    uuid = models.CharField(max_length=36, null=True, blank=True, help_text="UUID of the machine")
    description = models.TextField(blank=True)
    lun_disk_connected = models.BooleanField(
        default=False,
        help_text="Whether the Direct LUN disk is connected. (RHEV Only)")
    swap = models.IntegerField(
        help_text="How many MB is the appliance in swap.", null=True, blank=True)
    ssh_failed = models.BooleanField(default=False, help_text="If last swap check failed on SSH.")
    ram = models.IntegerField(null=True, blank=True)
    cpu = models.IntegerField(null=True, blank=True)

    def synchronize_metadata(self):
        """If possible, uploads some metadata to the provider VM object to be able to recover."""
        self._set_meta('id', self.id)
        self._set_meta('source_template_id', self.template.id)
        self._set_meta('created_on', apply_if_not_none(self.created_on, "isoformat"))
        self._set_meta('modified_on', apply_if_not_none(self.modified_on, "isoformat"))
        if self.appliance_pool is not None:
            # Mirror enough pool information to be able to reconstruct membership.
            self._set_meta('pool_id', self.appliance_pool.id)
            self._set_meta('pool_total_count', self.appliance_pool.total_count)
            self._set_meta('pool_group', self.appliance_pool.group.id)
            if self.appliance_pool.provider is not None:
                self._set_meta('pool_provider', self.appliance_pool.provider.id)
            self._set_meta('pool_version', self.appliance_pool.version)
            self._set_meta(
                'pool_appliance_date', apply_if_not_none(self.appliance_pool.date, "isoformat"))
            self._set_meta('pool_owner_id', self.appliance_pool.owner.id)
            self._set_meta('pool_owner_username', self.appliance_pool.owner.username)
            self._set_meta('pool_preconfigured', self.appliance_pool.preconfigured)
            self._set_meta('pool_description', self.appliance_pool.description)
            self._set_meta('pool_not_needed_anymore', self.appliance_pool.not_needed_anymore)
            self._set_meta('pool_finished', self.appliance_pool.finished)
            self._set_meta('pool_yum_update', self.appliance_pool.yum_update)
        self._set_meta('datetime_leased', apply_if_not_none(self.datetime_leased, "isoformat"))
        self._set_meta('leased_until', apply_if_not_none(self.leased_until, "isoformat"))
        self._set_meta('status_changed', apply_if_not_none(self.status_changed, "isoformat"))
        self._set_meta('ready', self.ready)
        self._set_meta('description', self.description)
        self._set_meta('lun_disk_connected', self.lun_disk_connected)
        self._set_meta('swap', self.swap)
        self._set_meta('ssh_failed', self.ssh_failed)

    def _set_meta(self, key, value):
        """Best-effort write of one ``sprout_<key>`` metadata entry on the provider VM.

        Silently does nothing for orphaned appliances or providers that do not
        support metadata.
        """
        if self.power_state == self.Power.ORPHANED:
            # The VM is gone from the provider; nothing to write to.
            return
        # TODO: change after openshift wrapanapi refactor
        if isinstance(self.provider_api, Openshift):
            try:
                self.provider_api.set_meta_value('sprout_{}'.format(key), value)
                self.logger.info('Set metadata {}: {}'.format(key, repr(value)))
            except NotImplementedError:
                pass
        else:
            try:
                # vm_mgmt may be None (AttributeError) if the VM vanished.
                self.vm_mgmt.set_meta_value('sprout_{}'.format(key), value)
                self.logger.info('Set metadata {}: {}'.format(key, repr(value)))
            except (AttributeError, NotImplementedError):
                pass

    def sync_hw(self):
        """Pull cpu/ram figures from the provider into the database row, if available."""
        # TODO: change after openshift wrapanapi refactor
        params = None
        if isinstance(self.provider_api, Openshift):
            params = self.provider_api.vm_hardware_configuration(self.name)
        else:
            try:
                params = self.vm_mgmt.get_hardware_configuration()
            except AttributeError:
                # Either vm_mgmt is None or the backend lacks the method.
                pass

        if params:
            with transaction.atomic():
                appliance = type(self).objects.get(pk=self.pk)
                appliance.cpu = params['cpu']
                appliance.ram = params['ram']
                appliance.save()

    @property
    def serialized(self):
        """JSON-friendly dict representation of the appliance for API consumers."""
        try:
            pool_id = self.appliance_pool.id
        except AttributeError:
            # Not assigned to any pool.
            pool_id = None
        return dict(
            id=self.id,
            pool_id=pool_id,
            ready=self.ready,
            name=self.name,
            ip_address=self.ip_address,
            status=self.status,
            power_state=self.power_state,
            description=self.description,
            status_changed=apply_if_not_none(self.status_changed, "isoformat"),
            datetime_leased=apply_if_not_none(self.datetime_leased, "isoformat"),
            leased_until=apply_if_not_none(self.leased_until, "isoformat"),
            template_name=self.template.original_name,
            template_id=self.template.id,
            provider=self.template.provider.id,
            marked_for_deletion=self.marked_for_deletion,
            uuid=self.uuid,
            template_version=self.template.version,
            template_build_date=self.template.date.isoformat(),
            template_group=self.template.template_group.id,
            template_sprout_name=self.template.name,
            preconfigured=self.preconfigured,
            lun_disk_connected=self.lun_disk_connected,
            container=self.template.container,
            ram=self.ram,
            cpu=self.cpu,
            created_on=apply_if_not_none(self.created_on, "isoformat"),
            modified_on=apply_if_not_none(self.modified_on, "isoformat"),
            project=self.openshift_project,
            db_host=self.openshift_ext_ip,
            url=self.url,
        )

    @property
    @contextmanager
    def kill_lock(self):
        """Critical section serializing kill-related operations on this appliance."""
        with critical_section("kill-({})[{}]".format(type(self).__name__, str(self.pk))):
            yield

    @threaded_cached_property
    def vm_mgmt(self):
        """Provider-side VM object for this appliance, or ``None`` if not found."""
        try:
            return self.template.provider_api.get_vm(self.name)
        except VMInstanceNotFound:
            return None

    @threaded_cached_property
    def template_mgmt(self):
        """Provider-side object of the source template (delegated)."""
        return self.template.template_mgmt

    # --- Delegations to the source template / its provider -------------------
    @property
    def provider_api(self):
        return self.template.provider_api

    @property
    def provider_name(self):
        return self.template.provider_name

    @property
    def provider(self):
        return self.template.provider

    @property
    def url(self):
        """Web UI URL derived from the IP address."""
        return "https://{}/".format(self.ip_address)

    @property
    def preconfigured(self):
        return self.template.preconfigured

    @property
    def is_openshift(self):
        return self.provider.provider_type == 'openshift'

    @property
    def app_args(self):
        """Keyword arguments used to construct CFME appliance wrappers."""
        kwargs = {
            'container': self.template.container,
            'hostname': self.ip_address,
        }
        if self.is_openshift:
            # Openshift appliances need extra routing/project info.
            ocp_kwargs = {
                'db_host': self.openshift_ext_ip,
                'project': self.openshift_project,
            }
            kwargs.update(ocp_kwargs)
        return kwargs

    @property
    def cfme(self):
        """Provider-bound CFME appliance wrapper."""
        kwargs = self.app_args
        kwargs.update({
            'provider_key': self.provider_name,
            'vm_name': self.name
        })
        return CFMEAppliance.from_provider(**kwargs)

    @property
    def ipapp(self):
        """IP-only appliance wrapper (no provider binding)."""
        return IPAppliance(**self.app_args)

    def user_can_use(self, user):
        return self.provider.user_can_use(user)

    def user_can_see(self, user):
        return self.provider.user_can_see(user)

    @property
    def visible_in_groups(self):
        return self.provider.user_groups.all()

    def is_visible_only_in_group(self, group):
        return len(self.visible_in_groups) == 1 and self.visible_in_groups[0] == group

    @property
    def containerized(self):
        return self.template.container is not None

    def set_status(self, status):
        """Persist a new status atomically; no-op (and no log) if unchanged."""
        with transaction.atomic():
            appliance = Appliance.objects.get(id=self.id)
            if status != appliance.status:
                appliance.status = status
                appliance.status_changed = timezone.now()
                appliance.save()
                self.logger.info("Status changed: {}".format(status))

    def set_power_state(self, power_state):
        """Update power state in memory (caller is responsible for saving).

        Entering a RESET_SWAP_STATES state clears the swap counters.
        """
        if power_state != self.power_state:
            self.logger.info("Changed power state to {}".format(power_state))
            self.power_state = power_state
            self.power_state_changed = timezone.now()
        if power_state in self.RESET_SWAP_STATES:
            # Reset some values
            self.swap = 0
            self.ssh_failed = False

    def __unicode__(self):
        return "{} {} @ {}".format(type(self).__name__, self.name, self.template.provider.id)

    @classmethod
    def unassigned(cls):
        """Ready shepherd appliances not attached to any pool and not dying."""
        return cls.objects.filter(appliance_pool=None, ready=True, marked_for_deletion=False)

    @classmethod
    def give_to_pool(cls, pool, custom_limit=None, cpu=None, ram=None):
        """Give appliances from shepherd to the pool where the maximum count is specified by pool
        or you can specify a custom limit

        Returns:
            Number of appliances actually handed over.
        """
        from appliances.tasks import (
            appliance_power_on, mark_appliance_ready, wait_appliance_ready, appliance_yum_update,
            appliance_reboot)
        limit = custom_limit if custom_limit is not None else pool.total_count
        appliances = []
        if limit <= 0:
            # Nothing to do
            return 0
        # Honour pool-level cpu/ram overrides when picking shepherd appliances.
        cpuram_filter = {}
        if pool.override_cpu is not None:
            cpuram_filter['cpu'] = pool.override_cpu
        if pool.override_memory is not None:
            cpuram_filter['ram'] = pool.override_memory
        with transaction.atomic():
            for template in pool.possible_templates:
                for appliance in cls.unassigned().filter(
                        template=template, **cpuram_filter).all()[:limit - len(appliances)]:
                    with appliance.kill_lock:
                        appliance.appliance_pool = pool
                        appliance.save(update_fields=['appliance_pool'])
                        appliance.set_lease_time()
                        appliance.set_status("Given to pool {}".format(pool.id))
                        # Build a celery chain to bring the appliance up.
                        tasks = [appliance_power_on.si(appliance.id)]
                        if pool.yum_update:
                            tasks.append(appliance_yum_update.si(appliance.id))
                            tasks.append(
                                appliance_reboot.si(appliance.id, if_needs_restarting=True))
                        if appliance.preconfigured:
                            tasks.append(wait_appliance_ready.si(appliance.id))
                        else:
                            tasks.append(mark_appliance_ready.si(appliance.id))
                        chain(*tasks)()
                        appliances.append(appliance)
                    # We have the break twice, to be sure. For each for loop.
                    if len(appliances) >= limit:
                        break
                if len(appliances) >= limit:
                    break
        return len(appliances)

    def set_lease_time(self, time_minutes=120):
        # sometimes appliances get lost w/o lease time.
        # so, we set default lease time to 2h on the off-chance
        self.datetime_leased = timezone.now()
        self.leased_until = self.datetime_leased + timedelta(minutes=time_minutes)
        self.save(update_fields=['datetime_leased', 'leased_until'])

    @classmethod
    def kill(cls, appliance_or_id, force_delete=False):
        # Completely delete appliance from provider
        from appliances.tasks import kill_appliance
        # Accept either an Appliance instance or a primary key.
        if isinstance(appliance_or_id, cls):
            self = Appliance.objects.get(id=appliance_or_id.id)
        else:
            self = Appliance.objects.get(id=appliance_or_id)
        with self.kill_lock:
            with transaction.atomic():
                # Re-read inside the transaction for a fresh row.
                self = type(self).objects.get(pk=self.pk)
                self.class_logger(self.pk).info("Killing")
                if not self.marked_for_deletion or force_delete:
                    self.marked_for_deletion = True
                    self.save()
                    return kill_appliance.delay(self.id)

    def delete(self, *args, **kwargs):
        # Intercept delete and lessen the number of appliances in the pool
        # Then if the appliance is still present in the management system, kill it
        self.logger.info("Deleting from database")
        pool = self.appliance_pool
        do_not_touch = kwargs.pop("do_not_touch_ap", False)
        result = super(Appliance, self).delete(*args, **kwargs)
        if pool is not None and not do_not_touch:
            if pool.current_count == 0:
                # Last appliance gone -> the pool has no reason to live.
                pool.delete()
        return result

    def prolong_lease(self, time=60):
        """Extend the lease to ``now + time`` minutes, persisted atomically."""
        self.logger.info("Prolonging lease by {} minutes from now.".format(time))
        with transaction.atomic():
            appliance = Appliance.objects.get(id=self.id)
            appliance.leased_until = timezone.now() + timedelta(minutes=time)
            appliance.save()

    @property
    def owner(self):
        """Owner of the containing pool, or ``None`` when unassigned."""
        if self.appliance_pool is None:
            return None
        else:
            return self.appliance_pool.owner

    @property
    def expires_in(self):
        """Human-readable time until lease expiry ("never", "Expired!" or a duration)."""
        if self.leased_until is None:
            return "never"
        seconds = (self.leased_until - timezone.now()).total_seconds()
        if seconds <= 0.0:
            return "Expired!"
        else:
            return nice_seconds(seconds)

    # --- Allowed power transitions, used by the UI ---------------------------
    @property
    def can_launch(self):
        return self.power_state in {self.Power.OFF, self.Power.SUSPENDED}

    @property
    def can_reboot(self):
        return self.power_state in {self.Power.ON}

    @property
    def can_suspend(self):
        return self.power_state in {self.Power.ON}

    @property
    def can_stop(self):
        return self.power_state in {self.Power.ON}

    @property
    def has_uuid(self):
        return self.uuid is not None

    @property
    def has_uuid_angular(self):
        # JavaScript-literal form for templates consumed by Angular.
        return "true" if self.has_uuid else "false"

    @property
    def version(self):
        """Template version, or the placeholder "---" when unknown."""
        if self.template.version is None:
            return "---"
        else:
            return self.template.version

    @property
    def managed_providers(self):
        """Providers this appliance manages, stored in metadata (defaults to [])."""
        return self.metadata.get("managed_providers", [])

    @managed_providers.setter
    def managed_providers(self, value):
        with self.edit_metadata as metadata:
            metadata["managed_providers"] = value

    @property
    def vnc_link(self):
        try:
            return self.provider.vnc_console_link_for(self)
        except KeyError:  # provider does not exist any more
            return None
class AppliancePool(MetadataMixin):
    """A user's request for a number of appliances with given constraints
    (group, version/date, provider, preconfiguration, hardware overrides).

    Fulfillment happens asynchronously through celery tasks; the pool tracks
    progress and lifecycle (merge, clone, kill).
    """
    # NOTE: field definitions are kept verbatim — changing them affects migrations.
    total_count = models.IntegerField(help_text="How many appliances should be in this pool.")
    group = models.ForeignKey(
        Group, on_delete=models.CASCADE, help_text="Group which is used to provision appliances.")
    provider = models.ForeignKey(
        Provider, help_text="If requested, appliances can be on single provider.", null=True,
        blank=True, on_delete=models.CASCADE)
    version = models.CharField(max_length=32, null=True, help_text="Appliance version")
    date = models.DateField(null=True, help_text="Appliance date.")
    owner = models.ForeignKey(
        User, on_delete=models.CASCADE, help_text="User who owns the appliance pool")
    preconfigured = models.BooleanField(
        default=True, help_text="Whether to provision preconfigured appliances")
    description = models.TextField(blank=True)
    not_needed_anymore = models.BooleanField(
        default=False, help_text="Used for marking the appliance pool as being deleted")
    finished = models.BooleanField(default=False, help_text="Whether fulfillment has been met.")
    yum_update = models.BooleanField(default=False, help_text="Whether to update appliances.")
    override_memory = models.IntegerField(null=True, blank=True)
    override_cpu = models.IntegerField(null=True, blank=True)
    provider_type = models.CharField(max_length=32, null=True, blank=True)
    template_type = models.CharField(max_length=16, choices=Template.TEMPLATE_TYPES,
                                     default=Template.DEFAULT_TEMPLATE_TYPE)

    class Meta:
        ordering = ['id']

    def merge(self, source_pool):
        """Absorb all appliances from *source_pool* into this pool.

        Both pools must be finished, alive and have identical provisioning
        parameters. The source pool is deleted afterwards.

        Returns:
            This pool (with ``total_count`` increased accordingly).
        """
        if not self.finished:
            raise Exception('Provisioning of the target pool has not finished yet.')
        if not source_pool.finished:
            raise Exception('Provisioning of the source pool has not finished yet.')
        if self.not_needed_anymore:
            raise Exception('Target pool is being deleted.')
        if source_pool.not_needed_anymore:
            raise Exception('Source pool is being deleted.')
        if self.group != source_pool.group:
            raise ValueError('The groups of the pools differ')
        if self.provider != source_pool.provider:
            raise ValueError('The provider of the pools differ')
        if self.version != source_pool.version:
            raise ValueError('The version of the pools differ')
        if self.date != source_pool.date:
            raise ValueError('The date of the pools differ')
        if self.preconfigured != source_pool.preconfigured:
            raise ValueError('The preconfigured of the pools differ')
        if self.yum_update != source_pool.yum_update:
            raise ValueError('The yum_update of the pools differ')
        if self.override_memory != source_pool.override_memory:
            raise ValueError('The override_memory of the pools differ')
        if self.override_cpu != source_pool.override_cpu:
            raise ValueError('The override_cpu of the pools differ')
        with transaction.atomic():
            for appliance in source_pool.appliances:
                appliance.appliance_pool = self
                appliance.save()
                self.total_count += 1
            self.save()
            source_pool.delete()
        return self

    def clone(self, num_appliances=None, time_leased=60, owner=None):
        """Create a new pool with the same parameters as this one."""
        return self.create(
            owner or self.owner,
            self.group,
            version=self.version,
            date=self.date,
            provider=self.provider,
            num_appliances=self.total_count if num_appliances is None else num_appliances,
            time_leased=time_leased,
            preconfigured=self.preconfigured,
            yum_update=self.yum_update,
            ram=self.override_memory,
            cpu=self.override_cpu,
            provider_type=self.provider_type,
            template_type=self.template_type)

    @classmethod
    def create(cls, owner, group, version=None, date=None, provider=None, num_appliances=1,
               time_leased=60, preconfigured=True, yum_update=False, ram=None, cpu=None,
               provider_type=None, template_type=Template.DEFAULT_TEMPLATE_TYPE):
        """Create a pool, enforcing the owner's quotas, resolving defaults for
        version/date, and kicking off asynchronous provisioning.

        Raises:
            ValueError: when a quota would be exceeded.
            Exception: when no suitable template/provider combination exists or
                contradictory provider arguments were given.
        """
        # Quota enforcement (pool count, total VM count, per-pool VM count).
        if owner.has_quotas:
            user_pools_count = cls.objects.filter(owner=owner).count()
            user_vms_count = Appliance.objects.filter(appliance_pool__owner=owner).count()
            if owner.quotas.total_pool_quota is not None:
                if owner.quotas.total_pool_quota <= user_pools_count:
                    raise ValueError(
                        "User has too many pools ({} allowed, {} already existing)".format(
                            owner.quotas.total_pool_quota, user_pools_count))
            if owner.quotas.total_vm_quota is not None:
                if owner.quotas.total_vm_quota < (user_vms_count + num_appliances):
                    raise ValueError(
                        "Requested {} appliances, limit is {} and currently user has {}".format(
                            num_appliances, owner.quotas.total_vm_quota, user_vms_count))
            if owner.quotas.per_pool_quota is not None:
                if num_appliances > owner.quotas.per_pool_quota:
                    raise ValueError("You are limited to {} VMs per pool, requested {}".format(
                        owner.quotas.per_pool_quota, num_appliances))
        # Restrict templates to providers visible to the owner's groups.
        user_filter = {'provider__user_groups__in': owner.groups.all()}
        from appliances.tasks import request_appliance_pool
        # Retrieve latest possible
        if not version:
            versions = Template.get_versions(template_group=group, ready=True, usable=True,
                                             exists=True, preconfigured=preconfigured,
                                             provider__working=True, provider__disabled=False,
                                             template_type=template_type, **user_filter)
            if versions:
                version = versions[0]
        if not date:
            # Pick the newest build date (constrained by version if known).
            if version is not None:
                dates = Template.get_dates(template_group=group, version=version, ready=True,
                                           usable=True, exists=True, preconfigured=preconfigured,
                                           provider__working=True, provider__disabled=False,
                                           template_type=template_type, **user_filter)
            else:
                dates = Template.get_dates(template_group=group, ready=True, usable=True,
                                           exists=True, preconfigured=preconfigured,
                                           provider__working=True, provider__disabled=False,
                                           template_type=template_type, **user_filter)
            if dates:
                date = dates[0]
        # Coerce string identifiers into model instances.
        if isinstance(group, str):
            group = Group.objects.get(id=group)
        if isinstance(provider_type, str):
            if provider_type not in Provider.get_available_provider_types(owner):
                raise Exception('There are no providers for type {!r}'.format(provider_type))
        if isinstance(provider, str):
            provider = Provider.objects.get(id=provider, working=True, disabled=False)
            if provider_type is not None and provider.provider_type != provider_type:
                raise Exception(
                    'You used contradicting parameters: provider={!r}, provider_type={!r}'.format(
                        provider.id, provider_type))
        if not (version or date):
            raise Exception(
                "Could not find proper combination of group, date, version and a working provider!")
        if provider and not provider.user_can_use(owner):
            raise Exception(
                'The user does not have the right to use provider {}'.format(provider.id))
        req_params = dict(
            group=group, version=version, date=date, total_count=num_appliances, owner=owner,
            provider=provider, preconfigured=preconfigured, yum_update=yum_update,
            override_memory=ram, override_cpu=cpu, provider_type=provider_type,
            template_type=template_type)
        if num_appliances == 0:
            # An empty pool is trivially fulfilled.
            req_params['finished'] = True
        req = cls(**req_params)
        if not req.possible_templates:
            raise Exception("No possible templates! (pool params: {})".format(str(req_params)))
        req.save()
        cls.class_logger(req.pk).info("Created")
        if num_appliances > 0:
            # Only if we have any appliances to request
            request_appliance_pool.delay(req.id, time_leased)
        return req

    def delete(self, *args, **kwargs):
        """Delete the pool together with its queued provisioning tasks."""
        self.logger.info("Deleting")
        with transaction.atomic():
            for task in DelayedProvisionTask.objects.filter(pool=self):
                task.delete()
            return super(AppliancePool, self).delete(*args, **kwargs)

    @property
    def filter_params(self):
        """Template queryset filters derived from this pool's constraints."""
        filter_params = {
            "template_group": self.group,
            "preconfigured": self.preconfigured,
            'provider__user_groups__in': self.owner.groups.all(),
            'provider__working': True,
            'provider__disabled': False,
            'template_type': self.template_type
        }
        if self.version is not None:
            filter_params["version"] = self.version
        if self.date is not None:
            filter_params["date"] = self.date
        if self.provider is not None:
            filter_params["provider"] = self.provider
        return filter_params

    @property
    def appliance_filter_params(self):
        """Same as :attr:`filter_params`, re-keyed for Appliance querysets
        (``template__``-prefixed)."""
        params = self.filter_params
        result = {}
        for key, value in params.items():
            result["template__{}".format(key)] = value
        return result

    @property
    def possible_templates(self):
        """Ready+usable templates matching the pool constraints (and provider_type)."""
        q = Template.objects.filter(ready=True, exists=True, usable=True,
            **self.filter_params).select_related('provider').distinct().order_by()
        if self.provider_type is None:
            return list(q)
        else:
            # provider_type is not a Template field, so filter in Python.
            return [t for t in q if t.provider.provider_type == self.provider_type]

    @property
    def possible_provisioning_templates(self):
        """Templates on providers with free capacity, best candidates first."""
        return sorted(
            [tpl for tpl in self.possible_templates if tpl.provider.free],
            # Sort by date and load to pick the best match (least loaded provider)
            key=lambda tpl: (tpl.date, 1.0 - tpl.provider.appliance_load), reverse=True)

    @property
    def possible_providers(self):
        """Which providers contain a template that could be used for provisioning?."""
        return set(tpl.provider for tpl in self.possible_templates if tpl.provider.is_working)

    @property
    def appliances(self):
        """All appliances currently belonging to this pool, ordered by id."""
        return Appliance.objects\
            .filter(appliance_pool=self)\
            .select_related('template__provider')\
            .order_by("id")

    @property
    def single_or_none_appliance(self):
        return self.appliances.count() <= 1

    @property
    def current_count(self):
        return len(self.appliances)

    @property
    def percent_finished(self):
        """Provisioning progress in [0.0, 1.0]; each appliance contributes up to
        4 points (known state, powered on, has IP, ready)."""
        if self.total_count is None:
            return 0.0
        total = 4 * self.total_count
        if total == 0:
            return 1.0
        finished = 0
        for appliance in self.appliances:
            if appliance.power_state not in {Appliance.Power.UNKNOWN, Appliance.Power.ORPHANED}:
                finished += 1
            if appliance.power_state == Appliance.Power.ON:
                finished += 1
            if appliance.ip_address is not None:
                finished += 1
            if appliance.ready:
                finished += 1
        return float(finished) / float(total)

    @property
    def appliance_ips(self):
        return [ap.ip_address for ap in [a for a in self.appliances if a.ip_address is not None]]

    @property
    def fulfilled(self):
        """Whether the requested number of ready appliances (with IPs) exists."""
        try:
            return (len(self.appliance_ips) == self.total_count and
                    all(a.ready for a in self.appliances))
        except ObjectDoesNotExist:
            return False

    @property
    def broken_with_no_appliances(self):
        # Pools older than two hours with nothing provisioned are considered stuck.
        return self.age >= timedelta(hours=2) and self.current_count == 0

    @property
    def queued_provision_tasks(self):
        return DelayedProvisionTask.objects.filter(pool=self).order_by("id")

    def prolong_lease(self, time=60):
        """Extend the lease of every appliance in the pool."""
        self.logger.info("Initiated lease prolonging by {} minutes".format(time))
        for appliance in self.appliances:
            appliance.prolong_lease(time=time)

    def kill(self):
        """Tear the pool down: salvage untouched appliances back to the
        shepherd (when provisioning never finished), kill the rest, and delete
        the pool once it is empty."""
        with transaction.atomic():
            p = type(self).objects.get(pk=self.pk)
            p.not_needed_anymore = True
            p.save()
        # Appliances may only be salvaged from pools that never got fulfilled.
        save_lives = not self.finished
        self.logger.info("Killing")
        if self.appliances:
            for appliance in self.appliances:
                kill = False
                with transaction.atomic():
                    with appliance.kill_lock:
                        if (
                                save_lives and
                                not appliance.ready and
                                not appliance.marked_for_deletion and
                                not appliance.managed_providers and
                                appliance.power_state not in appliance.BAD_POWER_STATES):
                            # Only save appliances that are guaranteed to be untouched
                            appliance.appliance_pool = None
                            appliance.datetime_leased = None
                            appliance.save()
                            self.total_count -= 1
                            if self.total_count < 0:
                                self.total_count = 0  # Protection against stupidity
                            self.save()
                            appliance.set_status(
                                "The appliance was taken out of dying pool {}".format(self.id))
                        else:
                            kill = True
                if kill:  # Because Appliance.kill uses kill_lock too
                    Appliance.kill(appliance)
            if self.current_count == 0:
                # Pool is empty, no point of keeping it alive.
                # This is needed when deleting a pool that has appliances that can be salvaged.
                # They are not deleted. the .delete() method on appliances takes care that when the
                # last appliance in pool is deleted, it deletes the pool. But since we don't delete
                # in the case of salvaging them, we do have to do it manually here.
                self.delete()
        else:
            # No appliances, so just delete it
            self.delete()

    @property
    def possible_other_owners(self):
        """Returns a list of User objects that can own this pool instead of original owner"""
        if self.provider is not None:
            providers = {self.provider}
        else:
            providers = {appliance.template.provider for appliance in self.appliances}
        possible_groups = set()
        for provider in providers:
            for group in provider.user_groups.all():
                possible_groups.add(group)
        # Keep only groups common to ALL involved providers.
        common_groups = set()
        for group in possible_groups:
            if all(group in provider.user_groups.all() for provider in providers):
                common_groups.add(group)
        return User.objects\
            .filter(groups__in=common_groups, is_active=True)\
            .exclude(pk=self.owner.pk)\
            .distinct()\
            .order_by("last_name", "first_name", 'username')

    @property
    def num_delayed_provisioning_tasks(self):
        return len(self.queued_provision_tasks)

    @property
    def num_provisioning_tasks_before(self):
        """Number of queued tasks (across all pools) ahead of this pool's first task."""
        tasks = self.queued_provision_tasks
        if len(tasks) == 0:
            return 0
        latest_id = tasks[0].id
        return len(DelayedProvisionTask.objects.filter(id__lt=latest_id))

    @property
    def num_possible_provisioning_slots(self):
        """Free provisioning slots summed over providers with usable templates."""
        providers = set([])
        for template in self.possible_provisioning_templates:
            providers.add(template.provider)
        slots = 0
        for provider in providers:
            slots += provider.remaining_provisioning_slots
        return slots

    @property
    def num_possible_appliance_slots(self):
        """Free appliance slots summed over providers with usable templates."""
        providers = set([])
        for template in self.possible_templates:
            providers.add(template.provider)
        slots = 0
        for provider in providers:
            slots += provider.remaining_appliance_slots
        return slots

    @property
    def num_shepherd_appliances(self):
        """How many free shepherd appliances already match this pool's constraints."""
        return len(
            Appliance.objects.filter(appliance_pool=None,
                                     **self.appliance_filter_params).distinct())

    def __repr__(self):
        return "<AppliancePool id: {}, group: {}, total_count: {}>".format(
            self.id, self.group.id, self.total_count)

    def __unicode__(self):
        return "AppliancePool id: {}, group: {}, total_count: {}".format(
            self.id, self.group.id, self.total_count)
class MismatchVersionMailer(models.Model):
    """Record of a template whose detected version differs from the expected one.

    Used to drive a notification e-mail; ``sent`` flags whether the mail for
    this mismatch has already gone out.
    """
    provider = models.ForeignKey(Provider, on_delete=models.CASCADE)
    template_name = models.CharField(max_length=64)
    supposed_version = models.CharField(max_length=32)
    actual_version = models.CharField(max_length=32)
    sent = models.BooleanField(default=False)
class UserApplianceQuota(models.Model):
    """Per-user appliance quotas; a null field means "no limit" for that quota."""
    user = models.OneToOneField(User, related_name="quotas", on_delete=models.CASCADE)
    per_pool_quota = models.IntegerField(null=True, blank=True)
    total_pool_quota = models.IntegerField(null=True, blank=True)
    total_vm_quota = models.IntegerField(null=True, blank=True)
class BugQuery(models.Model):
    """A saved Bugzilla query URL whose results are cached in redis.

    A query with ``owner=None`` is global (visible to every user); otherwise
    it is private to its owner.  The URL may embed an ``{{EMAIL}}``
    placeholder that gets replaced with the requesting user's e-mail.
    """
    # Placeholder substituted with the user's e-mail in parametrized queries.
    EMAIL_PLACEHOLDER = re.compile(r'\{\{EMAIL\}\}')
    # Lifetime of cached query results in redis, in seconds.
    CACHE_TIMEOUT = 180
    name = models.CharField(max_length=64)
    url = models.TextField()
    owner = models.ForeignKey(User, on_delete=models.CASCADE, null=True, blank=True)
    @property
    def is_global(self):
        """True when the query has no owner and is therefore visible to everyone."""
        return self.owner is None
    @property
    def is_parametrized(self):
        """True when the URL contains the ``{{EMAIL}}`` placeholder."""
        return self.EMAIL_PLACEHOLDER.search(self.url) is not None
    @cached_property
    def bugzilla(self):
        # Returns the original bugzilla object
        return Bugzilla.from_config().bugzilla
    def query_for_user(self, user):
        """Build the Bugzilla query object for *user*.

        Returns None for a parametrized query when the user has no e-mail
        address set (the query cannot be filled in).
        """
        if self.is_parametrized:
            if not user.email:
                return None
            url = self.EMAIL_PLACEHOLDER.sub(user.email, self.url)
        else:
            url = self.url
        return self.bugzilla.url_to_query(url)
    def list_bugs(self, user):
        """Run the query for *user*, returning a list of plain-dict bug records.

        Results are cached per (query, user) for CACHE_TIMEOUT seconds.
        NOTE(review): the cache round-trips through pickle, which is only
        safe while the redis instance is trusted/internal — confirm.
        """
        cache_id = 'bq-{}-{}'.format(self.id, user.id)
        cached = redis.get(cache_id)
        if cached is not None:
            return pickle.loads(base64.b64decode(cached))
        query = self.query_for_user(user)
        if query is None:
            result = []
        else:
            def process_bug(bug):
                # Flatten the bugzilla bug object into a picklable dict.
                return {
                    'id': bug.id,
                    'weburl': bug.weburl,
                    'summary': bug.summary,
                    'severity': bug.severity,
                    'status': bug.status,
                    'component': bug.component,
                    'version': bug.version,
                    'fixed_in': bug.fixed_in,
                    'whiteboard': bug.whiteboard,
                    'flags': ['{}{}'.format(flag['name'], flag['status']) for flag in bug.flags],
                }
            result = [process_bug(bug) for bug in self.bugzilla.query(query)]
        redis.set(cache_id, base64.b64encode(pickle.dumps(result)), ex=self.CACHE_TIMEOUT)
        return result
    @classmethod
    def visible_for_user(cls, user):
        """Global queries plus the user's own, skipping parametrized ones the
        user cannot run (no e-mail address)."""
        return [
            bq for bq in
            cls.objects.filter(Q(owner=None) | Q(owner=user)).order_by('owner', 'id')
            if not (bq.is_parametrized and not user.email)]
| gpl-2.0 |
ShivaShinde/gspread | gspread/client.py | 1 | 21604 | # -*- coding: utf-8 -*-
"""
gspread.client
~~~~~~~~~~~~~~
This module contains Client class responsible for communicating with
Google Data API.
"""
import re
<<<<<<< 46798d67c38d2cf8e1c751b684897cdc98598205
<<<<<<< a69cd84f789e21aa91b9c488abd3dc4ac39c8361
import json
try:
import xml.etree.cElementTree as ElementTree
except:
from xml.etree import ElementTree
=======
<<<<<<< HEAD
>>>>>>> Update README.md
=======
import warnings
<<<<<<< HEAD
=======
try:
from urllib import urlencode
except ImportError:
from urllib.parse import urlencode
>>>>>>> # This is a combination of 2 commits.
<<<<<<< 46798d67c38d2cf8e1c751b684897cdc98598205
=======
>>>>>>> Update README.md
>>>>>>> Update README.md
from xml.etree import ElementTree
>>>>>>> # This is a combination of 2 commits.
from . import __version__
from . import urlencode
from .ns import _ns
from .httpsession import HTTPSession, HTTPError
from .models import Spreadsheet
from .urls import construct_url
from .utils import finditem
from .exceptions import (SpreadsheetNotFound, NoValidUrlKeyFound,
UpdateCellError, RequestError)
AUTH_SERVER = 'https://www.google.com'
SPREADSHEETS_SERVER = 'spreadsheets.google.com'
_url_key_re_v1 = re.compile(r'key=([^&#]+)')
_url_key_re_v2 = re.compile(r'spreadsheets/d/([^&#]+)/edit')
class Client(object):
    """An instance of this class communicates with Google Data API.

    NOTE(review): this class contained unresolved git merge conflict markers.
    It has been resolved in favour of the OAuth2-only code path (Google shut
    down the competing ClientLogin protocol in 2015).

    :param auth: An OAuth2 credential object. Credential objects are those created by the
                 oauth2client library. https://github.com/google/oauth2client
    :param http_session: (optional) A session object capable of making HTTP requests while persisting headers.
                         Defaults to :class:`~gspread.httpsession.HTTPSession`.

    >>> c = gspread.Client(auth=OAuthCredentialObject)

    """
    def __init__(self, auth, http_session=None):
        self.auth = auth
        self.session = http_session or HTTPSession()

    def _ensure_xml_header(self, data):
        """Prepend an XML declaration to *data* (bytes) unless one is present."""
        if data.startswith(b'<?xml'):
            return data
        else:
            return b'<?xml version="1.0" encoding="utf8"?>' + data

    def login(self):
        """Authorize client, refreshing the OAuth2 access token when missing/expired."""
        if not self.auth.access_token or \
                (hasattr(self.auth, 'access_token_expired') and self.auth.access_token_expired):
            import httplib2

            http = httplib2.Http()
            self.auth.refresh(http)

        self.session.add_header('Authorization', "Bearer " + self.auth.access_token)

    def open(self, title):
        """Opens a spreadsheet.

        :param title: A title of a spreadsheet.

        :returns: a :class:`~gspread.Spreadsheet` instance.

        If there's more than one spreadsheet with same title the first one
        will be opened.

        :raises gspread.SpreadsheetNotFound: if no spreadsheet with
                                             specified `title` is found.

        >>> c = gspread.authorize(credentials)
        >>> c.open('My fancy spreadsheet')

        """
        feed = self.get_spreadsheets_feed()
        for elem in feed.findall(_ns('entry')):
            elem_title = elem.find(_ns('title')).text
            if elem_title.strip() == title:
                return Spreadsheet(self, elem)
        else:
            raise SpreadsheetNotFound

    def open_by_key(self, key):
        """Opens a spreadsheet specified by `key`.

        :param key: A key of a spreadsheet as it appears in a URL in a browser.

        :returns: a :class:`~gspread.Spreadsheet` instance.

        :raises gspread.SpreadsheetNotFound: if no spreadsheet with
                                             specified `key` is found.

        >>> c = gspread.authorize(credentials)
        >>> c.open_by_key('0BmgG6nO_6dprdS1MN3d3MkdPa142WFRrdnRRUWl1UFE')

        """
        feed = self.get_spreadsheets_feed()
        for elem in feed.findall(_ns('entry')):
            alter_link = finditem(lambda x: x.get('rel') == 'alternate',
                                  elem.findall(_ns('link')))
            # Try both URL generations for the key.
            m = _url_key_re_v1.search(alter_link.get('href'))
            if m and m.group(1) == key:
                return Spreadsheet(self, elem)
            m = _url_key_re_v2.search(alter_link.get('href'))
            if m and m.group(1) == key:
                return Spreadsheet(self, elem)
        else:
            raise SpreadsheetNotFound

    def open_by_url(self, url):
        """Opens a spreadsheet specified by `url`.

        :param url: URL of a spreadsheet as it appears in a browser.

        :returns: a :class:`~gspread.Spreadsheet` instance.

        :raises gspread.SpreadsheetNotFound: if no spreadsheet with
                                             specified `url` is found.

        >>> c = gspread.authorize(credentials)
        >>> c.open_by_url('https://docs.google.com/spreadsheet/ccc?key=0Bm...FE&hl')

        """
        m1 = _url_key_re_v1.search(url)
        if m1:
            return self.open_by_key(m1.group(1))

        else:
            m2 = _url_key_re_v2.search(url)
            if m2:
                return self.open_by_key(m2.group(1))

            else:
                raise NoValidUrlKeyFound

    def openall(self, title=None):
        """Opens all available spreadsheets.

        :param title: (optional) If specified can be used to filter
                      spreadsheets by title.

        :returns: a list of :class:`~gspread.Spreadsheet` instances.

        """
        feed = self.get_spreadsheets_feed()
        result = []
        for elem in feed.findall(_ns('entry')):
            if title is not None:
                elem_title = elem.find(_ns('title')).text
                if elem_title.strip() != title:
                    continue
            result.append(Spreadsheet(self, elem))

        return result

    def get_spreadsheets_feed(self, visibility='private', projection='full'):
        url = construct_url('spreadsheets',
                            visibility=visibility, projection=projection)

        r = self.session.get(url)
        return ElementTree.fromstring(r.content)

    def get_worksheets_feed(self, spreadsheet,
                            visibility='private', projection='full'):
        url = construct_url('worksheets', spreadsheet,
                            visibility=visibility, projection=projection)

        r = self.session.get(url)
        return ElementTree.fromstring(r.content)

    def get_cells_feed(self, worksheet,
                       visibility='private', projection='full', params=None):

        url = construct_url('cells', worksheet,
                            visibility=visibility, projection=projection)

        if params:
            params = urlencode(params)
            url = '%s?%s' % (url, params)

        r = self.session.get(url)
        return ElementTree.fromstring(r.content)

    def get_feed(self, url):
        r = self.session.get(url)
        return ElementTree.fromstring(r.content)

    def del_worksheet(self, worksheet):
        url = construct_url(
            'worksheet', worksheet, 'private', 'full', worksheet_version=worksheet.version)

        r = self.session.delete(url)
        # Even though there is nothing interesting in the response body
        # we have to read it or the next request from this session will get a
        # httplib.ResponseNotReady error.
        r.read()

    def get_cells_cell_id_feed(self, worksheet, cell_id,
                               visibility='private', projection='full'):
        url = construct_url('cells_cell_id', worksheet, cell_id=cell_id,
                            visibility=visibility, projection=projection)

        r = self.session.get(url)
        return ElementTree.fromstring(r.content)

    def put_feed(self, url, data):
        headers = {'Content-Type': 'application/atom+xml',
                   'If-Match': '*'}
        data = self._ensure_xml_header(data)

        try:
            r = self.session.put(url, data, headers=headers)
        except HTTPError as ex:
            # A 403 here means the cell could not be updated.
            if getattr(ex, 'code', None) == 403:
                raise UpdateCellError(ex.message)
            else:
                raise
        return ElementTree.fromstring(r.content)

    def post_feed(self, url, data):
        headers = {'Content-Type': 'application/atom+xml'}
        data = self._ensure_xml_header(data)

        try:
            r = self.session.post(url, data, headers=headers)
        except HTTPError as ex:
            raise RequestError(ex.message)

        return ElementTree.fromstring(r.content)

    def post_cells(self, worksheet, data):
        headers = {'Content-Type': 'application/atom+xml',
                   'If-Match': '*'}
        data = self._ensure_xml_header(data)
        url = construct_url('cells_batch', worksheet)

        r = self.session.post(url, data, headers=headers)
        return ElementTree.fromstring(r.content)

    def create(self, title):
        """Creates a new spreadsheet.

        :param title: A title of a new spreadsheet.

        :returns: a :class:`~gspread.Spreadsheet` instance.

        .. note::

           In order to use this method, you need to add
           ``https://www.googleapis.com/auth/drive`` to your oAuth scope.

           Example::

              scope = [
                  'https://spreadsheets.google.com/feeds',
                  'https://www.googleapis.com/auth/drive'
              ]

           Otherwise you will get an ``Insufficient Permission`` error
           when you try to create a new spreadsheet.

        """
        create_url = 'https://www.googleapis.com/drive/v2/files'
        headers = {'Content-Type': 'application/json'}
        data = {
            'title': title,
            'mimeType': 'application/vnd.google-apps.spreadsheet'
        }
        r = self.session.post(create_url, json.dumps(data), headers=headers)
        spreadsheet_id = r.json()['id']
        return self.open_by_key(spreadsheet_id)
def authorize(credentials):
    """Login to Google API using OAuth2 credentials.

    This is a shortcut function which instantiates :class:`Client`
    and performs login right away.

    :returns: :class:`Client` instance.

    """
    # NOTE(review): this region contained unresolved merge conflict markers
    # and a duplicated definition of ``authorize`` plus a deprecated
    # ClientLogin ``login(email, password)`` helper; resolved to the single
    # OAuth2 entry point (ClientLogin was shut down by Google in 2015).
    client = Client(auth=credentials)
    client.login()
    return client
| mit |
ClimbsRocks/scikit-learn | examples/ensemble/plot_forest_importances_faces.py | 403 | 1519 | """
=================================================
Pixel importances with a parallel forest of trees
=================================================
This example shows the use of forests of trees to evaluate the importance
of the pixels in an image classification task (faces). The hotter the pixel,
the more important.
The code below also illustrates how the construction and the computation
of the predictions can be parallelized within multiple jobs.
"""
print(__doc__)
from time import time
import matplotlib.pyplot as plt
from sklearn.datasets import fetch_olivetti_faces
from sklearn.ensemble import ExtraTreesClassifier
# Number of cores to use to perform parallel fitting of the forest model
n_jobs = 1
# Load the faces dataset
data = fetch_olivetti_faces()
X = data.images.reshape((len(data.images), -1))
y = data.target
mask = y < 5 # Limit to 5 classes
X = X[mask]
y = y[mask]
# Build a forest and compute the pixel importances
print("Fitting ExtraTreesClassifier on faces data with %d cores..." % n_jobs)
t0 = time()
forest = ExtraTreesClassifier(n_estimators=1000,
max_features=128,
n_jobs=n_jobs,
random_state=0)
forest.fit(X, y)
print("done in %0.3fs" % (time() - t0))
importances = forest.feature_importances_
importances = importances.reshape(data.images[0].shape)
# Plot pixel importances
plt.matshow(importances, cmap=plt.cm.hot)
plt.title("Pixel importances with forests of trees")
plt.show()
| bsd-3-clause |
minhphung171093/GreenERP_V7 | openerp/tools/amount_to_text_en.py | 441 | 5103 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2009 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import logging
from translate import _
_logger = logging.getLogger(__name__)
#-------------------------------------------------------------
#ENGLISH
#-------------------------------------------------------------
# Words for the values 0-19, indexed directly by value.
to_19 = ( 'Zero', 'One', 'Two', 'Three', 'Four', 'Five', 'Six',
          'Seven', 'Eight', 'Nine', 'Ten', 'Eleven', 'Twelve', 'Thirteen',
          'Fourteen', 'Fifteen', 'Sixteen', 'Seventeen', 'Eighteen', 'Nineteen' )
# Multiples of ten from 20 to 90; index i corresponds to (i + 2) * 10.
tens = ( 'Twenty', 'Thirty', 'Forty', 'Fifty', 'Sixty', 'Seventy', 'Eighty', 'Ninety')
# Scale words: denom[i] names 1000**i (empty string for the units group).
denom = ( '',
          'Thousand', 'Million', 'Billion', 'Trillion', 'Quadrillion',
          'Quintillion', 'Sextillion', 'Septillion', 'Octillion', 'Nonillion',
          'Decillion', 'Undecillion', 'Duodecillion', 'Tredecillion', 'Quattuordecillion',
          'Sexdecillion', 'Septendecillion', 'Octodecillion', 'Novemdecillion', 'Vigintillion' )
def _convert_nn(val):
    """convert a value < 100 to English.
    """
    if val < 20:
        return to_19[val]
    # tens[0] is 'Twenty', so the tens word for val lives at (val // 10) - 2.
    tens_word = tens[(val // 10) - 2]
    units = val % 10
    if units:
        return tens_word + '-' + to_19[units]
    return tens_word
def _convert_nnn(val):
    """
    convert a value < 1000 to english, special cased because it is the level that kicks
    off the < 100 special case.  The rest are more general.  This also allows you to
    get strings in the form of 'forty-five hundred' if called directly.
    """
    hundreds, below_hundred = divmod(val, 100)
    parts = []
    if hundreds:
        parts.append(to_19[hundreds] + ' Hundred')
    if below_hundred:
        parts.append(_convert_nn(below_hundred))
    return ' '.join(parts)
def english_number(val):
    """Spell out a non-negative integer in English words."""
    if val < 100:
        return _convert_nn(val)
    if val < 1000:
        return _convert_nnn(val)
    # Find the largest scale (Thousand, Million, ...) not exceeding val.
    for power in range(1, len(denom) - 1):
        if val < 1000 ** (power + 1):
            scale = 1000 ** power
            leading, remainder = divmod(val, scale)
            spelled = _convert_nnn(leading) + ' ' + denom[power]
            if remainder:
                spelled = spelled + ', ' + english_number(remainder)
            return spelled
def amount_to_text(number, currency):
    """Convert a monetary amount to English words, e.g.
    ``1654.25, 'euro'`` -> ``'One Thousand, Six Hundred Fifty-Four euro and
    Twenty-Five Cents'``.
    """
    number = '%.2f' % number
    units_name = currency
    # Renamed from ``list`` to avoid shadowing the builtin.
    integer_part, cents_part = str(number).split('.')
    start_word = english_number(int(integer_part))
    end_word = english_number(int(cents_part))
    cents_number = int(cents_part)
    # Bug fix: the old ``(cents_number > 1) and 'Cents' or 'Cent'`` idiom
    # produced the singular 'Cent' for zero cents; plural applies to
    # everything except exactly one.
    cents_name = 'Cent' if cents_number == 1 else 'Cents'
    return ' '.join(filter(None, [start_word, units_name, (start_word or units_name) and (end_word or cents_name) and 'and', end_word, cents_name]))
#-------------------------------------------------------------
# Generic functions
#-------------------------------------------------------------

# Maps a language code to its amount-to-text implementation.  Note that this
# captures the English function above *before* the generic wrapper below
# reuses (and shadows) the name ``amount_to_text``.
_translate_funcs = {'en' : amount_to_text}

#TODO: we should use the country AND language (ex: septante VS soixante dix)
#TODO: we should use en by default, but the translation func is yet to be implemented
def amount_to_text(nbr, lang='en', currency='euro'):
    """ Converts an integer to its textual representation, using the language set in the context if any.

        Example::
            1654: thousands six cent cinquante-quatre.
    """
    # NOTE(review): dropped the unused ``import openerp.loglevels`` which only
    # served the commented-out size guard below.
#    if nbr > 10000000:
#        _logger.warning(_("Number too large '%d', can not translate it"))
#        return str(nbr)

    # ``dict.has_key`` was removed in Python 3; ``in`` works on both 2 and 3.
    if lang not in _translate_funcs:
        _logger.warning(_("no translation function found for lang: '%s'"), lang)
        #TODO: (default should be en) same as above
        lang = 'en'
    return _translate_funcs[lang](abs(nbr), currency)
if __name__=='__main__':
from sys import argv
lang = 'nl'
if len(argv) < 2:
for i in range(1,200):
print i, ">>", int_to_text(i, lang)
for i in range(200,999999,139):
print i, ">>", int_to_text(i, lang)
else:
print int_to_text(int(argv[1]), lang)
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
carljm/ansible-modules-extras | system/puppet.py | 27 | 6730 | #!/usr/bin/python
# Copyright (c) 2015 Hewlett-Packard Development Company, L.P.
#
# This module is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This software is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this software. If not, see <http://www.gnu.org/licenses/>.
import json
import os
import pipes
import stat
DOCUMENTATION = '''
---
module: puppet
short_description: Runs puppet
description:
- Runs I(puppet) agent or apply in a reliable manner
version_added: "2.0"
options:
timeout:
description:
- How long to wait for I(puppet) to finish.
required: false
default: 30m
puppetmaster:
description:
- The hostname of the puppetmaster to contact.
required: false
default: None
manifest:
desciption:
- Path to the manifest file to run puppet apply on.
required: false
default: None
show_diff:
description:
- Should puppet return diffs of changes applied. Defaults to off to avoid leaking secret changes by default.
required: false
default: no
choices: [ "yes", "no" ]
facts:
description:
- A dict of values to pass in as persistent external facter facts
required: false
default: None
facter_basename:
desciption:
- Basename of the facter output file
required: false
default: ansible
environment:
desciption:
- Puppet environment to be used.
required: false
default: None
requirements: [ puppet ]
author: "Monty Taylor (@emonty)"
'''
# Usage examples rendered by ``ansible-doc puppet``.
EXAMPLES = '''
# Run puppet agent and fail if anything goes wrong
- puppet
# Run puppet and timeout in 5 minutes
- puppet: timeout=5m
# Run puppet using a different environment
- puppet: environment=testing
'''
def _get_facter_dir():
if os.getuid() == 0:
return '/etc/facter/facts.d'
else:
return os.path.expanduser('~/.facter/facts.d')
def _write_structured_data(basedir, basename, data):
if not os.path.exists(basedir):
os.makedirs(basedir)
file_path = os.path.join(basedir, "{0}.json".format(basename))
# This is more complex than you might normally expect because we want to
# open the file with only u+rw set. Also, we use the stat constants
# because ansible still supports python 2.4 and the octal syntax changed
out_file = os.fdopen(
os.open(
file_path, os.O_CREAT | os.O_WRONLY,
stat.S_IRUSR | stat.S_IWUSR), 'w')
out_file.write(json.dumps(data).encode('utf8'))
out_file.close()
def main():
    """Entry point: run ``puppet agent`` or ``puppet apply`` and map puppet's
    detailed exit codes onto Ansible results."""
    module = AnsibleModule(
        argument_spec=dict(
            timeout=dict(default="30m"),
            puppetmaster=dict(required=False, default=None),
            manifest=dict(required=False, default=None),
            show_diff=dict(
                default=False, aliases=['show-diff'], type='bool'),
            facts=dict(default=None),
            facter_basename=dict(default='ansible'),
            environment=dict(required=False, default=None),
        ),
        supports_check_mode=True,
        mutually_exclusive=[
            ('puppetmaster', 'manifest'),
        ],
    )
    p = module.params

    global PUPPET_CMD
    PUPPET_CMD = module.get_bin_path("puppet", False)

    if not PUPPET_CMD:
        module.fail_json(
            msg="Could not find puppet. Please ensure it is installed.")

    if p['manifest']:
        if not os.path.exists(p['manifest']):
            module.fail_json(
                msg="Manifest file %(manifest)s not found." % dict(
                    manifest=p['manifest']))

    # Check if puppet is disabled here
    if not p['manifest']:
        rc, stdout, stderr = module.run_command(
            PUPPET_CMD + " config print agent_disabled_lockfile")
        if os.path.exists(stdout.strip()):
            module.fail_json(
                msg="Puppet agent is administratively disabled.", disabled=True)
        elif rc != 0:
            module.fail_json(
                msg="Puppet agent state could not be determined.")

    if module.params['facts'] and not module.check_mode:
        _write_structured_data(
            _get_facter_dir(),
            module.params['facter_basename'],
            module.params['facts'])

    # ``timeout -s 9`` hard-kills puppet if it exceeds the allotted time.
    base_cmd = "timeout -s 9 %(timeout)s %(puppet_cmd)s" % dict(
        timeout=pipes.quote(p['timeout']), puppet_cmd=PUPPET_CMD)

    if not p['manifest']:
        cmd = ("%(base_cmd)s agent --onetime"
               " --ignorecache --no-daemonize --no-usecacheonfailure --no-splay"
               " --detailed-exitcodes --verbose") % dict(
                   base_cmd=base_cmd,
                   )
        if p['puppetmaster']:
            # Bug fix: was " -- server %s", which passed 'server' and the host
            # as stray positional arguments instead of the --server option.
            cmd += " --server %s" % pipes.quote(p['puppetmaster'])
        if p['show_diff']:
            cmd += " --show-diff"
        if p['environment']:
            cmd += " --environment '%s'" % p['environment']
        if module.check_mode:
            cmd += " --noop"
        else:
            cmd += " --no-noop"
    else:
        cmd = "%s apply --detailed-exitcodes " % base_cmd
        if p['environment']:
            cmd += "--environment '%s' " % p['environment']
        if module.check_mode:
            cmd += "--noop "
        else:
            cmd += "--no-noop "
        cmd += pipes.quote(p['manifest'])
    rc, stdout, stderr = module.run_command(cmd)

    # --detailed-exitcodes: 0 = no changes, 2 = changes applied,
    # 1 = failure or agent disabled, 124 comes from the timeout wrapper.
    if rc == 0:
        # success
        module.exit_json(rc=rc, changed=False, stdout=stdout)
    elif rc == 1:
        # rc==1 could be because it's disabled
        # rc==1 could also mean there was a compilation failure
        disabled = "administratively disabled" in stdout
        if disabled:
            msg = "puppet is disabled"
        else:
            msg = "puppet did not run"
        module.exit_json(
            rc=rc, disabled=disabled, msg=msg,
            error=True, stdout=stdout, stderr=stderr)
    elif rc == 2:
        # success with changes
        module.exit_json(rc=0, changed=True)
    elif rc == 124:
        # timeout
        module.exit_json(
            rc=rc, msg="%s timed out" % cmd, stdout=stdout, stderr=stderr)
    else:
        # failure
        module.fail_json(
            rc=rc, msg="%s failed with return code: %d" % (cmd, rc),
            stdout=stdout, stderr=stderr)
# import module snippets
# (Ansible inlines module_utils.basic at deploy time; the wildcard import
# provides AnsibleModule used by main().)
from ansible.module_utils.basic import *
main()
| gpl-3.0 |
timle1/try_django_1_10 | accounts/forms.py | 1 | 2285 | from django import forms
from django.contrib.auth import (
authenticate,
get_user_model,
login,
logout,
)
User = get_user_model()
class UserLoginForm(forms.Form):
    """Validates a username/password pair against the auth backend.

    Raises ValidationError when authentication fails, the password is
    wrong, or the account has been deactivated.
    """
    username = forms.CharField()
    password = forms.CharField(widget=forms.PasswordInput)

    def clean(self, *args, **kwargs):
        username = self.cleaned_data.get("username")
        password = self.cleaned_data.get("password")
        if username and password:
            user = authenticate(username=username, password=password)
            if not user:
                raise forms.ValidationError("This user does not exist")
            if not user.check_password(password):
                # NOTE(review): authenticate() already verified the password,
                # so this branch should be unreachable -- kept for parity.
                raise forms.ValidationError("Incorrect password")
            if not user.is_active:
                raise forms.ValidationError("This user is no longer active.")
        return super(UserLoginForm, self).clean(*args, **kwargs)
class UserRegisterForm(forms.ModelForm):
    """Sign-up form that requires the email address to be entered twice."""
    email = forms.EmailField(label='Email address')
    email2 = forms.EmailField(label='Confirm Email')
    password = forms.CharField(widget=forms.PasswordInput)

    class Meta:
        model = User
        fields = ['username', 'email', 'email2', 'password']

    def clean_email2(self):
        """Ensure both email fields match and the address is not taken."""
        first = self.cleaned_data.get('email')
        second = self.cleaned_data.get('email2')
        if first != second:
            raise forms.ValidationError("Emails must match")
        if User.objects.filter(email=first).exists():
            raise forms.ValidationError("This email has already been registered")
        return first
| mit |
Appono/oheurydices | vendor/distribute-0.6.36/setuptools/tests/test_sandbox.py | 204 | 1724 | """develop tests
"""
import sys
import os
import shutil
import unittest
import tempfile
from setuptools.sandbox import DirectorySandbox, SandboxViolation
def has_win32com():
    """
    Run this to determine if the local machine has win32com, and if it
    does, include additional tests.

    Returns False immediately on non-Windows platforms or when the
    win32com package cannot be imported.
    """
    if not sys.platform.startswith('win32'):
        return False
    try:
        # Probe only; the module object itself is not needed (the original
        # bound it to an unused local).
        __import__('win32com')
    except ImportError:
        return False
    return True
class TestSandbox(unittest.TestCase):
    """Exercise DirectorySandbox write restrictions."""

    def setUp(self):
        # Scratch directory the sandbox is allowed to write into.
        self.dir = tempfile.mkdtemp()

    def tearDown(self):
        shutil.rmtree(self.dir)

    def test_devnull(self):
        # Writing to os.devnull must be permitted even though the device
        # lives outside the sandboxed directory.
        if sys.version < '2.4':
            # DirectorySandbox is not exercised on ancient Pythons.
            return
        sandbox = DirectorySandbox(self.dir)
        sandbox.run(self._file_writer(os.devnull))

    def _file_writer(path):
        # Build a zero-argument callable that writes a marker file at `path`.
        def do_write():
            f = open(path, 'w')
            f.write('xxx')
            f.close()
        return do_write
    # Pre-decorator staticmethod idiom, kept for very old Python support.
    _file_writer = staticmethod(_file_writer)

    if has_win32com():
        # Only defined on Windows hosts that have pywin32 installed.
        def test_win32com(self):
            """
            win32com should not be prevented from caching COM interfaces
            in gen_py.
            """
            import win32com
            gen_py = win32com.__gen_path__
            target = os.path.join(gen_py, 'test_write')
            sandbox = DirectorySandbox(self.dir)
            try:
                try:
                    sandbox.run(self._file_writer(target))
                except SandboxViolation:
                    self.fail("Could not create gen_py file due to SandboxViolation")
            finally:
                # Clean up the marker file regardless of the outcome.
                if os.path.exists(target): os.remove(target)
# Allow running this test module directly.
if __name__ == '__main__':
    unittest.main()
| mit |
johnchronis/exareme | exareme-tools/madis/src/lib/chardet/charsetprober.py | 216 | 1914 | ######################## BEGIN LICENSE BLOCK ########################
# The Original Code is Mozilla Universal charset detector code.
#
# The Initial Developer of the Original Code is
# Netscape Communications Corporation.
# Portions created by the Initial Developer are Copyright (C) 2001
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
# Mark Pilgrim - port to Python
# Shy Shalom - original C code
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301 USA
######################### END LICENSE BLOCK #########################
import constants, re
class CharSetProber:
    """Base class for charset-detection probers (mostly a no-op shell)."""

    def __init__(self):
        pass

    def reset(self):
        # Return the prober to its initial "still detecting" state.
        self._mState = constants.eDetecting

    def get_charset_name(self):
        # The base prober cannot name a charset.
        return None

    def feed(self, aBuf):
        # Subclasses consume the buffer; the base class ignores it.
        pass

    def get_state(self):
        return self._mState

    def get_confidence(self):
        # The base prober never gains confidence.
        return 0.0

    def filter_high_bit_only(self, aBuf):
        """Collapse every run of 7-bit ASCII characters into one space."""
        return re.sub(r'([\x00-\x7F])+', ' ', aBuf)

    def filter_without_english_letters(self, aBuf):
        """Collapse every run of Latin letters into one space."""
        return re.sub(r'([A-Za-z])+', ' ', aBuf)

    def filter_with_english_letters(self, aBuf):
        # TODO: not implemented; the buffer passes through unchanged.
        return aBuf
| mit |
ddervs/bad-boids | boids/tests/test_boids.py | 1 | 2868 | import os
import yaml
from mock import patch
from numpy.testing import assert_array_almost_equal
from matplotlib import animation
import boids.boids.boids as boids
from nose.tools import assert_raises
config_filename = 'boids/config.yaml'
# Load once at import time; `with` closes the handle (the original leaked it).
# NOTE(review): safe_load assumes the config holds only plain YAML data -- confirm.
with open(config_filename) as _config_file:
    config = yaml.safe_load(_config_file)
def test_boids_fixtures():
    """Regression-test update_boids() and each sub-step against fixture data."""
    # `with` guarantees the fixture handle is closed (the original leaked it);
    # NOTE(review): safe_load assumes the fixture is plain YAML data -- confirm.
    with open(os.path.join(os.path.dirname(__file__), 'fixture.yml')) as fixture_file:
        regression_data = yaml.safe_load(fixture_file)
    test_boids = boids.Boids(regression_data["reg_before"], config)

    def check(key):
        # Compare every boid against the recorded snapshot, 2 decimal places.
        for after, before in zip(regression_data[key], test_boids.boids):
            assert_array_almost_equal(after, before, 2)

    # Full-update regression test.
    test_boids.update_boids()
    check("reg_after")
    # Each sub-step, applied in order, must match its recorded snapshot.
    for step in ("fly_to_middle", "fly_away_nearby", "match_speed", "move_boids"):
        getattr(test_boids, step)()
        check(step)
def test_new_flock():
    """new_flock() builds the requested number of boids inside the range."""
    num_boids = 20
    boids_range = [-100, 100]
    test_boids = boids.new_flock(num_boids, boids_range, boids_range, boids_range, boids_range)
    low, high = boids_range
    for array in test_boids:
        # Right number of boids (py2-only cmp() replaced by direct equality).
        assert array.shape == (num_boids, 2)
        # Every component must lie inside the range.  The original compared
        # the scalar bounds against array.all() -- a boolean -- which made
        # the check vacuous.
        assert ((array > low) & (array < high)).all()
@patch.object(boids.Boids, 'update_boids')
def test_animate(mock_update_boids):
    """animate() must delegate to update_boids()."""
    # Close the fixture handle deterministically (the original leaked it);
    # NOTE(review): safe_load assumes plain-data YAML -- confirm.
    with open(os.path.join(os.path.dirname(__file__), 'fixture.yml')) as fixture_file:
        regression_data = yaml.safe_load(fixture_file)
    test_boids = boids.Boids(regression_data["reg_before"], config)
    frame = None
    test_boids.animate(frame)
    assert mock_update_boids.called
@patch.object(animation, 'FuncAnimation')
def test_run_animation(mock_FuncAnimation):
    """run_animation() must drive matplotlib's FuncAnimation."""
    # Close the fixture handle deterministically (the original leaked it);
    # NOTE(review): safe_load assumes plain-data YAML -- confirm.
    with open(os.path.join(os.path.dirname(__file__), 'fixture.yml')) as fixture_file:
        regression_data = yaml.safe_load(fixture_file)
    test_boids = boids.Boids(regression_data["reg_before"], config)
    test_boids.run_animation()
    assert mock_FuncAnimation.called
def test_init():
    """Boids() must reject wrongly-typed constructor arguments."""
    sample = [1, 2, 3]
    # Bad boid data paired with a plausible config dict.
    assert_raises(TypeError, boids.Boids, sample, dict(key=sample))
    # Plausible boid data paired with a bad config.
    assert_raises(TypeError, boids.Boids, (sample, sample), sample)
| mit |
mozvip/CouchPotatoServer | libs/sqlalchemy/util/__init__.py | 17 | 1742 | # util/__init__.py
# Copyright (C) 2005-2012 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
from compat import callable, cmp, reduce, defaultdict, py25_dict, \
threading, py3k_warning, jython, pypy, win32, set_types, buffer, pickle, \
update_wrapper, partial, md5_hex, decode_slice, dottedgetter,\
parse_qsl, any, contextmanager
from _collections import NamedTuple, ImmutableContainer, immutabledict, \
Properties, OrderedProperties, ImmutableProperties, OrderedDict, \
OrderedSet, IdentitySet, OrderedIdentitySet, column_set, \
column_dict, ordered_column_set, populate_column_dict, unique_list, \
UniqueAppender, PopulateDict, EMPTY_SET, to_list, to_set, \
to_column_set, update_copy, flatten_iterator, WeakIdentityMapping, \
LRUCache, ScopedRegistry, ThreadLocalRegistry
from langhelpers import iterate_attributes, class_hierarchy, \
portable_instancemethod, unbound_method_to_callable, \
getargspec_init, format_argspec_init, format_argspec_plus, \
get_func_kwargs, get_cls_kwargs, decorator, as_interface, \
memoized_property, memoized_instancemethod, \
reset_memoized, group_expirable_memoized_property, importlater, \
monkeypatch_proxied_specials, asbool, bool_or_str, coerce_kw_type,\
duck_type_collection, assert_arg_type, symbol, dictlike_iteritems,\
classproperty, set_creation_order, warn_exception, warn, NoneType,\
constructor_copy, methods_equivalent, chop_traceback, asint,\
generic_repr, counter
from deprecations import warn_deprecated, warn_pending_deprecation, \
deprecated, pending_deprecation
| gpl-3.0 |
dudepare/bedrock | bedrock/mozorg/tests/test_decorators.py | 45 | 1378 | # This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
import time
from math import floor
from django.test import RequestFactory
from django.utils.http import parse_http_date
from bedrock.mozorg.tests import TestCase
from bedrock.mozorg.tests import views
class ViewDecoratorTests(TestCase):
    """Checks the cache-control decorators applied to the test views."""

    def setUp(self):
        self.rf = RequestFactory()

    def _test_cache_headers(self, view, hours):
        """
        Should have appropriate Cache-Control and Expires headers.
        """
        request = self.rf.get('/hi-there-dude/')
        response = view(request)
        max_age = hours * 60 * 60
        self.assertEqual(response['cache-control'], 'max-age=%d' % max_age)
        # Expires should be roughly now + max-age (2s tolerance for slow runs).
        now_stamp = floor(time.time())
        expires_stamp = parse_http_date(response['expires'])
        self.assertAlmostEqual(now_stamp + max_age, expires_stamp, delta=2)

    def test_cache_headers_48_hours(self):
        """
        Test a view that should be cached for 48 hours.
        """
        self._test_cache_headers(views.view_test_48_hrs, 48)

    def test_cache_headers_30_days(self):
        """
        Test a view that should be cached for 30 days.
        """
        self._test_cache_headers(views.view_test_30_days, 30 * 24)
| mpl-2.0 |
aferr/LatticeMemCtl | tests/long/se/30.eon/test.py | 21 | 1743 | # Copyright (c) 2006-2007 The Regents of The University of Michigan
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met: redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer;
# redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution;
# neither the name of the copyright holders nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# Authors: Korey Sewell
# Extend the config path so the cpu2000 benchmark definitions import cleanly.
m5.util.addToPath('../configs/common')
from cpu2000 import eon_cook

# NOTE(review): `m5`, `isa`, `opsys` and `root` appear to be injected by the
# gem5 test harness before this script runs -- confirm.
workload = eon_cook(isa, opsys, 'mdred')
root.system.cpu.workload = workload.makeLiveProcess()
| bsd-3-clause |
Skeen/OctoPrint | src/octoprint/_version.py | 29 | 10232 |
# This file helps to compute a version number in source trees obtained from
# git-archive tarball (such as those provided by githubs download-from-tag
# feature). Distribution tarballs (build by setup.py sdist) and build
# directories (produced by setup.py build) will contain a much shorter file
# that just contains the computed version number.
# This file is released into the public domain. Generated by
# versioneer-0.10 (https://github.com/warner/python-versioneer)
# these strings will be replaced by git during git-archive
git_refnames = "$Format:%d$"
git_full = "$Format:%H$"
import subprocess
import sys
import errno
def run_command(commands, args, cwd=None, verbose=False, hide_stderr=False):
    """Try each candidate executable name in *commands* until one spawns.

    Returns the stripped stdout (text on Python 3) of the first process
    that starts and exits with status 0, otherwise None.
    """
    assert isinstance(commands, list)
    stderr_target = subprocess.PIPE if hide_stderr else None
    process = None
    for candidate in commands:
        try:
            # shell=False, so on Windows the .cmd/.exe name must be explicit.
            process = subprocess.Popen([candidate] + args, cwd=cwd,
                                       stdout=subprocess.PIPE,
                                       stderr=stderr_target)
        except EnvironmentError:
            err = sys.exc_info()[1]
            if err.errno == errno.ENOENT:
                # This name does not exist; try the next candidate.
                continue
            if verbose:
                print("unable to run %s" % args[0])
                print(err)
            return None
        break
    else:
        if verbose:
            print("unable to find command, tried %s" % (commands,))
        return None
    output = process.communicate()[0].strip()
    if sys.version >= '3':
        # Popen yields bytes on Python 3; callers expect text.
        output = output.decode()
    if process.returncode != 0:
        if verbose:
            print("unable to run %s (error)" % args[0])
        return None
    return output
import sys
import re
import os.path
def get_gits(root, verbose=False):
    """Return the git command name candidates for this platform.

    Returns None when *root* does not contain a .git directory.
    """
    if not os.path.exists(os.path.join(root, ".git")):
        if verbose:
            print("no .git in %s" % root)
        return None
    if sys.platform == "win32":
        # With shell=False the Windows wrappers must be named explicitly.
        return ["git.cmd", "git.exe"]
    return ["git"]
def get_expanded_variables(versionfile_abs):
    """Scrape git_refnames/git_full values out of a _version.py file.

    The code embedded in _version.py can just fetch the value of these
    variables.  When used from setup.py we don't want to import _version.py,
    so it is parsed with a regexp instead.  This function is not used from
    _version.py itself.

    Returns a dict with "refnames" and/or "full" keys; an unreadable file
    yields an empty dict.
    """
    variables = {}
    try:
        # `with` closes the handle even when reading raises (the original
        # leaked the file object on error).
        with open(versionfile_abs, "r") as f:
            for line in f.readlines():
                if line.strip().startswith("git_refnames ="):
                    mo = re.search(r'=\s*"(.*)"', line)
                    if mo:
                        variables["refnames"] = mo.group(1)
                if line.strip().startswith("git_full ="):
                    mo = re.search(r'=\s*"(.*)"', line)
                    if mo:
                        variables["full"] = mo.group(1)
    except EnvironmentError:
        # Missing/unreadable file: fall through with whatever was collected.
        pass
    return variables
def versions_from_expanded_variables(variables, tag_prefix, verbose=False):
    """Derive a version dict from git-archive expanded keyword variables.

    Returns {} when the keywords were never expanded, i.e. we are not
    inside an unpacked git-archive tarball.
    """
    refnames = variables["refnames"].strip()
    if refnames.startswith("$Format"):
        if verbose:
            print("variables are unexpanded, not using")
        # Unexpanded $Format$ placeholder: not a git-archive tarball.
        return {}
    refs = set(ref.strip() for ref in refnames.strip("()").split(","))
    # git >= 1.8.3 marks tags as "tag: foo-1.0"; prefer those when present.
    TAG = "tag: "
    tags = set(ref[len(TAG):] for ref in refs if ref.startswith(TAG))
    if not tags:
        # Either git < 1.8.3 or no real tags.  Heuristic: version tags
        # contain a digit, which filters out branch names like "release"
        # and "stabilization", as well as "HEAD" and "master".
        tags = set(ref for ref in refs if re.search(r'\d', ref))
        if verbose:
            print("discarding '%s', no digits" % ",".join(refs - tags))
    if verbose:
        print("likely tags: %s" % ",".join(sorted(tags)))
    full = variables["full"].strip()
    for ref in sorted(tags):
        # Sorting prefers e.g. "2.0" over "2.0rc1".
        if ref.startswith(tag_prefix):
            version = ref[len(tag_prefix):]
            if verbose:
                print("picking %s" % version)
            return {"version": version, "full": full}
    # No suitable tags: fall back to the full revision id.
    if verbose:
        print("no suitable tags, using full revision id")
    return {"version": full, "full": full}
def versions_from_lookup(lookup, root, verbose=False):
    """Resolve a version via the .versioneer-lookup table.

    Each lookup entry is (branch_matcher, tag, ref_commit); the first entry
    whose matcher matches the current branch wins.  Returns {} whenever any
    required git invocation fails or no entry matches.
    """
    GITS = get_gits(root, verbose=verbose)
    if GITS is None:
        return {}

    def git(arguments):
        # Run git in the source root; None signals failure to the caller.
        output = run_command(GITS, arguments, cwd=root)
        return None if output is None else output.strip()

    current_branch = git(["rev-parse", "--abbrev-ref", "HEAD"])
    if current_branch is None:
        return {}
    for matcher, tag, ref_commit in lookup:
        if not matcher.match(current_branch):
            continue
        if tag is None or ref_commit is None:
            # A matching entry without tag/commit disables lookup versioning.
            return {}
        num_commits = git(["rev-list", "%s..HEAD" % ref_commit, "--count"])
        if num_commits is None:
            return {}
        short_hash = git(["rev-parse", "--short", "HEAD"])
        if short_hash is None:
            return {}
        described = git(["describe", "--tags", "--dirty", "--always"])
        if described is None:
            return {}
        full = git(["rev-parse", "HEAD"])
        if full is None:
            return {}
        version = "%s-%s-g%s" % (tag, num_commits, short_hash)
        if described.endswith("-dirty"):
            # Propagate the dirty marker onto both identifiers.
            version += "-dirty"
            full += "-dirty"
        return {"version": version, "full": full, "branch": current_branch}
    return {}
def versions_from_vcs(tag_prefix, root, verbose=False):
    """Compute the version by running git in a checked-out source tree.

    Only reached when the git-archive keywords were *not* expanded and
    _version.py has not been rewritten with a short version string.
    """
    GITS = get_gits(root, verbose=verbose)
    if GITS is None:
        return {}
    described = run_command(GITS, ["describe", "--tags", "--dirty", "--always"],
                            cwd=root)
    if described is None:
        return {}
    if not described.startswith(tag_prefix):
        if verbose:
            print("tag '%s' doesn't start with prefix '%s'" % (described, tag_prefix))
        return {}
    tag = described[len(tag_prefix):]
    revision = run_command(GITS, ["rev-parse", "HEAD"], cwd=root)
    if revision is None:
        return {}
    full = revision.strip()
    if tag.endswith("-dirty"):
        # Propagate the dirty marker onto the full revision id as well.
        full += "-dirty"
    branch_output = run_command(GITS, ["rev-parse", "--abbrev-ref", "HEAD"],
                                cwd=root)
    branch = None if branch_output is None else branch_output.strip()
    return {"version": tag, "full": full, "branch": branch}
def versions_from_parentdir(parentdir_prefix, root, verbose=False):
    """Extract a version from the name of the unpacked source directory.

    Source tarballs conventionally unpack into "<project>-<version>";
    returns None when the directory name lacks the expected prefix.
    """
    dirname = os.path.basename(root)
    if dirname.startswith(parentdir_prefix):
        version = dirname[len(parentdir_prefix):]
        return {"version": version, "full": "", "branch": ""}
    if verbose:
        print("guessing rootdir is '%s', but '%s' doesn't start with prefix '%s'" %
              (root, dirname, parentdir_prefix))
    return None
# Project-specific versioneer configuration.
tag_prefix = ""  # tags are bare version strings, no leading "v"
parentdir_prefix = ""
versionfile_source = "src/octoprint/_version.py"
# Branch -> (tag, reference commit) table consulted before plain `git describe`.
lookupfile = ".versioneer-lookup"
def parse_lookup_file(root, lookup_path=None):
    """Parse the versioneer lookup table under *root*.

    Each non-comment line is either "pattern tag ref_commit" or just
    "pattern"; patterns are compiled to regexes.  Returns a list of
    [matcher, tag_or_None, ref_commit_or_None] entries, or [] when no
    path is configured or the file is absent.
    """
    if not lookup_path:
        lookup_path = lookupfile
    if not lookup_path:
        return []
    path = os.path.join(root, lookup_path)
    if not os.path.exists(path):
        return []
    lookup = []
    # `with` closes the handle deterministically (the original leaked it).
    with open(path, "r") as f:
        for line in f:
            if '#' in line:
                # Strip everything from the FIRST '#'.  The original used
                # rindex(), which kept text after any earlier '#' on the line
                # and could mis-parse "a b c # note # more" as five tokens.
                line = line[:line.index('#')]
            line = line.strip()
            try:
                split_line = line.split()
                if len(split_line) == 3:
                    pattern, tag, ref_commit = split_line
                    lookup.append([re.compile(pattern), tag, ref_commit])
                elif len(split_line) >= 1:
                    lookup.append([re.compile(split_line[0]), None, None])
            except Exception:
                # Malformed line (e.g. invalid regex): stop parsing and keep
                # whatever was collected so far (original behavior, minus the
                # bare except that also swallowed KeyboardInterrupt).
                break
    return lookup
def get_versions(default=None, lookup_path=None, verbose=False):
    """Return the best available version info dict.

    Tries, in order: expanded git-archive keywords, the lookup table,
    live git metadata, the parent-directory name, then *default*.
    *default* falls back to {"version": "unknown", "full": "",
    "branch": "unknown"}; it is built per call rather than being a shared
    mutable default argument as before.
    """
    if default is None:
        default = {"version": "unknown", "full": "", "branch": "unknown"}
    # I am in _version.py, which lives at ROOT/VERSIONFILE_SOURCE. If we have
    # __file__, we can work backwards from there to the root. Some
    # py2exe/bbfreeze/non-CPython implementations don't do __file__, in which
    # case we can only use expanded variables.
    variables = {"refnames": git_refnames, "full": git_full}
    ver = versions_from_expanded_variables(variables, tag_prefix, verbose)
    if ver:
        return ver
    try:
        root = os.path.abspath(__file__)
        # versionfile_source is the relative path from the top of the source
        # tree (where the .git directory might live) to this file. Invert
        # this to find the root from __file__.
        for _ in range(len(versionfile_source.split("/"))):
            root = os.path.dirname(root)
    except NameError:
        # No __file__ available (frozen builds): keyword variables were the
        # only option and they failed.
        return default
    lookup = parse_lookup_file(root, lookup_path=lookup_path)
    return (versions_from_lookup(lookup, root, verbose)
            or versions_from_vcs(tag_prefix, root, verbose)
            or versions_from_parentdir(parentdir_prefix, root, verbose)
            or default)
| agpl-3.0 |
Azulinho/ansible | lib/ansible/module_utils/database.py | 108 | 5859 | # This code is part of Ansible, but is an independent component.
# This particular file snippet, and this file snippet only, is BSD licensed.
# Modules you write using this snippet, which is embedded dynamically by Ansible
# still belong to the author of the module, and may assign their own license
# to the complete work.
#
# Copyright (c) 2014, Toshio Kuratomi <tkuratomi@ansible.com>
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without modification,
# are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
# IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE
# USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
class SQLParseError(Exception):
    """Raised when an SQL identifier cannot be parsed or quoted safely."""
class UnclosedQuoteError(SQLParseError):
    """Raised when a quoted identifier is missing its closing quote."""
# maps a type of identifier to the maximum number of dot levels that are
# allowed to specify that identifier. For example, a database column can be
# specified by up to 4 levels: database.schema.table.column
_PG_IDENTIFIER_TO_DOT_LEVEL = dict(database=1, schema=2, table=3, column=4, role=1)
# MySQL has no schema level, so tables sit directly under the database.
_MYSQL_IDENTIFIER_TO_DOT_LEVEL = dict(database=1, table=2, column=3, role=1, vars=1)
def _find_end_quote(identifier, quote_char):
accumulate = 0
while True:
try:
quote = identifier.index(quote_char)
except ValueError:
raise UnclosedQuoteError
accumulate = accumulate + quote
try:
next_char = identifier[quote + 1]
except IndexError:
return accumulate
if next_char == quote_char:
try:
identifier = identifier[quote + 2:]
accumulate = accumulate + 2
except IndexError:
raise UnclosedQuoteError
else:
return accumulate
def _identifier_parse(identifier, quote_char):
    """Split a dotted SQL identifier into a list of quoted fragments.

    Handles fragments the user already quoted with *quote_char* (including
    doubled-quote escapes) as well as unquoted ones, recursing on the
    remainder after each dot.  Raises SQLParseError on malformed input.
    """
    if not identifier:
        raise SQLParseError('Identifier name unspecified or unquoted trailing dot')
    already_quoted = False
    if identifier.startswith(quote_char):
        already_quoted = True
        try:
            # Index of the closing quote, accounting for doubled escapes.
            end_quote = _find_end_quote(identifier[1:], quote_char=quote_char) + 1
        except UnclosedQuoteError:
            # No closing quote: re-process the string as unquoted below.
            already_quoted = False
        else:
            if end_quote < len(identifier) - 1:
                if identifier[end_quote + 1] == '.':
                    # Quoted head fragment followed by a dot: recurse on the rest.
                    dot = end_quote + 1
                    first_identifier = identifier[:dot]
                    next_identifier = identifier[dot + 1:]
                    further_identifiers = _identifier_parse(next_identifier, quote_char)
                    further_identifiers.insert(0, first_identifier)
                else:
                    # Trailing text after the closing quote that is not a dot.
                    raise SQLParseError('User escaped identifiers must escape extra quotes')
            else:
                # The whole identifier is a single, already-quoted fragment.
                further_identifiers = [identifier]
    if not already_quoted:
        try:
            dot = identifier.index('.')
        except ValueError:
            # No dot: quote the single fragment, doubling embedded quotes.
            identifier = identifier.replace(quote_char, quote_char * 2)
            identifier = ''.join((quote_char, identifier, quote_char))
            further_identifiers = [identifier]
        else:
            if dot == 0 or dot >= len(identifier) - 1:
                # Leading or trailing dot: quote the whole string as one
                # fragment, dot included.
                identifier = identifier.replace(quote_char, quote_char * 2)
                identifier = ''.join((quote_char, identifier, quote_char))
                further_identifiers = [identifier]
            else:
                # Quote the head fragment and recurse on the remainder.
                first_identifier = identifier[:dot]
                next_identifier = identifier[dot + 1:]
                further_identifiers = _identifier_parse(next_identifier, quote_char)
                first_identifier = first_identifier.replace(quote_char, quote_char * 2)
                first_identifier = ''.join((quote_char, first_identifier, quote_char))
                further_identifiers.insert(0, first_identifier)
    return further_identifiers
def pg_quote_identifier(identifier, id_type):
    """Safely double-quote a (possibly dotted) PostgreSQL identifier."""
    fragments = _identifier_parse(identifier, quote_char='"')
    max_levels = _PG_IDENTIFIER_TO_DOT_LEVEL[id_type]
    if len(fragments) > max_levels:
        raise SQLParseError('PostgreSQL does not support %s with more than %i dots' % (id_type, max_levels))
    return '.'.join(fragments)
def mysql_quote_identifier(identifier, id_type):
    """Safely backtick-quote a (possibly dotted) MySQL identifier."""
    fragments = _identifier_parse(identifier, quote_char='`')
    max_levels = _MYSQL_IDENTIFIER_TO_DOT_LEVEL[id_type]
    if len(fragments) > max_levels:
        raise SQLParseError('MySQL does not support %s with more than %i dots' % (id_type, max_levels))
    # "`*`" is the quoted form of a bare wildcard (e.g. GRANT ... ON db.*),
    # which MySQL expects unquoted.
    return '.'.join('*' if fragment == '`*`' else fragment
                    for fragment in fragments)
| gpl-3.0 |
postlund/home-assistant | script/scaffold/templates/device_trigger/integration/device_trigger.py | 10 | 3251 | """Provides device automations for NEW_NAME."""
from typing import List
import voluptuous as vol
from homeassistant.components.automation import AutomationActionType, state
from homeassistant.components.device_automation import TRIGGER_BASE_SCHEMA
from homeassistant.const import (
CONF_DEVICE_ID,
CONF_DOMAIN,
CONF_ENTITY_ID,
CONF_PLATFORM,
CONF_TYPE,
STATE_OFF,
STATE_ON,
)
from homeassistant.core import CALLBACK_TYPE, HomeAssistant
from homeassistant.helpers import config_validation as cv, entity_registry
from homeassistant.helpers.typing import ConfigType
from . import DOMAIN
# TODO specify your supported trigger types.
TRIGGER_TYPES = {"turned_on", "turned_off"}

# A device trigger config must name the target entity and one supported type.
TRIGGER_SCHEMA = TRIGGER_BASE_SCHEMA.extend(
    {
        vol.Required(CONF_ENTITY_ID): cv.entity_id,
        vol.Required(CONF_TYPE): vol.In(TRIGGER_TYPES),
    }
)
async def async_get_triggers(hass: HomeAssistant, device_id: str) -> List[dict]:
    """List device triggers for NEW_NAME devices."""
    registry = await entity_registry.async_get_registry(hass)
    triggers = []

    # TODO Read this comment and remove it.
    # This example shows how to iterate over the entities of this device
    # that match this integration. If your triggers instead rely on
    # events fired by devices without entities, do something like:
    # zha_device = await _async_get_zha_device(hass, device_id)
    # return zha_device.device_triggers

    # Walk every entity registered for this device, keeping only ours.
    for entry in entity_registry.async_entries_for_device(registry, device_id):
        if entry.domain != DOMAIN:
            continue
        # TODO add your own triggers.
        # One trigger per supported type, emitted in a stable order.
        for trigger_type in ("turned_on", "turned_off"):
            triggers.append(
                {
                    CONF_PLATFORM: "device",
                    CONF_DEVICE_ID: device_id,
                    CONF_DOMAIN: DOMAIN,
                    CONF_ENTITY_ID: entry.entity_id,
                    CONF_TYPE: trigger_type,
                }
            )
    return triggers
async def async_attach_trigger(
    hass: HomeAssistant,
    config: ConfigType,
    action: AutomationActionType,
    automation_info: dict,
) -> CALLBACK_TYPE:
    """Attach a trigger."""
    config = TRIGGER_SCHEMA(config)

    # TODO Implement your own logic to attach triggers.
    # Generally we suggest to re-use the existing state or event
    # triggers from the automation integration.
    # Translate the device trigger type into a plain state-change trigger.
    turning_on = config[CONF_TYPE] == "turned_on"
    from_state = STATE_OFF if turning_on else STATE_ON
    to_state = STATE_ON if turning_on else STATE_OFF
    state_config = state.TRIGGER_SCHEMA(
        {
            state.CONF_PLATFORM: "state",
            CONF_ENTITY_ID: config[CONF_ENTITY_ID],
            state.CONF_FROM: from_state,
            state.CONF_TO: to_state,
        }
    )
    return await state.async_attach_trigger(
        hass, state_config, action, automation_info, platform_type="device"
    )
| apache-2.0 |
loveheaven/ocropy | OLD/distance.py | 15 | 2661 | ################################################################
### Native code neural network with backpropagation training.
################################################################
from __future__ import with_statement
__all__ = "MLP".split()
from numpy import *
from pylab import *
from scipy import *
from native import *
import multiprocessing
def c_order(a):
    """Check whether the elements of the array are in C order."""
    # C-contiguous-style layout means strides are non-increasing.
    strides = tuple(a.strides)
    return strides == tuple(sorted(strides, reverse=True))
# C source for the OpenMP-parallel pairwise Euclidean distance kernel.
# cdist(d, na, nb, a, b, result) fills result[i][j] with ||a[i]-b[j]||_2,
# parallelised over all na*nb pairs; the `maxthreads` C global is patched
# from Python (see cdist_native_load) to bound the OpenMP thread count.
cdist_native_c = r'''
#include <math.h>
#include <stdio.h>
#include <assert.h>
#include <stdlib.h>
#include <omp.h>
int maxthreads = 1;
void cdist(int d,int na,int nb,float a[na][d],float b[nb][d],float result[na][nb]) {
int n = na*nb;
#pragma omp parallel for num_threads (maxthreads)
for(int job=0;job<n;job++) {
int i = job/nb;
int j = job%nb;
double total = 0.0;
for(int k=0;k<d;k++) {
float delta = a[i][k]-b[j][k];
total += delta*delta;
}
result[i][j] = sqrt(total);
}
}
'''
# Handle to the compiled native library; populated lazily by cdist_native_load().
cdist_native = None

def cdist_native_load():
    """Compile and load the native cdist kernel (no-op if already loaded)."""
    # FIXME move to cython
    global cdist_native
    if cdist_native is not None: return
    cdist_native = compile_and_load(cdist_native_c)
    # Declare the C signature: cdist(d, na, nb, a, b, result).
    # NOTE(review): I, A2F, c_int and compile_and_load appear to come from
    # the star-imported `native` module -- confirm.
    cdist_native.cdist.argtypes = [I,I,I,A2F,A2F,A2F]
    global maxthreads
    # Bind the library's `maxthreads` C global and default it to all cores.
    maxthreads = c_int.in_dll(cdist_native,"maxthreads")
    maxthreads.value = multiprocessing.cpu_count()
def cdist(a, b, out=None, threads=-1):
    """Native pairwise Euclidean distances between the rows of a and b.

    Inputs are coerced to C-contiguous float32 arrays; the (len(a), len(b))
    result is written into `out` (allocated when not supplied).
    threads < 0 means "use every CPU core".
    """
    cdist_native_load()

    def coerce(x):
        # The kernel requires contiguous float32 data in C order.
        if type(x) == list or x.dtype != dtype('float32') or not c_order(x):
            return array(x, dtype='float32', order="C")
        return x

    a = coerce(a)
    b = coerce(b)
    assert a.ndim == 2
    assert b.ndim == 2
    assert a.shape[1] == b.shape[1]
    rows, cols, dim = len(a), len(b), a.shape[1]
    if out is None:
        out = zeros((rows, cols), 'float32')
    if threads < 0:
        threads = multiprocessing.cpu_count()
    maxthreads.value = threads
    cdist_native.cdist(dim, rows, cols, a, b, out)
    return out
class ProtoDists:
    """Holds a fixed prototype matrix and computes distances against it."""

    def __init__(self):
        pass

    def setProtos(self, b):
        """Store the prototype rows (converted to float32 when needed)."""
        assert b.ndim == 2
        assert c_order(b)
        if b.dtype != dtype('float32'):
            b = array(b, 'float32')
        self.b = b

    def cdist(self, a):
        """Distances from each row of `a` to every stored prototype."""
        if a.dtype != dtype('float32'):
            a = array(a, 'float32')
        return cdist(a, self.b, threads=-1)
if __name__=="__main__":
    # Smoke test: compare the native kernel against scipy's reference cdist.
    cdist_native_load()
    a = array(randn(3, 5), 'f')
    b = array(randn(7, 5), 'f')
    from scipy.spatial.distance import cdist as oldcdist
    out = cdist(a, b)
    # print() call form works on Python 2.6+ as well as Python 3 (the
    # original used the Python-2-only `print out` statement).
    print(out)
    out2 = oldcdist(a, b)
    print(out2)
| apache-2.0 |
agconti/njode | env/lib/python2.7/site-packages/setuptools/tests/test_develop.py | 125 | 3607 | """develop tests
"""
import os
import shutil
import site
import sys
import tempfile
import unittest
from distutils.errors import DistutilsError
from setuptools.command.develop import develop
from setuptools.command import easy_install as easy_install_pkg
from setuptools.compat import StringIO
from setuptools.dist import Distribution
# Minimal setup.py written into the scratch project by setUp().
SETUP_PY = """\
from setuptools import setup
setup(name='foo',
packages=['foo'],
use_2to3=True,
)
"""

# Contents of foo/__init__.py: a py2 print statement (exercises use_2to3).
INIT_PY = """print "foo"
"""
class TestDevelopTest(unittest.TestCase):
    """Exercise ``setuptools.command.develop`` against a throwaway project.

    setUp materializes a minimal ``foo`` package (driven by the
    module-level SETUP_PY / INIT_PY templates) in a temp directory and
    redirects the per-user site directories to temp locations so that
    ``develop --user`` never touches the real user site-packages.
    """
    def setUp(self):
        # Tests are silently skipped on Python < 2.6 or inside a
        # virtualenv (which sets sys.real_prefix).
        if sys.version < "2.6" or hasattr(sys, 'real_prefix'):
            return
        # Directory structure
        self.dir = tempfile.mkdtemp()
        os.mkdir(os.path.join(self.dir, 'foo'))
        # setup.py
        setup = os.path.join(self.dir, 'setup.py')
        f = open(setup, 'w')
        f.write(SETUP_PY)
        f.close()
        self.old_cwd = os.getcwd()
        # foo/__init__.py
        init = os.path.join(self.dir, 'foo', '__init__.py')
        f = open(init, 'w')
        f.write(INIT_PY)
        f.close()
        os.chdir(self.dir)
        # Point the user-site machinery at fresh temp dirs.
        self.old_base = site.USER_BASE
        site.USER_BASE = tempfile.mkdtemp()
        self.old_site = site.USER_SITE
        site.USER_SITE = tempfile.mkdtemp()
    def tearDown(self):
        # NOTE(review): this guard also returns early under a PEP 405 venv
        # (base_prefix != prefix) while setUp does not, so under a venv the
        # temp dirs created by setUp are leaked -- confirm and align.
        if sys.version < "2.6" or hasattr(sys, 'real_prefix') or (hasattr(sys, 'base_prefix') and sys.base_prefix != sys.prefix):
            return
        os.chdir(self.old_cwd)
        shutil.rmtree(self.dir)
        shutil.rmtree(site.USER_BASE)
        shutil.rmtree(site.USER_SITE)
        site.USER_BASE = self.old_base
        site.USER_SITE = self.old_site
    def test_develop(self):
        # Same skip condition as setUp (nothing was created in that case).
        if sys.version < "2.6" or hasattr(sys, 'real_prefix'):
            return
        dist = Distribution(
            dict(name='foo',
                 packages=['foo'],
                 use_2to3=True,
                 version='0.0',
                 ))
        dist.script_name = 'setup.py'
        cmd = develop(dist)
        cmd.user = 1
        cmd.ensure_finalized()
        cmd.install_dir = site.USER_SITE
        cmd.user = 1
        old_stdout = sys.stdout
        #sys.stdout = StringIO()
        try:
            cmd.run()
        finally:
            sys.stdout = old_stdout
        # let's see if we got our egg link at the right place
        content = os.listdir(site.USER_SITE)
        content.sort()
        self.assertEqual(content, ['easy-install.pth', 'foo.egg-link'])
        # Check that we are using the right code.
        egg_link_file = open(os.path.join(site.USER_SITE, 'foo.egg-link'), 'rt')
        try:
            path = egg_link_file.read().split()[0].strip()
        finally:
            egg_link_file.close()
        init_file = open(os.path.join(path, 'foo', '__init__.py'), 'rt')
        try:
            init = init_file.read().strip()
        finally:
            init_file.close()
        # use_2to3 should have rewritten the print statement on Python 3.
        if sys.version < "3":
            self.assertEqual(init, 'print "foo"')
        else:
            self.assertEqual(init, 'print("foo")')
    def notest_develop_with_setup_requires(self):
        # Disabled test (the "notest_" prefix keeps unittest from
        # collecting it): would check the error raised for an
        # unsatisfiable setup_requires.
        wanted = ("Could not find suitable distribution for "
                  "Requirement.parse('I-DONT-EXIST')")
        old_dir = os.getcwd()
        os.chdir(self.dir)
        try:
            try:
                dist = Distribution({'setup_requires': ['I_DONT_EXIST']})
            except DistutilsError:
                e = sys.exc_info()[1]
                error = str(e)
                if error == wanted:
                    pass
        finally:
            os.chdir(old_dir)
| bsd-3-clause |
quoclieu/codebrew17-starving | env/lib/python3.5/site-packages/pip/_vendor/requests/packages/urllib3/request.py | 714 | 5988 | from __future__ import absolute_import
try:
from urllib.parse import urlencode
except ImportError:
from urllib import urlencode
from .filepost import encode_multipart_formdata
__all__ = ['RequestMethods']
class RequestMethods(object):
    """
    Convenience mixin for classes who implement a :meth:`urlopen` method, such
    as :class:`~urllib3.connectionpool.HTTPConnectionPool` and
    :class:`~urllib3.poolmanager.PoolManager`.

    Provides behavior for making common types of HTTP request methods and
    decides which type of request field encoding to use.

    Specifically,

    :meth:`.request_encode_url` is for sending requests whose fields are
    encoded in the URL (such as GET, HEAD, DELETE).

    :meth:`.request_encode_body` is for sending requests whose fields are
    encoded in the *body* of the request using multipart or www-form-urlencoded
    (such as for POST, PUT, PATCH).

    :meth:`.request` is for making any kind of request, it will look up the
    appropriate encoding format and use one of the above two methods to make
    the request.

    Initializer parameters:

    :param headers:
        Headers to include with all requests, unless other headers are given
        explicitly.
    """

    # HTTP methods whose fields belong in the query string rather than the body.
    _encode_url_methods = set(['DELETE', 'GET', 'HEAD', 'OPTIONS'])

    def __init__(self, headers=None):
        self.headers = headers or {}

    def urlopen(self, method, url, body=None, headers=None,
                encode_multipart=True, multipart_boundary=None,
                **kw):  # Abstract
        # BUG FIX: this previously did ``raise NotImplemented(...)``.
        # ``NotImplemented`` is a comparison sentinel, not an exception
        # class, so calling it raised ``TypeError: 'NotImplementedType'
        # object is not callable`` instead of the intended error.
        raise NotImplementedError(
            "Classes extending RequestMethods must implement "
            "their own ``urlopen`` method.")

    def request(self, method, url, fields=None, headers=None, **urlopen_kw):
        """
        Make a request using :meth:`urlopen` with the appropriate encoding of
        ``fields`` based on the ``method`` used.

        This is a convenience method that requires the least amount of manual
        effort. It can be used in most situations, while still having the
        option to drop down to more specific methods when necessary, such as
        :meth:`request_encode_url`, :meth:`request_encode_body`,
        or even the lowest level :meth:`urlopen`.
        """
        method = method.upper()

        # Route to the URL-encoded or body-encoded path based on the verb.
        if method in self._encode_url_methods:
            return self.request_encode_url(method, url, fields=fields,
                                           headers=headers,
                                           **urlopen_kw)
        else:
            return self.request_encode_body(method, url, fields=fields,
                                            headers=headers,
                                            **urlopen_kw)

    def request_encode_url(self, method, url, fields=None, headers=None,
                           **urlopen_kw):
        """
        Make a request using :meth:`urlopen` with the ``fields`` encoded in
        the url. This is useful for request methods like GET, HEAD, DELETE, etc.
        """
        if headers is None:
            headers = self.headers

        extra_kw = {'headers': headers}
        extra_kw.update(urlopen_kw)

        if fields:
            url += '?' + urlencode(fields)

        return self.urlopen(method, url, **extra_kw)

    def request_encode_body(self, method, url, fields=None, headers=None,
                            encode_multipart=True, multipart_boundary=None,
                            **urlopen_kw):
        """
        Make a request using :meth:`urlopen` with the ``fields`` encoded in
        the body. This is useful for request methods like POST, PUT, PATCH, etc.

        When ``encode_multipart=True`` (default), then
        :meth:`urllib3.filepost.encode_multipart_formdata` is used to encode
        the payload with the appropriate content type. Otherwise
        :meth:`urllib.urlencode` is used with the
        'application/x-www-form-urlencoded' content type.

        Multipart encoding must be used when posting files, and it's reasonably
        safe to use it in other times too. However, it may break request
        signing, such as with OAuth.

        Supports an optional ``fields`` parameter of key/value strings AND
        key/filetuple. A filetuple is a (filename, data, MIME type) tuple where
        the MIME type is optional. For example::

            fields = {
                'foo': 'bar',
                'fakefile': ('foofile.txt', 'contents of foofile'),
                'realfile': ('barfile.txt', open('realfile').read()),
                'typedfile': ('bazfile.bin', open('bazfile').read(),
                              'image/jpeg'),
                'nonamefile': 'contents of nonamefile field',
            }

        When uploading a file, providing a filename (the first parameter of the
        tuple) is optional but recommended to best mimick behavior of browsers.

        Note that if ``headers`` are supplied, the 'Content-Type' header will
        be overwritten because it depends on the dynamic random boundary string
        which is used to compose the body of the request. The random boundary
        string can be explicitly set with the ``multipart_boundary`` parameter.
        """
        if headers is None:
            headers = self.headers

        extra_kw = {'headers': {}}

        if fields:
            # A caller supplying both would silently lose one of them.
            if 'body' in urlopen_kw:
                raise TypeError(
                    "request got values for both 'fields' and 'body', can only specify one.")

            if encode_multipart:
                body, content_type = encode_multipart_formdata(fields, boundary=multipart_boundary)
            else:
                body, content_type = urlencode(fields), 'application/x-www-form-urlencoded'

            extra_kw['body'] = body
            # Content-Type must match the (possibly random) boundary, so it
            # is set before caller headers are merged on top.
            extra_kw['headers'] = {'Content-Type': content_type}

        extra_kw['headers'].update(headers)
        extra_kw.update(urlopen_kw)

        return self.urlopen(method, url, **extra_kw)
| mit |
MQQiang/kbengine | kbe/src/lib/python/Lib/email/__init__.py | 98 | 1789 | # Copyright (C) 2001-2007 Python Software Foundation
# Author: Barry Warsaw
# Contact: email-sig@python.org
"""A package for parsing, handling, and generating email messages."""
__version__ = '5.1.0'
__all__ = [
'base64mime',
'charset',
'encoders',
'errors',
'feedparser',
'generator',
'header',
'iterators',
'message',
'message_from_file',
'message_from_binary_file',
'message_from_string',
'message_from_bytes',
'mime',
'parser',
'quoprimime',
'utils',
]
# Some convenience routines. Don't import Parser and Message as side-effects
# of importing email since those cascadingly import most of the rest of the
# email package.
def message_from_string(s, *args, **kws):
    """Build a Message object model from the text in *s*.

    Any extra positional or keyword arguments (such as ``_class`` or
    ``strict``) are forwarded to the ``email.parser.Parser`` constructor.
    """
    from email.parser import Parser
    text_parser = Parser(*args, **kws)
    return text_parser.parsestr(s)
def message_from_bytes(s, *args, **kws):
    """Build a Message object model from the bytes in *s*.

    Any extra positional or keyword arguments (such as ``_class`` or
    ``strict``) are forwarded to the ``email.parser.BytesParser``
    constructor.
    """
    from email.parser import BytesParser
    byte_parser = BytesParser(*args, **kws)
    return byte_parser.parsebytes(s)
def message_from_file(fp, *args, **kws):
    """Parse the open text file *fp* into a Message object model.

    Any extra positional or keyword arguments (such as ``_class`` or
    ``strict``) are forwarded to the ``email.parser.Parser`` constructor.
    """
    from email.parser import Parser
    file_parser = Parser(*args, **kws)
    return file_parser.parse(fp)
def message_from_binary_file(fp, *args, **kws):
    """Parse the open binary file *fp* into a Message object model.

    Any extra positional or keyword arguments (such as ``_class`` or
    ``strict``) are forwarded to the ``email.parser.BytesParser``
    constructor.
    """
    from email.parser import BytesParser
    binary_parser = BytesParser(*args, **kws)
    return binary_parser.parse(fp)
| lgpl-3.0 |
stanhu/linguist | samples/Python/gen-py-linguist-thrift.py | 61 | 2259 | #
# Autogenerated by Thrift Compiler (1.0.0-dev)
#
# DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
#
# options string: py
#
from thrift.Thrift import TType, TMessageType, TException, TApplicationException
from thrift.transport import TTransport
from thrift.protocol import TBinaryProtocol, TProtocol
try:
from thrift.protocol import fastbinary
except:
fastbinary = None
class PullRequest:
  """
  Attributes:
   - title
  """
  # Thrift-compiler-generated struct (see the DO NOT EDIT header above);
  # Python 2 code -- note __repr__ uses dict.iteritems().
  # (field id, type, name, type-args, default) tuples indexed by field id.
  thrift_spec = (
    None, # 0
    (1, TType.STRING, 'title', None, None, ), # 1
  )
  def __init__(self, title=None,):
    self.title = title
  def read(self, iprot):
    # Fast path: let the C accelerator decode the whole struct at once.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    # Slow path: generic field-by-field decode; unknown fields are skipped.
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.STRING:
          self.title = iprot.readString()
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()
  def write(self, oprot):
    # Fast path: C accelerator encodes directly from thrift_spec.
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('PullRequest')
    # Optional field: only serialized when set.
    if self.title is not None:
      oprot.writeFieldBegin('title', TType.STRING, 1)
      oprot.writeString(self.title)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()
  def validate(self):
    return
  def __hash__(self):
    value = 17
    value = (value * 31) ^ hash(self.title)
    return value
  def __repr__(self):
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.iteritems()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
  def __ne__(self, other):
    return not (self == other)
| mit |
SUSE/azure-sdk-for-python | azure-mgmt-keyvault/azure/mgmt/keyvault/key_vault_management_client.py | 4 | 3598 | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from msrest.service_client import ServiceClient
from msrest import Serializer, Deserializer
from msrestazure import AzureConfiguration
from .version import VERSION
from .operations.vaults_operations import VaultsOperations
from . import models
class KeyVaultManagementClientConfiguration(AzureConfiguration):
    """Configuration for KeyVaultManagementClient
    Note that all parameters used to create this instance are saved as instance
    attributes.
    :param credentials: Credentials needed for the client to connect to Azure.
    :type credentials: :mod:`A msrestazure Credentials
     object<msrestazure.azure_active_directory>`
    :param subscription_id: Gets subscription credentials which uniquely
     identify Microsoft Azure subscription. The subscription ID forms part of
     the URI for every service call.
    :type subscription_id: str
    :param str base_url: Service URL
    """
    # AutoRest-generated code (see file header); changes may be overwritten.
    def __init__(
            self, credentials, subscription_id, base_url=None):
        # Fail fast on the two required parameters.
        if credentials is None:
            raise ValueError("Parameter 'credentials' must not be None.")
        if subscription_id is None:
            raise ValueError("Parameter 'subscription_id' must not be None.")
        # NOTE(review): under Python 2 this rejects unicode subscription
        # ids, since ``str`` is the byte string type there -- confirm the
        # supported Python versions for this generated SDK.
        if not isinstance(subscription_id, str):
            raise TypeError("Parameter 'subscription_id' must be str.")
        if not base_url:
            base_url = 'https://management.azure.com'
        super(KeyVaultManagementClientConfiguration, self).__init__(base_url)
        # Advertise the SDK in the User-Agent for telemetry.
        self.add_user_agent('keyvaultmanagementclient/{}'.format(VERSION))
        self.add_user_agent('Azure-SDK-For-Python')
        self.credentials = credentials
        self.subscription_id = subscription_id
class KeyVaultManagementClient(object):
    """The Azure management API provides a RESTful set of web services that interact with Azure Key Vault.
    :ivar config: Configuration for client.
    :vartype config: KeyVaultManagementClientConfiguration
    :ivar vaults: Vaults operations
    :vartype vaults: azure.mgmt.keyvault.operations.VaultsOperations
    :param credentials: Credentials needed for the client to connect to Azure.
    :type credentials: :mod:`A msrestazure Credentials
     object<msrestazure.azure_active_directory>`
    :param subscription_id: Gets subscription credentials which uniquely
     identify Microsoft Azure subscription. The subscription ID forms part of
     the URI for every service call.
    :type subscription_id: str
    :param str base_url: Service URL
    """
    # AutoRest-generated client facade; wires configuration, serialization
    # and the operation groups together.
    def __init__(
            self, credentials, subscription_id, base_url=None):
        self.config = KeyVaultManagementClientConfiguration(credentials, subscription_id, base_url)
        self._client = ServiceClient(self.config.credentials, self.config)
        # Collect all model classes from the generated models module so the
        # (de)serializers can resolve type names.
        client_models = {k: v for k, v in models.__dict__.items() if isinstance(v, type)}
        self.api_version = '2016-10-01'
        self._serialize = Serializer(client_models)
        self._deserialize = Deserializer(client_models)
        self.vaults = VaultsOperations(
            self._client, self.config, self._serialize, self._deserialize)
| mit |
jtrag/namebench | libnamebench/cli.py | 173 | 4760 | #!/usr/bin/env python
# Copyright 2009 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Simple DNS server comparison benchmarking tool.
Designed to assist system administrators in selection and prioritization.
"""
__author__ = 'tstromberg@google.com (Thomas Stromberg)'
import datetime
import math
import sys
import base_ui
import conn_quality
import nameserver_list
class NameBenchCli(base_ui.BaseUI):
  """A command-line implementation of the namebench workflow."""
  # Python 2 module (print statements); 2-space Google-style indent.
  def __init__(self, options):
    self.options = options
    # (msg, count, total, error) of the last status line, used to suppress
    # duplicate updates and to compute progress-dot catch-up.
    self.last_msg = (None, None, None, None)
    self.last_msg_count_posted = 0
    super(NameBenchCli, self).__init__()
  def UpdateStatus(self, msg, count=None, total=None, error=False, debug=False):
    """Status updates for the command-line. A lot of voodoo here."""
    # Drop exact repeats and debug-level messages.
    if self.last_msg == (msg, count, total, error):
      return None
    if debug:
      return None
    if error:
      # Fatal: print the error and exit the process.
      print
      print '* ERROR: %s' % msg
      sys.exit(2)
    elif not total:
      self.last_msg_count_posted = 0
      sys.stdout.write('- %s\n' % msg)
    elif self.last_msg[0] != msg:
      # New progress phase: print the header and start the phase timer.
      self.last_msg_count_posted = 0
      sys.stdout.write('- %s: %s/%s' % (msg, count, total))
      self.start_ts = datetime.datetime.now()
      last_count = 0
    else:
      last_count = self.last_msg[1]
    if total:
      if count and (count - last_count > 0):
        # Write a few dots to catch up to where we should be.
        catch_up = int(math.ceil((count - last_count) / 2.0))
        sys.stdout.write('.' * catch_up)
      if count == total:
        # Phase complete: report elapsed time.
        sys.stdout.write('%s/%s in %s\n' % (count, total, datetime.datetime.now() - self.start_ts))
      elif total > 25 and count and (count - self.last_msg_count_posted > (total * 0.20)):
        # Periodically interleave the numeric count with the dots.
        sys.stdout.write(str(count))
        self.last_msg_count_posted = count
      sys.stdout.flush()
    self.last_msg = (msg, count, total, error)
  def RunAndOpenReports(self):
    # Run the benchmark, print the ASCII report, then write report files.
    self.RunBenchmark()
    print "\n%s\n" % self.reporter.CreateReport(format='ascii')
    self.CreateReports()
    if self.options.open_webbrowser:
      self.DisplayHtmlReport()
  def Execute(self):
    """Called by namebench.py to start the show."""
    print('namebench %s - %s (%s) on %s' %
          (self.options.version, self.options.input_source or 'best source',
           self.options.select_mode, datetime.datetime.now()))
    print ('threads=%s/%s queries=%s runs=%s timeout=%s health_timeout=%s servers=%s' %
           (self.options.health_thread_count, self.options.benchmark_thread_count,
            self.options.query_count,
            self.options.run_count, self.options.timeout,
            self.options.health_timeout, self.options.num_servers))
    print '-' * 78
    if not self.options.tags:
      # No server selection given: print usage examples and bail out.
      print "You need to specify some DNS servers to benchmark. Try:"
      print ""
      print "namebench.py -s all # Test best available DNS servers"
      print "namebench.py -s preferred,isp # Only test preferred + ISP DNS servers"
      print "namebench.py -s system # Only test current system DNS servers"
      print "namebench.py -s global 8.8.8.8 # Benchmark global DNS servers + 8.8.8.8"
      print "namebench.py 8.8.8.8 10.0.0.1 # Benchmark just these two servers"
      print ""
      print "For more assistance, get help via namebench.py -h"
      sys.exit(1)
    self.PrepareNameServers()
    try:
      self.LoadDataSources()
      self.PrepareTestRecords()
      print '-' * 78
      if not self.options.skip_health_checks:
        self.CheckNameServerHealth()
      print 'Final list of nameservers considered:'
      print '-' * 78
      for n in self.nameservers.SortEnabledByFastest():
        print '%-15.15s %-18.18s %-4.0fms | %s' % (n.ip, n.name, n.check_average,
                                                   n.warnings_string)
      print ''
      print ''
      self.PrepareBenchmark()
      self.RunAndOpenReports()
    except (nameserver_list.OutgoingUdpInterception,
            nameserver_list.TooFewNameservers,
            conn_quality.OfflineConnection):
      # Known environmental failures: report via UpdateStatus, which exits.
      (exc_type, exception) = sys.exc_info()[0:2]
      self.UpdateStatus("%s - %s" % (exc_type, exception), error=True)
| apache-2.0 |
mhotwagner/abackend | abackend-env/lib/python3.5/site-packages/django/core/checks/compatibility/django_1_8_0.py | 286 | 1052 | from __future__ import unicode_literals
from django.conf import global_settings, settings
from .. import Tags, Warning, register
@register(Tags.compatibility)
def check_duplicate_template_settings(app_configs, **kwargs):
    """Warn when deprecated standalone TEMPLATE_* settings coexist with TEMPLATES.

    Any legacy setting whose value differs from Django's global default is
    ignored at runtime (TEMPLATES wins), so it is reported here.
    """
    if not settings.TEMPLATES:
        return []
    legacy_names = (
        'TEMPLATE_DIRS',
        'ALLOWED_INCLUDE_ROOTS',
        'TEMPLATE_CONTEXT_PROCESSORS',
        'TEMPLATE_DEBUG',
        'TEMPLATE_LOADERS',
        'TEMPLATE_STRING_IF_INVALID',
    )
    overridden = []
    for name in legacy_names:
        if getattr(settings, name) != getattr(global_settings, name):
            overridden.append(name)
    if not overridden:
        return []
    return [Warning(
        "The standalone TEMPLATE_* settings were deprecated in Django "
        "1.8 and the TEMPLATES dictionary takes precedence. You must "
        "put the values of the following settings into your default "
        "TEMPLATES dict: %s." % ", ".join(overridden),
        id='1_8.W001',
    )]
| mit |
chenyyx/scikit-learn-doc-zh | examples/en/cross_decomposition/plot_compare_cross_decomposition.py | 55 | 4953 | """
===================================
Compare cross decomposition methods
===================================
Simple usage of various cross decomposition algorithms:
- PLSCanonical
- PLSRegression, with multivariate response, a.k.a. PLS2
- PLSRegression, with univariate response, a.k.a. PLS1
- CCA
Given 2 multivariate covarying two-dimensional datasets, X, and Y,
PLS extracts the 'directions of covariance', i.e. the components of each
datasets that explain the most shared variance between both datasets.
This is apparent on the **scatterplot matrix** display: components 1 in
dataset X and dataset Y are maximally correlated (points lie around the
first diagonal). This is also true for components 2 in both dataset,
however, the correlation across datasets for different components is
weak: the point cloud is very spherical.
"""
print(__doc__)
import numpy as np
import matplotlib.pyplot as plt
from sklearn.cross_decomposition import PLSCanonical, PLSRegression, CCA
# #############################################################################
# Dataset based latent variables model
# NOTE(review): no random seed is set, so every run produces different
# data and correlations -- confirm whether reproducibility matters here.
n = 500
# 2 latents vars:
l1 = np.random.normal(size=n)
l2 = np.random.normal(size=n)
latents = np.array([l1, l1, l2, l2]).T
# X and Y share the latent structure plus independent Gaussian noise.
X = latents + np.random.normal(size=4 * n).reshape((n, 4))
Y = latents + np.random.normal(size=4 * n).reshape((n, 4))
# 50/50 train/test split by row order.
X_train = X[:n // 2]
Y_train = Y[:n // 2]
X_test = X[n // 2:]
Y_test = Y[n // 2:]
print("Corr(X)")
print(np.round(np.corrcoef(X.T), 2))
print("Corr(Y)")
print(np.round(np.corrcoef(Y.T), 2))
# #############################################################################
# Canonical (symmetric) PLS
# Transform data
# ~~~~~~~~~~~~~~
plsca = PLSCanonical(n_components=2)
plsca.fit(X_train, Y_train)
X_train_r, Y_train_r = plsca.transform(X_train, Y_train)
X_test_r, Y_test_r = plsca.transform(X_test, Y_test)
# Scatter plot of scores
# ~~~~~~~~~~~~~~~~~~~~~~
# 1) On diagonal plot X vs Y scores on each components
plt.figure(figsize=(12, 8))
plt.subplot(221)
plt.scatter(X_train_r[:, 0], Y_train_r[:, 0], label="train",
            marker="o", c="b", s=25)
plt.scatter(X_test_r[:, 0], Y_test_r[:, 0], label="test",
            marker="o", c="r", s=25)
plt.xlabel("x scores")
plt.ylabel("y scores")
plt.title('Comp. 1: X vs Y (test corr = %.2f)' %
          np.corrcoef(X_test_r[:, 0], Y_test_r[:, 0])[0, 1])
plt.xticks(())
plt.yticks(())
plt.legend(loc="best")
plt.subplot(224)
plt.scatter(X_train_r[:, 1], Y_train_r[:, 1], label="train",
            marker="o", c="b", s=25)
plt.scatter(X_test_r[:, 1], Y_test_r[:, 1], label="test",
            marker="o", c="r", s=25)
plt.xlabel("x scores")
plt.ylabel("y scores")
plt.title('Comp. 2: X vs Y (test corr = %.2f)' %
          np.corrcoef(X_test_r[:, 1], Y_test_r[:, 1])[0, 1])
plt.xticks(())
plt.yticks(())
plt.legend(loc="best")
# 2) Off diagonal plot components 1 vs 2 for X and Y
plt.subplot(222)
plt.scatter(X_train_r[:, 0], X_train_r[:, 1], label="train",
            marker="*", c="b", s=50)
plt.scatter(X_test_r[:, 0], X_test_r[:, 1], label="test",
            marker="*", c="r", s=50)
plt.xlabel("X comp. 1")
plt.ylabel("X comp. 2")
plt.title('X comp. 1 vs X comp. 2 (test corr = %.2f)'
          % np.corrcoef(X_test_r[:, 0], X_test_r[:, 1])[0, 1])
plt.legend(loc="best")
plt.xticks(())
plt.yticks(())
plt.subplot(223)
plt.scatter(Y_train_r[:, 0], Y_train_r[:, 1], label="train",
            marker="*", c="b", s=50)
plt.scatter(Y_test_r[:, 0], Y_test_r[:, 1], label="test",
            marker="*", c="r", s=50)
plt.xlabel("Y comp. 1")
plt.ylabel("Y comp. 2")
plt.title('Y comp. 1 vs Y comp. 2 , (test corr = %.2f)'
          % np.corrcoef(Y_test_r[:, 0], Y_test_r[:, 1])[0, 1])
plt.legend(loc="best")
plt.xticks(())
plt.yticks(())
plt.show()
# #############################################################################
# PLS regression, with multivariate response, a.k.a. PLS2
n = 1000
q = 3
p = 10
X = np.random.normal(size=n * p).reshape((n, p))
B = np.array([[1, 2] + [0] * (p - 2)] * q).T
# each Yj = 1*X1 + 2*X2 + noize
Y = np.dot(X, B) + np.random.normal(size=n * q).reshape((n, q)) + 5
pls2 = PLSRegression(n_components=3)
pls2.fit(X, Y)
print("True B (such that: Y = XB + Err)")
print(B)
# compare pls2.coef_ with B
print("Estimated B")
print(np.round(pls2.coef_, 1))
pls2.predict(X)
# PLS regression, with univariate response, a.k.a. PLS1
n = 1000
p = 10
X = np.random.normal(size=n * p).reshape((n, p))
y = X[:, 0] + 2 * X[:, 1] + np.random.normal(size=n * 1) + 5
pls1 = PLSRegression(n_components=3)
pls1.fit(X, y)
# note that the number of components exceeds 1 (the dimension of y)
print("Estimated betas")
print(np.round(pls1.coef_, 1))
# #############################################################################
# CCA (PLS mode B with symmetric deflation)
# NOTE: reuses X_train/Y_train from the first (4-feature) dataset above,
# not the PLS2/PLS1 matrices defined in between.
cca = CCA(n_components=2)
cca.fit(X_train, Y_train)
X_train_r, Y_train_r = cca.transform(X_train, Y_train)
X_test_r, Y_test_r = cca.transform(X_test, Y_test)
| gpl-3.0 |
godfather1103/WeiboRobot | python27/1.0/lib/msilib/schema.py | 52 | 83722 | from . import Table
_Validation = Table('_Validation')
_Validation.add_field(1,'Table',11552)
_Validation.add_field(2,'Column',11552)
_Validation.add_field(3,'Nullable',3332)
_Validation.add_field(4,'MinValue',4356)
_Validation.add_field(5,'MaxValue',4356)
_Validation.add_field(6,'KeyTable',7679)
_Validation.add_field(7,'KeyColumn',5378)
_Validation.add_field(8,'Category',7456)
_Validation.add_field(9,'Set',7679)
_Validation.add_field(10,'Description',7679)
ActionText = Table('ActionText')
ActionText.add_field(1,'Action',11592)
ActionText.add_field(2,'Description',7936)
ActionText.add_field(3,'Template',7936)
AdminExecuteSequence = Table('AdminExecuteSequence')
AdminExecuteSequence.add_field(1,'Action',11592)
AdminExecuteSequence.add_field(2,'Condition',7679)
AdminExecuteSequence.add_field(3,'Sequence',5378)
Condition = Table('Condition')
Condition.add_field(1,'Feature_',11558)
Condition.add_field(2,'Level',9474)
Condition.add_field(3,'Condition',7679)
AdminUISequence = Table('AdminUISequence')
AdminUISequence.add_field(1,'Action',11592)
AdminUISequence.add_field(2,'Condition',7679)
AdminUISequence.add_field(3,'Sequence',5378)
AdvtExecuteSequence = Table('AdvtExecuteSequence')
AdvtExecuteSequence.add_field(1,'Action',11592)
AdvtExecuteSequence.add_field(2,'Condition',7679)
AdvtExecuteSequence.add_field(3,'Sequence',5378)
AdvtUISequence = Table('AdvtUISequence')
AdvtUISequence.add_field(1,'Action',11592)
AdvtUISequence.add_field(2,'Condition',7679)
AdvtUISequence.add_field(3,'Sequence',5378)
AppId = Table('AppId')
AppId.add_field(1,'AppId',11558)
AppId.add_field(2,'RemoteServerName',7679)
AppId.add_field(3,'LocalService',7679)
AppId.add_field(4,'ServiceParameters',7679)
AppId.add_field(5,'DllSurrogate',7679)
AppId.add_field(6,'ActivateAtStorage',5378)
AppId.add_field(7,'RunAsInteractiveUser',5378)
AppSearch = Table('AppSearch')
AppSearch.add_field(1,'Property',11592)
AppSearch.add_field(2,'Signature_',11592)
Property = Table('Property')
Property.add_field(1,'Property',11592)
Property.add_field(2,'Value',3840)
BBControl = Table('BBControl')
BBControl.add_field(1,'Billboard_',11570)
BBControl.add_field(2,'BBControl',11570)
BBControl.add_field(3,'Type',3378)
BBControl.add_field(4,'X',1282)
BBControl.add_field(5,'Y',1282)
BBControl.add_field(6,'Width',1282)
BBControl.add_field(7,'Height',1282)
BBControl.add_field(8,'Attributes',4356)
BBControl.add_field(9,'Text',7986)
Billboard = Table('Billboard')
Billboard.add_field(1,'Billboard',11570)
Billboard.add_field(2,'Feature_',3366)
Billboard.add_field(3,'Action',7474)
Billboard.add_field(4,'Ordering',5378)
Feature = Table('Feature')
Feature.add_field(1,'Feature',11558)
Feature.add_field(2,'Feature_Parent',7462)
Feature.add_field(3,'Title',8000)
Feature.add_field(4,'Description',8191)
Feature.add_field(5,'Display',5378)
Feature.add_field(6,'Level',1282)
Feature.add_field(7,'Directory_',7496)
Feature.add_field(8,'Attributes',1282)
Binary = Table('Binary')
Binary.add_field(1,'Name',11592)
Binary.add_field(2,'Data',2304)
BindImage = Table('BindImage')
BindImage.add_field(1,'File_',11592)
BindImage.add_field(2,'Path',7679)
File = Table('File')
File.add_field(1,'File',11592)
File.add_field(2,'Component_',3400)
File.add_field(3,'FileName',4095)
File.add_field(4,'FileSize',260)
File.add_field(5,'Version',7496)
File.add_field(6,'Language',7444)
File.add_field(7,'Attributes',5378)
File.add_field(8,'Sequence',1282)
CCPSearch = Table('CCPSearch')
CCPSearch.add_field(1,'Signature_',11592)
CheckBox = Table('CheckBox')
CheckBox.add_field(1,'Property',11592)
CheckBox.add_field(2,'Value',7488)
Class = Table('Class')
Class.add_field(1,'CLSID',11558)
Class.add_field(2,'Context',11552)
Class.add_field(3,'Component_',11592)
Class.add_field(4,'ProgId_Default',7679)
Class.add_field(5,'Description',8191)
Class.add_field(6,'AppId_',7462)
Class.add_field(7,'FileTypeMask',7679)
Class.add_field(8,'Icon_',7496)
Class.add_field(9,'IconIndex',5378)
Class.add_field(10,'DefInprocHandler',7456)
Class.add_field(11,'Argument',7679)
Class.add_field(12,'Feature_',3366)
Class.add_field(13,'Attributes',5378)
Component = Table('Component')
Component.add_field(1,'Component',11592)
Component.add_field(2,'ComponentId',7462)
Component.add_field(3,'Directory_',3400)
Component.add_field(4,'Attributes',1282)
Component.add_field(5,'Condition',7679)
Component.add_field(6,'KeyPath',7496)
Icon = Table('Icon')
Icon.add_field(1,'Name',11592)
Icon.add_field(2,'Data',2304)
ProgId = Table('ProgId')
ProgId.add_field(1,'ProgId',11775)
ProgId.add_field(2,'ProgId_Parent',7679)
ProgId.add_field(3,'Class_',7462)
ProgId.add_field(4,'Description',8191)
ProgId.add_field(5,'Icon_',7496)
ProgId.add_field(6,'IconIndex',5378)
ComboBox = Table('ComboBox')
ComboBox.add_field(1,'Property',11592)
ComboBox.add_field(2,'Order',9474)
ComboBox.add_field(3,'Value',3392)
ComboBox.add_field(4,'Text',8000)
CompLocator = Table('CompLocator')
CompLocator.add_field(1,'Signature_',11592)
CompLocator.add_field(2,'ComponentId',3366)
CompLocator.add_field(3,'Type',5378)
Complus = Table('Complus')
Complus.add_field(1,'Component_',11592)
Complus.add_field(2,'ExpType',13570)
Directory = Table('Directory')
Directory.add_field(1,'Directory',11592)
Directory.add_field(2,'Directory_Parent',7496)
Directory.add_field(3,'DefaultDir',4095)
Control = Table('Control')
Control.add_field(1,'Dialog_',11592)
Control.add_field(2,'Control',11570)
Control.add_field(3,'Type',3348)
Control.add_field(4,'X',1282)
Control.add_field(5,'Y',1282)
Control.add_field(6,'Width',1282)
Control.add_field(7,'Height',1282)
Control.add_field(8,'Attributes',4356)
Control.add_field(9,'Property',7474)
Control.add_field(10,'Text',7936)
Control.add_field(11,'Control_Next',7474)
Control.add_field(12,'Help',7986)
# Schema definitions for the MSI UI and action tables: Dialog through Font.
# Auto-generated data — each Table gets its columns in ordinal order via
# add_field(index, name, type_code); do not reorder or renumber by hand.
Dialog = Table('Dialog')
Dialog.add_field(1,'Dialog',11592)
Dialog.add_field(2,'HCentering',1282)
Dialog.add_field(3,'VCentering',1282)
Dialog.add_field(4,'Width',1282)
Dialog.add_field(5,'Height',1282)
Dialog.add_field(6,'Attributes',4356)
Dialog.add_field(7,'Title',8064)
Dialog.add_field(8,'Control_First',3378)
Dialog.add_field(9,'Control_Default',7474)
Dialog.add_field(10,'Control_Cancel',7474)
ControlCondition = Table('ControlCondition')
ControlCondition.add_field(1,'Dialog_',11592)
ControlCondition.add_field(2,'Control_',11570)
ControlCondition.add_field(3,'Action',11570)
ControlCondition.add_field(4,'Condition',11775)
ControlEvent = Table('ControlEvent')
ControlEvent.add_field(1,'Dialog_',11592)
ControlEvent.add_field(2,'Control_',11570)
ControlEvent.add_field(3,'Event',11570)
ControlEvent.add_field(4,'Argument',11775)
ControlEvent.add_field(5,'Condition',15871)
ControlEvent.add_field(6,'Ordering',5378)
CreateFolder = Table('CreateFolder')
CreateFolder.add_field(1,'Directory_',11592)
CreateFolder.add_field(2,'Component_',11592)
CustomAction = Table('CustomAction')
CustomAction.add_field(1,'Action',11592)
CustomAction.add_field(2,'Type',1282)
CustomAction.add_field(3,'Source',7496)
CustomAction.add_field(4,'Target',7679)
DrLocator = Table('DrLocator')
DrLocator.add_field(1,'Signature_',11592)
DrLocator.add_field(2,'Parent',15688)
DrLocator.add_field(3,'Path',15871)
DrLocator.add_field(4,'Depth',5378)
DuplicateFile = Table('DuplicateFile')
DuplicateFile.add_field(1,'FileKey',11592)
DuplicateFile.add_field(2,'Component_',3400)
DuplicateFile.add_field(3,'File_',3400)
DuplicateFile.add_field(4,'DestName',8191)
DuplicateFile.add_field(5,'DestFolder',7496)
Environment = Table('Environment')
Environment.add_field(1,'Environment',11592)
Environment.add_field(2,'Name',4095)
Environment.add_field(3,'Value',8191)
Environment.add_field(4,'Component_',3400)
Error = Table('Error')
Error.add_field(1,'Error',9474)
Error.add_field(2,'Message',7936)
EventMapping = Table('EventMapping')
EventMapping.add_field(1,'Dialog_',11592)
EventMapping.add_field(2,'Control_',11570)
EventMapping.add_field(3,'Event',11570)
EventMapping.add_field(4,'Attribute',3378)
Extension = Table('Extension')
Extension.add_field(1,'Extension',11775)
Extension.add_field(2,'Component_',11592)
Extension.add_field(3,'ProgId_',7679)
Extension.add_field(4,'MIME_',7488)
Extension.add_field(5,'Feature_',3366)
MIME = Table('MIME')
MIME.add_field(1,'ContentType',11584)
MIME.add_field(2,'Extension_',3583)
MIME.add_field(3,'CLSID',7462)
FeatureComponents = Table('FeatureComponents')
FeatureComponents.add_field(1,'Feature_',11558)
FeatureComponents.add_field(2,'Component_',11592)
FileSFPCatalog = Table('FileSFPCatalog')
FileSFPCatalog.add_field(1,'File_',11592)
FileSFPCatalog.add_field(2,'SFPCatalog_',11775)
SFPCatalog = Table('SFPCatalog')
SFPCatalog.add_field(1,'SFPCatalog',11775)
SFPCatalog.add_field(2,'Catalog',2304)
SFPCatalog.add_field(3,'Dependency',7424)
Font = Table('Font')
Font.add_field(1,'File_',11592)
Font.add_field(2,'FontTitle',7552)
# Schema definitions for the MSI file-search, sequence, and ODBC tables:
# IniFile through ODBCTranslator.  Auto-generated data — columns are added
# in ordinal order via add_field(index, name, type_code).
IniFile = Table('IniFile')
IniFile.add_field(1,'IniFile',11592)
IniFile.add_field(2,'FileName',4095)
IniFile.add_field(3,'DirProperty',7496)
IniFile.add_field(4,'Section',3936)
IniFile.add_field(5,'Key',3968)
IniFile.add_field(6,'Value',4095)
IniFile.add_field(7,'Action',1282)
IniFile.add_field(8,'Component_',3400)
IniLocator = Table('IniLocator')
IniLocator.add_field(1,'Signature_',11592)
IniLocator.add_field(2,'FileName',3583)
IniLocator.add_field(3,'Section',3424)
IniLocator.add_field(4,'Key',3456)
IniLocator.add_field(5,'Field',5378)
IniLocator.add_field(6,'Type',5378)
InstallExecuteSequence = Table('InstallExecuteSequence')
InstallExecuteSequence.add_field(1,'Action',11592)
InstallExecuteSequence.add_field(2,'Condition',7679)
InstallExecuteSequence.add_field(3,'Sequence',5378)
InstallUISequence = Table('InstallUISequence')
InstallUISequence.add_field(1,'Action',11592)
InstallUISequence.add_field(2,'Condition',7679)
InstallUISequence.add_field(3,'Sequence',5378)
IsolatedComponent = Table('IsolatedComponent')
IsolatedComponent.add_field(1,'Component_Shared',11592)
IsolatedComponent.add_field(2,'Component_Application',11592)
LaunchCondition = Table('LaunchCondition')
LaunchCondition.add_field(1,'Condition',11775)
LaunchCondition.add_field(2,'Description',4095)
ListBox = Table('ListBox')
ListBox.add_field(1,'Property',11592)
ListBox.add_field(2,'Order',9474)
ListBox.add_field(3,'Value',3392)
ListBox.add_field(4,'Text',8000)
ListView = Table('ListView')
ListView.add_field(1,'Property',11592)
ListView.add_field(2,'Order',9474)
ListView.add_field(3,'Value',3392)
ListView.add_field(4,'Text',8000)
ListView.add_field(5,'Binary_',7496)
LockPermissions = Table('LockPermissions')
LockPermissions.add_field(1,'LockObject',11592)
LockPermissions.add_field(2,'Table',11552)
LockPermissions.add_field(3,'Domain',15871)
LockPermissions.add_field(4,'User',11775)
LockPermissions.add_field(5,'Permission',4356)
Media = Table('Media')
Media.add_field(1,'DiskId',9474)
Media.add_field(2,'LastSequence',1282)
Media.add_field(3,'DiskPrompt',8000)
Media.add_field(4,'Cabinet',7679)
Media.add_field(5,'VolumeLabel',7456)
Media.add_field(6,'Source',7496)
MoveFile = Table('MoveFile')
MoveFile.add_field(1,'FileKey',11592)
MoveFile.add_field(2,'Component_',3400)
MoveFile.add_field(3,'SourceName',8191)
MoveFile.add_field(4,'DestName',8191)
MoveFile.add_field(5,'SourceFolder',7496)
MoveFile.add_field(6,'DestFolder',3400)
MoveFile.add_field(7,'Options',1282)
MsiAssembly = Table('MsiAssembly')
MsiAssembly.add_field(1,'Component_',11592)
MsiAssembly.add_field(2,'Feature_',3366)
MsiAssembly.add_field(3,'File_Manifest',7496)
MsiAssembly.add_field(4,'File_Application',7496)
MsiAssembly.add_field(5,'Attributes',5378)
MsiAssemblyName = Table('MsiAssemblyName')
MsiAssemblyName.add_field(1,'Component_',11592)
MsiAssemblyName.add_field(2,'Name',11775)
MsiAssemblyName.add_field(3,'Value',3583)
MsiDigitalCertificate = Table('MsiDigitalCertificate')
MsiDigitalCertificate.add_field(1,'DigitalCertificate',11592)
MsiDigitalCertificate.add_field(2,'CertData',2304)
MsiDigitalSignature = Table('MsiDigitalSignature')
MsiDigitalSignature.add_field(1,'Table',11552)
MsiDigitalSignature.add_field(2,'SignObject',11592)
MsiDigitalSignature.add_field(3,'DigitalCertificate_',3400)
MsiDigitalSignature.add_field(4,'Hash',6400)
MsiFileHash = Table('MsiFileHash')
MsiFileHash.add_field(1,'File_',11592)
MsiFileHash.add_field(2,'Options',1282)
MsiFileHash.add_field(3,'HashPart1',260)
MsiFileHash.add_field(4,'HashPart2',260)
MsiFileHash.add_field(5,'HashPart3',260)
MsiFileHash.add_field(6,'HashPart4',260)
MsiPatchHeaders = Table('MsiPatchHeaders')
MsiPatchHeaders.add_field(1,'StreamRef',11558)
MsiPatchHeaders.add_field(2,'Header',2304)
ODBCAttribute = Table('ODBCAttribute')
ODBCAttribute.add_field(1,'Driver_',11592)
ODBCAttribute.add_field(2,'Attribute',11560)
ODBCAttribute.add_field(3,'Value',8191)
ODBCDriver = Table('ODBCDriver')
ODBCDriver.add_field(1,'Driver',11592)
ODBCDriver.add_field(2,'Component_',3400)
ODBCDriver.add_field(3,'Description',3583)
ODBCDriver.add_field(4,'File_',3400)
ODBCDriver.add_field(5,'File_Setup',7496)
ODBCDataSource = Table('ODBCDataSource')
ODBCDataSource.add_field(1,'DataSource',11592)
ODBCDataSource.add_field(2,'Component_',3400)
ODBCDataSource.add_field(3,'Description',3583)
ODBCDataSource.add_field(4,'DriverDescription',3583)
ODBCDataSource.add_field(5,'Registration',1282)
ODBCSourceAttribute = Table('ODBCSourceAttribute')
ODBCSourceAttribute.add_field(1,'DataSource_',11592)
ODBCSourceAttribute.add_field(2,'Attribute',11552)
ODBCSourceAttribute.add_field(3,'Value',8191)
ODBCTranslator = Table('ODBCTranslator')
ODBCTranslator.add_field(1,'Translator',11592)
ODBCTranslator.add_field(2,'Component_',3400)
ODBCTranslator.add_field(3,'Description',3583)
ODBCTranslator.add_field(4,'File_',3400)
ODBCTranslator.add_field(5,'File_Setup',7496)
# Schema definitions for the MSI patching, registry, and service tables:
# Patch through ServiceInstall.  Auto-generated data — columns are added
# in ordinal order via add_field(index, name, type_code).
Patch = Table('Patch')
Patch.add_field(1,'File_',11592)
Patch.add_field(2,'Sequence',9474)
Patch.add_field(3,'PatchSize',260)
Patch.add_field(4,'Attributes',1282)
Patch.add_field(5,'Header',6400)
Patch.add_field(6,'StreamRef_',7462)
PatchPackage = Table('PatchPackage')
PatchPackage.add_field(1,'PatchId',11558)
PatchPackage.add_field(2,'Media_',1282)
PublishComponent = Table('PublishComponent')
PublishComponent.add_field(1,'ComponentId',11558)
PublishComponent.add_field(2,'Qualifier',11775)
PublishComponent.add_field(3,'Component_',11592)
PublishComponent.add_field(4,'AppData',8191)
PublishComponent.add_field(5,'Feature_',3366)
RadioButton = Table('RadioButton')
RadioButton.add_field(1,'Property',11592)
RadioButton.add_field(2,'Order',9474)
RadioButton.add_field(3,'Value',3392)
RadioButton.add_field(4,'X',1282)
RadioButton.add_field(5,'Y',1282)
RadioButton.add_field(6,'Width',1282)
RadioButton.add_field(7,'Height',1282)
RadioButton.add_field(8,'Text',8000)
RadioButton.add_field(9,'Help',7986)
Registry = Table('Registry')
Registry.add_field(1,'Registry',11592)
Registry.add_field(2,'Root',1282)
Registry.add_field(3,'Key',4095)
Registry.add_field(4,'Name',8191)
Registry.add_field(5,'Value',7936)
Registry.add_field(6,'Component_',3400)
RegLocator = Table('RegLocator')
RegLocator.add_field(1,'Signature_',11592)
RegLocator.add_field(2,'Root',1282)
RegLocator.add_field(3,'Key',3583)
RegLocator.add_field(4,'Name',7679)
RegLocator.add_field(5,'Type',5378)
RemoveFile = Table('RemoveFile')
RemoveFile.add_field(1,'FileKey',11592)
RemoveFile.add_field(2,'Component_',3400)
RemoveFile.add_field(3,'FileName',8191)
RemoveFile.add_field(4,'DirProperty',3400)
RemoveFile.add_field(5,'InstallMode',1282)
RemoveIniFile = Table('RemoveIniFile')
RemoveIniFile.add_field(1,'RemoveIniFile',11592)
RemoveIniFile.add_field(2,'FileName',4095)
RemoveIniFile.add_field(3,'DirProperty',7496)
RemoveIniFile.add_field(4,'Section',3936)
RemoveIniFile.add_field(5,'Key',3968)
RemoveIniFile.add_field(6,'Value',8191)
RemoveIniFile.add_field(7,'Action',1282)
RemoveIniFile.add_field(8,'Component_',3400)
RemoveRegistry = Table('RemoveRegistry')
RemoveRegistry.add_field(1,'RemoveRegistry',11592)
RemoveRegistry.add_field(2,'Root',1282)
RemoveRegistry.add_field(3,'Key',4095)
RemoveRegistry.add_field(4,'Name',8191)
RemoveRegistry.add_field(5,'Component_',3400)
ReserveCost = Table('ReserveCost')
ReserveCost.add_field(1,'ReserveKey',11592)
ReserveCost.add_field(2,'Component_',3400)
ReserveCost.add_field(3,'ReserveFolder',7496)
ReserveCost.add_field(4,'ReserveLocal',260)
ReserveCost.add_field(5,'ReserveSource',260)
SelfReg = Table('SelfReg')
SelfReg.add_field(1,'File_',11592)
SelfReg.add_field(2,'Cost',5378)
ServiceControl = Table('ServiceControl')
ServiceControl.add_field(1,'ServiceControl',11592)
ServiceControl.add_field(2,'Name',4095)
ServiceControl.add_field(3,'Event',1282)
ServiceControl.add_field(4,'Arguments',8191)
ServiceControl.add_field(5,'Wait',5378)
ServiceControl.add_field(6,'Component_',3400)
ServiceInstall = Table('ServiceInstall')
ServiceInstall.add_field(1,'ServiceInstall',11592)
ServiceInstall.add_field(2,'Name',3583)
ServiceInstall.add_field(3,'DisplayName',8191)
ServiceInstall.add_field(4,'ServiceType',260)
ServiceInstall.add_field(5,'StartType',260)
ServiceInstall.add_field(6,'ErrorControl',260)
ServiceInstall.add_field(7,'LoadOrderGroup',7679)
ServiceInstall.add_field(8,'Dependencies',7679)
ServiceInstall.add_field(9,'StartName',7679)
ServiceInstall.add_field(10,'Password',7679)
ServiceInstall.add_field(11,'Arguments',7679)
ServiceInstall.add_field(12,'Component_',3400)
ServiceInstall.add_field(13,'Description',8191)
# Schema definitions for the remaining MSI tables: Shortcut through Verb.
# Auto-generated data — columns are added in ordinal order via
# add_field(index, name, type_code).
Shortcut = Table('Shortcut')
Shortcut.add_field(1,'Shortcut',11592)
Shortcut.add_field(2,'Directory_',3400)
Shortcut.add_field(3,'Name',3968)
Shortcut.add_field(4,'Component_',3400)
Shortcut.add_field(5,'Target',3400)
Shortcut.add_field(6,'Arguments',7679)
Shortcut.add_field(7,'Description',8191)
Shortcut.add_field(8,'Hotkey',5378)
Shortcut.add_field(9,'Icon_',7496)
Shortcut.add_field(10,'IconIndex',5378)
Shortcut.add_field(11,'ShowCmd',5378)
Shortcut.add_field(12,'WkDir',7496)
Signature = Table('Signature')
Signature.add_field(1,'Signature',11592)
Signature.add_field(2,'FileName',3583)
Signature.add_field(3,'MinVersion',7444)
Signature.add_field(4,'MaxVersion',7444)
Signature.add_field(5,'MinSize',4356)
Signature.add_field(6,'MaxSize',4356)
Signature.add_field(7,'MinDate',4356)
Signature.add_field(8,'MaxDate',4356)
Signature.add_field(9,'Languages',7679)
TextStyle = Table('TextStyle')
TextStyle.add_field(1,'TextStyle',11592)
TextStyle.add_field(2,'FaceName',3360)
TextStyle.add_field(3,'Size',1282)
TextStyle.add_field(4,'Color',4356)
TextStyle.add_field(5,'StyleBits',5378)
TypeLib = Table('TypeLib')
TypeLib.add_field(1,'LibID',11558)
TypeLib.add_field(2,'Language',9474)
TypeLib.add_field(3,'Component_',11592)
TypeLib.add_field(4,'Version',4356)
TypeLib.add_field(5,'Description',8064)
TypeLib.add_field(6,'Directory_',7496)
TypeLib.add_field(7,'Feature_',3366)
TypeLib.add_field(8,'Cost',4356)
UIText = Table('UIText')
UIText.add_field(1,'Key',11592)
UIText.add_field(2,'Text',8191)
Upgrade = Table('Upgrade')
Upgrade.add_field(1,'UpgradeCode',11558)
Upgrade.add_field(2,'VersionMin',15636)
Upgrade.add_field(3,'VersionMax',15636)
Upgrade.add_field(4,'Language',15871)
Upgrade.add_field(5,'Attributes',8452)
Upgrade.add_field(6,'Remove',7679)
Upgrade.add_field(7,'ActionProperty',3400)
Verb = Table('Verb')
Verb.add_field(1,'Extension_',11775)
Verb.add_field(2,'Verb',11552)
Verb.add_field(3,'Sequence',5378)
Verb.add_field(4,'Command',8191)
Verb.add_field(5,'Argument',8191)
# Registry of every Table object defined in this module, in schema order.
tables = [
    _Validation, ActionText, AdminExecuteSequence, Condition,
    AdminUISequence, AdvtExecuteSequence, AdvtUISequence, AppId, AppSearch,
    Property, BBControl, Billboard, Feature, Binary, BindImage, File,
    CCPSearch, CheckBox, Class, Component, Icon, ProgId, ComboBox,
    CompLocator, Complus, Directory, Control, Dialog, ControlCondition,
    ControlEvent, CreateFolder, CustomAction, DrLocator, DuplicateFile,
    Environment, Error, EventMapping, Extension, MIME, FeatureComponents,
    FileSFPCatalog, SFPCatalog, Font, IniFile, IniLocator,
    InstallExecuteSequence, InstallUISequence, IsolatedComponent,
    LaunchCondition, ListBox, ListView, LockPermissions, Media, MoveFile,
    MsiAssembly, MsiAssemblyName, MsiDigitalCertificate,
    MsiDigitalSignature, MsiFileHash, MsiPatchHeaders, ODBCAttribute,
    ODBCDriver, ODBCDataSource, ODBCSourceAttribute, ODBCTranslator,
    Patch, PatchPackage, PublishComponent, RadioButton, Registry,
    RegLocator, RemoveFile, RemoveIniFile, RemoveRegistry, ReserveCost,
    SelfReg, ServiceControl, ServiceInstall, Shortcut, Signature,
    TextStyle, TypeLib, UIText, Upgrade, Verb,
]
_Validation_records = [
(u'_Validation',u'Table',u'N',None, None, None, None, u'Identifier',None, u'Name of table',),
(u'_Validation',u'Column',u'N',None, None, None, None, u'Identifier',None, u'Name of column',),
(u'_Validation',u'Description',u'Y',None, None, None, None, u'Text',None, u'Description of column',),
(u'_Validation',u'Set',u'Y',None, None, None, None, u'Text',None, u'Set of values that are permitted',),
(u'_Validation',u'Category',u'Y',None, None, None, None, None, u'Text;Formatted;Template;Condition;Guid;Path;Version;Language;Identifier;Binary;UpperCase;LowerCase;Filename;Paths;AnyPath;WildCardFilename;RegPath;KeyFormatted;CustomSource;Property;Cabinet;Shortcut;URL',u'String category',),
(u'_Validation',u'KeyColumn',u'Y',1,32,None, None, None, None, u'Column to which foreign key connects',),
(u'_Validation',u'KeyTable',u'Y',None, None, None, None, u'Identifier',None, u'For foreign key, Name of table to which data must link',),
(u'_Validation',u'MaxValue',u'Y',-2147483647,2147483647,None, None, None, None, u'Maximum value allowed',),
(u'_Validation',u'MinValue',u'Y',-2147483647,2147483647,None, None, None, None, u'Minimum value allowed',),
(u'_Validation',u'Nullable',u'N',None, None, None, None, None, u'Y;N;@',u'Whether the column is nullable',),
(u'ActionText',u'Description',u'Y',None, None, None, None, u'Text',None, u'Localized description displayed in progress dialog and log when action is executing.',),
(u'ActionText',u'Action',u'N',None, None, None, None, u'Identifier',None, u'Name of action to be described.',),
(u'ActionText',u'Template',u'Y',None, None, None, None, u'Template',None, u'Optional localized format template used to format action data records for display during action execution.',),
(u'AdminExecuteSequence',u'Action',u'N',None, None, None, None, u'Identifier',None, u'Name of action to invoke, either in the engine or the handler DLL.',),
(u'AdminExecuteSequence',u'Condition',u'Y',None, None, None, None, u'Condition',None, u'Optional expression which skips the action if evaluates to expFalse.If the expression syntax is invalid, the engine will terminate, returning iesBadActionData.',),
(u'AdminExecuteSequence',u'Sequence',u'Y',-4,32767,None, None, None, None, u'Number that determines the sort order in which the actions are to be executed. Leave blank to suppress action.',),
(u'Condition',u'Condition',u'Y',None, None, None, None, u'Condition',None, u'Expression evaluated to determine if Level in the Feature table is to change.',),
(u'Condition',u'Feature_',u'N',None, None, u'Feature',1,u'Identifier',None, u'Reference to a Feature entry in Feature table.',),
(u'Condition',u'Level',u'N',0,32767,None, None, None, None, u'New selection Level to set in Feature table if Condition evaluates to TRUE.',),
(u'AdminUISequence',u'Action',u'N',None, None, None, None, u'Identifier',None, u'Name of action to invoke, either in the engine or the handler DLL.',),
(u'AdminUISequence',u'Condition',u'Y',None, None, None, None, u'Condition',None, u'Optional expression which skips the action if evaluates to expFalse.If the expression syntax is invalid, the engine will terminate, returning iesBadActionData.',),
(u'AdminUISequence',u'Sequence',u'Y',-4,32767,None, None, None, None, u'Number that determines the sort order in which the actions are to be executed. Leave blank to suppress action.',),
(u'AdvtExecuteSequence',u'Action',u'N',None, None, None, None, u'Identifier',None, u'Name of action to invoke, either in the engine or the handler DLL.',),
(u'AdvtExecuteSequence',u'Condition',u'Y',None, None, None, None, u'Condition',None, u'Optional expression which skips the action if evaluates to expFalse.If the expression syntax is invalid, the engine will terminate, returning iesBadActionData.',),
(u'AdvtExecuteSequence',u'Sequence',u'Y',-4,32767,None, None, None, None, u'Number that determines the sort order in which the actions are to be executed. Leave blank to suppress action.',),
(u'AdvtUISequence',u'Action',u'N',None, None, None, None, u'Identifier',None, u'Name of action to invoke, either in the engine or the handler DLL.',),
(u'AdvtUISequence',u'Condition',u'Y',None, None, None, None, u'Condition',None, u'Optional expression which skips the action if evaluates to expFalse.If the expression syntax is invalid, the engine will terminate, returning iesBadActionData.',),
(u'AdvtUISequence',u'Sequence',u'Y',-4,32767,None, None, None, None, u'Number that determines the sort order in which the actions are to be executed. Leave blank to suppress action.',),
(u'AppId',u'AppId',u'N',None, None, None, None, u'Guid',None, None, ),
(u'AppId',u'ActivateAtStorage',u'Y',0,1,None, None, None, None, None, ),
(u'AppId',u'DllSurrogate',u'Y',None, None, None, None, u'Text',None, None, ),
(u'AppId',u'LocalService',u'Y',None, None, None, None, u'Text',None, None, ),
(u'AppId',u'RemoteServerName',u'Y',None, None, None, None, u'Formatted',None, None, ),
(u'AppId',u'RunAsInteractiveUser',u'Y',0,1,None, None, None, None, None, ),
(u'AppId',u'ServiceParameters',u'Y',None, None, None, None, u'Text',None, None, ),
(u'AppSearch',u'Property',u'N',None, None, None, None, u'Identifier',None, u'The property associated with a Signature',),
(u'AppSearch',u'Signature_',u'N',None, None, u'Signature;RegLocator;IniLocator;DrLocator;CompLocator',1,u'Identifier',None, u'The Signature_ represents a unique file signature and is also the foreign key in the Signature, RegLocator, IniLocator, CompLocator and the DrLocator tables.',),
(u'Property',u'Property',u'N',None, None, None, None, u'Identifier',None, u'Name of property, uppercase if settable by launcher or loader.',),
(u'Property',u'Value',u'N',None, None, None, None, u'Text',None, u'String value for property. Never null or empty.',),
(u'BBControl',u'Type',u'N',None, None, None, None, u'Identifier',None, u'The type of the control.',),
(u'BBControl',u'Y',u'N',0,32767,None, None, None, None, u'Vertical coordinate of the upper left corner of the bounding rectangle of the control.',),
(u'BBControl',u'Text',u'Y',None, None, None, None, u'Text',None, u'A string used to set the initial text contained within a control (if appropriate).',),
(u'BBControl',u'BBControl',u'N',None, None, None, None, u'Identifier',None, u'Name of the control. This name must be unique within a billboard, but can repeat on different billboard.',),
(u'BBControl',u'Attributes',u'Y',0,2147483647,None, None, None, None, u'A 32-bit word that specifies the attribute flags to be applied to this control.',),
(u'BBControl',u'Billboard_',u'N',None, None, u'Billboard',1,u'Identifier',None, u'External key to the Billboard table, name of the billboard.',),
(u'BBControl',u'Height',u'N',0,32767,None, None, None, None, u'Height of the bounding rectangle of the control.',),
(u'BBControl',u'Width',u'N',0,32767,None, None, None, None, u'Width of the bounding rectangle of the control.',),
(u'BBControl',u'X',u'N',0,32767,None, None, None, None, u'Horizontal coordinate of the upper left corner of the bounding rectangle of the control.',),
(u'Billboard',u'Action',u'Y',None, None, None, None, u'Identifier',None, u'The name of an action. The billboard is displayed during the progress messages received from this action.',),
(u'Billboard',u'Billboard',u'N',None, None, None, None, u'Identifier',None, u'Name of the billboard.',),
(u'Billboard',u'Feature_',u'N',None, None, u'Feature',1,u'Identifier',None, u'An external key to the Feature Table. The billboard is shown only if this feature is being installed.',),
(u'Billboard',u'Ordering',u'Y',0,32767,None, None, None, None, u'A positive integer. If there is more than one billboard corresponding to an action they will be shown in the order defined by this column.',),
(u'Feature',u'Description',u'Y',None, None, None, None, u'Text',None, u'Longer descriptive text describing a visible feature item.',),
(u'Feature',u'Attributes',u'N',None, None, None, None, None, u'0;1;2;4;5;6;8;9;10;16;17;18;20;21;22;24;25;26;32;33;34;36;37;38;48;49;50;52;53;54',u'Feature attributes',),
(u'Feature',u'Feature',u'N',None, None, None, None, u'Identifier',None, u'Primary key used to identify a particular feature record.',),
(u'Feature',u'Directory_',u'Y',None, None, u'Directory',1,u'UpperCase',None, u'The name of the Directory that can be configured by the UI. A non-null value will enable the browse button.',),
(u'Feature',u'Level',u'N',0,32767,None, None, None, None, u'The install level at which record will be initially selected. An install level of 0 will disable an item and prevent its display.',),
(u'Feature',u'Title',u'Y',None, None, None, None, u'Text',None, u'Short text identifying a visible feature item.',),
(u'Feature',u'Display',u'Y',0,32767,None, None, None, None, u'Numeric sort order, used to force a specific display ordering.',),
(u'Feature',u'Feature_Parent',u'Y',None, None, u'Feature',1,u'Identifier',None, u'Optional key of a parent record in the same table. If the parent is not selected, then the record will not be installed. Null indicates a root item.',),
(u'Binary',u'Name',u'N',None, None, None, None, u'Identifier',None, u'Unique key identifying the binary data.',),
(u'Binary',u'Data',u'N',None, None, None, None, u'Binary',None, u'The unformatted binary data.',),
(u'BindImage',u'File_',u'N',None, None, u'File',1,u'Identifier',None, u'The index into the File table. This must be an executable file.',),
(u'BindImage',u'Path',u'Y',None, None, None, None, u'Paths',None, u'A list of ; delimited paths that represent the paths to be searched for the import DLLS. The list is usually a list of properties each enclosed within square brackets [] .',),
(u'File',u'Sequence',u'N',1,32767,None, None, None, None, u'Sequence with respect to the media images; order must track cabinet order.',),
(u'File',u'Attributes',u'Y',0,32767,None, None, None, None, u'Integer containing bit flags representing file attributes (with the decimal value of each bit position in parentheses)',),
(u'File',u'File',u'N',None, None, None, None, u'Identifier',None, u'Primary key, non-localized token, must match identifier in cabinet. For uncompressed files, this field is ignored.',),
(u'File',u'Component_',u'N',None, None, u'Component',1,u'Identifier',None, u'Foreign key referencing Component that controls the file.',),
(u'File',u'FileName',u'N',None, None, None, None, u'Filename',None, u'File name used for installation, may be localized. This may contain a "short name|long name" pair.',),
(u'File',u'FileSize',u'N',0,2147483647,None, None, None, None, u'Size of file in bytes (long integer).',),
(u'File',u'Language',u'Y',None, None, None, None, u'Language',None, u'List of decimal language Ids, comma-separated if more than one.',),
(u'File',u'Version',u'Y',None, None, u'File',1,u'Version',None, u'Version string for versioned files; Blank for unversioned files.',),
(u'CCPSearch',u'Signature_',u'N',None, None, u'Signature;RegLocator;IniLocator;DrLocator;CompLocator',1,u'Identifier',None, u'The Signature_ represents a unique file signature and is also the foreign key in the Signature, RegLocator, IniLocator, CompLocator and the DrLocator tables.',),
(u'CheckBox',u'Property',u'N',None, None, None, None, u'Identifier',None, u'A named property to be tied to the item.',),
(u'CheckBox',u'Value',u'Y',None, None, None, None, u'Formatted',None, u'The value string associated with the item.',),
(u'Class',u'Description',u'Y',None, None, None, None, u'Text',None, u'Localized description for the Class.',),
(u'Class',u'Attributes',u'Y',None, 32767,None, None, None, None, u'Class registration attributes.',),
(u'Class',u'Feature_',u'N',None, None, u'Feature',1,u'Identifier',None, u'Required foreign key into the Feature Table, specifying the feature to validate or install in order for the CLSID factory to be operational.',),
(u'Class',u'AppId_',u'Y',None, None, u'AppId',1,u'Guid',None, u'Optional AppID containing DCOM information for associated application (string GUID).',),
(u'Class',u'Argument',u'Y',None, None, None, None, u'Formatted',None, u'optional argument for LocalServers.',),
(u'Class',u'CLSID',u'N',None, None, None, None, u'Guid',None, u'The CLSID of an OLE factory.',),
(u'Class',u'Component_',u'N',None, None, u'Component',1,u'Identifier',None, u'Required foreign key into the Component Table, specifying the component for which to return a path when called through LocateComponent.',),
(u'Class',u'Context',u'N',None, None, None, None, u'Identifier',None, u'The numeric server context for this server. CLSCTX_xxxx',),
(u'Class',u'DefInprocHandler',u'Y',None, None, None, None, u'Filename',u'1;2;3',u'Optional default inproc handler. Only optionally provided if Context=CLSCTX_LOCAL_SERVER. Typically "ole32.dll" or "mapi32.dll"',),
(u'Class',u'FileTypeMask',u'Y',None, None, None, None, u'Text',None, u'Optional string containing information for the HKCRthis CLSID) key. If multiple patterns exist, they must be delimited by a semicolon, and numeric subkeys will be generated: 0,1,2...',),
(u'Class',u'Icon_',u'Y',None, None, u'Icon',1,u'Identifier',None, u'Optional foreign key into the Icon Table, specifying the icon file associated with this CLSID. Will be written under the DefaultIcon key.',),
(u'Class',u'IconIndex',u'Y',-32767,32767,None, None, None, None, u'Optional icon index.',),
(u'Class',u'ProgId_Default',u'Y',None, None, u'ProgId',1,u'Text',None, u'Optional ProgId associated with this CLSID.',),
(u'Component',u'Condition',u'Y',None, None, None, None, u'Condition',None, u"A conditional statement that will disable this component if the specified condition evaluates to the 'True' state. If a component is disabled, it will not be installed, regardless of the 'Action' state associated with the component.",),
(u'Component',u'Attributes',u'N',None, None, None, None, None, None, u'Remote execution option, one of irsEnum',),
(u'Component',u'Component',u'N',None, None, None, None, u'Identifier',None, u'Primary key used to identify a particular component record.',),
(u'Component',u'ComponentId',u'Y',None, None, None, None, u'Guid',None, u'A string GUID unique to this component, version, and language.',),
(u'Component',u'Directory_',u'N',None, None, u'Directory',1,u'Identifier',None, u'Required key of a Directory table record. This is actually a property name whose value contains the actual path, set either by the AppSearch action or with the default setting obtained from the Directory table.',),
(u'Component',u'KeyPath',u'Y',None, None, u'File;Registry;ODBCDataSource',1,u'Identifier',None, u'Either the primary key into the File table, Registry table, or ODBCDataSource table. This extract path is stored when the component is installed, and is used to detect the presence of the component and to return the path to it.',),
(u'Icon',u'Name',u'N',None, None, None, None, u'Identifier',None, u'Primary key. Name of the icon file.',),
(u'Icon',u'Data',u'N',None, None, None, None, u'Binary',None, u'Binary stream. The binary icon data in PE (.DLL or .EXE) or icon (.ICO) format.',),
(u'ProgId',u'Description',u'Y',None, None, None, None, u'Text',None, u'Localized description for the Program identifier.',),
(u'ProgId',u'Icon_',u'Y',None, None, u'Icon',1,u'Identifier',None, u'Optional foreign key into the Icon Table, specifying the icon file associated with this ProgId. Will be written under the DefaultIcon key.',),
(u'ProgId',u'IconIndex',u'Y',-32767,32767,None, None, None, None, u'Optional icon index.',),
(u'ProgId',u'ProgId',u'N',None, None, None, None, u'Text',None, u'The Program Identifier. Primary key.',),
(u'ProgId',u'Class_',u'Y',None, None, u'Class',1,u'Guid',None, u'The CLSID of an OLE factory corresponding to the ProgId.',),
(u'ProgId',u'ProgId_Parent',u'Y',None, None, u'ProgId',1,u'Text',None, u'The Parent Program Identifier. If specified, the ProgId column becomes a version independent prog id.',),
(u'ComboBox',u'Text',u'Y',None, None, None, None, u'Formatted',None, u'The visible text to be assigned to the item. Optional. If this entry or the entire column is missing, the text is the same as the value.',),
(u'ComboBox',u'Property',u'N',None, None, None, None, u'Identifier',None, u'A named property to be tied to this item. All the items tied to the same property become part of the same combobox.',),
(u'ComboBox',u'Value',u'N',None, None, None, None, u'Formatted',None, u'The value string associated with this item. Selecting the line will set the associated property to this value.',),
(u'ComboBox',u'Order',u'N',1,32767,None, None, None, None, u'A positive integer used to determine the ordering of the items within one list.\tThe integers do not have to be consecutive.',),
(u'CompLocator',u'Type',u'Y',0,1,None, None, None, None, u'A boolean value that determines if the registry value is a filename or a directory location.',),
(u'CompLocator',u'Signature_',u'N',None, None, None, None, u'Identifier',None, u'The table key. The Signature_ represents a unique file signature and is also the foreign key in the Signature table.',),
(u'CompLocator',u'ComponentId',u'N',None, None, None, None, u'Guid',None, u'A string GUID unique to this component, version, and language.',),
(u'Complus',u'Component_',u'N',None, None, u'Component',1,u'Identifier',None, u'Foreign key referencing Component that controls the ComPlus component.',),
(u'Complus',u'ExpType',u'Y',0,32767,None, None, None, None, u'ComPlus component attributes.',),
(u'Directory',u'Directory',u'N',None, None, None, None, u'Identifier',None, u'Unique identifier for directory entry, primary key. If a property by this name is defined, it contains the full path to the directory.',),
(u'Directory',u'DefaultDir',u'N',None, None, None, None, u'DefaultDir',None, u"The default sub-path under parent's path.",),
(u'Directory',u'Directory_Parent',u'Y',None, None, u'Directory',1,u'Identifier',None, u'Reference to the entry in this table specifying the default parent directory. A record parented to itself or with a Null parent represents a root of the install tree.',),
(u'Control',u'Type',u'N',None, None, None, None, u'Identifier',None, u'The type of the control.',),
(u'Control',u'Y',u'N',0,32767,None, None, None, None, u'Vertical coordinate of the upper left corner of the bounding rectangle of the control.',),
(u'Control',u'Text',u'Y',None, None, None, None, u'Formatted',None, u'A string used to set the initial text contained within a control (if appropriate).',),
(u'Control',u'Property',u'Y',None, None, None, None, u'Identifier',None, u'The name of a defined property to be linked to this control. ',),
(u'Control',u'Attributes',u'Y',0,2147483647,None, None, None, None, u'A 32-bit word that specifies the attribute flags to be applied to this control.',),
(u'Control',u'Height',u'N',0,32767,None, None, None, None, u'Height of the bounding rectangle of the control.',),
(u'Control',u'Width',u'N',0,32767,None, None, None, None, u'Width of the bounding rectangle of the control.',),
(u'Control',u'X',u'N',0,32767,None, None, None, None, u'Horizontal coordinate of the upper left corner of the bounding rectangle of the control.',),
(u'Control',u'Control',u'N',None, None, None, None, u'Identifier',None, u'Name of the control. This name must be unique within a dialog, but can repeat on different dialogs. ',),
(u'Control',u'Control_Next',u'Y',None, None, u'Control',2,u'Identifier',None, u'The name of an other control on the same dialog. This link defines the tab order of the controls. The links have to form one or more cycles!',),
(u'Control',u'Dialog_',u'N',None, None, u'Dialog',1,u'Identifier',None, u'External key to the Dialog table, name of the dialog.',),
(u'Control',u'Help',u'Y',None, None, None, None, u'Text',None, u'The help strings used with the button. The text is optional. ',),
(u'Dialog',u'Attributes',u'Y',0,2147483647,None, None, None, None, u'A 32-bit word that specifies the attribute flags to be applied to this dialog.',),
(u'Dialog',u'Height',u'N',0,32767,None, None, None, None, u'Height of the bounding rectangle of the dialog.',),
(u'Dialog',u'Width',u'N',0,32767,None, None, None, None, u'Width of the bounding rectangle of the dialog.',),
(u'Dialog',u'Dialog',u'N',None, None, None, None, u'Identifier',None, u'Name of the dialog.',),
(u'Dialog',u'Control_Cancel',u'Y',None, None, u'Control',2,u'Identifier',None, u'Defines the cancel control. Hitting escape or clicking on the close icon on the dialog is equivalent to pushing this button.',),
(u'Dialog',u'Control_Default',u'Y',None, None, u'Control',2,u'Identifier',None, u'Defines the default control. Hitting return is equivalent to pushing this button.',),
(u'Dialog',u'Control_First',u'N',None, None, u'Control',2,u'Identifier',None, u'Defines the control that has the focus when the dialog is created.',),
(u'Dialog',u'HCentering',u'N',0,100,None, None, None, None, u'Horizontal position of the dialog on a 0-100 scale. 0 means left end, 100 means right end of the screen, 50 center.',),
(u'Dialog',u'Title',u'Y',None, None, None, None, u'Formatted',None, u"A text string specifying the title to be displayed in the title bar of the dialog's window.",),
(u'Dialog',u'VCentering',u'N',0,100,None, None, None, None, u'Vertical position of the dialog on a 0-100 scale. 0 means top end, 100 means bottom end of the screen, 50 center.',),
(u'ControlCondition',u'Action',u'N',None, None, None, None, None, u'Default;Disable;Enable;Hide;Show',u'The desired action to be taken on the specified control.',),
(u'ControlCondition',u'Condition',u'N',None, None, None, None, u'Condition',None, u'A standard conditional statement that specifies under which conditions the action should be triggered.',),
(u'ControlCondition',u'Dialog_',u'N',None, None, u'Dialog',1,u'Identifier',None, u'A foreign key to the Dialog table, name of the dialog.',),
(u'ControlCondition',u'Control_',u'N',None, None, u'Control',2,u'Identifier',None, u'A foreign key to the Control table, name of the control.',),
(u'ControlEvent',u'Condition',u'Y',None, None, None, None, u'Condition',None, u'A standard conditional statement that specifies under which conditions an event should be triggered.',),
(u'ControlEvent',u'Ordering',u'Y',0,2147483647,None, None, None, None, u'An integer used to order several events tied to the same control. Can be left blank.',),
(u'ControlEvent',u'Argument',u'N',None, None, None, None, u'Formatted',None, u'A value to be used as a modifier when triggering a particular event.',),
(u'ControlEvent',u'Dialog_',u'N',None, None, u'Dialog',1,u'Identifier',None, u'A foreign key to the Dialog table, name of the dialog.',),
(u'ControlEvent',u'Control_',u'N',None, None, u'Control',2,u'Identifier',None, u'A foreign key to the Control table, name of the control',),
(u'ControlEvent',u'Event',u'N',None, None, None, None, u'Formatted',None, u'An identifier that specifies the type of the event that should take place when the user interacts with control specified by the first two entries.',),
(u'CreateFolder',u'Component_',u'N',None, None, u'Component',1,u'Identifier',None, u'Foreign key into the Component table.',),
(u'CreateFolder',u'Directory_',u'N',None, None, u'Directory',1,u'Identifier',None, u'Primary key, could be foreign key into the Directory table.',),
(u'CustomAction',u'Type',u'N',1,16383,None, None, None, None, u'The numeric custom action type, consisting of source location, code type, entry, option flags.',),
(u'CustomAction',u'Action',u'N',None, None, None, None, u'Identifier',None, u'Primary key, name of action, normally appears in sequence table unless private use.',),
(u'CustomAction',u'Source',u'Y',None, None, None, None, u'CustomSource',None, u'The table reference of the source of the code.',),
(u'CustomAction',u'Target',u'Y',None, None, None, None, u'Formatted',None, u'Excecution parameter, depends on the type of custom action',),
(u'DrLocator',u'Signature_',u'N',None, None, None, None, u'Identifier',None, u'The Signature_ represents a unique file signature and is also the foreign key in the Signature table.',),
(u'DrLocator',u'Path',u'Y',None, None, None, None, u'AnyPath',None, u'The path on the user system. This is a either a subpath below the value of the Parent or a full path. The path may contain properties enclosed within [ ] that will be expanded.',),
(u'DrLocator',u'Depth',u'Y',0,32767,None, None, None, None, u'The depth below the path to which the Signature_ is recursively searched. If absent, the depth is assumed to be 0.',),
(u'DrLocator',u'Parent',u'Y',None, None, None, None, u'Identifier',None, u'The parent file signature. It is also a foreign key in the Signature table. If null and the Path column does not expand to a full path, then all the fixed drives of the user system are searched using the Path.',),
(u'DuplicateFile',u'File_',u'N',None, None, u'File',1,u'Identifier',None, u'Foreign key referencing the source file to be duplicated.',),
(u'DuplicateFile',u'Component_',u'N',None, None, u'Component',1,u'Identifier',None, u'Foreign key referencing Component that controls the duplicate file.',),
(u'DuplicateFile',u'DestFolder',u'Y',None, None, None, None, u'Identifier',None, u'Name of a property whose value is assumed to resolve to the full pathname to a destination folder.',),
(u'DuplicateFile',u'DestName',u'Y',None, None, None, None, u'Filename',None, u'Filename to be given to the duplicate file.',),
(u'DuplicateFile',u'FileKey',u'N',None, None, None, None, u'Identifier',None, u'Primary key used to identify a particular file entry',),
(u'Environment',u'Name',u'N',None, None, None, None, u'Text',None, u'The name of the environmental value.',),
(u'Environment',u'Value',u'Y',None, None, None, None, u'Formatted',None, u'The value to set in the environmental settings.',),
(u'Environment',u'Component_',u'N',None, None, u'Component',1,u'Identifier',None, u'Foreign key into the Component table referencing component that controls the installing of the environmental value.',),
(u'Environment',u'Environment',u'N',None, None, None, None, u'Identifier',None, u'Unique identifier for the environmental variable setting',),
(u'Error',u'Error',u'N',0,32767,None, None, None, None, u'Integer error number, obtained from header file IError(...) macros.',),
(u'Error',u'Message',u'Y',None, None, None, None, u'Template',None, u'Error formatting template, obtained from user ed. or localizers.',),
(u'EventMapping',u'Dialog_',u'N',None, None, u'Dialog',1,u'Identifier',None, u'A foreign key to the Dialog table, name of the Dialog.',),
(u'EventMapping',u'Control_',u'N',None, None, u'Control',2,u'Identifier',None, u'A foreign key to the Control table, name of the control.',),
(u'EventMapping',u'Event',u'N',None, None, None, None, u'Identifier',None, u'An identifier that specifies the type of the event that the control subscribes to.',),
(u'EventMapping',u'Attribute',u'N',None, None, None, None, u'Identifier',None, u'The name of the control attribute, that is set when this event is received.',),
(u'Extension',u'Feature_',u'N',None, None, u'Feature',1,u'Identifier',None, u'Required foreign key into the Feature Table, specifying the feature to validate or install in order for the CLSID factory to be operational.',),
(u'Extension',u'Component_',u'N',None, None, u'Component',1,u'Identifier',None, u'Required foreign key into the Component Table, specifying the component for which to return a path when called through LocateComponent.',),
(u'Extension',u'Extension',u'N',None, None, None, None, u'Text',None, u'The extension associated with the table row.',),
(u'Extension',u'MIME_',u'Y',None, None, u'MIME',1,u'Text',None, u'Optional Context identifier, typically "type/format" associated with the extension',),
(u'Extension',u'ProgId_',u'Y',None, None, u'ProgId',1,u'Text',None, u'Optional ProgId associated with this extension.',),
(u'MIME',u'CLSID',u'Y',None, None, None, None, u'Guid',None, u'Optional associated CLSID.',),
(u'MIME',u'ContentType',u'N',None, None, None, None, u'Text',None, u'Primary key. Context identifier, typically "type/format".',),
(u'MIME',u'Extension_',u'N',None, None, u'Extension',1,u'Text',None, u'Optional associated extension (without dot)',),
(u'FeatureComponents',u'Feature_',u'N',None, None, u'Feature',1,u'Identifier',None, u'Foreign key into Feature table.',),
(u'FeatureComponents',u'Component_',u'N',None, None, u'Component',1,u'Identifier',None, u'Foreign key into Component table.',),
(u'FileSFPCatalog',u'File_',u'N',None, None, u'File',1,u'Identifier',None, u'File associated with the catalog',),
(u'FileSFPCatalog',u'SFPCatalog_',u'N',None, None, u'SFPCatalog',1,u'Filename',None, u'Catalog associated with the file',),
(u'SFPCatalog',u'SFPCatalog',u'N',None, None, None, None, u'Filename',None, u'File name for the catalog.',),
(u'SFPCatalog',u'Catalog',u'N',None, None, None, None, u'Binary',None, u'SFP Catalog',),
(u'SFPCatalog',u'Dependency',u'Y',None, None, None, None, u'Formatted',None, u'Parent catalog - only used by SFP',),
(u'Font',u'File_',u'N',None, None, u'File',1,u'Identifier',None, u'Primary key, foreign key into File table referencing font file.',),
(u'Font',u'FontTitle',u'Y',None, None, None, None, u'Text',None, u'Font name.',),
(u'IniFile',u'Action',u'N',None, None, None, None, None, u'0;1;3',u'The type of modification to be made, one of iifEnum',),
(u'IniFile',u'Value',u'N',None, None, None, None, u'Formatted',None, u'The value to be written.',),
(u'IniFile',u'Component_',u'N',None, None, u'Component',1,u'Identifier',None, u'Foreign key into the Component table referencing component that controls the installing of the .INI value.',),
(u'IniFile',u'FileName',u'N',None, None, None, None, u'Filename',None, u'The .INI file name in which to write the information',),
(u'IniFile',u'IniFile',u'N',None, None, None, None, u'Identifier',None, u'Primary key, non-localized token.',),
(u'IniFile',u'DirProperty',u'Y',None, None, None, None, u'Identifier',None, u'Foreign key into the Directory table denoting the directory where the .INI file is.',),
(u'IniFile',u'Key',u'N',None, None, None, None, u'Formatted',None, u'The .INI file key below Section.',),
(u'IniFile',u'Section',u'N',None, None, None, None, u'Formatted',None, u'The .INI file Section.',),
(u'IniLocator',u'Type',u'Y',0,2,None, None, None, None, u'An integer value that determines if the .INI value read is a filename or a directory location or to be used as is w/o interpretation.',),
(u'IniLocator',u'Signature_',u'N',None, None, None, None, u'Identifier',None, u'The table key. The Signature_ represents a unique file signature and is also the foreign key in the Signature table.',),
(u'IniLocator',u'FileName',u'N',None, None, None, None, u'Filename',None, u'The .INI file name.',),
(u'IniLocator',u'Key',u'N',None, None, None, None, u'Text',None, u'Key value (followed by an equals sign in INI file).',),
(u'IniLocator',u'Section',u'N',None, None, None, None, u'Text',None, u'Section name within in file (within square brackets in INI file).',),
(u'IniLocator',u'Field',u'Y',0,32767,None, None, None, None, u'The field in the .INI line. If Field is null or 0 the entire line is read.',),
(u'InstallExecuteSequence',u'Action',u'N',None, None, None, None, u'Identifier',None, u'Name of action to invoke, either in the engine or the handler DLL.',),
(u'InstallExecuteSequence',u'Condition',u'Y',None, None, None, None, u'Condition',None, u'Optional expression which skips the action if evaluates to expFalse.If the expression syntax is invalid, the engine will terminate, returning iesBadActionData.',),
(u'InstallExecuteSequence',u'Sequence',u'Y',-4,32767,None, None, None, None, u'Number that determines the sort order in which the actions are to be executed. Leave blank to suppress action.',),
(u'InstallUISequence',u'Action',u'N',None, None, None, None, u'Identifier',None, u'Name of action to invoke, either in the engine or the handler DLL.',),
(u'InstallUISequence',u'Condition',u'Y',None, None, None, None, u'Condition',None, u'Optional expression which skips the action if evaluates to expFalse.If the expression syntax is invalid, the engine will terminate, returning iesBadActionData.',),
(u'InstallUISequence',u'Sequence',u'Y',-4,32767,None, None, None, None, u'Number that determines the sort order in which the actions are to be executed. Leave blank to suppress action.',),
(u'IsolatedComponent',u'Component_Application',u'N',None, None, u'Component',1,u'Identifier',None, u'Key to Component table item for application',),
(u'IsolatedComponent',u'Component_Shared',u'N',None, None, u'Component',1,u'Identifier',None, u'Key to Component table item to be isolated',),
(u'LaunchCondition',u'Description',u'N',None, None, None, None, u'Formatted',None, u'Localizable text to display when condition fails and install must abort.',),
(u'LaunchCondition',u'Condition',u'N',None, None, None, None, u'Condition',None, u'Expression which must evaluate to TRUE in order for install to commence.',),
(u'ListBox',u'Text',u'Y',None, None, None, None, u'Text',None, u'The visible text to be assigned to the item. Optional. If this entry or the entire column is missing, the text is the same as the value.',),
(u'ListBox',u'Property',u'N',None, None, None, None, u'Identifier',None, u'A named property to be tied to this item. All the items tied to the same property become part of the same listbox.',),
(u'ListBox',u'Value',u'N',None, None, None, None, u'Formatted',None, u'The value string associated with this item. Selecting the line will set the associated property to this value.',),
(u'ListBox',u'Order',u'N',1,32767,None, None, None, None, u'A positive integer used to determine the ordering of the items within one list..The integers do not have to be consecutive.',),
(u'ListView',u'Text',u'Y',None, None, None, None, u'Text',None, u'The visible text to be assigned to the item. Optional. If this entry or the entire column is missing, the text is the same as the value.',),
(u'ListView',u'Property',u'N',None, None, None, None, u'Identifier',None, u'A named property to be tied to this item. All the items tied to the same property become part of the same listview.',),
(u'ListView',u'Value',u'N',None, None, None, None, u'Identifier',None, u'The value string associated with this item. Selecting the line will set the associated property to this value.',),
(u'ListView',u'Order',u'N',1,32767,None, None, None, None, u'A positive integer used to determine the ordering of the items within one list..The integers do not have to be consecutive.',),
(u'ListView',u'Binary_',u'Y',None, None, u'Binary',1,u'Identifier',None, u'The name of the icon to be displayed with the icon. The binary information is looked up from the Binary Table.',),
(u'LockPermissions',u'Table',u'N',None, None, None, None, u'Identifier',u'Directory;File;Registry',u'Reference to another table name',),
(u'LockPermissions',u'Domain',u'Y',None, None, None, None, u'Formatted',None, u'Domain name for user whose permissions are being set. (usually a property)',),
(u'LockPermissions',u'LockObject',u'N',None, None, None, None, u'Identifier',None, u'Foreign key into Registry or File table',),
(u'LockPermissions',u'Permission',u'Y',-2147483647,2147483647,None, None, None, None, u'Permission Access mask. Full Control = 268435456 (GENERIC_ALL = 0x10000000)',),
(u'LockPermissions',u'User',u'N',None, None, None, None, u'Formatted',None, u'User for permissions to be set. (usually a property)',),
(u'Media',u'Source',u'Y',None, None, None, None, u'Property',None, u'The property defining the location of the cabinet file.',),
(u'Media',u'Cabinet',u'Y',None, None, None, None, u'Cabinet',None, u'If some or all of the files stored on the media are compressed in a cabinet, the name of that cabinet.',),
(u'Media',u'DiskId',u'N',1,32767,None, None, None, None, u'Primary key, integer to determine sort order for table.',),
(u'Media',u'DiskPrompt',u'Y',None, None, None, None, u'Text',None, u'Disk name: the visible text actually printed on the disk. This will be used to prompt the user when this disk needs to be inserted.',),
(u'Media',u'LastSequence',u'N',0,32767,None, None, None, None, u'File sequence number for the last file for this media.',),
(u'Media',u'VolumeLabel',u'Y',None, None, None, None, u'Text',None, u'The label attributed to the volume.',),
(u'ModuleComponents',u'Component',u'N',None, None, u'Component',1,u'Identifier',None, u'Component contained in the module.',),
(u'ModuleComponents',u'Language',u'N',None, None, u'ModuleSignature',2,None, None, u'Default language ID for module (may be changed by transform).',),
(u'ModuleComponents',u'ModuleID',u'N',None, None, u'ModuleSignature',1,u'Identifier',None, u'Module containing the component.',),
(u'ModuleSignature',u'Language',u'N',None, None, None, None, None, None, u'Default decimal language of module.',),
(u'ModuleSignature',u'Version',u'N',None, None, None, None, u'Version',None, u'Version of the module.',),
(u'ModuleSignature',u'ModuleID',u'N',None, None, None, None, u'Identifier',None, u'Module identifier (String.GUID).',),
(u'ModuleDependency',u'ModuleID',u'N',None, None, u'ModuleSignature',1,u'Identifier',None, u'Module requiring the dependency.',),
(u'ModuleDependency',u'ModuleLanguage',u'N',None, None, u'ModuleSignature',2,None, None, u'Language of module requiring the dependency.',),
(u'ModuleDependency',u'RequiredID',u'N',None, None, None, None, None, None, u'String.GUID of required module.',),
(u'ModuleDependency',u'RequiredLanguage',u'N',None, None, None, None, None, None, u'LanguageID of the required module.',),
(u'ModuleDependency',u'RequiredVersion',u'Y',None, None, None, None, u'Version',None, u'Version of the required version.',),
(u'ModuleExclusion',u'ModuleID',u'N',None, None, u'ModuleSignature',1,u'Identifier',None, u'String.GUID of module with exclusion requirement.',),
(u'ModuleExclusion',u'ModuleLanguage',u'N',None, None, u'ModuleSignature',2,None, None, u'LanguageID of module with exclusion requirement.',),
(u'ModuleExclusion',u'ExcludedID',u'N',None, None, None, None, None, None, u'String.GUID of excluded module.',),
(u'ModuleExclusion',u'ExcludedLanguage',u'N',None, None, None, None, None, None, u'Language of excluded module.',),
(u'ModuleExclusion',u'ExcludedMaxVersion',u'Y',None, None, None, None, u'Version',None, u'Maximum version of excluded module.',),
(u'ModuleExclusion',u'ExcludedMinVersion',u'Y',None, None, None, None, u'Version',None, u'Minimum version of excluded module.',),
(u'MoveFile',u'Component_',u'N',None, None, u'Component',1,u'Identifier',None, u'If this component is not "selected" for installation or removal, no action will be taken on the associated MoveFile entry',),
(u'MoveFile',u'DestFolder',u'N',None, None, None, None, u'Identifier',None, u'Name of a property whose value is assumed to resolve to the full path to the destination directory',),
(u'MoveFile',u'DestName',u'Y',None, None, None, None, u'Filename',None, u'Name to be given to the original file after it is moved or copied. If blank, the destination file will be given the same name as the source file',),
(u'MoveFile',u'FileKey',u'N',None, None, None, None, u'Identifier',None, u'Primary key that uniquely identifies a particular MoveFile record',),
(u'MoveFile',u'Options',u'N',0,1,None, None, None, None, u'Integer value specifying the MoveFile operating mode, one of imfoEnum',),
(u'MoveFile',u'SourceFolder',u'Y',None, None, None, None, u'Identifier',None, u'Name of a property whose value is assumed to resolve to the full path to the source directory',),
(u'MoveFile',u'SourceName',u'Y',None, None, None, None, u'Text',None, u"Name of the source file(s) to be moved or copied. Can contain the '*' or '?' wildcards.",),
(u'MsiAssembly',u'Attributes',u'Y',None, None, None, None, None, None, u'Assembly attributes',),
(u'MsiAssembly',u'Feature_',u'N',None, None, u'Feature',1,u'Identifier',None, u'Foreign key into Feature table.',),
(u'MsiAssembly',u'Component_',u'N',None, None, u'Component',1,u'Identifier',None, u'Foreign key into Component table.',),
(u'MsiAssembly',u'File_Application',u'Y',None, None, u'File',1,u'Identifier',None, u'Foreign key into File table, denoting the application context for private assemblies. Null for global assemblies.',),
(u'MsiAssembly',u'File_Manifest',u'Y',None, None, u'File',1,u'Identifier',None, u'Foreign key into the File table denoting the manifest file for the assembly.',),
(u'MsiAssemblyName',u'Name',u'N',None, None, None, None, u'Text',None, u'The name part of the name-value pairs for the assembly name.',),
(u'MsiAssemblyName',u'Value',u'N',None, None, None, None, u'Text',None, u'The value part of the name-value pairs for the assembly name.',),
(u'MsiAssemblyName',u'Component_',u'N',None, None, u'Component',1,u'Identifier',None, u'Foreign key into Component table.',),
(u'MsiDigitalCertificate',u'CertData',u'N',None, None, None, None, u'Binary',None, u'A certificate context blob for a signer certificate',),
(u'MsiDigitalCertificate',u'DigitalCertificate',u'N',None, None, None, None, u'Identifier',None, u'A unique identifier for the row',),
(u'MsiDigitalSignature',u'Table',u'N',None, None, None, None, None, u'Media',u'Reference to another table name (only Media table is supported)',),
(u'MsiDigitalSignature',u'DigitalCertificate_',u'N',None, None, u'MsiDigitalCertificate',1,u'Identifier',None, u'Foreign key to MsiDigitalCertificate table identifying the signer certificate',),
(u'MsiDigitalSignature',u'Hash',u'Y',None, None, None, None, u'Binary',None, u'The encoded hash blob from the digital signature',),
(u'MsiDigitalSignature',u'SignObject',u'N',None, None, None, None, u'Text',None, u'Foreign key to Media table',),
(u'MsiFileHash',u'File_',u'N',None, None, u'File',1,u'Identifier',None, u'Primary key, foreign key into File table referencing file with this hash',),
(u'MsiFileHash',u'Options',u'N',0,32767,None, None, None, None, u'Various options and attributes for this hash.',),
(u'MsiFileHash',u'HashPart1',u'N',None, None, None, None, None, None, u'Size of file in bytes (long integer).',),
(u'MsiFileHash',u'HashPart2',u'N',None, None, None, None, None, None, u'Size of file in bytes (long integer).',),
(u'MsiFileHash',u'HashPart3',u'N',None, None, None, None, None, None, u'Size of file in bytes (long integer).',),
(u'MsiFileHash',u'HashPart4',u'N',None, None, None, None, None, None, u'Size of file in bytes (long integer).',),
(u'MsiPatchHeaders',u'StreamRef',u'N',None, None, None, None, u'Identifier',None, u'Primary key. A unique identifier for the row.',),
(u'MsiPatchHeaders',u'Header',u'N',None, None, None, None, u'Binary',None, u'Binary stream. The patch header, used for patch validation.',),
(u'ODBCAttribute',u'Value',u'Y',None, None, None, None, u'Text',None, u'Value for ODBC driver attribute',),
(u'ODBCAttribute',u'Attribute',u'N',None, None, None, None, u'Text',None, u'Name of ODBC driver attribute',),
(u'ODBCAttribute',u'Driver_',u'N',None, None, u'ODBCDriver',1,u'Identifier',None, u'Reference to ODBC driver in ODBCDriver table',),
(u'ODBCDriver',u'Description',u'N',None, None, None, None, u'Text',None, u'Text used as registered name for driver, non-localized',),
(u'ODBCDriver',u'File_',u'N',None, None, u'File',1,u'Identifier',None, u'Reference to key driver file',),
(u'ODBCDriver',u'Component_',u'N',None, None, u'Component',1,u'Identifier',None, u'Reference to associated component',),
(u'ODBCDriver',u'Driver',u'N',None, None, None, None, u'Identifier',None, u'Primary key, non-localized.internal token for driver',),
(u'ODBCDriver',u'File_Setup',u'Y',None, None, u'File',1,u'Identifier',None, u'Optional reference to key driver setup DLL',),
(u'ODBCDataSource',u'Description',u'N',None, None, None, None, u'Text',None, u'Text used as registered name for data source',),
(u'ODBCDataSource',u'Component_',u'N',None, None, u'Component',1,u'Identifier',None, u'Reference to associated component',),
(u'ODBCDataSource',u'DataSource',u'N',None, None, None, None, u'Identifier',None, u'Primary key, non-localized.internal token for data source',),
(u'ODBCDataSource',u'DriverDescription',u'N',None, None, None, None, u'Text',None, u'Reference to driver description, may be existing driver',),
(u'ODBCDataSource',u'Registration',u'N',0,1,None, None, None, None, u'Registration option: 0=machine, 1=user, others t.b.d.',),
(u'ODBCSourceAttribute',u'Value',u'Y',None, None, None, None, u'Text',None, u'Value for ODBC data source attribute',),
(u'ODBCSourceAttribute',u'Attribute',u'N',None, None, None, None, u'Text',None, u'Name of ODBC data source attribute',),
(u'ODBCSourceAttribute',u'DataSource_',u'N',None, None, u'ODBCDataSource',1,u'Identifier',None, u'Reference to ODBC data source in ODBCDataSource table',),
(u'ODBCTranslator',u'Description',u'N',None, None, None, None, u'Text',None, u'Text used as registered name for translator',),
(u'ODBCTranslator',u'File_',u'N',None, None, u'File',1,u'Identifier',None, u'Reference to key translator file',),
(u'ODBCTranslator',u'Component_',u'N',None, None, u'Component',1,u'Identifier',None, u'Reference to associated component',),
(u'ODBCTranslator',u'File_Setup',u'Y',None, None, u'File',1,u'Identifier',None, u'Optional reference to key translator setup DLL',),
(u'ODBCTranslator',u'Translator',u'N',None, None, None, None, u'Identifier',None, u'Primary key, non-localized.internal token for translator',),
(u'Patch',u'Sequence',u'N',0,32767,None, None, None, None, u'Primary key, sequence with respect to the media images; order must track cabinet order.',),
(u'Patch',u'Attributes',u'N',0,32767,None, None, None, None, u'Integer containing bit flags representing patch attributes',),
(u'Patch',u'File_',u'N',None, None, None, None, u'Identifier',None, u'Primary key, non-localized token, foreign key to File table, must match identifier in cabinet.',),
(u'Patch',u'Header',u'Y',None, None, None, None, u'Binary',None, u'Binary stream. The patch header, used for patch validation.',),
(u'Patch',u'PatchSize',u'N',0,2147483647,None, None, None, None, u'Size of patch in bytes (long integer).',),
(u'Patch',u'StreamRef_',u'Y',None, None, None, None, u'Identifier',None, u'Identifier. Foreign key to the StreamRef column of the MsiPatchHeaders table.',),
(u'PatchPackage',u'Media_',u'N',0,32767,None, None, None, None, u'Foreign key to DiskId column of Media table. Indicates the disk containing the patch package.',),
(u'PatchPackage',u'PatchId',u'N',None, None, None, None, u'Guid',None, u'A unique string GUID representing this patch.',),
(u'PublishComponent',u'Feature_',u'N',None, None, u'Feature',1,u'Identifier',None, u'Foreign key into the Feature table.',),
(u'PublishComponent',u'Component_',u'N',None, None, u'Component',1,u'Identifier',None, u'Foreign key into the Component table.',),
(u'PublishComponent',u'ComponentId',u'N',None, None, None, None, u'Guid',None, u'A string GUID that represents the component id that will be requested by the alien product.',),
(u'PublishComponent',u'AppData',u'Y',None, None, None, None, u'Text',None, u'This is localisable Application specific data that can be associated with a Qualified Component.',),
(u'PublishComponent',u'Qualifier',u'N',None, None, None, None, u'Text',None, u'This is defined only when the ComponentId column is an Qualified Component Id. This is the Qualifier for ProvideComponentIndirect.',),
(u'RadioButton',u'Y',u'N',0,32767,None, None, None, None, u'The vertical coordinate of the upper left corner of the bounding rectangle of the radio button.',),
(u'RadioButton',u'Text',u'Y',None, None, None, None, u'Text',None, u'The visible title to be assigned to the radio button.',),
(u'RadioButton',u'Property',u'N',None, None, None, None, u'Identifier',None, u'A named property to be tied to this radio button. All the buttons tied to the same property become part of the same group.',),
(u'RadioButton',u'Height',u'N',0,32767,None, None, None, None, u'The height of the button.',),
(u'RadioButton',u'Width',u'N',0,32767,None, None, None, None, u'The width of the button.',),
(u'RadioButton',u'X',u'N',0,32767,None, None, None, None, u'The horizontal coordinate of the upper left corner of the bounding rectangle of the radio button.',),
(u'RadioButton',u'Value',u'N',None, None, None, None, u'Formatted',None, u'The value string associated with this button. Selecting the button will set the associated property to this value.',),
(u'RadioButton',u'Order',u'N',1,32767,None, None, None, None, u'A positive integer used to determine the ordering of the items within one list..The integers do not have to be consecutive.',),
(u'RadioButton',u'Help',u'Y',None, None, None, None, u'Text',None, u'The help strings used with the button. The text is optional.',),
(u'Registry',u'Name',u'Y',None, None, None, None, u'Formatted',None, u'The registry value name.',),
(u'Registry',u'Value',u'Y',None, None, None, None, u'Formatted',None, u'The registry value.',),
(u'Registry',u'Component_',u'N',None, None, u'Component',1,u'Identifier',None, u'Foreign key into the Component table referencing component that controls the installing of the registry value.',),
(u'Registry',u'Key',u'N',None, None, None, None, u'RegPath',None, u'The key for the registry value.',),
(u'Registry',u'Registry',u'N',None, None, None, None, u'Identifier',None, u'Primary key, non-localized token.',),
(u'Registry',u'Root',u'N',-1,3,None, None, None, None, u'The predefined root key for the registry value, one of rrkEnum.',),
(u'RegLocator',u'Name',u'Y',None, None, None, None, u'Formatted',None, u'The registry value name.',),
(u'RegLocator',u'Type',u'Y',0,18,None, None, None, None, u'An integer value that determines if the registry value is a filename or a directory location or to be used as is w/o interpretation.',),
(u'RegLocator',u'Signature_',u'N',None, None, None, None, u'Identifier',None, u'The table key. The Signature_ represents a unique file signature and is also the foreign key in the Signature table. If the type is 0, the registry values refers a directory, and _Signature is not a foreign key.',),
(u'RegLocator',u'Key',u'N',None, None, None, None, u'RegPath',None, u'The key for the registry value.',),
(u'RegLocator',u'Root',u'N',0,3,None, None, None, None, u'The predefined root key for the registry value, one of rrkEnum.',),
(u'RemoveFile',u'Component_',u'N',None, None, u'Component',1,u'Identifier',None, u'Foreign key referencing Component that controls the file to be removed.',),
(u'RemoveFile',u'FileKey',u'N',None, None, None, None, u'Identifier',None, u'Primary key used to identify a particular file entry',),
(u'RemoveFile',u'FileName',u'Y',None, None, None, None, u'WildCardFilename',None, u'Name of the file to be removed.',),
(u'RemoveFile',u'DirProperty',u'N',None, None, None, None, u'Identifier',None, u'Name of a property whose value is assumed to resolve to the full pathname to the folder of the file to be removed.',),
(u'RemoveFile',u'InstallMode',u'N',None, None, None, None, None, u'1;2;3',u'Installation option, one of iimEnum.',),
(u'RemoveIniFile',u'Action',u'N',None, None, None, None, None, u'2;4',u'The type of modification to be made, one of iifEnum.',),
(u'RemoveIniFile',u'Value',u'Y',None, None, None, None, u'Formatted',None, u'The value to be deleted. The value is required when Action is iifIniRemoveTag',),
(u'RemoveIniFile',u'Component_',u'N',None, None, u'Component',1,u'Identifier',None, u'Foreign key into the Component table referencing component that controls the deletion of the .INI value.',),
(u'RemoveIniFile',u'FileName',u'N',None, None, None, None, u'Filename',None, u'The .INI file name in which to delete the information',),
(u'RemoveIniFile',u'DirProperty',u'Y',None, None, None, None, u'Identifier',None, u'Foreign key into the Directory table denoting the directory where the .INI file is.',),
(u'RemoveIniFile',u'Key',u'N',None, None, None, None, u'Formatted',None, u'The .INI file key below Section.',),
(u'RemoveIniFile',u'Section',u'N',None, None, None, None, u'Formatted',None, u'The .INI file Section.',),
(u'RemoveIniFile',u'RemoveIniFile',u'N',None, None, None, None, u'Identifier',None, u'Primary key, non-localized token.',),
(u'RemoveRegistry',u'Name',u'Y',None, None, None, None, u'Formatted',None, u'The registry value name.',),
(u'RemoveRegistry',u'Component_',u'N',None, None, u'Component',1,u'Identifier',None, u'Foreign key into the Component table referencing component that controls the deletion of the registry value.',),
(u'RemoveRegistry',u'Key',u'N',None, None, None, None, u'RegPath',None, u'The key for the registry value.',),
(u'RemoveRegistry',u'Root',u'N',-1,3,None, None, None, None, u'The predefined root key for the registry value, one of rrkEnum',),
(u'RemoveRegistry',u'RemoveRegistry',u'N',None, None, None, None, u'Identifier',None, u'Primary key, non-localized token.',),
(u'ReserveCost',u'Component_',u'N',None, None, u'Component',1,u'Identifier',None, u'Reserve a specified amount of space if this component is to be installed.',),
(u'ReserveCost',u'ReserveFolder',u'Y',None, None, None, None, u'Identifier',None, u'Name of a property whose value is assumed to resolve to the full path to the destination directory',),
(u'ReserveCost',u'ReserveKey',u'N',None, None, None, None, u'Identifier',None, u'Primary key that uniquely identifies a particular ReserveCost record',),
(u'ReserveCost',u'ReserveLocal',u'N',0,2147483647,None, None, None, None, u'Disk space to reserve if linked component is installed locally.',),
(u'ReserveCost',u'ReserveSource',u'N',0,2147483647,None, None, None, None, u'Disk space to reserve if linked component is installed to run from the source location.',),
(u'SelfReg',u'File_',u'N',None, None, u'File',1,u'Identifier',None, u'Foreign key into the File table denoting the module that needs to be registered.',),
(u'SelfReg',u'Cost',u'Y',0,32767,None, None, None, None, u'The cost of registering the module.',),
(u'ServiceControl',u'Name',u'N',None, None, None, None, u'Formatted',None, u'Name of a service. /, \\, comma and space are invalid',),
(u'ServiceControl',u'Component_',u'N',None, None, u'Component',1,u'Identifier',None, u'Required foreign key into the Component Table that controls the startup of the service',),
(u'ServiceControl',u'Event',u'N',0,187,None, None, None, None, u'Bit field: Install: 0x1 = Start, 0x2 = Stop, 0x8 = Delete, Uninstall: 0x10 = Start, 0x20 = Stop, 0x80 = Delete',),
(u'ServiceControl',u'ServiceControl',u'N',None, None, None, None, u'Identifier',None, u'Primary key, non-localized token.',),
(u'ServiceControl',u'Arguments',u'Y',None, None, None, None, u'Formatted',None, u'Arguments for the service. Separate by [~].',),
(u'ServiceControl',u'Wait',u'Y',0,1,None, None, None, None, u'Boolean for whether to wait for the service to fully start',),
(u'ServiceInstall',u'Name',u'N',None, None, None, None, u'Formatted',None, u'Internal Name of the Service',),
(u'ServiceInstall',u'Description',u'Y',None, None, None, None, u'Text',None, u'Description of service.',),
(u'ServiceInstall',u'Component_',u'N',None, None, u'Component',1,u'Identifier',None, u'Required foreign key into the Component Table that controls the startup of the service',),
(u'ServiceInstall',u'Arguments',u'Y',None, None, None, None, u'Formatted',None, u'Arguments to include in every start of the service, passed to WinMain',),
(u'ServiceInstall',u'ServiceInstall',u'N',None, None, None, None, u'Identifier',None, u'Primary key, non-localized token.',),
(u'ServiceInstall',u'Dependencies',u'Y',None, None, None, None, u'Formatted',None, u'Other services this depends on to start. Separate by [~], and end with [~][~]',),
(u'ServiceInstall',u'DisplayName',u'Y',None, None, None, None, u'Formatted',None, u'External Name of the Service',),
(u'ServiceInstall',u'ErrorControl',u'N',-2147483647,2147483647,None, None, None, None, u'Severity of error if service fails to start',),
(u'ServiceInstall',u'LoadOrderGroup',u'Y',None, None, None, None, u'Formatted',None, u'LoadOrderGroup',),
(u'ServiceInstall',u'Password',u'Y',None, None, None, None, u'Formatted',None, u'password to run service with. (with StartName)',),
(u'ServiceInstall',u'ServiceType',u'N',-2147483647,2147483647,None, None, None, None, u'Type of the service',),
(u'ServiceInstall',u'StartName',u'Y',None, None, None, None, u'Formatted',None, u'User or object name to run service as',),
(u'ServiceInstall',u'StartType',u'N',0,4,None, None, None, None, u'Type of the service',),
(u'Shortcut',u'Name',u'N',None, None, None, None, u'Filename',None, u'The name of the shortcut to be created.',),
(u'Shortcut',u'Description',u'Y',None, None, None, None, u'Text',None, u'The description for the shortcut.',),
(u'Shortcut',u'Component_',u'N',None, None, u'Component',1,u'Identifier',None, u'Foreign key into the Component table denoting the component whose selection gates the shortcut creation/deletion.',),
(u'Shortcut',u'Icon_',u'Y',None, None, u'Icon',1,u'Identifier',None, u'Foreign key into the File table denoting the external icon file for the shortcut.',),
(u'Shortcut',u'IconIndex',u'Y',-32767,32767,None, None, None, None, u'The icon index for the shortcut.',),
(u'Shortcut',u'Directory_',u'N',None, None, u'Directory',1,u'Identifier',None, u'Foreign key into the Directory table denoting the directory where the shortcut file is created.',),
(u'Shortcut',u'Target',u'N',None, None, None, None, u'Shortcut',None, u'The shortcut target. This is usually a property that is expanded to a file or a folder that the shortcut points to.',),
(u'Shortcut',u'Arguments',u'Y',None, None, None, None, u'Formatted',None, u'The command-line arguments for the shortcut.',),
(u'Shortcut',u'Shortcut',u'N',None, None, None, None, u'Identifier',None, u'Primary key, non-localized token.',),
(u'Shortcut',u'Hotkey',u'Y',0,32767,None, None, None, None, u'The hotkey for the shortcut. It has the virtual-key code for the key in the low-order byte, and the modifier flags in the high-order byte. ',),
(u'Shortcut',u'ShowCmd',u'Y',None, None, None, None, None, u'1;3;7',u'The show command for the application window.The following values may be used.',),
(u'Shortcut',u'WkDir',u'Y',None, None, None, None, u'Identifier',None, u'Name of property defining location of working directory.',),
(u'Signature',u'FileName',u'N',None, None, None, None, u'Filename',None, u'The name of the file. This may contain a "short name|long name" pair.',),
(u'Signature',u'Signature',u'N',None, None, None, None, u'Identifier',None, u'The table key. The Signature represents a unique file signature.',),
(u'Signature',u'Languages',u'Y',None, None, None, None, u'Language',None, u'The languages supported by the file.',),
(u'Signature',u'MaxDate',u'Y',0,2147483647,None, None, None, None, u'The maximum creation date of the file.',),
(u'Signature',u'MaxSize',u'Y',0,2147483647,None, None, None, None, u'The maximum size of the file. ',),
(u'Signature',u'MaxVersion',u'Y',None, None, None, None, u'Text',None, u'The maximum version of the file.',),
(u'Signature',u'MinDate',u'Y',0,2147483647,None, None, None, None, u'The minimum creation date of the file.',),
(u'Signature',u'MinSize',u'Y',0,2147483647,None, None, None, None, u'The minimum size of the file.',),
(u'Signature',u'MinVersion',u'Y',None, None, None, None, u'Text',None, u'The minimum version of the file.',),
(u'TextStyle',u'TextStyle',u'N',None, None, None, None, u'Identifier',None, u'Name of the style. The primary key of this table. This name is embedded in the texts to indicate a style change.',),
(u'TextStyle',u'Color',u'Y',0,16777215,None, None, None, None, u'A long integer indicating the color of the string in the RGB format (Red, Green, Blue each 0-255, RGB = R + 256*G + 256^2*B).',),
(u'TextStyle',u'FaceName',u'N',None, None, None, None, u'Text',None, u'A string indicating the name of the font used. Required. The string must be at most 31 characters long.',),
(u'TextStyle',u'Size',u'N',0,32767,None, None, None, None, u'The size of the font used. This size is given in our units (1/12 of the system font height). Assuming that the system font is set to 12 point size, this is equivalent to the point size.',),
(u'TextStyle',u'StyleBits',u'Y',0,15,None, None, None, None, u'A combination of style bits.',),
(u'TypeLib',u'Description',u'Y',None, None, None, None, u'Text',None, None, ),
(u'TypeLib',u'Feature_',u'N',None, None, u'Feature',1,u'Identifier',None, u'Required foreign key into the Feature Table, specifying the feature to validate or install in order for the type library to be operational.',),
(u'TypeLib',u'Component_',u'N',None, None, u'Component',1,u'Identifier',None, u'Required foreign key into the Component Table, specifying the component for which to return a path when called through LocateComponent.',),
(u'TypeLib',u'Directory_',u'Y',None, None, u'Directory',1,u'Identifier',None, u'Optional. The foreign key into the Directory table denoting the path to the help file for the type library.',),
(u'TypeLib',u'Language',u'N',0,32767,None, None, None, None, u'The language of the library.',),
(u'TypeLib',u'Version',u'Y',0,16777215,None, None, None, None, u'The version of the library. The minor version is in the lower 8 bits of the integer. The major version is in the next 16 bits. ',),
(u'TypeLib',u'Cost',u'Y',0,2147483647,None, None, None, None, u'The cost associated with the registration of the typelib. This column is currently optional.',),
(u'TypeLib',u'LibID',u'N',None, None, None, None, u'Guid',None, u'The GUID that represents the library.',),
(u'UIText',u'Text',u'Y',None, None, None, None, u'Text',None, u'The localized version of the string.',),
(u'UIText',u'Key',u'N',None, None, None, None, u'Identifier',None, u'A unique key that identifies the particular string.',),
(u'Upgrade',u'Attributes',u'N',0,2147483647,None, None, None, None, u'The attributes of this product set.',),
(u'Upgrade',u'Language',u'Y',None, None, None, None, u'Language',None, u'A comma-separated list of languages for either products in this set or products not in this set.',),
(u'Upgrade',u'ActionProperty',u'N',None, None, None, None, u'UpperCase',None, u'The property to set when a product in this set is found.',),
(u'Upgrade',u'Remove',u'Y',None, None, None, None, u'Formatted',None, u'The list of features to remove when uninstalling a product from this set. The default is "ALL".',),
(u'Upgrade',u'UpgradeCode',u'N',None, None, None, None, u'Guid',None, u'The UpgradeCode GUID belonging to the products in this set.',),
(u'Upgrade',u'VersionMax',u'Y',None, None, None, None, u'Text',None, u'The maximum ProductVersion of the products in this set. The set may or may not include products with this particular version.',),
(u'Upgrade',u'VersionMin',u'Y',None, None, None, None, u'Text',None, u'The minimum ProductVersion of the products in this set. The set may or may not include products with this particular version.',),
(u'Verb',u'Sequence',u'Y',0,32767,None, None, None, None, u'Order within the verbs for a particular extension. Also used simply to specify the default verb.',),
(u'Verb',u'Argument',u'Y',None, None, None, None, u'Formatted',None, u'Optional value for the command arguments.',),
(u'Verb',u'Extension_',u'N',None, None, u'Extension',1,u'Text',None, u'The extension associated with the table row.',),
(u'Verb',u'Verb',u'N',None, None, None, None, u'Text',None, u'The verb for the command.',),
(u'Verb',u'Command',u'Y',None, None, None, None, u'Formatted',None, u'The command text.',),
]
| gpl-3.0 |
KaranToor/MA450 | google-cloud-sdk/.install/.backup/lib/googlecloudsdk/third_party/appengine/proto/protocoltype_pb.py | 2 | 117591 | # Copyright 2016 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: net/proto/protocoltype.proto
from googlecloudsdk.third_party.appengine.proto import ProtocolBuffer
import array
import base64
import thread
try:
from googlecloudsdk.third_party.appengine.proto import _net_proto___parse__python
except ImportError:
_net_proto___parse__python = None
__pychecker__ = """maxreturns=0 maxbranches=0 no-callinit
unusednames=printElemNumber,debug_strs no-special"""
# Feature-detect proto2-style extension support in the ProtocolBuffer
# runtime: newer runtimes expose ExtendableProtocolMessage; older ones only
# provide the plain ProtocolMessage base class.  The chosen base is bound to
# _ExtendableProtocolMessage so generated classes below work on either.
if hasattr(ProtocolBuffer, 'ExtendableProtocolMessage'):
  _extension_runtime = True
  _ExtendableProtocolMessage = ProtocolBuffer.ExtendableProtocolMessage
else:
  _extension_runtime = False
  _ExtendableProtocolMessage = ProtocolBuffer.ProtocolMessage
class ProtocolDescriptor_EnumTypeTag(ProtocolBuffer.ProtocolMessage):
  """One name/value entry of an enum type inside a ProtocolDescriptor.

  Both fields are required: ``name`` (string) and ``value`` (int32).
  On the wire this message is a group: sub-tags 146 = (18 << 3) | 2 and
  152 = (19 << 3) | 0, terminated by end-group tag 140 = (17 << 3) | 4.
  """

  # Presence flags and per-class defaults; the trailing underscore marks
  # the raw storage slots used throughout the generated runtime.
  has_name_ = 0
  name_ = ""
  has_value_ = 0
  value_ = 0

  def __init__(self, contents=None):
    # Optionally bootstrap the message from an already-encoded byte string.
    if contents is not None:
      self.MergeFromString(contents)

  # ---- accessors for `name` ----

  def name(self):
    return self.name_

  def set_name(self, x):
    self.has_name_ = 1
    self.name_ = x

  def clear_name(self):
    if self.has_name_:
      self.has_name_ = 0
      self.name_ = ""

  def has_name(self):
    return self.has_name_

  # ---- accessors for `value` ----

  def value(self):
    return self.value_

  def set_value(self, x):
    self.has_value_ = 1
    self.value_ = x

  def clear_value(self):
    if self.has_value_:
      self.has_value_ = 0
      self.value_ = 0

  def has_value(self):
    return self.has_value_

  def MergeFrom(self, x):
    """Copy every present field of `x` into self."""
    assert x is not self
    if x.has_name():
      self.set_name(x.name())
    if x.has_value():
      self.set_value(x.value())

  def Equals(self, x):
    """Field-by-field equality; returns 1/0 like the rest of the runtime."""
    if x is self:
      return 1
    same = (self.has_name_ == x.has_name_
            and (not self.has_name_ or self.name_ == x.name_)
            and self.has_value_ == x.has_value_
            and (not self.has_value_ or self.value_ == x.value_))
    return 1 if same else 0

  def IsInitialized(self, debug_strs=None):
    """Return 1 iff all required fields are set, appending diagnostics."""
    ok = 1
    if not self.has_name_:
      ok = 0
      if debug_strs is not None:
        debug_strs.append('Required field: name not set.')
    if not self.has_value_:
      ok = 0
      if debug_strs is not None:
        debug_strs.append('Required field: value not set.')
    return ok

  def ByteSize(self):
    # The two 2-byte field tags (146 and 152) account for the constant 4.
    size = self.lengthString(len(self.name_))
    size += self.lengthVarInt64(self.value_)
    return size + 4

  def ByteSizePartial(self):
    size = 0
    if self.has_name_:
      size += 2 + self.lengthString(len(self.name_))
    if self.has_value_:
      size += 2 + self.lengthVarInt64(self.value_)
    return size

  def Clear(self):
    self.clear_name()
    self.clear_value()

  def OutputUnchecked(self, out):
    out.putVarInt32(146)
    out.putPrefixedString(self.name_)
    out.putVarInt32(152)
    out.putVarInt32(self.value_)

  def OutputPartial(self, out):
    if self.has_name_:
      out.putVarInt32(146)
      out.putPrefixedString(self.name_)
    if self.has_value_:
      out.putVarInt32(152)
      out.putVarInt32(self.value_)

  def TryMerge(self, d):
    while True:
      tag = d.getVarInt32()
      if tag == 140:
        # END_GROUP tag for field 17 terminates this message.
        break
      if tag == 146:
        self.set_name(d.getPrefixedString())
      elif tag == 152:
        self.set_value(d.getVarInt32())
      elif tag == 0:
        # A zero tag marks a corrupt encoding.
        raise ProtocolBuffer.ProtocolBufferDecodeError
      else:
        d.skipData(tag)

  def __str__(self, prefix="", printElemNumber=0):
    pieces = []
    if self.has_name_:
      pieces.append(prefix + ("name: %s\n" % self.DebugFormatString(self.name_)))
    if self.has_value_:
      pieces.append(prefix + ("value: %s\n" % self.DebugFormatInt32(self.value_)))
    return "".join(pieces)
class ProtocolDescriptor_TagOption(ProtocolBuffer.ProtocolMessage):
  """A single name/value option attached to a Tag declaration.

  Both fields are required strings.  On the wire this is a group with
  sub-tags 170 = (21 << 3) | 2 and 178 = (22 << 3) | 2, terminated by
  end-group tag 164 = (20 << 3) | 4.
  """

  # Presence flags and defaults; trailing underscore marks storage slots.
  has_name_ = 0
  name_ = ""
  has_value_ = 0
  value_ = ""

  def __init__(self, contents=None):
    # Optionally decode an initial serialized payload.
    if contents is not None:
      self.MergeFromString(contents)

  # ---- accessors for `name` ----

  def name(self):
    return self.name_

  def set_name(self, x):
    self.has_name_ = 1
    self.name_ = x

  def clear_name(self):
    if self.has_name_:
      self.has_name_ = 0
      self.name_ = ""

  def has_name(self):
    return self.has_name_

  # ---- accessors for `value` ----

  def value(self):
    return self.value_

  def set_value(self, x):
    self.has_value_ = 1
    self.value_ = x

  def clear_value(self):
    if self.has_value_:
      self.has_value_ = 0
      self.value_ = ""

  def has_value(self):
    return self.has_value_

  def MergeFrom(self, x):
    """Copy every present field of `x` into self."""
    assert x is not self
    if x.has_name():
      self.set_name(x.name())
    if x.has_value():
      self.set_value(x.value())

  def Equals(self, x):
    """Field-by-field equality; returns 1/0 like the rest of the runtime."""
    if x is self:
      return 1
    for have_mine, have_theirs, mine, theirs in (
        (self.has_name_, x.has_name_, self.name_, x.name_),
        (self.has_value_, x.has_value_, self.value_, x.value_)):
      if have_mine != have_theirs:
        return 0
      if have_mine and mine != theirs:
        return 0
    return 1

  def IsInitialized(self, debug_strs=None):
    """Return 1 iff both required fields are set, appending diagnostics."""
    ok = 1
    for present, message in (
        (self.has_name_, 'Required field: name not set.'),
        (self.has_value_, 'Required field: value not set.')):
      if not present:
        ok = 0
        if debug_strs is not None:
          debug_strs.append(message)
    return ok

  def ByteSize(self):
    # The two 2-byte field tags (170 and 178) account for the constant 4.
    return (4 + self.lengthString(len(self.name_))
            + self.lengthString(len(self.value_)))

  def ByteSizePartial(self):
    total = 0
    if self.has_name_:
      total += 2 + self.lengthString(len(self.name_))
    if self.has_value_:
      total += 2 + self.lengthString(len(self.value_))
    return total

  def Clear(self):
    self.clear_name()
    self.clear_value()

  def OutputUnchecked(self, out):
    out.putVarInt32(170)
    out.putPrefixedString(self.name_)
    out.putVarInt32(178)
    out.putPrefixedString(self.value_)

  def OutputPartial(self, out):
    if self.has_name_:
      out.putVarInt32(170)
      out.putPrefixedString(self.name_)
    if self.has_value_:
      out.putVarInt32(178)
      out.putPrefixedString(self.value_)

  def TryMerge(self, d):
    while True:
      tag = d.getVarInt32()
      if tag == 164:
        # END_GROUP tag for field 20 terminates this option.
        break
      if tag == 170:
        self.set_name(d.getPrefixedString())
      elif tag == 178:
        self.set_value(d.getPrefixedString())
      elif tag == 0:
        # A zero tag marks a corrupt encoding.
        raise ProtocolBuffer.ProtocolBufferDecodeError
      else:
        d.skipData(tag)

  def __str__(self, prefix="", printElemNumber=0):
    out = ""
    if self.has_name_:
      out += prefix + ("name: %s\n" % self.DebugFormatString(self.name_))
    if self.has_value_:
      out += prefix + ("value: %s\n" % self.DebugFormatString(self.value_))
    return out
class ProtocolDescriptor_Tag(ProtocolBuffer.ProtocolMessage):
  """One field ("tag") declaration inside a ProtocolDescriptor message.

  Required fields: name, number, wire_type, declared_type, label.
  Optional fields: default_value, foreign, foreign_proto_name, flags,
  parent, enum_id, deprecated, plus a repeated group of
  ProtocolDescriptor_TagOption entries.  TryMerge stops on end-group
  tag 20 = (2 << 3) | 4, i.e. this message is group field 2 of its parent.
  """

  # Presence flags and per-class defaults; the trailing underscore marks
  # the raw storage slots.  Per-instance values shadow these on first set.
  has_name_ = 0
  name_ = ""
  has_number_ = 0
  number_ = 0
  has_wire_type_ = 0
  wire_type_ = 0
  has_declared_type_ = 0
  declared_type_ = 0
  has_label_ = 0
  label_ = 0
  has_default_value_ = 0
  default_value_ = ""
  has_foreign_ = 0
  foreign_ = ""
  has_foreign_proto_name_ = 0
  foreign_proto_name_ = ""
  has_flags_ = 0
  flags_ = 0
  has_parent_ = 0
  parent_ = -1  # note: default is -1, not 0; clear_parent() restores -1
  has_enum_id_ = 0
  enum_id_ = -1  # note: default is -1, not 0; clear_enum_id() restores -1
  has_deprecated_ = 0
  deprecated_ = 0

  def __init__(self, contents=None):
    # The repeated `option` group needs a per-instance list.
    self.option_ = []
    if contents is not None: self.MergeFromString(contents)

  # ---- generated accessors (getter / setter / clear / has) per field ----

  def name(self): return self.name_

  def set_name(self, x):
    self.has_name_ = 1
    self.name_ = x

  def clear_name(self):
    if self.has_name_:
      self.has_name_ = 0
      self.name_ = ""

  def has_name(self): return self.has_name_

  def number(self): return self.number_

  def set_number(self, x):
    self.has_number_ = 1
    self.number_ = x

  def clear_number(self):
    if self.has_number_:
      self.has_number_ = 0
      self.number_ = 0

  def has_number(self): return self.has_number_

  def wire_type(self): return self.wire_type_

  def set_wire_type(self, x):
    self.has_wire_type_ = 1
    self.wire_type_ = x

  def clear_wire_type(self):
    if self.has_wire_type_:
      self.has_wire_type_ = 0
      self.wire_type_ = 0

  def has_wire_type(self): return self.has_wire_type_

  def declared_type(self): return self.declared_type_

  def set_declared_type(self, x):
    self.has_declared_type_ = 1
    self.declared_type_ = x

  def clear_declared_type(self):
    if self.has_declared_type_:
      self.has_declared_type_ = 0
      self.declared_type_ = 0

  def has_declared_type(self): return self.has_declared_type_

  def label(self): return self.label_

  def set_label(self, x):
    self.has_label_ = 1
    self.label_ = x

  def clear_label(self):
    if self.has_label_:
      self.has_label_ = 0
      self.label_ = 0

  def has_label(self): return self.has_label_

  def default_value(self): return self.default_value_

  def set_default_value(self, x):
    self.has_default_value_ = 1
    self.default_value_ = x

  def clear_default_value(self):
    if self.has_default_value_:
      self.has_default_value_ = 0
      self.default_value_ = ""

  def has_default_value(self): return self.has_default_value_

  def foreign(self): return self.foreign_

  def set_foreign(self, x):
    self.has_foreign_ = 1
    self.foreign_ = x

  def clear_foreign(self):
    if self.has_foreign_:
      self.has_foreign_ = 0
      self.foreign_ = ""

  def has_foreign(self): return self.has_foreign_

  def foreign_proto_name(self): return self.foreign_proto_name_

  def set_foreign_proto_name(self, x):
    self.has_foreign_proto_name_ = 1
    self.foreign_proto_name_ = x

  def clear_foreign_proto_name(self):
    if self.has_foreign_proto_name_:
      self.has_foreign_proto_name_ = 0
      self.foreign_proto_name_ = ""

  def has_foreign_proto_name(self): return self.has_foreign_proto_name_

  def flags(self): return self.flags_

  def set_flags(self, x):
    self.has_flags_ = 1
    self.flags_ = x

  def clear_flags(self):
    if self.has_flags_:
      self.has_flags_ = 0
      self.flags_ = 0

  def has_flags(self): return self.has_flags_

  def parent(self): return self.parent_

  def set_parent(self, x):
    self.has_parent_ = 1
    self.parent_ = x

  def clear_parent(self):
    if self.has_parent_:
      self.has_parent_ = 0
      self.parent_ = -1

  def has_parent(self): return self.has_parent_

  def enum_id(self): return self.enum_id_

  def set_enum_id(self, x):
    self.has_enum_id_ = 1
    self.enum_id_ = x

  def clear_enum_id(self):
    if self.has_enum_id_:
      self.has_enum_id_ = 0
      self.enum_id_ = -1

  def has_enum_id(self): return self.has_enum_id_

  def deprecated(self): return self.deprecated_

  def set_deprecated(self, x):
    self.has_deprecated_ = 1
    self.deprecated_ = x

  def clear_deprecated(self):
    if self.has_deprecated_:
      self.has_deprecated_ = 0
      self.deprecated_ = 0

  def has_deprecated(self): return self.has_deprecated_

  # ---- accessors for the repeated `option` group ----

  def option_size(self): return len(self.option_)

  def option_list(self): return self.option_

  def option(self, i):
    return self.option_[i]

  def mutable_option(self, i):
    # Sub-messages are mutable in place, so this is the same as option(i).
    return self.option_[i]

  def add_option(self):
    # Append a fresh, empty TagOption and hand it back for the caller to fill.
    x = ProtocolDescriptor_TagOption()
    self.option_.append(x)
    return x

  def clear_option(self):
    self.option_ = []

  def MergeFrom(self, x):
    """Copy every present field (and all options) of `x` into self."""
    assert x is not self
    if (x.has_name()): self.set_name(x.name())
    if (x.has_number()): self.set_number(x.number())
    if (x.has_wire_type()): self.set_wire_type(x.wire_type())
    if (x.has_declared_type()): self.set_declared_type(x.declared_type())
    if (x.has_label()): self.set_label(x.label())
    if (x.has_default_value()): self.set_default_value(x.default_value())
    if (x.has_foreign()): self.set_foreign(x.foreign())
    if (x.has_foreign_proto_name()): self.set_foreign_proto_name(x.foreign_proto_name())
    if (x.has_flags()): self.set_flags(x.flags())
    if (x.has_parent()): self.set_parent(x.parent())
    if (x.has_enum_id()): self.set_enum_id(x.enum_id())
    if (x.has_deprecated()): self.set_deprecated(x.deprecated())
    for i in xrange(x.option_size()): self.add_option().CopyFrom(x.option(i))

  def Equals(self, x):
    """Field-by-field equality (presence and value); returns 1/0."""
    if x is self: return 1
    if self.has_name_ != x.has_name_: return 0
    if self.has_name_ and self.name_ != x.name_: return 0
    if self.has_number_ != x.has_number_: return 0
    if self.has_number_ and self.number_ != x.number_: return 0
    if self.has_wire_type_ != x.has_wire_type_: return 0
    if self.has_wire_type_ and self.wire_type_ != x.wire_type_: return 0
    if self.has_declared_type_ != x.has_declared_type_: return 0
    if self.has_declared_type_ and self.declared_type_ != x.declared_type_: return 0
    if self.has_label_ != x.has_label_: return 0
    if self.has_label_ and self.label_ != x.label_: return 0
    if self.has_default_value_ != x.has_default_value_: return 0
    if self.has_default_value_ and self.default_value_ != x.default_value_: return 0
    if self.has_foreign_ != x.has_foreign_: return 0
    if self.has_foreign_ and self.foreign_ != x.foreign_: return 0
    if self.has_foreign_proto_name_ != x.has_foreign_proto_name_: return 0
    if self.has_foreign_proto_name_ and self.foreign_proto_name_ != x.foreign_proto_name_: return 0
    if self.has_flags_ != x.has_flags_: return 0
    if self.has_flags_ and self.flags_ != x.flags_: return 0
    if self.has_parent_ != x.has_parent_: return 0
    if self.has_parent_ and self.parent_ != x.parent_: return 0
    if self.has_enum_id_ != x.has_enum_id_: return 0
    if self.has_enum_id_ and self.enum_id_ != x.enum_id_: return 0
    if self.has_deprecated_ != x.has_deprecated_: return 0
    if self.has_deprecated_ and self.deprecated_ != x.deprecated_: return 0
    if len(self.option_) != len(x.option_): return 0
    for e1, e2 in zip(self.option_, x.option_):
      if e1 != e2: return 0
    return 1

  def IsInitialized(self, debug_strs=None):
    """Return 1 iff all required fields (and all options) are set."""
    initialized = 1
    if (not self.has_name_):
      initialized = 0
      if debug_strs is not None:
        debug_strs.append('Required field: name not set.')
    if (not self.has_number_):
      initialized = 0
      if debug_strs is not None:
        debug_strs.append('Required field: number not set.')
    if (not self.has_wire_type_):
      initialized = 0
      if debug_strs is not None:
        debug_strs.append('Required field: wire_type not set.')
    if (not self.has_declared_type_):
      initialized = 0
      if debug_strs is not None:
        debug_strs.append('Required field: declared_type not set.')
    if (not self.has_label_):
      initialized = 0
      if debug_strs is not None:
        debug_strs.append('Required field: label not set.')
    for p in self.option_:
      if not p.IsInitialized(debug_strs): initialized=0
    return initialized

  def ByteSize(self):
    """Serialized size assuming all required fields are present."""
    n = 0
    n += self.lengthString(len(self.name_))
    n += self.lengthVarInt64(self.number_)
    n += self.lengthVarInt64(self.wire_type_)
    n += self.lengthVarInt64(self.declared_type_)
    n += self.lengthVarInt64(self.label_)
    if (self.has_default_value_): n += 1 + self.lengthString(len(self.default_value_))
    if (self.has_foreign_): n += 1 + self.lengthString(len(self.foreign_))
    if (self.has_foreign_proto_name_): n += 2 + self.lengthString(len(self.foreign_proto_name_))
    if (self.has_flags_): n += 1 + self.lengthVarInt64(self.flags_)
    if (self.has_parent_): n += 1 + self.lengthVarInt64(self.parent_)
    if (self.has_enum_id_): n += 1 + self.lengthVarInt64(self.enum_id_)
    if (self.has_deprecated_): n += 3  # 2-byte tag 208 + 1-byte bool
    n += 4 * len(self.option_)  # 2-byte start-group + 2-byte end-group each
    for i in xrange(len(self.option_)): n += self.option_[i].ByteSize()
    return n + 5  # five 1-byte tags for the required fields

  def ByteSizePartial(self):
    """Serialized size counting only the fields actually present."""
    n = 0
    if (self.has_name_):
      n += 1
      n += self.lengthString(len(self.name_))
    if (self.has_number_):
      n += 1
      n += self.lengthVarInt64(self.number_)
    if (self.has_wire_type_):
      n += 1
      n += self.lengthVarInt64(self.wire_type_)
    if (self.has_declared_type_):
      n += 1
      n += self.lengthVarInt64(self.declared_type_)
    if (self.has_label_):
      n += 1
      n += self.lengthVarInt64(self.label_)
    if (self.has_default_value_): n += 1 + self.lengthString(len(self.default_value_))
    if (self.has_foreign_): n += 1 + self.lengthString(len(self.foreign_))
    if (self.has_foreign_proto_name_): n += 2 + self.lengthString(len(self.foreign_proto_name_))
    if (self.has_flags_): n += 1 + self.lengthVarInt64(self.flags_)
    if (self.has_parent_): n += 1 + self.lengthVarInt64(self.parent_)
    if (self.has_enum_id_): n += 1 + self.lengthVarInt64(self.enum_id_)
    if (self.has_deprecated_): n += 3
    n += 4 * len(self.option_)
    for i in xrange(len(self.option_)): n += self.option_[i].ByteSizePartial()
    return n

  def Clear(self):
    self.clear_name()
    self.clear_number()
    self.clear_wire_type()
    self.clear_declared_type()
    self.clear_label()
    self.clear_default_value()
    self.clear_foreign()
    self.clear_foreign_proto_name()
    self.clear_flags()
    self.clear_parent()
    self.clear_enum_id()
    self.clear_deprecated()
    self.clear_option()

  def OutputUnchecked(self, out):
    """Serialize to `out`; caller must have checked IsInitialized()."""
    out.putVarInt32(26)
    out.putPrefixedString(self.name_)
    out.putVarInt32(32)
    out.putVarInt32(self.number_)
    out.putVarInt32(40)
    out.putVarInt32(self.wire_type_)
    out.putVarInt32(48)
    out.putVarInt32(self.declared_type_)
    out.putVarInt32(56)
    out.putVarInt32(self.label_)
    if (self.has_default_value_):
      out.putVarInt32(66)
      out.putPrefixedString(self.default_value_)
    if (self.has_foreign_):
      out.putVarInt32(74)
      out.putPrefixedString(self.foreign_)
    if (self.has_flags_):
      out.putVarInt32(80)
      out.putVarInt32(self.flags_)
    if (self.has_parent_):
      out.putVarInt32(96)
      out.putVarInt32(self.parent_)
    if (self.has_enum_id_):
      out.putVarInt32(104)
      out.putVarInt32(self.enum_id_)
    for i in xrange(len(self.option_)):
      # Each option is a group: start tag 163, end tag 164.
      out.putVarInt32(163)
      self.option_[i].OutputUnchecked(out)
      out.putVarInt32(164)
    if (self.has_deprecated_):
      out.putVarInt32(208)
      out.putBoolean(self.deprecated_)
    if (self.has_foreign_proto_name_):
      out.putVarInt32(226)
      out.putPrefixedString(self.foreign_proto_name_)

  def OutputPartial(self, out):
    """Serialize to `out`, skipping unset fields (required ones included)."""
    if (self.has_name_):
      out.putVarInt32(26)
      out.putPrefixedString(self.name_)
    if (self.has_number_):
      out.putVarInt32(32)
      out.putVarInt32(self.number_)
    if (self.has_wire_type_):
      out.putVarInt32(40)
      out.putVarInt32(self.wire_type_)
    if (self.has_declared_type_):
      out.putVarInt32(48)
      out.putVarInt32(self.declared_type_)
    if (self.has_label_):
      out.putVarInt32(56)
      out.putVarInt32(self.label_)
    if (self.has_default_value_):
      out.putVarInt32(66)
      out.putPrefixedString(self.default_value_)
    if (self.has_foreign_):
      out.putVarInt32(74)
      out.putPrefixedString(self.foreign_)
    if (self.has_flags_):
      out.putVarInt32(80)
      out.putVarInt32(self.flags_)
    if (self.has_parent_):
      out.putVarInt32(96)
      out.putVarInt32(self.parent_)
    if (self.has_enum_id_):
      out.putVarInt32(104)
      out.putVarInt32(self.enum_id_)
    for i in xrange(len(self.option_)):
      out.putVarInt32(163)
      self.option_[i].OutputPartial(out)
      out.putVarInt32(164)
    if (self.has_deprecated_):
      out.putVarInt32(208)
      out.putBoolean(self.deprecated_)
    if (self.has_foreign_proto_name_):
      out.putVarInt32(226)
      out.putPrefixedString(self.foreign_proto_name_)

  def TryMerge(self, d):
    """Decode fields from decoder `d` until this group's end tag (20)."""
    while 1:
      tt = d.getVarInt32()
      if tt == 20: break
      if tt == 26:
        self.set_name(d.getPrefixedString())
        continue
      if tt == 32:
        self.set_number(d.getVarInt32())
        continue
      if tt == 40:
        self.set_wire_type(d.getVarInt32())
        continue
      if tt == 48:
        self.set_declared_type(d.getVarInt32())
        continue
      if tt == 56:
        self.set_label(d.getVarInt32())
        continue
      if tt == 66:
        self.set_default_value(d.getPrefixedString())
        continue
      if tt == 74:
        self.set_foreign(d.getPrefixedString())
        continue
      if tt == 80:
        self.set_flags(d.getVarInt32())
        continue
      if tt == 96:
        self.set_parent(d.getVarInt32())
        continue
      if tt == 104:
        self.set_enum_id(d.getVarInt32())
        continue
      if tt == 163:
        self.add_option().TryMerge(d)
        continue
      if tt == 208:
        self.set_deprecated(d.getBoolean())
        continue
      if tt == 226:
        self.set_foreign_proto_name(d.getPrefixedString())
        continue
      # tag 0 is special: it's used to indicate an error.
      # so if we see it we raise an exception.
      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
      d.skipData(tt)

  def __str__(self, prefix="", printElemNumber=0):
    """Human-readable text-format dump of the set fields."""
    res=""
    if self.has_name_: res+=prefix+("name: %s\n" % self.DebugFormatString(self.name_))
    if self.has_number_: res+=prefix+("number: %s\n" % self.DebugFormatInt32(self.number_))
    if self.has_wire_type_: res+=prefix+("wire_type: %s\n" % self.DebugFormatInt32(self.wire_type_))
    if self.has_declared_type_: res+=prefix+("declared_type: %s\n" % self.DebugFormatInt32(self.declared_type_))
    if self.has_label_: res+=prefix+("label: %s\n" % self.DebugFormatInt32(self.label_))
    if self.has_default_value_: res+=prefix+("default_value: %s\n" % self.DebugFormatString(self.default_value_))
    if self.has_foreign_: res+=prefix+("foreign: %s\n" % self.DebugFormatString(self.foreign_))
    if self.has_foreign_proto_name_: res+=prefix+("foreign_proto_name: %s\n" % self.DebugFormatString(self.foreign_proto_name_))
    if self.has_flags_: res+=prefix+("flags: %s\n" % self.DebugFormatInt32(self.flags_))
    if self.has_parent_: res+=prefix+("parent: %s\n" % self.DebugFormatInt32(self.parent_))
    if self.has_enum_id_: res+=prefix+("enum_id: %s\n" % self.DebugFormatInt32(self.enum_id_))
    if self.has_deprecated_: res+=prefix+("deprecated: %s\n" % self.DebugFormatBool(self.deprecated_))
    cnt=0
    for e in self.option_:
      elm=""
      if printElemNumber: elm="(%d)" % cnt
      res+=prefix+("Option%s {\n" % elm)
      res+=e.__str__(prefix + "  ", printElemNumber)
      res+=prefix+"}\n"
      cnt+=1
    return res
class ProtocolDescriptor_EnumType(ProtocolBuffer.ProtocolMessage):
  """Generated message: one enum type declared by a ProtocolDescriptor.

  Field numbers follow the kEnumType* constants on ProtocolDescriptor:
  name (15, required string), parent (16, optional int32), tag (17,
  repeated group), proto2_name (31, optional string) and allow_alias
  (33, optional bool).
  """
  # Presence flags (has_*_ == 1 when the field was explicitly set),
  # each paired with its field's default value.
  has_name_ = 0
  name_ = ""
  has_parent_ = 0
  parent_ = 0
  has_proto2_name_ = 0
  proto2_name_ = ""
  has_allow_alias_ = 0
  allow_alias_ = 0
  def __init__(self, contents=None):
    # Repeated `tag` field starts empty; optionally decode `contents`.
    self.tag_ = []
    if contents is not None: self.MergeFromString(contents)
  def name(self): return self.name_
  def set_name(self, x):
    self.has_name_ = 1
    self.name_ = x
  def clear_name(self):
    # Only reset when set, so the shared class-level default stays untouched.
    if self.has_name_:
      self.has_name_ = 0
      self.name_ = ""
  def has_name(self): return self.has_name_
  def parent(self): return self.parent_
  def set_parent(self, x):
    self.has_parent_ = 1
    self.parent_ = x
  def clear_parent(self):
    if self.has_parent_:
      self.has_parent_ = 0
      self.parent_ = 0
  def has_parent(self): return self.has_parent_
  # Accessors for the repeated group field `tag`.
  def tag_size(self): return len(self.tag_)
  def tag_list(self): return self.tag_
  def tag(self, i):
    return self.tag_[i]
  def mutable_tag(self, i):
    return self.tag_[i]
  def add_tag(self):
    # Appends a fresh element and returns it for in-place population.
    x = ProtocolDescriptor_EnumTypeTag()
    self.tag_.append(x)
    return x
  def clear_tag(self):
    self.tag_ = []
  def proto2_name(self): return self.proto2_name_
  def set_proto2_name(self, x):
    self.has_proto2_name_ = 1
    self.proto2_name_ = x
  def clear_proto2_name(self):
    if self.has_proto2_name_:
      self.has_proto2_name_ = 0
      self.proto2_name_ = ""
  def has_proto2_name(self): return self.has_proto2_name_
  def allow_alias(self): return self.allow_alias_
  def set_allow_alias(self, x):
    self.has_allow_alias_ = 1
    self.allow_alias_ = x
  def clear_allow_alias(self):
    if self.has_allow_alias_:
      self.has_allow_alias_ = 0
      self.allow_alias_ = 0
  def has_allow_alias(self): return self.has_allow_alias_
  def MergeFrom(self, x):
    """Merge every set field of `x` (same message type) into self."""
    assert x is not self
    if (x.has_name()): self.set_name(x.name())
    if (x.has_parent()): self.set_parent(x.parent())
    for i in xrange(x.tag_size()): self.add_tag().CopyFrom(x.tag(i))
    if (x.has_proto2_name()): self.set_proto2_name(x.proto2_name())
    if (x.has_allow_alias()): self.set_allow_alias(x.allow_alias())
  def Equals(self, x):
    """Field-by-field equality; presence flags must match as well."""
    if x is self: return 1
    if self.has_name_ != x.has_name_: return 0
    if self.has_name_ and self.name_ != x.name_: return 0
    if self.has_parent_ != x.has_parent_: return 0
    if self.has_parent_ and self.parent_ != x.parent_: return 0
    if len(self.tag_) != len(x.tag_): return 0
    for e1, e2 in zip(self.tag_, x.tag_):
      if e1 != e2: return 0
    if self.has_proto2_name_ != x.has_proto2_name_: return 0
    if self.has_proto2_name_ and self.proto2_name_ != x.proto2_name_: return 0
    if self.has_allow_alias_ != x.has_allow_alias_: return 0
    if self.has_allow_alias_ and self.allow_alias_ != x.allow_alias_: return 0
    return 1
  def IsInitialized(self, debug_strs=None):
    """Return 1 iff required `name` is set and all nested tags are initialized."""
    initialized = 1
    if (not self.has_name_):
      initialized = 0
      if debug_strs is not None:
        debug_strs.append('Required field: name not set.')
    for p in self.tag_:
      if not p.IsInitialized(debug_strs): initialized=0
    return initialized
  def ByteSize(self):
    """Encoded size in bytes, assuming required fields are set."""
    n = 0
    n += self.lengthString(len(self.name_))
    if (self.has_parent_): n += 2 + self.lengthVarInt64(self.parent_)
    # Each nested tag costs the 2-byte start (139) + 2-byte end (140) group tags.
    n += 4 * len(self.tag_)
    for i in xrange(len(self.tag_)): n += self.tag_[i].ByteSize()
    if (self.has_proto2_name_): n += 2 + self.lengthString(len(self.proto2_name_))
    if (self.has_allow_alias_): n += 3
    # +1 for the one-byte wire tag (122) of required `name`.
    return n + 1
  def ByteSizePartial(self):
    """Like ByteSize, but tolerates unset required fields."""
    n = 0
    if (self.has_name_):
      n += 1
      n += self.lengthString(len(self.name_))
    if (self.has_parent_): n += 2 + self.lengthVarInt64(self.parent_)
    n += 4 * len(self.tag_)
    for i in xrange(len(self.tag_)): n += self.tag_[i].ByteSizePartial()
    if (self.has_proto2_name_): n += 2 + self.lengthString(len(self.proto2_name_))
    if (self.has_allow_alias_): n += 3
    return n
  def Clear(self):
    # Reset every field to its unset/default state.
    self.clear_name()
    self.clear_parent()
    self.clear_tag()
    self.clear_proto2_name()
    self.clear_allow_alias()
  def OutputUnchecked(self, out):
    """Serialize all fields without an initialization check."""
    out.putVarInt32(122)
    out.putPrefixedString(self.name_)
    if (self.has_parent_):
      out.putVarInt32(128)
      out.putVarInt32(self.parent_)
    for i in xrange(len(self.tag_)):
      # 139/140 = START_GROUP/END_GROUP wire tags for repeated field 17.
      out.putVarInt32(139)
      self.tag_[i].OutputUnchecked(out)
      out.putVarInt32(140)
    if (self.has_proto2_name_):
      out.putVarInt32(250)
      out.putPrefixedString(self.proto2_name_)
    if (self.has_allow_alias_):
      out.putVarInt32(264)
      out.putBoolean(self.allow_alias_)
  def OutputPartial(self, out):
    """Serialize only the fields that are present (partial encoding)."""
    if (self.has_name_):
      out.putVarInt32(122)
      out.putPrefixedString(self.name_)
    if (self.has_parent_):
      out.putVarInt32(128)
      out.putVarInt32(self.parent_)
    for i in xrange(len(self.tag_)):
      out.putVarInt32(139)
      self.tag_[i].OutputPartial(out)
      out.putVarInt32(140)
    if (self.has_proto2_name_):
      out.putVarInt32(250)
      out.putPrefixedString(self.proto2_name_)
    if (self.has_allow_alias_):
      out.putVarInt32(264)
      out.putBoolean(self.allow_alias_)
  def TryMerge(self, d):
    """Decode fields from decoder `d` until this group's END_GROUP tag (116)."""
    while 1:
      tt = d.getVarInt32()
      if tt == 116: break
      if tt == 122:
        self.set_name(d.getPrefixedString())
        continue
      if tt == 128:
        self.set_parent(d.getVarInt32())
        continue
      if tt == 139:
        # Nested Tag group; its TryMerge consumes the matching end tag (140).
        self.add_tag().TryMerge(d)
        continue
      if tt == 250:
        self.set_proto2_name(d.getPrefixedString())
        continue
      if tt == 264:
        self.set_allow_alias(d.getBoolean())
        continue
      # tag 0 is special: it's used to indicate an error.
      # so if we see it we raise an exception.
      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
      d.skipData(tt)
  def __str__(self, prefix="", printElemNumber=0):
    """Text-format rendering used for debugging."""
    res=""
    if self.has_name_: res+=prefix+("name: %s\n" % self.DebugFormatString(self.name_))
    if self.has_parent_: res+=prefix+("parent: %s\n" % self.DebugFormatInt32(self.parent_))
    cnt=0
    for e in self.tag_:
      elm=""
      if printElemNumber: elm="(%d)" % cnt
      res+=prefix+("Tag%s {\n" % elm)
      res+=e.__str__(prefix + "  ", printElemNumber)
      res+=prefix+"}\n"
      cnt+=1
    if self.has_proto2_name_: res+=prefix+("proto2_name: %s\n" % self.DebugFormatString(self.proto2_name_))
    if self.has_allow_alias_: res+=prefix+("allow_alias: %s\n" % self.DebugFormatBool(self.allow_alias_))
    return res
class ProtocolDescriptor(ProtocolBuffer.ProtocolMessage):
  """Generated message describing a protocol: its tags (fields), enum
  types, and proto2 cross-reference metadata.

  Required fields: filename (11) and name (1).  Repeated group fields:
  Tag (2) and EnumType (14).  The remaining proto2_* string fields are
  optional.  Field numbers are listed in the k* constants below.
  """
  # WireType values
  WIRETYPE_NUMERIC = 0
  WIRETYPE_DOUBLE = 1
  WIRETYPE_STRING = 2
  WIRETYPE_STARTGROUP = 3
  WIRETYPE_ENDGROUP = 4
  WIRETYPE_FLOAT = 5
  _WireType_NAMES = {
    0: "WIRETYPE_NUMERIC",
    1: "WIRETYPE_DOUBLE",
    2: "WIRETYPE_STRING",
    3: "WIRETYPE_STARTGROUP",
    4: "WIRETYPE_ENDGROUP",
    5: "WIRETYPE_FLOAT",
  }
  # Maps a WireType value to its symbolic name ("" when unknown).
  def WireType_Name(cls, x): return cls._WireType_NAMES.get(x, "")
  WireType_Name = classmethod(WireType_Name)
  # Label values
  LABEL_OPTIONAL = 1
  LABEL_REQUIRED = 2
  LABEL_REPEATED = 3
  _Label_NAMES = {
    1: "LABEL_OPTIONAL",
    2: "LABEL_REQUIRED",
    3: "LABEL_REPEATED",
  }
  def Label_Name(cls, x): return cls._Label_NAMES.get(x, "")
  Label_Name = classmethod(Label_Name)
  # DeclaredType values
  TYPE_DOUBLE = 1
  TYPE_FLOAT = 2
  TYPE_INT64 = 3
  TYPE_UINT64 = 4
  TYPE_INT32 = 5
  TYPE_FIXED64 = 6
  TYPE_FIXED32 = 7
  TYPE_BOOL = 8
  TYPE_STRING = 9
  TYPE_GROUP = 10
  TYPE_FOREIGN = 11
  _DeclaredType_NAMES = {
    1: "TYPE_DOUBLE",
    2: "TYPE_FLOAT",
    3: "TYPE_INT64",
    4: "TYPE_UINT64",
    5: "TYPE_INT32",
    6: "TYPE_FIXED64",
    7: "TYPE_FIXED32",
    8: "TYPE_BOOL",
    9: "TYPE_STRING",
    10: "TYPE_GROUP",
    11: "TYPE_FOREIGN",
  }
  def DeclaredType_Name(cls, x): return cls._DeclaredType_NAMES.get(x, "")
  DeclaredType_Name = classmethod(DeclaredType_Name)
  # Presence flags (has_*_ == 1 when the field was explicitly set),
  # each paired with its field's default value.
  has_filename_ = 0
  filename_ = ""
  has_name_ = 0
  name_ = ""
  has_proto_name_ = 0
  proto_name_ = ""
  has_proto2_file_descriptor_ = 0
  proto2_file_descriptor_ = ""
  has_proto2_file_main_ = 0
  proto2_file_main_ = ""
  has_proto2_name_ = 0
  proto2_name_ = ""
  has_proto2_extension_info_ = 0
  proto2_extension_info_ = ""
  has_proto2_file_scope_info_ = 0
  proto2_file_scope_info_ = ""
  def __init__(self, contents=None):
    # Repeated fields start empty; optionally decode `contents`.
    self.tag_ = []
    self.enumtype_ = []
    if contents is not None: self.MergeFromString(contents)
  def filename(self): return self.filename_
  def set_filename(self, x):
    self.has_filename_ = 1
    self.filename_ = x
  def clear_filename(self):
    # Only reset when set, so the shared class-level default stays untouched.
    if self.has_filename_:
      self.has_filename_ = 0
      self.filename_ = ""
  def has_filename(self): return self.has_filename_
  def name(self): return self.name_
  def set_name(self, x):
    self.has_name_ = 1
    self.name_ = x
  def clear_name(self):
    if self.has_name_:
      self.has_name_ = 0
      self.name_ = ""
  def has_name(self): return self.has_name_
  def proto_name(self): return self.proto_name_
  def set_proto_name(self, x):
    self.has_proto_name_ = 1
    self.proto_name_ = x
  def clear_proto_name(self):
    if self.has_proto_name_:
      self.has_proto_name_ = 0
      self.proto_name_ = ""
  def has_proto_name(self): return self.has_proto_name_
  # Accessors for the repeated group field `tag`.
  def tag_size(self): return len(self.tag_)
  def tag_list(self): return self.tag_
  def tag(self, i):
    return self.tag_[i]
  def mutable_tag(self, i):
    return self.tag_[i]
  def add_tag(self):
    # Appends a fresh element and returns it for in-place population.
    x = ProtocolDescriptor_Tag()
    self.tag_.append(x)
    return x
  def clear_tag(self):
    self.tag_ = []
  # Accessors for the repeated group field `enumtype`.
  def enumtype_size(self): return len(self.enumtype_)
  def enumtype_list(self): return self.enumtype_
  def enumtype(self, i):
    return self.enumtype_[i]
  def mutable_enumtype(self, i):
    return self.enumtype_[i]
  def add_enumtype(self):
    x = ProtocolDescriptor_EnumType()
    self.enumtype_.append(x)
    return x
  def clear_enumtype(self):
    self.enumtype_ = []
  def proto2_file_descriptor(self): return self.proto2_file_descriptor_
  def set_proto2_file_descriptor(self, x):
    self.has_proto2_file_descriptor_ = 1
    self.proto2_file_descriptor_ = x
  def clear_proto2_file_descriptor(self):
    if self.has_proto2_file_descriptor_:
      self.has_proto2_file_descriptor_ = 0
      self.proto2_file_descriptor_ = ""
  def has_proto2_file_descriptor(self): return self.has_proto2_file_descriptor_
  def proto2_file_main(self): return self.proto2_file_main_
  def set_proto2_file_main(self, x):
    self.has_proto2_file_main_ = 1
    self.proto2_file_main_ = x
  def clear_proto2_file_main(self):
    if self.has_proto2_file_main_:
      self.has_proto2_file_main_ = 0
      self.proto2_file_main_ = ""
  def has_proto2_file_main(self): return self.has_proto2_file_main_
  def proto2_name(self): return self.proto2_name_
  def set_proto2_name(self, x):
    self.has_proto2_name_ = 1
    self.proto2_name_ = x
  def clear_proto2_name(self):
    if self.has_proto2_name_:
      self.has_proto2_name_ = 0
      self.proto2_name_ = ""
  def has_proto2_name(self): return self.has_proto2_name_
  def proto2_extension_info(self): return self.proto2_extension_info_
  def set_proto2_extension_info(self, x):
    self.has_proto2_extension_info_ = 1
    self.proto2_extension_info_ = x
  def clear_proto2_extension_info(self):
    if self.has_proto2_extension_info_:
      self.has_proto2_extension_info_ = 0
      self.proto2_extension_info_ = ""
  def has_proto2_extension_info(self): return self.has_proto2_extension_info_
  def proto2_file_scope_info(self): return self.proto2_file_scope_info_
  def set_proto2_file_scope_info(self, x):
    self.has_proto2_file_scope_info_ = 1
    self.proto2_file_scope_info_ = x
  def clear_proto2_file_scope_info(self):
    if self.has_proto2_file_scope_info_:
      self.has_proto2_file_scope_info_ = 0
      self.proto2_file_scope_info_ = ""
  def has_proto2_file_scope_info(self): return self.has_proto2_file_scope_info_
  def MergeFrom(self, x):
    """Merge every set field of `x` (same message type) into self."""
    assert x is not self
    if (x.has_filename()): self.set_filename(x.filename())
    if (x.has_name()): self.set_name(x.name())
    if (x.has_proto_name()): self.set_proto_name(x.proto_name())
    for i in xrange(x.tag_size()): self.add_tag().CopyFrom(x.tag(i))
    for i in xrange(x.enumtype_size()): self.add_enumtype().CopyFrom(x.enumtype(i))
    if (x.has_proto2_file_descriptor()): self.set_proto2_file_descriptor(x.proto2_file_descriptor())
    if (x.has_proto2_file_main()): self.set_proto2_file_main(x.proto2_file_main())
    if (x.has_proto2_name()): self.set_proto2_name(x.proto2_name())
    if (x.has_proto2_extension_info()): self.set_proto2_extension_info(x.proto2_extension_info())
    if (x.has_proto2_file_scope_info()): self.set_proto2_file_scope_info(x.proto2_file_scope_info())
  # Native codec hooks: defined only when the _net_proto___parse__python
  # helper module is present.
  if _net_proto___parse__python is not None:
    def _CMergeFromString(self, s):
      _net_proto___parse__python.MergeFromString(self, 'ProtocolDescriptor', s)
  if _net_proto___parse__python is not None:
    def _CEncode(self):
      return _net_proto___parse__python.Encode(self, 'ProtocolDescriptor')
  if _net_proto___parse__python is not None:
    def _CEncodePartial(self):
      return _net_proto___parse__python.EncodePartial(self, 'ProtocolDescriptor')
  if _net_proto___parse__python is not None:
    def _CToASCII(self, output_format):
      return _net_proto___parse__python.ToASCII(self, 'ProtocolDescriptor', output_format)
  if _net_proto___parse__python is not None:
    def ParseASCII(self, s):
      _net_proto___parse__python.ParseASCII(self, 'ProtocolDescriptor', s)
  if _net_proto___parse__python is not None:
    def ParseASCIIIgnoreUnknown(self, s):
      _net_proto___parse__python.ParseASCIIIgnoreUnknown(self, 'ProtocolDescriptor', s)
  def Equals(self, x):
    """Field-by-field equality; presence flags must match as well."""
    if x is self: return 1
    if self.has_filename_ != x.has_filename_: return 0
    if self.has_filename_ and self.filename_ != x.filename_: return 0
    if self.has_name_ != x.has_name_: return 0
    if self.has_name_ and self.name_ != x.name_: return 0
    if self.has_proto_name_ != x.has_proto_name_: return 0
    if self.has_proto_name_ and self.proto_name_ != x.proto_name_: return 0
    if len(self.tag_) != len(x.tag_): return 0
    for e1, e2 in zip(self.tag_, x.tag_):
      if e1 != e2: return 0
    if len(self.enumtype_) != len(x.enumtype_): return 0
    for e1, e2 in zip(self.enumtype_, x.enumtype_):
      if e1 != e2: return 0
    if self.has_proto2_file_descriptor_ != x.has_proto2_file_descriptor_: return 0
    if self.has_proto2_file_descriptor_ and self.proto2_file_descriptor_ != x.proto2_file_descriptor_: return 0
    if self.has_proto2_file_main_ != x.has_proto2_file_main_: return 0
    if self.has_proto2_file_main_ and self.proto2_file_main_ != x.proto2_file_main_: return 0
    if self.has_proto2_name_ != x.has_proto2_name_: return 0
    if self.has_proto2_name_ and self.proto2_name_ != x.proto2_name_: return 0
    if self.has_proto2_extension_info_ != x.has_proto2_extension_info_: return 0
    if self.has_proto2_extension_info_ and self.proto2_extension_info_ != x.proto2_extension_info_: return 0
    if self.has_proto2_file_scope_info_ != x.has_proto2_file_scope_info_: return 0
    if self.has_proto2_file_scope_info_ and self.proto2_file_scope_info_ != x.proto2_file_scope_info_: return 0
    return 1
  def IsInitialized(self, debug_strs=None):
    """Return 1 iff required fields are set and all nested groups are initialized."""
    initialized = 1
    if (not self.has_filename_):
      initialized = 0
      if debug_strs is not None:
        debug_strs.append('Required field: filename not set.')
    if (not self.has_name_):
      initialized = 0
      if debug_strs is not None:
        debug_strs.append('Required field: name not set.')
    for p in self.tag_:
      if not p.IsInitialized(debug_strs): initialized=0
    for p in self.enumtype_:
      if not p.IsInitialized(debug_strs): initialized=0
    return initialized
  def ByteSize(self):
    """Encoded size in bytes, assuming required fields are set."""
    n = 0
    n += self.lengthString(len(self.filename_))
    n += self.lengthString(len(self.name_))
    if (self.has_proto_name_): n += 2 + self.lengthString(len(self.proto_name_))
    # Each group element costs its 1-byte start + 1-byte end wire tags.
    n += 2 * len(self.tag_)
    for i in xrange(len(self.tag_)): n += self.tag_[i].ByteSize()
    n += 2 * len(self.enumtype_)
    for i in xrange(len(self.enumtype_)): n += self.enumtype_[i].ByteSize()
    if (self.has_proto2_file_descriptor_): n += 2 + self.lengthString(len(self.proto2_file_descriptor_))
    if (self.has_proto2_file_main_): n += 2 + self.lengthString(len(self.proto2_file_main_))
    if (self.has_proto2_name_): n += 2 + self.lengthString(len(self.proto2_name_))
    if (self.has_proto2_extension_info_): n += 2 + self.lengthString(len(self.proto2_extension_info_))
    if (self.has_proto2_file_scope_info_): n += 2 + self.lengthString(len(self.proto2_file_scope_info_))
    # +2 for the one-byte wire tags of the required filename and name fields.
    return n + 2
  def ByteSizePartial(self):
    """Like ByteSize, but tolerates unset required fields."""
    n = 0
    if (self.has_filename_):
      n += 1
      n += self.lengthString(len(self.filename_))
    if (self.has_name_):
      n += 1
      n += self.lengthString(len(self.name_))
    if (self.has_proto_name_): n += 2 + self.lengthString(len(self.proto_name_))
    n += 2 * len(self.tag_)
    for i in xrange(len(self.tag_)): n += self.tag_[i].ByteSizePartial()
    n += 2 * len(self.enumtype_)
    for i in xrange(len(self.enumtype_)): n += self.enumtype_[i].ByteSizePartial()
    if (self.has_proto2_file_descriptor_): n += 2 + self.lengthString(len(self.proto2_file_descriptor_))
    if (self.has_proto2_file_main_): n += 2 + self.lengthString(len(self.proto2_file_main_))
    if (self.has_proto2_name_): n += 2 + self.lengthString(len(self.proto2_name_))
    if (self.has_proto2_extension_info_): n += 2 + self.lengthString(len(self.proto2_extension_info_))
    if (self.has_proto2_file_scope_info_): n += 2 + self.lengthString(len(self.proto2_file_scope_info_))
    return n
  def Clear(self):
    # Reset every field to its unset/default state.
    self.clear_filename()
    self.clear_name()
    self.clear_proto_name()
    self.clear_tag()
    self.clear_enumtype()
    self.clear_proto2_file_descriptor()
    self.clear_proto2_file_main()
    self.clear_proto2_name()
    self.clear_proto2_extension_info()
    self.clear_proto2_file_scope_info()
  def OutputUnchecked(self, out):
    """Serialize all fields in ascending wire-tag order, no init check."""
    out.putVarInt32(10)
    out.putPrefixedString(self.name_)
    for i in xrange(len(self.tag_)):
      # 19/20 = START_GROUP/END_GROUP wire tags for repeated field 2.
      out.putVarInt32(19)
      self.tag_[i].OutputUnchecked(out)
      out.putVarInt32(20)
    out.putVarInt32(90)
    out.putPrefixedString(self.filename_)
    for i in xrange(len(self.enumtype_)):
      # 115/116 = START_GROUP/END_GROUP wire tags for repeated field 14.
      out.putVarInt32(115)
      self.enumtype_[i].OutputUnchecked(out)
      out.putVarInt32(116)
    if (self.has_proto2_file_descriptor_):
      out.putVarInt32(186)
      out.putPrefixedString(self.proto2_file_descriptor_)
    if (self.has_proto2_file_main_):
      out.putVarInt32(194)
      out.putPrefixedString(self.proto2_file_main_)
    if (self.has_proto2_name_):
      out.putVarInt32(202)
      out.putPrefixedString(self.proto2_name_)
    if (self.has_proto_name_):
      out.putVarInt32(218)
      out.putPrefixedString(self.proto_name_)
    if (self.has_proto2_extension_info_):
      out.putVarInt32(234)
      out.putPrefixedString(self.proto2_extension_info_)
    if (self.has_proto2_file_scope_info_):
      out.putVarInt32(242)
      out.putPrefixedString(self.proto2_file_scope_info_)
  def OutputPartial(self, out):
    """Serialize only the fields that are present (partial encoding)."""
    if (self.has_name_):
      out.putVarInt32(10)
      out.putPrefixedString(self.name_)
    for i in xrange(len(self.tag_)):
      out.putVarInt32(19)
      self.tag_[i].OutputPartial(out)
      out.putVarInt32(20)
    if (self.has_filename_):
      out.putVarInt32(90)
      out.putPrefixedString(self.filename_)
    for i in xrange(len(self.enumtype_)):
      out.putVarInt32(115)
      self.enumtype_[i].OutputPartial(out)
      out.putVarInt32(116)
    if (self.has_proto2_file_descriptor_):
      out.putVarInt32(186)
      out.putPrefixedString(self.proto2_file_descriptor_)
    if (self.has_proto2_file_main_):
      out.putVarInt32(194)
      out.putPrefixedString(self.proto2_file_main_)
    if (self.has_proto2_name_):
      out.putVarInt32(202)
      out.putPrefixedString(self.proto2_name_)
    if (self.has_proto_name_):
      out.putVarInt32(218)
      out.putPrefixedString(self.proto_name_)
    if (self.has_proto2_extension_info_):
      out.putVarInt32(234)
      out.putPrefixedString(self.proto2_extension_info_)
    if (self.has_proto2_file_scope_info_):
      out.putVarInt32(242)
      out.putPrefixedString(self.proto2_file_scope_info_)
  def TryMerge(self, d):
    """Decode fields from decoder `d` until it is exhausted.

    Top-level message, so decoding runs until d.avail() == 0 rather than
    to a group-end tag; unknown fields are skipped.
    """
    while d.avail() > 0:
      tt = d.getVarInt32()
      if tt == 10:
        self.set_name(d.getPrefixedString())
        continue
      if tt == 19:
        # Nested Tag group; its TryMerge consumes the matching end tag (20).
        self.add_tag().TryMerge(d)
        continue
      if tt == 90:
        self.set_filename(d.getPrefixedString())
        continue
      if tt == 115:
        # Nested EnumType group; its TryMerge consumes the end tag (116).
        self.add_enumtype().TryMerge(d)
        continue
      if tt == 186:
        self.set_proto2_file_descriptor(d.getPrefixedString())
        continue
      if tt == 194:
        self.set_proto2_file_main(d.getPrefixedString())
        continue
      if tt == 202:
        self.set_proto2_name(d.getPrefixedString())
        continue
      if tt == 218:
        self.set_proto_name(d.getPrefixedString())
        continue
      if tt == 234:
        self.set_proto2_extension_info(d.getPrefixedString())
        continue
      if tt == 242:
        self.set_proto2_file_scope_info(d.getPrefixedString())
        continue
      # tag 0 is special: it's used to indicate an error.
      # so if we see it we raise an exception.
      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
      d.skipData(tt)
  def __str__(self, prefix="", printElemNumber=0):
    """Text-format rendering used for debugging."""
    res=""
    if self.has_filename_: res+=prefix+("filename: %s\n" % self.DebugFormatString(self.filename_))
    if self.has_name_: res+=prefix+("name: %s\n" % self.DebugFormatString(self.name_))
    if self.has_proto_name_: res+=prefix+("proto_name: %s\n" % self.DebugFormatString(self.proto_name_))
    cnt=0
    for e in self.tag_:
      elm=""
      if printElemNumber: elm="(%d)" % cnt
      res+=prefix+("Tag%s {\n" % elm)
      res+=e.__str__(prefix + "  ", printElemNumber)
      res+=prefix+"}\n"
      cnt+=1
    cnt=0
    for e in self.enumtype_:
      elm=""
      if printElemNumber: elm="(%d)" % cnt
      res+=prefix+("EnumType%s {\n" % elm)
      res+=e.__str__(prefix + "  ", printElemNumber)
      res+=prefix+"}\n"
      cnt+=1
    if self.has_proto2_file_descriptor_: res+=prefix+("proto2_file_descriptor: %s\n" % self.DebugFormatString(self.proto2_file_descriptor_))
    if self.has_proto2_file_main_: res+=prefix+("proto2_file_main: %s\n" % self.DebugFormatString(self.proto2_file_main_))
    if self.has_proto2_name_: res+=prefix+("proto2_name: %s\n" % self.DebugFormatString(self.proto2_name_))
    if self.has_proto2_extension_info_: res+=prefix+("proto2_extension_info: %s\n" % self.DebugFormatString(self.proto2_extension_info_))
    if self.has_proto2_file_scope_info_: res+=prefix+("proto2_file_scope_info: %s\n" % self.DebugFormatString(self.proto2_file_scope_info_))
    return res
  # Builds a dense tuple indexed by tag number from a sparse dict
  # (evaluated at class-definition time; no `self`).
  def _BuildTagLookupTable(sparse, maxtag, default=None):
    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
  # Field numbers of this message and its nested groups.
  kfilename = 11
  kname = 1
  kproto_name = 27
  kTagGroup = 2
  kTagname = 3
  kTagnumber = 4
  kTagwire_type = 5
  kTagdeclared_type = 6
  kTaglabel = 7
  kTagdefault_value = 8
  kTagforeign = 9
  kTagforeign_proto_name = 28
  kTagflags = 10
  kTagparent = 12
  kTagenum_id = 13
  kTagdeprecated = 26
  kTagOptionGroup = 20
  kTagOptionname = 21
  kTagOptionvalue = 22
  kEnumTypeGroup = 14
  kEnumTypename = 15
  kEnumTypeparent = 16
  kEnumTypeTagGroup = 17
  kEnumTypeTagname = 18
  kEnumTypeTagvalue = 19
  kEnumTypeproto2_name = 31
  kEnumTypeallow_alias = 33
  kproto2_file_descriptor = 23
  kproto2_file_main = 24
  kproto2_name = 25
  kproto2_extension_info = 29
  kproto2_file_scope_info = 30
  # Field-number -> field-name table used for text formatting.
  _TEXT = _BuildTagLookupTable({
    0: "ErrorCode",
    1: "name",
    2: "Tag",
    3: "name",
    4: "number",
    5: "wire_type",
    6: "declared_type",
    7: "label",
    8: "default_value",
    9: "foreign",
    10: "flags",
    11: "filename",
    12: "parent",
    13: "enum_id",
    14: "EnumType",
    15: "name",
    16: "parent",
    17: "Tag",
    18: "name",
    19: "value",
    20: "Option",
    21: "name",
    22: "value",
    23: "proto2_file_descriptor",
    24: "proto2_file_main",
    25: "proto2_name",
    26: "deprecated",
    27: "proto_name",
    28: "foreign_proto_name",
    29: "proto2_extension_info",
    30: "proto2_file_scope_info",
    31: "proto2_name",
    33: "allow_alias",
  }, 33)
  # Field-number -> wire-type table used by the generic codec.
  _TYPES = _BuildTagLookupTable({
    0: ProtocolBuffer.Encoder.NUMERIC,
    1: ProtocolBuffer.Encoder.STRING,
    2: ProtocolBuffer.Encoder.STARTGROUP,
    3: ProtocolBuffer.Encoder.STRING,
    4: ProtocolBuffer.Encoder.NUMERIC,
    5: ProtocolBuffer.Encoder.NUMERIC,
    6: ProtocolBuffer.Encoder.NUMERIC,
    7: ProtocolBuffer.Encoder.NUMERIC,
    8: ProtocolBuffer.Encoder.STRING,
    9: ProtocolBuffer.Encoder.STRING,
    10: ProtocolBuffer.Encoder.NUMERIC,
    11: ProtocolBuffer.Encoder.STRING,
    12: ProtocolBuffer.Encoder.NUMERIC,
    13: ProtocolBuffer.Encoder.NUMERIC,
    14: ProtocolBuffer.Encoder.STARTGROUP,
    15: ProtocolBuffer.Encoder.STRING,
    16: ProtocolBuffer.Encoder.NUMERIC,
    17: ProtocolBuffer.Encoder.STARTGROUP,
    18: ProtocolBuffer.Encoder.STRING,
    19: ProtocolBuffer.Encoder.NUMERIC,
    20: ProtocolBuffer.Encoder.STARTGROUP,
    21: ProtocolBuffer.Encoder.STRING,
    22: ProtocolBuffer.Encoder.STRING,
    23: ProtocolBuffer.Encoder.STRING,
    24: ProtocolBuffer.Encoder.STRING,
    25: ProtocolBuffer.Encoder.STRING,
    26: ProtocolBuffer.Encoder.NUMERIC,
    27: ProtocolBuffer.Encoder.STRING,
    28: ProtocolBuffer.Encoder.STRING,
    29: ProtocolBuffer.Encoder.STRING,
    30: ProtocolBuffer.Encoder.STRING,
    31: ProtocolBuffer.Encoder.STRING,
    33: ProtocolBuffer.Encoder.NUMERIC,
  }, 33, ProtocolBuffer.Encoder.MAX_TYPE)
  # stylesheet for XML output
  _STYLE = \
   """"""
  _STYLE_CONTENT_TYPE = \
   """"""
  _PROTO_DESCRIPTOR_NAME = 'ProtocolDescriptor'
  # Base64-encoded serialized descriptor, registered with the native codec
  # below when it is available.  Generated data: do not edit.
  _SERIALIZED_DESCRIPTOR = array.array('B')
  _SERIALIZED_DESCRIPTOR.fromstring(base64.decodestring("WhxuZXQvcHJvdG8vcHJvdG9jb2x0eXBlLnByb3RvChJQcm90b2NvbERlc2NyaXB0b3ITGghmaWxlbmFtZSALKAIwCTgCFBMaBG5hbWUgASgCMAk4AhQTGgpwcm90b19uYW1lIBsoAjAJOAEUExoDVGFnIAIoAzAKOAMUExoIVGFnLm5hbWUgAygCMAk4AmADFBMaClRhZy5udW1iZXIgBCgAMAU4AmADFBMaDVRhZy53aXJlX3R5cGUgBSgAMAU4AmADFBMaEVRhZy5kZWNsYXJlZF90eXBlIAYoADAFOAJgAxQTGglUYWcubGFiZWwgBygAMAU4AmADFBMaEVRhZy5kZWZhdWx0X3ZhbHVlIAgoAjAJOAFgAxQTGgtUYWcuZm9yZWlnbiAJKAIwCTgBYAMUExoWVGFnLmZvcmVpZ25fcHJvdG9fbmFtZSAcKAIwCTgBYAMUExoJVGFnLmZsYWdzIAooADAFOAFgAxQTGgpUYWcucGFyZW50IAwoADAFOAFCAi0xYAOjAaoBB2RlZmF1bHSyAQItMaQBFBMaC1RhZy5lbnVtX2lkIA0oADAFOAFCAi0xYAOjAaoBB2RlZmF1bHSyAQItMaQBFBMaDlRhZy5kZXByZWNhdGVkIBooADAIOAFCBWZhbHNlYAOjAaoBB2RlZmF1bHSyAQVmYWxzZaQBFBMaClRhZy5PcHRpb24gFCgDMAo4A2ADFBMaD1RhZy5PcHRpb24ubmFtZSAVKAIwCTgCYBAUExoQVGFnLk9wdGlvbi52YWx1ZSAWKAIwCTgCYBAUExoIRW51bVR5cGUgDigDMAo4AxQTGg1FbnVtVHlwZS5uYW1lIA8oAjAJOAJgExQTGg9FbnVtVHlwZS5wYXJlbnQgECgAMAU4AWATFBMaDEVudW1UeXBlLlRhZyARKAMwCjgDYBMUExoRRW51bVR5cGUuVGFnLm5hbWUgEigCMAk4AmAWFBMaEkVudW1UeXBlLlRhZy52YWx1ZSATKAAwBTgCYBYUExoURW51bVR5cGUucHJvdG8yX25hbWUgHygCMAk4AWATFBMaFEVudW1UeXBlLmFsbG93X2FsaWFzICEoADAIOAFgExQTGhZwcm90bzJfZmlsZV9kZXNjcmlwdG9yIBcoAjAJOAEUExoScHJvdG8yX2ZpbGVfbWFzdGVyIBgoAjAJOAEUExoLcHJvdG8yX25hbWUgGSgCMAk4ARQTGhVwcm90bzJfZXh0ZW5zaW9uX2luZm8gHSgCMAk4ARQTGhZwcm90bzJfZmlsZV9zY29wZV9pbmZvIB4oAjAJOAEUc3oIV2lyZVR5cGWLAZIBEFdJUkVUWVBFX05VTUVSSUOYAQCMAYsBkgEPV0lSRVRZUEVfRE9VQkxFmAEBjAGLAZIBD1dJUkVUWVBFX1NUUklOR5gBAowBiwGSARNXSVJFVFlQRV9TVEFSVEdST1VQmAEDjAGLAZIBEVdJUkVUWVBFX0VOREdST1VQmAEEjAGLAZIBDldJUkVUWVBFX0ZMT0FUmAEFjAF0c3oFTGFiZWyLAZIBDkxBQkVMX09QVElPTkFMmAEBjAGLAZIBDkxBQkVMX1JFUVVJUkVEmAECjAGLAZIBDkxBQkVMX1JFUEVBVEVEmAEDjAF0c3oMRGVjbGFyZWRUeXBliwGSAQtUWVBFX0RPVUJMRZgBAYwBiwGSAQpUWVBFX0ZMT0FUmAECjAGLAZIBClRZUEVfSU5UNjSYAQOMAYsBkgELVFlQRV9VSU5UNjSYAQSMAYsBkgEKVFlQRV9JTlQzMpgBBYwBiwGSAQxUWVBFX0ZJWEVENjSYAQaMAYsBkgEMVFlQRV9GSVhFRDMymAEHjAGLAZIBCVRZUEVfQk9PTJgBCIwBiwGSAQtUWVBFX1NUUklOR5gBCYwBiwGSAQpUWVBFX0dST1VQmAEKjAGLA
ZIBDFRZUEVfRk9SRUlHTpgBC4wBdLoBxhIKHG5ldC9wcm90by9wcm90b2NvbHR5cGUucHJvdG8iwgkKElByb3RvY29sRGVzY3JpcHRvchIQCghmaWxlbmFtZRgLIAIoCRIMCgRuYW1lGAEgAigJEhIKCnByb3RvX25hbWUYGyABKAkSJAoDdGFnGAIgAygKMhcuUHJvdG9jb2xEZXNjcmlwdG9yLlRhZxIuCghlbnVtdHlwZRgOIAMoCjIcLlByb3RvY29sRGVzY3JpcHRvci5FbnVtVHlwZRIeChZwcm90bzJfZmlsZV9kZXNjcmlwdG9yGBcgASgMEhoKEnByb3RvMl9maWxlX21hc3RlchgYIAEoCRITCgtwcm90bzJfbmFtZRgZIAEoCRIdChVwcm90bzJfZXh0ZW5zaW9uX2luZm8YHSABKAwSHgoWcHJvdG8yX2ZpbGVfc2NvcGVfaW5mbxgeIAEoDBrKAgoDVGFnEgwKBG5hbWUYAyACKAkSDgoGbnVtYmVyGAQgAigFEhEKCXdpcmVfdHlwZRgFIAIoBRIVCg1kZWNsYXJlZF90eXBlGAYgAigFEg0KBWxhYmVsGAcgAigFEhUKDWRlZmF1bHRfdmFsdWUYCCABKAkSDwoHZm9yZWlnbhgJIAEoCRIaChJmb3JlaWduX3Byb3RvX25hbWUYHCABKAkSDQoFZmxhZ3MYCiABKAUSEgoGcGFyZW50GAwgASgFOgItMRITCgdlbnVtX2lkGA0gASgFOgItMRIZCgpkZXByZWNhdGVkGBogASgIOgVmYWxzZRIuCgZvcHRpb24YFCADKAoyHi5Qcm90b2NvbERlc2NyaXB0b3IuVGFnLk9wdGlvbholCgZPcHRpb24SDAoEbmFtZRgVIAIoCRINCgV2YWx1ZRgWIAIoCRqlAQoIRW51bVR5cGUSDAoEbmFtZRgPIAIoCRIOCgZwYXJlbnQYECABKAUSLQoDdGFnGBEgAygKMiAuUHJvdG9jb2xEZXNjcmlwdG9yLkVudW1UeXBlLlRhZxITCgtwcm90bzJfbmFtZRgfIAEoCRITCgthbGxvd19hbGlhcxghIAEoCBoiCgNUYWcSDAoEbmFtZRgSIAIoCRINCgV2YWx1ZRgTIAIoBSKOAQoIV2lyZVR5cGUSFAoQV0lSRVRZUEVfTlVNRVJJQxAAEhMKD1dJUkVUWVBFX0RPVUJMRRABEhMKD1dJUkVUWVBFX1NUUklORxACEhcKE1dJUkVUWVBFX1NUQVJUR1JPVVAQAxIVChFXSVJFVFlQRV9FTkRHUk9VUBAEEhIKDldJUkVUWVBFX0ZMT0FUEAUiQwoFTGFiZWwSEgoOTEFCRUxfT1BUSU9OQUwQARISCg5MQUJFTF9SRVFVSVJFRBACEhIKDkxBQkVMX1JFUEVBVEVEEAMixgEKDERlY2xhcmVkVHlwZRIPCgtUWVBFX0RPVUJMRRABEg4KClRZUEVfRkxPQVQQAhIOCgpUWVBFX0lOVDY0EAMSDwoLVFlQRV9VSU5UNjQQBBIOCgpUWVBFX0lOVDMyEAUSEAoMVFlQRV9GSVhFRDY0EAYSEAoMVFlQRV9GSVhFRDMyEAcSDQoJVFlQRV9CT09MEAgSDwoLVFlQRV9TVFJJTkcQCRIOCgpUWVBFX0dST1VQEAoSEAoMVFlQRV9GT1JFSUdOEAsiSQoWUHJvdG9jb2xGaWxlRGVzY3JpcHRvchIQCghmaWxlbmFtZRgBIAIoCRIMCgR0eXBlGAIgAygJEg8KB3NlcnZpY2UYAyADKAkitgcKFVJQQ19TZXJ2aWNlRGVzY3JpcHRvchIQCghmaWxlbmFtZRgBIAIoCRIMCgRuYW1lGAIgAigJEhEKCWZ1bGxfbmFtZRgSIAEoCRIfChdmYWlsdXJlX2RldGVjdGlvbl9kZWxheRgQIAEoARIWCg5tdWx0aWNhc3Rfc3R1YhgUIAEoCBItCgZtZXRob2QYAyADK
AoyHS5SUENfU2VydmljZURlc2NyaXB0b3IuTWV0aG9kEi0KBnN0cmVhbRgVIAMoCjIdLlJQQ19TZXJ2aWNlRGVzY3JpcHRvci5TdHJlYW0a8wIKBk1ldGhvZBIMCgRuYW1lGAQgAigJEhUKDWFyZ3VtZW50X3R5cGUYBSACKAkSEwoLcmVzdWx0X3R5cGUYBiACKAkSEwoLc3RyZWFtX3R5cGUYDiABKAkSEAoIcHJvdG9jb2wYByABKAkSEAoIZGVhZGxpbmUYCCABKAESHQoVZHVwbGljYXRlX3N1cHByZXNzaW9uGAkgASgIEhEKCWZhaWxfZmFzdBgKIAEoCBIgChhlbmRfdXNlcl9jcmVkc19yZXF1ZXN0ZWQYIiABKAgSFgoOY2xpZW50X2xvZ2dpbmcYCyABKAUSFgoOc2VydmVyX2xvZ2dpbmcYDCABKAUSFgoOc2VjdXJpdHlfbGV2ZWwYDSABKAkSFgoOc2VjdXJpdHlfbGFiZWwYEyABKAkSFwoPcmVzcG9uc2VfZm9ybWF0GA8gASgJEhYKDnJlcXVlc3RfZm9ybWF0GBEgASgJEhEKCWxvZ19sZXZlbBgkIAEoCRrcAgoGU3RyZWFtEgwKBG5hbWUYFiACKAkSGwoTY2xpZW50X21lc3NhZ2VfdHlwZRgXIAIoCRIbChNzZXJ2ZXJfbWVzc2FnZV90eXBlGBggAigJEh0KFWNsaWVudF9pbml0aWFsX3Rva2VucxgZIAEoAxIdChVzZXJ2ZXJfaW5pdGlhbF90b2tlbnMYGiABKAMSEgoKdG9rZW5fdW5pdBgbIAEoCRIWCg5zZWN1cml0eV9sZXZlbBgcIAEoCRIWCg5zZWN1cml0eV9sYWJlbBgdIAEoCRIWCg5jbGllbnRfbG9nZ2luZxgeIAEoBRIWCg5zZXJ2ZXJfbG9nZ2luZxgfIAEoBRIQCghkZWFkbGluZRggIAEoARIRCglmYWlsX2Zhc3QYISABKAgSIAoYZW5kX3VzZXJfY3JlZHNfcmVxdWVzdGVkGCMgASgIEhEKCWxvZ19sZXZlbBglIAEoCUJdChxjb20uZ29vZ2xlLmlvLnByb3RvY29sLnByb3RvEAEgASgBQhJQcm90b2NvbFR5cGVQcm90b3NQAZoBHWNvbS5nb29nbGUuaW8ucHJvdG9jb2wucHJvdG8y0AEB"))
  if _net_proto___parse__python is not None:
    _net_proto___parse__python.RegisterType(
      _SERIALIZED_DESCRIPTOR.tostring())
class ProtocolFileDescriptor(ProtocolBuffer.ProtocolMessage):
has_filename_ = 0
filename_ = ""
  def __init__(self, contents=None):
    # Repeated fields start empty; presence flags and scalar defaults come
    # from the class attributes.
    self.type_ = []
    self.service_ = []
    # MergeFromString is inherited from the ProtocolBuffer.ProtocolMessage base.
    if contents is not None: self.MergeFromString(contents)
  # -- filename (required string; wire tag 10 => field 1) --
  def filename(self): return self.filename_
  def set_filename(self, x):
    self.has_filename_ = 1
    self.filename_ = x
  def clear_filename(self):
    # Only reset when set, so the shared class-level default stays untouched.
    if self.has_filename_:
      self.has_filename_ = 0
      self.filename_ = ""
  def has_filename(self): return self.has_filename_
  # -- type (repeated string; wire tag 18 => field 2) --
  def type_size(self): return len(self.type_)
  def type_list(self): return self.type_
  def type(self, i):
    return self.type_[i]
  def set_type(self, i, x):
    self.type_[i] = x
  def add_type(self, x):
    self.type_.append(x)
  def clear_type(self):
    self.type_ = []
  # -- service (repeated string; wire tag 26 => field 3) --
  def service_size(self): return len(self.service_)
  def service_list(self): return self.service_
  def service(self, i):
    return self.service_[i]
  def set_service(self, i, x):
    self.service_[i] = x
  def add_service(self, x):
    self.service_.append(x)
  def clear_service(self):
    self.service_ = []
def MergeFrom(self, x):
assert x is not self
if (x.has_filename()): self.set_filename(x.filename())
for i in xrange(x.type_size()): self.add_type(x.type(i))
for i in xrange(x.service_size()): self.add_service(x.service(i))
  # Native codec hooks: each is defined only when the optional
  # _net_proto___parse__python helper module is present at import time.
  if _net_proto___parse__python is not None:
    def _CMergeFromString(self, s):
      _net_proto___parse__python.MergeFromString(self, 'ProtocolFileDescriptor', s)
  if _net_proto___parse__python is not None:
    def _CEncode(self):
      return _net_proto___parse__python.Encode(self, 'ProtocolFileDescriptor')
  if _net_proto___parse__python is not None:
    def _CEncodePartial(self):
      return _net_proto___parse__python.EncodePartial(self, 'ProtocolFileDescriptor')
  if _net_proto___parse__python is not None:
    def _CToASCII(self, output_format):
      return _net_proto___parse__python.ToASCII(self, 'ProtocolFileDescriptor', output_format)
  if _net_proto___parse__python is not None:
    def ParseASCII(self, s):
      _net_proto___parse__python.ParseASCII(self, 'ProtocolFileDescriptor', s)
  if _net_proto___parse__python is not None:
    def ParseASCIIIgnoreUnknown(self, s):
      _net_proto___parse__python.ParseASCIIIgnoreUnknown(self, 'ProtocolFileDescriptor', s)
def Equals(self, x):
if x is self: return 1
if self.has_filename_ != x.has_filename_: return 0
if self.has_filename_ and self.filename_ != x.filename_: return 0
if len(self.type_) != len(x.type_): return 0
for e1, e2 in zip(self.type_, x.type_):
if e1 != e2: return 0
if len(self.service_) != len(x.service_): return 0
for e1, e2 in zip(self.service_, x.service_):
if e1 != e2: return 0
return 1
def IsInitialized(self, debug_strs=None):
initialized = 1
if (not self.has_filename_):
initialized = 0
if debug_strs is not None:
debug_strs.append('Required field: filename not set.')
return initialized
def ByteSize(self):
n = 0
n += self.lengthString(len(self.filename_))
n += 1 * len(self.type_)
for i in xrange(len(self.type_)): n += self.lengthString(len(self.type_[i]))
n += 1 * len(self.service_)
for i in xrange(len(self.service_)): n += self.lengthString(len(self.service_[i]))
return n + 1
def ByteSizePartial(self):
n = 0
if (self.has_filename_):
n += 1
n += self.lengthString(len(self.filename_))
n += 1 * len(self.type_)
for i in xrange(len(self.type_)): n += self.lengthString(len(self.type_[i]))
n += 1 * len(self.service_)
for i in xrange(len(self.service_)): n += self.lengthString(len(self.service_[i]))
return n
def Clear(self):
self.clear_filename()
self.clear_type()
self.clear_service()
def OutputUnchecked(self, out):
out.putVarInt32(10)
out.putPrefixedString(self.filename_)
for i in xrange(len(self.type_)):
out.putVarInt32(18)
out.putPrefixedString(self.type_[i])
for i in xrange(len(self.service_)):
out.putVarInt32(26)
out.putPrefixedString(self.service_[i])
def OutputPartial(self, out):
if (self.has_filename_):
out.putVarInt32(10)
out.putPrefixedString(self.filename_)
for i in xrange(len(self.type_)):
out.putVarInt32(18)
out.putPrefixedString(self.type_[i])
for i in xrange(len(self.service_)):
out.putVarInt32(26)
out.putPrefixedString(self.service_[i])
def TryMerge(self, d):
while d.avail() > 0:
tt = d.getVarInt32()
if tt == 10:
self.set_filename(d.getPrefixedString())
continue
if tt == 18:
self.add_type(d.getPrefixedString())
continue
if tt == 26:
self.add_service(d.getPrefixedString())
continue
# tag 0 is special: it's used to indicate an error.
# so if we see it we raise an exception.
if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
d.skipData(tt)
def __str__(self, prefix="", printElemNumber=0):
res=""
if self.has_filename_: res+=prefix+("filename: %s\n" % self.DebugFormatString(self.filename_))
cnt=0
for e in self.type_:
elm=""
if printElemNumber: elm="(%d)" % cnt
res+=prefix+("type%s: %s\n" % (elm, self.DebugFormatString(e)))
cnt+=1
cnt=0
for e in self.service_:
elm=""
if printElemNumber: elm="(%d)" % cnt
res+=prefix+("service%s: %s\n" % (elm, self.DebugFormatString(e)))
cnt+=1
return res
def _BuildTagLookupTable(sparse, maxtag, default=None):
return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
kfilename = 1
ktype = 2
kservice = 3
_TEXT = _BuildTagLookupTable({
0: "ErrorCode",
1: "filename",
2: "type",
3: "service",
}, 3)
_TYPES = _BuildTagLookupTable({
0: ProtocolBuffer.Encoder.NUMERIC,
1: ProtocolBuffer.Encoder.STRING,
2: ProtocolBuffer.Encoder.STRING,
3: ProtocolBuffer.Encoder.STRING,
}, 3, ProtocolBuffer.Encoder.MAX_TYPE)
# stylesheet for XML output
_STYLE = \
""""""
_STYLE_CONTENT_TYPE = \
""""""
_PROTO_DESCRIPTOR_NAME = 'ProtocolFileDescriptor'
_SERIALIZED_DESCRIPTOR = array.array('B')
_SERIALIZED_DESCRIPTOR.fromstring(base64.decodestring("WhxuZXQvcHJvdG8vcHJvdG9jb2x0eXBlLnByb3RvChZQcm90b2NvbEZpbGVEZXNjcmlwdG9yExoIZmlsZW5hbWUgASgCMAk4AhQTGgR0eXBlIAIoAjAJOAMUExoHc2VydmljZSADKAIwCTgDFMIBElByb3RvY29sRGVzY3JpcHRvcg=="))
if _net_proto___parse__python is not None:
_net_proto___parse__python.RegisterType(
_SERIALIZED_DESCRIPTOR.tostring())
class RPC_ServiceDescriptor_Method(ProtocolBuffer.ProtocolMessage):
  """Generated message describing one RPC method of a service.

  Encoded as a proto1 *group* (field 3 of the enclosing
  ServiceDescriptor): TryMerge() stops at tag 28, the END_GROUP marker
  for field number 3, rather than at end-of-buffer.

  Fields (field number, wire type):
    name (4, string, required), argument_type (5, string, required),
    result_type (6, string, required), protocol (7, string),
    deadline (8, double), duplicate_suppression (9, bool),
    fail_fast (10, bool), client_logging (11, int32),
    server_logging (12, int32), security_level (13, string),
    stream_type (14, string), response_format (15, string),
    request_format (17, string), security_label (19, string),
    end_user_creds_requested (34, bool), log_level (36, string)

  NOTE(review): machine-generated Python 2 code; the literal tag and
  size constants below must not be hand-edited.
  """
  # Class-level defaults double as the "unset" values; each has_* flag
  # records presence. Instances only get their own copies via setters.
  has_name_ = 0
  name_ = ""
  has_argument_type_ = 0
  argument_type_ = ""
  has_result_type_ = 0
  result_type_ = ""
  has_stream_type_ = 0
  stream_type_ = ""
  has_protocol_ = 0
  protocol_ = ""
  has_deadline_ = 0
  deadline_ = 0.0
  has_duplicate_suppression_ = 0
  duplicate_suppression_ = 0
  has_fail_fast_ = 0
  fail_fast_ = 0
  has_end_user_creds_requested_ = 0
  end_user_creds_requested_ = 0
  has_client_logging_ = 0
  client_logging_ = 0
  has_server_logging_ = 0
  server_logging_ = 0
  has_security_level_ = 0
  security_level_ = ""
  has_security_label_ = 0
  security_label_ = ""
  has_response_format_ = 0
  response_format_ = ""
  has_request_format_ = 0
  request_format_ = ""
  has_log_level_ = 0
  log_level_ = ""
  def __init__(self, contents=None):
    # No repeated fields, so no per-instance containers are required;
    # optionally parse an already-encoded message from 'contents'.
    if contents is not None: self.MergeFromString(contents)
  # Standard generated accessors, four per field:
  #   <field>()       -- current value (or class-level default)
  #   set_<field>(x)  -- store value and raise the presence flag
  #   clear_<field>() -- reset to default only if currently set
  #   has_<field>()   -- presence flag (0/1)
  def name(self): return self.name_
  def set_name(self, x):
    self.has_name_ = 1
    self.name_ = x
  def clear_name(self):
    if self.has_name_:
      self.has_name_ = 0
      self.name_ = ""
  def has_name(self): return self.has_name_
  def argument_type(self): return self.argument_type_
  def set_argument_type(self, x):
    self.has_argument_type_ = 1
    self.argument_type_ = x
  def clear_argument_type(self):
    if self.has_argument_type_:
      self.has_argument_type_ = 0
      self.argument_type_ = ""
  def has_argument_type(self): return self.has_argument_type_
  def result_type(self): return self.result_type_
  def set_result_type(self, x):
    self.has_result_type_ = 1
    self.result_type_ = x
  def clear_result_type(self):
    if self.has_result_type_:
      self.has_result_type_ = 0
      self.result_type_ = ""
  def has_result_type(self): return self.has_result_type_
  def stream_type(self): return self.stream_type_
  def set_stream_type(self, x):
    self.has_stream_type_ = 1
    self.stream_type_ = x
  def clear_stream_type(self):
    if self.has_stream_type_:
      self.has_stream_type_ = 0
      self.stream_type_ = ""
  def has_stream_type(self): return self.has_stream_type_
  def protocol(self): return self.protocol_
  def set_protocol(self, x):
    self.has_protocol_ = 1
    self.protocol_ = x
  def clear_protocol(self):
    if self.has_protocol_:
      self.has_protocol_ = 0
      self.protocol_ = ""
  def has_protocol(self): return self.has_protocol_
  def deadline(self): return self.deadline_
  def set_deadline(self, x):
    self.has_deadline_ = 1
    self.deadline_ = x
  def clear_deadline(self):
    if self.has_deadline_:
      self.has_deadline_ = 0
      self.deadline_ = 0.0
  def has_deadline(self): return self.has_deadline_
  def duplicate_suppression(self): return self.duplicate_suppression_
  def set_duplicate_suppression(self, x):
    self.has_duplicate_suppression_ = 1
    self.duplicate_suppression_ = x
  def clear_duplicate_suppression(self):
    if self.has_duplicate_suppression_:
      self.has_duplicate_suppression_ = 0
      self.duplicate_suppression_ = 0
  def has_duplicate_suppression(self): return self.has_duplicate_suppression_
  def fail_fast(self): return self.fail_fast_
  def set_fail_fast(self, x):
    self.has_fail_fast_ = 1
    self.fail_fast_ = x
  def clear_fail_fast(self):
    if self.has_fail_fast_:
      self.has_fail_fast_ = 0
      self.fail_fast_ = 0
  def has_fail_fast(self): return self.has_fail_fast_
  def end_user_creds_requested(self): return self.end_user_creds_requested_
  def set_end_user_creds_requested(self, x):
    self.has_end_user_creds_requested_ = 1
    self.end_user_creds_requested_ = x
  def clear_end_user_creds_requested(self):
    if self.has_end_user_creds_requested_:
      self.has_end_user_creds_requested_ = 0
      self.end_user_creds_requested_ = 0
  def has_end_user_creds_requested(self): return self.has_end_user_creds_requested_
  def client_logging(self): return self.client_logging_
  def set_client_logging(self, x):
    self.has_client_logging_ = 1
    self.client_logging_ = x
  def clear_client_logging(self):
    if self.has_client_logging_:
      self.has_client_logging_ = 0
      self.client_logging_ = 0
  def has_client_logging(self): return self.has_client_logging_
  def server_logging(self): return self.server_logging_
  def set_server_logging(self, x):
    self.has_server_logging_ = 1
    self.server_logging_ = x
  def clear_server_logging(self):
    if self.has_server_logging_:
      self.has_server_logging_ = 0
      self.server_logging_ = 0
  def has_server_logging(self): return self.has_server_logging_
  def security_level(self): return self.security_level_
  def set_security_level(self, x):
    self.has_security_level_ = 1
    self.security_level_ = x
  def clear_security_level(self):
    if self.has_security_level_:
      self.has_security_level_ = 0
      self.security_level_ = ""
  def has_security_level(self): return self.has_security_level_
  def security_label(self): return self.security_label_
  def set_security_label(self, x):
    self.has_security_label_ = 1
    self.security_label_ = x
  def clear_security_label(self):
    if self.has_security_label_:
      self.has_security_label_ = 0
      self.security_label_ = ""
  def has_security_label(self): return self.has_security_label_
  def response_format(self): return self.response_format_
  def set_response_format(self, x):
    self.has_response_format_ = 1
    self.response_format_ = x
  def clear_response_format(self):
    if self.has_response_format_:
      self.has_response_format_ = 0
      self.response_format_ = ""
  def has_response_format(self): return self.has_response_format_
  def request_format(self): return self.request_format_
  def set_request_format(self, x):
    self.has_request_format_ = 1
    self.request_format_ = x
  def clear_request_format(self):
    if self.has_request_format_:
      self.has_request_format_ = 0
      self.request_format_ = ""
  def has_request_format(self): return self.has_request_format_
  def log_level(self): return self.log_level_
  def set_log_level(self, x):
    self.has_log_level_ = 1
    self.log_level_ = x
  def clear_log_level(self):
    if self.has_log_level_:
      self.has_log_level_ = 0
      self.log_level_ = ""
  def has_log_level(self): return self.has_log_level_
  def MergeFrom(self, x):
    # Copy every field that is set on 'x' (same message type) into self.
    assert x is not self
    if (x.has_name()): self.set_name(x.name())
    if (x.has_argument_type()): self.set_argument_type(x.argument_type())
    if (x.has_result_type()): self.set_result_type(x.result_type())
    if (x.has_stream_type()): self.set_stream_type(x.stream_type())
    if (x.has_protocol()): self.set_protocol(x.protocol())
    if (x.has_deadline()): self.set_deadline(x.deadline())
    if (x.has_duplicate_suppression()): self.set_duplicate_suppression(x.duplicate_suppression())
    if (x.has_fail_fast()): self.set_fail_fast(x.fail_fast())
    if (x.has_end_user_creds_requested()): self.set_end_user_creds_requested(x.end_user_creds_requested())
    if (x.has_client_logging()): self.set_client_logging(x.client_logging())
    if (x.has_server_logging()): self.set_server_logging(x.server_logging())
    if (x.has_security_level()): self.set_security_level(x.security_level())
    if (x.has_security_label()): self.set_security_label(x.security_label())
    if (x.has_response_format()): self.set_response_format(x.response_format())
    if (x.has_request_format()): self.set_request_format(x.request_format())
    if (x.has_log_level()): self.set_log_level(x.log_level())
  def Equals(self, x):
    # Field-by-field equality (presence flags first); returns 1/0.
    if x is self: return 1
    if self.has_name_ != x.has_name_: return 0
    if self.has_name_ and self.name_ != x.name_: return 0
    if self.has_argument_type_ != x.has_argument_type_: return 0
    if self.has_argument_type_ and self.argument_type_ != x.argument_type_: return 0
    if self.has_result_type_ != x.has_result_type_: return 0
    if self.has_result_type_ and self.result_type_ != x.result_type_: return 0
    if self.has_stream_type_ != x.has_stream_type_: return 0
    if self.has_stream_type_ and self.stream_type_ != x.stream_type_: return 0
    if self.has_protocol_ != x.has_protocol_: return 0
    if self.has_protocol_ and self.protocol_ != x.protocol_: return 0
    if self.has_deadline_ != x.has_deadline_: return 0
    if self.has_deadline_ and self.deadline_ != x.deadline_: return 0
    if self.has_duplicate_suppression_ != x.has_duplicate_suppression_: return 0
    if self.has_duplicate_suppression_ and self.duplicate_suppression_ != x.duplicate_suppression_: return 0
    if self.has_fail_fast_ != x.has_fail_fast_: return 0
    if self.has_fail_fast_ and self.fail_fast_ != x.fail_fast_: return 0
    if self.has_end_user_creds_requested_ != x.has_end_user_creds_requested_: return 0
    if self.has_end_user_creds_requested_ and self.end_user_creds_requested_ != x.end_user_creds_requested_: return 0
    if self.has_client_logging_ != x.has_client_logging_: return 0
    if self.has_client_logging_ and self.client_logging_ != x.client_logging_: return 0
    if self.has_server_logging_ != x.has_server_logging_: return 0
    if self.has_server_logging_ and self.server_logging_ != x.server_logging_: return 0
    if self.has_security_level_ != x.has_security_level_: return 0
    if self.has_security_level_ and self.security_level_ != x.security_level_: return 0
    if self.has_security_label_ != x.has_security_label_: return 0
    if self.has_security_label_ and self.security_label_ != x.security_label_: return 0
    if self.has_response_format_ != x.has_response_format_: return 0
    if self.has_response_format_ and self.response_format_ != x.response_format_: return 0
    if self.has_request_format_ != x.has_request_format_: return 0
    if self.has_request_format_ and self.request_format_ != x.request_format_: return 0
    if self.has_log_level_ != x.has_log_level_: return 0
    if self.has_log_level_ and self.log_level_ != x.log_level_: return 0
    return 1
  def IsInitialized(self, debug_strs=None):
    # The three required fields must be present; optionally append a
    # human-readable reason per missing field to 'debug_strs'.
    initialized = 1
    if (not self.has_name_):
      initialized = 0
      if debug_strs is not None:
        debug_strs.append('Required field: name not set.')
    if (not self.has_argument_type_):
      initialized = 0
      if debug_strs is not None:
        debug_strs.append('Required field: argument_type not set.')
    if (not self.has_result_type_):
      initialized = 0
      if debug_strs is not None:
        debug_strs.append('Required field: result_type not set.')
    return initialized
  def ByteSize(self):
    # Serialized size assuming all required fields are set. Per-field
    # constants are tag-length + fixed payload (e.g. deadline: 1-byte
    # tag + 8-byte double = 9); the trailing '+ 3' is the three
    # one-byte tags of the required string fields.
    n = 0
    n += self.lengthString(len(self.name_))
    n += self.lengthString(len(self.argument_type_))
    n += self.lengthString(len(self.result_type_))
    if (self.has_stream_type_): n += 1 + self.lengthString(len(self.stream_type_))
    if (self.has_protocol_): n += 1 + self.lengthString(len(self.protocol_))
    if (self.has_deadline_): n += 9
    if (self.has_duplicate_suppression_): n += 2
    if (self.has_fail_fast_): n += 2
    if (self.has_end_user_creds_requested_): n += 3
    if (self.has_client_logging_): n += 1 + self.lengthVarInt64(self.client_logging_)
    if (self.has_server_logging_): n += 1 + self.lengthVarInt64(self.server_logging_)
    if (self.has_security_level_): n += 1 + self.lengthString(len(self.security_level_))
    if (self.has_security_label_): n += 2 + self.lengthString(len(self.security_label_))
    if (self.has_response_format_): n += 1 + self.lengthString(len(self.response_format_))
    if (self.has_request_format_): n += 2 + self.lengthString(len(self.request_format_))
    if (self.has_log_level_): n += 2 + self.lengthString(len(self.log_level_))
    return n + 3
  def ByteSizePartial(self):
    # Like ByteSize() but tolerates unset required fields.
    n = 0
    if (self.has_name_):
      n += 1
      n += self.lengthString(len(self.name_))
    if (self.has_argument_type_):
      n += 1
      n += self.lengthString(len(self.argument_type_))
    if (self.has_result_type_):
      n += 1
      n += self.lengthString(len(self.result_type_))
    if (self.has_stream_type_): n += 1 + self.lengthString(len(self.stream_type_))
    if (self.has_protocol_): n += 1 + self.lengthString(len(self.protocol_))
    if (self.has_deadline_): n += 9
    if (self.has_duplicate_suppression_): n += 2
    if (self.has_fail_fast_): n += 2
    if (self.has_end_user_creds_requested_): n += 3
    if (self.has_client_logging_): n += 1 + self.lengthVarInt64(self.client_logging_)
    if (self.has_server_logging_): n += 1 + self.lengthVarInt64(self.server_logging_)
    if (self.has_security_level_): n += 1 + self.lengthString(len(self.security_level_))
    if (self.has_security_label_): n += 2 + self.lengthString(len(self.security_label_))
    if (self.has_response_format_): n += 1 + self.lengthString(len(self.response_format_))
    if (self.has_request_format_): n += 2 + self.lengthString(len(self.request_format_))
    if (self.has_log_level_): n += 2 + self.lengthString(len(self.log_level_))
    return n
  def Clear(self):
    # Reset every field to its default (and drop its presence flag).
    self.clear_name()
    self.clear_argument_type()
    self.clear_result_type()
    self.clear_stream_type()
    self.clear_protocol()
    self.clear_deadline()
    self.clear_duplicate_suppression()
    self.clear_fail_fast()
    self.clear_end_user_creds_requested()
    self.clear_client_logging()
    self.clear_server_logging()
    self.clear_security_level()
    self.clear_security_label()
    self.clear_response_format()
    self.clear_request_format()
    self.clear_log_level()
  def OutputUnchecked(self, out):
    # Serialize without checking required fields. Each literal is the
    # precomputed tag varint (field_number << 3 | wire_type); fields
    # are written in ascending tag-value order.
    out.putVarInt32(34)
    out.putPrefixedString(self.name_)
    out.putVarInt32(42)
    out.putPrefixedString(self.argument_type_)
    out.putVarInt32(50)
    out.putPrefixedString(self.result_type_)
    if (self.has_protocol_):
      out.putVarInt32(58)
      out.putPrefixedString(self.protocol_)
    if (self.has_deadline_):
      out.putVarInt32(65)
      out.putDouble(self.deadline_)
    if (self.has_duplicate_suppression_):
      out.putVarInt32(72)
      out.putBoolean(self.duplicate_suppression_)
    if (self.has_fail_fast_):
      out.putVarInt32(80)
      out.putBoolean(self.fail_fast_)
    if (self.has_client_logging_):
      out.putVarInt32(88)
      out.putVarInt32(self.client_logging_)
    if (self.has_server_logging_):
      out.putVarInt32(96)
      out.putVarInt32(self.server_logging_)
    if (self.has_security_level_):
      out.putVarInt32(106)
      out.putPrefixedString(self.security_level_)
    if (self.has_stream_type_):
      out.putVarInt32(114)
      out.putPrefixedString(self.stream_type_)
    if (self.has_response_format_):
      out.putVarInt32(122)
      out.putPrefixedString(self.response_format_)
    if (self.has_request_format_):
      out.putVarInt32(138)
      out.putPrefixedString(self.request_format_)
    if (self.has_security_label_):
      out.putVarInt32(154)
      out.putPrefixedString(self.security_label_)
    if (self.has_end_user_creds_requested_):
      out.putVarInt32(272)
      out.putBoolean(self.end_user_creds_requested_)
    if (self.has_log_level_):
      out.putVarInt32(290)
      out.putPrefixedString(self.log_level_)
  def OutputPartial(self, out):
    # Like OutputUnchecked() but skips unset required fields.
    if (self.has_name_):
      out.putVarInt32(34)
      out.putPrefixedString(self.name_)
    if (self.has_argument_type_):
      out.putVarInt32(42)
      out.putPrefixedString(self.argument_type_)
    if (self.has_result_type_):
      out.putVarInt32(50)
      out.putPrefixedString(self.result_type_)
    if (self.has_protocol_):
      out.putVarInt32(58)
      out.putPrefixedString(self.protocol_)
    if (self.has_deadline_):
      out.putVarInt32(65)
      out.putDouble(self.deadline_)
    if (self.has_duplicate_suppression_):
      out.putVarInt32(72)
      out.putBoolean(self.duplicate_suppression_)
    if (self.has_fail_fast_):
      out.putVarInt32(80)
      out.putBoolean(self.fail_fast_)
    if (self.has_client_logging_):
      out.putVarInt32(88)
      out.putVarInt32(self.client_logging_)
    if (self.has_server_logging_):
      out.putVarInt32(96)
      out.putVarInt32(self.server_logging_)
    if (self.has_security_level_):
      out.putVarInt32(106)
      out.putPrefixedString(self.security_level_)
    if (self.has_stream_type_):
      out.putVarInt32(114)
      out.putPrefixedString(self.stream_type_)
    if (self.has_response_format_):
      out.putVarInt32(122)
      out.putPrefixedString(self.response_format_)
    if (self.has_request_format_):
      out.putVarInt32(138)
      out.putPrefixedString(self.request_format_)
    if (self.has_security_label_):
      out.putVarInt32(154)
      out.putPrefixedString(self.security_label_)
    if (self.has_end_user_creds_requested_):
      out.putVarInt32(272)
      out.putBoolean(self.end_user_creds_requested_)
    if (self.has_log_level_):
      out.putVarInt32(290)
      out.putPrefixedString(self.log_level_)
  def TryMerge(self, d):
    # Decode fields from Decoder 'd'. This message is a group, so
    # parsing ends at tag 28 (END_GROUP for field 3), not at
    # end-of-buffer; unknown tags are skipped.
    while 1:
      tt = d.getVarInt32()
      if tt == 28: break
      if tt == 34:
        self.set_name(d.getPrefixedString())
        continue
      if tt == 42:
        self.set_argument_type(d.getPrefixedString())
        continue
      if tt == 50:
        self.set_result_type(d.getPrefixedString())
        continue
      if tt == 58:
        self.set_protocol(d.getPrefixedString())
        continue
      if tt == 65:
        self.set_deadline(d.getDouble())
        continue
      if tt == 72:
        self.set_duplicate_suppression(d.getBoolean())
        continue
      if tt == 80:
        self.set_fail_fast(d.getBoolean())
        continue
      if tt == 88:
        self.set_client_logging(d.getVarInt32())
        continue
      if tt == 96:
        self.set_server_logging(d.getVarInt32())
        continue
      if tt == 106:
        self.set_security_level(d.getPrefixedString())
        continue
      if tt == 114:
        self.set_stream_type(d.getPrefixedString())
        continue
      if tt == 122:
        self.set_response_format(d.getPrefixedString())
        continue
      if tt == 138:
        self.set_request_format(d.getPrefixedString())
        continue
      if tt == 154:
        self.set_security_label(d.getPrefixedString())
        continue
      if tt == 272:
        self.set_end_user_creds_requested(d.getBoolean())
        continue
      if tt == 290:
        self.set_log_level(d.getPrefixedString())
        continue
      # tag 0 is special: it's used to indicate an error.
      # so if we see it we raise an exception.
      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
      d.skipData(tt)
  def __str__(self, prefix="", printElemNumber=0):
    # Text-format dump of all set fields, one per line.
    res=""
    if self.has_name_: res+=prefix+("name: %s\n" % self.DebugFormatString(self.name_))
    if self.has_argument_type_: res+=prefix+("argument_type: %s\n" % self.DebugFormatString(self.argument_type_))
    if self.has_result_type_: res+=prefix+("result_type: %s\n" % self.DebugFormatString(self.result_type_))
    if self.has_stream_type_: res+=prefix+("stream_type: %s\n" % self.DebugFormatString(self.stream_type_))
    if self.has_protocol_: res+=prefix+("protocol: %s\n" % self.DebugFormatString(self.protocol_))
    if self.has_deadline_: res+=prefix+("deadline: %s\n" % self.DebugFormat(self.deadline_))
    if self.has_duplicate_suppression_: res+=prefix+("duplicate_suppression: %s\n" % self.DebugFormatBool(self.duplicate_suppression_))
    if self.has_fail_fast_: res+=prefix+("fail_fast: %s\n" % self.DebugFormatBool(self.fail_fast_))
    if self.has_end_user_creds_requested_: res+=prefix+("end_user_creds_requested: %s\n" % self.DebugFormatBool(self.end_user_creds_requested_))
    if self.has_client_logging_: res+=prefix+("client_logging: %s\n" % self.DebugFormatInt32(self.client_logging_))
    if self.has_server_logging_: res+=prefix+("server_logging: %s\n" % self.DebugFormatInt32(self.server_logging_))
    if self.has_security_level_: res+=prefix+("security_level: %s\n" % self.DebugFormatString(self.security_level_))
    if self.has_security_label_: res+=prefix+("security_label: %s\n" % self.DebugFormatString(self.security_label_))
    if self.has_response_format_: res+=prefix+("response_format: %s\n" % self.DebugFormatString(self.response_format_))
    if self.has_request_format_: res+=prefix+("request_format: %s\n" % self.DebugFormatString(self.request_format_))
    if self.has_log_level_: res+=prefix+("log_level: %s\n" % self.DebugFormatString(self.log_level_))
    return res
class RPC_ServiceDescriptor_Stream(ProtocolBuffer.ProtocolMessage):
has_name_ = 0
name_ = ""
has_client_message_type_ = 0
client_message_type_ = ""
has_server_message_type_ = 0
server_message_type_ = ""
has_client_initial_tokens_ = 0
client_initial_tokens_ = 0
has_server_initial_tokens_ = 0
server_initial_tokens_ = 0
has_token_unit_ = 0
token_unit_ = ""
has_security_level_ = 0
security_level_ = ""
has_security_label_ = 0
security_label_ = ""
has_client_logging_ = 0
client_logging_ = 0
has_server_logging_ = 0
server_logging_ = 0
has_deadline_ = 0
deadline_ = 0.0
has_fail_fast_ = 0
fail_fast_ = 0
has_end_user_creds_requested_ = 0
end_user_creds_requested_ = 0
has_log_level_ = 0
log_level_ = ""
def __init__(self, contents=None):
if contents is not None: self.MergeFromString(contents)
def name(self): return self.name_
def set_name(self, x):
self.has_name_ = 1
self.name_ = x
def clear_name(self):
if self.has_name_:
self.has_name_ = 0
self.name_ = ""
def has_name(self): return self.has_name_
def client_message_type(self): return self.client_message_type_
def set_client_message_type(self, x):
self.has_client_message_type_ = 1
self.client_message_type_ = x
def clear_client_message_type(self):
if self.has_client_message_type_:
self.has_client_message_type_ = 0
self.client_message_type_ = ""
def has_client_message_type(self): return self.has_client_message_type_
def server_message_type(self): return self.server_message_type_
def set_server_message_type(self, x):
self.has_server_message_type_ = 1
self.server_message_type_ = x
def clear_server_message_type(self):
if self.has_server_message_type_:
self.has_server_message_type_ = 0
self.server_message_type_ = ""
def has_server_message_type(self): return self.has_server_message_type_
def client_initial_tokens(self): return self.client_initial_tokens_
def set_client_initial_tokens(self, x):
self.has_client_initial_tokens_ = 1
self.client_initial_tokens_ = x
def clear_client_initial_tokens(self):
if self.has_client_initial_tokens_:
self.has_client_initial_tokens_ = 0
self.client_initial_tokens_ = 0
def has_client_initial_tokens(self): return self.has_client_initial_tokens_
def server_initial_tokens(self): return self.server_initial_tokens_
def set_server_initial_tokens(self, x):
self.has_server_initial_tokens_ = 1
self.server_initial_tokens_ = x
def clear_server_initial_tokens(self):
if self.has_server_initial_tokens_:
self.has_server_initial_tokens_ = 0
self.server_initial_tokens_ = 0
def has_server_initial_tokens(self): return self.has_server_initial_tokens_
def token_unit(self): return self.token_unit_
def set_token_unit(self, x):
self.has_token_unit_ = 1
self.token_unit_ = x
def clear_token_unit(self):
if self.has_token_unit_:
self.has_token_unit_ = 0
self.token_unit_ = ""
def has_token_unit(self): return self.has_token_unit_
def security_level(self): return self.security_level_
def set_security_level(self, x):
self.has_security_level_ = 1
self.security_level_ = x
def clear_security_level(self):
if self.has_security_level_:
self.has_security_level_ = 0
self.security_level_ = ""
def has_security_level(self): return self.has_security_level_
def security_label(self): return self.security_label_
def set_security_label(self, x):
self.has_security_label_ = 1
self.security_label_ = x
def clear_security_label(self):
if self.has_security_label_:
self.has_security_label_ = 0
self.security_label_ = ""
def has_security_label(self): return self.has_security_label_
def client_logging(self): return self.client_logging_
def set_client_logging(self, x):
self.has_client_logging_ = 1
self.client_logging_ = x
def clear_client_logging(self):
if self.has_client_logging_:
self.has_client_logging_ = 0
self.client_logging_ = 0
def has_client_logging(self): return self.has_client_logging_
def server_logging(self): return self.server_logging_
def set_server_logging(self, x):
self.has_server_logging_ = 1
self.server_logging_ = x
def clear_server_logging(self):
if self.has_server_logging_:
self.has_server_logging_ = 0
self.server_logging_ = 0
def has_server_logging(self): return self.has_server_logging_
def deadline(self): return self.deadline_
def set_deadline(self, x):
self.has_deadline_ = 1
self.deadline_ = x
def clear_deadline(self):
if self.has_deadline_:
self.has_deadline_ = 0
self.deadline_ = 0.0
def has_deadline(self): return self.has_deadline_
def fail_fast(self): return self.fail_fast_
def set_fail_fast(self, x):
self.has_fail_fast_ = 1
self.fail_fast_ = x
def clear_fail_fast(self):
if self.has_fail_fast_:
self.has_fail_fast_ = 0
self.fail_fast_ = 0
def has_fail_fast(self): return self.has_fail_fast_
def end_user_creds_requested(self): return self.end_user_creds_requested_
def set_end_user_creds_requested(self, x):
self.has_end_user_creds_requested_ = 1
self.end_user_creds_requested_ = x
def clear_end_user_creds_requested(self):
if self.has_end_user_creds_requested_:
self.has_end_user_creds_requested_ = 0
self.end_user_creds_requested_ = 0
def has_end_user_creds_requested(self): return self.has_end_user_creds_requested_
def log_level(self): return self.log_level_
def set_log_level(self, x):
self.has_log_level_ = 1
self.log_level_ = x
def clear_log_level(self):
if self.has_log_level_:
self.has_log_level_ = 0
self.log_level_ = ""
def has_log_level(self): return self.has_log_level_
def MergeFrom(self, x):
assert x is not self
if (x.has_name()): self.set_name(x.name())
if (x.has_client_message_type()): self.set_client_message_type(x.client_message_type())
if (x.has_server_message_type()): self.set_server_message_type(x.server_message_type())
if (x.has_client_initial_tokens()): self.set_client_initial_tokens(x.client_initial_tokens())
if (x.has_server_initial_tokens()): self.set_server_initial_tokens(x.server_initial_tokens())
if (x.has_token_unit()): self.set_token_unit(x.token_unit())
if (x.has_security_level()): self.set_security_level(x.security_level())
if (x.has_security_label()): self.set_security_label(x.security_label())
if (x.has_client_logging()): self.set_client_logging(x.client_logging())
if (x.has_server_logging()): self.set_server_logging(x.server_logging())
if (x.has_deadline()): self.set_deadline(x.deadline())
if (x.has_fail_fast()): self.set_fail_fast(x.fail_fast())
if (x.has_end_user_creds_requested()): self.set_end_user_creds_requested(x.end_user_creds_requested())
if (x.has_log_level()): self.set_log_level(x.log_level())
def Equals(self, x):
if x is self: return 1
if self.has_name_ != x.has_name_: return 0
if self.has_name_ and self.name_ != x.name_: return 0
if self.has_client_message_type_ != x.has_client_message_type_: return 0
if self.has_client_message_type_ and self.client_message_type_ != x.client_message_type_: return 0
if self.has_server_message_type_ != x.has_server_message_type_: return 0
if self.has_server_message_type_ and self.server_message_type_ != x.server_message_type_: return 0
if self.has_client_initial_tokens_ != x.has_client_initial_tokens_: return 0
if self.has_client_initial_tokens_ and self.client_initial_tokens_ != x.client_initial_tokens_: return 0
if self.has_server_initial_tokens_ != x.has_server_initial_tokens_: return 0
if self.has_server_initial_tokens_ and self.server_initial_tokens_ != x.server_initial_tokens_: return 0
if self.has_token_unit_ != x.has_token_unit_: return 0
if self.has_token_unit_ and self.token_unit_ != x.token_unit_: return 0
if self.has_security_level_ != x.has_security_level_: return 0
if self.has_security_level_ and self.security_level_ != x.security_level_: return 0
if self.has_security_label_ != x.has_security_label_: return 0
if self.has_security_label_ and self.security_label_ != x.security_label_: return 0
if self.has_client_logging_ != x.has_client_logging_: return 0
if self.has_client_logging_ and self.client_logging_ != x.client_logging_: return 0
if self.has_server_logging_ != x.has_server_logging_: return 0
if self.has_server_logging_ and self.server_logging_ != x.server_logging_: return 0
if self.has_deadline_ != x.has_deadline_: return 0
if self.has_deadline_ and self.deadline_ != x.deadline_: return 0
if self.has_fail_fast_ != x.has_fail_fast_: return 0
if self.has_fail_fast_ and self.fail_fast_ != x.fail_fast_: return 0
if self.has_end_user_creds_requested_ != x.has_end_user_creds_requested_: return 0
if self.has_end_user_creds_requested_ and self.end_user_creds_requested_ != x.end_user_creds_requested_: return 0
if self.has_log_level_ != x.has_log_level_: return 0
if self.has_log_level_ and self.log_level_ != x.log_level_: return 0
return 1
def IsInitialized(self, debug_strs=None):
initialized = 1
if (not self.has_name_):
initialized = 0
if debug_strs is not None:
debug_strs.append('Required field: name not set.')
if (not self.has_client_message_type_):
initialized = 0
if debug_strs is not None:
debug_strs.append('Required field: client_message_type not set.')
if (not self.has_server_message_type_):
initialized = 0
if debug_strs is not None:
debug_strs.append('Required field: server_message_type not set.')
return initialized
def ByteSize(self):
n = 0
n += self.lengthString(len(self.name_))
n += self.lengthString(len(self.client_message_type_))
n += self.lengthString(len(self.server_message_type_))
if (self.has_client_initial_tokens_): n += 2 + self.lengthVarInt64(self.client_initial_tokens_)
if (self.has_server_initial_tokens_): n += 2 + self.lengthVarInt64(self.server_initial_tokens_)
if (self.has_token_unit_): n += 2 + self.lengthString(len(self.token_unit_))
if (self.has_security_level_): n += 2 + self.lengthString(len(self.security_level_))
if (self.has_security_label_): n += 2 + self.lengthString(len(self.security_label_))
if (self.has_client_logging_): n += 2 + self.lengthVarInt64(self.client_logging_)
if (self.has_server_logging_): n += 2 + self.lengthVarInt64(self.server_logging_)
if (self.has_deadline_): n += 10
if (self.has_fail_fast_): n += 3
if (self.has_end_user_creds_requested_): n += 3
if (self.has_log_level_): n += 2 + self.lengthString(len(self.log_level_))
return n + 6
def ByteSizePartial(self):
  """Return the serialized size in bytes, tolerating missing fields.

  Unlike ByteSize(), required fields are counted only when present,
  so this is safe to call on a partially-initialized message; each
  present required field adds its 2-byte tag explicitly.
  """
  n = 0
  if (self.has_name_):
    n += 2  # field tag
    n += self.lengthString(len(self.name_))
  if (self.has_client_message_type_):
    n += 2  # field tag
    n += self.lengthString(len(self.client_message_type_))
  if (self.has_server_message_type_):
    n += 2  # field tag
    n += self.lengthString(len(self.server_message_type_))
  if (self.has_client_initial_tokens_): n += 2 + self.lengthVarInt64(self.client_initial_tokens_)
  if (self.has_server_initial_tokens_): n += 2 + self.lengthVarInt64(self.server_initial_tokens_)
  if (self.has_token_unit_): n += 2 + self.lengthString(len(self.token_unit_))
  if (self.has_security_level_): n += 2 + self.lengthString(len(self.security_level_))
  if (self.has_security_label_): n += 2 + self.lengthString(len(self.security_label_))
  if (self.has_client_logging_): n += 2 + self.lengthVarInt64(self.client_logging_)
  if (self.has_server_logging_): n += 2 + self.lengthVarInt64(self.server_logging_)
  if (self.has_deadline_): n += 10  # 2-byte tag + 8-byte double
  if (self.has_fail_fast_): n += 3  # 2-byte tag + 1-byte bool
  if (self.has_end_user_creds_requested_): n += 3  # 2-byte tag + 1-byte bool
  if (self.has_log_level_): n += 2 + self.lengthString(len(self.log_level_))
  return n
def Clear(self):
  """Reset every field of this Stream group to its unset default.

  Delegates to the per-field ``clear_*`` helpers; the order mirrors
  the field declaration order of the generated schema.
  """
  for field in ('name', 'client_message_type', 'server_message_type',
                'client_initial_tokens', 'server_initial_tokens',
                'token_unit', 'security_level', 'security_label',
                'client_logging', 'server_logging', 'deadline',
                'fail_fast', 'end_user_creds_requested', 'log_level'):
    getattr(self, 'clear_' + field)()
def OutputUnchecked(self, out):
  """Serialize this Stream group to encoder ``out`` without validation.

  The required string fields are written unconditionally, so the
  message must be initialized first (IsInitialized); "unchecked"
  refers to that skipped validation.  Tag values are precomputed
  varints, e.g. 178 == (22 << 3) | 2 (field 22, length-delimited).
  """
  out.putVarInt32(178)
  out.putPrefixedString(self.name_)
  out.putVarInt32(186)
  out.putPrefixedString(self.client_message_type_)
  out.putVarInt32(194)
  out.putPrefixedString(self.server_message_type_)
  if (self.has_client_initial_tokens_):
    out.putVarInt32(200)
    out.putVarInt64(self.client_initial_tokens_)
  if (self.has_server_initial_tokens_):
    out.putVarInt32(208)
    out.putVarInt64(self.server_initial_tokens_)
  if (self.has_token_unit_):
    out.putVarInt32(218)
    out.putPrefixedString(self.token_unit_)
  if (self.has_security_level_):
    out.putVarInt32(226)
    out.putPrefixedString(self.security_level_)
  if (self.has_security_label_):
    out.putVarInt32(234)
    out.putPrefixedString(self.security_label_)
  if (self.has_client_logging_):
    out.putVarInt32(240)
    out.putVarInt32(self.client_logging_)
  if (self.has_server_logging_):
    out.putVarInt32(248)
    out.putVarInt32(self.server_logging_)
  if (self.has_deadline_):
    out.putVarInt32(257)
    out.putDouble(self.deadline_)
  if (self.has_fail_fast_):
    out.putVarInt32(264)
    out.putBoolean(self.fail_fast_)
  if (self.has_end_user_creds_requested_):
    out.putVarInt32(280)
    out.putBoolean(self.end_user_creds_requested_)
  if (self.has_log_level_):
    out.putVarInt32(298)
    out.putPrefixedString(self.log_level_)
def OutputPartial(self, out):
  """Serialize this Stream group, skipping any unset field.

  Identical to OutputUnchecked() except that the required string
  fields are also guarded by their presence flags, so a partially
  initialized message can be serialized without error.
  """
  if (self.has_name_):
    out.putVarInt32(178)
    out.putPrefixedString(self.name_)
  if (self.has_client_message_type_):
    out.putVarInt32(186)
    out.putPrefixedString(self.client_message_type_)
  if (self.has_server_message_type_):
    out.putVarInt32(194)
    out.putPrefixedString(self.server_message_type_)
  if (self.has_client_initial_tokens_):
    out.putVarInt32(200)
    out.putVarInt64(self.client_initial_tokens_)
  if (self.has_server_initial_tokens_):
    out.putVarInt32(208)
    out.putVarInt64(self.server_initial_tokens_)
  if (self.has_token_unit_):
    out.putVarInt32(218)
    out.putPrefixedString(self.token_unit_)
  if (self.has_security_level_):
    out.putVarInt32(226)
    out.putPrefixedString(self.security_level_)
  if (self.has_security_label_):
    out.putVarInt32(234)
    out.putPrefixedString(self.security_label_)
  if (self.has_client_logging_):
    out.putVarInt32(240)
    out.putVarInt32(self.client_logging_)
  if (self.has_server_logging_):
    out.putVarInt32(248)
    out.putVarInt32(self.server_logging_)
  if (self.has_deadline_):
    out.putVarInt32(257)
    out.putDouble(self.deadline_)
  if (self.has_fail_fast_):
    out.putVarInt32(264)
    out.putBoolean(self.fail_fast_)
  if (self.has_end_user_creds_requested_):
    out.putVarInt32(280)
    out.putBoolean(self.end_user_creds_requested_)
  if (self.has_log_level_):
    out.putVarInt32(298)
    out.putPrefixedString(self.log_level_)
def TryMerge(self, d):
  """Merge fields decoded from ``d`` into this Stream group.

  Reads tag/value pairs until the enclosing group's END_GROUP tag
  (172 == (21 << 3) | 4) is seen.  Unknown tags are skipped so newer
  writers remain readable; tag 0 signals a corrupt stream.
  """
  while 1:
    tt = d.getVarInt32()
    if tt == 172: break  # END_GROUP of field 21 (Stream)
    if tt == 178:
      self.set_name(d.getPrefixedString())
      continue
    if tt == 186:
      self.set_client_message_type(d.getPrefixedString())
      continue
    if tt == 194:
      self.set_server_message_type(d.getPrefixedString())
      continue
    if tt == 200:
      self.set_client_initial_tokens(d.getVarInt64())
      continue
    if tt == 208:
      self.set_server_initial_tokens(d.getVarInt64())
      continue
    if tt == 218:
      self.set_token_unit(d.getPrefixedString())
      continue
    if tt == 226:
      self.set_security_level(d.getPrefixedString())
      continue
    if tt == 234:
      self.set_security_label(d.getPrefixedString())
      continue
    if tt == 240:
      self.set_client_logging(d.getVarInt32())
      continue
    if tt == 248:
      self.set_server_logging(d.getVarInt32())
      continue
    if tt == 257:
      self.set_deadline(d.getDouble())
      continue
    if tt == 264:
      self.set_fail_fast(d.getBoolean())
      continue
    if tt == 280:
      self.set_end_user_creds_requested(d.getBoolean())
      continue
    if tt == 298:
      self.set_log_level(d.getPrefixedString())
      continue
    # tag 0 is special: it's used to indicate an error.
    # so if we see it we raise an exception.
    if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
    d.skipData(tt)
def __str__(self, prefix="", printElemNumber=0):
res=""
if self.has_name_: res+=prefix+("name: %s\n" % self.DebugFormatString(self.name_))
if self.has_client_message_type_: res+=prefix+("client_message_type: %s\n" % self.DebugFormatString(self.client_message_type_))
if self.has_server_message_type_: res+=prefix+("server_message_type: %s\n" % self.DebugFormatString(self.server_message_type_))
if self.has_client_initial_tokens_: res+=prefix+("client_initial_tokens: %s\n" % self.DebugFormatInt64(self.client_initial_tokens_))
if self.has_server_initial_tokens_: res+=prefix+("server_initial_tokens: %s\n" % self.DebugFormatInt64(self.server_initial_tokens_))
if self.has_token_unit_: res+=prefix+("token_unit: %s\n" % self.DebugFormatString(self.token_unit_))
if self.has_security_level_: res+=prefix+("security_level: %s\n" % self.DebugFormatString(self.security_level_))
if self.has_security_label_: res+=prefix+("security_label: %s\n" % self.DebugFormatString(self.security_label_))
if self.has_client_logging_: res+=prefix+("client_logging: %s\n" % self.DebugFormatInt32(self.client_logging_))
if self.has_server_logging_: res+=prefix+("server_logging: %s\n" % self.DebugFormatInt32(self.server_logging_))
if self.has_deadline_: res+=prefix+("deadline: %s\n" % self.DebugFormat(self.deadline_))
if self.has_fail_fast_: res+=prefix+("fail_fast: %s\n" % self.DebugFormatBool(self.fail_fast_))
if self.has_end_user_creds_requested_: res+=prefix+("end_user_creds_requested: %s\n" % self.DebugFormatBool(self.end_user_creds_requested_))
if self.has_log_level_: res+=prefix+("log_level: %s\n" % self.DebugFormatString(self.log_level_))
return res
class RPC_ServiceDescriptor(ProtocolBuffer.ProtocolMessage):
  """Generated protocol message describing an RPC service.

  Machine-generated by the App Engine protocol-buffer compiler; do
  not edit by hand.  Required fields: filename, name.  Optional
  scalars: full_name, failure_detection_delay, multicast_stub.
  Repeated group fields: Method (tag 3) and Stream (tag 21), whose
  element classes are RPC_ServiceDescriptor_Method and
  RPC_ServiceDescriptor_Stream.
  """

  # Presence flags and unset defaults for the scalar fields.
  has_filename_ = 0
  filename_ = ""
  has_name_ = 0
  name_ = ""
  has_full_name_ = 0
  full_name_ = ""
  has_failure_detection_delay_ = 0
  failure_detection_delay_ = 0.0
  has_multicast_stub_ = 0
  multicast_stub_ = 0

  def __init__(self, contents=None):
    """Create an empty descriptor; optionally decode from `contents` bytes."""
    self.method_ = []
    self.stream_ = []
    if contents is not None: self.MergeFromString(contents)

  # --- scalar field accessors (generated get/set/clear/has quartets) ---

  def filename(self): return self.filename_

  def set_filename(self, x):
    self.has_filename_ = 1
    self.filename_ = x

  def clear_filename(self):
    # Guard avoids touching class-level defaults when already unset.
    if self.has_filename_:
      self.has_filename_ = 0
      self.filename_ = ""

  def has_filename(self): return self.has_filename_

  def name(self): return self.name_

  def set_name(self, x):
    self.has_name_ = 1
    self.name_ = x

  def clear_name(self):
    if self.has_name_:
      self.has_name_ = 0
      self.name_ = ""

  def has_name(self): return self.has_name_

  def full_name(self): return self.full_name_

  def set_full_name(self, x):
    self.has_full_name_ = 1
    self.full_name_ = x

  def clear_full_name(self):
    if self.has_full_name_:
      self.has_full_name_ = 0
      self.full_name_ = ""

  def has_full_name(self): return self.has_full_name_

  def failure_detection_delay(self): return self.failure_detection_delay_

  def set_failure_detection_delay(self, x):
    self.has_failure_detection_delay_ = 1
    self.failure_detection_delay_ = x

  def clear_failure_detection_delay(self):
    if self.has_failure_detection_delay_:
      self.has_failure_detection_delay_ = 0
      self.failure_detection_delay_ = 0.0

  def has_failure_detection_delay(self): return self.has_failure_detection_delay_

  def multicast_stub(self): return self.multicast_stub_

  def set_multicast_stub(self, x):
    self.has_multicast_stub_ = 1
    self.multicast_stub_ = x

  def clear_multicast_stub(self):
    if self.has_multicast_stub_:
      self.has_multicast_stub_ = 0
      self.multicast_stub_ = 0

  def has_multicast_stub(self): return self.has_multicast_stub_

  # --- repeated group field accessors ---

  def method_size(self): return len(self.method_)
  def method_list(self): return self.method_

  def method(self, i):
    return self.method_[i]

  def mutable_method(self, i):
    return self.method_[i]

  def add_method(self):
    # Appends a fresh element and returns it for in-place population.
    x = RPC_ServiceDescriptor_Method()
    self.method_.append(x)
    return x

  def clear_method(self):
    self.method_ = []

  def stream_size(self): return len(self.stream_)
  def stream_list(self): return self.stream_

  def stream(self, i):
    return self.stream_[i]

  def mutable_stream(self, i):
    return self.stream_[i]

  def add_stream(self):
    x = RPC_ServiceDescriptor_Stream()
    self.stream_.append(x)
    return x

  def clear_stream(self):
    self.stream_ = []

  def MergeFrom(self, x):
    """Merge set fields of `x` into self; repeated fields are appended."""
    assert x is not self
    if (x.has_filename()): self.set_filename(x.filename())
    if (x.has_name()): self.set_name(x.name())
    if (x.has_full_name()): self.set_full_name(x.full_name())
    if (x.has_failure_detection_delay()): self.set_failure_detection_delay(x.failure_detection_delay())
    if (x.has_multicast_stub()): self.set_multicast_stub(x.multicast_stub())
    for i in xrange(x.method_size()): self.add_method().CopyFrom(x.method(i))
    for i in xrange(x.stream_size()): self.add_stream().CopyFrom(x.stream(i))

  # C-accelerated codec entry points, installed only when the native
  # parser extension is available.
  if _net_proto___parse__python is not None:
    def _CMergeFromString(self, s):
      _net_proto___parse__python.MergeFromString(self, 'RPC_ServiceDescriptor', s)

  if _net_proto___parse__python is not None:
    def _CEncode(self):
      return _net_proto___parse__python.Encode(self, 'RPC_ServiceDescriptor')

  if _net_proto___parse__python is not None:
    def _CEncodePartial(self):
      return _net_proto___parse__python.EncodePartial(self, 'RPC_ServiceDescriptor')

  if _net_proto___parse__python is not None:
    def _CToASCII(self, output_format):
      return _net_proto___parse__python.ToASCII(self, 'RPC_ServiceDescriptor', output_format)

  if _net_proto___parse__python is not None:
    def ParseASCII(self, s):
      _net_proto___parse__python.ParseASCII(self, 'RPC_ServiceDescriptor', s)

  if _net_proto___parse__python is not None:
    def ParseASCIIIgnoreUnknown(self, s):
      _net_proto___parse__python.ParseASCIIIgnoreUnknown(self, 'RPC_ServiceDescriptor', s)

  def Equals(self, x):
    """Field-by-field equality (presence flags and values); 1/0 result."""
    if x is self: return 1
    if self.has_filename_ != x.has_filename_: return 0
    if self.has_filename_ and self.filename_ != x.filename_: return 0
    if self.has_name_ != x.has_name_: return 0
    if self.has_name_ and self.name_ != x.name_: return 0
    if self.has_full_name_ != x.has_full_name_: return 0
    if self.has_full_name_ and self.full_name_ != x.full_name_: return 0
    if self.has_failure_detection_delay_ != x.has_failure_detection_delay_: return 0
    if self.has_failure_detection_delay_ and self.failure_detection_delay_ != x.failure_detection_delay_: return 0
    if self.has_multicast_stub_ != x.has_multicast_stub_: return 0
    if self.has_multicast_stub_ and self.multicast_stub_ != x.multicast_stub_: return 0
    if len(self.method_) != len(x.method_): return 0
    for e1, e2 in zip(self.method_, x.method_):
      if e1 != e2: return 0
    if len(self.stream_) != len(x.stream_): return 0
    for e1, e2 in zip(self.stream_, x.stream_):
      if e1 != e2: return 0
    return 1

  def IsInitialized(self, debug_strs=None):
    """Return 1 when required fields (filename, name) and all repeated
    sub-messages are initialized; optionally append messages to debug_strs."""
    initialized = 1
    if (not self.has_filename_):
      initialized = 0
      if debug_strs is not None:
        debug_strs.append('Required field: filename not set.')
    if (not self.has_name_):
      initialized = 0
      if debug_strs is not None:
        debug_strs.append('Required field: name not set.')
    for p in self.method_:
      if not p.IsInitialized(debug_strs): initialized=0
    for p in self.stream_:
      if not p.IsInitialized(debug_strs): initialized=0
    return initialized

  def ByteSize(self):
    """Serialized size in bytes, assuming required fields are set
    (their 1-byte tags are the trailing `+ 2`)."""
    n = 0
    n += self.lengthString(len(self.filename_))
    n += self.lengthString(len(self.name_))
    if (self.has_full_name_): n += 2 + self.lengthString(len(self.full_name_))
    if (self.has_failure_detection_delay_): n += 10  # 2-byte tag + 8-byte double
    if (self.has_multicast_stub_): n += 3
    n += 2 * len(self.method_)  # 1-byte START_GROUP + END_GROUP per element
    for i in xrange(len(self.method_)): n += self.method_[i].ByteSize()
    n += 4 * len(self.stream_)  # 2-byte START_GROUP + END_GROUP per element
    for i in xrange(len(self.stream_)): n += self.stream_[i].ByteSize()
    return n + 2

  def ByteSizePartial(self):
    """Serialized size counting required fields only when present."""
    n = 0
    if (self.has_filename_):
      n += 1
      n += self.lengthString(len(self.filename_))
    if (self.has_name_):
      n += 1
      n += self.lengthString(len(self.name_))
    if (self.has_full_name_): n += 2 + self.lengthString(len(self.full_name_))
    if (self.has_failure_detection_delay_): n += 10
    if (self.has_multicast_stub_): n += 3
    n += 2 * len(self.method_)
    for i in xrange(len(self.method_)): n += self.method_[i].ByteSizePartial()
    n += 4 * len(self.stream_)
    for i in xrange(len(self.stream_)): n += self.stream_[i].ByteSizePartial()
    return n

  def Clear(self):
    """Reset every field to its unset default."""
    self.clear_filename()
    self.clear_name()
    self.clear_full_name()
    self.clear_failure_detection_delay()
    self.clear_multicast_stub()
    self.clear_method()
    self.clear_stream()

  def OutputUnchecked(self, out):
    """Serialize without validating required fields; tags are
    precomputed varints (e.g. 10 == (1 << 3) | 2)."""
    out.putVarInt32(10)
    out.putPrefixedString(self.filename_)
    out.putVarInt32(18)
    out.putPrefixedString(self.name_)
    for i in xrange(len(self.method_)):
      out.putVarInt32(27)   # START_GROUP, field 3
      self.method_[i].OutputUnchecked(out)
      out.putVarInt32(28)   # END_GROUP, field 3
    if (self.has_failure_detection_delay_):
      out.putVarInt32(129)
      out.putDouble(self.failure_detection_delay_)
    if (self.has_full_name_):
      out.putVarInt32(146)
      out.putPrefixedString(self.full_name_)
    if (self.has_multicast_stub_):
      out.putVarInt32(160)
      out.putBoolean(self.multicast_stub_)
    for i in xrange(len(self.stream_)):
      out.putVarInt32(171)  # START_GROUP, field 21
      self.stream_[i].OutputUnchecked(out)
      out.putVarInt32(172)  # END_GROUP, field 21
  def OutputPartial(self, out):
    """Serialize, skipping any unset field (required ones included)."""
    if (self.has_filename_):
      out.putVarInt32(10)
      out.putPrefixedString(self.filename_)
    if (self.has_name_):
      out.putVarInt32(18)
      out.putPrefixedString(self.name_)
    for i in xrange(len(self.method_)):
      out.putVarInt32(27)
      self.method_[i].OutputPartial(out)
      out.putVarInt32(28)
    if (self.has_failure_detection_delay_):
      out.putVarInt32(129)
      out.putDouble(self.failure_detection_delay_)
    if (self.has_full_name_):
      out.putVarInt32(146)
      out.putPrefixedString(self.full_name_)
    if (self.has_multicast_stub_):
      out.putVarInt32(160)
      out.putBoolean(self.multicast_stub_)
    for i in xrange(len(self.stream_)):
      out.putVarInt32(171)
      self.stream_[i].OutputPartial(out)
      out.putVarInt32(172)

  def TryMerge(self, d):
    """Merge fields decoded from `d` until the buffer is exhausted;
    unknown tags are skipped, tag 0 signals corruption."""
    while d.avail() > 0:
      tt = d.getVarInt32()
      if tt == 10:
        self.set_filename(d.getPrefixedString())
        continue
      if tt == 18:
        self.set_name(d.getPrefixedString())
        continue
      if tt == 27:
        self.add_method().TryMerge(d)
        continue
      if tt == 129:
        self.set_failure_detection_delay(d.getDouble())
        continue
      if tt == 146:
        self.set_full_name(d.getPrefixedString())
        continue
      if tt == 160:
        self.set_multicast_stub(d.getBoolean())
        continue
      if tt == 171:
        self.add_stream().TryMerge(d)
        continue
      # tag 0 is special: it's used to indicate an error.
      # so if we see it we raise an exception.
      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
      d.skipData(tt)

  def __str__(self, prefix="", printElemNumber=0):
    """Human-readable dump of set fields; repeated groups are nested
    with two-space indentation."""
    res=""
    if self.has_filename_: res+=prefix+("filename: %s\n" % self.DebugFormatString(self.filename_))
    if self.has_name_: res+=prefix+("name: %s\n" % self.DebugFormatString(self.name_))
    if self.has_full_name_: res+=prefix+("full_name: %s\n" % self.DebugFormatString(self.full_name_))
    if self.has_failure_detection_delay_: res+=prefix+("failure_detection_delay: %s\n" % self.DebugFormat(self.failure_detection_delay_))
    if self.has_multicast_stub_: res+=prefix+("multicast_stub: %s\n" % self.DebugFormatBool(self.multicast_stub_))
    cnt=0
    for e in self.method_:
      elm=""
      if printElemNumber: elm="(%d)" % cnt
      res+=prefix+("Method%s {\n" % elm)
      res+=e.__str__(prefix + "  ", printElemNumber)
      res+=prefix+"}\n"
      cnt+=1
    cnt=0
    for e in self.stream_:
      elm=""
      if printElemNumber: elm="(%d)" % cnt
      res+=prefix+("Stream%s {\n" % elm)
      res+=e.__str__(prefix + "  ", printElemNumber)
      res+=prefix+"}\n"
      cnt+=1
    return res

  def _BuildTagLookupTable(sparse, maxtag, default=None):
    # Densify a sparse {tag: value} dict into a tuple indexed by tag.
    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])

  # Field-number constants for every field, including nested group members.
  kfilename = 1
  kname = 2
  kfull_name = 18
  kfailure_detection_delay = 16
  kmulticast_stub = 20
  kMethodGroup = 3
  kMethodname = 4
  kMethodargument_type = 5
  kMethodresult_type = 6
  kMethodstream_type = 14
  kMethodprotocol = 7
  kMethoddeadline = 8
  kMethodduplicate_suppression = 9
  kMethodfail_fast = 10
  kMethodend_user_creds_requested = 34
  kMethodclient_logging = 11
  kMethodserver_logging = 12
  kMethodsecurity_level = 13
  kMethodsecurity_label = 19
  kMethodresponse_format = 15
  kMethodrequest_format = 17
  kMethodlog_level = 36
  kStreamGroup = 21
  kStreamname = 22
  kStreamclient_message_type = 23
  kStreamserver_message_type = 24
  kStreamclient_initial_tokens = 25
  kStreamserver_initial_tokens = 26
  kStreamtoken_unit = 27
  kStreamsecurity_level = 28
  kStreamsecurity_label = 29
  kStreamclient_logging = 30
  kStreamserver_logging = 31
  kStreamdeadline = 32
  kStreamfail_fast = 33
  kStreamend_user_creds_requested = 35
  kStreamlog_level = 37

  # tag -> field-name table used by the text formatter.
  _TEXT = _BuildTagLookupTable({
    0: "ErrorCode",
    1: "filename",
    2: "name",
    3: "Method",
    4: "name",
    5: "argument_type",
    6: "result_type",
    7: "protocol",
    8: "deadline",
    9: "duplicate_suppression",
    10: "fail_fast",
    11: "client_logging",
    12: "server_logging",
    13: "security_level",
    14: "stream_type",
    15: "response_format",
    16: "failure_detection_delay",
    17: "request_format",
    18: "full_name",
    19: "security_label",
    20: "multicast_stub",
    21: "Stream",
    22: "name",
    23: "client_message_type",
    24: "server_message_type",
    25: "client_initial_tokens",
    26: "server_initial_tokens",
    27: "token_unit",
    28: "security_level",
    29: "security_label",
    30: "client_logging",
    31: "server_logging",
    32: "deadline",
    33: "fail_fast",
    34: "end_user_creds_requested",
    35: "end_user_creds_requested",
    36: "log_level",
    37: "log_level",
  }, 37)

  # tag -> wire-type table used by the generic decoder.
  _TYPES = _BuildTagLookupTable({
    0: ProtocolBuffer.Encoder.NUMERIC,
    1: ProtocolBuffer.Encoder.STRING,
    2: ProtocolBuffer.Encoder.STRING,
    3: ProtocolBuffer.Encoder.STARTGROUP,
    4: ProtocolBuffer.Encoder.STRING,
    5: ProtocolBuffer.Encoder.STRING,
    6: ProtocolBuffer.Encoder.STRING,
    7: ProtocolBuffer.Encoder.STRING,
    8: ProtocolBuffer.Encoder.DOUBLE,
    9: ProtocolBuffer.Encoder.NUMERIC,
    10: ProtocolBuffer.Encoder.NUMERIC,
    11: ProtocolBuffer.Encoder.NUMERIC,
    12: ProtocolBuffer.Encoder.NUMERIC,
    13: ProtocolBuffer.Encoder.STRING,
    14: ProtocolBuffer.Encoder.STRING,
    15: ProtocolBuffer.Encoder.STRING,
    16: ProtocolBuffer.Encoder.DOUBLE,
    17: ProtocolBuffer.Encoder.STRING,
    18: ProtocolBuffer.Encoder.STRING,
    19: ProtocolBuffer.Encoder.STRING,
    20: ProtocolBuffer.Encoder.NUMERIC,
    21: ProtocolBuffer.Encoder.STARTGROUP,
    22: ProtocolBuffer.Encoder.STRING,
    23: ProtocolBuffer.Encoder.STRING,
    24: ProtocolBuffer.Encoder.STRING,
    25: ProtocolBuffer.Encoder.NUMERIC,
    26: ProtocolBuffer.Encoder.NUMERIC,
    27: ProtocolBuffer.Encoder.STRING,
    28: ProtocolBuffer.Encoder.STRING,
    29: ProtocolBuffer.Encoder.STRING,
    30: ProtocolBuffer.Encoder.NUMERIC,
    31: ProtocolBuffer.Encoder.NUMERIC,
    32: ProtocolBuffer.Encoder.DOUBLE,
    33: ProtocolBuffer.Encoder.NUMERIC,
    34: ProtocolBuffer.Encoder.NUMERIC,
    35: ProtocolBuffer.Encoder.NUMERIC,
    36: ProtocolBuffer.Encoder.STRING,
    37: ProtocolBuffer.Encoder.STRING,
  }, 37, ProtocolBuffer.Encoder.MAX_TYPE)

  # stylesheet for XML output
  _STYLE = \
   """"""
  _STYLE_CONTENT_TYPE = \
   """"""
  _PROTO_DESCRIPTOR_NAME = 'RPC_ServiceDescriptor'
  # Base64-encoded binary descriptor consumed by the native parser.
  _SERIALIZED_DESCRIPTOR = array.array('B')
  _SERIALIZED_DESCRIPTOR.fromstring(base64.decodestring("WhxuZXQvcHJvdG8vcHJvdG9jb2x0eXBlLnByb3RvChVSUENfU2VydmljZURlc2NyaXB0b3ITGghmaWxlbmFtZSABKAIwCTgCFBMaBG5hbWUgAigCMAk4AhQTGglmdWxsX25hbWUgEigCMAk4ARQTGhdmYWlsdXJlX2RldGVjdGlvbl9kZWxheSAQKAEwATgBFBMaDm11bHRpY2FzdF9zdHViIBQoADAIOAEUExoGTWV0aG9kIAMoAzAKOAMUExoLTWV0aG9kLm5hbWUgBCgCMAk4AmAFFBMaFE1ldGhvZC5hcmd1bWVudF90eXBlIAUoAjAJOAJgBRQTGhJNZXRob2QucmVzdWx0X3R5cGUgBigCMAk4AmAFFBMaEk1ldGhvZC5zdHJlYW1fdHlwZSAOKAIwCTgBYAUUExoPTWV0aG9kLnByb3RvY29sIAcoAjAJOAFgBRQTGg9NZXRob2QuZGVhZGxpbmUgCCgBMAE4AWAFFBMaHE1ldGhvZC5kdXBsaWNhdGVfc3VwcHJlc3Npb24gCSgAMAg4AWAFFBMaEE1ldGhvZC5mYWlsX2Zhc3QgCigAMAg4AWAFFBMaH01ldGhvZC5lbmRfdXNlcl9jcmVkc19yZXF1ZXN0ZWQgIigAMAg4AWAFFBMaFU1ldGhvZC5jbGllbnRfbG9nZ2luZyALKAAwBTgBYAUUExoVTWV0aG9kLnNlcnZlcl9sb2dnaW5nIAwoADAFOAFgBRQTGhVNZXRob2Quc2VjdXJpdHlfbGV2ZWwgDSgCMAk4AWAFFBMaFU1ldGhvZC5zZWN1cml0eV9sYWJlbCATKAIwCTgBYAUUExoWTWV0aG9kLnJlc3BvbnNlX2Zvcm1hdCAPKAIwCTgBYAUUExoVTWV0aG9kLnJlcXVlc3RfZm9ybWF0IBEoAjAJOAFgBRQTGhBNZXRob2QubG9nX2xldmVsICQoAjAJOAFgBRQTGgZTdHJlYW0gFSgDMAo4AxQTGgtTdHJlYW0ubmFtZSAWKAIwCTgCYBYUExoaU3RyZWFtLmNsaWVudF9tZXNzYWdlX3R5cGUgFygCMAk4AmAWFBMaGlN0cmVhbS5zZXJ2ZXJfbWVzc2FnZV90eXBlIBgoAjAJOAJgFhQTGhxTdHJlYW0uY2xpZW50X2luaXRpYWxfdG9rZW5zIBkoADADOAFgFhQTGhxTdHJlYW0uc2VydmVyX2luaXRpYWxfdG9rZW5zIBooADADOAFgFhQTGhFTdHJlYW0udG9rZW5fdW5pdCAbKAIwCTgBYBYUExoVU3RyZWFtLnNlY3VyaXR5X2xldmVsIBwoAjAJOAFgFhQTGhVTdHJlYW0uc2VjdXJpdHlfbGFiZWwgHSgCMAk4AWAWFBMaFVN0cmVhbS5jbGllbnRfbG9nZ2luZyAeKAAwBTgBYBYUExoVU3RyZWFtLnNlcnZlcl9sb2dnaW5nIB8oADAFOAFgFhQTGg9TdHJlYW0uZGVhZGxpbmUgICgBMAE4AWAWFBMaEFN0cmVhbS5mYWlsX2Zhc3QgISgAMAg4AWAWFBMaH1N0cmVhbS5lbmRfdXNlcl9jcmVkc19yZXF1ZXN0ZWQgIygAMAg4AWAWFBMaEFN0cmVhbS5sb2dfbGV2ZWwgJSgCMAk4AWAWFMIBElByb3RvY29sRGVzY3JpcHRvcg=="))
  if _net_proto___parse__python is not None:
    _net_proto___parse__python.RegisterType(
        _SERIALIZED_DESCRIPTOR.tostring())
# Hook point for the C-extension runtime; nothing extra is needed for
# this module when the extension is present.
if _extension_runtime:
  pass

# Public API of the module: all generated message classes.
__all__ = ['ProtocolDescriptor','ProtocolDescriptor_EnumTypeTag','ProtocolDescriptor_TagOption','ProtocolDescriptor_Tag','ProtocolDescriptor_EnumType','ProtocolFileDescriptor','RPC_ServiceDescriptor','RPC_ServiceDescriptor_Method','RPC_ServiceDescriptor_Stream']
| apache-2.0 |
mapfau/xbmc | addons/metadata.demo.tv/demo.py | 5 | 10505 | #!/usr/bin/env python
# -*- coding: UTF-8 -*-
import xbmcplugin,xbmcgui,xbmc,xbmcaddon
import os,sys,urllib
def get_params():
    """Parse the Kodi plugin query string into a parameter dict.

    Kodi invokes a plugin as ``plugin_url handle '?key=value&...'``,
    so the raw query string is read from ``sys.argv[2]``.

    Returns:
        dict mapping parameter names to their (still URL-encoded)
        values.  Always a dict — the original returned an empty
        *list* when no parameters were passed, giving callers an
        inconsistent type.
    """
    param = {}
    paramstring = sys.argv[2]
    if len(paramstring) >= 2:
        # Drop the leading '?' and any trailing '/' Kodi may append.
        # (The original tried to strip the slash from an unused
        # variable with an off-by-one slice [0:len-2], so the trim
        # never took effect.)
        cleanedparams = paramstring.replace('?', '')
        if cleanedparams.endswith('/'):
            cleanedparams = cleanedparams[:-1]
        for pair in cleanedparams.split('&'):
            kv = pair.split('=')
            if len(kv) == 2:
                param[kv[0]] = kv[1]
    return param
# Dispatch on the scraper action Kodi requested via the query string.
params=get_params()
action=urllib.unquote_plus(params["action"])

if action == 'find':
    # Search: return candidate shows matching the given title/year,
    # each with a 'relevance' score in [0, 1].
    year = 0
    title=urllib.unquote_plus(params["title"])
    try:
        year=int(urllib.unquote_plus(params["year"]))
    except:
        # Year is optional; fall back to 0 when absent or malformed.
        pass
    print('Find TV show with title %s from year %i' %(title, int(year)))
    liz=xbmcgui.ListItem('Demo show 1', thumbnailImage='DefaultVideo.png', offscreen=True)
    liz.setProperty('relevance', '0.5')
    xbmcplugin.addDirectoryItem(handle=int(sys.argv[1]), url="/path/to/show", listitem=liz, isFolder=True)
    liz=xbmcgui.ListItem('Demo show 2', thumbnailImage='DefaultVideo.png', offscreen=True)
    liz.setProperty('relevance', '0.3')
    xbmcplugin.addDirectoryItem(handle=int(sys.argv[1]), url="/path/to/show2", listitem=liz, isFolder=True)
elif action == 'getdetails':
    # Show details: populate 'video.*' properties for the selected show.
    url=urllib.unquote_plus(params["url"])
    if url == '/path/to/show':
        liz=xbmcgui.ListItem('Demo show 1', offscreen=True)
        liz.setProperty('video.original_title', 'Demo shåvv 1')
        liz.setProperty('video.sort_title', '2')
        # Ratings are a counted list: 'video.ratings' gives the count,
        # then 'video.ratingN.*' entries follow (same pattern for
        # thumbs, actors, seasons and fanart below).
        liz.setProperty('video.ratings', '1')
        liz.setProperty('video.rating1.value', '5')
        liz.setProperty('video.rating1.votes', '100')
        liz.setProperty('video.user_rating', '5')
        liz.setProperty('video.unique_id', '123')
        liz.setProperty('video.plot_outline', 'Outline yo')
        liz.setProperty('video.plot', 'Plot yo')
        liz.setProperty('video.tag_line', 'Tag yo')
        liz.setProperty('video.duration_minutes', '110')
        liz.setProperty('video.mpaa', 'T')
        liz.setProperty('video.premiere_year', '2007')
        liz.setProperty('video.status', 'Cancelled')
        liz.setProperty('video.first_aired', '2007-01-01')
        liz.setProperty('video.trailer', '/home/akva/Videos/porn/bukkake.mkv')
        liz.setProperty('video.thumbs', '2')
        liz.setProperty('video.thumb1.url', 'DefaultBackFanart.png')
        liz.setProperty('video.thumb1.aspect', '1.78')
        liz.setProperty('video.thumb2.url', '/home/akva/Pictures/hawaii-shirt.png')
        liz.setProperty('video.thumb2.aspect', '2.35')
        # Multi-valued fields use ' / ' as the separator.
        liz.setProperty('video.genre','Action / Comedy')
        liz.setProperty('video.country', 'Norway / Sweden / China')
        liz.setProperty('video.writing_credits', 'None / Want / To Admit It')
        liz.setProperty('video.director', 'spiff / spiff2')
        liz.setProperty('video.seasons', '2')
        liz.setProperty('video.season1.name', 'Horrible')
        liz.setProperty('video.season2.name', 'Crap')
        liz.setProperty('video.actors', '2')
        liz.setProperty('video.actor1.name', 'spiff')
        liz.setProperty('video.actor1.role', 'himself')
        liz.setProperty('video.actor1.sort_order', '2')
        liz.setProperty('video.actor1.thumb', '/home/akva/Pictures/fish.jpg')
        liz.setProperty('video.actor1.thumb_aspect', '1.33')
        liz.setProperty('video.actor2.name', 'monkey')
        liz.setProperty('video.actor2.role', 'orange')
        liz.setProperty('video.actor2.sort_order', '1')
        # NOTE(review): 'actor1' here (and in both episode branches)
        # looks like a copy-paste slip — it overwrites actor1's aspect
        # instead of setting actor2's; confirm against the scraper API.
        liz.setProperty('video.actor1.thumb_aspect', '1.78')
        liz.setProperty('video.actor2.thumb', '/home/akva/Pictures/coffee.jpg')
        liz.setProperty('video.tag', 'Porn / Umomz')
        liz.setProperty('video.studio', 'Studio1 / Studio2')
        # URL Kodi will pass back for the 'getepisodelist' action.
        liz.setProperty('video.episode_guide_url', '/path/to/show/guide')
        liz.setProperty('video.fanarts', '2')
        liz.setProperty('video.fanart1.url', 'DefaultBackFanart.png')
        liz.setProperty('video.fanart1.preview', 'DefaultBackFanart.png')
        liz.setProperty('video.fanart1.dim', '720')
        liz.setProperty('video.fanart2.url', '/home/akva/Pictures/hawaii-shirt.png')
        liz.setProperty('video.fanart2.preview', '/home/akva/Pictures/hawaii-shirt.png')
        liz.setProperty('video.fanart2.dim', '1080')
        liz.setProperty('video.date_added', '2016-01-01')
        xbmcplugin.setResolvedUrl(handle=int(sys.argv[1]), succeeded=True, listitem=liz)
elif action == 'getepisodelist':
    # Episode list: enumerate episodes for the show's guide URL.
    url=urllib.unquote_plus(params["url"])
    print('in here yo ' + url)
    if url == '/path/to/show/guide':
        liz=xbmcgui.ListItem('Demo Episode 1x1', offscreen=True)
        liz.setProperty('video.episode', '1')
        liz.setProperty('video.season', '1')
        liz.setProperty('video.aired', '2015-01-01')
        liz.setProperty('video.id', '1')
        liz.setProperty('video.url', '/path/to/episode1')
        xbmcplugin.addDirectoryItem(handle=int(sys.argv[1]), url="/path/to/episode1", listitem=liz, isFolder=False)
        liz=xbmcgui.ListItem('Demo Episode 2x2', offscreen=True)
        liz.setProperty('video.episode', '2')
        #liz.setProperty('video.sub_episode', '1')
        liz.setProperty('video.season', '2')
        liz.setProperty('video.aired', '2014-01-01')
        liz.setProperty('video.id', '2')
        liz.setProperty('video.url', '/path/to/episode2')
        xbmcplugin.addDirectoryItem(handle=int(sys.argv[1]), url="/path/to/episode2", listitem=liz, isFolder=False)
elif action == 'getepisodedetails':
    # Episode details: same 'video.*' property scheme as 'getdetails'.
    url=urllib.unquote_plus(params["url"])
    if url == '/path/to/episode1':
        liz=xbmcgui.ListItem('Demo Episode 1', offscreen=True)
        liz.setProperty('video.original_title', 'Demo æpisod 1x1')
        liz.setProperty('video.sort_title', '2')
        liz.setProperty('video.episode', '1')
        liz.setProperty('video.season', '1')
        liz.setProperty('video.ratings', '1')
        liz.setProperty('video.rating1.value', '5')
        liz.setProperty('video.rating1.votes', '100')
        liz.setProperty('video.user_rating', '5')
        liz.setProperty('video.unique_id', '123')
        liz.setProperty('video.plot_outline', 'Outline yo')
        liz.setProperty('video.plot', 'Plot yo')
        liz.setProperty('video.tag_line', 'Tag yo')
        liz.setProperty('video.duration_minutes', '110')
        liz.setProperty('video.mpaa', 'T')
        liz.setProperty('video.first_aired', '2007-01-01')
        liz.setProperty('video.thumbs', '2')
        liz.setProperty('video.thumb1.url', 'DefaultBackFanart.png')
        liz.setProperty('video.thumb1.aspect', '1.78')
        liz.setProperty('video.thumb2.url', '/home/akva/Pictures/hawaii-shirt.png')
        liz.setProperty('video.thumb2.aspect', '2.35')
        liz.setProperty('video.genre','Action / Comedy')
        liz.setProperty('video.country', 'Norway / Sweden / China')
        liz.setProperty('video.writing_credits', 'None / Want / To Admit It')
        liz.setProperty('video.director', 'spiff / spiff2')
        liz.setProperty('video.actors', '2')
        liz.setProperty('video.actor1.name', 'spiff')
        liz.setProperty('video.actor1.role', 'himself')
        liz.setProperty('video.actor1.sort_order', '2')
        liz.setProperty('video.actor1.thumb', '/home/akva/Pictures/fish.jpg')
        liz.setProperty('video.actor1.thumb_aspect', 'poster')
        liz.setProperty('video.actor2.name', 'monkey')
        liz.setProperty('video.actor2.role', 'orange')
        liz.setProperty('video.actor2.sort_order', '1')
        liz.setProperty('video.actor1.thumb_aspect', '1.78')
        liz.setProperty('video.actor2.thumb', '/home/akva/Pictures/coffee.jpg')
        liz.setProperty('video.date_added', '2016-01-01')
        xbmcplugin.setResolvedUrl(handle=int(sys.argv[1]), succeeded=True, listitem=liz)
    elif url == '/path/to/episode2':
        liz=xbmcgui.ListItem('Demo Episode 2', offscreen=True)
        liz.setProperty('video.original_title', 'Demo æpisod 2x2')
        liz.setProperty('video.sort_title', '1')
        liz.setProperty('video.episode', '2')
        liz.setProperty('video.season', '2')
        liz.setProperty('video.ratings', '1')
        liz.setProperty('video.rating1.value', '5')
        liz.setProperty('video.rating1.votes', '100')
        liz.setProperty('video.user_rating', '5')
        liz.setProperty('video.unique_id', '123')
        liz.setProperty('video.plot_outline', 'Outline yo')
        liz.setProperty('video.plot', 'Plot yo')
        liz.setProperty('video.tag_line', 'Tag yo')
        liz.setProperty('video.duration_minutes', '110')
        liz.setProperty('video.mpaa', 'T')
        liz.setProperty('video.first_aired', '2007-01-01')
        liz.setProperty('video.thumbs', '2')
        liz.setProperty('video.thumb1.url', 'DefaultBackFanart.png')
        liz.setProperty('video.thumb1.aspect', '1.78')
        liz.setProperty('video.thumb2.url', '/home/akva/Pictures/hawaii-shirt.png')
        liz.setProperty('video.thumb2.aspect', '2.35')
        liz.setProperty('video.genre','Action / Comedy')
        liz.setProperty('video.country', 'Norway / Sweden / China')
        liz.setProperty('video.writing_credits', 'None / Want / To Admit It')
        liz.setProperty('video.director', 'spiff / spiff2')
        liz.setProperty('video.actors', '2')
        liz.setProperty('video.actor1.name', 'spiff')
        liz.setProperty('video.actor1.role', 'himself')
        liz.setProperty('video.actor1.sort_order', '2')
        liz.setProperty('video.actor1.thumb', '/home/akva/Pictures/fish.jpg')
        liz.setProperty('video.actor1.thumb_aspect', 'poster')
        liz.setProperty('video.actor2.name', 'monkey')
        liz.setProperty('video.actor2.role', 'orange')
        liz.setProperty('video.actor2.sort_order', '1')
        liz.setProperty('video.actor1.thumb_aspect', '1.78')
        liz.setProperty('video.actor2.thumb', '/home/akva/Pictures/coffee.jpg')
        liz.setProperty('video.date_added', '2016-01-01')
        xbmcplugin.setResolvedUrl(handle=int(sys.argv[1]), succeeded=True, listitem=liz)

# Signal Kodi that the directory listing (if any) is complete.
xbmcplugin.endOfDirectory(int(sys.argv[1]))
| gpl-2.0 |
pleaseproject/python-for-android | python3-alpha/python3-src/Lib/lib2to3/btm_utils.py | 374 | 10011 | "Utility functions used by the btm_matcher module"
from . import pytree
from .pgen2 import grammar, token
from .pygram import pattern_symbols, python_symbols
# Short aliases for the grammar tables imported above: pattern-grammar
# symbols, Python-grammar symbols, the operator->token map, and the
# token-number constants module.
syms = pattern_symbols
pysyms = python_symbols
tokens = grammar.opmap
token_labels = token

# Sentinel node types used by the intermediate pattern representation.
# Negative so they can never collide with real token/symbol numbers.
TYPE_ANY = -1
TYPE_ALTERNATIVES = -2
TYPE_GROUP = -3
class MinNode(object):
    """This class serves as an intermediate representation of the
    pattern tree during the conversion to sets of leaf-to-root
    subpatterns"""

    def __init__(self, type=None, name=None):
        # `type` is a token/symbol number or one of the negative
        # TYPE_* sentinels; `name` is set for named (NAME) nodes.
        self.type = type
        self.name = name
        self.children = []
        self.leaf = False
        self.parent = None
        # Scratch accumulators used by leaf_to_root while collapsing
        # alternative and group subtrees.
        self.alternatives = []
        self.group = []

    def __repr__(self):
        return str(self.type) + ' ' + str(self.name)

    def leaf_to_root(self):
        """Internal method. Returns a characteristic path of the
        pattern tree. This method must be run for all leaves until the
        linear subpatterns are merged into a single one.

        Walks from this leaf up toward the root, appending each node's
        type (or name) to `subp`.  Alternative/group nodes buffer the
        partial paths of their children and only continue upward once
        every child has reported in; until then the walk aborts and
        returns None.
        """
        node = self
        subp = []
        while node:
            if node.type == TYPE_ALTERNATIVES:
                node.alternatives.append(subp)
                if len(node.alternatives) == len(node.children):
                    #last alternative
                    subp = [tuple(node.alternatives)]
                    node.alternatives = []
                    node = node.parent
                    continue
                else:
                    # Not all alternatives collected yet; give up for now.
                    node = node.parent
                    subp = None
                    break

            if node.type == TYPE_GROUP:
                node.group.append(subp)
                #probably should check the number of leaves
                if len(node.group) == len(node.children):
                    # Collapse the group to its most characteristic member.
                    subp = get_characteristic_subpattern(node.group)
                    node.group = []
                    node = node.parent
                    continue
                else:
                    node = node.parent
                    subp = None
                    break

            if node.type == token_labels.NAME and node.name:
                #in case of type=name, use the name instead
                subp.append(node.name)
            else:
                subp.append(node.type)

            node = node.parent
        return subp

    def get_linear_subpattern(self):
        """Drives the leaf_to_root method. The reason that
        leaf_to_root must be run multiple times is because we need to
        reject 'group' matches; for example the alternative form
        (a | b c) creates a group [b c] that needs to be matched. Since
        matching multiple linear patterns overcomes the automaton's
        capabilities, leaf_to_root merges each group into a single
        choice based on 'characteristic'ity,

        i.e. (a|b c) -> (a|b) if b more characteristic than c

        Returns: The most 'characteristic'(as defined by
          get_characteristic_subpattern) path for the compiled pattern
          tree.
        """
        # Try each leaf in turn; the first complete path wins.
        for l in self.leaves():
            subp = l.leaf_to_root()
            if subp:
                return subp

    def leaves(self):
        "Generator that returns the leaves of the tree"
        for child in self.children:
            for x in child.leaves():
                yield x
        if not self.children:
            yield self
def reduce_tree(node, parent=None):
    """
    Internal function. Reduces a compiled pattern tree to an
    intermediate representation suitable for feeding the
    automaton. This also trims off any optional pattern elements(like
    [a], a*).
    """
    new_node = None
    #switch on the node type
    if node.type == syms.Matcher:
        #skip
        node = node.children[0]
    if node.type == syms.Alternatives :
        #2 cases
        if len(node.children) <= 2:
            #just a single 'Alternative', skip this node
            new_node = reduce_tree(node.children[0], parent)
        else:
            #real alternatives
            new_node = MinNode(type=TYPE_ALTERNATIVES)
            #skip odd children('|' tokens)
            for child in node.children:
                if node.children.index(child)%2:
                    continue
                reduced = reduce_tree(child, new_node)
                if reduced is not None:
                    new_node.children.append(reduced)
    elif node.type == syms.Alternative:
        if len(node.children) > 1:
            # Multi-element alternative: all elements must match in order,
            # so model it as a group.
            new_node = MinNode(type=TYPE_GROUP)
            for child in node.children:
                reduced = reduce_tree(child, new_node)
                if reduced:
                    new_node.children.append(reduced)
            if not new_node.children:
                # delete the group if all of the children were reduced to None
                new_node = None
        else:
            new_node = reduce_tree(node.children[0], parent)
    elif node.type == syms.Unit:
        if (isinstance(node.children[0], pytree.Leaf) and
            node.children[0].value == '('):
            #skip parentheses
            return reduce_tree(node.children[1], parent)
        if ((isinstance(node.children[0], pytree.Leaf) and
            node.children[0].value == '[')
            or
            (len(node.children)>1 and
            hasattr(node.children[1], "value") and
            node.children[1].value == '[')):
            #skip whole unit if its optional
            return None
        # Classify this unit's children: details (<...>), repeaters
        # (*, +, {m,n}), nested alternatives, and a possible 'name=' prefix.
        leaf = True
        details_node = None
        alternatives_node = None
        has_repeater = False
        repeater_node = None
        has_variable_name = False
        for child in node.children:
            if child.type == syms.Details:
                leaf = False
                details_node = child
            elif child.type == syms.Repeater:
                has_repeater = True
                repeater_node = child
            elif child.type == syms.Alternatives:
                alternatives_node = child
            if hasattr(child, 'value') and child.value == '=': # variable name
                has_variable_name = True
        #skip variable name
        if has_variable_name:
            #skip variable name, '='
            name_leaf = node.children[2]
            if hasattr(name_leaf, 'value') and name_leaf.value == '(':
                # skip parenthesis
                name_leaf = node.children[3]
        else:
            name_leaf = node.children[0]
        #set node type
        if name_leaf.type == token_labels.NAME:
            #(python) non-name or wildcard
            if name_leaf.value == 'any':
                new_node = MinNode(type=TYPE_ANY)
            else:
                if hasattr(token_labels, name_leaf.value):
                    new_node = MinNode(type=getattr(token_labels, name_leaf.value))
                else:
                    new_node = MinNode(type=getattr(pysyms, name_leaf.value))
        elif name_leaf.type == token_labels.STRING:
            #(python) name or character; remove the apostrophes from
            #the string value
            name = name_leaf.value.strip("'")
            if name in tokens:
                new_node = MinNode(type=tokens[name])
            else:
                new_node = MinNode(type=token_labels.NAME, name=name)
        elif name_leaf.type == syms.Alternatives:
            new_node = reduce_tree(alternatives_node, parent)
        #handle repeaters
        if has_repeater:
            if repeater_node.children[0].value == '*':
                #reduce to None
                new_node = None
            elif repeater_node.children[0].value == '+':
                #reduce to a single occurrence i.e. do nothing
                pass
            else:
                #TODO: handle {min, max} repeaters
                raise NotImplementedError
            pass
        #add children
        if details_node and new_node is not None:
            for child in details_node.children[1:-1]:
                #skip '<', '>' markers
                reduced = reduce_tree(child, new_node)
                if reduced is not None:
                    new_node.children.append(reduced)
    if new_node:
        new_node.parent = parent
    return new_node
def get_characteristic_subpattern(subpatterns):
    """Pick the most 'characteristic' pattern from a list of linear
    subpatterns.

    Preference order: patterns containing variable names, then patterns
    containing common Python keywords, then patterns containing common
    punctuation characters.  Ties are broken by length (longest wins).
    """
    if not isinstance(subpatterns, list):
        return subpatterns
    if len(subpatterns) == 1:
        return subpatterns[0]
    common_names = ['in', 'for', 'if', 'not', 'None']
    common_chars = "[]().,:"
    with_names = []
    with_common_names = []
    with_common_chars = []
    for candidate in subpatterns:
        # Only candidates containing string entries carry name/char info.
        if not any(rec_test(candidate, lambda x: type(x) is str)):
            continue
        if any(rec_test(candidate,
                        lambda x: isinstance(x, str) and x in common_chars)):
            with_common_chars.append(candidate)
        elif any(rec_test(candidate,
                          lambda x: isinstance(x, str) and x in common_names)):
            with_common_names.append(candidate)
        else:
            with_names.append(candidate)
    # Narrow to the most preferred non-empty bucket, if any.
    pool = with_names or with_common_names or with_common_chars or subpatterns
    # Of the remaining candidates, the longest is the most selective.
    return max(pool, key=len)
def rec_test(sequence, test_func):
    """Lazily apply *test_func* to every item of *sequence*, descending
    recursively into nested lists and tuples; yields one result per
    leaf item."""
    for item in sequence:
        if isinstance(item, (list, tuple)):
            # Recurse into nested containers, flattening the results.
            yield from rec_test(item, test_func)
        else:
            yield test_func(item)
| apache-2.0 |
squall1988/lquant | backtest/finance/risk/cumulative.py | 1 | 17396 | #
# Copyright 2014 Quantopian, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import functools
import logbook
import math
import numpy as np
import backtest.utils.math_utils as zp_math
import pandas as pd
from pandas.tseries.tools import normalize_date
from six import iteritems
from . risk import (
alpha,
check_entry,
choose_treasury,
downside_risk,
sharpe_ratio,
sortino_ratio,
)
from __init__ import VERSION_LABEL
# Module-level logger for cumulative risk metric calculations.
log = logbook.Logger('Risk Cumulative')
# Risk-free rate lookup: always use the 10-year treasury curve, without
# compounding the rate over the period.
choose_treasury = functools.partial(choose_treasury, lambda *args: '10year',
                                    compound=False)
def information_ratio(algo_volatility, algorithm_return, benchmark_return):
    """
    http://en.wikipedia.org/wiki/Information_ratio

    Args:
        algo_volatility (float): Annualized volatility of the algorithm.
        algorithm_return (float): Annualized mean algorithm return.
        benchmark_return (float): Annualized mean benchmark return.

    Returns:
        float. Information ratio (NaN when volatility is ~0).
    """
    # Guard against division by (numerically) zero volatility.
    if zp_math.tolerant_equals(algo_volatility, 0):
        return np.nan
    # The volatility is annualized, i.e. the sqrt of the annualization
    # factor already sits in the denominator; pairing it with annualized
    # returns in the numerator keeps the factors consistent.
    active_return = algorithm_return - benchmark_return
    return active_return / algo_volatility
class RiskMetricsCumulative(object):
    """
    :Usage:
        Instantiate RiskMetricsCumulative once.
        Call update() method on each dt to update the metrics.
    """
    # Names of the per-day metric arrays reported by __repr__; each is a
    # container attribute populated by update().
    METRIC_NAMES = (
        'alpha',
        'beta',
        'sharpe',
        'algorithm_volatility',
        'benchmark_volatility',
        'downside_risk',
        'sortino',
        'information',
    )
    def __init__(self, sim_params, env,
                 create_first_day_stats=False):
        self.treasury_curves = env.treasury_curves
        # Normalize period bounds to midnight so they line up with the
        # daily trading-day index.
        self.start_date = sim_params.period_start.replace(
            hour=0, minute=0, second=0, microsecond=0
        )
        self.end_date = sim_params.period_end.replace(
            hour=0, minute=0, second=0, microsecond=0
        )
        self.trading_days = env.days_in_range(self.start_date, self.end_date)
        # Hold on to the trading day before the start,
        # used for index of the zero return value when forcing returns
        # on the first day.
        self.day_before_start = self.start_date - env.trading_days.freq
        last_day = normalize_date(sim_params.period_end)
        if last_day not in self.trading_days:
            last_day = pd.tseries.index.DatetimeIndex(
                [last_day]
            )
            self.trading_days = self.trading_days.append(last_day)
        self.sim_params = sim_params
        self.env = env
        self.create_first_day_stats = create_first_day_stats
        cont_index = self.trading_days
        self.cont_index = cont_index
        self.cont_len = len(self.cont_index)
        # Pre-allocate one NaN slot per trading day for every metric series.
        empty_cont = np.full(self.cont_len, np.nan)
        self.algorithm_returns_cont = empty_cont.copy()
        self.benchmark_returns_cont = empty_cont.copy()
        self.algorithm_cumulative_leverages_cont = empty_cont.copy()
        self.mean_returns_cont = empty_cont.copy()
        self.annualized_mean_returns_cont = empty_cont.copy()
        self.mean_benchmark_returns_cont = empty_cont.copy()
        self.annualized_mean_benchmark_returns_cont = empty_cont.copy()
        # The returns at a given time are read and reset from the respective
        # returns container.
        self.algorithm_returns = None
        self.benchmark_returns = None
        self.mean_returns = None
        self.annualized_mean_returns = None
        self.mean_benchmark_returns = None
        self.annualized_mean_benchmark_returns = None
        self.algorithm_cumulative_returns = empty_cont.copy()
        self.benchmark_cumulative_returns = empty_cont.copy()
        self.algorithm_cumulative_leverages = empty_cont.copy()
        self.excess_returns = empty_cont.copy()
        # Location/timestamp of the most recent update() call.
        self.latest_dt_loc = 0
        self.latest_dt = cont_index[0]
        self.benchmark_volatility = empty_cont.copy()
        self.algorithm_volatility = empty_cont.copy()
        self.beta = empty_cont.copy()
        self.alpha = empty_cont.copy()
        self.sharpe = empty_cont.copy()
        self.downside_risk = empty_cont.copy()
        self.sortino = empty_cont.copy()
        self.information = empty_cont.copy()
        self.drawdowns = empty_cont.copy()
        self.max_drawdowns = empty_cont.copy()
        self.max_drawdown = 0
        self.max_leverages = empty_cont.copy()
        self.max_leverage = 0
        # Running high-water mark of cumulative returns (used by drawdowns).
        self.current_max = -np.inf
        # Per-day cache of treasury rates (see update()).
        self.daily_treasury = pd.Series(index=self.trading_days)
        self.treasury_period_return = np.nan
        self.num_trading_days = 0
def update(self, dt, algorithm_returns, benchmark_returns, leverage):
# Keep track of latest dt for use in to_dict and other methods
# that report current state.
self.latest_dt = dt
dt_loc = self.cont_index.get_loc(dt)
self.latest_dt_loc = dt_loc
self.algorithm_returns_cont[dt_loc] = algorithm_returns
self.algorithm_returns = self.algorithm_returns_cont[:dt_loc + 1]
self.num_trading_days = len(self.algorithm_returns)
if self.create_first_day_stats:
if len(self.algorithm_returns) == 1:
self.algorithm_returns = np.append(0.0, self.algorithm_returns)
self.algorithm_cumulative_returns[dt_loc] = \
self.calculate_cumulative_returns(self.algorithm_returns)
algo_cumulative_returns_to_date = \
self.algorithm_cumulative_returns[:dt_loc + 1]
self.mean_returns_cont[dt_loc] = \
algo_cumulative_returns_to_date[dt_loc] / self.num_trading_days
self.mean_returns = self.mean_returns_cont[:dt_loc + 1]
self.annualized_mean_returns_cont[dt_loc] = \
self.mean_returns_cont[dt_loc] * 252
self.annualized_mean_returns = \
self.annualized_mean_returns_cont[:dt_loc + 1]
if self.create_first_day_stats:
if len(self.mean_returns) == 1:
self.mean_returns = np.append(0.0, self.mean_returns)
self.annualized_mean_returns = np.append(
0.0, self.annualized_mean_returns)
self.benchmark_returns_cont[dt_loc] = benchmark_returns
self.benchmark_returns = self.benchmark_returns_cont[:dt_loc + 1]
if self.create_first_day_stats:
if len(self.benchmark_returns) == 1:
self.benchmark_returns = np.append(0.0, self.benchmark_returns)
self.benchmark_cumulative_returns[dt_loc] = \
self.calculate_cumulative_returns(self.benchmark_returns)
benchmark_cumulative_returns_to_date = \
self.benchmark_cumulative_returns[:dt_loc + 1]
self.mean_benchmark_returns_cont[dt_loc] = \
benchmark_cumulative_returns_to_date[dt_loc] / \
self.num_trading_days
self.mean_benchmark_returns = self.mean_benchmark_returns_cont[:dt_loc]
self.annualized_mean_benchmark_returns_cont[dt_loc] = \
self.mean_benchmark_returns_cont[dt_loc] * 252
self.annualized_mean_benchmark_returns = \
self.annualized_mean_benchmark_returns_cont[:dt_loc + 1]
self.algorithm_cumulative_leverages_cont[dt_loc] = leverage
self.algorithm_cumulative_leverages = \
self.algorithm_cumulative_leverages_cont[:dt_loc + 1]
if self.create_first_day_stats:
if len(self.algorithm_cumulative_leverages) == 1:
self.algorithm_cumulative_leverages = np.append(
0.0,
self.algorithm_cumulative_leverages)
if not len(self.algorithm_returns) and len(self.benchmark_returns):
message = "Mismatch between benchmark_returns ({bm_count}) and \
algorithm_returns ({algo_count}) in range {start} : {end} on {dt}"
message = message.format(
bm_count=len(self.benchmark_returns),
algo_count=len(self.algorithm_returns),
start=self.start_date,
end=self.end_date,
dt=dt
)
raise Exception(message)
self.update_current_max()
self.benchmark_volatility[dt_loc] = \
self.calculate_volatility(self.benchmark_returns)
self.algorithm_volatility[dt_loc] = \
self.calculate_volatility(self.algorithm_returns)
# caching the treasury rates for the minutely case is a
# big speedup, because it avoids searching the treasury
# curves on every minute.
# In both minutely and daily, the daily curve is always used.
treasury_end = dt.replace(hour=0, minute=0)
if np.isnan(self.daily_treasury[treasury_end]):
treasury_period_return = choose_treasury(
self.treasury_curves,
self.start_date,
treasury_end,
self.env,
)
self.daily_treasury[treasury_end] = treasury_period_return
self.treasury_period_return = self.daily_treasury[treasury_end]
self.excess_returns[dt_loc] = (
self.algorithm_cumulative_returns[dt_loc] -
self.treasury_period_return)
self.beta[dt_loc] = self.calculate_beta()
self.alpha[dt_loc] = self.calculate_alpha()
self.sharpe[dt_loc] = self.calculate_sharpe()
self.downside_risk[dt_loc] = \
self.calculate_downside_risk()
self.sortino[dt_loc] = self.calculate_sortino()
self.information[dt_loc] = self.calculate_information()
self.max_drawdown = self.calculate_max_drawdown()
self.max_drawdowns[dt_loc] = self.max_drawdown
self.max_leverage = self.calculate_max_leverage()
self.max_leverages[dt_loc] = self.max_leverage
    def to_dict(self):
        """
        Creates a dictionary representing the state of the risk report.
        Returns a dict object of the form:
        """
        dt = self.latest_dt
        dt_loc = self.latest_dt_loc
        period_label = dt.strftime("%Y-%m")
        rval = {
            'trading_days': self.num_trading_days,
            'benchmark_volatility':
            self.benchmark_volatility[dt_loc],
            'algo_volatility':
            self.algorithm_volatility[dt_loc],
            'treasury_period_return': self.treasury_period_return,
            # Though the two following keys say period return,
            # they would be more accurately called the cumulative return.
            # However, the keys need to stay the same, for now, for backwards
            # compatibility with existing consumers.
            'algorithm_period_return':
            self.algorithm_cumulative_returns[dt_loc],
            'benchmark_period_return':
            self.benchmark_cumulative_returns[dt_loc],
            'beta': self.beta[dt_loc],
            'alpha': self.alpha[dt_loc],
            'sharpe': self.sharpe[dt_loc],
            'sortino': self.sortino[dt_loc],
            'information': self.information[dt_loc],
            'excess_return': self.excess_returns[dt_loc],
            'max_drawdown': self.max_drawdown,
            'max_leverage': self.max_leverage,
            'period_label': period_label
        }
        # Entries failing check_entry (e.g. NaN values) are reported as None
        # so the result serializes cleanly.
        return {k: (None if check_entry(k, v) else v)
                for k, v in iteritems(rval)}
def __repr__(self):
statements = []
for metric in self.METRIC_NAMES:
value = getattr(self, metric)[-1]
if isinstance(value, list):
if len(value) == 0:
value = np.nan
else:
value = value[-1]
statements.append("{m}:{v}".format(m=metric, v=value))
return '\n'.join(statements)
def calculate_cumulative_returns(self, returns):
return (1. + returns).prod() - 1
def update_current_max(self):
if len(self.algorithm_cumulative_returns) == 0:
return
current_cumulative_return = \
self.algorithm_cumulative_returns[self.latest_dt_loc]
if self.current_max < current_cumulative_return:
self.current_max = current_cumulative_return
def calculate_max_drawdown(self):
if len(self.algorithm_cumulative_returns) == 0:
return self.max_drawdown
# The drawdown is defined as: (high - low) / high
# The above factors out to: 1.0 - (low / high)
#
# Instead of explicitly always using the low, use the current total
# return value, and test that against the max drawdown, which will
# exceed the previous max_drawdown iff the current return is lower than
# the previous low in the current drawdown window.
cur_drawdown = 1.0 - (
(1.0 + self.algorithm_cumulative_returns[self.latest_dt_loc])
/
(1.0 + self.current_max))
self.drawdowns[self.latest_dt_loc] = cur_drawdown
if self.max_drawdown < cur_drawdown:
return cur_drawdown
else:
return self.max_drawdown
def calculate_max_leverage(self):
# The leverage is defined as: the gross_exposure/net_liquidation
# gross_exposure = long_exposure + abs(short_exposure)
# net_liquidation = ending_cash + long_exposure + short_exposure
cur_leverage = self.algorithm_cumulative_leverages_cont[
self.latest_dt_loc]
return max(cur_leverage, self.max_leverage)
    def calculate_sharpe(self):
        """
        http://en.wikipedia.org/wiki/Sharpe_ratio
        """
        # Risk-free rate: the cached daily treasury rate for the latest date.
        return sharpe_ratio(
            self.algorithm_volatility[self.latest_dt_loc],
            self.annualized_mean_returns_cont[self.latest_dt_loc],
            self.daily_treasury[self.latest_dt.date()])
    def calculate_sortino(self):
        """
        http://en.wikipedia.org/wiki/Sortino_ratio
        """
        # Like Sharpe, but penalizes only downside deviation.
        return sortino_ratio(
            self.annualized_mean_returns_cont[self.latest_dt_loc],
            self.daily_treasury[self.latest_dt.date()],
            self.downside_risk[self.latest_dt_loc])
    def calculate_information(self):
        """
        http://en.wikipedia.org/wiki/Information_ratio
        """
        # Active (algo minus benchmark) return scaled by algo volatility.
        return information_ratio(
            self.algorithm_volatility[self.latest_dt_loc],
            self.annualized_mean_returns_cont[self.latest_dt_loc],
            self.annualized_mean_benchmark_returns_cont[self.latest_dt_loc])
    def calculate_alpha(self):
        """
        http://en.wikipedia.org/wiki/Alpha_(investment)
        """
        # CAPM alpha, using the current beta and the cached risk-free rate.
        return alpha(
            self.annualized_mean_returns_cont[self.latest_dt_loc],
            self.treasury_period_return,
            self.annualized_mean_benchmark_returns_cont[self.latest_dt_loc],
            self.beta[self.latest_dt_loc])
def calculate_volatility(self, daily_returns):
if len(daily_returns) <= 1:
return 0.0
return np.std(daily_returns, ddof=1) * math.sqrt(252)
    def calculate_downside_risk(self):
        """Downside deviation of algorithm returns below their mean."""
        # 252: annualization factor (trading days per year).
        return downside_risk(self.algorithm_returns,
                             self.mean_returns,
                             252)
def calculate_beta(self):
"""
.. math::
\\beta_a = \\frac{\mathrm{Cov}(r_a,r_p)}{\mathrm{Var}(r_p)}
http://en.wikipedia.org/wiki/Beta_(finance)
"""
# it doesn't make much sense to calculate beta for less than two
# values, so return none.
if len(self.algorithm_returns) < 2:
return 0.0
returns_matrix = np.vstack([self.algorithm_returns,
self.benchmark_returns])
C = np.cov(returns_matrix, ddof=1)
algorithm_covariance = C[0][1]
benchmark_variance = C[1][1]
beta = algorithm_covariance / benchmark_variance
return beta
    def __getstate__(self):
        # Pickle support: drop private ('_'-prefixed) attributes and tag the
        # payload with a schema version checked by __setstate__.
        state_dict = {k: v for k, v in iteritems(self.__dict__)
                      if not k.startswith('_')}
        STATE_VERSION = 3
        state_dict[VERSION_LABEL] = STATE_VERSION
        return state_dict
def __setstate__(self, state):
OLDEST_SUPPORTED_STATE = 3
version = state.pop(VERSION_LABEL)
if version < OLDEST_SUPPORTED_STATE:
raise BaseException("RiskMetricsCumulative \
saved state is too old.")
self.__dict__.update(state)
| bsd-2-clause |
ajyoon/brown | tests/test_core/test_font.py | 1 | 2603 | import unittest
from brown.core import brown
from brown.core.font import Font
class TestFont(unittest.TestCase):
    """Unit tests for Font construction and Font.deriving."""
    def setUp(self):
        # Fonts require an initialized brown application context before
        # their backing interface objects can be created.
        brown.setup()
    def test_init(self):
        """Constructor args should be mirrored on both the Font and its
        underlying interface object."""
        test_font = Font('Bravura', 12, 2, False)
        assert(test_font.family_name == 'Bravura')
        assert(test_font.size == 12)
        assert(test_font.weight == 2)
        assert(test_font.italic is False)
        assert(test_font._interface.family_name == 'Bravura')
        assert(test_font._interface.size == 12)
        assert(test_font._interface.weight == 2)
        assert(test_font._interface.italic is False)
    def test_default_init_values(self):
        # API default values canary
        test_font = Font('Bravura', 12)
        assert(test_font.weight is None)
        assert(test_font.italic is False)
    def test_deriving(self):
        """Font.deriving should copy exactly the fields not overridden,
        one omitted field per case below."""
        test_font = Font('Bravura', 12, 2, False)
        deriving_family_name = Font.deriving(test_font,
                                             size=14,
                                             weight=1,
                                             italic=True)
        assert(deriving_family_name.family_name == 'Bravura')
        assert(deriving_family_name.size == 14)
        assert(deriving_family_name.weight == 1)
        assert(deriving_family_name.italic is True)
        deriving_size = Font.deriving(test_font,
                                      family_name='Cormorant Garamond',
                                      weight=1,
                                      italic=True)
        assert(deriving_size.family_name == 'Cormorant Garamond')
        assert(deriving_size.size == 12)
        assert(deriving_size.weight == 1)
        assert(deriving_size.italic is True)
        deriving_weight = Font.deriving(test_font,
                                        family_name='Cormorant Garamond',
                                        size=14,
                                        italic=True)
        assert(deriving_weight.family_name == 'Cormorant Garamond')
        assert(deriving_weight.size == 14)
        assert(deriving_weight.weight == 2)
        assert(deriving_weight.italic is True)
        deriving_italic = Font.deriving(test_font,
                                        family_name='Cormorant Garamond',
                                        size=14,
                                        weight=2)
        assert(deriving_italic.family_name == 'Cormorant Garamond')
        assert(deriving_italic.size == 14)
        assert(deriving_italic.weight == 2)
        assert(deriving_italic.italic is False)
| gpl-3.0 |
SunPowered/pypi_py3 | setup.py | 1 | 1085 | """ A simple setuptools file """
from setuptools import setup
# Package metadata.
# BUG FIX: the version must be PEP 440 compliant; the previous value
# '0.1 dev' (space-separated) is invalid and is rejected or forcibly
# normalized by modern setuptools, so it is spelled as a proper
# development release here.
setup(
    name="PyPi_py3",
    version='0.1.dev0',
    description='Ensure proper python3 adherence among pypi package maintainers',
    long_description=(
        "Inspired by Guido's talk at PyCon2015, many packages on the pypi "
        "repository are still not Python3 supported. This project intends to "
        "inform these maintainers and provide a mechanism for newer, fresher "
        "developer blood to contribute and push forward."
    ),
    url="https://github.com/SunPowered/pypi_py3",
    author="Tim van Boxtel",
    author_email="tim@vanboxtel.ca",
    license="MIT",
    classifiers=[
        "Development Status :: 2 - Pre-Alpha",
        "Environment :: Console",
        "Environment :: Web Environment",
        "Intended Audience :: Developers",
        "License :: OSI Approved :: MIT License",
        "Natural Language :: English",
        "Programming Language :: Python :: 3 :: Only",
        "Topic :: Utilities"
    ],
    keywords=["python3", "pypi"],
    packages=["pypi_py3"]
)
| mit |
aavrug/askbot-devel | askbot/migrations/0015_rename_forum_contenttypes_to_askbot.py | 20 | 26626 | # encoding: utf-8
import datetime
from south.db import db
from south.v2 import DataMigration
from django.db import models
class Migration(DataMigration):
def forwards(self, orm):
"Write your forwards methods here."
content_types = orm['contenttypes.ContentType'].objects.filter(app_label='forum')
content_types.update(app_label='askbot')
pass
def backwards(self, orm):
"Write your backwards methods here."
content_types = orm['contenttypes.ContentType'].objects.filter(app_label='askbot')
content_types.update(app_label='forum')
pass
models = {
'askbot.activity': {
'Meta': {'object_name': 'Activity', 'db_table': "u'activity'"},
'active_at': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'activity_type': ('django.db.models.fields.SmallIntegerField', [], {}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_auditted': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'object_id': ('django.db.models.fields.PositiveIntegerField', [], {}),
'receiving_users': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'received_activity'", 'symmetrical': 'False', 'to': "orm['auth.User']"}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"})
},
'askbot.anonymousanswer': {
'Meta': {'object_name': 'AnonymousAnswer'},
'added_at': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'author': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']", 'null': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'ip_addr': ('django.db.models.fields.IPAddressField', [], {'max_length': '15'}),
'question': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'anonymous_answers'", 'to': "orm['askbot.Question']"}),
'session_key': ('django.db.models.fields.CharField', [], {'max_length': '40'}),
'summary': ('django.db.models.fields.CharField', [], {'max_length': '180'}),
'text': ('django.db.models.fields.TextField', [], {}),
'wiki': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'})
},
'askbot.anonymousquestion': {
'Meta': {'object_name': 'AnonymousQuestion'},
'added_at': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'author': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']", 'null': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'ip_addr': ('django.db.models.fields.IPAddressField', [], {'max_length': '15'}),
'session_key': ('django.db.models.fields.CharField', [], {'max_length': '40'}),
'summary': ('django.db.models.fields.CharField', [], {'max_length': '180'}),
'tagnames': ('django.db.models.fields.CharField', [], {'max_length': '125'}),
'text': ('django.db.models.fields.TextField', [], {}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '300'}),
'wiki': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'})
},
'askbot.answer': {
'Meta': {'object_name': 'Answer', 'db_table': "u'answer'"},
'accepted': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'accepted_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'added_at': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'author': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'answers'", 'to': "orm['auth.User']"}),
'comment_count': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'deleted': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'deleted_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'deleted_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'deleted_answers'", 'null': 'True', 'to': "orm['auth.User']"}),
'html': ('django.db.models.fields.TextField', [], {'null': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'last_edited_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'last_edited_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'last_edited_answers'", 'null': 'True', 'to': "orm['auth.User']"}),
'locked': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'locked_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'locked_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'locked_answers'", 'null': 'True', 'to': "orm['auth.User']"}),
'offensive_flag_count': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}),
'question': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'answers'", 'to': "orm['askbot.Question']"}),
'score': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'text': ('django.db.models.fields.TextField', [], {'null': 'True'}),
'vote_down_count': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'vote_up_count': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'wiki': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'wikified_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'})
},
'askbot.answerrevision': {
'Meta': {'object_name': 'AnswerRevision', 'db_table': "u'answer_revision'"},
'answer': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'revisions'", 'to': "orm['askbot.Answer']"}),
'author': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'answerrevisions'", 'to': "orm['auth.User']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'revised_at': ('django.db.models.fields.DateTimeField', [], {}),
'revision': ('django.db.models.fields.PositiveIntegerField', [], {}),
'summary': ('django.db.models.fields.CharField', [], {'max_length': '300', 'blank': 'True'}),
'text': ('django.db.models.fields.TextField', [], {})
},
'askbot.award': {
'Meta': {'object_name': 'Award', 'db_table': "u'award'"},
'awarded_at': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'badge': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'award_badge'", 'to': "orm['askbot.Badge']"}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'notified': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'object_id': ('django.db.models.fields.PositiveIntegerField', [], {}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'award_user'", 'to': "orm['auth.User']"})
},
'askbot.badge': {
'Meta': {'unique_together': "(('name', 'type'),)", 'object_name': 'Badge', 'db_table': "u'badge'"},
'awarded_count': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'awarded_to': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'badges'", 'symmetrical': 'False', 'through': "'Award'", 'to': "orm['auth.User']"}),
'description': ('django.db.models.fields.CharField', [], {'max_length': '300'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'multiple': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'}),
'slug': ('django.db.models.fields.SlugField', [], {'db_index': 'True', 'max_length': '50', 'blank': 'True'}),
'type': ('django.db.models.fields.SmallIntegerField', [], {})
},
'askbot.comment': {
'Meta': {'object_name': 'Comment', 'db_table': "u'comment'"},
'added_at': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'comment': ('django.db.models.fields.CharField', [], {'max_length': '2048'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'html': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '2048'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'object_id': ('django.db.models.fields.PositiveIntegerField', [], {}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'comments'", 'to': "orm['auth.User']"})
},
'askbot.emailfeedsetting': {
'Meta': {'object_name': 'EmailFeedSetting'},
'added_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'feed_type': ('django.db.models.fields.CharField', [], {'max_length': '16'}),
'frequency': ('django.db.models.fields.CharField', [], {'default': "'n'", 'max_length': '8'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'reported_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}),
'subscriber': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'notification_subscriptions'", 'to': "orm['auth.User']"})
},
'askbot.favoritequestion': {
'Meta': {'object_name': 'FavoriteQuestion', 'db_table': "u'favorite_question'"},
'added_at': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'question': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['askbot.Question']"}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'user_favorite_questions'", 'to': "orm['auth.User']"})
},
'askbot.flaggeditem': {
'Meta': {'unique_together': "(('content_type', 'object_id', 'user'),)", 'object_name': 'FlaggedItem', 'db_table': "u'flagged_item'"},
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'flagged_at': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'object_id': ('django.db.models.fields.PositiveIntegerField', [], {}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'flaggeditems'", 'to': "orm['auth.User']"})
},
'askbot.markedtag': {
'Meta': {'object_name': 'MarkedTag'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'reason': ('django.db.models.fields.CharField', [], {'max_length': '16'}),
'tag': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'user_selections'", 'to': "orm['askbot.Tag']"}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'tag_selections'", 'to': "orm['auth.User']"})
},
'askbot.question': {
'Meta': {'object_name': 'Question', 'db_table': "u'question'"},
'added_at': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'answer_accepted': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'answer_count': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'author': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'questions'", 'to': "orm['auth.User']"}),
'close_reason': ('django.db.models.fields.SmallIntegerField', [], {'null': 'True', 'blank': 'True'}),
'closed': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'closed_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'closed_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'closed_questions'", 'null': 'True', 'to': "orm['auth.User']"}),
'comment_count': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'deleted': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'deleted_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'deleted_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'deleted_questions'", 'null': 'True', 'to': "orm['auth.User']"}),
'favorited_by': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'favorite_questions'", 'symmetrical': 'False', 'through': "'FavoriteQuestion'", 'to': "orm['auth.User']"}),
'favourite_count': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'followed_by': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'followed_questions'", 'symmetrical': 'False', 'to': "orm['auth.User']"}),
'html': ('django.db.models.fields.TextField', [], {'null': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'last_activity_at': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_activity_by': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'last_active_in_questions'", 'to': "orm['auth.User']"}),
'last_edited_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'last_edited_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'last_edited_questions'", 'null': 'True', 'to': "orm['auth.User']"}),
'locked': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'locked_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'locked_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'locked_questions'", 'null': 'True', 'to': "orm['auth.User']"}),
'offensive_flag_count': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}),
'score': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'summary': ('django.db.models.fields.CharField', [], {'max_length': '180'}),
'tagnames': ('django.db.models.fields.CharField', [], {'max_length': '125'}),
'tags': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'questions'", 'symmetrical': 'False', 'to': "orm['askbot.Tag']"}),
'text': ('django.db.models.fields.TextField', [], {'null': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '300'}),
'view_count': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'vote_down_count': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'vote_up_count': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'wiki': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'wikified_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'})
},
'askbot.questionrevision': {
'Meta': {'object_name': 'QuestionRevision', 'db_table': "u'question_revision'"},
'author': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'questionrevisions'", 'to': "orm['auth.User']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'question': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'revisions'", 'to': "orm['askbot.Question']"}),
'revised_at': ('django.db.models.fields.DateTimeField', [], {}),
'revision': ('django.db.models.fields.PositiveIntegerField', [], {}),
'summary': ('django.db.models.fields.CharField', [], {'max_length': '300', 'blank': 'True'}),
'tagnames': ('django.db.models.fields.CharField', [], {'max_length': '125'}),
'text': ('django.db.models.fields.TextField', [], {}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '300'})
},
'askbot.questionview': {
'Meta': {'object_name': 'QuestionView'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'question': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'viewed'", 'to': "orm['askbot.Question']"}),
'when': ('django.db.models.fields.DateTimeField', [], {}),
'who': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'question_views'", 'to': "orm['auth.User']"})
},
'askbot.repute': {
'Meta': {'object_name': 'Repute', 'db_table': "u'repute'"},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'negative': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}),
'positive': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}),
'question': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['askbot.Question']"}),
'reputation': ('django.db.models.fields.IntegerField', [], {'default': '1'}),
'reputation_type': ('django.db.models.fields.SmallIntegerField', [], {}),
'reputed_at': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"})
},
'askbot.tag': {
'Meta': {'object_name': 'Tag', 'db_table': "u'tag'"},
'created_by': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'created_tags'", 'to': "orm['auth.User']"}),
'deleted': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'deleted_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'deleted_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'deleted_tags'", 'null': 'True', 'to': "orm['auth.User']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'}),
'used_count': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'})
},
'askbot.validationhash': {
'Meta': {'unique_together': "(('user', 'type'),)", 'object_name': 'ValidationHash'},
'expiration': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2010, 6, 13, 23, 48, 5, 784060)'}),
'hash_code': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'seed': ('django.db.models.fields.CharField', [], {'max_length': '12'}),
'type': ('django.db.models.fields.CharField', [], {'max_length': '12'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"})
},
'askbot.vote': {
'Meta': {'unique_together': "(('content_type', 'object_id', 'user'),)", 'object_name': 'Vote', 'db_table': "u'vote'"},
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'object_id': ('django.db.models.fields.PositiveIntegerField', [], {}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'votes'", 'to': "orm['auth.User']"}),
'vote': ('django.db.models.fields.SmallIntegerField', [], {}),
'voted_at': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'})
},
'auth.group': {
'Meta': {'object_name': 'Group'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
'auth.permission': {
'Meta': {'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'auth.user': {
'Meta': {'object_name': 'User'},
'about': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'bronze': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}),
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'date_of_birth': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'email_isvalid': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'email_key': ('django.db.models.fields.CharField', [], {'max_length': '32', 'null': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'gold': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}),
'gravatar': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
'hide_ignored_questions': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True', 'blank': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'last_seen': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'location': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'questions_per_page': ('django.db.models.fields.SmallIntegerField', [], {'default': '10'}),
'real_name': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'}),
'reputation': ('django.db.models.fields.PositiveIntegerField', [], {'default': '1'}),
'response_count': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'silver': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}),
'tag_filter_setting': ('django.db.models.fields.CharField', [], {'default': "'ignored'", 'max_length': '16'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'}),
'website': ('django.db.models.fields.URLField', [], {'max_length': '200', 'blank': 'True'})
},
'contenttypes.contenttype': {
'Meta': {'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
}
}
complete_apps = ['askbot']
| gpl-3.0 |
PulsePod/evepod | lib/python2.7/site-packages/werkzeug/contrib/fixers.py | 464 | 9949 | # -*- coding: utf-8 -*-
"""
werkzeug.contrib.fixers
~~~~~~~~~~~~~~~~~~~~~~~
.. versionadded:: 0.5
This module includes various helpers that fix bugs in web servers. They may
be necessary for some versions of a buggy web server but not others. We try
to stay updated with the status of the bugs as good as possible but you have
to make sure whether they fix the problem you encounter.
If you notice bugs in webservers not fixed in this module consider
contributing a patch.
:copyright: Copyright 2009 by the Werkzeug Team, see AUTHORS for more details.
:license: BSD, see LICENSE for more details.
"""
try:
from urllib import unquote
except ImportError:
from urllib.parse import unquote
from werkzeug.http import parse_options_header, parse_cache_control_header, \
parse_set_header
from werkzeug.useragents import UserAgent
from werkzeug.datastructures import Headers, ResponseCacheControl
class CGIRootFix(object):
    """Middleware that repairs the application root for CGI/FastCGI
    deployments where the server reports the cgi script's own path rather
    than the path visitors actually request.

    .. versionchanged:: 0.9
       Added `app_root` parameter and renamed from `LighttpdCGIRootFix`.

    :param app: the WSGI application
    :param app_root: Defaulting to ``'/'``, you can set this to something else
        if your app is mounted somewhere else.
    """

    def __init__(self, app, app_root='/'):
        self.app = app
        self.app_root = app_root

    def __call__(self, environ, start_response):
        # PATH_INFO is only rebuilt for older Lighty versions or when no
        # server software is reported at all; newer Werkzeug added this
        # check and we keep the lenient behaviour so tests that omit the
        # SERVER_SOFTWARE key keep working.
        rebuild = ('SERVER_SOFTWARE' not in environ
                   or environ['SERVER_SOFTWARE'] < 'lighttpd/1.4.28')
        if rebuild:
            script_name = environ.get('SCRIPT_NAME', '')
            path_info = environ.get('PATH_INFO', '')
            environ['PATH_INFO'] = script_name + path_info
        # The script name always becomes the configured application root.
        environ['SCRIPT_NAME'] = self.app_root.strip('/')
        return self.app(environ, start_response)
# Backwards-compatible alias: this class was called ``LighttpdCGIRootFix``
# before Werkzeug 0.9 renamed it to ``CGIRootFix``.
LighttpdCGIRootFix = CGIRootFix
class PathInfoFromRequestUriFix(object):
    """Rebuild ``PATH_INFO`` from the raw request URI.

    On Windows the environment is limited to the system charset, so the
    `PATH_INFO` variable can lose information (a problem e.g. for CGI
    scripts on a Windows Apache).  This fixer recreates `PATH_INFO` from
    `REQUEST_URI`, `REQUEST_URL`, or `UNENCODED_URL` — whichever of those
    the webserver provides — so it can only be applied when the server
    supports at least one of them.

    :param app: the WSGI application
    """

    def __init__(self, app):
        self.app = app

    def __call__(self, environ, start_response):
        # Try the candidate variables in a fixed order and stop at the
        # first one that both exists and matches the script name prefix.
        for candidate in ('REQUEST_URL', 'REQUEST_URI', 'UNENCODED_URL'):
            if candidate not in environ:
                continue
            request_uri = unquote(environ[candidate])
            script_name = unquote(environ.get('SCRIPT_NAME', ''))
            if request_uri.startswith(script_name):
                remainder = request_uri[len(script_name):]
                # Strip the query string; only the path belongs in PATH_INFO.
                environ['PATH_INFO'] = remainder.split('?', 1)[0]
                break
        return self.app(environ, start_response)
class ProxyFix(object):
    """Add HTTP proxy support to an application that was not designed with
    proxies in mind, by rewriting ``REMOTE_ADDR`` and ``HTTP_HOST`` from
    the ``X-Forwarded-*`` headers.

    If more than one proxy server sits in front of the app, set
    `num_proxies` accordingly.  Do not use this middleware in non-proxy
    setups for security reasons.

    The original values of `REMOTE_ADDR` and `HTTP_HOST` are kept in the
    WSGI environment as `werkzeug.proxy_fix.orig_remote_addr` and
    `werkzeug.proxy_fix.orig_http_host`.

    :param app: the WSGI application
    :param num_proxies: the number of proxy servers in front of the app.
    """

    def __init__(self, app, num_proxies=1):
        self.app = app
        self.num_proxies = num_proxies

    def get_remote_addr(self, forwarded_for):
        """Selects the new remote addr from the given list of ips in
        X-Forwarded-For.  By default the entry added by the `num_proxies`-th
        proxy server is chosen (before 0.9 the first was always picked).

        .. versionadded:: 0.8
        """
        if len(forwarded_for) < self.num_proxies:
            return None
        return forwarded_for[-self.num_proxies]

    def __call__(self, environ, start_response):
        forwarded_proto = environ.get('HTTP_X_FORWARDED_PROTO', '')
        forwarded_host = environ.get('HTTP_X_FORWARDED_HOST', '')
        raw_forwarded_for = environ.get('HTTP_X_FORWARDED_FOR', '')
        # Preserve the untouched values before rewriting anything.
        environ['werkzeug.proxy_fix.orig_wsgi_url_scheme'] = \
            environ.get('wsgi.url_scheme')
        environ['werkzeug.proxy_fix.orig_remote_addr'] = \
            environ.get('REMOTE_ADDR')
        environ['werkzeug.proxy_fix.orig_http_host'] = \
            environ.get('HTTP_HOST')
        # X-Forwarded-For is a comma separated list; drop padding and
        # empty entries before selecting the client address.
        addresses = [addr.strip() for addr in raw_forwarded_for.split(',')]
        addresses = [addr for addr in addresses if addr]
        remote_addr = self.get_remote_addr(addresses)
        if remote_addr is not None:
            environ['REMOTE_ADDR'] = remote_addr
        if forwarded_host:
            environ['HTTP_HOST'] = forwarded_host
        if forwarded_proto:
            environ['wsgi.url_scheme'] = forwarded_proto
        return self.app(environ, start_response)
class HeaderRewriterFix(object):
    """Middleware that strips some response headers and appends others.
    Useful e.g. to drop the `Date` header from responses when the server
    adds its own, or to tack on `X-Powered-By` headers::

        app = HeaderRewriterFix(app, remove_headers=['Date'],
                                add_headers=[('X-Powered-By', 'WSGI')])

    :param app: the WSGI application
    :param remove_headers: a sequence of header keys that should be
                           removed.
    :param add_headers: a sequence of ``(key, value)`` tuples that should
                        be added.
    """

    def __init__(self, app, remove_headers=None, add_headers=None):
        self.app = app
        # Removal is matched case-insensitively, so normalise once here.
        self.remove_headers = {name.lower() for name in (remove_headers or ())}
        self.add_headers = list(add_headers or ())

    def __call__(self, environ, start_response):
        def rewriting_start_response(status, headers, exc_info=None):
            kept = [(key, value) for key, value in headers
                    if key.lower() not in self.remove_headers]
            kept.extend(self.add_headers)
            return start_response(status, kept, exc_info)
        return self.app(environ, rewriting_start_response)
class InternetExplorerFix(object):
    """This middleware fixes a couple of bugs with Microsoft Internet
    Explorer.  Currently the following fixes are applied:

    -   removing of `Vary` headers for unsupported mimetypes which
        causes troubles with caching.  Can be disabled by passing
        ``fix_vary=False`` to the constructor.
        see: http://support.microsoft.com/kb/824847/en-us

    -   removes offending headers to work around caching bugs in
        Internet Explorer if `Content-Disposition` is set.  Can be
        disabled by passing ``fix_attach=False`` to the constructor.

    If it does not detect affected Internet Explorer versions it won't touch
    the request / response.
    """

    # This code was inspired by Django fixers for the same bugs.  The
    # fix_vary and fix_attach fixers were originally implemented in Django
    # by Michael Axiak and is available as part of the Django project:
    #     http://code.djangoproject.com/ticket/4148

    def __init__(self, app, fix_vary=True, fix_attach=True):
        # app: the wrapped WSGI application.
        self.app = app
        # Toggles for the two independent workarounds described above.
        self.fix_vary = fix_vary
        self.fix_attach = fix_attach

    def fix_headers(self, environ, headers, status=None):
        """Mutate `headers` in place, applying the enabled IE workarounds.

        `headers` is a werkzeug ``Headers`` instance (see ``run_fixed``);
        `status` is accepted for subclass hooks but unused here.
        """
        if self.fix_vary:
            # IE mishandles `Vary` on mimetypes other than these three,
            # breaking its cache -- drop the header for everything else.
            header = headers.get('content-type', '')
            mimetype, options = parse_options_header(header)
            if mimetype not in ('text/html', 'text/plain', 'text/sgml'):
                headers.pop('vary', None)

        if self.fix_attach and 'content-disposition' in headers:
            # For downloads, IE chokes on `no-cache`: remove it from
            # Pragma and Cache-Control, deleting either header entirely
            # if nothing else remains in it.
            pragma = parse_set_header(headers.get('pragma', ''))
            pragma.discard('no-cache')
            header = pragma.to_header()
            if not header:
                headers.pop('pragma', '')
            else:
                headers['Pragma'] = header

            header = headers.get('cache-control', '')
            if header:
                cc = parse_cache_control_header(header,
                                                cls=ResponseCacheControl)
                # Also clear `no-store`, which triggers the same IE bug.
                cc.no_cache = None
                cc.no_store = False
                header = cc.to_header()
                if not header:
                    headers.pop('cache-control', '')
                else:
                    headers['Cache-Control'] = header

    def run_fixed(self, environ, start_response):
        """Run the wrapped app with a `start_response` that rewrites the
        response headers through :meth:`fix_headers`.
        """
        def fixing_start_response(status, headers, exc_info=None):
            # Wrap the raw header list so fix_headers can use dict-style
            # case-insensitive access, then convert back for WSGI.
            headers = Headers(headers)
            self.fix_headers(environ, headers, status)
            return start_response(status, headers.to_wsgi_list(), exc_info)
        return self.app(environ, fixing_start_response)

    def __call__(self, environ, start_response):
        # Only intercept requests from IE; everything else passes through
        # untouched.
        ua = UserAgent(environ)
        if ua.browser != 'msie':
            return self.app(environ, start_response)
        return self.run_fixed(environ, start_response)
| apache-2.0 |
Plain-Andy-legacy/android_external_chromium_org | tools/site_compare/commands/compare2.py | 189 | 6517 | # Copyright (c) 2011 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""SiteCompare command to invoke the same page in two versions of a browser.
Does the easiest compatibility test: equality comparison between two different
versions of the same browser. Invoked with a series of command line options
that specify which URLs to check, which browser to use, where to store results,
etc.
"""
import os # Functions for walking the directory tree
import tempfile # Get a temporary directory to hold intermediates
import command_line
import drivers # Functions for driving keyboard/mouse/windows, OS-specific
import operators # Functions that, given two bitmaps as input, produce
# output depending on the performance of an operation
import scrapers # Functions that know how to capture a render from
# particular browsers
def CreateCommand(cmdline):
  """Inserts the command and arguments into a command line for parsing.

  Registers the ``compare2`` command with its validator and executor, then
  declares every option plus the dependency/exclusion rules between them.
  """
  cmd = cmdline.AddCommand(
    ["compare2"],
    "Compares the output of two browsers on the same URL or list of URLs",
    ValidateCompare2,
    ExecuteCompare2)

  # --- Browser executables, versions, and optional display names ---
  cmd.AddArgument(
    ["-b1", "--browser1"], "Full path to first browser's executable",
    type="readfile", metaname="PATH", required=True)
  cmd.AddArgument(
    ["-b2", "--browser2"], "Full path to second browser's executable",
    type="readfile", metaname="PATH", required=True)
  cmd.AddArgument(
    ["-b", "--browser"], "Which browser to use", type="string",
    default="chrome")
  cmd.AddArgument(
    ["-b1v", "--browser1ver"], "Version of first browser", metaname="VERSION")
  cmd.AddArgument(
    ["-b2v", "--browser2ver"], "Version of second browser", metaname="VERSION")
  cmd.AddArgument(
    ["-b1n", "--browser1name"], "Optional name for first browser (used in "
    "directory to hold intermediate files)", metaname="NAME")
  cmd.AddArgument(
    ["-b2n", "--browser2name"], "Optional name for second browser (used in "
    "directory to hold intermediate files)", metaname="NAME")
  cmd.AddArgument(
    ["-o", "--outdir"], "Directory to store scrape files", metaname="DIR")

  # --- URL selection: one URL, or a slice of a URL list file ---
  cmd.AddArgument(
    ["-u", "--url"], "URL to compare")
  cmd.AddArgument(
    ["-l", "--list"], "List of URLs to compare", type="readfile")
  cmd.AddMutualExclusion(["--url", "--list"])
  cmd.AddArgument(
    ["-s", "--startline"], "First line of URL list", type="int")
  cmd.AddArgument(
    ["-e", "--endline"], "Last line of URL list (exclusive)", type="int")
  cmd.AddArgument(
    ["-c", "--count"], "Number of lines of URL file to use", type="int")
  # Slice options only make sense together with --list.
  cmd.AddDependency("--startline", "--list")
  cmd.AddRequiredGroup(["--url", "--list"])
  cmd.AddDependency("--endline", "--list")
  cmd.AddDependency("--count", "--list")
  cmd.AddMutualExclusion(["--count", "--endline"])
  cmd.AddDependency("--count", "--startline")

  # --- Scrape behavior and output options ---
  cmd.AddArgument(
    ["-t", "--timeout"], "Amount of time (seconds) to wait for browser to "
    "finish loading",
    type="int", default=60)
  cmd.AddArgument(
    ["-log", "--logfile"], "File to write output", type="string", required=True)
  cmd.AddArgument(
    ["-sz", "--size"], "Browser window size", default=(800, 600), type="coords")
  cmd.AddArgument(
    ["-m", "--maskdir"], "Path that holds masks to use for comparison")
  cmd.AddArgument(
    ["-d", "--diffdir"], "Path to hold the difference of comparisons that fail")
def ValidateCompare2(command):
  """Validate the arguments to compare2. Raises ParseError if failed."""
  executables = (".exe", ".com", ".bat")
  # Both browser paths must point at something with an executable extension.
  for browser_arg in ("--browser1", "--browser2"):
    extension = os.path.splitext(command[browser_arg])[1].lower()
    if extension not in executables:
      raise command_line.ParseError("Browser filename must be an executable")
def ExecuteCompare2(command):
  """Executes the Compare2 command.

  Scrapes every URL with both configured browsers, compares the two
  captures with the ``equals_with_mask`` operator, and writes one result
  line per URL ("<url> <result1> <result2> <verdict>") to the log file.
  Optionally saves difference bitmaps for mismatching pages.
  """
  if command["--url"]:
    url_list = [command["--url"]]
  else:
    startline = command["--startline"]
    if command["--count"]:
      endline = startline+command["--count"]
    else:
      endline = command["--endline"]
    # Fix: close the URL-list file promptly instead of leaking the handle
    # returned by the bare open(...).readlines() expression.
    with open(command["--list"], "r") as url_file:
      url_list = [url.strip()
                  for url in url_file.readlines()[startline:endline]]

  outdir = command["--outdir"]
  if not outdir: outdir = tempfile.gettempdir()

  scrape_info_list = []

  class ScrapeInfo(object):
    """Helper class to hold information about a scrape."""
    __slots__ = ["browser_path", "scraper", "outdir", "result"]

  # Build one ScrapeInfo per browser (indices 1 and 2 match the
  # --browser1/--browser2 option families).
  for index in xrange(1, 3):
    scrape_info = ScrapeInfo()
    scrape_info.browser_path = command["--browser%d" % index]
    scrape_info.scraper = scrapers.GetScraper(
      (command["--browser"], command["--browser%dver" % index]))

    if command["--browser%dname" % index]:
      scrape_info.outdir = os.path.join(outdir,
                                        command["--browser%dname" % index])
    else:
      scrape_info.outdir = os.path.join(outdir, str(index))

    drivers.windowing.PreparePath(scrape_info.outdir)
    scrape_info_list.append(scrape_info)

  compare = operators.GetOperator("equals_with_mask")

  # Fix: ensure the log file is flushed and closed even if a scrape or
  # comparison raises (it was previously never closed).
  with open(command["--logfile"], "w") as log_file:
    for url in url_list:
      success = True

      for scrape_info in scrape_info_list:
        scrape_info.result = scrape_info.scraper.Scrape(
          [url], scrape_info.outdir, command["--size"], (0, 0),
          command["--timeout"], path=scrape_info.browser_path)

        # Scrape() appears to return a falsy value on success and an error
        # description otherwise -- TODO confirm against scraper modules.
        if not scrape_info.result:
          scrape_info.result = "success"
        else:
          success = False

      result = "unknown"

      if success:
        result = "equal"

        file1 = drivers.windowing.URLtoFilename(
          url, scrape_info_list[0].outdir, ".bmp")
        file2 = drivers.windowing.URLtoFilename(
          url, scrape_info_list[1].outdir, ".bmp")

        # Compare() returns None when the bitmaps match within the mask,
        # otherwise a tuple whose second element is the diff image.
        comparison_result = compare.Compare(file1, file2,
                                            maskdir=command["--maskdir"])

        if comparison_result is not None:
          result = "not-equal"

          if command["--diffdir"]:
            comparison_result[1].save(
              drivers.windowing.URLtoFilename(url, command["--diffdir"],
                                              ".bmp"))

      # TODO(jhaas): maybe use the logging module rather than raw file writes
      log_file.write("%s %s %s %s\n" % (url,
                                        scrape_info_list[0].result,
                                        scrape_info_list[1].result,
                                        result))
| bsd-3-clause |
SmokinCaterpillar/pypet | pypet/tests/integration/pipeline_test.py | 2 | 10281 | __author__ = 'Robert Meyer'
import os
import logging
import platform
from pypet.tests.testutils.ioutils import unittest
from pypet.trajectory import Trajectory
from pypet.environment import Environment
from pypet.parameter import Parameter
from pypet.tests.testutils.ioutils import run_suite, make_temp_dir, \
get_root_logger, parse_args, get_log_config
from pypet.tests.testutils.data import TrajectoryComparator
from pypet.utils.mpwrappers import QueueStorageServiceSender, PipeStorageServiceSender, LockWrapper, \
ReferenceWrapper
import time
class Multiply(object):
    """Callable test job: stores ``x*y + i + w`` as result ``z``.

    Implemented as a class (not a plain function) so that pickling of a
    stateful callable is exercised by the multiprocessing tests.
    """
    def __init__(self):
        # Arbitrary instance state; only there to make the callable
        # non-trivial to serialize.
        self.var=42
    def __call__(self, traj, i, w=0):
        if traj.v_idx == 0:
            # to shuffle results
            time.sleep(0.2)
        z = traj.x * traj.y + i + w
        zres = traj.f_add_result('z', z)
        # Exercise group/link creation with the '$' wildcard (expands to
        # the current run name inside pypet).
        g=traj.res.f_add_group('I.link.to.$')
        g.f_add_link('z', zres)
        # Create the shared group only once, then link this run's result.
        if 'jjj.kkk' not in traj:
            h = traj.res.f_add_group('jjj.kkk')
        else:
            h = traj.jjj.kkk
        h.f_add_link('$', zres)
        return z
class CustomParameter(Parameter):
    """Parameter subclass used to exercise the ``dynamic_imports`` machinery.

    The override is a pure pass-through; the subclass only needs to exist
    under a different name so the environment must import it dynamically.
    """
    def __init__(self, *args, **kwargs):
        super(CustomParameter, self).__init__(*args, **kwargs)
def postproc(traj, results, idx):
    """Post-processing hook that grows the trajectory in two steps.

    First call (4 runs) returns a new exploration dict (-> 6 runs); second
    call (6 runs) expands directly via ``f_expand`` (-> 8 runs).  ``idx``
    is the extra positional argument given to ``f_add_postprocessing``.
    """
    get_root_logger().info(idx)
    if isinstance(traj.v_storage_service, (LockWrapper, ReferenceWrapper)):
        traj.f_load_skeleton()
    if isinstance(traj.v_storage_service, (QueueStorageServiceSender, PipeStorageServiceSender)):
        # Queue/pipe senders are write-only; loading must raise.
        try:
            traj.f_load()
            raise RuntimeError('Should not load')
        except NotImplementedError:
            pass
    if len(results) <= 4 and len(traj) == 4:
        return {'x':[1,2], 'y':[1,2]}
    if len(results) <= 6 and len(traj) == 6:
        traj.f_expand({'x':[2,3], 'y':[0,1]})
def postproc_with_iter_args(traj, results, idx):
    """Like :func:`postproc`, but also returns new iterable run arguments.

    Returns ``(exploration_dict, new_args, new_kwargs[, new_postproc_args])``
    tuples; asserts check that ``idx`` is forwarded between expansions.
    """
    get_root_logger().info(idx)
    if isinstance(traj.v_storage_service, (LockWrapper, ReferenceWrapper)):
        traj.f_load_skeleton()
    if len(results) <= 4 and len(traj) == 4:
        assert idx == 42
        return {'x':[1,2], 'y':[1,2]}, ([5,6],), {'w':[7,8]}, (43,)
    if len(results) <= 6 and len(traj) == 6:
        assert idx == 43
        traj.f_expand({'x':[2,3], 'y':[0,1]})
        return {}, ([7,8],), {'w':[9,10]}
def mypipelin_with_iter_args(traj):
    """Pipeline (map variant): returns run and postproc specs with
    per-run iterable arguments.

    NOTE(review): the name is missing an 'e' ("mypipelin"); kept as is
    because it is referenced by the tests below.
    """
    traj.f_add_parameter('x', 1, comment='1st')
    traj.f_add_parameter('y', 1, comment='1st')
    exp_dict = {'x':[1, 2, 3, 4],
                'y':[1, 2, 3, 4]}
    traj.f_explore(exp_dict)
    return (Multiply(), ([22,23,24,25],), {'w': [5,6,7,8] }), (postproc_with_iter_args, (42,))
def mypipeline(traj):
    """Standard pipeline: add the parameters, set up the initial
    exploration, and return the (runnable, args) and (postproc, args)
    specifications expected by ``Environment.pipeline``."""
    traj.f_add_parameter('x', 1, comment='1st')
    traj.f_add_parameter('y', 1, comment='1st')
    traj.f_explore({'x':[1, 2, 3, 4],
                    'y':[1, 2, 3, 4]})
    return (Multiply(), (22,)), (postproc, (42,))
class TestPostProc(TrajectoryComparator):
    """Integration tests: a trajectory grown via post-processing/pipelines
    must end up identical to one run with the full exploration up front.

    Each test builds two environments on the same HDF5 file: env1 uses
    post-processing/pipelines, env2 runs the final exploration directly;
    the resulting trajectories are then compared.
    """

    tags = 'integration', 'hdf5', 'environment', 'postproc'

    def setUp(self):
        # Subclasses override this to inject multiprocessing settings.
        self.env_kwargs={'add_time': True}

    def make_environment(self, filename, trajname='Test', log=True, **kwargs):
        """Create an Environment backed by a temp HDF5 file.

        Returns ``(env, filename, logfolder, cntfolder)``; most callers
        only use the first element.
        """
        #self.filename = '../../experiments/tests/HDF5/test.hdf5'
        filename = make_temp_dir(filename)
        logfolder = make_temp_dir(os.path.join('experiments',
                                               'tests',
                                               'Log'))
        cntfolder = make_temp_dir(os.path.join('experiments',
                                               'tests',
                                               'cnt'))
        if log:
            log_config = get_log_config()
        else:
            log_config = None
        env = Environment(trajectory=trajname,
                          # log_levels=logging.INFO,
                          # log_config=None,
                          log_config=log_config,
                          dynamic_imports=[CustomParameter],
                          filename=filename, log_stdout=False,
                          **self.env_kwargs)
        return env, filename, logfolder, cntfolder

    def test_postprocessing(self):
        """Expanding via `postproc` must equal running the full exploration."""
        filename = 'testpostproc.hdf5'
        env1 = self.make_environment(filename, 'k1')[0]
        env2 = self.make_environment(filename, 'k2', log=False)[0]
        traj1 = env1.v_trajectory
        traj2 = env2.v_trajectory
        trajs = [traj1, traj2]
        # Pre-populate both trajectories identically so the comparison is
        # sensitive to run data only.
        traj1.f_add_result('test.run_00000000.f', 555)
        traj2.f_add_result('test.run_00000000.f', 555)
        traj1.f_add_link('linking', traj1.f_get('f'))
        traj2.f_add_link('linking', traj2.f_get('f'))
        for traj in trajs:
            traj.f_add_parameter('x', 1, comment='1st')
            traj.f_add_parameter('y', 1, comment='2nd')
        # traj2 gets the final, fully expanded exploration up front ...
        exp_dict2 = {'x':[1, 2, 3, 4, 1, 2, 2, 3],
                     'y':[1, 2, 3, 4, 1, 2, 0, 1]}
        traj2.f_explore(exp_dict2)
        # ... while traj1 starts small and is grown by `postproc`.
        exp_dict1 = {'x':[1, 2, 3, 4],
                     'y':[1, 2, 3, 4]}
        traj1.f_explore(exp_dict1)
        env2.f_run(Multiply(), 22)
        env1.f_add_postprocessing(postproc, 42)
        env1.f_run(Multiply(), 22)
        traj1.f_load(load_data=2)
        traj2.f_load(load_data=2)
        self.compare_trajectories(traj1, traj2)
        env1.f_disable_logging()
        env2.f_disable_logging()

    def test_postprocessing_iter_args(self):
        """Same as above, but using `f_run_map` with iterable arguments."""
        filename = 'testpostproc.hdf5'
        env1 = self.make_environment(filename, 'k1')[0]
        env2 = self.make_environment(filename, 'k2', log=False)[0]
        traj1 = env1.v_trajectory
        traj2 = env2.v_trajectory
        trajs = [traj1, traj2]
        traj1.f_add_result('test.run_00000000.f', 555)
        traj2.f_add_result('test.run_00000000.f', 555)
        traj1.f_add_link('linking', traj1.f_get('f'))
        traj2.f_add_link('linking', traj2.f_get('f'))
        for traj in trajs:
            traj.f_add_parameter('x', 1, comment='1st')
            traj.f_add_parameter('y', 1, comment='2nd')
        exp_dict2 = {'x':[1, 2, 3, 4, 1, 2, 2, 3],
                     'y':[1, 2, 3, 4, 1, 2, 0, 1]}
        traj2.f_explore(exp_dict2)
        exp_dict1 = {'x':[1, 2, 3, 4],
                     'y':[1, 2, 3, 4]}
        traj1.f_explore(exp_dict1)
        # env2's argument lists are the concatenation of the original runs
        # and the arguments `postproc_with_iter_args` hands back.
        env2.f_run_map(Multiply(), [22,23,24,25,5,6,7,8], w=[5,6,7,8,7,8,9,10])
        env1.f_add_postprocessing(postproc_with_iter_args, 42)
        env1.f_run_map(Multiply(), [22,23,24,25], w=[5,6,7,8])
        traj1.f_load(load_data=2)
        traj2.f_load(load_data=2)
        self.compare_trajectories(traj1, traj2)
        env1.f_disable_logging()
        env2.f_disable_logging()

    def test_pipeline(self):
        """A full pipeline run must equal a direct single-function run."""
        filename = 'testpostprocpipe.hdf5'
        env1, filename, _, _ = self.make_environment(filename, 'k1')
        env2 = self.make_environment(filename, 'k2', log=False)[0]
        traj1 = env1.v_trajectory
        traj2 = env2.v_trajectory
        trajs = [traj1, traj2]  # NOTE(review): unused, kept for symmetry
        traj2.f_add_parameter('x', 1, comment='1st')
        traj2.f_add_parameter('y', 1, comment='1st')
        exp_dict2 = {'x':[1, 2, 3, 4, 1, 2, 2, 3],
                     'y':[1, 2, 3, 4, 1, 2, 0, 1]}
        traj2.f_explore(exp_dict2)
        res1 = env1.pipeline(pipeline=mypipeline)
        self.are_results_in_order(res1)
        res2 = env2.f_run(Multiply(), 22)
        self.are_results_in_order(res2)
        # Reload traj1 from disk to verify persisted data, not cached state.
        traj_name = traj1.v_name
        traj1 = Trajectory(traj_name, add_time=False, filename=filename)
        traj1.f_load(load_data=2)
        traj2.f_load(load_data=2)
        self.compare_trajectories(traj1, traj2)
        env1.f_disable_logging()
        env2.f_disable_logging()

    def test_pipeline_with_iter_args(self):
        """Pipeline (map variant) with iterable arguments."""
        filename = 'testpostprocpipe.hdf5'
        env1 = self.make_environment(filename, 'k1')[0]
        env2 = self.make_environment(filename, 'k2', log=False)[0]
        traj1 = env1.v_trajectory
        traj2 = env2.v_trajectory
        trajs = [traj1, traj2]  # NOTE(review): unused, kept for symmetry
        traj2.f_add_parameter('x', 1, comment='1st')
        traj2.f_add_parameter('y', 1, comment='1st')
        exp_dict2 = {'x':[1, 2, 3, 4, 1, 2, 2, 3],
                     'y':[1, 2, 3, 4, 1, 2, 0, 1]}
        traj2.f_explore(exp_dict2)
        res1 = env1.pipeline_map(pipeline=mypipelin_with_iter_args)
        self.are_results_in_order(res1)
        res2 = env2.f_run_map(Multiply(), [22,23,24,25,5,6,7,8], w=[5,6,7,8,7,8,9,10])
        self.are_results_in_order(res2)
        traj1.f_load(load_data=2)
        traj2.f_load(load_data=2)
        self.compare_trajectories(traj1, traj2)
        env1.f_disable_logging()
        env2.f_disable_logging()
class TestMPPostProc(TestPostProc):
    """Re-run all post-processing tests with plain multiprocessing."""
    tags = 'integration', 'hdf5', 'environment', 'postproc', 'multiproc'
    def setUp(self):
        self.env_kwargs={'multiproc':True, 'ncores': 3, 'add_time': True}
class TestMPImmediatePostProcLock(TestPostProc):
    """Multiprocessing with immediate post-processing, LOCK wrap mode
    (the default)."""
    tags = 'integration', 'hdf5', 'environment', 'postproc', 'multiproc', 'lock'
    def setUp(self):
        self.env_kwargs={'multiproc':True, 'ncores': 2, 'immediate_postproc' : True,
                         'add_time': True}
class TestMPImmediatePostProcQueue(TestPostProc):
    """Multiprocessing with immediate post-processing, QUEUE wrap mode."""
    tags = 'integration', 'hdf5', 'environment', 'postproc', 'multiproc', 'queue'
    def setUp(self):
        self.env_kwargs={'multiproc':True, 'ncores': 2, 'immediate_postproc' : True,
                         'wrap_mode': 'QUEUE', 'add_time': True}
class TestMPImmediatePostProcLocal(TestPostProc):
    """Multiprocessing with immediate post-processing, LOCAL wrap mode."""
    tags = 'integration', 'hdf5', 'environment', 'postproc', 'multiproc', 'local'
    def setUp(self):
        self.env_kwargs={'multiproc':True, 'ncores': 2, 'immediate_postproc' : True,
                         'wrap_mode': 'LOCAL', 'add_time': True}
@unittest.skipIf(platform.system() == 'Windows', 'Pipes cannot be pickled!')
class TestMPImmediatePostProcPipe(TestPostProc):
    """Multiprocessing with immediate post-processing, PIPE wrap mode
    (skipped on Windows, where pipe handles cannot be pickled)."""
    tags = 'integration', 'hdf5', 'environment', 'postproc', 'multiproc', 'pipe'
    def setUp(self):
        self.env_kwargs={'multiproc':True, 'ncores': 2, 'immediate_postproc' : True,
                         'wrap_mode': 'PIPE', 'add_time': True}
# Fix: stray dataset/license residue fused onto the final line made it a
# syntax error; restore the plain script entry point.
if __name__ == '__main__':
    opt_args = parse_args()
    run_suite(**opt_args)
hcs/mailman | src/mailman/interfaces/roster.py | 3 | 2015 | # Copyright (C) 2007-2012 by the Free Software Foundation, Inc.
#
# This file is part of GNU Mailman.
#
# GNU Mailman is free software: you can redistribute it and/or modify it under
# the terms of the GNU General Public License as published by the Free
# Software Foundation, either version 3 of the License, or (at your option)
# any later version.
#
# GNU Mailman is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
# more details.
#
# You should have received a copy of the GNU General Public License along with
# GNU Mailman. If not, see <http://www.gnu.org/licenses/>.
"""Interface for a roster of members."""
from __future__ import absolute_import, unicode_literals
__metaclass__ = type
__all__ = [
'IRoster',
]
from zope.interface import Interface, Attribute
class IRoster(Interface):
    """A roster is a collection of `IMembers`."""

    # NOTE: the string arguments below are zope.interface Attribute
    # descriptions -- runtime values, not docstrings.
    name = Attribute(
        """The name for this roster.
        Rosters are considered equal if they have the same name.""")
    members = Attribute(
        """An iterator over all the IMembers managed by this roster.""")
    member_count = Attribute(
        """The number of members managed by this roster.""")
    users = Attribute(
        """An iterator over all the IUsers reachable by this roster.
        This returns all the users for all the members managed by this roster.
        """)
    addresses = Attribute(
        """An iterator over all the IAddresses reachable by this roster.
        This returns all the addresses for all the users for all the members
        managed by this roster.
        """)

    # Interface method declaration: implementations take only `address`
    # (no self in zope interface method signatures).
    def get_member(address):
        """Get the member for the given address.
        :param address: The email address to search for.
        :type address: text
        :return: The member if found, otherwise None
        :rtype: `IMember` or None
        """
| gpl-3.0 |
molobrakos/home-assistant | homeassistant/components/ads/__init__.py | 5 | 9643 | """Support for Automation Device Specification (ADS)."""
import threading
import struct
import logging
import ctypes
from collections import namedtuple
import asyncio
import async_timeout
import voluptuous as vol
from homeassistant.const import (
CONF_DEVICE, CONF_IP_ADDRESS, CONF_PORT, EVENT_HOMEASSISTANT_STOP)
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.entity import Entity
_LOGGER = logging.getLogger(__name__)

# Key under which the AdsHub instance is stored in hass.data.
DATA_ADS = 'data_ads'

# Supported Types
ADSTYPE_BOOL = 'bool'
ADSTYPE_BYTE = 'byte'
ADSTYPE_DINT = 'dint'
ADSTYPE_INT = 'int'
ADSTYPE_UDINT = 'udint'
ADSTYPE_UINT = 'uint'

# Configuration keys shared by the platforms of this component.
CONF_ADS_FACTOR = 'factor'
CONF_ADS_TYPE = 'adstype'
CONF_ADS_VALUE = 'value'
CONF_ADS_VAR = 'adsvar'
CONF_ADS_VAR_BRIGHTNESS = 'adsvar_brightness'

# Keys used inside AdsEntity._state_dict.
STATE_KEY_STATE = 'state'
STATE_KEY_BRIGHTNESS = 'brightness'

DOMAIN = 'ads'

SERVICE_WRITE_DATA_BY_NAME = 'write_data_by_name'

# Component configuration: ADS device (AMS net id), port, optional IP.
CONFIG_SCHEMA = vol.Schema({
    DOMAIN: vol.Schema({
        vol.Required(CONF_DEVICE): cv.string,
        vol.Required(CONF_PORT): cv.port,
        vol.Optional(CONF_IP_ADDRESS): cv.string,
    })
}, extra=vol.ALLOW_EXTRA)

# Schema for the ads.write_data_by_name service call.
SCHEMA_SERVICE_WRITE_DATA_BY_NAME = vol.Schema({
    vol.Required(CONF_ADS_TYPE):
        vol.In([ADSTYPE_INT, ADSTYPE_UINT, ADSTYPE_BYTE, ADSTYPE_BOOL,
                ADSTYPE_DINT, ADSTYPE_UDINT]),
    vol.Required(CONF_ADS_VALUE): vol.Coerce(int),
    vol.Required(CONF_ADS_VAR): cv.string,
})
def setup(hass, config):
    """Set up the ADS component.

    Connects to the PLC via pyads, stores the hub in ``hass.data``, hooks
    shutdown into the HA stop event, and registers the
    ``write_data_by_name`` service.  Returns False if the connection fails.
    """
    import pyads
    conf = config[DOMAIN]
    net_id = conf.get(CONF_DEVICE)
    ip_address = conf.get(CONF_IP_ADDRESS)
    port = conf.get(CONF_PORT)
    client = pyads.Connection(net_id, port, ip_address)
    # pyads is imported lazily, so mirror its PLC type constants onto
    # AdsHub for use by the platforms without importing pyads themselves.
    AdsHub.ADS_TYPEMAP = {
        ADSTYPE_BOOL: pyads.PLCTYPE_BOOL,
        ADSTYPE_BYTE: pyads.PLCTYPE_BYTE,
        ADSTYPE_DINT: pyads.PLCTYPE_DINT,
        ADSTYPE_INT: pyads.PLCTYPE_INT,
        ADSTYPE_UDINT: pyads.PLCTYPE_UDINT,
        ADSTYPE_UINT: pyads.PLCTYPE_UINT,
    }
    AdsHub.ADSError = pyads.ADSError
    AdsHub.PLCTYPE_BOOL = pyads.PLCTYPE_BOOL
    AdsHub.PLCTYPE_BYTE = pyads.PLCTYPE_BYTE
    AdsHub.PLCTYPE_DINT = pyads.PLCTYPE_DINT
    AdsHub.PLCTYPE_INT = pyads.PLCTYPE_INT
    AdsHub.PLCTYPE_UDINT = pyads.PLCTYPE_UDINT
    AdsHub.PLCTYPE_UINT = pyads.PLCTYPE_UINT
    try:
        ads = AdsHub(client)
    except pyads.ADSError:
        _LOGGER.error(
            "Could not connect to ADS host (netid=%s, ip=%s, port=%s)",
            net_id, ip_address, port)
        return False
    hass.data[DATA_ADS] = ads
    # Close the connection cleanly when Home Assistant stops.
    hass.bus.listen(EVENT_HOMEASSISTANT_STOP, ads.shutdown)
    def handle_write_data_by_name(call):
        """Write a value to the connected ADS device."""
        ads_var = call.data.get(CONF_ADS_VAR)
        ads_type = call.data.get(CONF_ADS_TYPE)
        value = call.data.get(CONF_ADS_VALUE)
        try:
            ads.write_by_name(ads_var, value, ads.ADS_TYPEMAP[ads_type])
        except pyads.ADSError as err:
            _LOGGER.error(err)
    hass.services.register(
        DOMAIN, SERVICE_WRITE_DATA_BY_NAME, handle_write_data_by_name,
        schema=SCHEMA_SERVICE_WRITE_DATA_BY_NAME)
    return True
# Tuple to hold data needed for notification:
#   hnotify      -- pyads notification handle
#   huser        -- pyads user handle
#   name         -- PLC variable name
#   plc_datatype -- pyads PLCTYPE_* constant used to decode the payload
#   callback     -- function(name, value) invoked on every change
NotificationItem = namedtuple(
    'NotificationItem', 'hnotify huser name plc_datatype callback'
)
class AdsHub:
    """Representation of an ADS connection.

    Thin, thread-safe wrapper around a ``pyads.Connection``; all reads,
    writes and notification management go through ``self._lock``.
    """

    def __init__(self, ads_client):
        """Initialize the ADS hub."""
        self._client = ads_client
        self._client.open()
        # All ADS devices are registered here
        self._devices = []
        # Maps pyads notification handle -> NotificationItem.
        self._notification_items = {}
        self._lock = threading.Lock()

    def shutdown(self, *args, **kwargs):
        """Shutdown ADS connection.

        Deletes every registered device notification before closing; errors
        are logged but not raised (called from the HA stop event).
        """
        import pyads
        _LOGGER.debug("Shutting down ADS")
        for notification_item in self._notification_items.values():
            _LOGGER.debug(
                "Deleting device notification %d, %d",
                notification_item.hnotify, notification_item.huser)
            try:
                self._client.del_device_notification(
                    notification_item.hnotify,
                    notification_item.huser
                )
            except pyads.ADSError as err:
                _LOGGER.error(err)
        try:
            self._client.close()
        except pyads.ADSError as err:
            _LOGGER.error(err)

    def register_device(self, device):
        """Register a new device."""
        self._devices.append(device)

    def write_by_name(self, name, value, plc_datatype):
        """Write a value to the device.  Errors are logged, not raised."""
        import pyads
        with self._lock:
            try:
                return self._client.write_by_name(name, value, plc_datatype)
            except pyads.ADSError as err:
                _LOGGER.error("Error writing %s: %s", name, err)

    def read_by_name(self, name, plc_datatype):
        """Read a value from the device.  Returns None on ADS error."""
        import pyads
        with self._lock:
            try:
                return self._client.read_by_name(name, plc_datatype)
            except pyads.ADSError as err:
                _LOGGER.error("Error reading %s: %s", name, err)

    def add_device_notification(self, name, plc_datatype, callback):
        """Add a notification to the ADS devices.

        ``callback(name, value)`` will be invoked from
        ``_device_notification_callback`` whenever the variable changes.
        """
        import pyads
        attr = pyads.NotificationAttrib(ctypes.sizeof(plc_datatype))
        with self._lock:
            try:
                hnotify, huser = self._client.add_device_notification(
                    name, attr, self._device_notification_callback)
            except pyads.ADSError as err:
                _LOGGER.error("Error subscribing to %s: %s", name, err)
            else:
                hnotify = int(hnotify)
                self._notification_items[hnotify] = NotificationItem(
                    hnotify, huser, name, plc_datatype, callback)
                _LOGGER.debug(
                    "Added device notification %d for variable %s",
                    hnotify, name)

    def _device_notification_callback(self, notification, name):
        """Handle device notifications.

        Decodes the raw little-endian payload according to the registered
        PLC datatype and forwards the value to the stored callback.
        """
        contents = notification.contents
        hnotify = int(contents.hNotification)
        _LOGGER.debug("Received notification %d", hnotify)
        data = contents.data
        try:
            with self._lock:
                notification_item = self._notification_items[hnotify]
        except KeyError:
            _LOGGER.error("Unknown device notification handle: %d", hnotify)
            return
        # Parse data to desired datatype
        if notification_item.plc_datatype == self.PLCTYPE_BOOL:
            value = bool(struct.unpack('<?', bytearray(data)[:1])[0])
        elif notification_item.plc_datatype == self.PLCTYPE_INT:
            value = struct.unpack('<h', bytearray(data)[:2])[0]
        elif notification_item.plc_datatype == self.PLCTYPE_BYTE:
            value = struct.unpack('<B', bytearray(data)[:1])[0]
        elif notification_item.plc_datatype == self.PLCTYPE_UINT:
            value = struct.unpack('<H', bytearray(data)[:2])[0]
        elif notification_item.plc_datatype == self.PLCTYPE_DINT:
            value = struct.unpack('<i', bytearray(data)[:4])[0]
        elif notification_item.plc_datatype == self.PLCTYPE_UDINT:
            value = struct.unpack('<I', bytearray(data)[:4])[0]
        else:
            # Unknown type: hand the raw bytes to the callback.
            value = bytearray(data)
            _LOGGER.warning("No callback available for this datatype")
        notification_item.callback(notification_item.name, value)
class AdsEntity(Entity):
    """Representation of ADS entity.

    Base class for the ads platforms: pushes PLC variable changes into
    Home Assistant via device notifications (no polling).
    """

    def __init__(self, ads_hub, name, ads_var):
        """Initialize the ADS entity for PLC variable *ads_var*."""
        self._name = name
        self._unique_id = ads_var
        # Latest decoded values keyed by STATE_KEY_* constants.
        self._state_dict = {}
        self._state_dict[STATE_KEY_STATE] = None
        self._ads_hub = ads_hub
        self._ads_var = ads_var
        self._event = None

    async def async_initialize_device(
            self, ads_var, plctype, state_key=STATE_KEY_STATE, factor=None):
        """Register device notification.

        ``factor`` optionally scales the raw PLC value (value / factor).
        Waits up to 10 s for the first value before giving up.
        """
        def update(name, value):
            """Handle device notifications."""
            _LOGGER.debug('Variable %s changed its value to %d', name, value)
            if factor is None:
                self._state_dict[state_key] = value
            else:
                self._state_dict[state_key] = value / factor
            # Runs on the pyads notification thread, so hop onto the HA
            # event loop to set the asyncio event.
            asyncio.run_coroutine_threadsafe(async_event_set(), self.hass.loop)
            self.schedule_update_ha_state()

        async def async_event_set():
            """Set event in async context."""
            self._event.set()

        self._event = asyncio.Event()
        await self.hass.async_add_executor_job(
            self._ads_hub.add_device_notification,
            ads_var, plctype, update)
        try:
            with async_timeout.timeout(10):
                await self._event.wait()
        except asyncio.TimeoutError:
            _LOGGER.debug('Variable %s: Timeout during first update',
                          ads_var)

    @property
    def name(self):
        """Return the name of the entity."""
        return self._name

    @property
    def unique_id(self):
        """Return an unique identifier for this entity."""
        return self._unique_id

    @property
    def should_poll(self):
        """Return False because entity pushes its state to HA."""
        return False

    @property
    def available(self):
        """Return False if state has not been updated yet."""
        return self._state_dict[STATE_KEY_STATE] is not None
| apache-2.0 |
rsignell-usgs/notebook | system-test/Theme_2_Extreme_Events/Scenario_2A/ModelDataCompare_Waves/Model_Obs_Compare_Waves.py | 3 | 12427 | # -*- coding: utf-8 -*-
# <nbformat>3.0</nbformat>
# <markdowncell>
# ># IOOS System Test: [Extreme Events Theme:](https://github.com/ioos/system-test/wiki/Development-of-Test-Themes#theme-2-extreme-events) Coastal Inundation
# <markdowncell>
# ### Can we compare observed and modeled wave parameters?
# This notebook is based on [IOOS System Test: Inundation](http://nbviewer.ipython.org/github/ioos/system-test/blob/master/Theme_2_Extreme_Events/Scenario_2A_Coastal_Inundation/Scenario_2A_Water_Level_Signell.ipynb)
#
# Methodology:
# * Define temporal and spatial bounds of interest, as well as parameters of interest
# * Search for available service endpoints in the NGDC CSW catalog meeting search criteria
# * Extract OPeNDAP data endpoints from model datasets and SOS endpoints from observational datasets
# * Obtain observation data sets from stations within the spatial boundaries
# * Plot observation stations on a map (red marker if not enough data)
# * Using DAP (model) endpoints find all available models data sets that fall in the area of interest, for the specified time range, and extract a model grid cell closest to all the given station locations
# * Plot modelled and observed time series wave data on same axes for comparison
#
# <headingcell level=4>
# import required libraries
# <codecell>
import datetime as dt
from warnings import warn
from io import BytesIO
import folium
import netCDF4
from IPython.display import HTML
import iris
from iris.exceptions import CoordinateNotFoundError, ConstraintMismatchError
import matplotlib.pyplot as plt
from owslib.csw import CatalogueServiceWeb
from owslib import fes
import pandas as pd
from pyoos.collectors.ndbc.ndbc_sos import NdbcSos
import requests
from utilities import (fes_date_filter, collector2df, find_timevar, find_ij, nearxy, service_urls, mod_df,
get_coordinates, get_station_longName, inline_map)
# <headingcell level=4>
# Specify Temporal and Spatial conditions
# <codecell>
bounding_box_type = "box"
# Bounding Box [lon_min, lat_min, lon_max, lat_max]
area = {'Hawaii': [-160.0, 18.0, -154., 23.0],
        'Gulf of Maine': [-72.0, 41.0, -69.0, 43.0],
        'New York harbor region': [-75., 39., -71., 41.5],
        'Puerto Rico': [-75, 12, -55, 26],
        'East Coast': [-77, 34, -70, 40],
        'North West': [-130, 38, -121, 50],
        'Gulf of Mexico': [-92, 28, -84, 31],
        'Arctic': [-179, 63, -140, 80],
        'North East': [-74, 40, -69, 42],
        'Virginia Beach': [-76, 34, -74, 38]}
bounding_box = area['East Coast']
# temporal range - May 1 2014 - May 10 2014
start_date = dt.datetime(2014,5,1,0,50).strftime('%Y-%m-%d %H:%M')
end_date = dt.datetime(2014,5,10).strftime('%Y-%m-%d %H:00')
time_date_range = [start_date,end_date] #start_date_end_date
# datetime objects mirroring the string range for later comparisons.
jd_start = dt.datetime.strptime(start_date, '%Y-%m-%d %H:%M')
jd_stop = dt.datetime.strptime(end_date, '%Y-%m-%d %H:%M')
print start_date,'to',end_date
# <headingcell level=4>
# Specify data names of interest
# <codecell>
# put the names in a dict for ease of access
data_dict = {}
sos_name = 'waves'
# Known CF/ad-hoc spellings of significant wave height across catalogs.
data_dict["waves"] = {"names":['sea_surface_wave_significant_height',
                               'significant_wave_height',
                               'significant_height_of_wave',
                               'sea_surface_wave_significant_height(m)',
                               'sea_surface_wave_significant_height (m)',
                               'water_surface_height'],
                      "sos_name":["waves"]}
# <headingcell level=3>
# Search CSW for datasets of interest
# <codecell>
endpoint = 'http://www.ngdc.noaa.gov/geoportal/csw' # NGDC Geoportal
csw = CatalogueServiceWeb(endpoint,timeout=60)
# <codecell>
# convert User Input into FES filters
start,stop = fes_date_filter(start_date,end_date)
bbox = fes.BBox(bounding_box)
# use the search names to create one OR'd free-text filter
or_filt = fes.Or([fes.PropertyIsLike(propertyname='apiso:AnyText',literal='*%s*' % val,
                                     escapeChar='\\',wildCard='*',singleChar='?') for val in data_dict["waves"]["names"]])
# try request using multiple filters "and" syntax: [[filter1,filter2]]
filter_list = [fes.And([ bbox, start, stop, or_filt]) ]
csw.getrecords2(constraints=filter_list,maxrecords=1000,esn='full')
print str(len(csw.records)) + " csw records found"
# <markdowncell>
# #### Dap URLs
# <codecell>
dap_urls = service_urls(csw.records)
# remove duplicates and organize
dap_urls = sorted(set(dap_urls))
print "Total DAP:",len(dap_urls)
# print the first 5...
print "\n".join(dap_urls[0:5])
# <markdowncell>
# #### SOS URLs
# <codecell>
sos_urls = service_urls(csw.records,service='sos:url')
# remove duplicates and organize
sos_urls = sorted(set(sos_urls))
print "Total SOS:",len(sos_urls)
print "\n".join(sos_urls)
# <markdowncell>
# ###Get most recent observations from all stations in bounding box
# <codecell>
start_time = dt.datetime.strptime(start_date,'%Y-%m-%d %H:%M')
end_time = dt.datetime.strptime(end_date,'%Y-%m-%d %H:%M')
iso_start = start_time.strftime('%Y-%m-%dT%H:%M:%SZ')
iso_end = end_time.strftime('%Y-%m-%dT%H:%M:%SZ')
# Define the SOS collector
collector = NdbcSos()
print collector.server.identification.title
collector.variables = data_dict["waves"]["sos_name"]
collector.server.identification.title
# Don't specify start and end date in the filter and the most recent observation will be returned
collector.filter(bbox=bounding_box,
variables=data_dict["waves"]["sos_name"])
response = collector.raw(responseFormat="text/csv")
obs_loc_df = pd.read_csv(BytesIO(response.encode('utf-8')),
parse_dates=True,
index_col='date_time')
# Now let's specify start and end times
collector.start_time = start_time
collector.end_time = end_time
ofrs = collector.server.offerings
# <codecell>
obs_loc_df.head()
# <codecell>
stations = [sta.split(':')[-1] for sta in obs_loc_df['station_id']]
obs_lon = [sta for sta in obs_loc_df['longitude (degree)']]
obs_lat = [sta for sta in obs_loc_df['latitude (degree)']]
# <headingcell level=3>
# Request CSV response from SOS and convert to Pandas DataFrames
# <codecell>
ts_rng = pd.date_range(start=start_date, end=end_date)
ts = pd.DataFrame(index=ts_rng)
# Save all of the observation data into a list of dataframes
obs_df = []
# Create a list of dataframes for just wave heights for comparing with modeled wave heights later
Hs_obs_df = []
for sta in stations:
raw_df = collector2df(collector, sta, sos_name)
obs_df.append(pd.DataFrame(pd.concat([raw_df, ts],axis=1)))
obs_df[-1].name = raw_df.name
if raw_df.empty:
Hs_obs_df.append(pd.DataFrame())
else:
Hs_obs_df.append(pd.DataFrame(pd.concat([raw_df, ts],axis=1)['sea_surface_wave_significant_height (m)']))
Hs_obs_df[-1].name = raw_df.name
# <markdowncell>
# ### Plot the Observation Stations on Map
# <codecell>
# Stations with fewer than this many observations are flagged in red.
min_data_pts = 20
# Find center of bounding box
lat_center = abs(bounding_box[3]-bounding_box[1])/2 + bounding_box[1]
lon_center = abs(bounding_box[0]-bounding_box[2])/2 + bounding_box[0]
m = folium.Map(location=[lat_center, lon_center], zoom_start=6)
n = 0
for df in obs_df:
    # get the station data from the sos end point
    longname = df.name
    lat = obs_loc_df['latitude (degree)'][n]
    lon = obs_loc_df['longitude (degree)'][n]
    popup_string = ('<b>Station:</b><br>'+ longname)
    if len(df) > min_data_pts:
        m.simple_marker([lat, lon], popup=popup_string)
    else:
        # Red circle marker for stations without enough data.
        popup_string += '<br>Not enough data available<br>requested pts: ' + str(min_data_pts ) + '<br>Available pts: ' + str(len(Hs_obs_df[n]))
        m.circle_marker([lat, lon], popup=popup_string, fill_color='#ff0000', radius=10000, line_color='#ff0000')
    n += 1
# Draw the bounding box outline.
m.line(get_coordinates(bounding_box,bounding_box_type), line_color='#FF0000', line_weight=5)
inline_map(m)
# <codecell>
### Plot Hs and Tp for each station
# <codecell>
for df in obs_df:
    if len(df) > min_data_pts:
        fig, axes = plt.subplots(nrows=1, ncols=2, figsize=(20,5))
        df['sea_surface_wave_significant_height (m)'].plot(ax=axes[0], color='r')
        axes[0].set_title(df.name)
        axes[0].set_ylabel('Hs (m)')
        df['sea_surface_wave_peak_period (s)'].plot(ax=axes[1])
        axes[1].set_title(df.name)
        axes[1].set_ylabel('Tp (s)')
# <markdowncell>
# ###Get model output from OPeNDAP URLS
# Try to open all the OPeNDAP URLS using Iris from the British Met Office. If we can open in Iris, we know it's a model result.
# <codecell>
# Constrain loading to cubes whose standard_name matches a wave height alias.
name_in_list = lambda cube: cube.standard_name in data_dict['waves']['names']
constraint = iris.Constraint(cube_func=name_in_list)
# <codecell>
# Use only data within 0.04 degrees (about 4 km).
max_dist = 0.04
# Use only data where the standard deviation of the time series exceeds 0.01 m (1 cm).
# This eliminates flat line model time series that come from land points that should have had missing values.
min_var = 0.01
for url in dap_urls:
if 'cdip' in url:
# The CDIP buoys are known to be observed data, so let's just skip
continue
try:
a = iris.load_cube(url, constraint)
# take first 20 chars for model name
mod_name = a.attributes['title'][0:20]
r = a.shape
timevar = find_timevar(a)
lat = a.coord(axis='Y').points
lon = a.coord(axis='X').points
jd = timevar.units.num2date(timevar.points)
start = timevar.units.date2num(jd_start)
istart = timevar.nearest_neighbour_index(start)
stop = timevar.units.date2num(jd_stop)
istop = timevar.nearest_neighbour_index(stop)
# Only proceed if we have data in the range requested.
if istart != istop:
nsta = len(stations)
if len(r) == 3:
print('[Structured grid model]:', url)
d = a[0, :, :].data
# Find the closest non-land point from a structured grid model.
if len(lon.shape) == 1:
lon, lat = np.meshgrid(lon, lat)
j, i, dd = find_ij(lon, lat, d, obs_lon, obs_lat)
for n in range(nsta):
# Only use if model cell is within 0.01 degree of requested
# location.
if dd[n] <= max_dist:
arr = a[istart:istop, j[n], i[n]].data
if arr.std() >= min_var:
c = mod_df(arr, timevar, istart, istop,
mod_name, ts)
name = Hs_obs_df[n].name
Hs_obs_df[n] = pd.concat([Hs_obs_df[n], c], axis=1)
Hs_obs_df[n].name = name
elif len(r) == 2:
print('[Unstructured grid model]:', url)
# Find the closest point from an unstructured grid model.
index, dd = nearxy(lon.flatten(), lat.flatten(),
obs_lon, obs_lat)
for n in range(nsta):
# Only use if model cell is within 0.1 degree of requested
# location.
if dd[n] <= max_dist:
arr = a[istart:istop, index[n]].data
if arr.std() >= min_var:
c = mod_df(arr, timevar, istart, istop,
mod_name, ts)
name = Hs_obs_df[n].name
Hs_obs_df[n] = pd.concat([Hs_obs_df[n], c], axis=1)
Hs_obs_df[n].name = name
elif len(r) == 1:
print('[Data]:', url)
except (ValueError, RuntimeError, CoordinateNotFoundError,
ConstraintMismatchError) as e:
warn("\n%s\n" % e)
pass
# <markdowncell>
# ### Plot Modeled vs Obs Wave Height
# <codecell>
for df in Hs_obs_df:
# Make sure there is obs data at the station for comparison
if 'sea_surface_wave_significant_height (m)' in df.columns:
ax = df.plot(figsize=(14, 6), title=df.name, legend=False)
plt.setp(ax.lines[0], linewidth=4.0, color='0.7', zorder=1, marker='.')
ax.legend()
ax.set_ylabel('m')
| mit |
asen6/amartyasenguptadotcom | django/contrib/auth/tests/tokens.py | 227 | 3416 | from datetime import date, timedelta
from django.conf import settings
from django.contrib.auth.models import User, AnonymousUser
from django.contrib.auth.tokens import PasswordResetTokenGenerator
from django.test import TestCase
class TokenGeneratorTest(TestCase):
    """Tests for django.contrib.auth.tokens.PasswordResetTokenGenerator.

    NOTE(review): legacy Django 1.x / Python 2 module -- ``unicode`` and
    ``django.utils.hashcompat`` used below do not exist on Python 3.
    """
    def test_make_token(self):
        """
        Ensure that we can make a token and that it is valid
        """
        user = User.objects.create_user('tokentestuser', 'test2@example.com', 'testpw')
        p0 = PasswordResetTokenGenerator()
        tk1 = p0.make_token(user)
        self.assertTrue(p0.check_token(user, tk1))
    def test_10265(self):
        """
        Ensure that the token generated for a user created in the same request
        will work correctly.
        """
        # See ticket #10265
        user = User.objects.create_user('comebackkid', 'test3@example.com', 'testpw')
        p0 = PasswordResetTokenGenerator()
        tk1 = p0.make_token(user)
        # Re-fetching the user from the DB must yield an identical token.
        reload = User.objects.get(username='comebackkid')
        tk2 = p0.make_token(reload)
        self.assertEqual(tk1, tk2)
    def test_timeout(self):
        """
        Ensure we can use the token after n days, but no greater.
        """
        # Uses a mocked version of PasswordResetTokenGenerator so we can change
        # the value of 'today'
        class Mocked(PasswordResetTokenGenerator):
            def __init__(self, today):
                self._today_val = today
            def _today(self):
                return self._today_val
        user = User.objects.create_user('tokentestuser', 'test2@example.com', 'testpw')
        p0 = PasswordResetTokenGenerator()
        tk1 = p0.make_token(user)
        # Valid right up to the timeout boundary...
        p1 = Mocked(date.today() + timedelta(settings.PASSWORD_RESET_TIMEOUT_DAYS))
        self.assertTrue(p1.check_token(user, tk1))
        # ...and invalid one day past it.
        p2 = Mocked(date.today() + timedelta(settings.PASSWORD_RESET_TIMEOUT_DAYS + 1))
        self.assertFalse(p2.check_token(user, tk1))
    def test_django12_hash(self):
        """
        Ensure we can use the hashes generated by Django 1.2
        """
        # Hard code in the Django 1.2 algorithm (not the result, as it is time
        # dependent)
        def _make_token(user):
            from django.utils.hashcompat import sha_constructor
            from django.utils.http import int_to_base36
            timestamp = (date.today() - date(2001,1,1)).days
            ts_b36 = int_to_base36(timestamp)
            hash = sha_constructor(settings.SECRET_KEY + unicode(user.id) +
                user.password + user.last_login.strftime('%Y-%m-%d %H:%M:%S') +
                unicode(timestamp)).hexdigest()[::2]
            return "%s-%s" % (ts_b36, hash)
        user = User.objects.create_user('tokentestuser', 'test2@example.com', 'testpw')
        p0 = PasswordResetTokenGenerator()
        tk1 = _make_token(user)
        self.assertTrue(p0.check_token(user, tk1))
    def test_date_length(self):
        """
        Make sure we don't allow overly long dates, causing a potential DoS.
        """
        user = User.objects.create_user('ima1337h4x0r', 'test4@example.com', 'p4ssw0rd')
        p0 = PasswordResetTokenGenerator()
        # This will put a 14-digit base36 timestamp into the token, which is too large.
        tk1 = p0._make_token_with_timestamp(user, 175455491841851871349)
        self.assertFalse(p0.check_token(user, tk1))
| bsd-3-clause |
gauribhoite/personfinder | env/google_appengine/lib/django-1.3/django/core/management/sql.py | 229 | 8259 | import os
import re
from django.conf import settings
from django.core.management.base import CommandError
from django.db import models
from django.db.models import get_models
def sql_create(app, style, connection):
    "Returns a list of the CREATE TABLE SQL statements for the given app."
    if connection.settings_dict['ENGINE'] == 'django.db.backends.dummy':
        # This must be the "dummy" database backend, which means the user
        # hasn't set ENGINE for the database.
        # (Fixed typo in the user-facing message: "DATBASES" -> "DATABASES",
        # which is the actual name of the setting the user must edit.)
        raise CommandError("Django doesn't know which syntax to use for your SQL statements,\n" +
            "because you haven't specified the ENGINE setting for the database.\n" +
            "Edit your settings file and change DATABASES['default']['ENGINE'] to something like\n" +
            "'django.db.backends.postgresql' or 'django.db.backends.mysql'.")
    # Get installed models, so we generate REFERENCES right.
    # We trim models from the current app so that the sqlreset command does not
    # generate invalid SQL (leaving models out of known_models is harmless, so
    # we can be conservative).
    app_models = models.get_models(app, include_auto_created=True)
    final_output = []
    tables = connection.introspection.table_names()
    known_models = set([model for model in connection.introspection.installed_models(tables) if model not in app_models])
    # Maps referenced model -> list of (model, field) pairs whose REFERENCES
    # clauses must be emitted once the referenced table exists.
    pending_references = {}
    for model in app_models:
        output, references = connection.creation.sql_create_model(model, style, known_models)
        final_output.extend(output)
        for refto, refs in references.items():
            pending_references.setdefault(refto, []).extend(refs)
            if refto in known_models:
                final_output.extend(connection.creation.sql_for_pending_references(refto, style, pending_references))
        final_output.extend(connection.creation.sql_for_pending_references(model, style, pending_references))
        # Keep track of the fact that we've created the table for this model.
        known_models.add(model)
    # Handle references to tables that are from other apps
    # but don't exist physically.
    not_installed_models = set(pending_references.keys())
    if not_installed_models:
        alter_sql = []
        for model in not_installed_models:
            alter_sql.extend(['-- ' + sql for sql in
                              connection.creation.sql_for_pending_references(model, style, pending_references)])
        if alter_sql:
            final_output.append('-- The following references should be added but depend on non-existent tables:')
            final_output.extend(alter_sql)
    return final_output
def sql_delete(app, style, connection):
    "Returns a list of the DROP TABLE SQL statements for the given app."
    # This should work even if a connection isn't available
    try:
        cursor = connection.cursor()
    except:
        # NOTE(review): deliberately broad except -- any failure to connect
        # means we generate SQL without introspecting existing tables.
        cursor = None
    # Figure out which tables already exist
    if cursor:
        table_names = connection.introspection.get_table_list(cursor)
    else:
        table_names = []
    output = []
    # Output DROP TABLE statements for standard application tables.
    to_delete = set()
    # Maps referenced model -> list of (model, field) pairs, consumed by
    # sql_destroy_model to drop FK constraints in a safe order.
    references_to_delete = {}
    app_models = models.get_models(app, include_auto_created=True)
    for model in app_models:
        if cursor and connection.introspection.table_name_converter(model._meta.db_table) in table_names:
            # The table exists, so it needs to be dropped
            opts = model._meta
            for f in opts.local_fields:
                if f.rel and f.rel.to not in to_delete:
                    references_to_delete.setdefault(f.rel.to, []).append( (model, f) )
            to_delete.add(model)
    for model in app_models:
        if connection.introspection.table_name_converter(model._meta.db_table) in table_names:
            output.extend(connection.creation.sql_destroy_model(model, references_to_delete, style))
    # Close database connection explicitly, in case this output is being piped
    # directly into a database client, to avoid locking issues.
    if cursor:
        cursor.close()
        connection.close()
    return output[::-1] # Reverse it, to deal with table dependencies.
def sql_reset(app, style, connection):
    "Returns a list of the DROP TABLE SQL, then the CREATE TABLE SQL, for the given module."
    # This command breaks a lot and should be deprecated
    import warnings
    warnings.warn(
        'This command has been deprecated. The command ``sqlflush`` can be used to delete everything. You can also use ALTER TABLE or DROP TABLE statements manually.',
        PendingDeprecationWarning
    )
    # Drop statements first, then the full re-creation SQL.
    drop_statements = sql_delete(app, style, connection)
    create_statements = sql_all(app, style, connection)
    return drop_statements + create_statements
def sql_flush(style, connection, only_django=False):
    """
    Returns a list of the SQL statements used to flush the database.
    If only_django is True, then only table names that have associated Django
    models and are in INSTALLED_APPS will be included.
    """
    introspection = connection.introspection
    if only_django:
        tables = introspection.django_table_names(only_existing=True)
    else:
        tables = introspection.table_names()
    # Delegate statement generation to the backend's operations class.
    return connection.ops.sql_flush(style, tables, introspection.sequence_list())
def sql_custom(app, style, connection):
    "Returns a list of the custom table modifying SQL statements for the given app."
    # NOTE: a previous revision computed an unused ``app_dir`` local here;
    # custom_sql_for_model() derives the app's sql/ directory itself.
    output = []
    for model in get_models(app):
        output.extend(custom_sql_for_model(model, style, connection))
    return output
def sql_indexes(app, style, connection):
    "Returns a list of the CREATE INDEX SQL statements for all models in the given app."
    # Flatten the per-model index statements into a single list.
    return [statement
            for model in models.get_models(app)
            for statement in connection.creation.sql_indexes_for_model(model, style)]
def sql_all(app, style, connection):
    "Returns a list of CREATE TABLE SQL, initial-data inserts, and CREATE INDEX SQL for the given module."
    # Table creation first, then custom per-model SQL, then indexes.
    statements = sql_create(app, style, connection)
    statements += sql_custom(app, style, connection)
    statements += sql_indexes(app, style, connection)
    return statements
def custom_sql_for_model(model, style, connection):
    # Collects post-create SQL declared on fields plus any SQL found in the
    # app's sql/ directory for this model (backend-specific file first).
    # NOTE(review): Python 2 only -- the ``ur"..."`` literal below is a
    # SyntaxError on Python 3.
    opts = model._meta
    app_dir = os.path.normpath(os.path.join(os.path.dirname(models.get_app(model._meta.app_label).__file__), 'sql'))
    output = []
    # Post-creation SQL should come before any initial SQL data is loaded.
    # However, this should not be done for models that are unmanaged or
    # for fields that are part of a parent model (via model inheritance).
    if opts.managed:
        post_sql_fields = [f for f in opts.local_fields if hasattr(f, 'post_create_sql')]
        for f in post_sql_fields:
            output.extend(f.post_create_sql(style, model._meta.db_table))
    # Some backends can't execute more than one SQL statement at a time,
    # so split into separate statements.
    statements = re.compile(r";[ \t]*$", re.M)
    # Find custom SQL, if it's available.
    backend_name = connection.settings_dict['ENGINE'].split('.')[-1]
    sql_files = [os.path.join(app_dir, "%s.%s.sql" % (opts.object_name.lower(), backend_name)),
                 os.path.join(app_dir, "%s.sql" % opts.object_name.lower())]
    for sql_file in sql_files:
        if os.path.exists(sql_file):
            # NOTE(review): file is not closed if read/decode raises (no
            # try/finally or ``with``); acceptable for a CLI helper.
            fp = open(sql_file, 'U')
            for statement in statements.split(fp.read().decode(settings.FILE_CHARSET)):
                # Remove any comments from the file
                # NOTE(review): ``\Z`` inside a character class is not the
                # end-of-string anchor; ``[\n\Z]`` matches '\n', '\' or 'Z'.
                statement = re.sub(ur"--.*([\n\Z]|$)", "", statement)
                if statement.strip():
                    output.append(statement + u";")
            fp.close()
    return output
def emit_post_sync_signal(created_models, verbosity, interactive, db):
    # Emit the post_sync signal for every application.
    # NOTE(review): Python 2 module -- the bare ``print`` statement below is
    # not valid Python 3 syntax.
    for app in models.get_apps():
        # App label is the second-to-last component of the models module path.
        app_name = app.__name__.split('.')[-2]
        if verbosity >= 2:
            print "Running post-sync handlers for application", app_name
        models.signals.post_syncdb.send(sender=app, app=app,
            created_models=created_models, verbosity=verbosity,
            interactive=interactive, db=db)
| apache-2.0 |
codeparticle/Tenable.io-SDK-for-Python | examples/scans.py | 1 | 3840 | import os
from datetime import datetime
from time import time
from tenable_io.api.models import Scan
from tenable_io.api.scans import ScanExportRequest
from tenable_io.client import TenableIOClient
from tenable_io.exceptions import TenableIOApiException
def example(test_name, test_file):
    """Walk through the Tenable.io scan helper API end to end.

    test_name -- callable that turns a base string into a unique scan name
    test_file -- callable that turns a base file name into a scratch path
    """
    # Generate unique name and file.
    scan_name = test_name(u'example scan')
    test_nessus_file = test_file(u'example_report.nessus')
    test_pdf_file = test_file(u'example_report.pdf')
    '''
    Instantiate an instance of the TenableIOClient.
    '''
    client = TenableIOClient()
    '''
    Create a scan.
    '''
    scan = client.scan_helper.create(
        name=scan_name,
        text_targets='tenable.com',
        template='discovery'
    )
    assert scan.name() == scan_name
    '''
    Retrieve a scan by ID.
    '''
    scan_b = client.scan_helper.id(scan.id)
    assert scan_b is not scan
    assert scan_b.name() == scan_name
    '''
    Select scans by name.
    '''
    scans = client.scan_helper.scans(name=scan_name)
    assert scans[0].name() == scan_name
    '''
    Select scans by name with regular expression.
    '''
    scans = client.scan_helper.scans(name_regex=r'.*example scan.*')
    assert len(scans) > 0
    '''
    Launch a scan, then download when scan is completed.
    Note: The `download` method blocks until the scan is completed and the report is downloaded.
    '''
    scan.launch().download(test_pdf_file)
    assert os.path.isfile(test_pdf_file)
    os.remove(test_pdf_file)
    '''
    Launch a scan, pause it, resume it, then stop it.
    '''
    scan.launch().pause()
    assert scan.status() == Scan.STATUS_PAUSED
    scan.resume().stop()
    assert scan.status() == Scan.STATUS_CANCELED
    '''
    Stop a running scan if it does not complete within a specific duration.
    '''
    start = time()
    scan.launch().wait_or_cancel_after(10)
    assert time() - start >= 10
    '''
    Retrieve the history of a scan since a specific date or all.
    Note: The `since` argument is optional, all the history if omitted.
    '''
    histories = scan.histories(since=datetime(2016, 12, 1))
    assert len(histories) > 0
    '''
    Download the report for a specific scan in history.
    '''
    scan.download(test_pdf_file, history_id=histories[0].history_id)
    assert os.path.isfile(test_pdf_file)
    os.remove(test_pdf_file)
    '''
    Create a new scan by copying a scan.
    '''
    scan_copy = scan.copy()
    assert scan_copy.id != scan.id
    assert scan_copy.status() == Scan.STATUS_EMPTY
    '''
    Export a scan into a NESSUS file.
    '''
    scan.download(test_nessus_file, format=ScanExportRequest.FORMAT_NESSUS)
    assert os.path.isfile(test_nessus_file)
    '''
    Create a new scan by importing a NESSUS file.
    '''
    imported_scan = client.scan_helper.import_scan(test_nessus_file)
    assert imported_scan.details().info.name == scan.details().info.name
    os.remove(test_nessus_file)
    '''
    Stop all scans.
    Note: Use with caution as this will stop all ongoing scans (including any automated test).
    '''
    # client.scan_helper.stop_all()
    '''
    Check if a target has recently been scanned (including running scans).
    '''
    activities = client.scan_helper.activities('tenable.com')
    last_history_id = scan.last_history().history_id
    assert [a for a in activities if last_history_id == a.history_id]
    '''
    Delete scans.
    '''
    scan.delete()
    scan_copy.delete()
    imported_scan.delete()
    # Deleted scans must no longer be retrievable; details() should raise.
    try:
        scan.details()
        assert False
    except TenableIOApiException:
        pass
    try:
        scan_copy.details()
        assert False
    except TenableIOApiException:
        pass
    try:
        imported_scan.details()
        assert False
    except TenableIOApiException:
        pass
| mit |
ilo10/scikit-learn | sklearn/feature_extraction/tests/test_image.py | 205 | 10378 | # Authors: Emmanuelle Gouillart <emmanuelle.gouillart@normalesup.org>
# Gael Varoquaux <gael.varoquaux@normalesup.org>
# License: BSD 3 clause
import numpy as np
import scipy as sp
from scipy import ndimage
from nose.tools import assert_equal, assert_true
from numpy.testing import assert_raises
from sklearn.feature_extraction.image import (
img_to_graph, grid_to_graph, extract_patches_2d,
reconstruct_from_patches_2d, PatchExtractor, extract_patches)
from sklearn.utils.graph import connected_components
def test_img_to_graph():
    # Two 4x4 ramps, one varying along each axis.
    horiz, vert = np.mgrid[:4, :4] - 10
    graph_h = img_to_graph(horiz)
    graph_v = img_to_graph(vert)
    # Both ramps must yield the same number of stored entries.
    assert_equal(graph_h.nnz, graph_v.nnz)
    # Negative elements are the diagonal: the elements of the original
    # image. Positive elements are the values of the gradient, they
    # should all be equal on both graphs.
    positives_h = graph_h.data[graph_h.data > 0]
    positives_v = graph_v.data[graph_v.data > 0]
    np.testing.assert_array_equal(positives_h, positives_v)
def test_grid_to_graph():
    # Checking that the function works with graphs containing no edges:
    # two isolated one-pixel corners give two connected components.
    side = 2
    roi = 1
    corner_mask = np.zeros((side, side), dtype=np.bool)
    corner_mask[0:roi, 0:roi] = True
    corner_mask[-roi:, -roi:] = True
    flat_mask = corner_mask.reshape(side ** 2)
    graph = grid_to_graph(n_x=side, n_y=side, mask=flat_mask, return_as=np.ndarray)
    assert_true(connected_components(graph)[0] == 2)
    # The mask may be any dtype, not just bool.
    int_mask = np.ones((side, side), dtype=np.int16)
    graph = grid_to_graph(n_x=side, n_y=side, n_z=side, mask=int_mask)
    assert_true(connected_components(graph)[0] == 1)
    # The requested dtype must be honoured on the resulting graph.
    full_mask = np.ones((side, side))
    for requested_dtype in (np.bool, np.int, np.float):
        graph = grid_to_graph(n_x=side, n_y=side, n_z=side, mask=full_mask,
                              dtype=requested_dtype)
        assert_true(graph.dtype == requested_dtype)
def test_connect_regions():
    image = sp.misc.lena()
    # The masked pixel graph must have exactly as many connected
    # components as ndimage finds labelled regions.
    for threshold in (50, 150):
        foreground = image > threshold
        graph = img_to_graph(image, foreground)
        assert_equal(ndimage.label(foreground)[1], connected_components(graph)[0])
def test_connect_regions_with_grid():
    image = sp.misc.lena()
    # Same invariant as test_connect_regions, but via grid_to_graph.
    bright = image > 50
    graph = grid_to_graph(*image.shape, mask=bright)
    assert_equal(ndimage.label(bright)[1], connected_components(graph)[0])
    # dtype=None keeps the image's own dtype for the graph weights.
    brighter = image > 150
    graph = grid_to_graph(*image.shape, mask=brighter, dtype=None)
    assert_equal(ndimage.label(brighter)[1], connected_components(graph)[0])
def _downsampled_lena():
    """Return the lena image block-averaged twice (512 -> 128 per side)."""
    image = sp.misc.lena().astype(np.float32)
    # Two rounds of summing each 2x2 block, then one division by 16
    # (4 pixels per block, twice) -- same arithmetic order as a plain
    # repeated-sum implementation.
    for _ in range(2):
        image = (image[::2, ::2] + image[1::2, ::2] + image[::2, 1::2]
                 + image[1::2, 1::2])
    image = image.astype(np.float)
    image /= 16.0
    return image
def _orange_lena(lena=None):
lena = _downsampled_lena() if lena is None else lena
lena_color = np.zeros(lena.shape + (3,))
lena_color[:, :, 0] = 256 - lena
lena_color[:, :, 1] = 256 - lena / 2
lena_color[:, :, 2] = 256 - lena / 4
return lena_color
def _make_images(lena=None):
lena = _downsampled_lena() if lena is None else lena
# make a collection of lenas
images = np.zeros((3,) + lena.shape)
images[0] = lena
images[1] = lena + 1
images[2] = lena + 2
return images
# Shared module-level fixtures reused by the tests below.
downsampled_lena = _downsampled_lena()
orange_lena = _orange_lena(downsampled_lena)
lena_collection = _make_images(downsampled_lena)
def test_extract_patches_all():
    # By default every possible patch position is extracted.
    image = downsampled_lena
    height, width = image.shape
    patch_h, patch_w = 16, 16
    n_expected = (height - patch_h + 1) * (width - patch_w + 1)
    patches = extract_patches_2d(image, (patch_h, patch_w))
    assert_equal(patches.shape, (n_expected, patch_h, patch_w))
def test_extract_patches_all_color():
    # Colour images keep their trailing channel axis on every patch.
    image = orange_lena
    height, width = image.shape[:2]
    patch_h, patch_w = 16, 16
    n_expected = (height - patch_h + 1) * (width - patch_w + 1)
    patches = extract_patches_2d(image, (patch_h, patch_w))
    assert_equal(patches.shape, (n_expected, patch_h, patch_w, 3))
def test_extract_patches_all_rect():
    # Non-square patches on a non-square image.
    image = downsampled_lena[:, 32:97]
    height, width = image.shape
    patch_h, patch_w = 16, 12
    n_expected = (height - patch_h + 1) * (width - patch_w + 1)
    patches = extract_patches_2d(image, (patch_h, patch_w))
    assert_equal(patches.shape, (n_expected, patch_h, patch_w))
def test_extract_patches_max_patches():
    image = downsampled_lena
    height, width = image.shape
    patch_h, patch_w = 16, 16
    # An integer max_patches caps the number of sampled patches.
    patches = extract_patches_2d(image, (patch_h, patch_w), max_patches=100)
    assert_equal(patches.shape, (100, patch_h, patch_w))
    # A float max_patches is a fraction of all possible patches.
    n_expected = int(0.5 * (height - patch_h + 1) * (width - patch_w + 1))
    patches = extract_patches_2d(image, (patch_h, patch_w), max_patches=0.5)
    assert_equal(patches.shape, (n_expected, patch_h, patch_w))
    # Fractions above 1 and negative values are rejected.
    for bad_value in (2.0, -1.0):
        assert_raises(ValueError, extract_patches_2d, image, (patch_h, patch_w),
                      max_patches=bad_value)
def test_reconstruct_patches_perfect():
    # Extracting all patches and averaging them back must reproduce the
    # original image exactly.
    image = downsampled_lena
    patches = extract_patches_2d(image, (16, 16))
    rebuilt = reconstruct_from_patches_2d(patches, image.shape)
    np.testing.assert_array_equal(image, rebuilt)
def test_reconstruct_patches_perfect_color():
    # Same round-trip guarantee for a 3-channel image.
    image = orange_lena
    patches = extract_patches_2d(image, (16, 16))
    rebuilt = reconstruct_from_patches_2d(patches, image.shape)
    np.testing.assert_array_equal(image, rebuilt)
def test_patch_extractor_fit():
    # fit() is a no-op and must return the estimator itself.
    images = lena_collection
    extractor = PatchExtractor(patch_size=(8, 8), max_patches=100, random_state=0)
    assert_true(extractor == extractor.fit(images))
def test_patch_extractor_max_patches():
    images = lena_collection
    height, width = images.shape[1:3]
    patch_h, patch_w = 8, 8
    # Integer max_patches: that many patches per input image.
    cap = 100
    extractor = PatchExtractor(patch_size=(patch_h, patch_w), max_patches=cap,
                               random_state=0)
    patches = extractor.transform(images)
    assert_true(patches.shape == (len(images) * cap, patch_h, patch_w))
    # Float max_patches: a fraction of all possible patches per image.
    fraction = 0.5
    per_image = int((height - patch_h + 1) * (width - patch_w + 1)
                    * fraction)
    extractor = PatchExtractor(patch_size=(patch_h, patch_w), max_patches=fraction,
                               random_state=0)
    patches = extractor.transform(images)
    assert_true(patches.shape == (len(images) * per_image, patch_h, patch_w))
def test_patch_extractor_max_patches_default():
    # Without an explicit patch_size the extractor picks one tenth of the
    # image size along each axis (128 // 10 == 12 here).
    images = lena_collection
    extractor = PatchExtractor(max_patches=100, random_state=0)
    patches = extractor.transform(images)
    assert_equal(patches.shape, (len(images) * 100, 12, 12))
def test_patch_extractor_all_patches():
    # Without max_patches every position of every image is extracted.
    images = lena_collection
    height, width = images.shape[1:3]
    patch_h, patch_w = 8, 8
    n_expected = len(images) * (height - patch_h + 1) * (width - patch_w + 1)
    extractor = PatchExtractor(patch_size=(patch_h, patch_w), random_state=0)
    patches = extractor.transform(images)
    assert_true(patches.shape == (n_expected, patch_h, patch_w))
def test_patch_extractor_color():
    # Colour collections keep the channel axis on extracted patches.
    images = _make_images(orange_lena)
    height, width = images.shape[1:3]
    patch_h, patch_w = 8, 8
    n_expected = len(images) * (height - patch_h + 1) * (width - patch_w + 1)
    extractor = PatchExtractor(patch_size=(patch_h, patch_w), random_state=0)
    patches = extractor.transform(images)
    assert_true(patches.shape == (n_expected, patch_h, patch_w, 3))
def test_extract_patches_strided():
    # Table-driven check of extract_patches() with explicit extraction steps
    # in 1D, 2D and 3D: for each case we verify the number of patch
    # positions and the content of the very last patch.
    image_shapes_1D = [(10,), (10,), (11,), (10,)]
    patch_sizes_1D = [(1,), (2,), (3,), (8,)]
    patch_steps_1D = [(1,), (1,), (4,), (2,)]
    expected_views_1D = [(10,), (9,), (3,), (2,)]
    last_patch_1D = [(10,), (8,), (8,), (2,)]
    image_shapes_2D = [(10, 20), (10, 20), (10, 20), (11, 20)]
    patch_sizes_2D = [(2, 2), (10, 10), (10, 11), (6, 6)]
    patch_steps_2D = [(5, 5), (3, 10), (3, 4), (4, 2)]
    expected_views_2D = [(2, 4), (1, 2), (1, 3), (2, 8)]
    last_patch_2D = [(5, 15), (0, 10), (0, 8), (4, 14)]
    image_shapes_3D = [(5, 4, 3), (3, 3, 3), (7, 8, 9), (7, 8, 9)]
    patch_sizes_3D = [(2, 2, 3), (2, 2, 2), (1, 7, 3), (1, 3, 3)]
    patch_steps_3D = [(1, 2, 10), (1, 1, 1), (2, 1, 3), (3, 3, 4)]
    expected_views_3D = [(4, 2, 1), (2, 2, 2), (4, 2, 3), (3, 2, 2)]
    last_patch_3D = [(3, 2, 0), (1, 1, 1), (6, 1, 6), (6, 3, 4)]
    image_shapes = image_shapes_1D + image_shapes_2D + image_shapes_3D
    patch_sizes = patch_sizes_1D + patch_sizes_2D + patch_sizes_3D
    patch_steps = patch_steps_1D + patch_steps_2D + patch_steps_3D
    expected_views = expected_views_1D + expected_views_2D + expected_views_3D
    last_patches = last_patch_1D + last_patch_2D + last_patch_3D
    for (image_shape, patch_size, patch_step, expected_view,
         last_patch) in zip(image_shapes, patch_sizes, patch_steps,
                            expected_views, last_patches):
        image = np.arange(np.prod(image_shape)).reshape(image_shape)
        patches = extract_patches(image, patch_shape=patch_size,
                                  extraction_step=patch_step)
        ndim = len(image_shape)
        # Leading ndim axes index patch positions; trailing axes are the
        # patch contents themselves.
        assert_true(patches.shape[:ndim] == expected_view)
        last_patch_slices = [slice(i, i + j, None) for i, j in
                             zip(last_patch, patch_size)]
        # NOTE(review): indexing with a *list* of slices relies on legacy
        # non-tuple sequence indexing (deprecated since NumPy 1.15); a tuple
        # is required on modern NumPy.
        assert_true((patches[[slice(-1, None, None)] * ndim] ==
                     image[last_patch_slices].squeeze()).all())
def test_extract_patches_square():
    # test same patch size for all dimensions: a scalar patch_shape is
    # broadcast to every axis.
    image = downsampled_lena
    height, width = image.shape
    size = 8
    n_positions = ((height - size + 1), (width - size + 1))
    patches = extract_patches(image, patch_shape=size)
    assert_true(patches.shape == (n_positions[0], n_positions[1],
                                  size, size))
def test_width_patch():
    # Patches larger than the image along either axis are rejected.
    image = np.array([[1, 2, 3], [4, 5, 6], [7, 8, 9]])
    for bad_shape in ((4, 1), (1, 4)):
        assert_raises(ValueError, extract_patches_2d, image, bad_shape)
| bsd-3-clause |
nimasmi/wagtail | wagtail/admin/action_menu.py | 1 | 7466 | """Handles rendering of the list of actions in the footer of the page create/edit views."""
from django.conf import settings
from django.forms import Media, MediaDefiningClass
from django.template.loader import render_to_string
from django.urls import reverse
from django.utils.functional import cached_property
from django.utils.translation import ugettext_lazy as _
from wagtail.core import hooks
from wagtail.core.models import UserPagePermissionsProxy
class ActionMenuItem(metaclass=MediaDefiningClass):
    """Defines an item in the actions drop-up on the page creation/edit view"""
    order = 100 # default order index if one is not specified on init
    template = 'wagtailadmin/pages/action_menu/menu_item.html'
    # Display label and form-button name; subclasses override both.
    label = ''
    name = None
    def __init__(self, order=None):
        if order is not None:
            self.order = order
    def is_shown(self, request, context):
        """
        Whether this action should be shown on this request; permission checks etc should go here.
        By default, actions are shown for unlocked pages, hidden for locked pages
        request = the current request object
        context = dictionary containing at least:
            'view' = 'create', 'edit' or 'revisions_revert'
            'page' (if view = 'edit' or 'revisions_revert') = the page being edited
            'parent_page' (if view = 'create') = the parent page of the page being created
            'user_page_permissions' = a UserPagePermissionsProxy for the current user, to test permissions against
        """
        return (context['view'] == 'create' or not context['page'].locked)
    def get_context(self, request, parent_context):
        """Defines context for the template, overridable to use more data"""
        context = parent_context.copy()
        context.update({
            'label': self.label,
            'url': self.get_url(request, context),
            'name': self.name,
        })
        return context
    def get_url(self, request, context):
        # Base items render as plain submit buttons; subclasses that link
        # somewhere (e.g. unpublish/delete) override this to return a URL.
        return None
    def render_html(self, request, parent_context):
        # Render this item with its template and the computed context.
        context = self.get_context(request, parent_context)
        return render_to_string(self.template, context, request=request)
class PublishMenuItem(ActionMenuItem):
    label = _("Publish")
    name = 'action-publish'
    template = 'wagtailadmin/pages/action_menu/publish.html'

    def is_shown(self, request, context):
        perms = context['user_page_permissions']
        if context['view'] == 'create':
            # Creating: need publish rights on the parent.
            return perms.for_page(context['parent_page']).can_publish_subpage()
        # 'edit' / 'revisions_revert': page must be unlocked and the user
        # must hold publish permission on it.
        page = context['page']
        return not page.locked and perms.for_page(page).can_publish()

    def get_context(self, request, parent_context):
        # Tell the template whether we are publishing a past revision.
        context = super().get_context(request, parent_context)
        context['is_revision'] = (context['view'] == 'revisions_revert')
        return context
class SubmitForModerationMenuItem(ActionMenuItem):
    label = _("Submit for moderation")
    name = 'action-submit'

    def is_shown(self, request, context):
        # Honour the global moderation switch first (enabled by default).
        if not getattr(settings, 'WAGTAIL_MODERATION_ENABLED', True):
            return False
        view = context['view']
        if view == 'create':
            return True
        if view == 'edit':
            return not context['page'].locked
        # 'revisions_revert' never offers submission.
        return False
class UnpublishMenuItem(ActionMenuItem):
    label = _("Unpublish")
    name = 'action-unpublish'

    def is_shown(self, request, context):
        # Only offered while editing an unlocked page the user may unpublish.
        if context['view'] != 'edit':
            return False
        page = context['page']
        if page.locked:
            return False
        return context['user_page_permissions'].for_page(page).can_unpublish()

    def get_url(self, request, context):
        return reverse('wagtailadmin_pages:unpublish', args=(context['page'].id,))
class DeleteMenuItem(ActionMenuItem):
    name = 'action-delete'
    label = _("Delete")

    def is_shown(self, request, context):
        # Only offered while editing an unlocked page the user may delete.
        if context['view'] != 'edit':
            return False
        page = context['page']
        if page.locked:
            return False
        return context['user_page_permissions'].for_page(page).can_delete()

    def get_url(self, request, context):
        return reverse('wagtailadmin_pages:delete', args=(context['page'].id,))
class SaveDraftMenuItem(ActionMenuItem):
    name = 'action-save-draft'
    label = _("Save Draft")
    template = 'wagtailadmin/pages/action_menu/save_draft.html'

    def get_context(self, request, parent_context):
        # Expose whether we are editing a past revision to the template.
        context = super().get_context(request, parent_context)
        context.update({'is_revision': context['view'] == 'revisions_revert'})
        return context
class PageLockedMenuItem(ActionMenuItem):
    name = 'action-page-locked'
    label = _("Page locked")
    template = 'wagtailadmin/pages/action_menu/page_locked.html'

    def is_shown(self, request, context):
        # Shown whenever the context carries a page and that page is locked.
        if 'page' not in context:
            return False
        return context['page'].locked

    def get_context(self, request, parent_context):
        # Expose whether we are viewing a past revision to the template.
        context = super().get_context(request, parent_context)
        context.update({'is_revision': context['view'] == 'revisions_revert'})
        return context
# Lazily-built, process-wide cache of the base menu items (plus any items
# contributed via the 'register_page_action_menu_item' hook).
BASE_PAGE_ACTION_MENU_ITEMS = None
def _get_base_page_action_menu_items():
    """
    Retrieve the global list of menu items for the page action menu,
    which may then be customised on a per-request basis
    """
    global BASE_PAGE_ACTION_MENU_ITEMS
    if BASE_PAGE_ACTION_MENU_ITEMS is None:
        # Built on first use so hooks registered at startup are included;
        # hook items are appended once, inside this guard.
        BASE_PAGE_ACTION_MENU_ITEMS = [
            PageLockedMenuItem(order=-10000),
            SaveDraftMenuItem(order=0),
            UnpublishMenuItem(order=10),
            DeleteMenuItem(order=20),
            PublishMenuItem(order=30),
            SubmitForModerationMenuItem(order=40),
        ]
        for hook in hooks.get_hooks('register_page_action_menu_item'):
            BASE_PAGE_ACTION_MENU_ITEMS.append(hook())
    return BASE_PAGE_ACTION_MENU_ITEMS
class PageActionMenu:
    """Builds and renders the footer action menu for page create/edit views."""
    template = 'wagtailadmin/pages/action_menu/menu.html'
    def __init__(self, request, **kwargs):
        self.request = request
        self.context = kwargs
        self.context['user_page_permissions'] = UserPagePermissionsProxy(self.request.user)
        # Keep only the items whose is_shown() allows them for this request.
        self.menu_items = [
            menu_item
            for menu_item in _get_base_page_action_menu_items()
            if menu_item.is_shown(self.request, self.context)
        ]
        self.menu_items.sort(key=lambda item: item.order)
        # Hooks may reorder, add or remove items in place.
        for hook in hooks.get_hooks('construct_page_action_menu'):
            hook(self.menu_items, self.request, self.context)
        # The first remaining item becomes the prominent default button;
        # the rest go into the drop-up.
        try:
            self.default_item = self.menu_items.pop(0)
        except IndexError:
            self.default_item = None
    def render_html(self):
        # NOTE(review): assumes default_item is not None; a hook that empties
        # menu_items would make this raise AttributeError.
        return render_to_string(self.template, {
            'default_menu_item': self.default_item.render_html(self.request, self.context),
            'show_menu': bool(self.menu_items),
            'rendered_menu_items': [
                menu_item.render_html(self.request, self.context)
                for menu_item in self.menu_items
            ],
        }, request=self.request)
    @cached_property
    def media(self):
        # Combined form media of every visible (non-default) menu item.
        media = Media()
        for item in self.menu_items:
            media += item.media
        return media
| bsd-3-clause |
yatsek/autokey | src/lib/qtui/enginesettings.py | 50 | 2667 | #!/usr/bin/env python
# coding=UTF-8
#
# Generated by pykdeuic4 from enginesettings.ui on Sun Mar 4 11:39:39 2012
#
# WARNING! All changes to this file will be lost.
from PyKDE4 import kdecore
from PyKDE4 import kdeui
from PyQt4 import QtCore, QtGui
try:
    # PyQt4 built with QString support (API v1): reuse Qt's UTF-8 decoder.
    _fromUtf8 = QtCore.QString.fromUtf8
except AttributeError:
    # API v2 / Python 3: QString does not exist, strings are already
    # unicode, so the conversion is a no-op.
    _fromUtf8 = lambda s: s
class Ui_Form(object):
    """UI builder for the engine-settings form.

    NOTE: this class is generated by pykdeuic4 from enginesettings.ui
    (see the header above) — edit the .ui file, not this code, as
    manual changes will be overwritten on regeneration.
    """

    def setupUi(self, Form):
        """Create the widget tree and layouts on the given top-level Form."""
        Form.setObjectName(_fromUtf8("Form"))
        Form.resize(400, 300)
        self.verticalLayout_2 = QtGui.QVBoxLayout(Form)
        self.verticalLayout_2.setObjectName(_fromUtf8("verticalLayout_2"))
        # Group box holding the "user module folder" picker.
        self.groupBox = QtGui.QGroupBox(Form)
        self.groupBox.setObjectName(_fromUtf8("groupBox"))
        self.verticalLayout = QtGui.QVBoxLayout(self.groupBox)
        self.verticalLayout.setObjectName(_fromUtf8("verticalLayout"))
        self.label = QtGui.QLabel(self.groupBox)
        self.label.setWordWrap(True)
        self.label.setObjectName(_fromUtf8("label"))
        self.verticalLayout.addWidget(self.label)
        # Row: current folder label + "Browse" button.
        self.horizontalLayout = QtGui.QHBoxLayout()
        self.horizontalLayout.setObjectName(_fromUtf8("horizontalLayout"))
        self.folderLabel = QtGui.QLabel(self.groupBox)
        # Let the folder label absorb the spare horizontal space.
        sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.MinimumExpanding, QtGui.QSizePolicy.Preferred)
        sizePolicy.setHorizontalStretch(0)
        sizePolicy.setVerticalStretch(0)
        sizePolicy.setHeightForWidth(self.folderLabel.sizePolicy().hasHeightForWidth())
        self.folderLabel.setSizePolicy(sizePolicy)
        self.folderLabel.setObjectName(_fromUtf8("folderLabel"))
        self.horizontalLayout.addWidget(self.folderLabel)
        self.browseButton = QtGui.QPushButton(self.groupBox)
        self.browseButton.setObjectName(_fromUtf8("browseButton"))
        self.horizontalLayout.addWidget(self.browseButton)
        self.verticalLayout.addLayout(self.horizontalLayout)
        self.verticalLayout_2.addWidget(self.groupBox)
        # Spacer pushes the group box to the top of the form.
        spacerItem = QtGui.QSpacerItem(20, 40, QtGui.QSizePolicy.Minimum, QtGui.QSizePolicy.Expanding)
        self.verticalLayout_2.addItem(spacerItem)
        self.retranslateUi(Form)
        QtCore.QMetaObject.connectSlotsByName(Form)

    def retranslateUi(self, Form):
        """Assign the (KDE-i18n translated) user-visible strings."""
        Form.setWindowTitle(kdecore.i18n(_fromUtf8("Form")))
        self.groupBox.setTitle(kdecore.i18n(_fromUtf8("User Module Folder")))
        self.label.setText(kdecore.i18n(_fromUtf8("Any Python modules placed in this folder will be available for import by scripts.")))
        self.folderLabel.setText(kdecore.i18n(_fromUtf8("None selected")))
        self.browseButton.setText(kdecore.i18n(_fromUtf8("Browse")))
| gpl-3.0 |
bwrsandman/OpenUpgrade | addons/base_report_designer/plugin/openerp_report_designer/bin/script/__init__.py | 384 | 1414 | #########################################################################
#
# Copyright (c) 2003-2004 Danny Brewer d29583@groovegarden.com
# Copyright (C) 2004-2010 OpenERP SA (<http://openerp.com>).
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
#
# See: http://www.gnu.org/licenses/lgpl.html
#
#############################################################################
import Expression
import lib
import Fields
import modify
import Repeatln
import ServerParameter
import NewReport
import LoginTest
import Change
import About
import AddAttachment
import ConvertBracesToField
import ConvertFieldsToBraces
import ExportToRML
import SendtoServer
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
jeremiahyan/odoo | addons/hr_presence/models/hr_employee.py | 4 | 8595 | # -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
import logging
from ast import literal_eval
from odoo import fields, models, _, api
from odoo.exceptions import UserError
from odoo.fields import Datetime
_logger = logging.getLogger(__name__)
class Employee(models.AbstractModel):
    """Extends the employee base model with presence detection.

    Presence is inferred per day from three signals: IP connections
    recorded in res.users.log, emails sent via mail.message, and a
    manual override by an HR manager. The per-day flags below are reset
    each new day by _check_presence().
    """
    _inherit = 'hr.employee.base'

    # Per-day presence signals, reset daily by _check_presence().
    email_sent = fields.Boolean(default=False)
    ip_connected = fields.Boolean(default=False)
    manually_set_present = fields.Boolean(default=False)

    # Stored field used in the presence kanban reporting view
    # to allow group by state.
    hr_presence_state_display = fields.Selection([
        ('to_define', 'To Define'),
        ('present', 'Present'),
        ('absent', 'Absent'),
    ])

    def _compute_presence_state(self):
        """Upgrade the base presence state to 'present' for employees that
        triggered one of the presence signals today and are working now.
        """
        super()._compute_presence_state()
        # Only employees not already marked present and not on leave.
        employees = self.filtered(lambda e: e.hr_presence_state != 'present' and not e.is_absent)
        company = self.env.company
        employee_to_check_working = employees.filtered(lambda e:
                                                       not e.is_absent and
                                                       (e.email_sent or e.ip_connected or e.manually_set_present))
        working_now_list = employee_to_check_working._get_employee_working_now()
        for employee in employees:
            # Present only if the signals were computed today (the daily
            # flags are otherwise stale) and the employee is working now.
            if not employee.is_absent and company.hr_presence_last_compute_date and employee.id in working_now_list and \
                    company.hr_presence_last_compute_date.day == Datetime.now().day and \
                    (employee.email_sent or employee.ip_connected or employee.manually_set_present):
                employee.hr_presence_state = 'present'

    @api.model
    def _check_presence(self):
        """Recompute today's presence signals for the current company.

        Resets the per-day flags on day rollover, then sets ip_connected /
        email_sent according to the configured controls, and finally
        copies the computed state into the stored reporting field.
        """
        company = self.env.company
        # New day (or never computed): clear yesterday's signals.
        if not company.hr_presence_last_compute_date or \
                company.hr_presence_last_compute_date.day != Datetime.now().day:
            self.env['hr.employee'].search([
                ('company_id', '=', company.id)
            ]).write({
                'email_sent': False,
                'ip_connected': False,
                'manually_set_present': False
            })

        employees = self.env['hr.employee'].search([('company_id', '=', company.id)])
        all_employees = employees

        # Check on IP
        if literal_eval(self.env['ir.config_parameter'].sudo().get_param('hr.hr_presence_control_ip', 'False')):
            ip_list = company.hr_presence_control_ip_list
            ip_list = ip_list.split(',') if ip_list else []
            ip_employees = self.env['hr.employee']
            for employee in employees:
                # IPs the employee's user logged in from since midnight.
                employee_ips = self.env['res.users.log'].search([
                    ('create_uid', '=', employee.user_id.id),
                    ('ip', '!=', False),
                    ('create_date', '>=', Datetime.to_string(Datetime.now().replace(hour=0, minute=0, second=0, microsecond=0)))]
                ).mapped('ip')
                if any(ip in ip_list for ip in employee_ips):
                    ip_employees |= employee
            ip_employees.write({'ip_connected': True})
            # Already confirmed present; skip them in the next check.
            employees = employees - ip_employees

        # Check on sent emails
        if literal_eval(self.env['ir.config_parameter'].sudo().get_param('hr.hr_presence_control_email', 'False')):
            email_employees = self.env['hr.employee']
            threshold = company.hr_presence_control_email_amount
            for employee in employees:
                # Emails authored by the employee's partner today.
                sent_emails = self.env['mail.message'].search_count([
                    ('author_id', '=', employee.user_id.partner_id.id),
                    ('date', '>=', Datetime.to_string(Datetime.now().replace(hour=0, minute=0, second=0, microsecond=0))),
                    ('date', '<=', Datetime.to_string(Datetime.now()))])
                if sent_emails >= threshold:
                    email_employees |= employee
            email_employees.write({'email_sent': True})
            employees = employees - email_employees

        company.sudo().hr_presence_last_compute_date = Datetime.now()

        # Sync the stored reporting field with the computed state.
        for employee in all_employees:
            employee.hr_presence_state_display = employee.hr_presence_state

    @api.model
    def _action_open_presence_view(self):
        # Compute the presence/absence for the employees on the same
        # company than the HR/manager. Then opens the kanban view
        # of the employees with an undefined presence/absence
        _logger.info("Employees presence checked by: %s" % self.env.user.name)
        self._check_presence()
        return {
            "type": "ir.actions.act_window",
            "res_model": "hr.employee",
            "views": [[self.env.ref('hr_presence.hr_employee_view_kanban').id, "kanban"], [False, "tree"], [False, "form"]],
            'view_mode': 'kanban,tree,form',
            "domain": [],
            "name": "Employee's Presence to Define",
            "search_view_id": [self.env.ref('hr_presence.hr_employee_view_presence_search').id, 'search'],
            "context": {'search_default_group_hr_presence_state': 1,
                        'searchpanel_default_hr_presence_state_display': 'to_define'},
        }

    def action_set_present(self):
        """Manually flag the employee as present (HR managers only)."""
        if not self.env.user.has_group('hr.group_hr_manager'):
            raise UserError(_("You don't have the right to do this. Please contact an Administrator."))
        self.write({'manually_set_present': True})

    def write(self, vals):
        # Dragging a kanban card to the 'present' column counts as a
        # manual presence override.
        if vals.get('hr_presence_state_display') == 'present':
            vals['manually_set_present'] = True
        return super().write(vals)

    def action_open_leave_request(self):
        """Open a new leave-request form pre-filled for this employee."""
        self.ensure_one()
        return {
            "type": "ir.actions.act_window",
            "res_model": "hr.leave",
            "views": [[False, "form"]],
            "view_mode": 'form',
            "context": {'default_employee_id': self.id},
        }

    # --------------------------------------------------
    # Messaging
    # --------------------------------------------------

    def action_send_sms(self):
        """Open the SMS composer targeting the employee's work mobile.

        Raises UserError if the caller is not an HR manager or the
        employee has no professional mobile number.
        """
        self.ensure_one()
        if not self.env.user.has_group('hr.group_hr_manager'):
            raise UserError(_("You don't have the right to do this. Please contact an Administrator."))
        if not self.mobile_phone:
            raise UserError(_("There is no professional mobile for this employee."))

        context = dict(self.env.context)
        context.update(default_res_model='hr.employee', default_res_id=self.id, default_composition_mode='comment', default_number_field_name='mobile_phone')

        # Fall back to a hard-coded body if the SMS template is absent.
        template = self.env.ref('hr_presence.sms_template_presence', False)
        if not template:
            context['default_body'] = _("""Exception made if there was a mistake of ours, it seems that you are not at your office and there is not request of time off from you.
Please, take appropriate measures in order to carry out this work absence.
Do not hesitate to contact your manager or the human resource department.""")
        else:
            context['default_template_id'] = template.id

        return {
            "type": "ir.actions.act_window",
            "res_model": "sms.composer",
            "view_mode": 'form',
            "context": context,
            "name": "Send SMS Text Message",
            "target": "new",
        }

    def action_send_mail(self):
        """Open the email composer targeting the employee's work address.

        Raises UserError if the caller is not an HR manager or the
        employee has no professional email address.
        """
        self.ensure_one()
        if not self.env.user.has_group('hr.group_hr_manager'):
            raise UserError(_("You don't have the right to do this. Please contact an Administrator."))
        if not self.work_email:
            raise UserError(_("There is no professional email address for this employee."))
        template = self.env.ref('hr_presence.mail_template_presence', False)
        compose_form = self.env.ref('mail.email_compose_message_wizard_form', False)
        ctx = dict(
            default_model="hr.employee",
            default_res_id=self.id,
            default_use_template=bool(template),
            default_template_id=template.id,
            default_composition_mode='comment',
            default_is_log=True,
            custom_layout='mail.mail_notification_light',
        )
        return {
            'name': _('Compose Email'),
            'type': 'ir.actions.act_window',
            'view_mode': 'form',
            'res_model': 'mail.compose.message',
            'views': [(compose_form.id, 'form')],
            'view_id': compose_form.id,
            'target': 'new',
            'context': ctx,
        }
| gpl-3.0 |
vincepandolfo/django | tests/auth_tests/test_mixins.py | 9 | 8497 | from django.contrib.auth import models
from django.contrib.auth.mixins import (
LoginRequiredMixin, PermissionRequiredMixin, UserPassesTestMixin,
)
from django.contrib.auth.models import AnonymousUser
from django.core.exceptions import PermissionDenied
from django.http import HttpResponse
from django.test import RequestFactory, TestCase
from django.views.generic import View
# ---------------------------------------------------------------------------
# Test fixtures: minimal mixins and views exercised by the test cases below.
# ---------------------------------------------------------------------------

class AlwaysTrueMixin(UserPassesTestMixin):
    # Access test that always grants access.
    def test_func(self):
        return True


class AlwaysFalseMixin(UserPassesTestMixin):
    # Access test that always denies access.
    def test_func(self):
        return False


class EmptyResponseView(View):
    # Simplest possible view: returns an empty 200 response.
    def get(self, request, *args, **kwargs):
        return HttpResponse()


class AlwaysTrueView(AlwaysTrueMixin, EmptyResponseView):
    pass


class AlwaysFalseView(AlwaysFalseMixin, EmptyResponseView):
    pass


class StackedMixinsView1(LoginRequiredMixin, PermissionRequiredMixin, EmptyResponseView):
    # Login check runs before the permission check.
    permission_required = ['auth_tests.add_customuser', 'auth_tests.change_customuser']
    raise_exception = True


class StackedMixinsView2(PermissionRequiredMixin, LoginRequiredMixin, EmptyResponseView):
    # Same as StackedMixinsView1 with the mixin order reversed.
    permission_required = ['auth_tests.add_customuser', 'auth_tests.change_customuser']
    raise_exception = True
class AccessMixinTests(TestCase):
    """Tests for stacking LoginRequiredMixin with PermissionRequiredMixin,
    in either order."""
    factory = RequestFactory()

    def test_stacked_mixins_success(self):
        """A logged-in user holding both permissions gets a 200 from both
        mixin orderings."""
        user = models.User.objects.create(username='joe', password='qwerty')
        perms = models.Permission.objects.filter(codename__in=('add_customuser', 'change_customuser'))
        user.user_permissions.add(*perms)
        request = self.factory.get('/rand')
        request.user = user

        view = StackedMixinsView1.as_view()
        response = view(request)
        self.assertEqual(response.status_code, 200)

        view = StackedMixinsView2.as_view()
        response = view(request)
        self.assertEqual(response.status_code, 200)

    def test_stacked_mixins_missing_permission(self):
        """A user missing one required permission is denied by both
        mixin orderings."""
        user = models.User.objects.create(username='joe', password='qwerty')
        perms = models.Permission.objects.filter(codename__in=('add_customuser',))
        user.user_permissions.add(*perms)
        request = self.factory.get('/rand')
        request.user = user

        view = StackedMixinsView1.as_view()
        with self.assertRaises(PermissionDenied):
            view(request)

        view = StackedMixinsView2.as_view()
        with self.assertRaises(PermissionDenied):
            view(request)

    def test_stacked_mixins_not_logged_in(self):
        """An unauthenticated user is denied even when holding both
        permissions (raise_exception=True turns redirects into 403s)."""
        user = models.User.objects.create(username='joe', password='qwerty')
        # Force the user to look anonymous to the mixins.
        user.is_authenticated = lambda: False
        perms = models.Permission.objects.filter(codename__in=('add_customuser', 'change_customuser'))
        user.user_permissions.add(*perms)
        request = self.factory.get('/rand')
        request.user = user

        view = StackedMixinsView1.as_view()
        with self.assertRaises(PermissionDenied):
            view(request)

        view = StackedMixinsView2.as_view()
        with self.assertRaises(PermissionDenied):
            view(request)
class UserPassesTestTests(TestCase):
    """Tests for UserPassesTestMixin: redirects, custom login URLs,
    exception raising, and custom denial messages."""
    factory = RequestFactory()

    def _test_redirect(self, view=None, url='/accounts/login/?next=/rand'):
        # Helper: a failing anonymous request must redirect to `url`.
        if not view:
            view = AlwaysFalseView.as_view()
        request = self.factory.get('/rand')
        request.user = AnonymousUser()
        response = view(request)
        self.assertEqual(response.status_code, 302)
        self.assertEqual(response.url, url)

    def test_default(self):
        self._test_redirect()

    def test_custom_redirect_url(self):
        """login_url overrides the redirect target."""
        class AView(AlwaysFalseView):
            login_url = '/login/'

        self._test_redirect(AView.as_view(), '/login/?next=/rand')

    def test_custom_redirect_parameter(self):
        """redirect_field_name renames the ?next= query parameter."""
        class AView(AlwaysFalseView):
            redirect_field_name = 'goto'

        self._test_redirect(AView.as_view(), '/accounts/login/?goto=/rand')

    def test_no_redirect_parameter(self):
        """redirect_field_name = None drops the query parameter entirely."""
        class AView(AlwaysFalseView):
            redirect_field_name = None

        self._test_redirect(AView.as_view(), '/accounts/login/')

    def test_raise_exception(self):
        """raise_exception = True raises PermissionDenied instead of
        redirecting."""
        class AView(AlwaysFalseView):
            raise_exception = True

        request = self.factory.get('/rand')
        request.user = AnonymousUser()
        with self.assertRaises(PermissionDenied):
            AView.as_view()(request)

    def test_raise_exception_custom_message(self):
        """permission_denied_message attribute ends up in the exception."""
        msg = "You don't have access here"

        class AView(AlwaysFalseView):
            raise_exception = True
            permission_denied_message = msg

        request = self.factory.get('/rand')
        request.user = AnonymousUser()
        view = AView.as_view()
        with self.assertRaises(PermissionDenied) as cm:
            view(request)
        self.assertEqual(cm.exception.args[0], msg)

    def test_raise_exception_custom_message_function(self):
        """get_permission_denied_message() override ends up in the
        exception."""
        msg = "You don't have access here"

        class AView(AlwaysFalseView):
            raise_exception = True

            def get_permission_denied_message(self):
                return msg

        request = self.factory.get('/rand')
        request.user = AnonymousUser()
        view = AView.as_view()
        with self.assertRaises(PermissionDenied) as cm:
            view(request)
        self.assertEqual(cm.exception.args[0], msg)

    def test_user_passes(self):
        """A passing test function lets the request through (200)."""
        view = AlwaysTrueView.as_view()
        request = self.factory.get('/rand')
        request.user = AnonymousUser()
        response = view(request)
        self.assertEqual(response.status_code, 200)
class LoginRequiredMixinTests(TestCase):
    """Tests for LoginRequiredMixin: anonymous users are redirected to the
    login page, authenticated users get through."""
    factory = RequestFactory()

    @classmethod
    def setUpTestData(cls):
        cls.user = models.User.objects.create(username='joe', password='qwerty')

    def test_login_required(self):
        """
        Check that login_required works on a simple view wrapped in a
        login_required decorator.
        """
        class AView(LoginRequiredMixin, EmptyResponseView):
            pass

        view = AView.as_view()
        # Anonymous: 302 redirect to the login page with ?next=.
        request = self.factory.get('/rand')
        request.user = AnonymousUser()
        response = view(request)
        self.assertEqual(response.status_code, 302)
        self.assertEqual('/accounts/login/?next=/rand', response.url)
        # Authenticated: request passes through to the view.
        request = self.factory.get('/rand')
        request.user = self.user
        response = view(request)
        self.assertEqual(response.status_code, 200)
class PermissionsRequiredMixinTests(TestCase):
    """Tests for PermissionRequiredMixin with single and multiple
    permissions, both the redirect and the raise_exception behaviour."""
    factory = RequestFactory()

    @classmethod
    def setUpTestData(cls):
        cls.user = models.User.objects.create(username='joe', password='qwerty')
        # Grant both custom-user permissions up front.
        perms = models.Permission.objects.filter(codename__in=('add_customuser', 'change_customuser'))
        cls.user.user_permissions.add(*perms)

    def test_many_permissions_pass(self):
        """A list of held permissions allows access."""
        class AView(PermissionRequiredMixin, EmptyResponseView):
            permission_required = ['auth_tests.add_customuser', 'auth_tests.change_customuser']

        request = self.factory.get('/rand')
        request.user = self.user
        resp = AView.as_view()(request)
        self.assertEqual(resp.status_code, 200)

    def test_single_permission_pass(self):
        """A single held permission (plain string) allows access."""
        class AView(PermissionRequiredMixin, EmptyResponseView):
            permission_required = 'auth_tests.add_customuser'

        request = self.factory.get('/rand')
        request.user = self.user
        resp = AView.as_view()(request)
        self.assertEqual(resp.status_code, 200)

    def test_permissioned_denied_redirect(self):
        """A missing permission redirects (302) by default."""
        class AView(PermissionRequiredMixin, EmptyResponseView):
            permission_required = [
                'auth_tests.add_customuser', 'auth_tests.change_customuser', 'non-existent-permission',
            ]

        request = self.factory.get('/rand')
        request.user = self.user
        resp = AView.as_view()(request)
        self.assertEqual(resp.status_code, 302)

    def test_permissioned_denied_exception_raised(self):
        """With raise_exception = True a missing permission raises
        PermissionDenied instead of redirecting."""
        class AView(PermissionRequiredMixin, EmptyResponseView):
            permission_required = [
                'auth_tests.add_customuser', 'auth_tests.change_customuser', 'non-existent-permission',
            ]
            raise_exception = True

        request = self.factory.get('/rand')
        request.user = self.user
        with self.assertRaises(PermissionDenied):
            AView.as_view()(request)
| bsd-3-clause |
kumar-physics/eppic-pipeline | src/EPPICpipeline/CheckDatabase.py | 1 | 5808 | '''
Created on Jan 22, 2015
@author: baskaran_k
'''
from commands import getstatusoutput
import MySQLdb
import sys
from string import atof
class CheckDatabase:
def __init__(self,database,outfolder):
self.outFolder=outfolder
self.mysqluser='eppicweb'
self.mysqlhost='eppic01.psi.ch'
self.mysqlpasswd=''
self.database=database
self.cnx=MySQLdb.connect(user=self.mysqluser,host=self.mysqlhost,passwd=self.mysqlpasswd,db=self.database,local_infile=True)
self.cifrepo='' #path to cifrepo that contains mmcif file
self.getPDBlist()
self.getDatabaselist()
self.findMissing()
self.printOutput()
self.writeFiles()
def getPDBlist(self):
getlist=getstatusoutput("ls %s/ | sed 's/.cif.gz//g'"%(self.cifrepo))
if getlist[0]:
print "Can't get the list from cifrep"
sys.exit(1)
else:
self.pdblist=getlist[1].split("\n")
def getDatabaselist(self):
c=self.cnx.cursor()
mysqlcmd="select inputName from Job where submissionId<0 "
c.execute(mysqlcmd)
self.eppiclist=[i[0] for i in c.fetchall()]
mysqlcmd="select inputName from Job where submissionId<0 and status != 'Finished'"
c.execute(mysqlcmd)
self.eppicErrorlist=[i[0] for i in c.fetchall()]
def findMissing(self):
self.missing=list(set(self.pdblist)-set(self.eppiclist))
self.obsolete=list(set(self.eppiclist)-set(self.pdblist))
def printOutput(self):
print "\tTotal No. of entries in the PDB repo \t\t%d"%(len(self.pdblist))
print "\tTotal No. of entries in the EPPIC db \t\t%d"%(len(self.eppiclist))
print "\tNo. of entries with error in EPPIC db \t\t%s"%(len(self.eppicErrorlist))
print "\tNo. of entries missing in EPPIC db \t\t%s"%(len(self.missing))
print "\tNo. of obsoleted entries exists in EPPIC db \t%s"%(len(self.obsolete))
def writeFiles(self):
open("%s/pdbrepo.list"%(self.outFolder),'w').write("%s\n"%("\n".join(self.pdblist)))
open("%s/eppicdb.list"%(self.outFolder),'w').write("%s\n"%("\n".join(self.eppiclist)))
open("%s/eppicerror.list"%(self.outFolder),'w').write("%s\n"%("\n".join(self.eppicErrorlist)))
open("%s/eppicmissing.list"%(self.outFolder),'w').write("%s\n"%("\n".join(self.missing)))
open("%s/obsolete.list"%(self.outFolder),'w').write("%s\n"%("\n".join(self.obsolete)))
def runQuery(self,qqq):
c=self.cnx.cursor()
c.execute(qqq)
x=c.fetchall()[0][0]
return atof(x)
def interfaceGrowth(self):
c=self.cnx.cursor()
for year in range(1979,2015):
all="select count(*) from PdbInfo where releaseDate < '%s-12-31'"%(year)
intface="select count(*) from PdbInfo as p inner join Interface as i on p.pdbCode=i.pdbCode where p.releaseDate < '%s-12-31'"%(year)
ifacecl="select count(*) from PdbInfo as p inner join InterfaceCluster as i on p.pdbCode=i.pdbCode where p.releaseDate < '%s-12-31'"%(year)
allx="select count(*) from PdbInfo where releaseDate < '%s-12-31' and expMethod='X-RAY DIFFRACTION'"%(year)
intfacex="select count(*) from PdbInfo as p inner join Interface as i on p.pdbCode=i.pdbCode where p.releaseDate < '%s-12-31' and p.expMethod='X-RAY DIFFRACTION'"%(year)
ifaceclx="select count(*) from PdbInfo as p inner join InterfaceCluster as i on p.pdbCode=i.pdbCode where p.releaseDate < '%s-12-31' and p.expMethod='X-RAY DIFFRACTION'"%(year)
sall="select count(*) from PdbInfo where releaseDate < '%s-12-31' and releaseDate > '%s-12-31'"%(year,year-1)
sintface="select count(*) from PdbInfo as p inner join Interface as i on p.pdbCode=i.pdbCode where p.releaseDate < '%s-12-31' and releaseDate > '%s-12-31'"%(year,year-1)
sifacecl="select count(*) from PdbInfo as p inner join InterfaceCluster as i on p.pdbCode=i.pdbCode where p.releaseDate < '%s-12-31' and releaseDate > '%s-12-31'"%(year,year-1)
sallx="select count(*) from PdbInfo where releaseDate < '%s-12-31' and releaseDate > '%s-12-31' and expMethod='X-RAY DIFFRACTION'"%(year,year-1)
sintfacex="select count(*) from PdbInfo as p inner join Interface as i on p.pdbCode=i.pdbCode where p.releaseDate < '%s-12-31' and releaseDate > '%s-12-31' and p.expMethod='X-RAY DIFFRACTION'"%(year,year-1)
sifaceclx="select count(*) from PdbInfo as p inner join InterfaceCluster as i on p.pdbCode=i.pdbCode where p.releaseDate < '%s-12-31' and releaseDate > '%s-12-31' and p.expMethod='X-RAY DIFFRACTION'"%(year,year-1)
a=self.runQuery(all)
i1=self.runQuery(intface)
i2=self.runQuery(ifacecl)
ax=self.runQuery(allx)
i1x=self.runQuery(intfacex)
i2x=self.runQuery(ifaceclx)
sa=self.runQuery(sall)
si1=self.runQuery(sintface)
si2=self.runQuery(sifacecl)
sax=self.runQuery(sallx)
si1x=self.runQuery(sintfacex)
si2x=self.runQuery(sifaceclx)
print "%d\t%.2f\t%.2f\t%.2f\t%.2f\t%.2f\t%.2f\t%.2f\t%.2f\t%.2f\t%.2f\t%.2f\t%.2f\t%.2f\t%.2f\t%.2f\t%.2f\t%.2f\t%.2f\t%.2f\t%.2f"%\
(year,a,i1,i2,i1/a,i2/a,ax,i1x,i2x,i1x/ax,i2x/a,sa,si1,si2,si1/sa,si2/sa,sax,si1x,si2x,si1x/sax,si2x/sax)
if __name__ == "__main__":
    # Usage: python CheckDatabase.py <eppicdb name> <output dir>
    # p=CheckDatabase("eppic_2015_01",'/media/baskaran_k/data/test')
    # p.interfaceGrowth()
    if len(sys.argv) == 3:
        db = sys.argv[1]
        path = sys.argv[2]
        # Constructing the object runs the full consistency check.
        p = CheckDatabase(db, path)
    else:
        print "Usage: python %s <eppicdb name to test> <path to output dir>" % (sys.argv[0])
| gpl-2.0 |
devs4v/devs4v-information-retrieval15 | project/venv/lib/python2.7/site-packages/django/contrib/sessions/backends/signed_cookies.py | 88 | 2797 | from django.conf import settings
from django.contrib.sessions.backends.base import SessionBase
from django.core import signing
class SessionStore(SessionBase):
    """Cookie-based session backend: all session data is stored on the
    client, serialized and cryptographically signed into the session key
    itself. No server-side storage is involved."""

    def load(self):
        """
        We load the data from the key itself instead of fetching from
        some external data store. Opposite of _get_session_key(),
        raises BadSignature if signature fails.
        """
        try:
            return signing.loads(self.session_key,
                serializer=self.serializer,
                # This doesn't handle non-default expiry dates, see #19201
                max_age=settings.SESSION_COOKIE_AGE,
                salt='django.contrib.sessions.backends.signed_cookies')
        except (signing.BadSignature, ValueError):
            # Tampered or malformed cookie: start a fresh, empty session.
            self.create()
        return {}

    def create(self):
        """
        To create a new key, we simply make sure that the modified flag is set
        so that the cookie is set on the client for the current request.
        """
        self.modified = True

    def save(self, must_create=False):
        """
        To save, we get the session key as a securely signed string and then
        set the modified flag so that the cookie is set on the client for the
        current request.
        """
        self._session_key = self._get_session_key()
        self.modified = True

    def exists(self, session_key=None):
        """
        This method makes sense when you're talking to a shared resource, but
        it doesn't matter when you're storing the information in the client's
        cookie.
        """
        return False

    def delete(self, session_key=None):
        """
        To delete, we clear the session key and the underlying data structure
        and set the modified flag so that the cookie is set on the client for
        the current request.
        """
        self._session_key = ''
        self._session_cache = {}
        self.modified = True

    def cycle_key(self):
        """
        Keeps the same data but with a new key. To do this, we just have to
        call ``save()`` and it will automatically save a cookie with a new key
        at the end of the request.
        """
        self.save()

    def _get_session_key(self):
        """
        Most session backends don't need to override this method, but we do,
        because instead of generating a random string, we want to actually
        generate a secure url-safe Base64-encoded string of data as our
        session key.
        """
        session_cache = getattr(self, '_session_cache', {})
        return signing.dumps(session_cache, compress=True,
            salt='django.contrib.sessions.backends.signed_cookies',
            serializer=self.serializer)

    @classmethod
    def clear_expired(cls):
        # Nothing to clear: expiry is enforced by the signature max_age
        # at load time, and no data lives on the server.
        pass
| mit |
yugangzhang/chxanalys | versioneer.py | 367 | 62474 |
# Version: 0.15
"""
The Versioneer
==============
* like a rocketeer, but for versions!
* https://github.com/warner/python-versioneer
* Brian Warner
* License: Public Domain
* Compatible With: python2.6, 2.7, 3.2, 3.3, 3.4, and pypy
* [![Latest Version]
(https://pypip.in/version/versioneer/badge.svg?style=flat)
](https://pypi.python.org/pypi/versioneer/)
* [![Build Status]
(https://travis-ci.org/warner/python-versioneer.png?branch=master)
](https://travis-ci.org/warner/python-versioneer)
This is a tool for managing a recorded version number in distutils-based
python projects. The goal is to remove the tedious and error-prone "update
the embedded version string" step from your release process. Making a new
release should be as easy as recording a new tag in your version-control
system, and maybe making new tarballs.
## Quick Install
* `pip install versioneer` to somewhere to your $PATH
* add a `[versioneer]` section to your setup.cfg (see below)
* run `versioneer install` in your source tree, commit the results
## Version Identifiers
Source trees come from a variety of places:
* a version-control system checkout (mostly used by developers)
* a nightly tarball, produced by build automation
* a snapshot tarball, produced by a web-based VCS browser, like github's
"tarball from tag" feature
* a release tarball, produced by "setup.py sdist", distributed through PyPI
Within each source tree, the version identifier (either a string or a number,
this tool is format-agnostic) can come from a variety of places:
* ask the VCS tool itself, e.g. "git describe" (for checkouts), which knows
about recent "tags" and an absolute revision-id
* the name of the directory into which the tarball was unpacked
* an expanded VCS keyword ($Id$, etc)
* a `_version.py` created by some earlier build step
For released software, the version identifier is closely related to a VCS
tag. Some projects use tag names that include more than just the version
string (e.g. "myproject-1.2" instead of just "1.2"), in which case the tool
needs to strip the tag prefix to extract the version identifier. For
unreleased software (between tags), the version identifier should provide
enough information to help developers recreate the same tree, while also
giving them an idea of roughly how old the tree is (after version 1.2, before
version 1.3). Many VCS systems can report a description that captures this,
for example `git describe --tags --dirty --always` reports things like
"0.7-1-g574ab98-dirty" to indicate that the checkout is one revision past the
0.7 tag, has a unique revision id of "574ab98", and is "dirty" (it has
uncommitted changes.
The version identifier is used for multiple purposes:
* to allow the module to self-identify its version: `myproject.__version__`
* to choose a name and prefix for a 'setup.py sdist' tarball
## Theory of Operation
Versioneer works by adding a special `_version.py` file into your source
tree, where your `__init__.py` can import it. This `_version.py` knows how to
dynamically ask the VCS tool for version information at import time.
`_version.py` also contains `$Revision$` markers, and the installation
process marks `_version.py` to have this marker rewritten with a tag name
during the `git archive` command. As a result, generated tarballs will
contain enough information to get the proper version.
To allow `setup.py` to compute a version too, a `versioneer.py` is added to
the top level of your source tree, next to `setup.py` and the `setup.cfg`
that configures it. This overrides several distutils/setuptools commands to
compute the version when invoked, and changes `setup.py build` and `setup.py
sdist` to replace `_version.py` with a small static file that contains just
the generated version data.
## Installation
First, decide on values for the following configuration variables:
* `VCS`: the version control system you use. Currently accepts "git".
* `style`: the style of version string to be produced. See "Styles" below for
details. Defaults to "pep440", which looks like
`TAG[+DISTANCE.gSHORTHASH[.dirty]]`.
* `versionfile_source`:
A project-relative pathname into which the generated version strings should
be written. This is usually a `_version.py` next to your project's main
`__init__.py` file, so it can be imported at runtime. If your project uses
`src/myproject/__init__.py`, this should be `src/myproject/_version.py`.
This file should be checked in to your VCS as usual: the copy created below
by `setup.py setup_versioneer` will include code that parses expanded VCS
keywords in generated tarballs. The 'build' and 'sdist' commands will
replace it with a copy that has just the calculated version string.
This must be set even if your project does not have any modules (and will
therefore never import `_version.py`), since "setup.py sdist" -based trees
still need somewhere to record the pre-calculated version strings. Anywhere
in the source tree should do. If there is a `__init__.py` next to your
`_version.py`, the `setup.py setup_versioneer` command (described below)
will append some `__version__`-setting assignments, if they aren't already
present.
* `versionfile_build`:
Like `versionfile_source`, but relative to the build directory instead of
the source directory. These will differ when your setup.py uses
'package_dir='. If you have `package_dir={'myproject': 'src/myproject'}`,
then you will probably have `versionfile_build='myproject/_version.py'` and
`versionfile_source='src/myproject/_version.py'`.
If this is set to None, then `setup.py build` will not attempt to rewrite
any `_version.py` in the built tree. If your project does not have any
libraries (e.g. if it only builds a script), then you should use
`versionfile_build = None` and override `distutils.command.build_scripts`
to explicitly insert a copy of `versioneer.get_version()` into your
generated script.
* `tag_prefix`:
a string, like 'PROJECTNAME-', which appears at the start of all VCS tags.
If your tags look like 'myproject-1.2.0', then you should use
tag_prefix='myproject-'. If you use unprefixed tags like '1.2.0', this
should be an empty string.
* `parentdir_prefix`:
a optional string, frequently the same as tag_prefix, which appears at the
start of all unpacked tarball filenames. If your tarball unpacks into
'myproject-1.2.0', this should be 'myproject-'. To disable this feature,
just omit the field from your `setup.cfg`.
This tool provides one script, named `versioneer`. That script has one mode,
"install", which writes a copy of `versioneer.py` into the current directory
and runs `versioneer.py setup` to finish the installation.
To versioneer-enable your project:
* 1: Modify your `setup.cfg`, adding a section named `[versioneer]` and
populating it with the configuration values you decided earlier (note that
the option names are not case-sensitive):
````
[versioneer]
VCS = git
style = pep440
versionfile_source = src/myproject/_version.py
versionfile_build = myproject/_version.py
tag_prefix = ""
parentdir_prefix = myproject-
````
* 2: Run `versioneer install`. This will do the following:
* copy `versioneer.py` into the top of your source tree
* create `_version.py` in the right place (`versionfile_source`)
* modify your `__init__.py` (if one exists next to `_version.py`) to define
`__version__` (by calling a function from `_version.py`)
* modify your `MANIFEST.in` to include both `versioneer.py` and the
generated `_version.py` in sdist tarballs
`versioneer install` will complain about any problems it finds with your
`setup.py` or `setup.cfg`. Run it multiple times until you have fixed all
the problems.
* 3: add a `import versioneer` to your setup.py, and add the following
arguments to the setup() call:
version=versioneer.get_version(),
cmdclass=versioneer.get_cmdclass(),
* 4: commit these changes to your VCS. To make sure you won't forget,
`versioneer install` will mark everything it touched for addition using
`git add`. Don't forget to add `setup.py` and `setup.cfg` too.
## Post-Installation Usage
Once established, all uses of your tree from a VCS checkout should get the
current version string. All generated tarballs should include an embedded
version string (so users who unpack them will not need a VCS tool installed).
If you distribute your project through PyPI, then the release process should
boil down to two steps:
* 1: git tag 1.0
* 2: python setup.py register sdist upload
If you distribute it through github (i.e. users use github to generate
tarballs with `git archive`), the process is:
* 1: git tag 1.0
* 2: git push; git push --tags
Versioneer will report "0+untagged.NUMCOMMITS.gHASH" until your tree has at
least one tag in its history.
## Version-String Flavors
Code which uses Versioneer can learn about its version string at runtime by
importing `_version` from your main `__init__.py` file and running the
`get_versions()` function. From the "outside" (e.g. in `setup.py`), you can
import the top-level `versioneer.py` and run `get_versions()`.
Both functions return a dictionary with different flavors of version
information:
* `['version']`: A condensed version string, rendered using the selected
style. This is the most commonly used value for the project's version
string. The default "pep440" style yields strings like `0.11`,
`0.11+2.g1076c97`, or `0.11+2.g1076c97.dirty`. See the "Styles" section
below for alternative styles.
* `['full-revisionid']`: detailed revision identifier. For Git, this is the
full SHA1 commit id, e.g. "1076c978a8d3cfc70f408fe5974aa6c092c949ac".
* `['dirty']`: a boolean, True if the tree has uncommitted changes. Note that
this is only accurate if run in a VCS checkout, otherwise it is likely to
be False or None
* `['error']`: if the version string could not be computed, this will be set
to a string describing the problem, otherwise it will be None. It may be
useful to throw an exception in setup.py if this is set, to avoid e.g.
creating tarballs with a version string of "unknown".
Some variants are more useful than others. Including `full-revisionid` in a
bug report should allow developers to reconstruct the exact code being tested
(or indicate the presence of local changes that should be shared with the
developers). `version` is suitable for display in an "about" box or a CLI
`--version` output: it can be easily compared against release notes and lists
of bugs fixed in various releases.
The installer adds the following text to your `__init__.py` to place a basic
version in `YOURPROJECT.__version__`:
from ._version import get_versions
__version__ = get_versions()['version']
del get_versions
## Styles
The setup.cfg `style=` configuration controls how the VCS information is
rendered into a version string.
The default style, "pep440", produces a PEP440-compliant string, equal to the
un-prefixed tag name for actual releases, and containing an additional "local
version" section with more detail for in-between builds. For Git, this is
TAG[+DISTANCE.gHEX[.dirty]] , using information from `git describe --tags
--dirty --always`. For example "0.11+2.g1076c97.dirty" indicates that the
tree is like the "1076c97" commit but has uncommitted changes (".dirty"), and
that this commit is two revisions ("+2") beyond the "0.11" tag. For released
software (exactly equal to a known tag), the identifier will only contain the
stripped tag, e.g. "0.11".
Other styles are available. See details.md in the Versioneer source tree for
descriptions.
## Debugging
Versioneer tries to avoid fatal errors: if something goes wrong, it will tend
to return a version of "0+unknown". To investigate the problem, run `setup.py
version`, which will run the version-lookup code in a verbose mode, and will
display the full contents of `get_versions()` (including the `error` string,
which may help identify what went wrong).
## Updating Versioneer
To upgrade your project to a new release of Versioneer, do the following:
* install the new Versioneer (`pip install -U versioneer` or equivalent)
* edit `setup.cfg`, if necessary, to include any new configuration settings
indicated by the release notes
* re-run `versioneer install` in your source tree, to replace
`SRC/_version.py`
* commit any changed files
### Upgrading to 0.15
Starting with this version, Versioneer is configured with a `[versioneer]`
section in your `setup.cfg` file. Earlier versions required the `setup.py` to
set attributes on the `versioneer` module immediately after import. The new
version will refuse to run (raising an exception during import) until you
have provided the necessary `setup.cfg` section.
In addition, the Versioneer package provides an executable named
`versioneer`, and the installation process is driven by running `versioneer
install`. In 0.14 and earlier, the executable was named
`versioneer-installer` and was run without an argument.
### Upgrading to 0.14
0.14 changes the format of the version string. 0.13 and earlier used
hyphen-separated strings like "0.11-2-g1076c97-dirty". 0.14 and beyond use a
plus-separated "local version" section strings, with dot-separated
components, like "0.11+2.g1076c97". PEP440-strict tools did not like the old
format, but should be ok with the new one.
### Upgrading from 0.11 to 0.12
Nothing special.
### Upgrading from 0.10 to 0.11
You must add a `versioneer.VCS = "git"` to your `setup.py` before re-running
`setup.py setup_versioneer`. This will enable the use of additional
version-control systems (SVN, etc) in the future.
## Future Directions
This tool is designed to make it easily extended to other version-control
systems: all VCS-specific components are in separate directories like
src/git/ . The top-level `versioneer.py` script is assembled from these
components by running make-versioneer.py . In the future, make-versioneer.py
will take a VCS name as an argument, and will construct a version of
`versioneer.py` that is specific to the given VCS. It might also take the
configuration arguments that are currently provided manually during
installation by editing setup.py . Alternatively, it might go the other
direction and include code from all supported VCS systems, reducing the
number of intermediate scripts.
## License
To make Versioneer easier to embed, all its code is hereby released into the
public domain. The `_version.py` that it creates is also in the public
domain.
"""
from __future__ import print_function
try:
import configparser
except ImportError:
import ConfigParser as configparser
import errno
import json
import os
import re
import subprocess
import sys
class VersioneerConfig:
    """Bag of attributes holding the [versioneer] settings read from setup.cfg."""
    pass
def get_root():
    """Return the project-root directory, sanity-checking its contents.

    All versioneer commands must be run from the directory containing
    setup.py, setup.cfg, and versioneer.py (the current working directory);
    as a fallback, the directory holding sys.argv[0] is tried so that
    'python path/to/setup.py COMMAND' also works.

    Raises:
        VersioneerBadRootError: if neither candidate directory contains
            setup.py or versioneer.py.
    """
    root = os.path.realpath(os.path.abspath(os.getcwd()))
    setup_py = os.path.join(root, "setup.py")
    versioneer_py = os.path.join(root, "versioneer.py")
    if not (os.path.exists(setup_py) or os.path.exists(versioneer_py)):
        # allow 'python path/to/setup.py COMMAND'
        root = os.path.dirname(os.path.realpath(os.path.abspath(sys.argv[0])))
        setup_py = os.path.join(root, "setup.py")
        versioneer_py = os.path.join(root, "versioneer.py")
    if not (os.path.exists(setup_py) or os.path.exists(versioneer_py)):
        # bugfix: the message previously said "unable to run the project
        # root directory", which is not what this failure means
        err = ("Versioneer was unable to find the project root directory. "
               "Versioneer requires setup.py to be executed from "
               "its immediate directory (like 'python setup.py COMMAND'), "
               "or in a way that lets it use sys.argv[0] to find the root "
               "(like 'python path/to/setup.py COMMAND').")
        raise VersioneerBadRootError(err)
    try:
        # Certain runtime workflows (setup.py install/develop in a setuptools
        # tree) execute all dependencies in a single python process, so
        # "versioneer" may be imported multiple times, and python's shared
        # module-import table will cache the first one. So we can't use
        # os.path.dirname(__file__), as that will find whichever
        # versioneer.py was first imported, even in later projects.
        me = os.path.realpath(os.path.abspath(__file__))
        if os.path.splitext(me)[0] != os.path.splitext(versioneer_py)[0]:
            print("Warning: build in %s is using versioneer.py from %s"
                  % (os.path.dirname(me), versioneer_py))
    except NameError:
        pass
    return root
def get_config_from_root(root):
    """Read the [versioneer] section of ROOT/setup.cfg into a VersioneerConfig.

    May raise EnvironmentError (missing setup.cfg),
    configparser.NoSectionError (no [versioneer] section), or
    configparser.NoOptionError (missing "VCS="); see the module docstring
    at the top of versioneer.py for instructions on writing setup.cfg.
    """
    setup_cfg = os.path.join(root, "setup.cfg")
    parser = configparser.SafeConfigParser()
    with open(setup_cfg, "r") as handle:
        parser.readfp(handle)

    def opt(name):
        # optional settings come back as None instead of raising
        if parser.has_option("versioneer", name):
            return parser.get("versioneer", name)
        return None

    cfg = VersioneerConfig()
    cfg.VCS = parser.get("versioneer", "VCS")  # the only mandatory setting
    cfg.style = opt("style") or ""
    cfg.versionfile_source = opt("versionfile_source")
    cfg.versionfile_build = opt("versionfile_build")
    cfg.tag_prefix = opt("tag_prefix")
    cfg.parentdir_prefix = opt("parentdir_prefix")
    cfg.verbose = opt("verbose")
    return cfg
class NotThisMethod(Exception):
    """Signal that one particular version-extraction strategy does not apply."""
# these dictionaries contain VCS-specific tools
LONG_VERSION_PY = {}  # VCS name -> full _version.py template string
HANDLERS = {}  # VCS name -> {method name: handler function}
def register_vcs_handler(vcs, method):  # decorator
    """Return a decorator that files a function under HANDLERS[vcs][method]."""
    def decorate(f):
        # create the per-VCS sub-dict on first use, then store the handler
        HANDLERS.setdefault(vcs, {})[method] = f
        return f
    return decorate
def run_command(commands, args, cwd=None, verbose=False, hide_stderr=False):
    """Try each executable in *commands* with *args*; return stdout or None.

    The first candidate that can be launched wins (later names exist for
    windows, e.g. git.cmd).  Returns the stripped stdout text, or None when
    no candidate could be started or the process exited non-zero.
    """
    assert isinstance(commands, list)
    process = None
    for candidate in commands:
        dispcmd = str([candidate] + args)
        try:
            # remember shell=False, so use git.cmd on windows, not just git
            stderr_target = subprocess.PIPE if hide_stderr else None
            process = subprocess.Popen([candidate] + args, cwd=cwd,
                                       stdout=subprocess.PIPE,
                                       stderr=stderr_target)
            break
        except EnvironmentError:
            err = sys.exc_info()[1]  # py2-compatible way to grab the exception
            if err.errno == errno.ENOENT:
                continue  # this candidate is not installed; try the next
            if verbose:
                print("unable to run %s" % dispcmd)
                print(err)
            return None
    else:
        # loop fell through without break: nothing could be launched
        if verbose:
            print("unable to find command, tried %s" % (commands,))
        return None
    output = process.communicate()[0].strip()
    if sys.version_info[0] >= 3:
        output = output.decode()
    if process.returncode != 0:
        if verbose:
            print("unable to run %s (error)" % dispcmd)
        return None
    return output
LONG_VERSION_PY['git'] = '''
# This file helps to compute a version number in source trees obtained from
# git-archive tarball (such as those provided by githubs download-from-tag
# feature). Distribution tarballs (built by setup.py sdist) and build
# directories (produced by setup.py build) will contain a much shorter file
# that just contains the computed version number.
# This file is released into the public domain. Generated by
# versioneer-0.15 (https://github.com/warner/python-versioneer)
import errno
import os
import re
import subprocess
import sys
def get_keywords():
# these strings will be replaced by git during git-archive.
# setup.py/versioneer.py will grep for the variable names, so they must
# each be defined on a line of their own. _version.py will just call
# get_keywords().
git_refnames = "%(DOLLAR)sFormat:%%d%(DOLLAR)s"
git_full = "%(DOLLAR)sFormat:%%H%(DOLLAR)s"
keywords = {"refnames": git_refnames, "full": git_full}
return keywords
class VersioneerConfig:
pass
def get_config():
# these strings are filled in when 'setup.py versioneer' creates
# _version.py
cfg = VersioneerConfig()
cfg.VCS = "git"
cfg.style = "%(STYLE)s"
cfg.tag_prefix = "%(TAG_PREFIX)s"
cfg.parentdir_prefix = "%(PARENTDIR_PREFIX)s"
cfg.versionfile_source = "%(VERSIONFILE_SOURCE)s"
cfg.verbose = False
return cfg
class NotThisMethod(Exception):
pass
LONG_VERSION_PY = {}
HANDLERS = {}
def register_vcs_handler(vcs, method): # decorator
def decorate(f):
if vcs not in HANDLERS:
HANDLERS[vcs] = {}
HANDLERS[vcs][method] = f
return f
return decorate
def run_command(commands, args, cwd=None, verbose=False, hide_stderr=False):
assert isinstance(commands, list)
p = None
for c in commands:
try:
dispcmd = str([c] + args)
# remember shell=False, so use git.cmd on windows, not just git
p = subprocess.Popen([c] + args, cwd=cwd, stdout=subprocess.PIPE,
stderr=(subprocess.PIPE if hide_stderr
else None))
break
except EnvironmentError:
e = sys.exc_info()[1]
if e.errno == errno.ENOENT:
continue
if verbose:
print("unable to run %%s" %% dispcmd)
print(e)
return None
else:
if verbose:
print("unable to find command, tried %%s" %% (commands,))
return None
stdout = p.communicate()[0].strip()
if sys.version_info[0] >= 3:
stdout = stdout.decode()
if p.returncode != 0:
if verbose:
print("unable to run %%s (error)" %% dispcmd)
return None
return stdout
def versions_from_parentdir(parentdir_prefix, root, verbose):
# Source tarballs conventionally unpack into a directory that includes
# both the project name and a version string.
dirname = os.path.basename(root)
if not dirname.startswith(parentdir_prefix):
if verbose:
print("guessing rootdir is '%%s', but '%%s' doesn't start with "
"prefix '%%s'" %% (root, dirname, parentdir_prefix))
raise NotThisMethod("rootdir doesn't start with parentdir_prefix")
return {"version": dirname[len(parentdir_prefix):],
"full-revisionid": None,
"dirty": False, "error": None}
@register_vcs_handler("git", "get_keywords")
def git_get_keywords(versionfile_abs):
# the code embedded in _version.py can just fetch the value of these
# keywords. When used from setup.py, we don't want to import _version.py,
# so we do it with a regexp instead. This function is not used from
# _version.py.
keywords = {}
try:
f = open(versionfile_abs, "r")
for line in f.readlines():
if line.strip().startswith("git_refnames ="):
mo = re.search(r'=\s*"(.*)"', line)
if mo:
keywords["refnames"] = mo.group(1)
if line.strip().startswith("git_full ="):
mo = re.search(r'=\s*"(.*)"', line)
if mo:
keywords["full"] = mo.group(1)
f.close()
except EnvironmentError:
pass
return keywords
@register_vcs_handler("git", "keywords")
def git_versions_from_keywords(keywords, tag_prefix, verbose):
if not keywords:
raise NotThisMethod("no keywords at all, weird")
refnames = keywords["refnames"].strip()
if refnames.startswith("$Format"):
if verbose:
print("keywords are unexpanded, not using")
raise NotThisMethod("unexpanded keywords, not a git-archive tarball")
refs = set([r.strip() for r in refnames.strip("()").split(",")])
# starting in git-1.8.3, tags are listed as "tag: foo-1.0" instead of
# just "foo-1.0". If we see a "tag: " prefix, prefer those.
TAG = "tag: "
tags = set([r[len(TAG):] for r in refs if r.startswith(TAG)])
if not tags:
# Either we're using git < 1.8.3, or there really are no tags. We use
# a heuristic: assume all version tags have a digit. The old git %%d
# expansion behaves like git log --decorate=short and strips out the
# refs/heads/ and refs/tags/ prefixes that would let us distinguish
# between branches and tags. By ignoring refnames without digits, we
# filter out many common branch names like "release" and
# "stabilization", as well as "HEAD" and "master".
tags = set([r for r in refs if re.search(r'\d', r)])
if verbose:
print("discarding '%%s', no digits" %% ",".join(refs-tags))
if verbose:
print("likely tags: %%s" %% ",".join(sorted(tags)))
for ref in sorted(tags):
# sorting will prefer e.g. "2.0" over "2.0rc1"
if ref.startswith(tag_prefix):
r = ref[len(tag_prefix):]
if verbose:
print("picking %%s" %% r)
return {"version": r,
"full-revisionid": keywords["full"].strip(),
"dirty": False, "error": None
}
# no suitable tags, so version is "0+unknown", but full hex is still there
if verbose:
print("no suitable tags, using unknown + full revision id")
return {"version": "0+unknown",
"full-revisionid": keywords["full"].strip(),
"dirty": False, "error": "no suitable tags"}
@register_vcs_handler("git", "pieces_from_vcs")
def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command):
# this runs 'git' from the root of the source tree. This only gets called
# if the git-archive 'subst' keywords were *not* expanded, and
# _version.py hasn't already been rewritten with a short version string,
# meaning we're inside a checked out source tree.
if not os.path.exists(os.path.join(root, ".git")):
if verbose:
print("no .git in %%s" %% root)
raise NotThisMethod("no .git directory")
GITS = ["git"]
if sys.platform == "win32":
GITS = ["git.cmd", "git.exe"]
# if there is a tag, this yields TAG-NUM-gHEX[-dirty]
# if there are no tags, this yields HEX[-dirty] (no NUM)
describe_out = run_command(GITS, ["describe", "--tags", "--dirty",
"--always", "--long"],
cwd=root)
# --long was added in git-1.5.5
if describe_out is None:
raise NotThisMethod("'git describe' failed")
describe_out = describe_out.strip()
full_out = run_command(GITS, ["rev-parse", "HEAD"], cwd=root)
if full_out is None:
raise NotThisMethod("'git rev-parse' failed")
full_out = full_out.strip()
pieces = {}
pieces["long"] = full_out
pieces["short"] = full_out[:7] # maybe improved later
pieces["error"] = None
# parse describe_out. It will be like TAG-NUM-gHEX[-dirty] or HEX[-dirty]
# TAG might have hyphens.
git_describe = describe_out
# look for -dirty suffix
dirty = git_describe.endswith("-dirty")
pieces["dirty"] = dirty
if dirty:
git_describe = git_describe[:git_describe.rindex("-dirty")]
# now we have TAG-NUM-gHEX or HEX
if "-" in git_describe:
# TAG-NUM-gHEX
mo = re.search(r'^(.+)-(\d+)-g([0-9a-f]+)$', git_describe)
if not mo:
# unparseable. Maybe git-describe is misbehaving?
pieces["error"] = ("unable to parse git-describe output: '%%s'"
%% describe_out)
return pieces
# tag
full_tag = mo.group(1)
if not full_tag.startswith(tag_prefix):
if verbose:
fmt = "tag '%%s' doesn't start with prefix '%%s'"
print(fmt %% (full_tag, tag_prefix))
pieces["error"] = ("tag '%%s' doesn't start with prefix '%%s'"
%% (full_tag, tag_prefix))
return pieces
pieces["closest-tag"] = full_tag[len(tag_prefix):]
# distance: number of commits since tag
pieces["distance"] = int(mo.group(2))
# commit: short hex revision ID
pieces["short"] = mo.group(3)
else:
# HEX: no tags
pieces["closest-tag"] = None
count_out = run_command(GITS, ["rev-list", "HEAD", "--count"],
cwd=root)
pieces["distance"] = int(count_out) # total number of commits
return pieces
def plus_or_dot(pieces):
if "+" in pieces.get("closest-tag", ""):
return "."
return "+"
def render_pep440(pieces):
# now build up version string, with post-release "local version
# identifier". Our goal: TAG[+DISTANCE.gHEX[.dirty]] . Note that if you
# get a tagged build and then dirty it, you'll get TAG+0.gHEX.dirty
# exceptions:
# 1: no tags. git_describe was just HEX. 0+untagged.DISTANCE.gHEX[.dirty]
if pieces["closest-tag"]:
rendered = pieces["closest-tag"]
if pieces["distance"] or pieces["dirty"]:
rendered += plus_or_dot(pieces)
rendered += "%%d.g%%s" %% (pieces["distance"], pieces["short"])
if pieces["dirty"]:
rendered += ".dirty"
else:
# exception #1
rendered = "0+untagged.%%d.g%%s" %% (pieces["distance"],
pieces["short"])
if pieces["dirty"]:
rendered += ".dirty"
return rendered
def render_pep440_pre(pieces):
# TAG[.post.devDISTANCE] . No -dirty
# exceptions:
# 1: no tags. 0.post.devDISTANCE
if pieces["closest-tag"]:
rendered = pieces["closest-tag"]
if pieces["distance"]:
rendered += ".post.dev%%d" %% pieces["distance"]
else:
# exception #1
rendered = "0.post.dev%%d" %% pieces["distance"]
return rendered
def render_pep440_post(pieces):
# TAG[.postDISTANCE[.dev0]+gHEX] . The ".dev0" means dirty. Note that
# .dev0 sorts backwards (a dirty tree will appear "older" than the
# corresponding clean one), but you shouldn't be releasing software with
# -dirty anyways.
# exceptions:
# 1: no tags. 0.postDISTANCE[.dev0]
if pieces["closest-tag"]:
rendered = pieces["closest-tag"]
if pieces["distance"] or pieces["dirty"]:
rendered += ".post%%d" %% pieces["distance"]
if pieces["dirty"]:
rendered += ".dev0"
rendered += plus_or_dot(pieces)
rendered += "g%%s" %% pieces["short"]
else:
# exception #1
rendered = "0.post%%d" %% pieces["distance"]
if pieces["dirty"]:
rendered += ".dev0"
rendered += "+g%%s" %% pieces["short"]
return rendered
def render_pep440_old(pieces):
# TAG[.postDISTANCE[.dev0]] . The ".dev0" means dirty.
# exceptions:
# 1: no tags. 0.postDISTANCE[.dev0]
if pieces["closest-tag"]:
rendered = pieces["closest-tag"]
if pieces["distance"] or pieces["dirty"]:
rendered += ".post%%d" %% pieces["distance"]
if pieces["dirty"]:
rendered += ".dev0"
else:
# exception #1
rendered = "0.post%%d" %% pieces["distance"]
if pieces["dirty"]:
rendered += ".dev0"
return rendered
def render_git_describe(pieces):
# TAG[-DISTANCE-gHEX][-dirty], like 'git describe --tags --dirty
# --always'
# exceptions:
# 1: no tags. HEX[-dirty] (note: no 'g' prefix)
if pieces["closest-tag"]:
rendered = pieces["closest-tag"]
if pieces["distance"]:
rendered += "-%%d-g%%s" %% (pieces["distance"], pieces["short"])
else:
# exception #1
rendered = pieces["short"]
if pieces["dirty"]:
rendered += "-dirty"
return rendered
def render_git_describe_long(pieces):
# TAG-DISTANCE-gHEX[-dirty], like 'git describe --tags --dirty
# --always -long'. The distance/hash is unconditional.
# exceptions:
# 1: no tags. HEX[-dirty] (note: no 'g' prefix)
if pieces["closest-tag"]:
rendered = pieces["closest-tag"]
rendered += "-%%d-g%%s" %% (pieces["distance"], pieces["short"])
else:
# exception #1
rendered = pieces["short"]
if pieces["dirty"]:
rendered += "-dirty"
return rendered
def render(pieces, style):
if pieces["error"]:
return {"version": "unknown",
"full-revisionid": pieces.get("long"),
"dirty": None,
"error": pieces["error"]}
if not style or style == "default":
style = "pep440" # the default
if style == "pep440":
rendered = render_pep440(pieces)
elif style == "pep440-pre":
rendered = render_pep440_pre(pieces)
elif style == "pep440-post":
rendered = render_pep440_post(pieces)
elif style == "pep440-old":
rendered = render_pep440_old(pieces)
elif style == "git-describe":
rendered = render_git_describe(pieces)
elif style == "git-describe-long":
rendered = render_git_describe_long(pieces)
else:
raise ValueError("unknown style '%%s'" %% style)
return {"version": rendered, "full-revisionid": pieces["long"],
"dirty": pieces["dirty"], "error": None}
def get_versions():
# I am in _version.py, which lives at ROOT/VERSIONFILE_SOURCE. If we have
# __file__, we can work backwards from there to the root. Some
# py2exe/bbfreeze/non-CPython implementations don't do __file__, in which
# case we can only use expanded keywords.
cfg = get_config()
verbose = cfg.verbose
try:
return git_versions_from_keywords(get_keywords(), cfg.tag_prefix,
verbose)
except NotThisMethod:
pass
try:
root = os.path.realpath(__file__)
# versionfile_source is the relative path from the top of the source
# tree (where the .git directory might live) to this file. Invert
# this to find the root from __file__.
for i in cfg.versionfile_source.split('/'):
root = os.path.dirname(root)
except NameError:
return {"version": "0+unknown", "full-revisionid": None,
"dirty": None,
"error": "unable to find root of source tree"}
try:
pieces = git_pieces_from_vcs(cfg.tag_prefix, root, verbose)
return render(pieces, cfg.style)
except NotThisMethod:
pass
try:
if cfg.parentdir_prefix:
return versions_from_parentdir(cfg.parentdir_prefix, root, verbose)
except NotThisMethod:
pass
return {"version": "0+unknown", "full-revisionid": None,
"dirty": None,
"error": "unable to compute version"}
'''
@register_vcs_handler("git", "get_keywords")
def git_get_keywords(versionfile_abs):
    """Extract the git-archive substitution keywords from a _version.py file.

    Returns a dict with whichever of "refnames" and "full" could be found;
    an unreadable file yields an empty dict.
    """
    # the code embedded in _version.py can just fetch the value of these
    # keywords. When used from setup.py, we don't want to import _version.py,
    # so we do it with a regexp instead. This function is not used from
    # _version.py.
    keywords = {}
    try:
        # use a context manager so the handle is closed even if parsing
        # raises (the old open()/close() pair leaked it on errors)
        with open(versionfile_abs, "r") as f:
            for line in f.readlines():
                if line.strip().startswith("git_refnames ="):
                    mo = re.search(r'=\s*"(.*)"', line)
                    if mo:
                        keywords["refnames"] = mo.group(1)
                if line.strip().startswith("git_full ="):
                    mo = re.search(r'=\s*"(.*)"', line)
                    if mo:
                        keywords["full"] = mo.group(1)
    except EnvironmentError:
        pass
    return keywords
@register_vcs_handler("git", "keywords")
def git_versions_from_keywords(keywords, tag_prefix, verbose):
    """Build a version dict from expanded git-archive keyword strings.

    Raises NotThisMethod when the keywords are absent or unexpanded.
    """
    if not keywords:
        raise NotThisMethod("no keywords at all, weird")
    refnames = keywords["refnames"].strip()
    if refnames.startswith("$Format"):
        if verbose:
            print("keywords are unexpanded, not using")
        raise NotThisMethod("unexpanded keywords, not a git-archive tarball")
    refs = set(r.strip() for r in refnames.strip("()").split(","))
    # starting in git-1.8.3, tags are listed as "tag: foo-1.0" instead of
    # just "foo-1.0". If we see a "tag: " prefix, prefer those.
    TAG = "tag: "
    tags = set(r[len(TAG):] for r in refs if r.startswith(TAG))
    if not tags:
        # Either we're using git < 1.8.3, or there really are no tags. We use
        # a heuristic: assume all version tags have a digit. The old git %d
        # expansion behaves like git log --decorate=short and strips out the
        # refs/heads/ and refs/tags/ prefixes that would let us distinguish
        # between branches and tags. By ignoring refnames without digits, we
        # filter out many common branch names like "release" and
        # "stabilization", as well as "HEAD" and "master".
        tags = set(r for r in refs if re.search(r'\d', r))
        if verbose:
            print("discarding '%s', no digits" % ",".join(refs - tags))
    if verbose:
        print("likely tags: %s" % ",".join(sorted(tags)))
    for ref in sorted(tags):
        # sorting will prefer e.g. "2.0" over "2.0rc1"
        if not ref.startswith(tag_prefix):
            continue
        r = ref[len(tag_prefix):]
        if verbose:
            print("picking %s" % r)
        return {"version": r,
                "full-revisionid": keywords["full"].strip(),
                "dirty": False, "error": None}
    # no suitable tags, so version is "0+unknown", but full hex is still there
    if verbose:
        print("no suitable tags, using unknown + full revision id")
    return {"version": "0+unknown",
            "full-revisionid": keywords["full"].strip(),
            "dirty": False, "error": "no suitable tags"}
@register_vcs_handler("git", "pieces_from_vcs")
def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command):
    """Interrogate a git checkout under *root* for version "pieces".

    Returns a dict with keys "long", "short", "error", "dirty",
    "closest-tag" and "distance", parsed from 'git describe' /
    'git rev-parse'.  Raises NotThisMethod when *root* has no .git
    directory or the git commands fail.
    """
    # this runs 'git' from the root of the source tree. This only gets called
    # if the git-archive 'subst' keywords were *not* expanded, and
    # _version.py hasn't already been rewritten with a short version string,
    # meaning we're inside a checked out source tree.
    if not os.path.exists(os.path.join(root, ".git")):
        if verbose:
            print("no .git in %s" % root)
        raise NotThisMethod("no .git directory")
    GITS = ["git"]
    if sys.platform == "win32":
        # run_command uses shell=False, so on windows we need the wrappers
        GITS = ["git.cmd", "git.exe"]
    # if there is a tag, this yields TAG-NUM-gHEX[-dirty]
    # if there are no tags, this yields HEX[-dirty] (no NUM)
    describe_out = run_command(GITS, ["describe", "--tags", "--dirty",
                                      "--always", "--long"],
                               cwd=root)
    # --long was added in git-1.5.5
    if describe_out is None:
        raise NotThisMethod("'git describe' failed")
    describe_out = describe_out.strip()
    full_out = run_command(GITS, ["rev-parse", "HEAD"], cwd=root)
    if full_out is None:
        raise NotThisMethod("'git rev-parse' failed")
    full_out = full_out.strip()
    pieces = {}
    pieces["long"] = full_out
    pieces["short"] = full_out[:7]  # maybe improved later
    pieces["error"] = None
    # parse describe_out. It will be like TAG-NUM-gHEX[-dirty] or HEX[-dirty]
    # TAG might have hyphens.
    git_describe = describe_out
    # look for -dirty suffix
    dirty = git_describe.endswith("-dirty")
    pieces["dirty"] = dirty
    if dirty:
        git_describe = git_describe[:git_describe.rindex("-dirty")]
    # now we have TAG-NUM-gHEX or HEX
    if "-" in git_describe:
        # TAG-NUM-gHEX
        mo = re.search(r'^(.+)-(\d+)-g([0-9a-f]+)$', git_describe)
        if not mo:
            # unparseable. Maybe git-describe is misbehaving?
            pieces["error"] = ("unable to parse git-describe output: '%s'"
                               % describe_out)
            return pieces
        # tag
        full_tag = mo.group(1)
        if not full_tag.startswith(tag_prefix):
            if verbose:
                fmt = "tag '%s' doesn't start with prefix '%s'"
                print(fmt % (full_tag, tag_prefix))
            pieces["error"] = ("tag '%s' doesn't start with prefix '%s'"
                               % (full_tag, tag_prefix))
            return pieces
        pieces["closest-tag"] = full_tag[len(tag_prefix):]
        # distance: number of commits since tag
        pieces["distance"] = int(mo.group(2))
        # commit: short hex revision ID
        pieces["short"] = mo.group(3)
    else:
        # HEX: no tags
        pieces["closest-tag"] = None
        count_out = run_command(GITS, ["rev-list", "HEAD", "--count"],
                                cwd=root)
        pieces["distance"] = int(count_out)  # total number of commits
    return pieces
def do_vcs_install(manifest_in, versionfile_source, ipy):
    """git-add the files versioneer touched and ensure the export-subst attr.

    Adds *manifest_in*, *versionfile_source*, *ipy* (if any), versioneer.py
    itself, and — when missing — a ".gitattributes" entry marking
    *versionfile_source* with export-subst so git-archive expands keywords.
    """
    GITS = ["git"]
    if sys.platform == "win32":
        # run_command uses shell=False, so the .cmd/.exe wrappers are needed
        GITS = ["git.cmd", "git.exe"]
    files = [manifest_in, versionfile_source]
    if ipy:
        files.append(ipy)
    try:
        me = __file__
        if me.endswith(".pyc") or me.endswith(".pyo"):
            me = os.path.splitext(me)[0] + ".py"
        versioneer_file = os.path.relpath(me)
    except NameError:
        versioneer_file = "versioneer.py"
    files.append(versioneer_file)
    present = False
    try:
        # use context managers so the handles are closed even on errors
        # (the old open()/close() pairs leaked them if parsing raised)
        with open(".gitattributes", "r") as f:
            for line in f.readlines():
                if line.strip().startswith(versionfile_source):
                    if "export-subst" in line.strip().split()[1:]:
                        present = True
    except EnvironmentError:
        pass
    if not present:
        with open(".gitattributes", "a+") as f:
            f.write("%s export-subst\n" % versionfile_source)
        files.append(".gitattributes")
    run_command(GITS, ["add", "--"] + files)
def versions_from_parentdir(parentdir_prefix, root, verbose):
    """Infer the version from the name of the directory *root* unpacked into.

    Source tarballs conventionally unpack into a directory that includes
    both the project name and a version string; raises NotThisMethod when
    the directory name lacks *parentdir_prefix*.
    """
    dirname = os.path.basename(root)
    if dirname.startswith(parentdir_prefix):
        return {"version": dirname[len(parentdir_prefix):],
                "full-revisionid": None,
                "dirty": False, "error": None}
    if verbose:
        print("guessing rootdir is '%s', but '%s' doesn't start with "
              "prefix '%s'" % (root, dirname, parentdir_prefix))
    raise NotThisMethod("rootdir doesn't start with parentdir_prefix")
SHORT_VERSION_PY = """
# This file was generated by 'versioneer.py' (0.15) from
# revision-control system data, or from the parent directory name of an
# unpacked source archive. Distribution tarballs contain a pre-generated copy
# of this file.
import json
import sys
version_json = '''
%s
''' # END VERSION_JSON
def get_versions():
return json.loads(version_json)
"""
def versions_from_file(filename):
    """Recover the version dict embedded in a generated _version.py file.

    Raises NotThisMethod when the file is unreadable or does not contain
    a version_json block.
    """
    marker = r"version_json = '''\n(.*)''' # END VERSION_JSON"
    try:
        with open(filename) as handle:
            text = handle.read()
    except EnvironmentError:
        raise NotThisMethod("unable to read _version.py")
    match = re.search(marker, text, re.M | re.S)
    if match is None:
        raise NotThisMethod("no version_json in _version.py")
    return json.loads(match.group(1))
def write_to_version_file(filename, versions):
    """Overwrite *filename* with SHORT_VERSION_PY rendered around *versions*.

    *versions* is the dict produced by render()/versions_from_*; it is
    embedded as sorted, indented JSON.
    """
    # remove any stale copy first; guard the unlink so a missing file
    # (first-ever write) no longer raises OSError
    if os.path.exists(filename):
        os.unlink(filename)
    contents = json.dumps(versions, sort_keys=True,
                          indent=1, separators=(",", ": "))
    with open(filename, "w") as f:
        f.write(SHORT_VERSION_PY % contents)
    print("set %s to '%s'" % (filename, versions["version"]))
def plus_or_dot(pieces):
    """Return the local-version separator: "." if the closest tag already
    contains a "+" (so the PEP 440 local segment was started), else "+".

    Robustness fix: pieces.get("closest-tag", "") returns None when the key
    exists with value None, making `"+" in ...` raise TypeError; coerce
    None to "" instead.
    """
    if "+" in (pieces.get("closest-tag") or ""):
        return "."
    return "+"
def render_pep440(pieces):
    """Render TAG[+DISTANCE.gHEX[.dirty]] — PEP 440 with a local segment.

    Note that a tagged build that is then dirtied renders TAG+0.gHEX.dirty.
    Exception: with no tags, git_describe was just HEX, so render
    0+untagged.DISTANCE.gHEX[.dirty].
    """
    tag = pieces["closest-tag"]
    if not tag:
        # exception #1: no tags at all
        rendered = "0+untagged.%d.g%s" % (pieces["distance"],
                                          pieces["short"])
        if pieces["dirty"]:
            rendered += ".dirty"
        return rendered
    rendered = tag
    if pieces["distance"] or pieces["dirty"]:
        rendered += plus_or_dot(pieces)
        rendered += "%d.g%s" % (pieces["distance"], pieces["short"])
        if pieces["dirty"]:
            rendered += ".dirty"
    return rendered
def render_pep440_pre(pieces):
    """Render TAG[.post.devDISTANCE]; never appends -dirty.

    Exception: with no tags, render 0.post.devDISTANCE.
    """
    tag = pieces["closest-tag"]
    if not tag:
        # exception #1: no tags at all
        return "0.post.dev%d" % pieces["distance"]
    if pieces["distance"]:
        return tag + ".post.dev%d" % pieces["distance"]
    return tag
def render_pep440_post(pieces):
    """Render TAG[.postDISTANCE[.dev0]+gHEX]; the ".dev0" marks a dirty tree.

    Note that .dev0 sorts backwards (a dirty tree appears "older" than the
    corresponding clean one), but you shouldn't be releasing software with
    -dirty anyway.  Exception: with no tags, render 0.postDISTANCE[.dev0]+gHEX.
    """
    tag = pieces["closest-tag"]
    if not tag:
        # exception #1: no tags at all
        rendered = "0.post%d" % pieces["distance"]
        if pieces["dirty"]:
            rendered += ".dev0"
        return rendered + "+g%s" % pieces["short"]
    rendered = tag
    if pieces["distance"] or pieces["dirty"]:
        rendered += ".post%d" % pieces["distance"]
        if pieces["dirty"]:
            rendered += ".dev0"
        rendered += plus_or_dot(pieces)
        rendered += "g%s" % pieces["short"]
    return rendered
def render_pep440_old(pieces):
    """Render TAG[.postDISTANCE[.dev0]]; the ".dev0" marks a dirty tree.

    Exception: with no tags, render 0.postDISTANCE[.dev0].
    """
    base = pieces["closest-tag"]
    if base:
        if not (pieces["distance"] or pieces["dirty"]):
            # exactly on a clean tag: just the tag itself
            return base
    else:
        # exception #1: no tags at all
        base = "0"
    rendered = base + ".post%d" % pieces["distance"]
    if pieces["dirty"]:
        rendered += ".dev0"
    return rendered
def render_git_describe(pieces):
    """Render TAG[-DISTANCE-gHEX][-dirty], mimicking
    'git describe --tags --dirty --always'.

    Exception: with no tags, just HEX[-dirty] (note: no 'g' prefix).
    """
    tag = pieces["closest-tag"]
    if not tag:
        # exception #1: untagged history is identified by bare hash
        version = pieces["short"]
    else:
        version = tag
        if pieces["distance"]:
            version += "-%d-g%s" % (pieces["distance"], pieces["short"])
    if pieces["dirty"]:
        version += "-dirty"
    return version
def render_git_describe_long(pieces):
    """Render TAG-DISTANCE-gHEX[-dirty], mimicking
    'git describe --tags --dirty --always -long'.

    The distance/hash part is unconditional (present even at distance 0).
    Exception: with no tags, just HEX[-dirty] (note: no 'g' prefix).
    """
    tag = pieces["closest-tag"]
    if tag:
        version = "%s-%d-g%s" % (tag, pieces["distance"], pieces["short"])
    else:
        # exception #1: untagged history is identified by bare hash
        version = pieces["short"]
    if pieces["dirty"]:
        version += "-dirty"
    return version
def render(pieces, style):
    """Render *pieces* (from a VCS scan) into a version-info dict.

    Returns a dict with "version", "full-revisionid", "dirty" and "error"
    keys.  Raises ValueError for an unrecognized *style*.
    """
    if pieces["error"]:
        # Extraction already failed; report the error instead of a version.
        return {"version": "unknown",
                "full-revisionid": pieces.get("long"),
                "dirty": None,
                "error": pieces["error"]}

    if not style or style == "default":
        style = "pep440"  # the default

    if style not in ("pep440", "pep440-pre", "pep440-post", "pep440-old",
                     "git-describe", "git-describe-long"):
        raise ValueError("unknown style '%s'" % style)

    renderers = {"pep440": render_pep440,
                 "pep440-pre": render_pep440_pre,
                 "pep440-post": render_pep440_post,
                 "pep440-old": render_pep440_old,
                 "git-describe": render_git_describe,
                 "git-describe-long": render_git_describe_long}
    rendered = renderers[style](pieces)
    return {"version": rendered, "full-revisionid": pieces["long"],
            "dirty": pieces["dirty"], "error": None}
class VersioneerBadRootError(Exception):
    """Raised when the project root cannot be identified or is invalid."""
    pass
def get_versions(verbose=False):
    # returns dict with two keys: 'version' and 'full'
    """Compute this project's version, trying several strategies in order.

    Strategy order: expanded VCS keywords in the version file, a previously
    generated _version.py, a live VCS query (e.g. 'git describe'), and
    finally the parent-directory name.  Each extractor signals "does not
    apply" by raising NotThisMethod.  Returns a dict with "version",
    "full-revisionid", "dirty" and "error" keys.
    """
    if "versioneer" in sys.modules:
        # see the discussion in cmdclass.py:get_cmdclass()
        del sys.modules["versioneer"]
    root = get_root()
    cfg = get_config_from_root(root)
    assert cfg.VCS is not None, "please set [versioneer]VCS= in setup.cfg"
    handlers = HANDLERS.get(cfg.VCS)
    assert handlers, "unrecognized VCS '%s'" % cfg.VCS
    verbose = verbose or cfg.verbose
    assert cfg.versionfile_source is not None, \
        "please set versioneer.versionfile_source"
    assert cfg.tag_prefix is not None, "please set versioneer.tag_prefix"
    versionfile_abs = os.path.join(root, cfg.versionfile_source)
    # extract version from first of: _version.py, VCS command (e.g. 'git
    # describe'), parentdir. This is meant to work for developers using a
    # source checkout, for users of a tarball created by 'setup.py sdist',
    # and for users of a tarball/zipball created by 'git archive' or github's
    # download-from-tag feature or the equivalent in other VCSes.
    get_keywords_f = handlers.get("get_keywords")
    from_keywords_f = handlers.get("keywords")
    if get_keywords_f and from_keywords_f:
        try:
            keywords = get_keywords_f(versionfile_abs)
            ver = from_keywords_f(keywords, cfg.tag_prefix, verbose)
            if verbose:
                print("got version from expanded keyword %s" % ver)
            return ver
        except NotThisMethod:
            pass
    # Strategy 2: a _version.py written by a previous versioneer run.
    try:
        ver = versions_from_file(versionfile_abs)
        if verbose:
            print("got version from file %s %s" % (versionfile_abs, ver))
        return ver
    except NotThisMethod:
        pass
    # Strategy 3: ask the VCS directly.
    from_vcs_f = handlers.get("pieces_from_vcs")
    if from_vcs_f:
        try:
            pieces = from_vcs_f(cfg.tag_prefix, root, verbose)
            ver = render(pieces, cfg.style)
            if verbose:
                print("got version from VCS %s" % ver)
            return ver
        except NotThisMethod:
            pass
    # Strategy 4: infer the version from the unpacked directory name.
    try:
        if cfg.parentdir_prefix:
            ver = versions_from_parentdir(cfg.parentdir_prefix, root, verbose)
            if verbose:
                print("got version from parentdir %s" % ver)
            return ver
    except NotThisMethod:
        pass
    # Every strategy failed; report a placeholder version with an error.
    if verbose:
        print("unable to compute version")
    return {"version": "0+unknown", "full-revisionid": None,
            "dirty": None, "error": "unable to compute version"}
def get_version():
    """Return just the version string computed by get_versions()."""
    info = get_versions()
    return info["version"]
def get_cmdclass():
    """Return a cmdclass dict of version-aware distutils/setuptools commands.

    Provides "version", "build_py" and "sdist" commands (with "build_exe"
    replacing "build_py" when cx_Freeze is loaded) that embed the computed
    version string into built artifacts.
    """
    if "versioneer" in sys.modules:
        del sys.modules["versioneer"]
        # this fixes the "python setup.py develop" case (also 'install' and
        # 'easy_install .'), in which subdependencies of the main project are
        # built (using setup.py bdist_egg) in the same python process. Assume
        # a main project A and a dependency B, which use different versions
        # of Versioneer. A's setup.py imports A's Versioneer, leaving it in
        # sys.modules by the time B's setup.py is executed, causing B to run
        # with the wrong versioneer. Setuptools wraps the sub-dep builds in a
        # sandbox that restores sys.modules to its pre-build state, so the
        # parent is protected against the child's "import versioneer". By
        # removing ourselves from sys.modules here, before the child build
        # happens, we protect the child from the parent's versioneer too.
        # Also see https://github.com/warner/python-versioneer/issues/52
    cmds = {}
    # we add "version" to both distutils and setuptools
    from distutils.core import Command
    class cmd_version(Command):
        # "python setup.py version": print the computed version and exit.
        description = "report generated version string"
        user_options = []
        boolean_options = []
        def initialize_options(self):
            pass
        def finalize_options(self):
            pass
        def run(self):
            vers = get_versions(verbose=True)
            print("Version: %s" % vers["version"])
            print(" full-revisionid: %s" % vers.get("full-revisionid"))
            print(" dirty: %s" % vers.get("dirty"))
            if vers["error"]:
                print(" error: %s" % vers["error"])
    cmds["version"] = cmd_version
    # we override "build_py" in both distutils and setuptools
    #
    # most invocation pathways end up running build_py:
    # distutils/build -> build_py
    # distutils/install -> distutils/build ->..
    # setuptools/bdist_wheel -> distutils/install ->..
    # setuptools/bdist_egg -> distutils/install_lib -> build_py
    # setuptools/install -> bdist_egg ->..
    # setuptools/develop -> ?
    from distutils.command.build_py import build_py as _build_py
    class cmd_build_py(_build_py):
        # Standard build, then rewrite the built _version.py with the
        # statically computed version info.
        def run(self):
            root = get_root()
            cfg = get_config_from_root(root)
            versions = get_versions()
            _build_py.run(self)
            # now locate _version.py in the new build/ directory and replace
            # it with an updated value
            if cfg.versionfile_build:
                target_versionfile = os.path.join(self.build_lib,
                                                  cfg.versionfile_build)
                print("UPDATING %s" % target_versionfile)
                write_to_version_file(target_versionfile, versions)
    cmds["build_py"] = cmd_build_py
    if "cx_Freeze" in sys.modules: # cx_freeze enabled?
        from cx_Freeze.dist import build_exe as _build_exe
        class cmd_build_exe(_build_exe):
            # Temporarily freeze the version into the source tree's
            # _version.py, build, then restore the template version.
            def run(self):
                root = get_root()
                cfg = get_config_from_root(root)
                versions = get_versions()
                target_versionfile = cfg.versionfile_source
                print("UPDATING %s" % target_versionfile)
                write_to_version_file(target_versionfile, versions)
                _build_exe.run(self)
                os.unlink(target_versionfile)
                with open(cfg.versionfile_source, "w") as f:
                    LONG = LONG_VERSION_PY[cfg.VCS]
                    f.write(LONG %
                            {"DOLLAR": "$",
                             "STYLE": cfg.style,
                             "TAG_PREFIX": cfg.tag_prefix,
                             "PARENTDIR_PREFIX": cfg.parentdir_prefix,
                             "VERSIONFILE_SOURCE": cfg.versionfile_source,
                             })
        cmds["build_exe"] = cmd_build_exe
        del cmds["build_py"]
    # we override different "sdist" commands for both environments
    if "setuptools" in sys.modules:
        from setuptools.command.sdist import sdist as _sdist
    else:
        from distutils.command.sdist import sdist as _sdist
    class cmd_sdist(_sdist):
        def run(self):
            versions = get_versions()
            self._versioneer_generated_versions = versions
            # unless we update this, the command will keep using the old
            # version
            self.distribution.metadata.version = versions["version"]
            return _sdist.run(self)
        def make_release_tree(self, base_dir, files):
            root = get_root()
            cfg = get_config_from_root(root)
            _sdist.make_release_tree(self, base_dir, files)
            # now locate _version.py in the new base_dir directory
            # (remembering that it may be a hardlink) and replace it with an
            # updated value
            target_versionfile = os.path.join(base_dir, cfg.versionfile_source)
            print("UPDATING %s" % target_versionfile)
            write_to_version_file(target_versionfile,
                                  self._versioneer_generated_versions)
    cmds["sdist"] = cmd_sdist
    return cmds
CONFIG_ERROR = """
setup.cfg is missing the necessary Versioneer configuration. You need
a section like:
[versioneer]
VCS = git
style = pep440
versionfile_source = src/myproject/_version.py
versionfile_build = myproject/_version.py
tag_prefix = ""
parentdir_prefix = myproject-
You will also need to edit your setup.py to use the results:
import versioneer
setup(version=versioneer.get_version(),
cmdclass=versioneer.get_cmdclass(), ...)
Please read the docstring in ./versioneer.py for configuration instructions,
edit setup.cfg, and re-run the installer or 'python versioneer.py setup'.
"""
SAMPLE_CONFIG = """
# See the docstring in versioneer.py for instructions. Note that you must
# re-run 'versioneer.py setup' after changing this section, and commit the
# resulting files.
[versioneer]
#VCS = git
#style = pep440
#versionfile_source =
#versionfile_build =
#tag_prefix =
#parentdir_prefix =
"""
INIT_PY_SNIPPET = """
from ._version import get_versions
__version__ = get_versions()['version']
del get_versions
"""
def do_setup():
    """Install Versioneer into the current project.

    Writes the generated version file, appends the __init__.py snippet,
    ensures MANIFEST.in includes the needed files, and performs VCS-specific
    setup.  Returns 0 on success, 1 on missing/invalid configuration.
    """
    root = get_root()
    try:
        cfg = get_config_from_root(root)
    except (EnvironmentError, configparser.NoSectionError,
            configparser.NoOptionError) as e:
        if isinstance(e, (EnvironmentError, configparser.NoSectionError)):
            # setup.cfg is unreadable or has no [versioneer] section: seed
            # it with a commented-out sample the user can fill in.
            print("Adding sample versioneer config to setup.cfg",
                  file=sys.stderr)
            with open(os.path.join(root, "setup.cfg"), "a") as f:
                f.write(SAMPLE_CONFIG)
        print(CONFIG_ERROR, file=sys.stderr)
        return 1
    # Write the runtime version file from the per-VCS template.
    print(" creating %s" % cfg.versionfile_source)
    with open(cfg.versionfile_source, "w") as f:
        LONG = LONG_VERSION_PY[cfg.VCS]
        f.write(LONG % {"DOLLAR": "$",
                        "STYLE": cfg.style,
                        "TAG_PREFIX": cfg.tag_prefix,
                        "PARENTDIR_PREFIX": cfg.parentdir_prefix,
                        "VERSIONFILE_SOURCE": cfg.versionfile_source,
                        })
    ipy = os.path.join(os.path.dirname(cfg.versionfile_source),
                       "__init__.py")
    if os.path.exists(ipy):
        try:
            with open(ipy, "r") as f:
                old = f.read()
        except EnvironmentError:
            old = ""
        if INIT_PY_SNIPPET not in old:
            print(" appending to %s" % ipy)
            with open(ipy, "a") as f:
                f.write(INIT_PY_SNIPPET)
        else:
            print(" %s unmodified" % ipy)
    else:
        print(" %s doesn't exist, ok" % ipy)
        ipy = None
    # Make sure both the top-level "versioneer.py" and versionfile_source
    # (PKG/_version.py, used by runtime code) are in MANIFEST.in, so
    # they'll be copied into source distributions. Pip won't be able to
    # install the package without this.
    manifest_in = os.path.join(root, "MANIFEST.in")
    simple_includes = set()
    try:
        with open(manifest_in, "r") as f:
            for line in f:
                if line.startswith("include "):
                    for include in line.split()[1:]:
                        simple_includes.add(include)
    except EnvironmentError:
        pass
    # That doesn't cover everything MANIFEST.in can do
    # (http://docs.python.org/2/distutils/sourcedist.html#commands), so
    # it might give some false negatives. Appending redundant 'include'
    # lines is safe, though.
    if "versioneer.py" not in simple_includes:
        print(" appending 'versioneer.py' to MANIFEST.in")
        with open(manifest_in, "a") as f:
            f.write("include versioneer.py\n")
    else:
        print(" 'versioneer.py' already in MANIFEST.in")
    if cfg.versionfile_source not in simple_includes:
        print(" appending versionfile_source ('%s') to MANIFEST.in" %
              cfg.versionfile_source)
        with open(manifest_in, "a") as f:
            f.write("include %s\n" % cfg.versionfile_source)
    else:
        print(" versionfile_source already in MANIFEST.in")
    # Make VCS-specific changes. For git, this means creating/changing
    # .gitattributes to mark _version.py for export-time keyword
    # substitution.
    do_vcs_install(manifest_in, cfg.versionfile_source, ipy)
    return 0
def scan_setup_py():
    """Sanity-check ./setup.py for correct Versioneer usage.

    Looks for the import, the get_version()/get_cmdclass() calls, and for
    obsolete module-attribute configuration.  Prints advice for anything
    suspicious and returns the number of problems found.
    """
    found = set()
    setters = False
    errors = 0
    with open("setup.py", "r") as setup_file:
        for line in setup_file:
            if "import versioneer" in line:
                found.add("import")
            if "versioneer.get_cmdclass()" in line:
                found.add("cmdclass")
            if "versioneer.get_version()" in line:
                found.add("get_version")
            if "versioneer.VCS" in line or "versioneer.versionfile_source" in line:
                # Old-style configuration via module attributes.
                setters = True
    if len(found) != 3:
        print("")
        print("Your setup.py appears to be missing some important items")
        print("(but I might be wrong). Please make sure it has something")
        print("roughly like the following:")
        print("")
        print(" import versioneer")
        print(" setup( version=versioneer.get_version(),")
        print(" cmdclass=versioneer.get_cmdclass(), ...)")
        print("")
        errors += 1
    if setters:
        print("You should remove lines like 'versioneer.VCS = ' and")
        print("'versioneer.versionfile_source = ' . This configuration")
        print("now lives in setup.cfg, and should be removed from setup.py")
        print("")
        errors += 1
    return errors
if __name__ == "__main__":
    # Command-line entry point; only the 'setup' command is handled here.
    cmd = sys.argv[1]  # NOTE(review): raises IndexError when run with no arguments
    if cmd == "setup":
        errors = do_setup()
        errors += scan_setup_py()
        if errors:
            # Non-zero exit so CI/scripts notice the failed installation.
            sys.exit(1)
| bsd-3-clause |
jgraham/servo | tests/wpt/web-platform-tests/tools/html5lib/utils/spider.py | 436 | 4157 | #!/usr/bin/env python
"""Spider to try and find bugs in the parser. Requires httplib2 and elementtree
usage:
import spider
s = spider.Spider()
s.spider("http://www.google.com", maxURLs=100)
"""
import urllib.request, urllib.error, urllib.parse
import urllib.robotparser
import md5
import httplib2
import html5lib
from html5lib.treebuilders import etree
class Spider(object):
    """Crawl the web, feeding every fetched page to the html5lib parser and
    recording URLs whose markup makes the parser raise ("buggy" pages).

    Fixes over the original: uses hashlib instead of the removed ``md5``
    module, catches ``(AttributeError, KeyError)`` instead of binding the
    exception to the name ``KeyError``, and no longer mutates ``toVisit``
    while iterating it (which raises RuntimeError).
    """

    def __init__(self):
        self.unvisitedURLs = set()
        self.visitedURLs = set()
        self.buggyURLs = set()
        self.robotParser = urllib.robotparser.RobotFileParser()
        # Map digest-of-content -> URL, used to skip duplicate documents.
        self.contentDigest = {}
        self.http = httplib2.Http(".cache")

    def run(self, initialURL, maxURLs=1000):
        """Spider outward from initialURL, visiting at most maxURLs pages
        (no limit when maxURLs is None)."""
        urlNumber = 0
        self.visitedURLs.add(initialURL)
        content = self.loadURL(initialURL)
        while maxURLs is None or urlNumber < maxURLs:
            if content is not None:
                self.parse(content)
            urlNumber += 1
            if not self.unvisitedURLs:
                break
            content = self.loadURL(self.unvisitedURLs.pop())

    def parse(self, content):
        """Parse one document; record the current URL as buggy when the
        parser raises, otherwise harvest its outgoing links."""
        failed = False
        p = html5lib.HTMLParser(tree=etree.TreeBuilder)
        try:
            tree = p.parse(content)
        except Exception:
            # Any parser exception marks this page as a parser bug.
            # (Narrowed from a bare except so Ctrl-C still interrupts.)
            self.buggyURLs.add(self.currentURL)
            failed = True
            print("BUGGY:", self.currentURL)
        self.visitedURLs.add(self.currentURL)
        if not failed:
            self.updateURLs(tree)

    def loadURL(self, url):
        """GET *url*; return its body, or None when the document duplicates
        one already seen or the response status is not 200."""
        import hashlib  # local import keeps this fix self-contained

        resp, content = self.http.request(url, "GET")
        self.currentURL = url
        digest = hashlib.md5(content).hexdigest()
        if digest in self.contentDigest:
            # Same bytes fetched from a different URL: treat as visited.
            content = None
            self.visitedURLs.add(url)
        else:
            self.contentDigest[digest] = url
        if resp['status'] != "200":
            content = None
        return content

    def updateURLs(self, tree):
        """Take all the links in the current document, extract the URLs and
        update the list of visited and unvisited URLs according to whether we
        have seen them before or not"""
        urls = set()
        # Remove all links we have already visited
        for link in tree.findall(".//a"):
            try:
                url = urllib.parse.urldefrag(link.attrib['href'])[0]
                if (url and url not in self.unvisitedURLs and
                        url not in self.visitedURLs):
                    urls.add(url)
            except KeyError:
                # <a> element without an href attribute.
                pass
        # Remove all non-http URLs and add a suitable base URL where that is
        # missing
        newUrls = set()
        for url in urls:
            splitURL = list(urllib.parse.urlsplit(url))
            if splitURL[0] != "http":
                continue
            if splitURL[1] == "":
                splitURL[1] = urllib.parse.urlsplit(self.currentURL)[1]
            newUrls.add(urllib.parse.urlunsplit(splitURL))
        urls = newUrls

        responseHeaders = {}
        # Now we want to find the content types of the links we haven't visited
        for url in urls:
            try:
                resp, content = self.http.request(url, "HEAD")
                responseHeaders[url] = resp
            except (AttributeError, KeyError):
                # Don't know why this happens; skip the URL.
                # (Original bound the exception to the name KeyError.)
                pass
        # Remove links not of content-type html or pages not found
        # XXX - need to deal with other status codes?
        toVisit = set([url for url in urls if url in responseHeaders and
                       "html" in responseHeaders[url]['content-type'] and
                       responseHeaders[url]['status'] == "200"])
        # Now check we are allowed to spider the page.  Iterate over a copy:
        # removing from the set being iterated would raise RuntimeError.
        for url in list(toVisit):
            robotURL = list(urllib.parse.urlsplit(url)[:2])
            robotURL.extend(["robots.txt", "", ""])
            robotURL = urllib.parse.urlunsplit(robotURL)
            self.robotParser.set_url(robotURL)
            if not self.robotParser.can_fetch("*", url):
                toVisit.remove(url)
        self.visitedURLs.update(urls)
        self.unvisitedURLs.update(toVisit)
| mpl-2.0 |
eepalms/gem5-newcache | src/arch/x86/isa/insts/x87/stack_management/__init__.py | 91 | 2317 | # Copyright (c) 2007 The Hewlett-Packard Development Company
# All rights reserved.
#
# The license below extends only to copyright in the software and shall
# not be construed as granting a license to any other intellectual
# property including but not limited to intellectual property relating
# to a hardware implementation of the functionality of the software
# licensed hereunder. You may use the software subject to the license
# terms below provided that you ensure that this notice is replicated
# unmodified and in its entirety in all distributions of the software,
# modified or unmodified, in source code or in binary form.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met: redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer;
# redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution;
# neither the name of the copyright holders nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# Authors: Gabe Black
# Instruction categories whose microcode is aggregated into this package.
categories = ["stack_control",
              "clear_state"]

microcode = '''
# X86 microcode
'''

for category in categories:
    # The function-call form of exec parses under both Python 2 and 3;
    # the original bare 'exec "..."' statement is a SyntaxError on Python 3.
    exec("import %s as cat" % category)
    microcode += cat.microcode
| bsd-3-clause |
michigraber/scikit-learn | sklearn/svm/bounds.py | 280 | 2911 | """Determination of parameter bounds"""
# Author: Paolo Losi
# License: BSD 3 clause
from warnings import warn
import numpy as np
from ..preprocessing import LabelBinarizer
from ..utils.validation import check_consistent_length, check_array
from ..utils.extmath import safe_sparse_dot
def l1_min_c(X, y, loss='squared_hinge', fit_intercept=True,
             intercept_scaling=1.0):
    """Compute the smallest C giving a non-trivial l1-penalized model.

    For any C in (l1_min_C, infinity) an l1-penalized classifier such as
    LinearSVC with penalty='l1' or linear_model.LogisticRegression with
    penalty='l1' is guaranteed not to be empty.  The value is only valid
    when the class_weight parameter of fit() is left unset.

    Parameters
    ----------
    X : array-like or sparse matrix, shape = [n_samples, n_features]
        Training vectors, where n_samples is the number of samples and
        n_features is the number of features.

    y : array, shape = [n_samples]
        Target vector relative to X.

    loss : {'squared_hinge', 'log'}, default 'squared_hinge'
        Specifies the loss function: 'squared_hinge' is the squared hinge
        loss (a.k.a. L2 loss); 'log' is the logistic-regression loss.
        'l2' is accepted as a deprecated alias for 'squared_hinge' for
        backward compatibility, but should not be used in new code.

    fit_intercept : bool, default: True
        Whether the model fits an intercept; must match the fit() call.

    intercept_scaling : float, default: 1
        When fit_intercept is True, a "synthetic" constant feature equal
        to intercept_scaling is appended to each instance vector; must
        match the fit() call.

    Returns
    -------
    l1_min_c : float
        Minimum usable value for C.
    """
    if loss == "l2":
        warn("loss='l2' will be impossible from 0.18 onwards."
             " Use loss='squared_hinge' instead.",
             DeprecationWarning)
        loss = "squared_hinge"
    if loss not in ('squared_hinge', 'log'):
        raise ValueError('loss type not in ("squared_hinge", "log", "l2")')

    X = check_array(X, accept_sparse='csc')
    check_consistent_length(X, y)

    Y = LabelBinarizer(neg_label=-1).fit_transform(y).T
    # Largest absolute correlation between any class indicator and feature.
    denominator = np.max(np.abs(safe_sparse_dot(Y, X)))
    if fit_intercept:
        # Account for the synthetic intercept column as well.
        bias = intercept_scaling * np.ones((np.size(y), 1))
        denominator = max(denominator, abs(np.dot(Y, bias)).max())

    if denominator == 0.0:
        raise ValueError('Ill-posed l1_min_c calculation: l1 will always '
                         'select zero coefficients for this data')

    scale = 0.5 if loss == 'squared_hinge' else 2.0  # 2.0 for 'log'
    return scale / denominator
| bsd-3-clause |
ShiYw/Sigil | 3rdparty/python/Lib/lib2to3/pgen2/conv.py | 134 | 9642 | # Copyright 2004-2005 Elemental Security, Inc. All Rights Reserved.
# Licensed to PSF under a Contributor Agreement.
"""Convert graminit.[ch] spit out by pgen to Python code.
Pgen is the Python parser generator. It is useful to quickly create a
parser from a grammar file in Python's grammar notation. But I don't
want my parsers to be written in C (yet), so I'm translating the
parsing tables to Python data structures and writing a Python parse
engine.
Note that the token numbers are constants determined by the standard
Python tokenizer. The standard token module defines these numbers and
their names (the names are not used much). The token numbers are
hardcoded into the Python tokenizer and into pgen. A Python
implementation of the Python tokenizer is also available, in the
standard tokenize module.
On the other hand, symbol numbers (representing the grammar's
non-terminals) are assigned by pgen based on the actual grammar
input.
Note: this module is pretty much obsolete; the pgen module generates
equivalent grammar tables directly from the Grammar.txt input file
without having to invoke the Python pgen C program.
"""
# Python imports
import re
# Local imports
from pgen2 import grammar, token
class Converter(grammar.Grammar):
    """Grammar subclass that reads classic pgen output files.
    The run() method reads the tables as produced by the pgen parser
    generator, typically contained in two C files, graminit.h and
    graminit.c. The other methods are for internal use only.
    See the base class for more documentation.
    """
    def run(self, graminit_h, graminit_c):
        """Load the grammar tables from the text files written by pgen."""
        self.parse_graminit_h(graminit_h)
        self.parse_graminit_c(graminit_c)
        self.finish_off()
    def parse_graminit_h(self, filename):
        """Parse the .h file written by pgen. (Internal)
        This file is a sequence of #define statements defining the
        nonterminals of the grammar as numbers. We build two tables
        mapping the numbers to names and back.
        Returns False (after printing a message) when the file cannot be
        opened, True otherwise.
        """
        try:
            f = open(filename)
        except OSError as err:
            print("Can't open %s: %s" % (filename, err))
            return False
        self.symbol2number = {}
        self.number2symbol = {}
        lineno = 0
        for line in f:
            lineno += 1
            mo = re.match(r"^#define\s+(\w+)\s+(\d+)$", line)
            if not mo and line.strip():
                print("%s(%s): can't parse %s" % (filename, lineno,
                                                  line.strip()))
            else:
                # NOTE(review): a blank line reaches this branch with mo
                # still None and would raise AttributeError on mo.groups();
                # pgen's output presumably never contains one -- confirm.
                symbol, number = mo.groups()
                number = int(number)
                assert symbol not in self.symbol2number
                assert number not in self.number2symbol
                self.symbol2number[symbol] = number
                self.number2symbol[number] = symbol
        return True
    def parse_graminit_c(self, filename):
        """Parse the .c file written by pgen. (Internal)
        The file looks as follows. The first two lines are always this:
        #include "pgenheaders.h"
        #include "grammar.h"
        After that come four blocks:
        1) one or more state definitions
        2) a table defining dfas
        3) a table defining labels
        4) a struct defining the grammar
        A state definition has the following form:
        - one or more arc arrays, each of the form:
        static arc arcs_<n>_<m>[<k>] = {
        {<i>, <j>},
        ...
        };
        - followed by a state array, of the form:
        static state states_<s>[<t>] = {
        {<k>, arcs_<n>_<m>},
        ...
        };
        The parser below walks the file line by line, asserting the exact
        expected shape at every step; any mismatch aborts with the current
        (lineno, line) pair for diagnosis.
        """
        try:
            f = open(filename)
        except OSError as err:
            print("Can't open %s: %s" % (filename, err))
            return False
        # The code below essentially uses f's iterator-ness!
        lineno = 0
        # Expect the two #include lines
        lineno, line = lineno+1, next(f)
        assert line == '#include "pgenheaders.h"\n', (lineno, line)
        lineno, line = lineno+1, next(f)
        assert line == '#include "grammar.h"\n', (lineno, line)
        # Parse the state definitions
        lineno, line = lineno+1, next(f)
        allarcs = {}
        states = []
        while line.startswith("static arc "):
            # Inner loop: collect every arc array belonging to one state.
            while line.startswith("static arc "):
                mo = re.match(r"static arc arcs_(\d+)_(\d+)\[(\d+)\] = {$",
                              line)
                assert mo, (lineno, line)
                n, m, k = list(map(int, mo.groups()))
                arcs = []
                for _ in range(k):
                    lineno, line = lineno+1, next(f)
                    mo = re.match(r"\s+{(\d+), (\d+)},$", line)
                    assert mo, (lineno, line)
                    i, j = list(map(int, mo.groups()))
                    arcs.append((i, j))
                lineno, line = lineno+1, next(f)
                assert line == "};\n", (lineno, line)
                allarcs[(n, m)] = arcs
                lineno, line = lineno+1, next(f)
            # Then the state array referencing those arc arrays.
            mo = re.match(r"static state states_(\d+)\[(\d+)\] = {$", line)
            assert mo, (lineno, line)
            s, t = list(map(int, mo.groups()))
            assert s == len(states), (lineno, line)
            state = []
            for _ in range(t):
                lineno, line = lineno+1, next(f)
                mo = re.match(r"\s+{(\d+), arcs_(\d+)_(\d+)},$", line)
                assert mo, (lineno, line)
                k, n, m = list(map(int, mo.groups()))
                arcs = allarcs[n, m]
                assert k == len(arcs), (lineno, line)
                state.append(arcs)
            states.append(state)
            lineno, line = lineno+1, next(f)
            assert line == "};\n", (lineno, line)
            lineno, line = lineno+1, next(f)
        self.states = states
        # Parse the dfas
        dfas = {}
        mo = re.match(r"static dfa dfas\[(\d+)\] = {$", line)
        assert mo, (lineno, line)
        ndfas = int(mo.group(1))
        for i in range(ndfas):
            lineno, line = lineno+1, next(f)
            mo = re.match(r'\s+{(\d+), "(\w+)", (\d+), (\d+), states_(\d+),$',
                          line)
            assert mo, (lineno, line)
            symbol = mo.group(2)
            number, x, y, z = list(map(int, mo.group(1, 3, 4, 5)))
            assert self.symbol2number[symbol] == number, (lineno, line)
            assert self.number2symbol[number] == symbol, (lineno, line)
            assert x == 0, (lineno, line)
            state = states[z]
            assert y == len(state), (lineno, line)
            lineno, line = lineno+1, next(f)
            # The "first set" is encoded as an octal-escaped byte string;
            # eval() is safe here only because the input comes from pgen.
            mo = re.match(r'\s+("(?:\\\d\d\d)*")},$', line)
            assert mo, (lineno, line)
            first = {}
            rawbitset = eval(mo.group(1))
            for i, c in enumerate(rawbitset):
                byte = ord(c)
                for j in range(8):
                    if byte & (1<<j):
                        first[i*8 + j] = 1
            dfas[number] = (state, first)
        lineno, line = lineno+1, next(f)
        assert line == "};\n", (lineno, line)
        self.dfas = dfas
        # Parse the labels
        labels = []
        lineno, line = lineno+1, next(f)
        mo = re.match(r"static label labels\[(\d+)\] = {$", line)
        assert mo, (lineno, line)
        nlabels = int(mo.group(1))
        for i in range(nlabels):
            lineno, line = lineno+1, next(f)
            mo = re.match(r'\s+{(\d+), (0|"\w+")},$', line)
            assert mo, (lineno, line)
            x, y = mo.groups()
            x = int(x)
            if y == "0":
                y = None
            else:
                y = eval(y)
            labels.append((x, y))
        lineno, line = lineno+1, next(f)
        assert line == "};\n", (lineno, line)
        self.labels = labels
        # Parse the grammar struct
        lineno, line = lineno+1, next(f)
        assert line == "grammar _PyParser_Grammar = {\n", (lineno, line)
        lineno, line = lineno+1, next(f)
        mo = re.match(r"\s+(\d+),$", line)
        assert mo, (lineno, line)
        ndfas = int(mo.group(1))
        assert ndfas == len(self.dfas)
        lineno, line = lineno+1, next(f)
        assert line == "\tdfas,\n", (lineno, line)
        lineno, line = lineno+1, next(f)
        mo = re.match(r"\s+{(\d+), labels},$", line)
        assert mo, (lineno, line)
        nlabels = int(mo.group(1))
        assert nlabels == len(self.labels), (lineno, line)
        lineno, line = lineno+1, next(f)
        mo = re.match(r"\s+(\d+)$", line)
        assert mo, (lineno, line)
        start = int(mo.group(1))
        assert start in self.number2symbol, (lineno, line)
        self.start = start
        lineno, line = lineno+1, next(f)
        assert line == "};\n", (lineno, line)
        # The struct must be the last thing in the file.
        try:
            lineno, line = lineno+1, next(f)
        except StopIteration:
            pass
        else:
            assert 0, (lineno, line)
    def finish_off(self):
        """Create additional useful structures. (Internal)."""
        self.keywords = {} # map from keyword strings to arc labels
        self.tokens = {} # map from numeric token values to arc labels
        for ilabel, (type, value) in enumerate(self.labels):
            if type == token.NAME and value is not None:
                self.keywords[value] = ilabel
            elif value is None:
                self.tokens[type] = ilabel
| gpl-3.0 |
piantado/LOTlib | LOTlib/Projects/Quantifier/Run/Search_SimpleMCMC.py | 4 | 2024 | # -*- coding: utf-8 -*-
"""
All out rational-rules style gibbs on lexicons.
For MPI or local.
This is much slower than the vectorized versions.
MPI run:
$ mpiexec --hostfile ../../hosts.mpich2 -n 15 python Search_MCMC.py
"""
from LOTlib.MPI.MPI_map import MPI_map
from LOTlib import mh_sample
from LOTlib.FiniteBestSet import FiniteBestSet
from LOTlib.Examples.Quantifier.Model import *
# Experiment configuration for the MCMC search over quantifier lexicons.
CHAINS = 3 #how many times do we run?
DATA_AMOUNTS = range(0,300, 100) #range(0,1500,100)
SAMPLES = 1 # 1000000
TOP_COUNT = 50  # keep this many best hypotheses per run (see run())
OUT_PATH = "/home/piantado/Desktop/mit/Libraries/LOTlib/LOTlib/Examples/Quantifier/data/mcmc-run.pkl"
QUIET = False  # NOTE(review): not referenced in this chunk -- confirm use
RUN_MPI = True # should we run on MPI? If not, just run as normal python
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# the main sampling function to run
#one run with these parameters
def run(data_size):
    """Run one MCMC chain on data_size generated data points.

    Returns a FiniteBestSet holding the TOP_COUNT highest-posterior
    lexicons sampled.  (Python 2 code: note the print statements.)
    """
    print "Running ", data_size
    # We store the top TOP_COUNT from each run
    hypset = FiniteBestSet(TOP_COUNT, max=True)
    # initialize the data
    data = generate_data(data_size)
    # starting hypothesis -- here this generates at random
    learner = GriceanQuantifierLexicon(make_my_hypothesis, my_weight_function)
    # We will default to generating from the null grammar if no value is specified
    for w in target.all_words(): learner.set_word(w)
    # populate the finite sample by running the sampler for this many steps
    for x in mh_sample(learner, data, SAMPLES, skip=0):
        hypset.push(x, x.posterior_score)
    return hypset
if __name__ == "__main__":
    # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
    # MPI interface
    # Map. SimpleMPI will use a normal MAP if we are not running in MPI
    # (Python 2: DATA_AMOUNTS is a list, so * CHAINS repeats it.)
    allret = MPI_map(run, map(lambda x: [x], DATA_AMOUNTS * CHAINS)) # this many chains
    ## combine into a single hypothesis set and save
    outhyp = FiniteBestSet(max=True)
    for r in allret:
        print "# Merging ", len(r)
        outhyp.merge(r)
    import pickle
    # NOTE(review): file handle is never closed and mode 'w' is text mode;
    # fine under Python 2 where this script runs, not portable to 3.
    pickle.dump(outhyp, open(OUT_PATH, 'w'))
| gpl-3.0 |
ingokegel/intellij-community | python/testData/refactoring/move/optimizeImportsAfterMoveInvalidatesMembersToBeMoved/after/src/src.py | 22 | 2358 | # -*- coding: utf-8 -*-
# (c) 2017 Tuomas Airaksinen
#
# This file is part of Serviceform.
#
# Serviceform is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Serviceform is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Serviceform. If not, see <http://www.gnu.org/licenses/>.
import datetime
import string
import logging
from enum import Enum
from typing import Tuple, Set, Optional, Sequence, Iterator, Iterable, TYPE_CHECKING
from colorful.fields import RGBColorField
from django.conf import settings
from django.contrib.contenttypes.fields import GenericRelation
from django.db import models
from django.db.models import Prefetch
from django.template.loader import render_to_string
from django.urls import reverse
from django.utils import timezone
from django.utils.functional import cached_property
from django.utils.html import format_html
from django.utils.translation import ugettext_lazy as _
from guardian.shortcuts import get_users_with_perms
from select2 import fields as select2_fields
from serviceform.tasks.models import Task
from .. import emails, utils
from ..utils import ColorStr
from .mixins import CopyMixin
from .people import Participant, ResponsibilityPerson
from .email import EmailTemplate
from .participation import QuestionAnswer
if TYPE_CHECKING:
from .participation import ParticipationActivity, ParticipationActivityChoice
local_tz = timezone.get_default_timezone()
logger = logging.getLogger(__name__)
def imported_symbols_anchor():
    """Reference every imported symbol so automated import optimization
    (e.g. an IDE's "optimize imports") does not strip them from this module.
    """
    print(RGBColorField, settings, GenericRelation, Prefetch, render_to_string, reverse, format_html,
          get_users_with_perms, select2_fields, Task, emails, CopyMixin, Participant, ResponsibilityPerson,
          EmailTemplate, QuestionAnswer, ParticipationActivity, ParticipationActivityChoice, datetime, Enum, string,
          Tuple, Set, Optional, Sequence, Iterator, Iterable, _, cached_property, models, utils, ColorStr)
| apache-2.0 |
macs03/demo-cms | cms/lib/python2.7/site-packages/django/utils/unittest/result.py | 570 | 6105 | """Test result object"""
import sys
import traceback
import unittest
from StringIO import StringIO
from django.utils.unittest import util
from django.utils.unittest.compatibility import wraps
# Marks this module's frames as test-runner internals so they are skipped
# when formatting tracebacks (see _is_relevant_tb_level below).
__unittest = True
def failfast(method):
    """Decorator for result methods: when the result object's ``failfast``
    flag is set, request a stop of the test run before delegating to the
    wrapped method.
    """
    def wrapper(self, *args, **kwargs):
        should_stop = getattr(self, 'failfast', False)
        if should_stop:
            self.stop()
        return method(self, *args, **kwargs)
    return wraps(method)(wrapper)
# Templates for appending captured stdout/stderr to failure reports.
STDOUT_LINE = '\nStdout:\n%s'
STDERR_LINE = '\nStderr:\n%s'
class TestResult(unittest.TestResult):
    """Holder for test result information.

    Test results are automatically managed by the TestCase and TestSuite
    classes, and do not need to be explicitly manipulated by writers of tests.

    Each instance holds the total number of tests run, and collections of
    failures and errors that occurred among those test runs. The collections
    contain tuples of (testcase, exceptioninfo), where exceptioninfo is the
    formatted traceback of the error that occurred.
    """
    _previousTestClass = None
    _moduleSetUpFailed = False

    def __init__(self):
        self.failfast = False
        self.failures = []
        self.errors = []
        self.testsRun = 0
        self.skipped = []
        self.expectedFailures = []
        self.unexpectedSuccesses = []
        self.shouldStop = False
        # When True, stdout/stderr are captured per test and only echoed
        # back for failing/erroring tests (see startTest/stopTest).
        self.buffer = False
        self._stdout_buffer = None
        self._stderr_buffer = None
        self._original_stdout = sys.stdout
        self._original_stderr = sys.stderr
        # Set when a test fails/errors so stopTest knows to mirror the
        # captured output to the real streams.
        self._mirrorOutput = False

    def startTest(self, test):
        "Called when the given test is about to be run"
        self.testsRun += 1
        self._mirrorOutput = False
        if self.buffer:
            # Lazily create the capture buffers, then redirect the streams.
            if self._stderr_buffer is None:
                self._stderr_buffer = StringIO()
                self._stdout_buffer = StringIO()
            sys.stdout = self._stdout_buffer
            sys.stderr = self._stderr_buffer

    def startTestRun(self):
        """Called once before any tests are executed.

        See startTest for a method called before each test.
        """

    def stopTest(self, test):
        """Called when the given test has been run"""
        if self.buffer:
            if self._mirrorOutput:
                # The test failed: replay captured output on the original
                # streams so the user can see it.
                output = sys.stdout.getvalue()
                error = sys.stderr.getvalue()
                if output:
                    if not output.endswith('\n'):
                        output += '\n'
                    self._original_stdout.write(STDOUT_LINE % output)
                if error:
                    if not error.endswith('\n'):
                        error += '\n'
                    self._original_stderr.write(STDERR_LINE % error)
            # Restore the real streams and reset the buffers for reuse.
            sys.stdout = self._original_stdout
            sys.stderr = self._original_stderr
            self._stdout_buffer.seek(0)
            self._stdout_buffer.truncate()
            self._stderr_buffer.seek(0)
            self._stderr_buffer.truncate()
        self._mirrorOutput = False

    def stopTestRun(self):
        """Called once after all tests are executed.

        See stopTest for a method called after each test.
        """

    @failfast
    def addError(self, test, err):
        """Called when an error has occurred. 'err' is a tuple of values as
        returned by sys.exc_info().
        """
        self.errors.append((test, self._exc_info_to_string(err, test)))
        self._mirrorOutput = True

    @failfast
    def addFailure(self, test, err):
        """Called when an error has occurred. 'err' is a tuple of values as
        returned by sys.exc_info()."""
        self.failures.append((test, self._exc_info_to_string(err, test)))
        self._mirrorOutput = True

    def addSuccess(self, test):
        "Called when a test has completed successfully"
        pass

    def addSkip(self, test, reason):
        """Called when a test is skipped."""
        self.skipped.append((test, reason))

    def addExpectedFailure(self, test, err):
        """Called when an expected failure/error occured."""
        self.expectedFailures.append(
            (test, self._exc_info_to_string(err, test)))

    @failfast
    def addUnexpectedSuccess(self, test):
        """Called when a test was expected to fail, but succeed."""
        self.unexpectedSuccesses.append(test)

    def wasSuccessful(self):
        "Tells whether or not this result was a success"
        return (len(self.failures) + len(self.errors) == 0)

    def stop(self):
        "Indicates that the tests should be aborted"
        self.shouldStop = True

    def _exc_info_to_string(self, err, test):
        """Converts a sys.exc_info()-style tuple of values into a string."""
        exctype, value, tb = err
        # Skip test runner traceback levels
        while tb and self._is_relevant_tb_level(tb):
            tb = tb.tb_next
        if exctype is test.failureException:
            # Skip assert*() traceback levels
            length = self._count_relevant_tb_levels(tb)
            msgLines = traceback.format_exception(exctype, value, tb, length)
        else:
            msgLines = traceback.format_exception(exctype, value, tb)
        if self.buffer:
            # Append any output captured during this test to the report.
            output = sys.stdout.getvalue()
            error = sys.stderr.getvalue()
            if output:
                if not output.endswith('\n'):
                    output += '\n'
                msgLines.append(STDOUT_LINE % output)
            if error:
                if not error.endswith('\n'):
                    error += '\n'
                msgLines.append(STDERR_LINE % error)
        return ''.join(msgLines)

    def _is_relevant_tb_level(self, tb):
        # Frames belonging to the test machinery declare ``__unittest`` in
        # their module globals; those frames are hidden from reports.
        return '__unittest' in tb.tb_frame.f_globals

    def _count_relevant_tb_levels(self, tb):
        length = 0
        while tb and not self._is_relevant_tb_level(tb):
            length += 1
            tb = tb.tb_next
        return length

    def __repr__(self):
        return "<%s run=%i errors=%i failures=%i>" % \
            (util.strclass(self.__class__), self.testsRun, len(self.errors),
             len(self.failures))
| mit |
ageron/tensorflow | tensorflow/python/keras/preprocessing/image.py | 13 | 21213 | # Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
# pylint: disable=invalid-name
# pylint: disable=g-import-not-at-top
"""Set of tools for real-time data augmentation on image data.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from keras_preprocessing import image
try:
from scipy import linalg # pylint: disable=unused-import
from scipy import ndimage # pylint: disable=unused-import
except ImportError:
pass
from tensorflow.python.keras import backend
from tensorflow.python.keras import utils
from tensorflow.python.util import tf_inspect
from tensorflow.python.util.tf_export import keras_export
# Thin aliases: expose the keras_preprocessing implementations under the
# names this module has historically provided.
random_rotation = image.random_rotation
random_shift = image.random_shift
random_shear = image.random_shear
random_zoom = image.random_zoom
apply_channel_shift = image.apply_channel_shift
random_channel_shift = image.random_channel_shift
apply_brightness_shift = image.apply_brightness_shift
random_brightness = image.random_brightness
apply_affine_transform = image.apply_affine_transform
load_img = image.load_img
@keras_export('keras.preprocessing.image.array_to_img')
def array_to_img(x, data_format=None, scale=True, dtype=None):
  """Converts a 3D Numpy array to a PIL Image instance.

  Arguments:
      x: Input Numpy array.
      data_format: Image data format,
          either "channels_first" or "channels_last".
      scale: Whether to rescale image values
          to be within `[0, 255]`.
      dtype: Dtype to use.

  Returns:
      A PIL Image instance.

  Raises:
      ImportError: if PIL is not available.
      ValueError: if invalid `x` or `data_format` is passed.
  """
  fmt = backend.image_data_format() if data_format is None else data_format
  extra_kwargs = {}
  # Older keras_preprocessing releases do not accept `dtype`; only pass it
  # through when the installed version supports it.
  if 'dtype' in tf_inspect.getfullargspec(image.array_to_img)[0]:
    extra_kwargs['dtype'] = backend.floatx() if dtype is None else dtype
  return image.array_to_img(x, data_format=fmt, scale=scale, **extra_kwargs)
@keras_export('keras.preprocessing.image.img_to_array')
def img_to_array(img, data_format=None, dtype=None):
  """Converts a PIL Image instance to a Numpy array.

  Arguments:
      img: PIL Image instance.
      data_format: Image data format,
          either "channels_first" or "channels_last".
      dtype: Dtype to use for the returned array.

  Returns:
      A 3D Numpy array.

  Raises:
      ValueError: if invalid `img` or `data_format` is passed.
  """
  fmt = backend.image_data_format() if data_format is None else data_format
  extra_kwargs = {}
  # Older keras_preprocessing releases do not accept `dtype`; only pass it
  # through when the installed version supports it.
  if 'dtype' in tf_inspect.getfullargspec(image.img_to_array)[0]:
    extra_kwargs['dtype'] = backend.floatx() if dtype is None else dtype
  return image.img_to_array(img, data_format=fmt, **extra_kwargs)
@keras_export('keras.preprocessing.image.save_img')
def save_img(path,
             x,
             data_format=None,
             file_format=None,
             scale=True,
             **kwargs):
  """Saves an image stored as a Numpy array to a path or file object.

  Arguments:
      path: Path or file object.
      x: Numpy array.
      data_format: Image data format,
          either "channels_first" or "channels_last".
      file_format: Optional file format override. If omitted, the
          format to use is determined from the filename extension.
          If a file object was used instead of a filename, this
          parameter should always be used.
      scale: Whether to rescale image values to be within `[0, 255]`.
      **kwargs: Additional keyword arguments passed to `PIL.Image.save()`.
  """
  # Fall back to the globally configured data format when none is given.
  fmt = data_format if data_format is not None else backend.image_data_format()
  image.save_img(path, x, data_format=fmt, file_format=file_format,
                 scale=scale, **kwargs)
@keras_export('keras.preprocessing.image.Iterator')
class Iterator(image.Iterator, utils.Sequence):
  # Mixes the keras_preprocessing Iterator with Keras' `Sequence` so that
  # iterators are accepted wherever a `Sequence` is expected (e.g.
  # `fit_generator`-style training loops).
  pass
@keras_export('keras.preprocessing.image.DirectoryIterator')
class DirectoryIterator(image.DirectoryIterator, Iterator):
  """Iterator capable of reading images from a directory on disk.

  Arguments:
      directory: Path to the directory to read images from.
          Each subdirectory in this directory will be
          considered to contain images from one class,
          or alternatively you could specify class subdirectories
          via the `classes` argument.
      image_data_generator: Instance of `ImageDataGenerator`
          to use for random transformations and normalization.
      target_size: tuple of integers, dimensions to resize input images to.
      color_mode: One of `"rgb"`, `"rgba"`, `"grayscale"`.
          Color mode to read images.
      classes: Optional list of strings, names of subdirectories
          containing images from each class (e.g. `["dogs", "cats"]`).
          It will be computed automatically if not set.
      class_mode: Mode for yielding the targets:
          `"binary"`: binary targets (if there are only two classes),
          `"categorical"`: categorical targets,
          `"sparse"`: integer targets,
          `"input"`: targets are images identical to input images (mainly
              used to work with autoencoders),
          `None`: no targets get yielded (only input images are yielded).
      batch_size: Integer, size of a batch.
      shuffle: Boolean, whether to shuffle the data between epochs.
      seed: Random seed for data shuffling.
      data_format: String, one of `channels_first`, `channels_last`.
      save_to_dir: Optional directory where to save the pictures
          being yielded, in a viewable format. This is useful
          for visualizing the random transformations being
          applied, for debugging purposes.
      save_prefix: String prefix to use for saving sample
          images (if `save_to_dir` is set).
      save_format: Format to use for saving sample images
          (if `save_to_dir` is set).
      subset: Subset of data (`"training"` or `"validation"`) if
          validation_split is set in ImageDataGenerator.
      interpolation: Interpolation method used to resample the image if the
          target size is different from that of the loaded image.
          Supported methods are "nearest", "bilinear", and "bicubic".
          If PIL version 1.1.3 or newer is installed, "lanczos" is also
          supported. If PIL version 3.4.0 or newer is installed, "box" and
          "hamming" are also supported. By default, "nearest" is used.
      dtype: Dtype to use for generated arrays.
  """

  def __init__(self, directory, image_data_generator,
               target_size=(256, 256),
               color_mode='rgb',
               classes=None,
               class_mode='categorical',
               batch_size=32,
               shuffle=True,
               seed=None,
               data_format=None,
               save_to_dir=None,
               save_prefix='',
               save_format='png',
               follow_links=False,
               subset=None,
               interpolation='nearest',
               dtype=None):
    if data_format is None:
      data_format = backend.image_data_format()
    kwargs = {}
    # Only forward `dtype` when the installed keras_preprocessing version
    # accepts it. Probe this class's own base `__init__` (the previous
    # code probed `image.ImageDataGenerator.__init__`, which indicates the
    # same library version but is inconsistent with NumpyArrayIterator,
    # which probes its own base).
    if 'dtype' in tf_inspect.getfullargspec(
        image.DirectoryIterator.__init__)[0]:
      if dtype is None:
        dtype = backend.floatx()
      kwargs['dtype'] = dtype
    super(DirectoryIterator, self).__init__(
        directory, image_data_generator,
        target_size=target_size,
        color_mode=color_mode,
        classes=classes,
        class_mode=class_mode,
        batch_size=batch_size,
        shuffle=shuffle,
        seed=seed,
        data_format=data_format,
        save_to_dir=save_to_dir,
        save_prefix=save_prefix,
        save_format=save_format,
        follow_links=follow_links,
        subset=subset,
        interpolation=interpolation,
        **kwargs)
@keras_export('keras.preprocessing.image.NumpyArrayIterator')
class NumpyArrayIterator(image.NumpyArrayIterator, Iterator):
  """Iterator yielding data from a Numpy array.

  Arguments:
      x: Numpy array of input data or tuple.
          If tuple, the second elements is either
          another numpy array or a list of numpy arrays,
          each of which gets passed
          through as an output without any modifications.
      y: Numpy array of targets data.
      image_data_generator: Instance of `ImageDataGenerator`
          to use for random transformations and normalization.
      batch_size: Integer, size of a batch.
      shuffle: Boolean, whether to shuffle the data between epochs.
      sample_weight: Numpy array of sample weights.
      seed: Random seed for data shuffling.
      data_format: String, one of `channels_first`, `channels_last`.
      save_to_dir: Optional directory where to save the pictures
          being yielded, in a viewable format. This is useful
          for visualizing the random transformations being
          applied, for debugging purposes.
      save_prefix: String prefix to use for saving sample
          images (if `save_to_dir` is set).
      save_format: Format to use for saving sample images
          (if `save_to_dir` is set).
      subset: Subset of data (`"training"` or `"validation"`) if
          validation_split is set in ImageDataGenerator.
      dtype: Dtype to use for the generated arrays.
  """

  def __init__(self, x, y, image_data_generator,
               batch_size=32,
               shuffle=False,
               sample_weight=None,
               seed=None,
               data_format=None,
               save_to_dir=None,
               save_prefix='',
               save_format='png',
               subset=None,
               dtype=None):
    if data_format is None:
      data_format = backend.image_data_format()
    kwargs = {}
    # Only forward `dtype` when the installed keras_preprocessing version
    # accepts it (older releases lack the parameter).
    if 'dtype' in tf_inspect.getfullargspec(
        image.NumpyArrayIterator.__init__)[0]:
      if dtype is None:
        dtype = backend.floatx()
      kwargs['dtype'] = dtype
    super(NumpyArrayIterator, self).__init__(
        x, y, image_data_generator,
        batch_size=batch_size,
        shuffle=shuffle,
        sample_weight=sample_weight,
        seed=seed,
        data_format=data_format,
        save_to_dir=save_to_dir,
        save_prefix=save_prefix,
        save_format=save_format,
        subset=subset,
        **kwargs)
@keras_export('keras.preprocessing.image.ImageDataGenerator')
class ImageDataGenerator(image.ImageDataGenerator):
  """Generate batches of tensor image data with real-time data augmentation.

  The data will be looped over (in batches).

  Arguments:
      featurewise_center: Boolean.
          Set input mean to 0 over the dataset, feature-wise.
      samplewise_center: Boolean. Set each sample mean to 0.
      featurewise_std_normalization: Boolean.
          Divide inputs by std of the dataset, feature-wise.
      samplewise_std_normalization: Boolean. Divide each input by its std.
      zca_epsilon: epsilon for ZCA whitening. Default is 1e-6.
      zca_whitening: Boolean. Apply ZCA whitening.
      rotation_range: Int. Degree range for random rotations.
      width_shift_range: Float, 1-D array-like or int
          - float: fraction of total width, if < 1, or pixels if >= 1.
          - 1-D array-like: random elements from the array.
          - int: integer number of pixels from interval
              `(-width_shift_range, +width_shift_range)`
          - With `width_shift_range=2` possible values
              are integers `[-1, 0, +1]`,
              same as with `width_shift_range=[-1, 0, +1]`,
              while with `width_shift_range=1.0` possible values are floats
              in the interval [-1.0, +1.0).
      height_shift_range: Float, 1-D array-like or int
          - float: fraction of total height, if < 1, or pixels if >= 1.
          - 1-D array-like: random elements from the array.
          - int: integer number of pixels from interval
              `(-height_shift_range, +height_shift_range)`
          - With `height_shift_range=2` possible values
              are integers `[-1, 0, +1]`,
              same as with `height_shift_range=[-1, 0, +1]`,
              while with `height_shift_range=1.0` possible values are floats
              in the interval [-1.0, +1.0).
      brightness_range: Tuple or list of two floats. Range for picking
          a brightness shift value from.
      shear_range: Float. Shear Intensity
          (Shear angle in counter-clockwise direction in degrees)
      zoom_range: Float or [lower, upper]. Range for random zoom.
          If a float, `[lower, upper] = [1-zoom_range, 1+zoom_range]`.
      channel_shift_range: Float. Range for random channel shifts.
      fill_mode: One of {"constant", "nearest", "reflect" or "wrap"}.
          Default is 'nearest'.
          Points outside the boundaries of the input are filled
          according to the given mode:
          - 'constant': kkkkkkkk|abcd|kkkkkkkk (cval=k)
          - 'nearest': aaaaaaaa|abcd|dddddddd
          - 'reflect': abcddcba|abcd|dcbaabcd
          - 'wrap': abcdabcd|abcd|abcdabcd
      cval: Float or Int.
          Value used for points outside the boundaries
          when `fill_mode = "constant"`.
      horizontal_flip: Boolean. Randomly flip inputs horizontally.
      vertical_flip: Boolean. Randomly flip inputs vertically.
      rescale: rescaling factor. Defaults to None.
          If None or 0, no rescaling is applied,
          otherwise we multiply the data by the value provided
          (after applying all other transformations).
      preprocessing_function: function that will be implied on each input.
          The function will run after the image is resized and augmented.
          The function should take one argument:
          one image (Numpy tensor with rank 3),
          and should output a Numpy tensor with the same shape.
      data_format: Image data format,
          either "channels_first" or "channels_last".
          "channels_last" mode means that the images should have shape
          `(samples, height, width, channels)`,
          "channels_first" mode means that the images should have shape
          `(samples, channels, height, width)`.
          It defaults to the `image_data_format` value found in your
          Keras config file at `~/.keras/keras.json`.
          If you never set it, then it will be "channels_last".
      validation_split: Float. Fraction of images reserved for validation
          (strictly between 0 and 1).
      dtype: Dtype to use for the generated arrays.

  Examples:

  Example of using `.flow(x, y)`:

  ```python
  (x_train, y_train), (x_test, y_test) = cifar10.load_data()
  y_train = np_utils.to_categorical(y_train, num_classes)
  y_test = np_utils.to_categorical(y_test, num_classes)
  datagen = ImageDataGenerator(
      featurewise_center=True,
      featurewise_std_normalization=True,
      rotation_range=20,
      width_shift_range=0.2,
      height_shift_range=0.2,
      horizontal_flip=True)
  # compute quantities required for featurewise normalization
  # (std, mean, and principal components if ZCA whitening is applied)
  datagen.fit(x_train)
  # fits the model on batches with real-time data augmentation:
  model.fit_generator(datagen.flow(x_train, y_train, batch_size=32),
                      steps_per_epoch=len(x_train) / 32, epochs=epochs)
  # here's a more "manual" example
  for e in range(epochs):
      print('Epoch', e)
      batches = 0
      for x_batch, y_batch in datagen.flow(x_train, y_train, batch_size=32):
          model.fit(x_batch, y_batch)
          batches += 1
          if batches >= len(x_train) / 32:
              # we need to break the loop by hand because
              # the generator loops indefinitely
              break
  ```

  Example of using `.flow_from_directory(directory)`:

  ```python
  train_datagen = ImageDataGenerator(
      rescale=1./255,
      shear_range=0.2,
      zoom_range=0.2,
      horizontal_flip=True)
  test_datagen = ImageDataGenerator(rescale=1./255)
  train_generator = train_datagen.flow_from_directory(
      'data/train',
      target_size=(150, 150),
      batch_size=32,
      class_mode='binary')
  validation_generator = test_datagen.flow_from_directory(
      'data/validation',
      target_size=(150, 150),
      batch_size=32,
      class_mode='binary')
  model.fit_generator(
      train_generator,
      steps_per_epoch=2000,
      epochs=50,
      validation_data=validation_generator,
      validation_steps=800)
  ```

  Example of transforming images and masks together.

  ```python
  # we create two instances with the same arguments
  data_gen_args = dict(featurewise_center=True,
                       featurewise_std_normalization=True,
                       rotation_range=90,
                       width_shift_range=0.1,
                       height_shift_range=0.1,
                       zoom_range=0.2)
  image_datagen = ImageDataGenerator(**data_gen_args)
  mask_datagen = ImageDataGenerator(**data_gen_args)
  # Provide the same seed and keyword arguments to the fit and flow methods
  seed = 1
  image_datagen.fit(images, augment=True, seed=seed)
  mask_datagen.fit(masks, augment=True, seed=seed)
  image_generator = image_datagen.flow_from_directory(
      'data/images',
      class_mode=None,
      seed=seed)
  mask_generator = mask_datagen.flow_from_directory(
      'data/masks',
      class_mode=None,
      seed=seed)
  # combine generators into one which yields image and masks
  train_generator = zip(image_generator, mask_generator)
  model.fit_generator(
      train_generator,
      steps_per_epoch=2000,
      epochs=50)
  ```
  """

  def __init__(self,
               featurewise_center=False,
               samplewise_center=False,
               featurewise_std_normalization=False,
               samplewise_std_normalization=False,
               zca_whitening=False,
               zca_epsilon=1e-6,
               rotation_range=0,
               width_shift_range=0.,
               height_shift_range=0.,
               brightness_range=None,
               shear_range=0.,
               zoom_range=0.,
               channel_shift_range=0.,
               fill_mode='nearest',
               cval=0.,
               horizontal_flip=False,
               vertical_flip=False,
               rescale=None,
               preprocessing_function=None,
               data_format=None,
               validation_split=0.0,
               dtype=None):
    if data_format is None:
      data_format = backend.image_data_format()
    kwargs = {}
    # Only forward `dtype` when the installed keras_preprocessing version
    # accepts it (older releases lack the parameter).
    if 'dtype' in tf_inspect.getfullargspec(
        image.ImageDataGenerator.__init__)[0]:
      if dtype is None:
        dtype = backend.floatx()
      kwargs['dtype'] = dtype
    super(ImageDataGenerator, self).__init__(
        featurewise_center=featurewise_center,
        samplewise_center=samplewise_center,
        featurewise_std_normalization=featurewise_std_normalization,
        samplewise_std_normalization=samplewise_std_normalization,
        zca_whitening=zca_whitening,
        zca_epsilon=zca_epsilon,
        rotation_range=rotation_range,
        width_shift_range=width_shift_range,
        height_shift_range=height_shift_range,
        brightness_range=brightness_range,
        shear_range=shear_range,
        zoom_range=zoom_range,
        channel_shift_range=channel_shift_range,
        fill_mode=fill_mode,
        cval=cval,
        horizontal_flip=horizontal_flip,
        vertical_flip=vertical_flip,
        rescale=rescale,
        preprocessing_function=preprocessing_function,
        data_format=data_format,
        validation_split=validation_split,
        **kwargs)
# Register the passthrough keras_preprocessing helpers under their public
# tf.keras API names (module-level side effect at import time).
keras_export('keras.preprocessing.image.random_rotation')(random_rotation)
keras_export('keras.preprocessing.image.random_shift')(random_shift)
keras_export('keras.preprocessing.image.random_shear')(random_shear)
keras_export('keras.preprocessing.image.random_zoom')(random_zoom)
keras_export(
    'keras.preprocessing.image.apply_channel_shift')(apply_channel_shift)
keras_export(
    'keras.preprocessing.image.random_channel_shift')(random_channel_shift)
keras_export(
    'keras.preprocessing.image.apply_brightness_shift')(apply_brightness_shift)
keras_export('keras.preprocessing.image.random_brightness')(random_brightness)
keras_export(
    'keras.preprocessing.image.apply_affine_transform')(apply_affine_transform)
keras_export('keras.preprocessing.image.load_img')(load_img)
| apache-2.0 |
baloo/shinken | test/test_business_correlator.py | 1 | 49745 | #!/usr/bin/env python2.6
# Copyright (C) 2009-2010 :
# Gabes Jean, naparuba@gmail.com
# Gerhard Lausser, Gerhard.Lausser@consol.de
#
# This file is part of Shinken.
#
# Shinken is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Shinken is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with Shinken. If not, see <http://www.gnu.org/licenses/>.
#
# This file is used to test reading and processing of config files
#
import re
#It's ugly I know....
from shinken_test import *
class TestBusinesscorrel(ShinkenTest):
# Uncomment this is you want to use a specific configuration
# for your test
    def setUp(self):
        # Load the dedicated business-correlator configuration before each
        # test in this class.
        self.setup_with_file('etc/nagios_business_correlator.cfg')
# We will try a simple bd1 OR db2
    def test_simple_or_business_correlator(self):
        """Check a simple `db1 | db2` business rule: the rule should take
        the best (lowest) HARD state of its two service sons.
        """
        #
        # Config is not correct because of a wrong relative path
        # in the main config file
        #
        print "Get the hosts and services"
        now = time.time()
        host = self.sched.hosts.find_by_name("test_host_0")
        host.checks_in_progress = []
        host.act_depend_of = [] # ignore the router
        router = self.sched.hosts.find_by_name("test_router_0")
        router.checks_in_progress = []
        router.act_depend_of = [] # ignore the router
        svc = self.sched.services.find_srv_by_name_and_hostname("test_host_0", "test_ok_0")
        svc.checks_in_progress = []
        svc.act_depend_of = [] # no hostchecks on critical checkresults
        svc_bd1 = self.sched.services.find_srv_by_name_and_hostname("test_host_0", "db1")
        self.assert_(svc_bd1.got_business_rule == False)
        self.assert_(svc_bd1.business_rule is None)
        svc_bd2 = self.sched.services.find_srv_by_name_and_hostname("test_host_0", "db2")
        self.assert_(svc_bd2.got_business_rule == False)
        self.assert_(svc_bd2.business_rule is None)
        svc_cor = self.sched.services.find_srv_by_name_and_hostname("test_host_0", "Simple_Or")
        self.assert_(svc_cor.got_business_rule == True)
        self.assert_(svc_cor.business_rule is not None)
        bp_rule = svc_cor.business_rule
        self.assert_(bp_rule.operand == '|')
        # We check for good parent/childs links
        # So svc_cor should be a son of svc_bd1 and svc_bd2
        # and bd1 and bd2 should be parents of svc_cor
        self.assert_(svc_cor in svc_bd1.child_dependencies)
        self.assert_(svc_cor in svc_bd2.child_dependencies)
        self.assert_(svc_bd1 in svc_cor.parent_dependencies)
        self.assert_(svc_bd2 in svc_cor.parent_dependencies)
        sons = bp_rule.sons
        print "Sons,", sons
        #We(ve got 2 sons, 2 services nodes
        self.assert_(len(sons) == 2)
        self.assert_(sons[0].operand == 'service')
        self.assert_(sons[0].sons[0] == svc_bd1)
        self.assert_(sons[1].operand == 'service')
        self.assert_(sons[1].sons[0] == svc_bd2)
        # Now state working on the states
        self.scheduler_loop(1, [[svc_bd2, 0, 'OK | value1=1 value2=2'], [svc_bd1, 0, 'OK | rtt=10']])
        self.assert_(svc_bd1.state == 'OK')
        self.assert_(svc_bd1.state_type == 'HARD')
        self.assert_(svc_bd2.state == 'OK')
        self.assert_(svc_bd2.state_type == 'HARD')
        state = bp_rule.get_state()
        self.assert_(state == 0)
        # Now we set the bd1 as soft/CRITICAL
        self.scheduler_loop(1, [[svc_bd1, 2, 'CRITICAL | value1=1 value2=2']])
        self.assert_(svc_bd1.state == 'CRITICAL')
        self.assert_(svc_bd1.state_type == 'SOFT')
        self.assert_(svc_bd1.last_hard_state_id == 0)
        # The business rule must still be 0 (only HARD states count)
        state = bp_rule.get_state()
        self.assert_(state == 0)
        # Now we get bd1 CRITICAL/HARD
        self.scheduler_loop(1, [[svc_bd1, 2, 'CRITICAL | value1=1 value2=2']])
        self.assert_(svc_bd1.state == 'CRITICAL')
        self.assert_(svc_bd1.state_type == 'HARD')
        self.assert_(svc_bd1.last_hard_state_id == 2)
        # The rule must still be a 0 (or inside)
        state = bp_rule.get_state()
        self.assert_(state == 0)
        # Now we also set bd2 as CRITICAL/HARD... byebye 0 :)
        self.scheduler_loop(2, [[svc_bd2, 2, 'CRITICAL | value1=1 value2=2']])
        self.assert_(svc_bd2.state == 'CRITICAL')
        self.assert_(svc_bd2.state_type == 'HARD')
        self.assert_(svc_bd2.last_hard_state_id == 2)
        # And now the state of the rule must be 2
        state = bp_rule.get_state()
        self.assert_(state == 2)
        # And If we set one WARNING?
        self.scheduler_loop(2, [[svc_bd2, 1, 'WARNING | value1=1 value2=2']])
        self.assert_(svc_bd2.state == 'WARNING')
        self.assert_(svc_bd2.state_type == 'HARD')
        self.assert_(svc_bd2.last_hard_state_id == 1)
        # Must be WARNING (better no 0 value)
        state = bp_rule.get_state()
        self.assert_(state == 1)
# We will try a simple bd1 AND db2
    def test_simple_and_business_correlator(self):
        """Check a simple `db1 & db2` business rule: the rule should take
        the worst HARD state of its two service sons.
        """
        #
        # Config is not correct because of a wrong relative path
        # in the main config file
        #
        print "Get the hosts and services"
        now = time.time()
        host = self.sched.hosts.find_by_name("test_host_0")
        host.checks_in_progress = []
        host.act_depend_of = [] # ignore the router
        router = self.sched.hosts.find_by_name("test_router_0")
        router.checks_in_progress = []
        router.act_depend_of = [] # ignore the router
        svc = self.sched.services.find_srv_by_name_and_hostname("test_host_0", "test_ok_0")
        svc.checks_in_progress = []
        svc.act_depend_of = [] # no hostchecks on critical checkresults
        svc_bd1 = self.sched.services.find_srv_by_name_and_hostname("test_host_0", "db1")
        self.assert_(svc_bd1.got_business_rule == False)
        self.assert_(svc_bd1.business_rule is None)
        svc_bd2 = self.sched.services.find_srv_by_name_and_hostname("test_host_0", "db2")
        self.assert_(svc_bd2.got_business_rule == False)
        self.assert_(svc_bd2.business_rule is None)
        svc_cor = self.sched.services.find_srv_by_name_and_hostname("test_host_0", "Simple_And")
        self.assert_(svc_cor.got_business_rule == True)
        self.assert_(svc_cor.business_rule is not None)
        bp_rule = svc_cor.business_rule
        self.assert_(bp_rule.operand == '&')
        sons = bp_rule.sons
        print "Sons,", sons
        #We(ve got 2 sons, 2 services nodes
        self.assert_(len(sons) == 2)
        self.assert_(sons[0].operand == 'service')
        self.assert_(sons[0].sons[0] == svc_bd1)
        self.assert_(sons[1].operand == 'service')
        self.assert_(sons[1].sons[0] == svc_bd2)
        # Now state working on the states
        self.scheduler_loop(1, [[svc_bd2, 0, 'OK | value1=1 value2=2'], [svc_bd1, 0, 'OK | rtt=10']])
        self.assert_(svc_bd1.state == 'OK')
        self.assert_(svc_bd1.state_type == 'HARD')
        self.assert_(svc_bd2.state == 'OK')
        self.assert_(svc_bd2.state_type == 'HARD')
        state = bp_rule.get_state()
        self.assert_(state == 0)
        # Now we set the bd1 as soft/CRITICAL
        self.scheduler_loop(1, [[svc_bd1, 2, 'CRITICAL | value1=1 value2=2']])
        self.assert_(svc_bd1.state == 'CRITICAL')
        self.assert_(svc_bd1.state_type == 'SOFT')
        self.assert_(svc_bd1.last_hard_state_id == 0)
        # The business rule must still be 0
        # becase we want HARD states
        state = bp_rule.get_state()
        self.assert_(state == 0)
        # Now we get bd1 CRITICAL/HARD
        self.scheduler_loop(1, [[svc_bd1, 2, 'CRITICAL | value1=1 value2=2']])
        self.assert_(svc_bd1.state == 'CRITICAL')
        self.assert_(svc_bd1.state_type == 'HARD')
        self.assert_(svc_bd1.last_hard_state_id == 2)
        # The rule must go CRITICAL
        state = bp_rule.get_state()
        self.assert_(state == 2)
        # Now we also set bd2 as WARNING/HARD...
        self.scheduler_loop(2, [[svc_bd2, 1, 'WARNING | value1=1 value2=2']])
        self.assert_(svc_bd2.state == 'WARNING')
        self.assert_(svc_bd2.state_type == 'HARD')
        self.assert_(svc_bd2.last_hard_state_id == 1)
        # And now the state of the rule must be 2
        state = bp_rule.get_state()
        self.assert_(state == 2)
        # And If we set one WARNING too?
        self.scheduler_loop(2, [[svc_bd1, 1, 'WARNING | value1=1 value2=2']])
        self.assert_(svc_bd1.state == 'WARNING')
        self.assert_(svc_bd1.state_type == 'HARD')
        self.assert_(svc_bd1.last_hard_state_id == 1)
        # Must be WARNING (worse no 0 value for both)
        state = bp_rule.get_state()
        self.assert_(state == 1)
    # We will try a simple 1of: bd1 OR/AND db2
    def test_simple_1of_business_correlator(self):
        """A '1 of: db1, db2' business rule.

        The parser must produce an 'of:' node with of_values (1,2,2) and
        one 'service' son per member.  The rule reacts only to HARD child
        states and stays OK while at least one child is OK.
        """
        print "Get the hosts and services"
        now = time.time()
        # Cut host/router dependencies so nothing interferes with the
        # check results we inject by hand below.
        host = self.sched.hosts.find_by_name("test_host_0")
        host.checks_in_progress = []
        host.act_depend_of = []  # ignore the router
        router = self.sched.hosts.find_by_name("test_router_0")
        router.checks_in_progress = []
        router.act_depend_of = []  # ignore the router
        svc = self.sched.services.find_srv_by_name_and_hostname("test_host_0", "test_ok_0")
        svc.checks_in_progress = []
        svc.act_depend_of = []  # no hostchecks on critical checkresults
        # db1 and db2 are plain services: no business rule on them
        svc_bd1 = self.sched.services.find_srv_by_name_and_hostname("test_host_0", "db1")
        self.assert_(svc_bd1.got_business_rule == False)
        self.assert_(svc_bd1.business_rule is None)
        svc_bd2 = self.sched.services.find_srv_by_name_and_hostname("test_host_0", "db2")
        self.assert_(svc_bd2.got_business_rule == False)
        self.assert_(svc_bd2.business_rule is None)
        # Simple_1Of is the correlator service carrying the rule
        svc_cor = self.sched.services.find_srv_by_name_and_hostname("test_host_0", "Simple_1Of")
        self.assert_(svc_cor.got_business_rule == True)
        self.assert_(svc_cor.business_rule is not None)
        bp_rule = svc_cor.business_rule
        self.assert_(bp_rule.operand == 'of:')
        # A simple "1 of:" expands to the triple (1, MAX, MAX), here (1,2,2)
        self.assert_(bp_rule.of_values == (1,2,2))
        sons = bp_rule.sons
        print "Sons,", sons
        # We've got 2 sons, 2 service nodes, in declaration order
        self.assert_(len(sons) == 2)
        self.assert_(sons[0].operand == 'service')
        self.assert_(sons[0].sons[0] == svc_bd1)
        self.assert_(sons[1].operand == 'service')
        self.assert_(sons[1].sons[0] == svc_bd2)
        # Now start working on the states: both children OK/HARD -> rule OK
        self.scheduler_loop(1, [[svc_bd2, 0, 'OK | value1=1 value2=2'], [svc_bd1, 0, 'OK | rtt=10']])
        self.assert_(svc_bd1.state == 'OK')
        self.assert_(svc_bd1.state_type == 'HARD')
        self.assert_(svc_bd2.state == 'OK')
        self.assert_(svc_bd2.state_type == 'HARD')
        state = bp_rule.get_state()
        self.assert_(state == 0)
        # Now we set bd1 as SOFT/CRITICAL
        self.scheduler_loop(1, [[svc_bd1, 2, 'CRITICAL | value1=1 value2=2']])
        self.assert_(svc_bd1.state == 'CRITICAL')
        self.assert_(svc_bd1.state_type == 'SOFT')
        self.assert_(svc_bd1.last_hard_state_id == 0)
        # The business rule must still be 0
        # because only HARD states are taken into account
        state = bp_rule.get_state()
        self.assert_(state == 0)
        # Now we get bd1 CRITICAL/HARD
        self.scheduler_loop(1, [[svc_bd1, 2, 'CRITICAL | value1=1 value2=2']])
        self.assert_(svc_bd1.state == 'CRITICAL')
        self.assert_(svc_bd1.state_type == 'HARD')
        self.assert_(svc_bd1.last_hard_state_id == 2)
        # The rule must still be OK: db2 alone satisfies the "1 of" quorum
        state = bp_rule.get_state()
        self.assert_(state == 0)
        # Now we also set bd2 as CRITICAL/HARD...
        self.scheduler_loop(2, [[svc_bd2, 2, 'CRITICAL | value1=1 value2=2']])
        self.assert_(svc_bd2.state == 'CRITICAL')
        self.assert_(svc_bd2.state_type == 'HARD')
        self.assert_(svc_bd2.last_hard_state_id == 2)
        # No OK child left: the state of the rule must now be 2
        state = bp_rule.get_state()
        self.assert_(state == 2)
        # And if we downgrade one child to WARNING?
        self.scheduler_loop(2, [[svc_bd1, 1, 'WARNING | value1=1 value2=2']])
        self.assert_(svc_bd1.state == 'WARNING')
        self.assert_(svc_bd1.state_type == 'HARD')
        self.assert_(svc_bd1.last_hard_state_id == 1)
        # Must be WARNING (no 0 value left for both, like for an AND rule)
        # NOTE(review): bd2 is still CRITICAL yet the rule yields 1 -- the
        # of: rule apparently resolves to WARNING here; confirm intended
        # threshold semantics against the BoolExpr implementation.
        state = bp_rule.get_state()
        self.assert_(state == 1)
# We will try a simple 1of: test_router_0 OR/AND test_host_0
def test_simple_1of_business_correlator_with_hosts(self):
#
# Config is not correct because of a wrong relative path
# in the main config file
#
print "Get the hosts and services"
now = time.time()
host = self.sched.hosts.find_by_name("test_host_0")
host.checks_in_progress = []
host.act_depend_of = [] # ignore the router
router = self.sched.hosts.find_by_name("test_router_0")
router.checks_in_progress = []
router.act_depend_of = [] # ignore the router
svc_cor = self.sched.services.find_srv_by_name_and_hostname("test_host_0", "Simple_1Of_with_host")
self.assert_(svc_cor.got_business_rule == True)
self.assert_(svc_cor.business_rule is not None)
bp_rule = svc_cor.business_rule
self.assert_(bp_rule.operand == 'of:')
# Simple 1of: so in fact a triple (1,2,2) (1of and MAX,MAX
self.assert_(bp_rule.of_values == (1,2,2))
sons = bp_rule.sons
print "Sons,", sons
# We've got 2 sons, 2 services nodes
self.assert_(len(sons) == 2)
self.assert_(sons[0].operand == 'host')
self.assert_(sons[0].sons[0] == host)
self.assert_(sons[1].operand == 'host')
self.assert_(sons[1].sons[0] == router)
# We will try a simple bd1 OR db2, but this time we will
# schedule a real check and see if it's good
def test_simple_or_business_correlator_with_schedule(self):
#
# Config is not correct because of a wrong relative path
# in the main config file
#
print "Get the hosts and services"
now = time.time()
host = self.sched.hosts.find_by_name("test_host_0")
host.checks_in_progress = []
host.act_depend_of = [] # ignore the router
router = self.sched.hosts.find_by_name("test_router_0")
router.checks_in_progress = []
router.act_depend_of = [] # ignore the router
svc = self.sched.services.find_srv_by_name_and_hostname("test_host_0", "test_ok_0")
svc.checks_in_progress = []
svc.act_depend_of = [] # no hostchecks on critical checkresults
svc_bd1 = self.sched.services.find_srv_by_name_and_hostname("test_host_0", "db1")
self.assert_(svc_bd1.got_business_rule == False)
self.assert_(svc_bd1.business_rule is None)
svc_bd2 = self.sched.services.find_srv_by_name_and_hostname("test_host_0", "db2")
self.assert_(svc_bd2.got_business_rule == False)
self.assert_(svc_bd2.business_rule is None)
svc_cor = self.sched.services.find_srv_by_name_and_hostname("test_host_0", "Simple_Or")
self.assert_(svc_cor.got_business_rule == True)
self.assert_(svc_cor.business_rule is not None)
bp_rule = svc_cor.business_rule
self.assert_(bp_rule.operand == '|')
sons = bp_rule.sons
print "Sons,", sons
#We(ve got 2 sons, 2 services nodes
self.assert_(len(sons) == 2)
self.assert_(sons[0].operand == 'service')
self.assert_(sons[0].sons[0] == svc_bd1)
self.assert_(sons[1].operand == 'service')
self.assert_(sons[1].sons[0] == svc_bd2)
# Now state working on the states
self.scheduler_loop(1, [[svc_bd2, 0, 'OK | value1=1 value2=2'], [svc_bd1, 0, 'OK | rtt=10']])
self.assert_(svc_bd1.state == 'OK')
self.assert_(svc_bd1.state_type == 'HARD')
self.assert_(svc_bd2.state == 'OK')
self.assert_(svc_bd2.state_type == 'HARD')
state = bp_rule.get_state()
self.assert_(state == 0)
print "Launch internal check"
svc_cor.launch_check(now-1)
c = svc_cor.actions[0]
self.assert_(c.internal == True)
self.assert_(c.is_launchable(now))
#ask the scheduler to launch this check
#and ask 2 loops: one for launch the check
#and another to integer the result
self.scheduler_loop(2, [])
# We should have no more the check
self.assert_(len(svc_cor.actions) == 0)
print "Look at svc_cor state", svc_cor.state
# What is the svc_cor state now?
self.assert_(svc_cor.state == 'OK')
self.assert_(svc_cor.state_type == 'HARD')
self.assert_(svc_cor.last_hard_state_id == 0)
# Now we set the bd1 as soft/CRITICAL
self.scheduler_loop(1, [[svc_bd1, 2, 'CRITICAL | value1=1 value2=2']])
self.assert_(svc_bd1.state == 'CRITICAL')
self.assert_(svc_bd1.state_type == 'SOFT')
self.assert_(svc_bd1.last_hard_state_id == 0)
# The business rule must still be 0
state = bp_rule.get_state()
self.assert_(state == 0)
print "Launch internal check"
svc_cor.launch_check(now-1)
c = svc_cor.actions[0]
self.assert_(c.internal == True)
self.assert_(c.is_launchable(now))
#ask the scheduler to launch this check
#and ask 2 loops: one for launch the check
#and another to integer the result
self.scheduler_loop(2, [])
# We should have no more the check
self.assert_(len(svc_cor.actions) == 0)
print "Look at svc_cor state", svc_cor.state
# What is the svc_cor state now?
self.assert_(svc_cor.state == 'OK')
self.assert_(svc_cor.state_type == 'HARD')
self.assert_(svc_cor.last_hard_state_id == 0)
# Now we get bd1 CRITICAL/HARD
self.scheduler_loop(1, [[svc_bd1, 2, 'CRITICAL | value1=1 value2=2']])
self.assert_(svc_bd1.state == 'CRITICAL')
self.assert_(svc_bd1.state_type == 'HARD')
self.assert_(svc_bd1.last_hard_state_id == 2)
# The rule must still be a 0 (or inside)
state = bp_rule.get_state()
self.assert_(state == 0)
print "Launch internal check"
svc_cor.launch_check(now-1)
c = svc_cor.actions[0]
self.assert_(c.internal == True)
self.assert_(c.is_launchable(now))
#ask the scheduler to launch this check
#and ask 2 loops: one for launch the check
#and another to integer the result
self.scheduler_loop(2, [])
# We should have no more the check
self.assert_(len(svc_cor.actions) == 0)
print "Look at svc_cor state", svc_cor.state
# What is the svc_cor state now?
self.assert_(svc_cor.state == 'OK')
self.assert_(svc_cor.state_type == 'HARD')
self.assert_(svc_cor.last_hard_state_id == 0)
# Now we also set bd2 as CRITICAL/HARD... byebye 0 :)
self.scheduler_loop(2, [[svc_bd2, 2, 'CRITICAL | value1=1 value2=2']])
self.assert_(svc_bd2.state == 'CRITICAL')
self.assert_(svc_bd2.state_type == 'HARD')
self.assert_(svc_bd2.last_hard_state_id == 2)
# And now the state of the rule must be 2
state = bp_rule.get_state()
self.assert_(state == 2)
# And now we must be CRITICAL/SOFT!
print "Launch internal check"
svc_cor.launch_check(now-1)
c = svc_cor.actions[0]
self.assert_(c.internal == True)
self.assert_(c.is_launchable(now))
#ask the scheduler to launch this check
#and ask 2 loops: one for launch the check
#and another to integer the result
self.scheduler_loop(2, [])
# We should have no more the check
self.assert_(len(svc_cor.actions) == 0)
print "Look at svc_cor state", svc_cor.state
# What is the svc_cor state now?
self.assert_(svc_cor.state == 'CRITICAL')
self.assert_(svc_cor.state_type == 'SOFT')
self.assert_(svc_cor.last_hard_state_id == 0)
#OK, re recheck again, GO HARD!
print "Launch internal check"
svc_cor.launch_check(now-1)
c = svc_cor.actions[0]
self.assert_(c.internal == True)
self.assert_(c.is_launchable(now))
#ask the scheduler to launch this check
#and ask 2 loops: one for launch the check
#and another to integer the result
self.scheduler_loop(2, [])
# We should have no more the check
self.assert_(len(svc_cor.actions) == 0)
print "Look at svc_cor state", svc_cor.state
# What is the svc_cor state now?
self.assert_(svc_cor.state == 'CRITICAL')
self.assert_(svc_cor.state_type == 'HARD')
self.assert_(svc_cor.last_hard_state_id == 2)
# And If we set one WARNING?
self.scheduler_loop(2, [[svc_bd2, 1, 'WARNING | value1=1 value2=2']])
self.assert_(svc_bd2.state == 'WARNING')
self.assert_(svc_bd2.state_type == 'HARD')
self.assert_(svc_bd2.last_hard_state_id == 1)
# Must be WARNING (better no 0 value)
state = bp_rule.get_state()
self.assert_(state == 1)
# And in a HARD
print "Launch internal check"
svc_cor.launch_check(now-1)
c = svc_cor.actions[0]
self.assert_(c.internal == True)
self.assert_(c.is_launchable(now))
#ask the scheduler to launch this check
#and ask 2 loops: one for launch the check
#and another to integer the result
self.scheduler_loop(2, [])
# We should have no more the check
self.assert_(len(svc_cor.actions) == 0)
print "Look at svc_cor state", svc_cor.state
# What is the svc_cor state now?
self.assert_(svc_cor.state == 'WARNING')
self.assert_(svc_cor.state_type == 'HARD')
self.assert_(svc_cor.last_hard_state_id == 1)
print "All elements", bp_rule.list_all_elements()
print "IMPACT:", svc_bd2.impacts
for i in svc_bd2.impacts:
print i.get_name()
# Assert that Simple_Or Is an impact of the problem bd2
self.assert_(svc_cor in svc_bd2.impacts)
# and bd1 too
self.assert_(svc_cor in svc_bd1.impacts)
def test_dep_node_list_elements(self):
svc_bd1 = self.sched.services.find_srv_by_name_and_hostname("test_host_0", "db1")
self.assert_(svc_bd1.got_business_rule == False)
self.assert_(svc_bd1.business_rule is None)
svc_bd2 = self.sched.services.find_srv_by_name_and_hostname("test_host_0", "db2")
self.assert_(svc_bd2.got_business_rule == False)
self.assert_(svc_bd2.business_rule is None)
svc_cor = self.sched.services.find_srv_by_name_and_hostname("test_host_0", "Simple_Or")
self.assert_(svc_cor.got_business_rule == True)
self.assert_(svc_cor.business_rule is not None)
bp_rule = svc_cor.business_rule
self.assert_(bp_rule.operand == '|')
print "All elements", bp_rule.list_all_elements()
all_elt = bp_rule.list_all_elements()
self.assert_(svc_bd2 in all_elt)
self.assert_(svc_bd1 in all_elt)
print "DBG: bd2 depend_on_me", svc_bd2.act_depend_of_me
# We will try a full ERP rule and
# schedule a real check and see if it's good
def test_full_erp_rule_with_schedule(self):
#
# Config is not correct because of a wrong relative path
# in the main config file
#
print "Get the hosts and services"
now = time.time()
host = self.sched.hosts.find_by_name("test_host_0")
host.checks_in_progress = []
host.act_depend_of = [] # ignore the router
router = self.sched.hosts.find_by_name("test_router_0")
router.checks_in_progress = []
router.act_depend_of = [] # ignore the router
svc = self.sched.services.find_srv_by_name_and_hostname("test_host_0", "test_ok_0")
svc.checks_in_progress = []
svc.act_depend_of = [] # no hostchecks on critical checkresults
svc_bd1 = self.sched.services.find_srv_by_name_and_hostname("test_host_0", "db1")
self.assert_(svc_bd1.got_business_rule == False)
self.assert_(svc_bd1.business_rule is None)
svc_bd2 = self.sched.services.find_srv_by_name_and_hostname("test_host_0", "db2")
self.assert_(svc_bd2.got_business_rule == False)
self.assert_(svc_bd2.business_rule is None)
svc_web1 = self.sched.services.find_srv_by_name_and_hostname("test_host_0", "web1")
self.assert_(svc_web1.got_business_rule == False)
self.assert_(svc_web1.business_rule is None)
svc_web2 = self.sched.services.find_srv_by_name_and_hostname("test_host_0", "web2")
self.assert_(svc_web2.got_business_rule == False)
self.assert_(svc_web2.business_rule is None)
svc_lvs1 = self.sched.services.find_srv_by_name_and_hostname("test_host_0", "lvs1")
self.assert_(svc_lvs1.got_business_rule == False)
self.assert_(svc_lvs1.business_rule is None)
svc_lvs2 = self.sched.services.find_srv_by_name_and_hostname("test_host_0", "lvs2")
self.assert_(svc_lvs2.got_business_rule == False)
self.assert_(svc_lvs2.business_rule is None)
svc_cor = self.sched.services.find_srv_by_name_and_hostname("test_host_0", "ERP")
self.assert_(svc_cor.got_business_rule == True)
self.assert_(svc_cor.business_rule is not None)
bp_rule = svc_cor.business_rule
self.assert_(bp_rule.operand == '&')
sons = bp_rule.sons
print "Sons,", sons
#We've got 3 sons, each 3 rules
self.assert_(len(sons) == 3)
bd_node = sons[0]
self.assert_(bd_node.operand == '|')
self.assert_(bd_node.sons[0].sons[0] == svc_bd1)
self.assert_(bd_node.sons[1].sons[0] == svc_bd2)
# Now state working on the states
self.scheduler_loop(1, [[svc_bd2, 0, 'OK | value1=1 value2=2'], [svc_bd1, 0, 'OK | rtt=10']])
self.assert_(svc_bd1.state == 'OK')
self.assert_(svc_bd1.state_type == 'HARD')
self.assert_(svc_bd2.state == 'OK')
self.assert_(svc_bd2.state_type == 'HARD')
state = bp_rule.get_state()
self.assert_(state == 0)
print "Launch internal check"
svc_cor.launch_check(now-1)
c = svc_cor.actions[0]
self.assert_(c.internal == True)
self.assert_(c.is_launchable(now))
#ask the scheduler to launch this check
#and ask 2 loops: one for launch the check
#and another to integer the result
self.scheduler_loop(2, [])
# We should have no more the check
self.assert_(len(svc_cor.actions) == 0)
print "Look at svc_cor state", svc_cor.state
# What is the svc_cor state now?
self.assert_(svc_cor.state == 'OK')
self.assert_(svc_cor.state_type == 'HARD')
self.assert_(svc_cor.last_hard_state_id == 0)
# Now we set the bd1 as soft/CRITICAL
self.scheduler_loop(1, [[svc_bd1, 2, 'CRITICAL | value1=1 value2=2']])
self.assert_(svc_bd1.state == 'CRITICAL')
self.assert_(svc_bd1.state_type == 'SOFT')
self.assert_(svc_bd1.last_hard_state_id == 0)
# The business rule must still be 0
state = bp_rule.get_state()
self.assert_(state == 0)
print "Launch internal check"
svc_cor.launch_check(now-1)
c = svc_cor.actions[0]
self.assert_(c.internal == True)
self.assert_(c.is_launchable(now))
#ask the scheduler to launch this check
#and ask 2 loops: one for launch the check
#and another to integer the result
self.scheduler_loop(2, [])
# We should have no more the check
self.assert_(len(svc_cor.actions) == 0)
print "ERP: Look at svc_cor state", svc_cor.state
# What is the svc_cor state now?
self.assert_(svc_cor.state == 'OK')
self.assert_(svc_cor.state_type == 'HARD')
self.assert_(svc_cor.last_hard_state_id == 0)
# Now we get bd1 CRITICAL/HARD
self.scheduler_loop(1, [[svc_bd1, 2, 'CRITICAL | value1=1 value2=2']])
self.assert_(svc_bd1.state == 'CRITICAL')
self.assert_(svc_bd1.state_type == 'HARD')
self.assert_(svc_bd1.last_hard_state_id == 2)
# The rule must still be a 0 (or inside)
state = bp_rule.get_state()
self.assert_(state == 0)
print "ERP: Launch internal check"
svc_cor.launch_check(now-1)
c = svc_cor.actions[0]
self.assert_(c.internal == True)
self.assert_(c.is_launchable(now))
#ask the scheduler to launch this check
#and ask 2 loops: one for launch the check
#and another to integer the result
self.scheduler_loop(2, [])
# We should have no more the check
self.assert_(len(svc_cor.actions) == 0)
print "ERP: Look at svc_cor state", svc_cor.state
# What is the svc_cor state now?
self.assert_(svc_cor.state == 'OK')
self.assert_(svc_cor.state_type == 'HARD')
self.assert_(svc_cor.last_hard_state_id == 0)
# Now we also set bd2 as CRITICAL/HARD... byebye 0 :)
self.scheduler_loop(2, [[svc_bd2, 2, 'CRITICAL | value1=1 value2=2']])
self.assert_(svc_bd2.state == 'CRITICAL')
self.assert_(svc_bd2.state_type == 'HARD')
self.assert_(svc_bd2.last_hard_state_id == 2)
# And now the state of the rule must be 2
state = bp_rule.get_state()
self.assert_(state == 2)
# And now we must be CRITICAL/SOFT!
print "ERP: Launch internal check"
svc_cor.launch_check(now-1)
c = svc_cor.actions[0]
self.assert_(c.internal == True)
self.assert_(c.is_launchable(now))
#ask the scheduler to launch this check
#and ask 2 loops: one for launch the check
#and another to integer the result
self.scheduler_loop(2, [])
# We should have no more the check
self.assert_(len(svc_cor.actions) == 0)
print "ERP: Look at svc_cor state", svc_cor.state
# What is the svc_cor state now?
self.assert_(svc_cor.state == 'CRITICAL')
self.assert_(svc_cor.state_type == 'SOFT')
self.assert_(svc_cor.last_hard_state_id == 0)
#OK, re recheck again, GO HARD!
print "ERP: Launch internal check"
svc_cor.launch_check(now-1)
c = svc_cor.actions[0]
self.assert_(c.internal == True)
self.assert_(c.is_launchable(now))
#ask the scheduler to launch this check
#and ask 2 loops: one for launch the check
#and another to integer the result
self.scheduler_loop(2, [])
# We should have no more the check
self.assert_(len(svc_cor.actions) == 0)
print "ERP: Look at svc_cor state", svc_cor.state
# What is the svc_cor state now?
self.assert_(svc_cor.state == 'CRITICAL')
self.assert_(svc_cor.state_type == 'HARD')
self.assert_(svc_cor.last_hard_state_id == 2)
# And If we set one WARNING?
self.scheduler_loop(2, [[svc_bd2, 1, 'WARNING | value1=1 value2=2']])
self.assert_(svc_bd2.state == 'WARNING')
self.assert_(svc_bd2.state_type == 'HARD')
self.assert_(svc_bd2.last_hard_state_id == 1)
# Must be WARNING (better no 0 value)
state = bp_rule.get_state()
self.assert_(state == 1)
# And in a HARD
print "ERP: Launch internal check"
svc_cor.launch_check(now-1)
c = svc_cor.actions[0]
self.assert_(c.internal == True)
self.assert_(c.is_launchable(now))
#ask the scheduler to launch this check
#and ask 2 loops: one for launch the check
#and another to integer the result
self.scheduler_loop(2, [])
# We should have no more the check
self.assert_(len(svc_cor.actions) == 0)
print "ERP: Look at svc_cor state", svc_cor.state
# What is the svc_cor state now?
self.assert_(svc_cor.state == 'WARNING')
self.assert_(svc_cor.state_type == 'HARD')
self.assert_(svc_cor.last_hard_state_id == 1)
print "All elements", bp_rule.list_all_elements()
print "IMPACT:", svc_bd2.impacts
for i in svc_bd2.impacts:
print i.get_name()
# Assert that Simple_Or Is an impact of the problem bd2
self.assert_(svc_cor in svc_bd2.impacts)
# and bd1 too
self.assert_(svc_cor in svc_bd1.impacts)
# And now all is green :)
self.scheduler_loop(2, [[svc_bd2, 0, 'OK | value1=1 value2=2'], [svc_bd1, 0, 'OK | value1=1 value2=2']])
print "ERP: Launch internal check"
svc_cor.launch_check(now-1)
c = svc_cor.actions[0]
self.assert_(c.internal == True)
self.assert_(c.is_launchable(now))
#ask the scheduler to launch this check
#and ask 2 loops: one for launch the check
#and another to integer the result
self.scheduler_loop(2, [])
# We should have no more the check
self.assert_(len(svc_cor.actions) == 0)
print "ERP: Look at svc_cor state", svc_cor.state
# What is the svc_cor state now?
self.assert_(svc_cor.state == 'OK')
self.assert_(svc_cor.state_type == 'HARD')
self.assert_(svc_cor.last_hard_state_id == 0)
# And no more in impact
self.assert_(svc_cor not in svc_bd2.impacts)
self.assert_(svc_cor not in svc_bd1.impacts)
# And what if we set 2 service from distant rule CRITICAL?
# ERP should be still OK
# And now all is green :)
self.scheduler_loop(2, [[svc_bd1, 2, 'CRITICAL | value1=1 value2=2'], [svc_web1, 2, 'CRITICAL | value1=1 value2=2']])
print "ERP: Launch internal check"
svc_cor.launch_check(now-1)
c = svc_cor.actions[0]
self.assert_(c.internal == True)
self.assert_(c.is_launchable(now))
#ask the scheduler to launch this check
#and ask 2 loops: one for launch the check
#and another to integer the result
self.scheduler_loop(2, [])
# We should have no more the check
self.assert_(len(svc_cor.actions) == 0)
print "ERP: Look at svc_cor state", svc_cor.state
# What is the svc_cor state now?
self.assert_(svc_cor.state == 'OK')
self.assert_(svc_cor.state_type == 'HARD')
self.assert_(svc_cor.last_hard_state_id == 0)
    # We will try a complex X of: A, B, C, D, E rule
    def test_complex_ABCof_business_correlator(self):
        """'5 of: A, B, C, D, E' rule, then live mutation of of_values /
        is_of_mul to probe the threshold logic of the 'of:' operator.
        """
        print "Get the hosts and services"
        now = time.time()
        # Cut host/router dependencies so nothing interferes with the
        # check results we inject by hand below.
        host = self.sched.hosts.find_by_name("test_host_0")
        host.checks_in_progress = []
        host.act_depend_of = []  # ignore the router
        router = self.sched.hosts.find_by_name("test_router_0")
        router.checks_in_progress = []
        router.act_depend_of = []  # ignore the router
        svc = self.sched.services.find_srv_by_name_and_hostname("test_host_0", "test_ok_0")
        svc.checks_in_progress = []
        svc.act_depend_of = []  # no hostchecks on critical checkresults
        # A..E are plain services: no business rule on them
        A = self.sched.services.find_srv_by_name_and_hostname("test_host_0", "A")
        self.assert_(A.got_business_rule == False)
        self.assert_(A.business_rule is None)
        B = self.sched.services.find_srv_by_name_and_hostname("test_host_0", "B")
        self.assert_(B.got_business_rule == False)
        self.assert_(B.business_rule is None)
        C = self.sched.services.find_srv_by_name_and_hostname("test_host_0", "C")
        self.assert_(C.got_business_rule == False)
        self.assert_(C.business_rule is None)
        D = self.sched.services.find_srv_by_name_and_hostname("test_host_0", "D")
        self.assert_(D.got_business_rule == False)
        self.assert_(D.business_rule is None)
        E = self.sched.services.find_srv_by_name_and_hostname("test_host_0", "E")
        self.assert_(E.got_business_rule == False)
        self.assert_(E.business_rule is None)
        svc_cor = self.sched.services.find_srv_by_name_and_hostname("test_host_0", "Complex_ABCOf")
        self.assert_(svc_cor.got_business_rule == True)
        self.assert_(svc_cor.business_rule is not None)
        bp_rule = svc_cor.business_rule
        self.assert_(bp_rule.operand == 'of:')
        # The rule requires all 5 OK, warning/critical thresholds at 1
        self.assert_(bp_rule.of_values == (5,1,1))
        sons = bp_rule.sons
        print "Sons,", sons
        # We've got 5 sons, one 'service' node per leaf, in order
        self.assert_(len(sons) == 5)
        self.assert_(sons[0].operand == 'service')
        self.assert_(sons[0].sons[0] == A)
        self.assert_(sons[1].operand == 'service')
        self.assert_(sons[1].sons[0] == B)
        self.assert_(sons[2].operand == 'service')
        self.assert_(sons[2].sons[0] == C)
        self.assert_(sons[3].operand == 'service')
        self.assert_(sons[3].sons[0] == D)
        self.assert_(sons[4].operand == 'service')
        self.assert_(sons[4].sons[0] == E)
        # Now start working on the states: all OK/HARD -> rule OK
        self.scheduler_loop(1, [[A, 0, 'OK'], [B, 0, 'OK'], [C, 0, 'OK'], [D, 0, 'OK'], [E, 0, 'OK']])
        self.assert_(A.state == 'OK')
        self.assert_(A.state_type == 'HARD')
        self.assert_(B.state == 'OK')
        self.assert_(B.state_type == 'HARD')
        self.assert_(C.state == 'OK')
        self.assert_(C.state_type == 'HARD')
        self.assert_(D.state == 'OK')
        self.assert_(D.state_type == 'HARD')
        self.assert_(E.state == 'OK')
        self.assert_(E.state_type == 'HARD')
        state = bp_rule.get_state()
        self.assert_(state == 0)
        # Now we set A as SOFT/CRITICAL
        self.scheduler_loop(1, [[A, 2, 'CRITICAL']])
        self.assert_(A.state == 'CRITICAL')
        self.assert_(A.state_type == 'SOFT')
        self.assert_(A.last_hard_state_id == 0)
        # The business rule must still be 0
        # because only HARD states are taken into account
        state = bp_rule.get_state()
        self.assert_(state == 0)
        # Now we get A CRITICAL/HARD
        self.scheduler_loop(1, [[A, 2, 'CRITICAL']])
        self.assert_(A.state == 'CRITICAL')
        self.assert_(A.state_type == 'HARD')
        self.assert_(A.last_hard_state_id == 2)
        # With (5,1,1) a single HARD CRITICAL already breaks the "5 of"
        # quorum: the rule goes CRITICAL
        state = bp_rule.get_state()
        self.assert_(state == 2)
        # Now we also set B as CRITICAL/HARD...
        self.scheduler_loop(2, [[B, 2, 'CRITICAL']])
        self.assert_(B.state == 'CRITICAL')
        self.assert_(B.state_type == 'HARD')
        self.assert_(B.last_hard_state_id == 2)
        # And the state of the rule must stay 2
        state = bp_rule.get_state()
        self.assert_(state == 2)
        # And if we set A and B to WARNING now?
        self.scheduler_loop(2, [[A, 1, 'WARNING'], [B, 1, 'WARNING']])
        self.assert_(A.state == 'WARNING')
        self.assert_(A.state_type == 'HARD')
        self.assert_(A.last_hard_state_id == 1)
        self.assert_(B.state == 'WARNING')
        self.assert_(B.state_type == 'HARD')
        self.assert_(B.last_hard_state_id == 1)
        # Must be WARNING (worst "no 0" value for both, like for AND rule)
        state = bp_rule.get_state()
        print "state", state
        self.assert_(state == 1)
        # Ok now more fun, with changing of_values and states
        ### W O O O O
        # 4 of: -> Ok (we got 4 OK, and not 4 warn or crit, so it's OK)
        # 5,1,1 -> Warning (at least one warning, and no crit -> warning)
        # 5,2,1 -> OK (we want warning only if we got 2 bad states, so not here)
        self.scheduler_loop(2, [[A, 1, 'WARNING'], [B, 0, 'OK']])
        # 4 of: -> 4,5,5
        bp_rule.of_values = (4,5,5)
        bp_rule.is_of_mul = False
        self.assert_(bp_rule.get_state() == 0)
        # 5,1,1
        bp_rule.of_values = (5,1,1)
        bp_rule.is_of_mul = True
        self.assert_(bp_rule.get_state() == 1)
        # 5,2,1
        bp_rule.of_values = (5,2,1)
        bp_rule.is_of_mul = True
        self.assert_(bp_rule.get_state() == 0)
        ###* W C O O O
        # 4 of: -> Critical (not 4 OK, so we take the worst state: critical)
        # 4,1,1 -> Critical (2 states raise the warning, but one raises critical, so the worst state is critical)
        self.scheduler_loop(2, [[A, 1, 'WARNING'], [B, 2, 'Crit']])
        # 4 of: -> 4,5,5
        bp_rule.of_values = (4,5,5)
        bp_rule.is_of_mul = False
        self.assert_(bp_rule.get_state() == 2)
        # 4,1,1
        bp_rule.of_values = (4,1,1)
        bp_rule.is_of_mul = True
        self.assert_(bp_rule.get_state() == 2)
        ##* W C C O O
        #* 2 of: OK
        #* 4,1,1 -> Critical (same as before)
        #* 4,1,3 -> Warning (the warning threshold is reached, the critical one is not)
        self.scheduler_loop(2, [[A, 1, 'WARNING'], [B, 2, 'Crit'], [C, 2, 'Crit']])
        #* 2 of: 2,5,5
        bp_rule.of_values = (2,5,5)
        bp_rule.is_of_mul = False
        self.assert_(bp_rule.get_state() == 0)
        #* 4,1,1
        bp_rule.of_values = (4,1,1)
        bp_rule.is_of_mul = True
        self.assert_(bp_rule.get_state() == 2)
        #* 4,1,3
        bp_rule.of_values = (4,1,3)
        bp_rule.is_of_mul = True
        self.assert_(bp_rule.get_state() == 1)
    # We will try a simple bd1 AND NOT db2
    def test_simple_and_not_business_correlator(self):
        """A 'db1 & !db2' rule: the rule is OK when db1 is OK and db2 is
        NOT OK, and degrades when that combination (on HARD states) breaks.
        """
        print "Get the hosts and services"
        now = time.time()
        # Cut host/router dependencies so nothing interferes with the
        # check results we inject by hand below.
        host = self.sched.hosts.find_by_name("test_host_0")
        host.checks_in_progress = []
        host.act_depend_of = []  # ignore the router
        router = self.sched.hosts.find_by_name("test_router_0")
        router.checks_in_progress = []
        router.act_depend_of = []  # ignore the router
        svc = self.sched.services.find_srv_by_name_and_hostname("test_host_0", "test_ok_0")
        svc.checks_in_progress = []
        svc.act_depend_of = []  # no hostchecks on critical checkresults
        # db1 and db2 are plain services: no business rule on them
        svc_bd1 = self.sched.services.find_srv_by_name_and_hostname("test_host_0", "db1")
        self.assert_(svc_bd1.got_business_rule == False)
        self.assert_(svc_bd1.business_rule is None)
        svc_bd2 = self.sched.services.find_srv_by_name_and_hostname("test_host_0", "db2")
        self.assert_(svc_bd2.got_business_rule == False)
        self.assert_(svc_bd2.business_rule is None)
        svc_cor = self.sched.services.find_srv_by_name_and_hostname("test_host_0", "Simple_And_not")
        self.assert_(svc_cor.got_business_rule == True)
        self.assert_(svc_cor.business_rule is not None)
        bp_rule = svc_cor.business_rule
        self.assert_(bp_rule.operand == '&')
        sons = bp_rule.sons
        print "Sons,", sons
        # We've got 2 sons, 2 service nodes (the second one negated)
        self.assert_(len(sons) == 2)
        self.assert_(sons[0].operand == 'service')
        self.assert_(sons[0].sons[0] == svc_bd1)
        self.assert_(sons[1].operand == 'service')
        self.assert_(sons[1].sons[0] == svc_bd2)
        # Now start working on the states: bd1 OK, bd2 CRITICAL
        self.scheduler_loop(2, [[svc_bd1, 0, 'OK | value1=1 value2=2'], [svc_bd2, 2, 'CRITICAL | rtt=10']])
        self.assert_(svc_bd1.state == 'OK')
        self.assert_(svc_bd1.state_type == 'HARD')
        self.assert_(svc_bd2.state == 'CRITICAL')
        self.assert_(svc_bd2.state_type == 'HARD')
        # bd2 is negated (NOT), so the rule should be OK here
        state = bp_rule.get_state()
        self.assert_(state == 0)
        # Now we set bd1 as SOFT/CRITICAL
        self.scheduler_loop(1, [[svc_bd1, 2, 'CRITICAL | value1=1 value2=2']])
        self.assert_(svc_bd1.state == 'CRITICAL')
        self.assert_(svc_bd1.state_type == 'SOFT')
        self.assert_(svc_bd1.last_hard_state_id == 0)
        # The business rule must still be 0
        # because only HARD states are taken into account
        state = bp_rule.get_state()
        self.assert_(state == 0)
        # Now we get bd1 CRITICAL/HARD
        self.scheduler_loop(1, [[svc_bd1, 2, 'CRITICAL | value1=1 value2=2']])
        self.assert_(svc_bd1.state == 'CRITICAL')
        self.assert_(svc_bd1.state_type == 'HARD')
        self.assert_(svc_bd1.last_hard_state_id == 2)
        # bd1 is now bad: the rule must go CRITICAL
        state = bp_rule.get_state()
        self.assert_(state == 2)
        # Now we also set bd2 as WARNING/HARD...
        self.scheduler_loop(2, [[svc_bd2, 1, 'WARNING | value1=1 value2=2']])
        self.assert_(svc_bd2.state == 'WARNING')
        self.assert_(svc_bd2.state_type == 'HARD')
        self.assert_(svc_bd2.last_hard_state_id == 1)
        # And the state of the rule must still be 2
        state = bp_rule.get_state()
        self.assert_(state == 2)
        # And if we set bd1 to WARNING too?
        self.scheduler_loop(2, [[svc_bd1, 1, 'WARNING | value1=1 value2=2']])
        self.assert_(svc_bd1.state == 'WARNING')
        self.assert_(svc_bd1.state_type == 'HARD')
        self.assert_(svc_bd1.last_hard_state_id == 1)
        # Must be WARNING (worst "no 0" value for both)
        state = bp_rule.get_state()
        self.assert_(state == 1)
        # Now try to get OK in both places; for this rule that's bad :)
        self.scheduler_loop(2, [[svc_bd1, 0, 'OK | value1=1 value2=2'], [svc_bd2, 0, 'OK | value1=1 value2=2']])
        self.assert_(svc_bd1.state == 'OK')
        self.assert_(svc_bd1.state_type == 'HARD')
        self.assert_(svc_bd1.last_hard_state_id == 0)
        self.assert_(svc_bd2.state == 'OK')
        self.assert_(svc_bd2.state_type == 'HARD')
        self.assert_(svc_bd2.last_hard_state_id == 0)
        # Must be CRITICAL: OK AND NOT(OK) is not OK :)
        state = bp_rule.get_state()
        self.assert_(state == 2)
class TestConfigBroken(ShinkenTest):
    """A class with a broken configuration, where business rules reference unknown hosts/services"""
    def setUp(self):
        # This fixture's business rules reference hosts/services that do not
        # exist, so the arbiter must flag the configuration as incorrect.
        self.setup_with_file('etc/nagios_business_correlator_broken.cfg')
    def test_conf_is_correct(self):
        #
        # Business rules use services which don't exist. We want
        # the arbiter to output an error message and exit
        # in a controlled manner.
        #
        print "conf_is_correct", self.conf.conf_is_correct
        self.assert_(not self.conf.conf_is_correct)
        # Get the arbiter's log broks
        logs = [b.data['log'] for b in self.broks.values() if b.type == 'log']
        # Each broken rule is expected to emit two kinds of log lines:
        # "<name> : my business rule is invalid" and one line per unknown
        # host/service it references.
        # Simple_1Of_1unk_svc : my business rule is invalid
        # Simple_1Of_1unk_svc : Business rule uses unknown service test_host_0/db3
        self.assert_(len([log for log in logs if re.search('Simple_1Of_1unk_svc', log)]) == 2)
        self.assert_(len([log for log in logs if re.search('service test_host_0/db3', log)]) == 1)
        # ERP_unk_svc : my business rule is invalid
        # ERP_unk_svc : Business rule uses unknown service test_host_0/web100
        # ERP_unk_svc : Business rule uses unknown service test_host_0/lvs100
        self.assert_(len([log for log in logs if re.search('ERP_unk_svc', log)]) == 3)
        self.assert_(len([log for log in logs if re.search('service test_host_0/web100', log)]) == 1)
        self.assert_(len([log for log in logs if re.search('service test_host_0/lvs100', log)]) == 1)
        # Simple_1Of_1unk_host : my business rule is invalid
        # Simple_1Of_1unk_host : Business rule uses unknown host test_host_9
        self.assert_(len([log for log in logs if re.search('Simple_1Of_1unk_host', log)]) == 2)
        self.assert_(len([log for log in logs if re.search('host test_host_9', log)]) == 1)
        # Now the number of all failed business rules.
        self.assert_(len([log for log in logs if re.search('my business rule is invalid', log)]) == 3)
if __name__ == '__main__':
    # Run the test suite when this module is executed directly.
    unittest.main()
| agpl-3.0 |
pixelrebel/st2 | st2common/tests/unit/test_resource_registrar.py | 3 | 2270 | # Licensed to the StackStorm, Inc ('StackStorm') under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import mock
from st2common.content import utils as content_utils
from st2common.bootstrap.base import ResourceRegistrar
from st2common.persistence.pack import Pack
from st2common.persistence.pack import ConfigSchema
from st2tests import DbTestCase
from st2tests import fixturesloader
__all__ = [
    'ResourceRegistrarTestCase'
]
# Absolute path of the 'dummy_pack_1' fixture pack used by the test below.
PACK_PATH = os.path.join(fixturesloader.get_fixtures_packs_base_path(), 'dummy_pack_1')
class ResourceRegistrarTestCase(DbTestCase):
    """Tests that ResourceRegistrar registers packs and their config schemas."""

    def test_register_packs(self):
        # The database must start out empty.
        self.assertEqual(len(Pack.get_all()), 0)
        self.assertEqual(len(ConfigSchema.get_all()), 0)

        # Point the registrar's pack loader at the fixture pack only.
        registrar = ResourceRegistrar(use_pack_cache=False)
        registrar._pack_loader.get_packs = mock.Mock(
            return_value={'dummy_pack_1': PACK_PATH})

        registrar.register_packs(base_dirs=content_utils.get_packs_base_paths())

        # Exactly one pack and its config schema should now be present.
        pack_dbs = Pack.get_all()
        config_schema_dbs = ConfigSchema.get_all()
        self.assertEqual(len(pack_dbs), 1)
        self.assertEqual(len(config_schema_dbs), 1)
        self.assertEqual(pack_dbs[0].name, 'dummy_pack_1')
        self.assertTrue('api_key' in config_schema_dbs[0].attributes)
        self.assertTrue('api_secret' in config_schema_dbs[0].attributes)
| apache-2.0 |
coingraham/lena | lambda/requests/cookies.py | 177 | 17387 | # -*- coding: utf-8 -*-
"""
Compatibility code to be able to use `cookielib.CookieJar` with requests.
requests.utils imports from here, so be careful with imports.
"""
import copy
import time
import calendar
import collections
from .compat import cookielib, urlparse, urlunparse, Morsel
try:
    import threading
    # grr, pyflakes: this fixes "redefinition of unused 'threading'"
    threading
except ImportError:
    # Fall back to the single-threaded shim on builds without thread support.
    import dummy_threading as threading
class MockRequest(object):
    """Wraps a `requests.Request` to mimic a `urllib2.Request`.
    The code in `cookielib.CookieJar` expects this interface in order to correctly
    manage cookie policies, i.e., determine whether a cookie can be set, given the
    domains of the request and the cookie.
    The original request object is read-only. The client is responsible for collecting
    the new headers via `get_new_headers()` and interpreting them appropriately. You
    probably want `get_cookie_header`, defined below.
    """
    def __init__(self, request):
        self._r = request
        # Headers cookielib asks us to add; kept separate so the wrapped
        # request stays untouched.
        self._new_headers = {}
        # URL scheme of the request, e.g. 'http' or 'https'.
        self.type = urlparse(self._r.url).scheme
    def get_type(self):
        return self.type
    def get_host(self):
        return urlparse(self._r.url).netloc
    def get_origin_req_host(self):
        return self.get_host()
    def get_full_url(self):
        # Only return the response's URL if the user hadn't set the Host
        # header
        if not self._r.headers.get('Host'):
            return self._r.url
        # If they did set it, retrieve it and reconstruct the expected domain
        host = self._r.headers['Host']
        parsed = urlparse(self._r.url)
        # Reconstruct the URL as we expect it
        return urlunparse([
            parsed.scheme, host, parsed.path, parsed.params, parsed.query,
            parsed.fragment
        ])
    def is_unverifiable(self):
        # This wrapper always reports the transaction as unverifiable.
        return True
    def has_header(self, name):
        return name in self._r.headers or name in self._new_headers
    def get_header(self, name, default=None):
        # Original request headers win; new headers are only a fallback.
        return self._r.headers.get(name, self._new_headers.get(name, default))
    def add_header(self, key, val):
        """cookielib has no legitimate use for this method; add it back if you find one."""
        raise NotImplementedError("Cookie headers should be added with add_unredirected_header()")
    def add_unredirected_header(self, name, value):
        # cookielib calls this to set e.g. the Cookie header; collected in
        # _new_headers for the caller to retrieve.
        self._new_headers[name] = value
    def get_new_headers(self):
        return self._new_headers
    @property
    def unverifiable(self):
        return self.is_unverifiable()
    @property
    def origin_req_host(self):
        return self.get_origin_req_host()
    @property
    def host(self):
        return self.get_host()
class MockResponse(object):
    """Wraps a `httplib.HTTPMessage` to mimic a `urllib.addinfourl`.
    ...what? Basically, expose the parsed HTTP headers from the server response
    the way `cookielib` expects to see them.
    """
    def __init__(self, headers):
        """Make a MockResponse for `cookielib` to read.
        :param headers: a httplib.HTTPMessage or analogous carrying the headers
        """
        self._headers = headers
    def info(self):
        # cookielib calls info() to obtain the header container.
        return self._headers
    def getheaders(self, name):
        # Bug fix: the header list was previously computed and silently
        # discarded; callers expect it to be returned.
        return self._headers.getheaders(name)
def extract_cookies_to_jar(jar, request, response):
    """Extract the cookies from the response into a CookieJar.
    :param jar: cookielib.CookieJar (not necessarily a RequestsCookieJar)
    :param request: our own requests.Request object
    :param response: urllib3.HTTPResponse object
    """
    # Nothing to extract unless urllib3 kept the wrapped httplib response.
    original_response = getattr(response, '_original_response', None)
    if not original_response:
        return
    mock_request = MockRequest(request)
    # Wrap the HTTPMessage carrying the headers so cookielib can read it.
    mock_response = MockResponse(original_response.msg)
    jar.extract_cookies(mock_response, mock_request)
def get_cookie_header(jar, request):
    """Produce an appropriate Cookie header string to be sent with `request`, or None."""
    mock_request = MockRequest(request)
    jar.add_cookie_header(mock_request)
    new_headers = mock_request.get_new_headers()
    return new_headers.get('Cookie')
def remove_cookie_by_name(cookiejar, name, domain=None, path=None):
    """Unsets a cookie by name, by default over all domains and paths.
    Wraps CookieJar.clear(), is O(n).
    """
    # Collect matches first: we cannot mutate the jar while iterating it.
    clearables = [
        (cookie.domain, cookie.path, cookie.name)
        for cookie in cookiejar
        if cookie.name == name
        and (domain is None or domain == cookie.domain)
        and (path is None or path == cookie.path)
    ]
    for cookie_domain, cookie_path, cookie_name in clearables:
        cookiejar.clear(cookie_domain, cookie_path, cookie_name)
class CookieConflictError(RuntimeError):
    """Raised when two cookies in the jar both meet the lookup criteria.
    Use .get and .set and include domain and path args in order to be more specific."""
class RequestsCookieJar(cookielib.CookieJar, collections.MutableMapping):
    """Compatibility class; is a cookielib.CookieJar, but exposes a dict
    interface.
    This is the CookieJar we create by default for requests and sessions that
    don't specify one, since some clients may expect response.cookies and
    session.cookies to support dict operations.
    Requests does not use the dict interface internally; it's just for
    compatibility with external client code. All requests code should work
    out of the box with externally provided instances of ``CookieJar``, e.g.
    ``LWPCookieJar`` and ``FileCookieJar``.
    Unlike a regular CookieJar, this class is pickleable.
    .. warning:: dictionary operations that are normally O(1) may be O(n).
    """
    def get(self, name, default=None, domain=None, path=None):
        """Dict-like get() that also supports optional domain and path args in
        order to resolve naming collisions from using one cookie jar over
        multiple domains.
        .. warning:: operation is O(n), not O(1)."""
        try:
            return self._find_no_duplicates(name, domain, path)
        except KeyError:
            return default
    def set(self, name, value, **kwargs):
        """Dict-like set() that also supports optional domain and path args in
        order to resolve naming collisions from using one cookie jar over
        multiple domains."""
        # support client code that unsets cookies by assignment of a None value:
        if value is None:
            remove_cookie_by_name(self, name, domain=kwargs.get('domain'), path=kwargs.get('path'))
            return
        # A Morsel (stdlib cookie object) carries its own attributes; anything
        # else is treated as a plain value plus create_cookie() kwargs.
        if isinstance(value, Morsel):
            c = morsel_to_cookie(value)
        else:
            c = create_cookie(name, value, **kwargs)
        self.set_cookie(c)
        return c
    def iterkeys(self):
        """Dict-like iterkeys() that returns an iterator of names of cookies
        from the jar. See itervalues() and iteritems()."""
        for cookie in iter(self):
            yield cookie.name
    def keys(self):
        """Dict-like keys() that returns a list of names of cookies from the
        jar. See values() and items()."""
        return list(self.iterkeys())
    def itervalues(self):
        """Dict-like itervalues() that returns an iterator of values of cookies
        from the jar. See iterkeys() and iteritems()."""
        for cookie in iter(self):
            yield cookie.value
    def values(self):
        """Dict-like values() that returns a list of values of cookies from the
        jar. See keys() and items()."""
        return list(self.itervalues())
    def iteritems(self):
        """Dict-like iteritems() that returns an iterator of name-value tuples
        from the jar. See iterkeys() and itervalues()."""
        for cookie in iter(self):
            yield cookie.name, cookie.value
    def items(self):
        """Dict-like items() that returns a list of name-value tuples from the
        jar. See keys() and values(). Allows client-code to call
        ``dict(RequestsCookieJar)`` and get a vanilla python dict of key value
        pairs."""
        return list(self.iteritems())
    def list_domains(self):
        """Utility method to list all the domains in the jar."""
        domains = []
        for cookie in iter(self):
            if cookie.domain not in domains:
                domains.append(cookie.domain)
        return domains
    def list_paths(self):
        """Utility method to list all the paths in the jar."""
        paths = []
        for cookie in iter(self):
            if cookie.path not in paths:
                paths.append(cookie.path)
        return paths
    def multiple_domains(self):
        """Returns True if there are multiple domains in the jar.
        Returns False otherwise."""
        domains = []
        for cookie in iter(self):
            if cookie.domain is not None and cookie.domain in domains:
                return True
            domains.append(cookie.domain)
        return False  # there is only one domain in jar
    def get_dict(self, domain=None, path=None):
        """Takes as an argument an optional domain and path and returns a plain
        old Python dict of name-value pairs of cookies that meet the
        requirements."""
        dictionary = {}
        for cookie in iter(self):
            if (domain is None or cookie.domain == domain) and (path is None
                or cookie.path == path):
                dictionary[cookie.name] = cookie.value
        return dictionary
    def __getitem__(self, name):
        """Dict-like __getitem__() for compatibility with client code. Throws
        exception if there are more than one cookie with name. In that case,
        use the more explicit get() method instead.
        .. warning:: operation is O(n), not O(1)."""
        return self._find_no_duplicates(name)
    def __setitem__(self, name, value):
        """Dict-like __setitem__ for compatibility with client code. Throws
        exception if there is already a cookie of that name in the jar. In that
        case, use the more explicit set() method instead."""
        self.set(name, value)
    def __delitem__(self, name):
        """Deletes a cookie given a name. Wraps ``cookielib.CookieJar``'s
        ``remove_cookie_by_name()``."""
        remove_cookie_by_name(self, name)
    def set_cookie(self, cookie, *args, **kwargs):
        # Strip escaped quotes from quoted cookie values before storing.
        if hasattr(cookie.value, 'startswith') and cookie.value.startswith('"') and cookie.value.endswith('"'):
            cookie.value = cookie.value.replace('\\"', '')
        return super(RequestsCookieJar, self).set_cookie(cookie, *args, **kwargs)
    def update(self, other):
        """Updates this jar with cookies from another CookieJar or dict-like"""
        if isinstance(other, cookielib.CookieJar):
            # Copy each cookie so the two jars don't share mutable objects.
            for cookie in other:
                self.set_cookie(copy.copy(cookie))
        else:
            super(RequestsCookieJar, self).update(other)
    def _find(self, name, domain=None, path=None):
        """Requests uses this method internally to get cookie values. Takes as
        args name and optional domain and path. Returns a cookie.value. If
        there are conflicting cookies, _find arbitrarily chooses one. See
        _find_no_duplicates if you want an exception thrown if there are
        conflicting cookies."""
        for cookie in iter(self):
            if cookie.name == name:
                if domain is None or cookie.domain == domain:
                    if path is None or cookie.path == path:
                        return cookie.value
        raise KeyError('name=%r, domain=%r, path=%r' % (name, domain, path))
    def _find_no_duplicates(self, name, domain=None, path=None):
        """Both ``__get_item__`` and ``get`` call this function: it's never
        used elsewhere in Requests. Takes as args name and optional domain and
        path. Returns a cookie.value. Throws KeyError if cookie is not found
        and CookieConflictError if there are multiple cookies that match name
        and optionally domain and path."""
        toReturn = None
        for cookie in iter(self):
            if cookie.name == name:
                if domain is None or cookie.domain == domain:
                    if path is None or cookie.path == path:
                        if toReturn is not None:  # if there are multiple cookies that meet passed in criteria
                            raise CookieConflictError('There are multiple cookies with name, %r' % (name))
                        toReturn = cookie.value  # we will eventually return this as long as no cookie conflict
        if toReturn:
            return toReturn
        raise KeyError('name=%r, domain=%r, path=%r' % (name, domain, path))
    def __getstate__(self):
        """Unlike a normal CookieJar, this class is pickleable."""
        state = self.__dict__.copy()
        # remove the unpickleable RLock object
        state.pop('_cookies_lock')
        return state
    def __setstate__(self, state):
        """Unlike a normal CookieJar, this class is pickleable."""
        self.__dict__.update(state)
        # Recreate the lock dropped by __getstate__.
        if '_cookies_lock' not in self.__dict__:
            self._cookies_lock = threading.RLock()
    def copy(self):
        """Return a copy of this RequestsCookieJar."""
        new_cj = RequestsCookieJar()
        new_cj.update(self)
        return new_cj
def _copy_cookie_jar(jar):
if jar is None:
return None
if hasattr(jar, 'copy'):
# We're dealing with an instance of RequestsCookieJar
return jar.copy()
# We're dealing with a generic CookieJar instance
new_jar = copy.copy(jar)
new_jar.clear()
for cookie in jar:
new_jar.set_cookie(copy.copy(cookie))
return new_jar
def create_cookie(name, value, **kwargs):
    """Make a cookie from underspecified parameters.
    By default, the pair of `name` and `value` will be set for the domain ''
    and sent on every request (this is sometimes called a "supercookie").
    """
    # Sensible defaults for every attribute cookielib.Cookie requires.
    attrs = {
        'version': 0,
        'name': name,
        'value': value,
        'port': None,
        'domain': '',
        'path': '/',
        'secure': False,
        'expires': None,
        'discard': True,
        'comment': None,
        'comment_url': None,
        'rest': {'HttpOnly': None},
        'rfc2109': False,
    }
    # Reject keyword arguments that do not map to cookie attributes.
    unknown = set(kwargs) - set(attrs)
    if unknown:
        raise TypeError(
            'create_cookie() got unexpected keyword arguments: %s'
            % list(unknown))
    attrs.update(kwargs)
    # The *_specified flags are derived from the values actually supplied.
    attrs['port_specified'] = bool(attrs['port'])
    attrs['domain_specified'] = bool(attrs['domain'])
    attrs['domain_initial_dot'] = attrs['domain'].startswith('.')
    attrs['path_specified'] = bool(attrs['path'])
    return cookielib.Cookie(**attrs)
def morsel_to_cookie(morsel):
    """Convert a Morsel object into a Cookie containing the one k/v pair."""
    # Prefer max-age over expires when both are present.
    max_age = morsel['max-age']
    expires = None
    if max_age:
        try:
            expires = int(time.time() + int(max_age))
        except ValueError:
            raise TypeError('max-age: %s must be integer' % max_age)
    elif morsel['expires']:
        expires = calendar.timegm(
            time.strptime(morsel['expires'], '%a, %d-%b-%Y %H:%M:%S GMT'))
    return create_cookie(
        comment=morsel['comment'],
        comment_url=bool(morsel['comment']),
        discard=False,
        domain=morsel['domain'],
        expires=expires,
        name=morsel.key,
        path=morsel['path'],
        port=None,
        rest={'HttpOnly': morsel['httponly']},
        rfc2109=False,
        secure=bool(morsel['secure']),
        value=morsel.value,
        version=morsel['version'] or 0,
    )
def cookiejar_from_dict(cookie_dict, cookiejar=None, overwrite=True):
    """Returns a CookieJar from a key/value dictionary.
    :param cookie_dict: Dict of key/values to insert into CookieJar.
    :param cookiejar: (optional) A cookiejar to add the cookies to.
    :param overwrite: (optional) If False, will not replace cookies
        already in the jar with new ones.
    """
    if cookiejar is None:
        cookiejar = RequestsCookieJar()
    if cookie_dict is not None:
        # Snapshot existing names once so membership checks stay O(1).
        existing_names = {cookie.name for cookie in cookiejar}
        for name in cookie_dict:
            if overwrite or (name not in existing_names):
                cookiejar.set_cookie(create_cookie(name, cookie_dict[name]))
    return cookiejar
def merge_cookies(cookiejar, cookies):
    """Add cookies to cookiejar and returns a merged CookieJar.
    :param cookiejar: CookieJar object to add the cookies to.
    :param cookies: Dictionary or CookieJar object to be added.
    """
    if not isinstance(cookiejar, cookielib.CookieJar):
        raise ValueError('You can only merge into CookieJar')
    if isinstance(cookies, dict):
        return cookiejar_from_dict(cookies, cookiejar=cookiejar,
                                   overwrite=False)
    if isinstance(cookies, cookielib.CookieJar):
        try:
            cookiejar.update(cookies)
        except AttributeError:
            # Plain CookieJars have no update(); copy cookie by cookie.
            for cookie_in_jar in cookies:
                cookiejar.set_cookie(cookie_in_jar)
    return cookiejar
| mit |
umglurf/juleol | migrations/versions/8eb7162afee7_.py | 1 | 1418 | # SPDX-FileCopyrightText: 2020 Håvard Moen <post@haavard.name>
#
# SPDX-License-Identifier: GPL-3.0-or-later
"""add oauth
Revision ID: 8eb7162afee7
Revises: f0ba66f2e9b2
Create Date: 2020-11-01 16:10:08.345978
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
# 'revision' names this migration; 'down_revision' is its parent in the chain.
revision = '8eb7162afee7'
down_revision = 'f0ba66f2e9b2'
branch_labels = None
depends_on = None
def upgrade():
    """Move participants to OAuth: drop local password auth, add email."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_table('admins')
    op.drop_column('participants', 'password')
    op.add_column('participants', sa.Column('email', sa.String(length=255), nullable=True))
    # One registration per email per tasting.
    op.create_index('participant_email', 'participants', ['email', 'tasting_id'], unique=True)
    # ### end Alembic commands ###
def downgrade():
    """Restore the pre-OAuth schema (admins table, participant passwords)."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table(
        'admins',
        sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
        sa.Column('name', sa.String(length=255), nullable=False),
        sa.Column('password', sa.String(length=255), nullable=False),
        sa.PrimaryKeyConstraint('id')
    )
    # NOTE(review): the index was created with create_index() in upgrade();
    # dropping it via drop_constraint(type_='unique') may not work on all
    # backends -- confirm against the target database.
    op.drop_constraint('participant_email', 'participants', type_='unique')
    op.drop_column('participants', 'email')
    # NOTE(review): adding a NOT NULL column with no server_default will fail
    # if 'participants' already contains rows -- confirm intended.
    op.add_column('participants', sa.Column('password', sa.String(length=255), nullable=False))
    # ### end Alembic commands ###
| gpl-3.0 |
google/grr | grr/client/grr_response_client/windows/installers.py | 1 | 17420 | #!/usr/bin/env python
"""These are windows specific installers.
NOTE: Subprocess module is broken on windows in that pipes are not handled
correctly. See for example:
http://bugs.python.org/issue3905
This problem seems to go away when we use pipes for all standard handles:
https://launchpadlibrarian.net/134750748/pyqtgraph_subprocess.patch
We also set shell=True because that seems to avoid having an extra cmd.exe
window pop up.
"""
import contextlib
import datetime
import errno
import itertools
import logging
import os
import shutil
import subprocess
import sys
import time
from typing import Callable, Iterable
from absl import flags
import pywintypes
import win32process
import win32service
import win32serviceutil
import winerror
import winreg
from grr_response_client.windows import regconfig
from grr_response_core import config
# Delay (ms) before the SCM restarts the service after a failure
# (see the failure actions configured in _CreateService).
SERVICE_RESTART_DELAY_MSEC = 120 * 1000
# Seconds after which the service failure counter is reset.
SERVICE_RESET_FAIL_COUNT_DELAY_SEC = 86400
flags.DEFINE_string(
    "interpolate_fleetspeak_service_config", "",
    "If set, only interpolate a fleetspeak service config. "
    "The value is a path to a file to interpolate (rewrite).")
def _StartService(service_name):
  """Starts a Windows service with the given name.
  Args:
    service_name: string The name of the service to be started.
  """
  logging.info("Trying to start service %s.", service_name)
  try:
    win32serviceutil.StartService(service_name)
  except pywintypes.error as error:
    # A missing service is expected in some setups; anything else is logged
    # as a genuine failure.
    missing = winerror.ERROR_SERVICE_DOES_NOT_EXIST
    if getattr(error, "winerror", None) == missing:
      logging.debug("Tried to start '%s', but the service is not installed.",
                    service_name)
    else:
      logging.exception("Encountered error trying to start '%s':", service_name)
  else:
    logging.info("Service '%s' started.", service_name)
def _StartServices(service_names: Iterable[str]) -> None:
  """Starts every Windows service named in `service_names`."""
  for name in service_names:
    _StartService(name)
# Names of services stopped by _StopService, so that a failed installation
# can restart them (see _StartServices usage below).
STOPPED_SERVICES = []
def _StopService(service_name, service_binary_name=None):
  """Stops a Windows service with the given name.
  Args:
    service_name: string The name of the service to be stopped.
    service_binary_name: string If given, also kill this binary as a best effort
      fallback solution.
  """
  logging.info("Trying to stop service %s.", service_name)
  # QueryServiceStatus returns: scvType, svcState, svcControls, err,
  # svcErr, svcCP, svcWH
  try:
    status = win32serviceutil.QueryServiceStatus(service_name)[1]
  except pywintypes.error as e:
    if getattr(e, "winerror", None) == winerror.ERROR_SERVICE_DOES_NOT_EXIST:
      logging.debug("Tried to stop '%s', but the service is not installed.",
                    service_name)
    else:
      logging.exception("Unable to query status of service '%s':", service_name)
    return
  # Poll for up to ~20 seconds, (re-)requesting a stop whenever the service
  # is neither stopped nor already stopping.
  for _ in range(20):
    if status == win32service.SERVICE_STOPPED:
      break
    elif status != win32service.SERVICE_STOP_PENDING:
      try:
        # Record the stop so a failed install can restart the service later.
        STOPPED_SERVICES.append(service_name)
        win32serviceutil.StopService(service_name)
      except pywintypes.error:
        logging.exception("Unable to stop service '%s':", service_name)
    time.sleep(1)
    status = win32serviceutil.QueryServiceStatus(service_name)[1]
  if status == win32service.SERVICE_STOPPED:
    logging.info("Service '%s' stopped.", service_name)
    return
  elif not service_binary_name:
    return
  # Taskkill will fail on systems predating Windows XP, this is a best
  # effort fallback solution.
  output = subprocess.check_output(
      ["taskkill", "/im", "%s*" % service_binary_name, "/f"],
      shell=True,
      stdin=subprocess.PIPE,
      stderr=subprocess.PIPE)
  logging.debug("%s", output)
  # Sleep a bit to ensure that process really quits.
  time.sleep(2)
def _RemoveService(service_name):
  """Removes the Windows service with the given name, if it exists."""
  logging.info("Trying to remove service %s.", service_name)
  try:
    win32serviceutil.RemoveService(service_name)
  except pywintypes.error as error:
    missing = winerror.ERROR_SERVICE_DOES_NOT_EXIST
    if getattr(error, "winerror", None) == missing:
      logging.debug("Tried to remove '%s', but the service is not installed.",
                    service_name)
    else:
      logging.exception("Unable to remove service '%s':", service_name)
  else:
    logging.info("Service '%s' removed.", service_name)
def _CreateService(service_name: str, description: str,
                   command_line: str) -> None:
  """Creates an auto-start Windows service that runs `command_line`."""
  logging.info("Creating service '%s'.", service_name)
  with contextlib.ExitStack() as cleanup:
    scm_handle = win32service.OpenSCManager(None, None,
                                            win32service.SC_MANAGER_ALL_ACCESS)
    # Ensure handles are released even if a later call raises.
    cleanup.callback(win32service.CloseServiceHandle, scm_handle)
    service_handle = win32service.CreateService(
        scm_handle, service_name, service_name,
        win32service.SERVICE_ALL_ACCESS,
        win32service.SERVICE_WIN32_OWN_PROCESS,
        win32service.SERVICE_AUTO_START, win32service.SERVICE_ERROR_NORMAL,
        command_line, None, 0, None, None, None)
    cleanup.callback(win32service.CloseServiceHandle, service_handle)
    # Restart automatically (three delayed attempts) if the service fails;
    # the failure counter resets after SERVICE_RESET_FAIL_COUNT_DELAY_SEC.
    failure_actions = {
        "ResetPeriod": SERVICE_RESET_FAIL_COUNT_DELAY_SEC,
        "RebootMsg": u"",
        "Command": u"",
        "Actions": [
            (win32service.SC_ACTION_RESTART, SERVICE_RESTART_DELAY_MSEC),
        ] * 3,
    }
    win32service.ChangeServiceConfig2(
        service_handle, win32service.SERVICE_CONFIG_FAILURE_ACTIONS,
        failure_actions)
    win32service.ChangeServiceConfig2(
        service_handle, win32service.SERVICE_CONFIG_DESCRIPTION, description)
    logging.info("Successfully created service '%s'.", service_name)
def _OpenRegkey(key_path):
  """Opens the registry key at `key_path`, creating it (and any missing
  ancestors) if necessary."""
  hive_name, subpath = key_path.split("\\", 1)
  return winreg.CreateKey(getattr(winreg, hive_name), subpath)
def _CheckForWow64():
  """Checks to ensure we are not running on a Wow64 system."""
  if not win32process.IsWow64Process():
    return
  raise RuntimeError("Will not install a 32 bit client on a 64 bit system. "
                     "Please use the correct client.")
def _StopPreviousService():
  """Stops the Windows service hosting the GRR process."""
  _StopService(
      service_name=config.CONFIG["Nanny.service_name"],
      service_binary_name=config.CONFIG["Nanny.service_binary_name"])
  # Fleetspeak-enabled clients also run a separate communication service.
  if config.CONFIG["Client.fleetspeak_enabled"]:
    _StopService(service_name=config.CONFIG["Client.fleetspeak_service_name"])
def _DeleteGrrFleetspeakService():
  """Deletes GRR's fleetspeak service entry from the registry."""
  # Delete GRR's Fleetspeak config from the registry so Fleetspeak
  # doesn't try to restart GRR unless/until installation completes
  # successfully.
  key_path = config.CONFIG["Client.fleetspeak_unsigned_services_regkey"]
  regkey = _OpenRegkey(key_path)
  try:
    winreg.DeleteValue(regkey, config.CONFIG["Client.name"])
    logging.info("Deleted value '%s' of key '%s'.",
                 config.CONFIG["Client.name"], key_path)
  except OSError as e:
    # Windows will raise a no-such-file-or-directory error if
    # GRR's config hasn't been written to the registry yet.
    if e.errno != errno.ENOENT:
      raise
def _FileRetryLoop(path: str, f: Callable[[], None]) -> None:
  """If `path` exists, calls `f` in a retry loop."""
  if not os.path.exists(path):
    return
  attempts = 0
  while True:
    try:
      f()
      return
    except OSError as e:
      attempts += 1
      # Retry only on permission-denied, at most 10 times (~30s total);
      # any other error is re-raised immediately.
      if e.errno == errno.EACCES and attempts < 10:
        # The currently installed GRR process may stick around for a few
        # seconds after the service is terminated (keeping the contents of
        # the installation directory locked).
        logging.warning(
            "Encountered permission-denied error while trying to process "
            "'%s'. Retrying...",
            path,
            exc_info=True)
        time.sleep(3)
      else:
        raise
def _RmTree(path: str) -> None:
  """Recursively deletes `path`, retrying on transient permission errors."""
  def _do_rmtree():
    shutil.rmtree(path)
  _FileRetryLoop(path, _do_rmtree)
def _Rename(src: str, dst: str) -> None:
  """Renames `src` to `dst`, retrying on transient permission errors."""
  def _do_rename():
    os.rename(src, dst)
  _FileRetryLoop(src, _do_rename)
def _RmTreePseudoTransactional(path: str) -> None:
  """Removes `path`.
  Makes sure that either `path` is gone or that it is still present as
  it was.
  Args:
    path: The path to remove.
  """
  # Timestamp suffix keeps orphaned directories from colliding.
  suffix = datetime.datetime.now().strftime("%Y%m%d%H%M%S")
  temp_path = f"{path}_orphaned_{suffix}"
  logging.info("Trying to rename %s -> %s.", path, temp_path)
  # Assuming there was a `path`:
  # _Rename succeeds -> `path` is gone, we can proceed with the install.
  # _Rename fails -> we know that we still have `path` as it was.
  _Rename(path, temp_path)
  try:
    logging.info("Trying to remove %s.", temp_path)
    _RmTree(temp_path)
  except:  # pylint: disable=bare-except
    # Best effort: leaving an orphaned directory behind is acceptable;
    # a partially deleted live path would not be.
    logging.warning("Failed to remove %s. Ignoring.", temp_path, exc_info=True)
def _IsReinstall() -> bool:
  """Returns True if a previous GRR installation exists on disk."""
  install_path = config.CONFIG["Client.install_path"]
  is_reinstall = os.path.exists(install_path)
  logging.info("Checking if this is a re-install: %s.", is_reinstall)
  return is_reinstall
def _ClearInstallPath() -> None:
  """Removes any previous install-directory contents and recreates it empty."""
  target = config.CONFIG["Client.install_path"]
  logging.info("Clearing install path %s.", target)
  _RmTreePseudoTransactional(target)
  os.makedirs(target)
def _CopyToSystemDir():
  """Copies the binaries from the temporary unpack location.

  This requires running services to be stopped or we might not be able to write
  on the binary. We then copy the entire directory where we are running from
  into the location indicated by "Client.install_path".
  """
  executable_directory = os.path.dirname(sys.executable)
  install_path = config.CONFIG["Client.install_path"]
  logging.info("Installing binaries %s -> %s", executable_directory,
               config.CONFIG["Client.install_path"])
  # Recursively copy the temp directory to the installation directory.
  for root, dirs, files in os.walk(executable_directory):
    for name in dirs:
      src_path = os.path.join(root, name)
      relative_path = os.path.relpath(src_path, executable_directory)
      dest_path = os.path.join(install_path, relative_path)
      # exist_ok replaces the old try/except-EEXIST dance around os.mkdir.
      os.makedirs(dest_path, exist_ok=True)
    for name in files:
      src_path = os.path.join(root, name)
      relative_path = os.path.relpath(src_path, executable_directory)
      dest_path = os.path.join(install_path, relative_path)
      shutil.copy(src_path, dest_path)
# These options will be copied to the registry to configure the nanny service.
_NANNY_OPTIONS = frozenset([
    "Nanny.child_binary",
    "Nanny.child_command_line",
    "Nanny.service_name",
    "Nanny.service_description",
])
# Options for the legacy (non-Fleetspeak) GRR installation that should get
# deleted when installing Fleetspeak-enabled GRR clients.
# Consumed by _DeleteLegacyConfigOptions.
_LEGACY_OPTIONS = frozenset(
    itertools.chain(_NANNY_OPTIONS,
                    ["Nanny.status", "Nanny.heartbeat", "Client.labels"]))
def _InstallNanny():
  """Installs the nanny program."""
  # We need to copy the nanny sections to the registry to ensure the
  # service is correctly configured.
  new_config = config.CONFIG.MakeNewConfig()
  new_config.SetWriteBack(config.CONFIG["Config.writeback"])
  for option in _NANNY_OPTIONS:
    new_config.Set(option, config.CONFIG.Get(option))
  new_config.Write()
  # The nanny binary performs the actual service installation itself.
  args = [
      config.CONFIG["Nanny.binary"], "--service_key",
      config.CONFIG["Client.config_key"], "install"
  ]
  logging.debug("Calling %s", (args,))
  output = subprocess.check_output(
      args, shell=True, stdin=subprocess.PIPE, stderr=subprocess.PIPE)
  logging.debug("%s", output)
def _DeleteLegacyConfigOptions(registry_key_uri):
  """Deletes config values in the registry for legacy GRR installations."""
  key_spec = regconfig.ParseRegistryURI(registry_key_uri)
  try:
    regkey = winreg.OpenKeyEx(key_spec.winreg_hive, key_spec.path, 0,
                              winreg.KEY_ALL_ACCESS)
  except OSError as e:
    if e.errno == errno.ENOENT:
      # The key not existing means there is no legacy config to purge.
      logging.info("Skipping legacy config purge for non-existent key: %s.",
                   registry_key_uri)
      return
    else:
      raise
  for option in _LEGACY_OPTIONS:
    try:
      winreg.DeleteValue(regkey, option)
      logging.info("Deleted value '%s' of key %s.", option, key_spec)
    except OSError as e:
      # Windows will raise a no-such-file-or-directory error if the config
      # option does not exist in the registry. This is expected when upgrading
      # to a newer Fleetspeak-enabled version.
      if e.errno != errno.ENOENT:
        raise
def _IsFleetspeakBundled():
  """Returns True if a bundled fleetspeak-client binary is installed."""
  binary_path = os.path.join(config.CONFIG["Client.install_path"],
                             "fleetspeak-client.exe")
  return os.path.exists(binary_path)
def _InstallBundledFleetspeak():
  """(Re)creates the Windows service for the bundled Fleetspeak client."""
  install_path = config.CONFIG["Client.install_path"]
  fleetspeak_client = os.path.join(install_path, "fleetspeak-client.exe")
  fleetspeak_config = os.path.join(install_path, "fleetspeak-client.config")
  # Remove any stale registration before creating the service afresh.
  _RemoveService(config.CONFIG["Client.fleetspeak_service_name"])
  _CreateService(
      service_name=config.CONFIG["Client.fleetspeak_service_name"],
      description="Fleetspeak communication agent.",
      command_line=f"\"{fleetspeak_client}\" -config \"{fleetspeak_config}\"")
def _MaybeInterpolateFleetspeakServiceConfig():
  """Interpolates the fleetspeak service config if a template is present."""
  dst_path = os.path.join(
      config.CONFIG["Client.install_path"],
      config.CONFIG["Client.fleetspeak_unsigned_config_fname"])
  src_template = dst_path + ".in"
  # Only interpolate when the ".in" template was shipped with the install.
  if os.path.exists(src_template):
    _InterpolateFleetspeakServiceConfig(src_template, dst_path)
def _InterpolateFleetspeakServiceConfig(src_path: str, dst_path: str) -> None:
  """Expands GRR config interpolations from src_path into dst_path.

  Backslashes in the interpolated text are doubled (so Windows paths survive
  a later unescaping pass — presumably by Fleetspeak's config parser; TODO
  confirm) and any trailing newlines are stripped.
  """
  with open(src_path, "r") as template_file:
    template = template_file.read()
  # dst is opened before interpolation so that a failed interpolation leaves
  # the destination truncated, matching the original control flow.
  with open(dst_path, "w") as out_file:
    expanded = config.CONFIG.InterpolateValue(template)
    expanded = expanded.replace("\\", "\\\\").rstrip("\n")
    out_file.write(expanded)
def _WriteGrrFleetspeakService():
  """Writes the GRR unsigned-service config path into the Fleetspeak regkey."""
  logging.info("Writing GRR fleetspeak service registry key.")
  # Register the path to the Fleetspeak service config in the registry so
  # Fleetspeak can discover the GRR service.
  services_key = _OpenRegkey(
      config.CONFIG["Client.fleetspeak_unsigned_services_regkey"])
  config_path = os.path.join(
      config.CONFIG["Client.install_path"],
      config.CONFIG["Client.fleetspeak_unsigned_config_fname"])
  winreg.SetValueEx(services_key, config.CONFIG["Client.name"], 0,
                    winreg.REG_SZ, config_path)
def _Run():
  """Installs the windows client binary.

  Orchestrates: stop old services, clear and repopulate the install path,
  then either install the legacy nanny or register with Fleetspeak,
  depending on Client.fleetspeak_enabled.
  """
  if flags.FLAGS.interpolate_fleetspeak_service_config:
    # Config-interpolation-only mode: rewrite the given config in place and
    # bounce the Fleetspeak service; no actual (re)install is performed.
    _InterpolateFleetspeakServiceConfig(
        flags.FLAGS.interpolate_fleetspeak_service_config,
        flags.FLAGS.interpolate_fleetspeak_service_config)
    fs_service = config.CONFIG["Client.fleetspeak_service_name"]
    _StopService(service_name=fs_service)
    _StartService(service_name=fs_service)
    return
  _CheckForWow64()
  is_reinstall = _IsReinstall()
  # Capture whether the *previous* install bundled Fleetspeak before we wipe
  # the install path below.
  was_bundled_fleetspeak = _IsFleetspeakBundled() if is_reinstall else False
  _StopPreviousService()
  try:
    _ClearInstallPath()
  except:
    # We've failed to remove an old installation, but it's still there.
    # Bring back the services that we've stopped previously.
    _StartServices(STOPPED_SERVICES)
    raise
  if is_reinstall:
    # If the install path existed before, we have deleted the current, working
    # GRR installation.
    # We have to delete the fleetspeak service entry as well.
    _DeleteGrrFleetspeakService()
    if was_bundled_fleetspeak:
      _RemoveService(config.CONFIG["Client.fleetspeak_service_name"])
    _RemoveService(config.CONFIG["Nanny.service_name"])
  # At this point we have a "clean state".
  # The old installation is not present and not running any more.
  try:
    _CopyToSystemDir()
    _MaybeInterpolateFleetspeakServiceConfig()
  except:
    # Restore previously running services before propagating the failure.
    _StartServices(STOPPED_SERVICES)
    raise
  if not config.CONFIG["Client.fleetspeak_enabled"]:
    logging.info("Fleetspeak not enabled, installing nanny.")
    _InstallNanny()
    return
  # Remove the Nanny service for the legacy GRR since it will
  # not be needed any more.
  _RemoveService(config.CONFIG["Nanny.service_name"])
  _DeleteLegacyConfigOptions(config.CONFIG["Config.writeback"])
  _WriteGrrFleetspeakService()
  fs_service = config.CONFIG["Client.fleetspeak_service_name"]
  _StopService(service_name=fs_service)
  if _IsFleetspeakBundled():
    _InstallBundledFleetspeak()
  _StartService(service_name=fs_service)
def Run():
  """Entry point: runs the installer, logging any fatal exception.

  The exception is logged with its traceback before being re-raised so the
  failure is captured even if the process then dies.
  """
  try:
    _Run()
  except:
    logging.error("The installer failed with an exception.", exc_info=True)
    raise
| apache-2.0 |
lduarte1991/edx-platform | lms/djangoapps/django_comment_client/base/tests.py | 1 | 93420 | # -*- coding: utf-8 -*-
"""Tests for django comment client views."""
import json
import logging
import mock
from contextlib import contextmanager
import ddt
import pytest
from django.contrib.auth.models import User
from django.core.management import call_command
from django.core.urlresolvers import reverse
from django.test.client import RequestFactory
from eventtracking.processors.exceptions import EventEmissionExit
from mock import ANY, Mock, patch
from nose.plugins.attrib import attr
from nose.tools import assert_equal, assert_true
from opaque_keys.edx.keys import CourseKey
from common.test.utils import MockSignalHandlerMixin, disable_signal
from course_modes.models import CourseMode
from course_modes.tests.factories import CourseModeFactory
from django_comment_client.base import views
from django_comment_client.tests.group_id import (
CohortedTopicGroupIdTestMixin,
GroupIdAssertionMixin,
NonCohortedTopicGroupIdTestMixin
)
from django_comment_client.tests.unicode import UnicodeTestMixin
from django_comment_client.tests.utils import CohortedTestCase, ForumsEnableMixin
from django_comment_common.models import (
assign_role,
CourseDiscussionSettings,
FORUM_ROLE_STUDENT,
Role
)
from django_comment_common.utils import ThreadContext, seed_permissions_roles, set_course_discussion_settings
from lms.djangoapps.teams.tests.factories import CourseTeamFactory, CourseTeamMembershipFactory
from lms.lib.comment_client import Thread
from openedx.core.djangoapps.course_groups.cohorts import set_course_cohorted
from openedx.core.djangoapps.course_groups.tests.helpers import CohortFactory
from openedx.core.djangoapps.waffle_utils.testutils import WAFFLE_TABLES
from student.roles import CourseStaffRole, UserBasedRole
from student.tests.factories import CourseAccessRoleFactory, CourseEnrollmentFactory, UserFactory
from util.testing import UrlResetMixin
from xmodule.modulestore import ModuleStoreEnum
from xmodule.modulestore.django import modulestore
from xmodule.modulestore.tests.django_utils import ModuleStoreTestCase, SharedModuleStoreTestCase
from xmodule.modulestore.tests.factories import CourseFactory, ItemFactory, check_mongo_calls
from track.middleware import TrackMiddleware
from track.views import segmentio
from track.views.tests.base import (
SegmentIOTrackingTestCaseBase,
SEGMENTIO_TEST_USER_ID
)
from event_transformers import ForumThreadViewedEventTransformer
# Module-level logger for this test module.
log = logging.getLogger(__name__)

# Base URL of the (mocked) comment service REST API used in call assertions.
CS_PREFIX = "http://localhost:4567/api/v1"

# Tables excluded from assertNumQueries counts (waffle flag lookups vary).
QUERY_COUNT_TABLE_BLACKLIST = WAFFLE_TABLES

# pylint: disable=missing-docstring
class MockRequestSetupMixin(object):
    """Test mixin: fabricates comment-service HTTP responses for a mocked
    ``requests.request`` callable."""

    def _create_response_mock(self, data):
        """Builds a requests-like response whose body deserializes to ``data``."""
        serialized = json.dumps(data)
        return Mock(text=serialized, json=Mock(return_value=data))

    def _set_mock_request_data(self, mock_request, data):
        """Makes the mocked request callable return a response wrapping ``data``."""
        response = self._create_response_mock(data)
        mock_request.return_value = response
@attr(shard=2)
@patch('lms.lib.comment_client.utils.requests.request', autospec=True)
@pytest.mark.django111_expected_failure
class CreateThreadGroupIdTestCase(
        MockRequestSetupMixin,
        CohortedTestCase,
        CohortedTopicGroupIdTestMixin,
        NonCohortedTopicGroupIdTestMixin
):
    """Tests group_id handling when creating threads in cohorted and
    non-cohorted topics (most test methods come from the group-id mixins)."""

    # Endpoint suffix the group-id mixins use to build expected CS URLs.
    cs_endpoint = "/threads"

    def call_view(self, mock_request, commentable_id, user, group_id, pass_group_id=True):
        """Posts a create_thread request as `user`, optionally with group_id."""
        self._set_mock_request_data(mock_request, {})
        mock_request.return_value.status_code = 200
        request_data = {"body": "body", "title": "title", "thread_type": "discussion"}
        if pass_group_id:
            request_data["group_id"] = group_id
        request = RequestFactory().post("dummy_url", request_data)
        request.user = user
        request.view_name = "create_thread"

        return views.create_thread(
            request,
            course_id=unicode(self.course.id),
            commentable_id=commentable_id
        )

    def test_group_info_in_response(self, mock_request):
        # A student posting to a cohorted topic without an explicit group
        # should still get group info back in the response.
        response = self.call_view(
            mock_request,
            "cohorted_topic",
            self.student,
            None
        )
        self._assert_json_response_contains_group_info(response)
@attr(shard=2)
@patch('lms.lib.comment_client.utils.requests.request', autospec=True)
@disable_signal(views, 'thread_edited')
@disable_signal(views, 'thread_voted')
@disable_signal(views, 'thread_deleted')
class ThreadActionGroupIdTestCase(
        MockRequestSetupMixin,
        CohortedTestCase,
        GroupIdAssertionMixin
):
    """Verifies that thread actions (update/delete/vote/flag/pin/openclose)
    preserve group info in their JSON responses."""

    def call_view(
            self,
            view_name,
            mock_request,
            user=None,
            post_params=None,
            view_args=None
    ):
        """Invokes the named thread view against a mocked CS thread owned by
        self.student in the student cohort; defaults the acting user to the
        student."""
        self._set_mock_request_data(
            mock_request,
            {
                "user_id": str(self.student.id),
                "group_id": self.student_cohort.id,
                "closed": False,
                "type": "thread",
                "commentable_id": "non_team_dummy_id"
            }
        )
        mock_request.return_value.status_code = 200
        request = RequestFactory().post("dummy_url", post_params or {})
        request.user = user or self.student
        request.view_name = view_name

        return getattr(views, view_name)(
            request,
            course_id=unicode(self.course.id),
            thread_id="dummy",
            **(view_args or {})
        )

    def test_update(self, mock_request):
        response = self.call_view(
            "update_thread",
            mock_request,
            post_params={"body": "body", "title": "title"}
        )
        self._assert_json_response_contains_group_info(response)

    def test_delete(self, mock_request):
        response = self.call_view("delete_thread", mock_request)
        self._assert_json_response_contains_group_info(response)

    def test_vote(self, mock_request):
        # Both voting and undoing a vote must carry group info.
        response = self.call_view(
            "vote_for_thread",
            mock_request,
            view_args={"value": "up"}
        )
        self._assert_json_response_contains_group_info(response)
        response = self.call_view("undo_vote_for_thread", mock_request)
        self._assert_json_response_contains_group_info(response)

    def test_flag(self, mock_request):
        response = self.call_view("flag_abuse_for_thread", mock_request)
        self._assert_json_response_contains_group_info(response)
        response = self.call_view("un_flag_abuse_for_thread", mock_request)
        self._assert_json_response_contains_group_info(response)

    def test_pin(self, mock_request):
        # Pinning requires moderator privileges.
        response = self.call_view(
            "pin_thread",
            mock_request,
            user=self.moderator
        )
        self._assert_json_response_contains_group_info(response)
        response = self.call_view(
            "un_pin_thread",
            mock_request,
            user=self.moderator
        )
        self._assert_json_response_contains_group_info(response)

    def test_openclose(self, mock_request):
        # openclose wraps the thread payload under a 'content' key.
        response = self.call_view(
            "openclose_thread",
            mock_request,
            user=self.moderator
        )
        self._assert_json_response_contains_group_info(
            response,
            lambda d: d['content']
        )
class ViewsTestCaseMixin(object):
    """Shared fixture/helper logic for forum view tests: course setup, mock
    comment-service request plumbing, and create/update thread helpers."""

    def set_up_course(self, module_count=0):
        """
        Creates a course, optionally with module_count discussion modules, and
        a user with appropriate permissions.
        """
        # create a course
        self.course = CourseFactory.create(
            org='MITx', course='999',
            discussion_topics={"Some Topic": {"id": "some_topic"}},
            display_name='Robot Super Course',
        )
        self.course_id = self.course.id

        # add some discussion modules
        for i in range(module_count):
            ItemFactory.create(
                parent_location=self.course.location,
                category='discussion',
                discussion_id='id_module_{}'.format(i),
                discussion_category='Category {}'.format(i),
                discussion_target='Discussion {}'.format(i)
            )

        # seed the forums permissions and roles
        call_command('seed_permissions_roles', unicode(self.course_id))

        # Patch the comment client user save method so it does not try
        # to create a new cc user when creating a django user
        with patch('student.models.cc.User.save'):
            uname = 'student'
            email = 'student@edx.org'
            self.password = 'test'  # pylint: disable=attribute-defined-outside-init

            # Create the user and make them active so we can log them in.
            self.student = User.objects.create_user(uname, email, self.password)  # pylint: disable=attribute-defined-outside-init
            self.student.is_active = True
            self.student.save()

            # Add a discussion moderator
            self.moderator = UserFactory.create(password=self.password)  # pylint: disable=attribute-defined-outside-init

            # Enroll the student in the course
            CourseEnrollmentFactory(user=self.student,
                                    course_id=self.course_id)

            # Enroll the moderator and give them the appropriate roles
            CourseEnrollmentFactory(user=self.moderator, course_id=self.course.id)
            self.moderator.roles.add(Role.objects.get(name="Moderator", course_id=self.course.id))

            assert_true(self.client.login(username='student', password=self.password))

    def _setup_mock_request(self, mock_request, include_depth=False):
        """
        Ensure that mock_request returns the data necessary to make views
        function correctly
        """
        mock_request.return_value.status_code = 200
        data = {
            "user_id": str(self.student.id),
            "closed": False,
            "commentable_id": "non_team_dummy_id"
        }
        if include_depth:
            # Sub-comment views need a depth field on the parent comment.
            data["depth"] = 0
        self._set_mock_request_data(mock_request, data)

    def create_thread_helper(self, mock_request, extra_request_data=None, extra_response_data=None):
        """
        Issues a request to create a thread and verifies the result.
        """
        mock_request.return_value.status_code = 200
        # Canned comment-service thread payload returned by the mock.
        self._set_mock_request_data(mock_request, {
            "thread_type": "discussion",
            "title": "Hello",
            "body": "this is a post",
            "course_id": "MITx/999/Robot_Super_Course",
            "anonymous": False,
            "anonymous_to_peers": False,
            "commentable_id": "i4x-MITx-999-course-Robot_Super_Course",
            "created_at": "2013-05-10T18:53:43Z",
            "updated_at": "2013-05-10T18:53:43Z",
            "at_position_list": [],
            "closed": False,
            "id": "518d4237b023791dca00000d",
            "user_id": "1",
            "username": "robot",
            "votes": {
                "count": 0,
                "up_count": 0,
                "down_count": 0,
                "point": 0
            },
            "abuse_flaggers": [],
            "type": "thread",
            "group_id": None,
            "pinned": False,
            "endorsed": False,
            "unread_comments_count": 0,
            "read": False,
            "comments_count": 0,
        })
        thread = {
            "thread_type": "discussion",
            "body": ["this is a post"],
            "anonymous_to_peers": ["false"],
            "auto_subscribe": ["false"],
            "anonymous": ["false"],
            "title": ["Hello"],
        }
        if extra_request_data:
            thread.update(extra_request_data)
        url = reverse('create_thread', kwargs={'commentable_id': 'i4x-MITx-999-course-Robot_Super_Course',
                                               'course_id': unicode(self.course_id)})
        response = self.client.post(url, data=thread)
        assert_true(mock_request.called)
        # The view should have POSTed exactly this payload to the comment
        # service's /threads endpoint.
        expected_data = {
            'thread_type': 'discussion',
            'body': u'this is a post',
            'context': ThreadContext.COURSE,
            'anonymous_to_peers': False, 'user_id': 1,
            'title': u'Hello',
            'commentable_id': u'i4x-MITx-999-course-Robot_Super_Course',
            'anonymous': False,
            'course_id': unicode(self.course_id),
        }
        if extra_response_data:
            expected_data.update(extra_response_data)
        mock_request.assert_called_with(
            'post',
            '{prefix}/i4x-MITx-999-course-Robot_Super_Course/threads'.format(prefix=CS_PREFIX),
            data=expected_data,
            params={'request_id': ANY},
            headers=ANY,
            timeout=5
        )
        assert_equal(response.status_code, 200)

    def update_thread_helper(self, mock_request):
        """
        Issues a request to update a thread and verifies the result.
        """
        self._setup_mock_request(mock_request)
        # Mock out saving in order to test that content is correctly
        # updated. Otherwise, the call to thread.save() receives the
        # same mocked request data that the original call to retrieve
        # the thread did, overwriting any changes.
        with patch.object(Thread, 'save'):
            response = self.client.post(
                reverse("update_thread", kwargs={
                    "thread_id": "dummy",
                    "course_id": unicode(self.course_id)
                }),
                data={"body": "foo", "title": "foo", "commentable_id": "some_topic"}
            )
        self.assertEqual(response.status_code, 200)
        data = json.loads(response.content)
        self.assertEqual(data['body'], 'foo')
        self.assertEqual(data['title'], 'foo')
        self.assertEqual(data['commentable_id'], 'some_topic')
@attr(shard=2)
@ddt.ddt
@patch('lms.lib.comment_client.utils.requests.request', autospec=True)
@disable_signal(views, 'thread_created')
@disable_signal(views, 'thread_edited')
class ViewsQueryCountTestCase(
        ForumsEnableMixin,
        UrlResetMixin,
        ModuleStoreTestCase,
        MockRequestSetupMixin,
        ViewsTestCaseMixin
):
    """Pins the number of SQL queries and mongo calls issued by thread
    create/update views, per modulestore type."""

    CREATE_USER = False
    ENABLED_CACHES = ['default', 'mongo_metadata_inheritance', 'loc_cache']
    ENABLED_SIGNALS = ['course_published']

    @patch.dict("django.conf.settings.FEATURES", {"ENABLE_DISCUSSION_SERVICE": True})
    def setUp(self):
        super(ViewsQueryCountTestCase, self).setUp()

    def count_queries(func):  # pylint: disable=no-self-argument
        """
        Decorates test methods to count mongo and SQL calls for a
        particular modulestore.
        """
        def inner(self, default_store, module_count, mongo_calls, sql_queries, *args, **kwargs):
            with modulestore().default_store(default_store):
                self.set_up_course(module_count=module_count)
                self.clear_caches()
                with self.assertNumQueries(sql_queries, table_blacklist=QUERY_COUNT_TABLE_BLACKLIST):
                    with check_mongo_calls(mongo_calls):
                        func(self, *args, **kwargs)
        return inner

    # ddt tuples: (store type, module_count, expected mongo calls, expected SQL queries)
    @ddt.data(
        (ModuleStoreEnum.Type.mongo, 3, 4, 32),
        (ModuleStoreEnum.Type.split, 3, 13, 32),
    )
    @ddt.unpack
    @count_queries
    def test_create_thread(self, mock_request):
        self.create_thread_helper(mock_request)

    @ddt.data(
        (ModuleStoreEnum.Type.mongo, 3, 3, 28),
        (ModuleStoreEnum.Type.split, 3, 10, 28),
    )
    @ddt.unpack
    @count_queries
    def test_update_thread(self, mock_request):
        self.update_thread_helper(mock_request)
@attr(shard=2)
@ddt.ddt
@patch('lms.lib.comment_client.utils.requests.request', autospec=True)
class ViewsTestCase(
ForumsEnableMixin,
UrlResetMixin,
SharedModuleStoreTestCase,
MockRequestSetupMixin,
ViewsTestCaseMixin,
MockSignalHandlerMixin
):
@classmethod
def setUpClass(cls):
# pylint: disable=super-method-not-called
with super(ViewsTestCase, cls).setUpClassAndTestData():
cls.course = CourseFactory.create(
org='MITx', course='999',
discussion_topics={"Some Topic": {"id": "some_topic"}},
display_name='Robot Super Course',
)
@classmethod
def setUpTestData(cls):
super(ViewsTestCase, cls).setUpTestData()
cls.course_id = cls.course.id
# seed the forums permissions and roles
call_command('seed_permissions_roles', unicode(cls.course_id))
@patch.dict("django.conf.settings.FEATURES", {"ENABLE_DISCUSSION_SERVICE": True})
def setUp(self):
# Patching the ENABLE_DISCUSSION_SERVICE value affects the contents of urls.py,
# so we need to call super.setUp() which reloads urls.py (because
# of the UrlResetMixin)
super(ViewsTestCase, self).setUp()
# Patch the comment client user save method so it does not try
# to create a new cc user when creating a django user
with patch('student.models.cc.User.save'):
uname = 'student'
email = 'student@edx.org'
self.password = 'test' # pylint: disable=attribute-defined-outside-init
# Create the user and make them active so we can log them in.
self.student = User.objects.create_user(uname, email, self.password) # pylint: disable=attribute-defined-outside-init
self.student.is_active = True
self.student.save()
# Add a discussion moderator
self.moderator = UserFactory.create(password=self.password) # pylint: disable=attribute-defined-outside-init
# Enroll the student in the course
CourseEnrollmentFactory(user=self.student,
course_id=self.course_id)
# Enroll the moderator and give them the appropriate roles
CourseEnrollmentFactory(user=self.moderator, course_id=self.course.id)
self.moderator.roles.add(Role.objects.get(name="Moderator", course_id=self.course.id))
assert_true(self.client.login(username='student', password=self.password))
@contextmanager
def assert_discussion_signals(self, signal, user=None):
if user is None:
user = self.student
with self.assert_signal_sent(views, signal, sender=None, user=user, exclude_args=('post',)):
yield
def test_create_thread(self, mock_request):
with self.assert_discussion_signals('thread_created'):
self.create_thread_helper(mock_request)
@pytest.mark.django111_expected_failure
def test_create_thread_standalone(self, mock_request):
team = CourseTeamFactory.create(
name="A Team",
course_id=self.course_id,
topic_id='topic_id',
discussion_topic_id="i4x-MITx-999-course-Robot_Super_Course"
)
# Add the student to the team so they can post to the commentable.
team.add_user(self.student)
# create_thread_helper verifies that extra data are passed through to the comments service
self.create_thread_helper(mock_request, extra_response_data={'context': ThreadContext.STANDALONE})
@ddt.data(
('follow_thread', 'thread_followed'),
('unfollow_thread', 'thread_unfollowed'),
)
@ddt.unpack
def test_follow_unfollow_thread_signals(self, view_name, signal, mock_request):
self.create_thread_helper(mock_request)
with self.assert_discussion_signals(signal):
response = self.client.post(
reverse(
view_name,
kwargs={"course_id": unicode(self.course_id), "thread_id": 'i4x-MITx-999-course-Robot_Super_Course'}
)
)
self.assertEqual(response.status_code, 200)
def test_delete_thread(self, mock_request):
self._set_mock_request_data(mock_request, {
"user_id": str(self.student.id),
"closed": False,
})
test_thread_id = "test_thread_id"
request = RequestFactory().post("dummy_url", {"id": test_thread_id})
request.user = self.student
request.view_name = "delete_thread"
with self.assert_discussion_signals('thread_deleted'):
response = views.delete_thread(
request,
course_id=unicode(self.course.id),
thread_id=test_thread_id
)
self.assertEqual(response.status_code, 200)
self.assertTrue(mock_request.called)
def test_delete_comment(self, mock_request):
self._set_mock_request_data(mock_request, {
"user_id": str(self.student.id),
"closed": False,
})
test_comment_id = "test_comment_id"
request = RequestFactory().post("dummy_url", {"id": test_comment_id})
request.user = self.student
request.view_name = "delete_comment"
with self.assert_discussion_signals('comment_deleted'):
response = views.delete_comment(
request,
course_id=unicode(self.course.id),
comment_id=test_comment_id
)
self.assertEqual(response.status_code, 200)
self.assertTrue(mock_request.called)
args = mock_request.call_args[0]
self.assertEqual(args[0], "delete")
self.assertTrue(args[1].endswith("/{}".format(test_comment_id)))
def _test_request_error(self, view_name, view_kwargs, data, mock_request):
"""
Submit a request against the given view with the given data and ensure
that the result is a 400 error and that no data was posted using
mock_request
"""
self._setup_mock_request(mock_request, include_depth=(view_name == "create_sub_comment"))
response = self.client.post(reverse(view_name, kwargs=view_kwargs), data=data)
self.assertEqual(response.status_code, 400)
for call in mock_request.call_args_list:
self.assertEqual(call[0][0].lower(), "get")
def test_create_thread_no_title(self, mock_request):
self._test_request_error(
"create_thread",
{"commentable_id": "dummy", "course_id": unicode(self.course_id)},
{"body": "foo"},
mock_request
)
def test_create_thread_empty_title(self, mock_request):
self._test_request_error(
"create_thread",
{"commentable_id": "dummy", "course_id": unicode(self.course_id)},
{"body": "foo", "title": " "},
mock_request
)
def test_create_thread_no_body(self, mock_request):
self._test_request_error(
"create_thread",
{"commentable_id": "dummy", "course_id": unicode(self.course_id)},
{"title": "foo"},
mock_request
)
def test_create_thread_empty_body(self, mock_request):
self._test_request_error(
"create_thread",
{"commentable_id": "dummy", "course_id": unicode(self.course_id)},
{"body": " ", "title": "foo"},
mock_request
)
def test_update_thread_no_title(self, mock_request):
self._test_request_error(
"update_thread",
{"thread_id": "dummy", "course_id": unicode(self.course_id)},
{"body": "foo"},
mock_request
)
def test_update_thread_empty_title(self, mock_request):
self._test_request_error(
"update_thread",
{"thread_id": "dummy", "course_id": unicode(self.course_id)},
{"body": "foo", "title": " "},
mock_request
)
def test_update_thread_no_body(self, mock_request):
self._test_request_error(
"update_thread",
{"thread_id": "dummy", "course_id": unicode(self.course_id)},
{"title": "foo"},
mock_request
)
def test_update_thread_empty_body(self, mock_request):
self._test_request_error(
"update_thread",
{"thread_id": "dummy", "course_id": unicode(self.course_id)},
{"body": " ", "title": "foo"},
mock_request
)
def test_update_thread_course_topic(self, mock_request):
with self.assert_discussion_signals('thread_edited'):
self.update_thread_helper(mock_request)
@patch('django_comment_client.utils.get_discussion_categories_ids', return_value=["test_commentable"])
def test_update_thread_wrong_commentable_id(self, mock_get_discussion_id_map, mock_request):
self._test_request_error(
"update_thread",
{"thread_id": "dummy", "course_id": unicode(self.course_id)},
{"body": "foo", "title": "foo", "commentable_id": "wrong_commentable"},
mock_request
)
def test_create_comment(self, mock_request):
self._setup_mock_request(mock_request)
with self.assert_discussion_signals('comment_created'):
response = self.client.post(
reverse(
"create_comment",
kwargs={"course_id": unicode(self.course_id), "thread_id": "dummy"}
),
data={"body": "body"}
)
self.assertEqual(response.status_code, 200)
def test_create_comment_no_body(self, mock_request):
self._test_request_error(
"create_comment",
{"thread_id": "dummy", "course_id": unicode(self.course_id)},
{},
mock_request
)
def test_create_comment_empty_body(self, mock_request):
self._test_request_error(
"create_comment",
{"thread_id": "dummy", "course_id": unicode(self.course_id)},
{"body": " "},
mock_request
)
def test_create_sub_comment_no_body(self, mock_request):
self._test_request_error(
"create_sub_comment",
{"comment_id": "dummy", "course_id": unicode(self.course_id)},
{},
mock_request
)
def test_create_sub_comment_empty_body(self, mock_request):
self._test_request_error(
"create_sub_comment",
{"comment_id": "dummy", "course_id": unicode(self.course_id)},
{"body": " "},
mock_request
)
def test_update_comment_no_body(self, mock_request):
self._test_request_error(
"update_comment",
{"comment_id": "dummy", "course_id": unicode(self.course_id)},
{},
mock_request
)
def test_update_comment_empty_body(self, mock_request):
self._test_request_error(
"update_comment",
{"comment_id": "dummy", "course_id": unicode(self.course_id)},
{"body": " "},
mock_request
)
def test_update_comment_basic(self, mock_request):
self._setup_mock_request(mock_request)
comment_id = "test_comment_id"
updated_body = "updated body"
with self.assert_discussion_signals('comment_edited'):
response = self.client.post(
reverse(
"update_comment",
kwargs={"course_id": unicode(self.course_id), "comment_id": comment_id}
),
data={"body": updated_body}
)
self.assertEqual(response.status_code, 200)
mock_request.assert_called_with(
"put",
"{prefix}/comments/{comment_id}".format(prefix=CS_PREFIX, comment_id=comment_id),
headers=ANY,
params=ANY,
timeout=ANY,
data={"body": updated_body}
)
def test_flag_thread_open(self, mock_request):
self.flag_thread(mock_request, False)
def test_flag_thread_close(self, mock_request):
self.flag_thread(mock_request, True)
def flag_thread(self, mock_request, is_closed):
mock_request.return_value.status_code = 200
self._set_mock_request_data(mock_request, {
"title": "Hello",
"body": "this is a post",
"course_id": "MITx/999/Robot_Super_Course",
"anonymous": False,
"anonymous_to_peers": False,
"commentable_id": "i4x-MITx-999-course-Robot_Super_Course",
"created_at": "2013-05-10T18:53:43Z",
"updated_at": "2013-05-10T18:53:43Z",
"at_position_list": [],
"closed": is_closed,
"id": "518d4237b023791dca00000d",
"user_id": "1", "username": "robot",
"votes": {
"count": 0,
"up_count": 0,
"down_count": 0,
"point": 0
},
"abuse_flaggers": [1],
"type": "thread",
"group_id": None,
"pinned": False,
"endorsed": False,
"unread_comments_count": 0,
"read": False,
"comments_count": 0,
})
url = reverse('flag_abuse_for_thread', kwargs={
'thread_id': '518d4237b023791dca00000d',
'course_id': unicode(self.course_id)
})
response = self.client.post(url)
assert_true(mock_request.called)
call_list = [
(
('get', '{prefix}/threads/518d4237b023791dca00000d'.format(prefix=CS_PREFIX)),
{
'data': None,
'params': {'mark_as_read': True, 'request_id': ANY, 'with_responses': False},
'headers': ANY,
'timeout': 5
}
),
(
('put', '{prefix}/threads/518d4237b023791dca00000d/abuse_flag'.format(prefix=CS_PREFIX)),
{
'data': {'user_id': '1'},
'params': {'request_id': ANY},
'headers': ANY,
'timeout': 5
}
),
(
('get', '{prefix}/threads/518d4237b023791dca00000d'.format(prefix=CS_PREFIX)),
{
'data': None,
'params': {'mark_as_read': True, 'request_id': ANY, 'with_responses': False},
'headers': ANY,
'timeout': 5
}
)
]
assert mock_request.call_args_list == call_list
assert_equal(response.status_code, 200)
def test_un_flag_thread_open(self, mock_request):
self.un_flag_thread(mock_request, False)
def test_un_flag_thread_close(self, mock_request):
self.un_flag_thread(mock_request, True)
def un_flag_thread(self, mock_request, is_closed):
mock_request.return_value.status_code = 200
self._set_mock_request_data(mock_request, {
"title": "Hello",
"body": "this is a post",
"course_id": "MITx/999/Robot_Super_Course",
"anonymous": False,
"anonymous_to_peers": False,
"commentable_id": "i4x-MITx-999-course-Robot_Super_Course",
"created_at": "2013-05-10T18:53:43Z",
"updated_at": "2013-05-10T18:53:43Z",
"at_position_list": [],
"closed": is_closed,
"id": "518d4237b023791dca00000d",
"user_id": "1",
"username": "robot",
"votes": {
"count": 0,
"up_count": 0,
"down_count": 0,
"point": 0
},
"abuse_flaggers": [],
"type": "thread",
"group_id": None,
"pinned": False,
"endorsed": False,
"unread_comments_count": 0,
"read": False,
"comments_count": 0
})
url = reverse('un_flag_abuse_for_thread', kwargs={
'thread_id': '518d4237b023791dca00000d',
'course_id': unicode(self.course_id)
})
response = self.client.post(url)
assert_true(mock_request.called)
call_list = [
(
('get', '{prefix}/threads/518d4237b023791dca00000d'.format(prefix=CS_PREFIX)),
{
'data': None,
'params': {'mark_as_read': True, 'request_id': ANY, 'with_responses': False},
'headers': ANY,
'timeout': 5
}
),
(
('put', '{prefix}/threads/518d4237b023791dca00000d/abuse_unflag'.format(prefix=CS_PREFIX)),
{
'data': {'user_id': '1'},
'params': {'request_id': ANY},
'headers': ANY,
'timeout': 5
}
),
(
('get', '{prefix}/threads/518d4237b023791dca00000d'.format(prefix=CS_PREFIX)),
{
'data': None,
'params': {'mark_as_read': True, 'request_id': ANY, 'with_responses': False},
'headers': ANY,
'timeout': 5
}
)
]
assert mock_request.call_args_list == call_list
assert_equal(response.status_code, 200)
def test_flag_comment_open(self, mock_request):
self.flag_comment(mock_request, False)
def test_flag_comment_close(self, mock_request):
self.flag_comment(mock_request, True)
def flag_comment(self, mock_request, is_closed):
mock_request.return_value.status_code = 200
self._set_mock_request_data(mock_request, {
"body": "this is a comment",
"course_id": "MITx/999/Robot_Super_Course",
"anonymous": False,
"anonymous_to_peers": False,
"commentable_id": "i4x-MITx-999-course-Robot_Super_Course",
"created_at": "2013-05-10T18:53:43Z",
"updated_at": "2013-05-10T18:53:43Z",
"at_position_list": [],
"closed": is_closed,
"id": "518d4237b023791dca00000d",
"user_id": "1",
"username": "robot",
"votes": {
"count": 0,
"up_count": 0,
"down_count": 0,
"point": 0
},
"abuse_flaggers": [1],
"type": "comment",
"endorsed": False
})
url = reverse('flag_abuse_for_comment', kwargs={
'comment_id': '518d4237b023791dca00000d',
'course_id': unicode(self.course_id)
})
response = self.client.post(url)
assert_true(mock_request.called)
call_list = [
(
('get', '{prefix}/comments/518d4237b023791dca00000d'.format(prefix=CS_PREFIX)),
{
'data': None,
'params': {'request_id': ANY},
'headers': ANY,
'timeout': 5
}
),
(
('put', '{prefix}/comments/518d4237b023791dca00000d/abuse_flag'.format(prefix=CS_PREFIX)),
{
'data': {'user_id': '1'},
'params': {'request_id': ANY},
'headers': ANY,
'timeout': 5
}
),
(
('get', '{prefix}/comments/518d4237b023791dca00000d'.format(prefix=CS_PREFIX)),
{
'data': None,
'params': {'request_id': ANY},
'headers': ANY,
'timeout': 5
}
)
]
assert mock_request.call_args_list == call_list
assert_equal(response.status_code, 200)
def test_un_flag_comment_open(self, mock_request):
self.un_flag_comment(mock_request, False)
def test_un_flag_comment_close(self, mock_request):
self.un_flag_comment(mock_request, True)
def un_flag_comment(self, mock_request, is_closed):
mock_request.return_value.status_code = 200
self._set_mock_request_data(mock_request, {
"body": "this is a comment",
"course_id": "MITx/999/Robot_Super_Course",
"anonymous": False,
"anonymous_to_peers": False,
"commentable_id": "i4x-MITx-999-course-Robot_Super_Course",
"created_at": "2013-05-10T18:53:43Z",
"updated_at": "2013-05-10T18:53:43Z",
"at_position_list": [],
"closed": is_closed,
"id": "518d4237b023791dca00000d",
"user_id": "1",
"username": "robot",
"votes": {
"count": 0,
"up_count": 0,
"down_count": 0,
"point": 0
},
"abuse_flaggers": [],
"type": "comment",
"endorsed": False
})
url = reverse('un_flag_abuse_for_comment', kwargs={
'comment_id': '518d4237b023791dca00000d',
'course_id': unicode(self.course_id)
})
response = self.client.post(url)
assert_true(mock_request.called)
call_list = [
(
('get', '{prefix}/comments/518d4237b023791dca00000d'.format(prefix=CS_PREFIX)),
{
'data': None,
'params': {'request_id': ANY},
'headers': ANY,
'timeout': 5
}
),
(
('put', '{prefix}/comments/518d4237b023791dca00000d/abuse_unflag'.format(prefix=CS_PREFIX)),
{
'data': {'user_id': '1'},
'params': {'request_id': ANY},
'headers': ANY,
'timeout': 5
}
),
(
('get', '{prefix}/comments/518d4237b023791dca00000d'.format(prefix=CS_PREFIX)),
{
'data': None,
'params': {'request_id': ANY},
'headers': ANY,
'timeout': 5
}
)
]
assert mock_request.call_args_list == call_list
assert_equal(response.status_code, 200)
@ddt.data(
    ('upvote_thread', 'thread_id', 'thread_voted'),
    ('upvote_comment', 'comment_id', 'comment_voted'),
    ('downvote_thread', 'thread_id', 'thread_voted'),
    ('downvote_comment', 'comment_id', 'comment_voted')
)
@ddt.unpack
def test_voting(self, view_name, item_id, signal, mock_request):
    """
    Up/down-voting a thread or comment returns 200 and emits the
    corresponding discussion signal (ddt-parameterized over all four views).
    """
    self._setup_mock_request(mock_request)
    with self.assert_discussion_signals(signal):
        response = self.client.post(
            reverse(
                view_name,
                kwargs={item_id: 'dummy', 'course_id': unicode(self.course_id)}
            )
        )
    self.assertEqual(response.status_code, 200)
def test_endorse_comment(self, mock_request):
    """Endorsing a comment as a moderator emits comment_endorsed and returns 200."""
    self._setup_mock_request(mock_request)
    self.client.login(username=self.moderator.username, password=self.password)
    endorse_url = reverse(
        'endorse_comment',
        kwargs={'comment_id': 'dummy', 'course_id': unicode(self.course_id)}
    )
    with self.assert_discussion_signals('comment_endorsed', user=self.moderator):
        response = self.client.post(endorse_url)
    self.assertEqual(response.status_code, 200)
@attr(shard=2)
@patch("lms.lib.comment_client.utils.requests.request", autospec=True)
@disable_signal(views, 'comment_endorsed')
class ViewPermissionsTestCase(ForumsEnableMixin, UrlResetMixin, SharedModuleStoreTestCase, MockRequestSetupMixin):
    """
    Verify role-based access control for pinning threads and endorsing
    comments: students are rejected (401) while moderators, staff, and
    question authors succeed (200).
    """
    @classmethod
    def setUpClass(cls):
        # pylint: disable=super-method-not-called
        with super(ViewPermissionsTestCase, cls).setUpClassAndTestData():
            cls.course = CourseFactory.create()
    @classmethod
    def setUpTestData(cls):
        """Create a student and a moderator, both enrolled in the course."""
        super(ViewPermissionsTestCase, cls).setUpTestData()
        seed_permissions_roles(cls.course.id)
        cls.password = "test password"
        cls.student = UserFactory.create(password=cls.password)
        cls.moderator = UserFactory.create(password=cls.password)
        CourseEnrollmentFactory(user=cls.student, course_id=cls.course.id)
        CourseEnrollmentFactory(user=cls.moderator, course_id=cls.course.id)
        cls.moderator.roles.add(Role.objects.get(name="Moderator", course_id=cls.course.id))
    @patch.dict("django.conf.settings.FEATURES", {"ENABLE_DISCUSSION_SERVICE": True})
    def setUp(self):
        super(ViewPermissionsTestCase, self).setUp()
    def test_pin_thread_as_student(self, mock_request):
        """A plain student may not pin a thread (401)."""
        self._set_mock_request_data(mock_request, {})
        self.client.login(username=self.student.username, password=self.password)
        response = self.client.post(
            reverse("pin_thread", kwargs={"course_id": unicode(self.course.id), "thread_id": "dummy"})
        )
        self.assertEqual(response.status_code, 401)
    def test_pin_thread_as_moderator(self, mock_request):
        """A moderator may pin a thread (200)."""
        self._set_mock_request_data(mock_request, {})
        self.client.login(username=self.moderator.username, password=self.password)
        response = self.client.post(
            reverse("pin_thread", kwargs={"course_id": unicode(self.course.id), "thread_id": "dummy"})
        )
        self.assertEqual(response.status_code, 200)
    def test_un_pin_thread_as_student(self, mock_request):
        """A plain student may not un-pin a thread (401)."""
        self._set_mock_request_data(mock_request, {})
        self.client.login(username=self.student.username, password=self.password)
        response = self.client.post(
            reverse("un_pin_thread", kwargs={"course_id": unicode(self.course.id), "thread_id": "dummy"})
        )
        self.assertEqual(response.status_code, 401)
    def test_un_pin_thread_as_moderator(self, mock_request):
        """A moderator may un-pin a thread (200)."""
        self._set_mock_request_data(mock_request, {})
        self.client.login(username=self.moderator.username, password=self.password)
        response = self.client.post(
            reverse("un_pin_thread", kwargs={"course_id": unicode(self.course.id), "thread_id": "dummy"})
        )
        self.assertEqual(response.status_code, 200)
    def _set_mock_request_thread_and_comment(self, mock_request, thread_data, comment_data):
        """
        Route mocked comments-service requests by URL: thread URLs return
        ``thread_data``, comment URLs return ``comment_data``.
        """
        def handle_request(*args, **kwargs):
            url = args[1]
            if "/threads/" in url:
                return self._create_response_mock(thread_data)
            elif "/comments/" in url:
                return self._create_response_mock(comment_data)
            else:
                # NOTE(review): ArgumentError is not a Python builtin —
                # presumably imported elsewhere in this module; verify it
                # resolves, otherwise hitting this branch raises NameError.
                raise ArgumentError("Bad url to mock request")
        mock_request.side_effect = handle_request
    def test_endorse_response_as_staff(self, mock_request):
        """Staff can endorse a response on a question thread they did not author."""
        self._set_mock_request_thread_and_comment(
            mock_request,
            {"type": "thread", "thread_type": "question", "user_id": str(self.student.id)},
            {"type": "comment", "thread_id": "dummy"}
        )
        self.client.login(username=self.moderator.username, password=self.password)
        response = self.client.post(
            reverse("endorse_comment", kwargs={"course_id": unicode(self.course.id), "comment_id": "dummy"})
        )
        self.assertEqual(response.status_code, 200)
    def test_endorse_response_as_student(self, mock_request):
        """A student who did not author the question cannot endorse (401)."""
        self._set_mock_request_thread_and_comment(
            mock_request,
            {"type": "thread", "thread_type": "question", "user_id": str(self.moderator.id)},
            {"type": "comment", "thread_id": "dummy"}
        )
        self.client.login(username=self.student.username, password=self.password)
        response = self.client.post(
            reverse("endorse_comment", kwargs={"course_id": unicode(self.course.id), "comment_id": "dummy"})
        )
        self.assertEqual(response.status_code, 401)
    def test_endorse_response_as_student_question_author(self, mock_request):
        """The student who authored the question may endorse responses to it."""
        self._set_mock_request_thread_and_comment(
            mock_request,
            {"type": "thread", "thread_type": "question", "user_id": str(self.student.id)},
            {"type": "comment", "thread_id": "dummy"}
        )
        self.client.login(username=self.student.username, password=self.password)
        response = self.client.post(
            reverse("endorse_comment", kwargs={"course_id": unicode(self.course.id), "comment_id": "dummy"})
        )
        self.assertEqual(response.status_code, 200)
@attr(shard=2)
class CreateThreadUnicodeTestCase(
        ForumsEnableMixin,
        SharedModuleStoreTestCase,
        UnicodeTestMixin,
        MockRequestSetupMixin):
    """Ensure thread creation round-trips unicode body and title unchanged."""
    @classmethod
    def setUpClass(cls):
        # pylint: disable=super-method-not-called
        with super(CreateThreadUnicodeTestCase, cls).setUpClassAndTestData():
            cls.course = CourseFactory.create()
    @classmethod
    def setUpTestData(cls):
        super(CreateThreadUnicodeTestCase, cls).setUpTestData()
        seed_permissions_roles(cls.course.id)
        cls.student = UserFactory.create()
        CourseEnrollmentFactory(user=cls.student, course_id=cls.course.id)
    @patch('lms.lib.comment_client.utils.requests.request', autospec=True)
    def _test_unicode_data(self, text, mock_request,):
        """
        Test to make sure unicode data in a thread doesn't break it.
        """
        self._set_mock_request_data(mock_request, {})
        request = RequestFactory().post("dummy_url", {"thread_type": "discussion", "body": text, "title": text})
        request.user = self.student
        request.view_name = "create_thread"
        response = views.create_thread(
            # The commentable ID contains a username, the Unicode char below ensures it works fine
            request, course_id=unicode(self.course.id), commentable_id=u"non_tåem_dummy_id"
        )
        self.assertEqual(response.status_code, 200)
        self.assertTrue(mock_request.called)
        # The body and title sent to the comments service must match the
        # unicode text exactly.
        self.assertEqual(mock_request.call_args[1]["data"]["body"], text)
        self.assertEqual(mock_request.call_args[1]["data"]["title"], text)
@attr(shard=2)
@disable_signal(views, 'thread_edited')
class UpdateThreadUnicodeTestCase(
        ForumsEnableMixin,
        SharedModuleStoreTestCase,
        UnicodeTestMixin,
        MockRequestSetupMixin
):
    """Ensure thread updates round-trip unicode body and title unchanged."""
    @classmethod
    def setUpClass(cls):
        # pylint: disable=super-method-not-called
        with super(UpdateThreadUnicodeTestCase, cls).setUpClassAndTestData():
            cls.course = CourseFactory.create()
    @classmethod
    def setUpTestData(cls):
        super(UpdateThreadUnicodeTestCase, cls).setUpTestData()
        seed_permissions_roles(cls.course.id)
        cls.student = UserFactory.create()
        CourseEnrollmentFactory(user=cls.student, course_id=cls.course.id)
    @patch('django_comment_client.utils.get_discussion_categories_ids', return_value=["test_commentable"])
    @patch('lms.lib.comment_client.utils.requests.request', autospec=True)
    def _test_unicode_data(self, text, mock_request, mock_get_discussion_id_map):
        """Update a thread with unicode text and verify the service payload."""
        # The mocked thread must be owned by the requester and open for the
        # update to be permitted.
        self._set_mock_request_data(mock_request, {
            "user_id": str(self.student.id),
            "closed": False,
        })
        request = RequestFactory().post("dummy_url", {"body": text, "title": text, "thread_type": "question", "commentable_id": "test_commentable"})
        request.user = self.student
        request.view_name = "update_thread"
        response = views.update_thread(request, course_id=unicode(self.course.id), thread_id="dummy_thread_id")
        self.assertEqual(response.status_code, 200)
        self.assertTrue(mock_request.called)
        self.assertEqual(mock_request.call_args[1]["data"]["body"], text)
        self.assertEqual(mock_request.call_args[1]["data"]["title"], text)
        self.assertEqual(mock_request.call_args[1]["data"]["thread_type"], "question")
        self.assertEqual(mock_request.call_args[1]["data"]["commentable_id"], "test_commentable")
@attr(shard=2)
@disable_signal(views, 'comment_created')
class CreateCommentUnicodeTestCase(
        ForumsEnableMixin,
        SharedModuleStoreTestCase,
        UnicodeTestMixin,
        MockRequestSetupMixin
):
    """Ensure comment creation round-trips a unicode body unchanged."""
    @classmethod
    def setUpClass(cls):
        # pylint: disable=super-method-not-called
        with super(CreateCommentUnicodeTestCase, cls).setUpClassAndTestData():
            cls.course = CourseFactory.create()
    @classmethod
    def setUpTestData(cls):
        super(CreateCommentUnicodeTestCase, cls).setUpTestData()
        seed_permissions_roles(cls.course.id)
        cls.student = UserFactory.create()
        CourseEnrollmentFactory(user=cls.student, course_id=cls.course.id)
    @patch('lms.lib.comment_client.utils.requests.request', autospec=True)
    def _test_unicode_data(self, text, mock_request):
        """Create a comment containing unicode text and verify the payload."""
        commentable_id = "non_team_dummy_id"
        self._set_mock_request_data(mock_request, {
            "closed": False,
            "commentable_id": commentable_id
        })
        # We have to get clever here due to Thread's setters and getters.
        # Patch won't work with it.
        try:
            # Monkey-patch the class attribute; the finally block below
            # guarantees it is removed again even if the test fails.
            Thread.commentable_id = commentable_id
            request = RequestFactory().post("dummy_url", {"body": text})
            request.user = self.student
            request.view_name = "create_comment"
            response = views.create_comment(
                request, course_id=unicode(self.course.id), thread_id="dummy_thread_id"
            )
            self.assertEqual(response.status_code, 200)
            self.assertTrue(mock_request.called)
            self.assertEqual(mock_request.call_args[1]["data"]["body"], text)
        finally:
            del Thread.commentable_id
@attr(shard=2)
@disable_signal(views, 'comment_edited')
class UpdateCommentUnicodeTestCase(
        ForumsEnableMixin,
        SharedModuleStoreTestCase,
        UnicodeTestMixin,
        MockRequestSetupMixin
):
    """Ensure comment updates round-trip a unicode body unchanged."""
    @classmethod
    def setUpClass(cls):
        # pylint: disable=super-method-not-called
        with super(UpdateCommentUnicodeTestCase, cls).setUpClassAndTestData():
            cls.course = CourseFactory.create()
    @classmethod
    def setUpTestData(cls):
        super(UpdateCommentUnicodeTestCase, cls).setUpTestData()
        seed_permissions_roles(cls.course.id)
        cls.student = UserFactory.create()
        CourseEnrollmentFactory(user=cls.student, course_id=cls.course.id)
    @patch('lms.lib.comment_client.utils.requests.request', autospec=True)
    def _test_unicode_data(self, text, mock_request):
        """Update a comment with unicode text and verify the service payload."""
        # Comment must belong to the requester and be open to allow editing.
        self._set_mock_request_data(mock_request, {
            "user_id": str(self.student.id),
            "closed": False,
        })
        request = RequestFactory().post("dummy_url", {"body": text})
        request.user = self.student
        request.view_name = "update_comment"
        response = views.update_comment(request, course_id=unicode(self.course.id), comment_id="dummy_comment_id")
        self.assertEqual(response.status_code, 200)
        self.assertTrue(mock_request.called)
        self.assertEqual(mock_request.call_args[1]["data"]["body"], text)
@attr(shard=2)
@disable_signal(views, 'comment_created')
class CreateSubCommentUnicodeTestCase(
        ForumsEnableMixin,
        SharedModuleStoreTestCase,
        UnicodeTestMixin,
        MockRequestSetupMixin
):
    """
    Make sure comments under a response can handle unicode.
    """
    @classmethod
    def setUpClass(cls):
        # pylint: disable=super-method-not-called
        with super(CreateSubCommentUnicodeTestCase, cls).setUpClassAndTestData():
            cls.course = CourseFactory.create()
    @classmethod
    def setUpTestData(cls):
        super(CreateSubCommentUnicodeTestCase, cls).setUpTestData()
        seed_permissions_roles(cls.course.id)
        cls.student = UserFactory.create()
        CourseEnrollmentFactory(user=cls.student, course_id=cls.course.id)
    @patch('lms.lib.comment_client.utils.requests.request', autospec=True)
    def _test_unicode_data(self, text, mock_request):
        """
        Create a comment with unicode in it.
        """
        # depth=1 marks the parent as a response, so this creates a
        # sub-comment beneath it.
        self._set_mock_request_data(mock_request, {
            "closed": False,
            "depth": 1,
            "thread_id": "test_thread",
            "commentable_id": "non_team_dummy_id"
        })
        request = RequestFactory().post("dummy_url", {"body": text})
        request.user = self.student
        request.view_name = "create_sub_comment"
        # Monkey-patch Thread's class attribute (its getters/setters defeat
        # mock.patch); the finally block restores the class.
        Thread.commentable_id = "test_commentable"
        try:
            response = views.create_sub_comment(
                request, course_id=unicode(self.course.id), comment_id="dummy_comment_id"
            )
            self.assertEqual(response.status_code, 200)
            self.assertTrue(mock_request.called)
            self.assertEqual(mock_request.call_args[1]["data"]["body"], text)
        finally:
            del Thread.commentable_id
@attr(shard=2)
@ddt.ddt
@patch("lms.lib.comment_client.utils.requests.request", autospec=True)
@disable_signal(views, 'thread_voted')
@disable_signal(views, 'thread_edited')
@disable_signal(views, 'comment_created')
@disable_signal(views, 'comment_voted')
@disable_signal(views, 'comment_deleted')
@pytest.mark.django111_expected_failure
class TeamsPermissionsTestCase(ForumsEnableMixin, UrlResetMixin, SharedModuleStoreTestCase, MockRequestSetupMixin):
    """
    Verify that team- and group-scoped discussion permissions are enforced:
    team members and moderators may act on team commentables, non-members
    are rejected (401), and group moderators are limited to their own
    cohort/enrollment-track group.
    """
    # Most of the test points use the same ddt data.
    # args: user, commentable_id, status_code
    ddt_permissions_args = [
        # Student in team can do operations on threads/comments within the team commentable.
        ('student_in_team', 'team_commentable_id', 200),
        # Non-team commentables can be edited by any student.
        ('student_in_team', 'course_commentable_id', 200),
        # Student not in team cannot do operations within the team commentable.
        ('student_not_in_team', 'team_commentable_id', 401),
        # Non-team commentables can be edited by any student.
        ('student_not_in_team', 'course_commentable_id', 200),
        # Moderators can always operator on threads within a team, regardless of team membership.
        ('moderator', 'team_commentable_id', 200),
        # Group moderators have regular student privileges for creating a thread and commenting
        ('group_moderator', 'course_commentable_id', 200)
    ]
    def change_divided_discussion_settings(self, scheme):
        """
        Change divided discussion settings for the current course.
        If dividing by cohorts, create and assign users to a cohort.
        """
        enable_cohorts = True if scheme is CourseDiscussionSettings.COHORT else False
        set_course_discussion_settings(
            self.course.id,
            enable_cohorts=enable_cohorts,
            divided_discussions=[],
            always_divide_inline_discussions=True,
            division_scheme=scheme,
        )
        set_course_cohorted(self.course.id, enable_cohorts)
    @classmethod
    def setUpClass(cls):
        # pylint: disable=super-method-not-called
        with super(TeamsPermissionsTestCase, cls).setUpClassAndTestData():
            teams_configuration = {
                'topics': [{'id': "topic_id", 'name': 'Solar Power', 'description': 'Solar power is hot'}]
            }
            cls.course = CourseFactory.create(teams_configuration=teams_configuration)
    @classmethod
    def setUpTestData(cls):
        """Create users in every permission role, a team, and a cohort."""
        super(TeamsPermissionsTestCase, cls).setUpTestData()
        # NOTE(review): this re-creates cls.course, discarding the
        # teams_configuration course built in setUpClass — verify this is
        # intentional.
        cls.course = CourseFactory.create()
        cls.password = "test password"
        seed_permissions_roles(cls.course.id)
        # Create enrollment tracks
        CourseModeFactory.create(
            course_id=cls.course.id,
            mode_slug=CourseMode.VERIFIED
        )
        CourseModeFactory.create(
            course_id=cls.course.id,
            mode_slug=CourseMode.AUDIT
        )
        # Create 6 users--
        # student in team (in the team, audit)
        # student not in team (not in the team, audit)
        # cohorted (in the cohort, audit)
        # verified (not in the cohort, verified)
        # moderator (in the cohort, audit, moderator permissions)
        # group moderator (in the cohort, verified, group moderator permissions)
        def create_users_and_enroll(coursemode):
            # Helper: create one user and enroll them in the course with the
            # given enrollment mode.
            student = UserFactory.create(password=cls.password)
            CourseEnrollmentFactory(
                course_id=cls.course.id,
                user=student,
                mode=coursemode
            )
            return student
        cls.student_in_team, cls.student_not_in_team, cls.moderator, cls.cohorted = (
            [create_users_and_enroll(CourseMode.AUDIT) for _ in range(4)])
        cls.verified, cls.group_moderator = [create_users_and_enroll(CourseMode.VERIFIED) for _ in range(2)]
        # Give moderator and group moderator permissions
        cls.moderator.roles.add(Role.objects.get(name="Moderator", course_id=cls.course.id))
        assign_role(cls.course.id, cls.group_moderator, 'Group Moderator')
        # Create a team
        cls.team_commentable_id = "team_discussion_id"
        cls.team = CourseTeamFactory.create(
            name=u'The Only Team',
            course_id=cls.course.id,
            topic_id='topic_id',
            discussion_topic_id=cls.team_commentable_id
        )
        CourseTeamMembershipFactory.create(team=cls.team, user=cls.student_in_team)
        # Dummy commentable ID not linked to a team
        cls.course_commentable_id = "course_level_commentable"
        # Create cohort and add students to it
        CohortFactory(
            course_id=cls.course.id,
            name='Test Cohort',
            users=[cls.group_moderator, cls.cohorted]
        )
    @patch.dict("django.conf.settings.FEATURES", {"ENABLE_DISCUSSION_SERVICE": True})
    def setUp(self):
        super(TeamsPermissionsTestCase, self).setUp()
    def _setup_mock(self, user, mock_request, data):
        """Configure the mocked comments service with ``data`` and log in ``user`` (attribute name)."""
        user = getattr(self, user)
        self._set_mock_request_data(mock_request, data)
        self.client.login(username=user.username, password=self.password)
    @ddt.data(
        # student_in_team will be able to update his own post, regardless of team membership
        ('student_in_team', 'student_in_team', 'team_commentable_id', 200, CourseDiscussionSettings.NONE),
        ('student_in_team', 'student_in_team', 'course_commentable_id', 200, CourseDiscussionSettings.NONE),
        # students can only update their own posts
        ('student_in_team', 'moderator', 'team_commentable_id', 401, CourseDiscussionSettings.NONE),
        # Even though student_not_in_team is not in the team, he can still modify posts he created while in the team.
        ('student_not_in_team', 'student_not_in_team', 'team_commentable_id', 200, CourseDiscussionSettings.NONE),
        # Moderators can change their own posts and other people's posts.
        ('moderator', 'moderator', 'team_commentable_id', 200, CourseDiscussionSettings.NONE),
        ('moderator', 'student_in_team', 'team_commentable_id', 200, CourseDiscussionSettings.NONE),
        # Group moderator can do operations on commentables within their group if the course is divided
        ('group_moderator', 'verified', 'course_commentable_id', 200, CourseDiscussionSettings.ENROLLMENT_TRACK),
        ('group_moderator', 'cohorted', 'course_commentable_id', 200, CourseDiscussionSettings.COHORT),
        # Group moderators cannot do operations on commentables outside of their group
        ('group_moderator', 'verified', 'course_commentable_id', 401, CourseDiscussionSettings.COHORT),
        ('group_moderator', 'cohorted', 'course_commentable_id', 401, CourseDiscussionSettings.ENROLLMENT_TRACK),
        # Group moderators cannot do operations when the course is not divided
        ('group_moderator', 'verified', 'course_commentable_id', 401, CourseDiscussionSettings.NONE),
        ('group_moderator', 'cohorted', 'course_commentable_id', 401, CourseDiscussionSettings.NONE)
    )
    @ddt.unpack
    def test_update_thread(self, user, thread_author, commentable_id, status_code, division_scheme, mock_request):
        """
        Verify that update_thread is limited to thread authors and privileged users (team membership does not matter).
        """
        self.change_divided_discussion_settings(division_scheme)
        commentable_id = getattr(self, commentable_id)
        # thread_author is who is marked as the author of the thread being updated.
        thread_author = getattr(self, thread_author)
        self._setup_mock(
            user, mock_request,  # user is the person making the request.
            {
                "user_id": str(thread_author.id),
                "closed": False, "commentable_id": commentable_id,
                "context": "standalone",
                "username": thread_author.username,
                "course_id": unicode(self.course.id)
            }
        )
        response = self.client.post(
            reverse(
                "update_thread",
                kwargs={
                    "course_id": unicode(self.course.id),
                    "thread_id": "dummy"
                }
            ),
            data={"body": "foo", "title": "foo", "commentable_id": commentable_id}
        )
        self.assertEqual(response.status_code, status_code)
    @ddt.data(
        # Students can delete their own posts
        ('student_in_team', 'student_in_team', 'team_commentable_id', 200, CourseDiscussionSettings.NONE),
        # Moderators can delete any post
        ('moderator', 'student_in_team', 'team_commentable_id', 200, CourseDiscussionSettings.NONE),
        # Others cannot delete posts
        ('student_in_team', 'moderator', 'team_commentable_id', 401, CourseDiscussionSettings.NONE),
        ('student_not_in_team', 'student_in_team', 'team_commentable_id', 401, CourseDiscussionSettings.NONE),
        # Group moderator can do operations on commentables within their group if the course is divided
        ('group_moderator', 'verified', 'team_commentable_id', 200, CourseDiscussionSettings.ENROLLMENT_TRACK),
        ('group_moderator', 'cohorted', 'team_commentable_id', 200, CourseDiscussionSettings.COHORT),
        # Group moderators cannot do operations on commentables outside of their group
        ('group_moderator', 'verified', 'team_commentable_id', 401, CourseDiscussionSettings.COHORT),
        ('group_moderator', 'cohorted', 'team_commentable_id', 401, CourseDiscussionSettings.ENROLLMENT_TRACK),
        # Group moderators cannot do operations when the course is not divided
        ('group_moderator', 'verified', 'team_commentable_id', 401, CourseDiscussionSettings.NONE),
        ('group_moderator', 'cohorted', 'team_commentable_id', 401, CourseDiscussionSettings.NONE)
    )
    @ddt.unpack
    def test_delete_comment(self, user, comment_author, commentable_id, status_code, division_scheme, mock_request):
        """Verify delete_comment is limited to comment authors and privileged users."""
        commentable_id = getattr(self, commentable_id)
        comment_author = getattr(self, comment_author)
        self.change_divided_discussion_settings(division_scheme)
        self._setup_mock(user, mock_request, {
            "closed": False,
            "commentable_id": commentable_id,
            "user_id": str(comment_author.id),
            "username": comment_author.username,
            "course_id": unicode(self.course.id)
        })
        response = self.client.post(
            reverse(
                "delete_comment",
                kwargs={
                    "course_id": unicode(self.course.id),
                    "comment_id": "dummy"
                }
            ),
            data={"body": "foo", "title": "foo"}
        )
        self.assertEqual(response.status_code, status_code)
    @ddt.data(*ddt_permissions_args)
    @ddt.unpack
    def test_create_comment(self, user, commentable_id, status_code, mock_request):
        """
        Verify that create_comment is limited to members of the team or users with 'edit_content' permission.
        """
        commentable_id = getattr(self, commentable_id)
        self._setup_mock(user, mock_request, {"closed": False, "commentable_id": commentable_id})
        response = self.client.post(
            reverse(
                "create_comment",
                kwargs={
                    "course_id": unicode(self.course.id),
                    "thread_id": "dummy"
                }
            ),
            data={"body": "foo", "title": "foo"}
        )
        self.assertEqual(response.status_code, status_code)
    @ddt.data(*ddt_permissions_args)
    @ddt.unpack
    def test_create_sub_comment(self, user, commentable_id, status_code, mock_request):
        """
        Verify that create_subcomment is limited to members of the team or users with 'edit_content' permission.
        """
        commentable_id = getattr(self, commentable_id)
        self._setup_mock(
            user, mock_request,
            {"closed": False, "commentable_id": commentable_id, "thread_id": "dummy_thread"},
        )
        response = self.client.post(
            reverse(
                "create_sub_comment",
                kwargs={
                    "course_id": unicode(self.course.id),
                    "comment_id": "dummy_comment"
                }
            ),
            data={"body": "foo", "title": "foo"}
        )
        self.assertEqual(response.status_code, status_code)
    @ddt.data(*ddt_permissions_args)
    @ddt.unpack
    def test_comment_actions(self, user, commentable_id, status_code, mock_request):
        """
        Verify that voting and flagging of comments is limited to members of the team or users with
        'edit_content' permission.
        """
        commentable_id = getattr(self, commentable_id)
        self._setup_mock(
            user, mock_request,
            {"closed": False, "commentable_id": commentable_id, "thread_id": "dummy_thread"},
        )
        for action in ["upvote_comment", "downvote_comment", "un_flag_abuse_for_comment", "flag_abuse_for_comment"]:
            response = self.client.post(
                reverse(
                    action,
                    kwargs={"course_id": unicode(self.course.id), "comment_id": "dummy_comment"}
                )
            )
            self.assertEqual(response.status_code, status_code)
    @ddt.data(*ddt_permissions_args)
    @ddt.unpack
    def test_threads_actions(self, user, commentable_id, status_code, mock_request):
        """
        Verify that voting, flagging, and following of threads is limited to members of the team or users with
        'edit_content' permission.
        """
        commentable_id = getattr(self, commentable_id)
        self._setup_mock(
            user, mock_request,
            {"closed": False, "commentable_id": commentable_id},
        )
        for action in ["upvote_thread", "downvote_thread", "un_flag_abuse_for_thread", "flag_abuse_for_thread",
                       "follow_thread", "unfollow_thread"]:
            response = self.client.post(
                reverse(
                    action,
                    kwargs={"course_id": unicode(self.course.id), "thread_id": "dummy_thread"}
                )
            )
            self.assertEqual(response.status_code, status_code)
    @ddt.data(*ddt_permissions_args)
    @ddt.unpack
    def test_create_thread(self, user, commentable_id, status_code, __):
        """
        Verify that creation of threads is limited to members of the team or users with 'edit_content' permission.
        """
        commentable_id = getattr(self, commentable_id)
        # mock_request is not used because Commentables don't exist in comment service.
        self.client.login(username=getattr(self, user).username, password=self.password)
        response = self.client.post(
            reverse(
                "create_thread",
                kwargs={"course_id": unicode(self.course.id), "commentable_id": commentable_id}
            ),
            data={"body": "foo", "title": "foo", "thread_type": "discussion"}
        )
        self.assertEqual(response.status_code, status_code)
    @ddt.data(*ddt_permissions_args)
    @ddt.unpack
    def test_commentable_actions(self, user, commentable_id, status_code, __):
        """
        Verify that following of commentables is limited to members of the team or users with
        'edit_content' permission.
        """
        commentable_id = getattr(self, commentable_id)
        # mock_request is not used because Commentables don't exist in comment service.
        self.client.login(username=getattr(self, user).username, password=self.password)
        for action in ["follow_commentable", "unfollow_commentable"]:
            response = self.client.post(
                reverse(
                    action,
                    kwargs={"course_id": unicode(self.course.id), "commentable_id": commentable_id}
                )
            )
            self.assertEqual(response.status_code, status_code)
# Discussion topic ID shared by the team-event tests below.
TEAM_COMMENTABLE_ID = 'test-team-discussion'
@attr(shard=2)
@disable_signal(views, 'comment_created')
@ddt.ddt
class ForumEventTestCase(ForumsEnableMixin, SharedModuleStoreTestCase, MockRequestSetupMixin):
    """
    Forum actions are expected to launch analytics events. Test these here.
    """
    @classmethod
    def setUpClass(cls):
        # pylint: disable=super-method-not-called
        with super(ForumEventTestCase, cls).setUpClassAndTestData():
            cls.course = CourseFactory.create()
    @classmethod
    def setUpTestData(cls):
        """Create an enrolled student with a forum role and a course role."""
        super(ForumEventTestCase, cls).setUpTestData()
        seed_permissions_roles(cls.course.id)
        cls.student = UserFactory.create()
        CourseEnrollmentFactory(user=cls.student, course_id=cls.course.id)
        cls.student.roles.add(Role.objects.get(name="Student", course_id=cls.course.id))
        # The 'Wizard' course role lets the tests check user_course_roles.
        CourseAccessRoleFactory(course_id=cls.course.id, user=cls.student, role='Wizard')
    @patch('eventtracking.tracker.emit')
    @patch('lms.lib.comment_client.utils.requests.request', autospec=True)
    def test_thread_created_event(self, __, mock_emit):
        """Creating a thread emits edx.forum.thread.created with the full payload."""
        request = RequestFactory().post(
            "dummy_url", {
                "thread_type": "discussion",
                "body": "Test text",
                "title": "Test",
                "auto_subscribe": True
            }
        )
        request.user = self.student
        request.view_name = "create_thread"
        views.create_thread(request, course_id=unicode(self.course.id), commentable_id="test_commentable")
        event_name, event = mock_emit.call_args[0]
        self.assertEqual(event_name, 'edx.forum.thread.created')
        self.assertEqual(event['body'], 'Test text')
        self.assertEqual(event['title'], 'Test')
        self.assertEqual(event['commentable_id'], 'test_commentable')
        self.assertEqual(event['user_forums_roles'], ['Student'])
        self.assertEqual(event['options']['followed'], True)
        self.assertEqual(event['user_course_roles'], ['Wizard'])
        self.assertEqual(event['anonymous'], False)
        self.assertEqual(event['group_id'], None)
        self.assertEqual(event['thread_type'], 'discussion')
        self.assertEquals(event['anonymous_to_peers'], False)
    @patch('eventtracking.tracker.emit')
    @patch('lms.lib.comment_client.utils.requests.request', autospec=True)
    def test_response_event(self, mock_request, mock_emit):
        """
        Check to make sure an event is fired when a user responds to a thread.
        """
        mock_request.return_value.status_code = 200
        self._set_mock_request_data(mock_request, {
            "closed": False,
            "commentable_id": 'test_commentable_id',
            'thread_id': 'test_thread_id',
        })
        request = RequestFactory().post("dummy_url", {"body": "Test comment", 'auto_subscribe': True})
        request.user = self.student
        request.view_name = "create_comment"
        views.create_comment(request, course_id=unicode(self.course.id), thread_id='test_thread_id')
        event_name, event = mock_emit.call_args[0]
        self.assertEqual(event_name, 'edx.forum.response.created')
        self.assertEqual(event['body'], "Test comment")
        self.assertEqual(event['commentable_id'], 'test_commentable_id')
        self.assertEqual(event['user_forums_roles'], ['Student'])
        self.assertEqual(event['user_course_roles'], ['Wizard'])
        self.assertEqual(event['discussion']['id'], 'test_thread_id')
        self.assertEqual(event['options']['followed'], True)
    @patch('eventtracking.tracker.emit')
    @patch('lms.lib.comment_client.utils.requests.request', autospec=True)
    def test_comment_event(self, mock_request, mock_emit):
        """
        Ensure an event is fired when someone comments on a response.
        """
        # depth=1 / parent_id mark the mocked object as a response, so the
        # new item is a (sub-)comment and the comment.created event fires.
        self._set_mock_request_data(mock_request, {
            "closed": False,
            "depth": 1,
            "thread_id": "test_thread_id",
            "commentable_id": "test_commentable_id",
            "parent_id": "test_response_id"
        })
        request = RequestFactory().post("dummy_url", {"body": "Another comment"})
        request.user = self.student
        request.view_name = "create_sub_comment"
        views.create_sub_comment(request, course_id=unicode(self.course.id), comment_id="dummy_comment_id")
        event_name, event = mock_emit.call_args[0]
        self.assertEqual(event_name, "edx.forum.comment.created")
        self.assertEqual(event['body'], 'Another comment')
        self.assertEqual(event['discussion']['id'], 'test_thread_id')
        self.assertEqual(event['response']['id'], 'test_response_id')
        self.assertEqual(event['user_forums_roles'], ['Student'])
        self.assertEqual(event['user_course_roles'], ['Wizard'])
        self.assertEqual(event['options']['followed'], False)
    @patch('eventtracking.tracker.emit')
    @patch('lms.lib.comment_client.utils.requests.request', autospec=True)
    @ddt.data((
        'create_thread',
        'edx.forum.thread.created', {
            'thread_type': 'discussion',
            'body': 'Test text',
            'title': 'Test',
            'auto_subscribe': True
        },
        {'commentable_id': TEAM_COMMENTABLE_ID}
    ), (
        'create_comment',
        'edx.forum.response.created',
        {'body': 'Test comment', 'auto_subscribe': True},
        {'thread_id': 'test_thread_id'}
    ), (
        'create_sub_comment',
        'edx.forum.comment.created',
        {'body': 'Another comment'},
        {'comment_id': 'dummy_comment_id'}
    ))
    @ddt.unpack
    @pytest.mark.django111_expected_failure
    def test_team_events(self, view_name, event_name, view_data, view_kwargs, mock_request, mock_emit):
        """Events for team-scoped content carry the team's team_id."""
        user = self.student
        team = CourseTeamFactory.create(discussion_topic_id=TEAM_COMMENTABLE_ID)
        CourseTeamMembershipFactory.create(team=team, user=user)
        mock_request.return_value.status_code = 200
        self._set_mock_request_data(mock_request, {
            'closed': False,
            'commentable_id': TEAM_COMMENTABLE_ID,
            'thread_id': 'test_thread_id',
        })
        request = RequestFactory().post('dummy_url', view_data)
        request.user = user
        request.view_name = view_name
        getattr(views, view_name)(request, course_id=unicode(self.course.id), **view_kwargs)
        name, event = mock_emit.call_args[0]
        self.assertEqual(name, event_name)
        self.assertEqual(event['team_id'], team.team_id)
    @ddt.data(
        ('vote_for_thread', 'thread_id', 'thread'),
        ('undo_vote_for_thread', 'thread_id', 'thread'),
        ('vote_for_comment', 'comment_id', 'response'),
        ('undo_vote_for_comment', 'comment_id', 'response'),
    )
    @ddt.unpack
    @patch('eventtracking.tracker.emit')
    @patch('lms.lib.comment_client.utils.requests.request', autospec=True)
    def test_thread_voted_event(self, view_name, obj_id_name, obj_type, mock_request, mock_emit):
        """Voting (and undoing a vote) emits edx.forum.<type>.voted."""
        undo = view_name.startswith('undo')
        self._set_mock_request_data(mock_request, {
            'closed': False,
            'commentable_id': 'test_commentable_id',
            'username': 'gumprecht',
        })
        request = RequestFactory().post('dummy_url', {})
        request.user = self.student
        request.view_name = view_name
        view_function = getattr(views, view_name)
        kwargs = dict(course_id=unicode(self.course.id))
        # NOTE(review): the id kwarg is set to the kwarg's own name
        # (e.g. thread_id='thread_id'); presumably any dummy id works
        # against the mocked service — confirm this is deliberate.
        kwargs[obj_id_name] = obj_id_name
        if not undo:
            # The vote views require a value; undo views take none.
            kwargs.update(value='up')
        view_function(request, **kwargs)
        self.assertTrue(mock_emit.called)
        event_name, event = mock_emit.call_args[0]
        self.assertEqual(event_name, 'edx.forum.{}.voted'.format(obj_type))
        self.assertEqual(event['target_username'], 'gumprecht')
        self.assertEqual(event['undo_vote'], undo)
        self.assertEqual(event['vote_value'], 'up')
@attr(shard=2)
class UsersEndpointTestCase(ForumsEnableMixin, SharedModuleStoreTestCase, MockRequestSetupMixin):
    """Tests for the forum `users` endpoint (exact-username lookup)."""

    @classmethod
    def setUpClass(cls):
        # pylint: disable=super-method-not-called
        with super(UsersEndpointTestCase, cls).setUpClassAndTestData():
            cls.course = CourseFactory.create()

    @classmethod
    def setUpTestData(cls):
        """Create the requesting student and the enrolled target user 'other'."""
        super(UsersEndpointTestCase, cls).setUpTestData()
        seed_permissions_roles(cls.course.id)
        cls.student = UserFactory.create()
        cls.enrollment = CourseEnrollmentFactory(user=cls.student, course_id=cls.course.id)
        cls.other_user = UserFactory.create(username="other")
        CourseEnrollmentFactory(user=cls.other_user, course_id=cls.course.id)

    def set_post_counts(self, mock_request, threads_count=1, comments_count=1):
        """
        sets up a mock response from the comments service for getting post counts for our other_user
        """
        self._set_mock_request_data(mock_request, {
            "threads_count": threads_count,
            "comments_count": comments_count,
        })

    def make_request(self, method='get', course_id=None, **kwargs):
        """Call the `users` view as self.student; kwargs become request parameters."""
        course_id = course_id or self.course.id
        request = getattr(RequestFactory(), method)("dummy_url", kwargs)
        request.user = self.student
        request.view_name = "users"
        return views.users(request, course_id=course_id.to_deprecated_string())

    @patch('lms.lib.comment_client.utils.requests.request', autospec=True)
    def test_finds_exact_match(self, mock_request):
        # An exact username match is returned as a single-element list.
        self.set_post_counts(mock_request)
        response = self.make_request(username="other")
        self.assertEqual(response.status_code, 200)
        self.assertEqual(
            json.loads(response.content)["users"],
            [{"id": self.other_user.id, "username": self.other_user.username}]
        )

    @patch('lms.lib.comment_client.utils.requests.request', autospec=True)
    def test_finds_no_match(self, mock_request):
        # A near-miss ("othor") yields an empty result, not a fuzzy match.
        self.set_post_counts(mock_request)
        response = self.make_request(username="othor")
        self.assertEqual(response.status_code, 200)
        self.assertEqual(json.loads(response.content)["users"], [])

    def test_requires_GET(self):
        # Only GET is allowed; POST yields 405.
        response = self.make_request(method='post', username="other")
        self.assertEqual(response.status_code, 405)

    def test_requires_username_param(self):
        # Missing ?username= is a 400 with "errors" and no "users" key.
        response = self.make_request()
        self.assertEqual(response.status_code, 400)
        content = json.loads(response.content)
        self.assertIn("errors", content)
        self.assertNotIn("users", content)

    def test_course_does_not_exist(self):
        course_id = CourseKey.from_string("does/not/exist")
        response = self.make_request(course_id=course_id, username="other")
        self.assertEqual(response.status_code, 404)
        content = json.loads(response.content)
        self.assertIn("errors", content)
        self.assertNotIn("users", content)

    def test_requires_requestor_enrolled_in_course(self):
        # unenroll self.student from the course.
        self.enrollment.delete()
        response = self.make_request(username="other")
        self.assertEqual(response.status_code, 404)
        content = json.loads(response.content)
        self.assertIn("errors", content)
        self.assertNotIn("users", content)

    @patch('lms.lib.comment_client.utils.requests.request', autospec=True)
    def test_requires_matched_user_has_forum_content(self, mock_request):
        # Users with zero threads and zero comments are filtered out of results.
        self.set_post_counts(mock_request, 0, 0)
        response = self.make_request(username="other")
        self.assertEqual(response.status_code, 200)
        self.assertEqual(json.loads(response.content)["users"], [])
@ddt.ddt
class SegmentIOForumThreadViewedEventTestCase(SegmentIOTrackingTestCaseBase):
    """End-to-end check that SegmentIO 'Forum: View Thread' events are transformed."""

    def _raise_navigation_event(self, label, include_name):
        """Push one SegmentIO navigation event with the given label through the
        tracking middleware and the segmentio endpoint; assert HTTP 200."""
        middleware = TrackMiddleware()
        kwargs = {'label': label}
        if include_name:
            kwargs['name'] = 'edx.bi.app.navigation.screen'
        else:
            kwargs['exclude_name'] = True
        request = self.create_request(
            data=self.create_segmentio_event_json(**kwargs),
            content_type='application/json',
        )
        # The event payload references this user id; create a matching user.
        User.objects.create(pk=SEGMENTIO_TEST_USER_ID, username=str(mock.sentinel.username))
        middleware.process_request(request)
        try:
            response = segmentio.segmentio_event(request)
            self.assertEquals(response.status_code, 200)
        finally:
            # Always unwind the middleware, even if the endpoint raised.
            middleware.process_response(request, None)

    @ddt.data(True, False)
    def test_thread_viewed(self, include_name):
        """
        Tests that a SegmentIO thread viewed event is accepted and transformed.
        Only tests that the transformation happens at all; does not
        comprehensively test that it happens correctly.
        ForumThreadViewedEventTransformerTestCase tests for correctness.
        """
        self._raise_navigation_event('Forum: View Thread', include_name)
        event = self.get_event()
        self.assertEqual(event['name'], 'edx.forum.thread.viewed')
        self.assertEqual(event['event_type'], event['name'])

    @ddt.data(True, False)
    def test_non_thread_viewed(self, include_name):
        """
        Tests that other BI events are thrown out.
        """
        self._raise_navigation_event('Forum: Create Thread', include_name)
        self.assert_no_events_emitted()
def _get_transformed_event(input_event):
    """Run *input_event* through ForumThreadViewedEventTransformer in place and
    return the transformer (which exposes the transformed event)."""
    transformer = ForumThreadViewedEventTransformer(**input_event)
    transformer.transform()
    return transformer
def _create_event(
label='Forum: View Thread',
include_context=True,
inner_context=None,
username=None,
course_id=None,
**event_data
):
result = {'name': 'edx.bi.app.navigation.screen'}
if include_context:
result['context'] = {'label': label}
if course_id:
result['context']['course_id'] = str(course_id)
if username:
result['username'] = username
if event_data:
result['event'] = event_data
if inner_context:
if not event_data:
result['event'] = {}
result['event']['context'] = inner_context
return result
def _create_and_transform_event(**kwargs):
    """Build an event via _create_event(**kwargs); return (raw_event, transformed)."""
    event = _create_event(**kwargs)
    return event, _get_transformed_event(event)
@ddt.ddt
class ForumThreadViewedEventTransformerTestCase(ForumsEnableMixin, UrlResetMixin, ModuleStoreTestCase):
    """
    Test that the ForumThreadViewedEventTransformer transforms events correctly
    and without raising exceptions.
    Because the events passed through the transformer can come from external
    sources (e.g., a mobile app), we carefully test a myriad of cases, including
    those with incomplete and malformed events.
    """

    # Discussion-module fixtures used throughout the tests.
    CATEGORY_ID = 'i4x-edx-discussion-id'
    CATEGORY_NAME = 'Discussion 1'
    PARENT_CATEGORY_NAME = 'Chapter 1'
    TEAM_CATEGORY_ID = 'i4x-edx-team-discussion-id'
    TEAM_CATEGORY_NAME = 'Team Chat'
    TEAM_PARENT_CATEGORY_NAME = PARENT_CATEGORY_NAME
    DUMMY_CATEGORY_ID = 'i4x-edx-dummy-commentable-id'
    DUMMY_THREAD_ID = 'dummy_thread_id'

    @mock.patch.dict("student.models.settings.FEATURES", {"ENABLE_DISCUSSION_SERVICE": True})
    def setUp(self):
        super(ForumThreadViewedEventTransformerTestCase, self).setUp()
        # One course per modulestore type so URL generation is exercised for both.
        self.courses_by_store = {
            ModuleStoreEnum.Type.mongo: CourseFactory.create(
                org='TestX',
                course='TR-101',
                run='Event_Transform_Test',
                default_store=ModuleStoreEnum.Type.mongo,
            ),
            ModuleStoreEnum.Type.split: CourseFactory.create(
                org='TestX',
                course='TR-101S',
                run='Event_Transform_Test_Split',
                default_store=ModuleStoreEnum.Type.split,
            ),
        }
        # NOTE(review): indexing with the literal 'mongo' assumes
        # ModuleStoreEnum.Type.mongo == 'mongo' -- confirm.
        self.course = self.courses_by_store['mongo']
        self.student = UserFactory.create()
        self.staff = UserFactory.create(is_staff=True)
        UserBasedRole(user=self.staff, role=CourseStaffRole.ROLE).add_course(self.course.id)
        CourseEnrollmentFactory.create(user=self.student, course_id=self.course.id)
        # A regular discussion category and a team-linked one.
        self.category = ItemFactory.create(
            parent_location=self.course.location,
            category='discussion',
            discussion_id=self.CATEGORY_ID,
            discussion_category=self.PARENT_CATEGORY_NAME,
            discussion_target=self.CATEGORY_NAME,
        )
        self.team_category = ItemFactory.create(
            parent_location=self.course.location,
            category='discussion',
            discussion_id=self.TEAM_CATEGORY_ID,
            discussion_category=self.TEAM_PARENT_CATEGORY_NAME,
            discussion_target=self.TEAM_CATEGORY_NAME,
        )
        self.team = CourseTeamFactory.create(
            name='Team 1',
            course_id=self.course.id,
            topic_id='arbitrary-topic-id',
            discussion_topic_id=self.team_category.discussion_id,
        )

    def test_missing_context(self):
        # Without a 'context' key the transformer must bail out silently.
        event = _create_event(include_context=False)
        with self.assertRaises(EventEmissionExit):
            _get_transformed_event(event)

    def test_no_data(self):
        # A bare view event is renamed and given an empty 'event' payload.
        event, event_trans = _create_and_transform_event()
        event['name'] = 'edx.forum.thread.viewed'
        event['event_type'] = event['name']
        event['event'] = {}
        self.assertDictEqual(event_trans, event)

    def test_inner_context(self):
        # The nested event-level context must be stripped from the output.
        _, event_trans = _create_and_transform_event(inner_context={})
        self.assertNotIn('context', event_trans['event'])

    def test_non_thread_view(self):
        # Labels other than 'Forum: View Thread' are rejected.
        event = _create_event(
            label='Forum: Create Thread',
            course_id=self.course.id,
            topic_id=self.DUMMY_CATEGORY_ID,
            thread_id=self.DUMMY_THREAD_ID,
        )
        with self.assertRaises(EventEmissionExit):
            _get_transformed_event(event)

    def test_bad_field_types(self):
        # Wrong-typed fields must not crash the transformer; event is renamed only.
        event, event_trans = _create_and_transform_event(
            course_id={},
            topic_id=3,
            thread_id=object(),
            action=3.14,
        )
        event['name'] = 'edx.forum.thread.viewed'
        event['event_type'] = event['name']
        self.assertDictEqual(event_trans, event)

    def test_bad_course_id(self):
        # An unparseable course id suppresses all enrichment fields.
        event, event_trans = _create_and_transform_event(course_id='non-existent-course-id')
        event_data = event_trans['event']
        self.assertNotIn('category_id', event_data)
        self.assertNotIn('category_name', event_data)
        self.assertNotIn('url', event_data)
        self.assertNotIn('user_forums_roles', event_data)
        self.assertNotIn('user_course_roles', event_data)

    def test_bad_username(self):
        # An unknown username suppresses category and role enrichment.
        event, event_trans = _create_and_transform_event(username='non-existent-username')
        event_data = event_trans['event']
        self.assertNotIn('category_id', event_data)
        self.assertNotIn('category_name', event_data)
        self.assertNotIn('user_forums_roles', event_data)
        self.assertNotIn('user_course_roles', event_data)

    def test_bad_url(self):
        # Ids containing '/' cannot be reversed into a URL; 'url' is omitted.
        event, event_trans = _create_and_transform_event(
            course_id=self.course.id,
            topic_id='malformed/commentable/id',
            thread_id='malformed/thread/id',
        )
        self.assertNotIn('url', event_trans['event'])

    def test_renamed_fields(self):
        # topic_id -> commentable_id, thread_id -> id, author -> target_username.
        AUTHOR = 'joe-the-plumber'
        event, event_trans = _create_and_transform_event(
            course_id=self.course.id,
            topic_id=self.DUMMY_CATEGORY_ID,
            thread_id=self.DUMMY_THREAD_ID,
            author=AUTHOR,
        )
        self.assertEqual(event_trans['event']['commentable_id'], self.DUMMY_CATEGORY_ID)
        self.assertEqual(event_trans['event']['id'], self.DUMMY_THREAD_ID)
        self.assertEqual(event_trans['event']['target_username'], AUTHOR)

    def test_titles(self):
        # No title
        _, event_1_trans = _create_and_transform_event()
        self.assertNotIn('title', event_1_trans['event'])
        self.assertNotIn('title_truncated', event_1_trans['event'])
        # Short title
        _, event_2_trans = _create_and_transform_event(
            action='!',
        )
        self.assertIn('title', event_2_trans['event'])
        self.assertIn('title_truncated', event_2_trans['event'])
        self.assertFalse(event_2_trans['event']['title_truncated'])
        # Long title
        _, event_3_trans = _create_and_transform_event(
            action=('covfefe' * 200),
        )
        self.assertIn('title', event_3_trans['event'])
        self.assertIn('title_truncated', event_3_trans['event'])
        self.assertTrue(event_3_trans['event']['title_truncated'])

    @ddt.data(ModuleStoreEnum.Type.mongo, ModuleStoreEnum.Type.split)
    def test_urls(self, store):
        # The generated URL must end with the canonical thread path for both stores.
        course = self.courses_by_store[store]
        commentable_id = self.DUMMY_CATEGORY_ID
        thread_id = self.DUMMY_THREAD_ID
        _, event_trans = _create_and_transform_event(
            course_id=course.id,
            topic_id=commentable_id,
            thread_id=thread_id,
        )
        expected_path = '/courses/{0}/discussion/forum/{1}/threads/{2}'.format(
            course.id, commentable_id, thread_id
        )
        self.assertTrue(event_trans['event'].get('url').endswith(expected_path))

    def test_categories(self):
        # Bad category
        _, event_trans_1 = _create_and_transform_event(
            username=self.student.username,
            course_id=self.course.id,
            topic_id='non-existent-category-id',
        )
        self.assertNotIn('category_id', event_trans_1['event'])
        self.assertNotIn('category_name', event_trans_1['event'])
        # Good category
        _, event_trans_2 = _create_and_transform_event(
            username=self.student.username,
            course_id=self.course.id,
            topic_id=self.category.discussion_id,
        )
        self.assertEqual(event_trans_2['event'].get('category_id'), self.category.discussion_id)
        # Category name is "<parent> / <target>".
        full_category_name = '{0} / {1}'.format(self.category.discussion_category, self.category.discussion_target)
        self.assertEqual(event_trans_2['event'].get('category_name'), full_category_name)

    def test_roles(self):
        # No user
        _, event_trans_1 = _create_and_transform_event(
            course_id=self.course.id,
        )
        self.assertNotIn('user_forums_roles', event_trans_1['event'])
        self.assertNotIn('user_course_roles', event_trans_1['event'])
        # Student user
        _, event_trans_2 = _create_and_transform_event(
            course_id=self.course.id,
            username=self.student.username,
        )
        self.assertEqual(event_trans_2['event'].get('user_forums_roles'), [FORUM_ROLE_STUDENT])
        self.assertEqual(event_trans_2['event'].get('user_course_roles'), [])
        # Course staff user
        _, event_trans_3 = _create_and_transform_event(
            course_id=self.course.id,
            username=self.staff.username,
        )
        self.assertEqual(event_trans_3['event'].get('user_forums_roles'), [])
        self.assertEqual(event_trans_3['event'].get('user_course_roles'), [CourseStaffRole.ROLE])

    def test_teams(self):
        # No category
        _, event_trans_1 = _create_and_transform_event(
            course_id=self.course.id,
        )
        self.assertNotIn('team_id', event_trans_1)
        # Non-team category
        _, event_trans_2 = _create_and_transform_event(
            course_id=self.course.id,
            topic_id=self.CATEGORY_ID,
        )
        self.assertNotIn('team_id', event_trans_2)
        # Team category
        _, event_trans_3 = _create_and_transform_event(
            course_id=self.course.id,
            topic_id=self.TEAM_CATEGORY_ID,
        )
        self.assertEqual(event_trans_3['event'].get('team_id'), self.team.team_id)
| agpl-3.0 |
Catstyle/mongoengine | tests/migration/convert_to_new_inheritance_model.py | 30 | 1533 | # -*- coding: utf-8 -*-
import unittest
from mongoengine import Document, connect
from mongoengine.connection import get_db
from mongoengine.fields import StringField
__all__ = ('ConvertToNewInheritanceModel', )
class ConvertToNewInheritanceModel(unittest.TestCase):
    """Walk through the documented mongoengine 0.7 -> 0.8 migration steps."""

    def setUp(self):
        connect(db='mongoenginetest')
        self.db = get_db()

    def tearDown(self):
        # Drop every non-system collection so each test starts clean.
        for collection in self.db.collection_names():
            if 'system.' in collection:
                continue
            self.db.drop_collection(collection)

    def test_how_to_convert_to_the_new_inheritance_model(self):
        """Demonstrates migrating from 0.7 to 0.8
        """
        # 1. Declaration of the class
        class Animal(Document):
            name = StringField()
            meta = {
                'allow_inheritance': True,
                'indexes': ['name']
            }

        # 2. Remove _types
        collection = Animal._get_collection()
        collection.update({}, {"$unset": {"_types": 1}}, multi=True)

        # 3. Confirm extra data is removed
        count = collection.find({'_types': {"$exists": True}}).count()
        self.assertEqual(0, count)

        # 4. Remove indexes
        # NOTE: dict.iteritems() is Python 2-only; this test predates Python 3.
        info = collection.index_information()
        indexes_to_drop = [key for key, value in info.iteritems()
                           if '_types' in dict(value['key'])]
        for index in indexes_to_drop:
            collection.drop_index(index)

        # 5. Recreate indexes
        Animal.ensure_indexes()
| mit |
nyalldawson/QGIS | tests/src/python/test_authmanager_password_ows.py | 15 | 10673 | # -*- coding: utf-8 -*-
"""
Tests for auth manager WMS/WFS using QGIS Server through HTTP Basic
enabled qgis_wrapped_server.py.
This is an integration test for QGIS Desktop Auth Manager WFS and WMS provider
and QGIS Server WFS/WMS that check if QGIS can use a stored auth manager auth
configuration to access an HTTP Basic protected endpoint.
From build dir, run from test directory:
LC_ALL=en_US.UTF-8 ctest -R PyQgsAuthManagerPasswordOWSTest -V
.. note:: This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
"""
import os
import sys
import re
import subprocess
import tempfile
import random
import string
import urllib
from functools import partial
__author__ = 'Alessandro Pasotti'
__date__ = '18/09/2016'
__copyright__ = 'Copyright 2016, The QGIS Project'
from shutil import rmtree
from utilities import unitTestDataPath, waitServer
from qgis.core import (
QgsApplication,
QgsAuthMethodConfig,
QgsVectorLayer,
QgsRasterLayer,
QgsFileDownloader,
)
from qgis.testing import (
start_app,
unittest,
)
from qgis.PyQt.QtCore import (
QEventLoop,
QUrl,
)
# The wrapped-server port can be forced from the environment; '0' means
# "let the OS pick a free port" (the real port is parsed from server output).
try:
    QGIS_SERVER_ENDPOINT_PORT = os.environ['QGIS_SERVER_ENDPOINT_PORT']
except KeyError:  # narrowed from a bare `except:` -- only a missing key is expected
    QGIS_SERVER_ENDPOINT_PORT = '0'  # Auto

# Use a throw-away auth DB so tests never touch the user's real credentials.
QGIS_AUTH_DB_DIR_PATH = tempfile.mkdtemp()
os.environ['QGIS_AUTH_DB_DIR_PATH'] = QGIS_AUTH_DB_DIR_PATH

qgis_app = start_app()
class TestAuthManager(unittest.TestCase):
    """Integration tests: stored Basic-auth configs against an HTTP-Basic
    protected QGIS Server spawned via qgis_wrapped_server.py."""

    @classmethod
    def setUpClass(cls):
        """Run before all tests:
        Creates an auth configuration"""
        cls.port = QGIS_SERVER_ENDPOINT_PORT
        # Clean env just to be sure
        env_vars = ['QUERY_STRING', 'QGIS_PROJECT_FILE']
        for ev in env_vars:
            try:
                del os.environ[ev]
            except KeyError:
                pass
        cls.testdata_path = unitTestDataPath('qgis_server') + '/'
        cls.project_path = cls.testdata_path + "test_project.qgs"
        # Enable auth
        # os.environ['QGIS_AUTH_PASSWORD_FILE'] = QGIS_AUTH_PASSWORD_FILE
        authm = QgsApplication.authManager()
        assert (authm.setMasterPassword('masterpassword', True))
        cls.auth_config = QgsAuthMethodConfig('Basic')
        cls.auth_config.setName('test_auth_config')
        # Random credentials per run; the password is the username reversed.
        cls.username = ''.join(random.choice(string.ascii_uppercase + string.digits) for _ in range(6))
        cls.password = cls.username[::-1]  # reversed
        cls.auth_config.setConfig('username', cls.username)
        cls.auth_config.setConfig('password', cls.password)
        assert (authm.storeAuthenticationConfig(cls.auth_config)[0])
        cls.hostname = '127.0.0.1'
        cls.protocol = 'http'
        os.environ['QGIS_SERVER_HTTP_BASIC_AUTH'] = '1'
        os.environ['QGIS_SERVER_USERNAME'] = cls.username
        os.environ['QGIS_SERVER_PASSWORD'] = cls.password
        os.environ['QGIS_SERVER_PORT'] = str(cls.port)
        os.environ['QGIS_SERVER_HOST'] = cls.hostname
        server_path = os.path.dirname(os.path.realpath(__file__)) + \
            '/qgis_wrapped_server.py'
        cls.server = subprocess.Popen([sys.executable, server_path],
                                      env=os.environ, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
        line = cls.server.stdout.readline()
        # FIX: raw bytes literal -- b':(\d+)' contains the invalid escape
        # sequence '\d' (DeprecationWarning, a SyntaxError in newer Pythons).
        cls.port = int(re.findall(rb':(\d+)', line)[0])
        assert cls.port != 0
        # Wait for the server process to start
        assert waitServer('%s://%s:%s' % (cls.protocol, cls.hostname, cls.port)), "Server is not responding! %s://%s:%s" % (cls.protocol, cls.hostname, cls.port)

    @classmethod
    def tearDownClass(cls):
        """Run after all tests"""
        cls.server.terminate()
        rmtree(QGIS_AUTH_DB_DIR_PATH)
        del cls.server

    def setUp(self):
        """Run before each test."""
        pass

    def tearDown(self):
        """Run after each test."""
        pass

    @classmethod
    def _getWFSLayer(cls, type_name, layer_name=None, authcfg=None):
        """
        WFS layer factory
        """
        if layer_name is None:
            layer_name = 'wfs_' + type_name
        parms = {
            'srsname': 'EPSG:4326',
            'typename': type_name,
            'url': '%s://%s:%s/?map=%s' % (cls.protocol, cls.hostname, cls.port, cls.project_path),
            'version': 'auto',
            'table': '',
        }
        if authcfg is not None:
            parms.update({'authcfg': authcfg})
        uri = ' '.join([("%s='%s'" % (k, v)) for k, v in list(parms.items())])
        wfs_layer = QgsVectorLayer(uri, layer_name, 'WFS')
        return wfs_layer

    @classmethod
    def _getWMSLayer(cls, layers, layer_name=None, authcfg=None):
        """
        WMS layer factory
        """
        if layer_name is None:
            layer_name = 'wms_' + layers.replace(',', '')
        parms = {
            'crs': 'EPSG:4326',
            'url': '%s://%s:%s/?map=%s' % (cls.protocol, cls.hostname, cls.port, cls.project_path),
            # This is needed because of a really weird implementation in QGIS Server, that
            # replaces _ in the the real layer name with spaces
            'layers': urllib.parse.quote(layers.replace('_', ' ')),
            'styles': '',
            'version': 'auto',
            #'sql': '',
        }
        if authcfg is not None:
            parms.update({'authcfg': authcfg})
        uri = '&'.join([("%s=%s" % (k, v.replace('=', '%3D'))) for k, v in list(parms.items())])
        wms_layer = QgsRasterLayer(uri, layer_name, 'wms')
        return wms_layer

    @classmethod
    def _getGeoJsonLayer(cls, type_name, layer_name=None, authcfg=None):
        """
        OGR layer factory
        """
        if layer_name is None:
            layer_name = 'geojson_' + type_name
        uri = '%s://%s:%s/?MAP=%s&SERVICE=WFS&REQUEST=GetFeature&TYPENAME=%s&VERSION=2.0.0&OUTPUTFORMAT=geojson' % (cls.protocol, cls.hostname, cls.port, cls.project_path, urllib.parse.quote(type_name))
        if authcfg is not None:
            uri += " authcfg='%s'" % authcfg
        geojson_layer = QgsVectorLayer(uri, layer_name, 'ogr')
        return geojson_layer

    def testValidAuthAccess(self):
        """
        Access the HTTP Basic protected layer with valid credentials
        """
        wfs_layer = self._getWFSLayer('testlayer_èé', authcfg=self.auth_config.id())
        self.assertTrue(wfs_layer.isValid())
        wms_layer = self._getWMSLayer('testlayer_èé', authcfg=self.auth_config.id())
        self.assertTrue(wms_layer.isValid())
        geojson_layer = self._getGeoJsonLayer('testlayer_èé', authcfg=self.auth_config.id())
        self.assertTrue(geojson_layer.isValid())

    def testInvalidAuthAccess(self):
        """
        Access the HTTP Basic protected layer with no credentials
        """
        wfs_layer = self._getWFSLayer('testlayer èé')
        self.assertFalse(wfs_layer.isValid())
        wms_layer = self._getWMSLayer('testlayer_èé')
        self.assertFalse(wms_layer.isValid())
        geojson_layer = self._getGeoJsonLayer('testlayer_èé')
        self.assertFalse(geojson_layer.isValid())

    def testInvalidAuthFileDownload(self):
        """
        Download a protected map tile without authcfg
        """
        qs = "?" + "&".join(["%s=%s" % i for i in list({
            "MAP": urllib.parse.quote(self.project_path),
            "SERVICE": "WMS",
            "VERSION": "1.1.1",
            "REQUEST": "GetMap",
            "LAYERS": "testlayer_èé".replace('_', '%20'),
            "STYLES": "",
            "FORMAT": "image/png",
            "BBOX": "-16817707,-4710778,5696513,14587125",
            "HEIGHT": "500",
            "WIDTH": "500",
            "CRS": "EPSG:3857"
        }.items())])
        url = '%s://%s:%s/%s' % (self.protocol, self.hostname, self.port, qs)
        destination = tempfile.mktemp()
        loop = QEventLoop()
        downloader = QgsFileDownloader(QUrl(url), destination, None, False)
        downloader.downloadCompleted.connect(partial(self._set_slot, 'completed'))
        downloader.downloadExited.connect(partial(self._set_slot, 'exited'))
        downloader.downloadCanceled.connect(partial(self._set_slot, 'canceled'))
        downloader.downloadError.connect(partial(self._set_slot, 'error'))
        downloader.downloadProgress.connect(partial(self._set_slot, 'progress'))
        downloader.downloadExited.connect(loop.quit)
        loop.exec_()
        self.assertTrue(self.error_was_called)
        self.assertTrue("Download failed: Host requires authentication" in str(self.error_args), "Error args is: %s" % str(self.error_args))

    def testValidAuthFileDownload(self):
        """
        Download a map tile with valid authcfg
        """
        qs = "?" + "&".join(["%s=%s" % i for i in list({
            "MAP": urllib.parse.quote(self.project_path),
            "SERVICE": "WMS",
            "VERSION": "1.1.1",
            "REQUEST": "GetMap",
            "LAYERS": "testlayer_èé".replace('_', '%20'),
            "STYLES": "",
            "FORMAT": "image/png",
            "BBOX": "-16817707,-4710778,5696513,14587125",
            "HEIGHT": "500",
            "WIDTH": "500",
            "CRS": "EPSG:3857"
        }.items())])
        url = '%s://%s:%s/%s' % (self.protocol, self.hostname, self.port, qs)
        destination = tempfile.mktemp()
        loop = QEventLoop()
        downloader = QgsFileDownloader(QUrl(url), destination, self.auth_config.id(), False)
        downloader.downloadCompleted.connect(partial(self._set_slot, 'completed'))
        downloader.downloadExited.connect(partial(self._set_slot, 'exited'))
        downloader.downloadCanceled.connect(partial(self._set_slot, 'canceled'))
        downloader.downloadError.connect(partial(self._set_slot, 'error'))
        downloader.downloadProgress.connect(partial(self._set_slot, 'progress'))
        downloader.downloadExited.connect(loop.quit)
        loop.exec_()
        # Check the we've got a likely PNG image
        self.assertTrue(self.completed_was_called)
        self.assertTrue(os.path.getsize(destination) > 2000, "Image size: %s" % os.path.getsize(destination))
        with open(destination, 'rb') as f:
            self.assertTrue(b'PNG' in f.read())  # is a PNG

    def _set_slot(self, *args, **kwargs):
        """Record that a downloader signal fired, as <name>_was_called / <name>_args."""
        #print('_set_slot(%s) called' % args[0])
        setattr(self, args[0] + '_was_called', True)
        setattr(self, args[0] + '_args', args)
if __name__ == '__main__':
unittest.main()
| gpl-2.0 |
vschw/smartmeter_release | odroidw_meter.py | 1 | 10958 | #! /usr/bin/env python
# Filename: odroidw_meter.py
# -*- coding: utf-8 -*-
"""Smart meter application module for Odroid W.
Setup Description
#################
This application module utilizes the external 12-bit MCP3208 ADC connected
to the Odroid W via SPI to be used as a real-time smart-power-meter. Current
clamps in series with burden resistors are connected to the input channels of
the MCP3208. The MCP3208 is connected to the GPIO port of the W using the
following configuration:
MCP3208
+------+
CH0 |1 16| V_DD --> 3V3 (Pin 1)
CH1 |2 15| V_REF --> 3V3 (Pin 1)
CH2 |3 14| AGND --> GND (Pin 6)
CH3 |4 13| CLK --> SCLK (Pin 23)
CH4 |5 12| D_OUT --> MISO (Pin 21)
CH5 |6 11| D_IN --> MOSI (Pin 19)
CH6 |7 10| CS/SHDN --> CE0 (Pin 26)
CH7 |8 9| DGND --> GND (Pin 6)
+------+
Method of Operation
###################
Current transducers (clamps) are used to measure AC currents. An AC current in
the primary winding of the transducer produces an alternating magnetic field
in the core, which then induces an alternating current in the secondary
winding circuit. A burdon resistor, which is connected in series within this
circuit, converts the current into a proportional voltage. This voltage is
measured by the ADC and send via SPI to the W.
The goal is to identify the amplitude of each phase as precisely as possible
to determine the power consumption in 60Hz intervals (main frequency).
Assuming a constant grid voltage, current can be translated into power.
The amplitude of each phase is identified, measured, and submitted to a remote
server. Assuming a constant grid voltage, the current is translated to power,
the data is communicated to the server and saved in a database. This package
includes two possible options for data communication:
1. send_http_get() sends data via http-get requests. A server-side scripts
listens to events and writes the data into a local database.
2. ssh_to_db() establishes a ssh connection to the server and uses paramiko
to directly write to the database.
This module makes use of threading to enable data collection while data is
communicated.
Dependencies
############
Refer to setup_instructions.rst for dependencies and installation
instructions.
Module
######
Attributes:
average (list): dynamic storage space for current amplitudes
"""
import spidev
import wiringpi2
import thread
import time
import datetime
import threading
from threading import Thread, enumerate
from urllib import urlopen
from time import sleep
import pygame
import os
import numpy
import paramiko
import ConfigParser
import socket
__author__ = "vschw"
__copyright__ = "Copyright 2015, University of Hawaii at Manoa"
__credits__ = ["Reza Ghorbani"]
__license__ = "GPL"
__version__ = "0.1"
__maintainer__ = "vschw"
__email__ = "volkers@hawaii.edu"
__status__ = "alpha"
average = [], []
UPDATE_INTERVAL = 0.0001
def init_wiringpi2():
    """Enables wiringPi2 on Odroid W.
    This function enables access to all GPIOs of the W.
    Furthermore, both analog (ADC) ports are opened.
    """
    wiringpi2.wiringPiSetup()
    print 'wiringPi loaded'
    pass
def init_spidev():
    """Enables spidev on Odroid W.
    This function enables access SPI on the W utilizing
    bus 0, device 1 (/dev/spidev0.1) from the kernel driver.
    """
    global spi
    spi = spidev.SpiDev()
    spi.open(0, 1)  # bus 0, chip-select 1
    print 'SPI initialized'
def init_tft():
    """Enables TFT screen on fb1.
    Points SDL at the secondary framebuffer and opens a 320x240 pygame surface.
    """
    os.environ["SDL_FBDEV"] = "/dev/fb1"
    os.environ['SDL_VIDEODRIVER']="fbcon"
    global screen
    pygame.init()
    pygame.mouse.set_visible(0)
    size = width, height = 320, 240
    screen = pygame.display.set_mode(size)
    print 'TFT screen initialized'
def init_config():
    """Loads configuration data from config.ini
    Populates the module-level globals used by init_ssh() and ssh_to_db().
    """
    global ip, usr, key_path, db_name, node
    config = ConfigParser.ConfigParser()
    config.read('config.ini')
    ip = config.get('ssh_login', 'ip')
    usr = config.get('ssh_login', 'username')
    key_path = config.get('ssh_login', 'key_path')
    db_name = config.get('db_login', 'db_name')
    node = config.get('device_info', 'node')
def init_ssh():
    """SSH to the server using paramiko.
    Reads connection settings via init_config() and stores the client in the
    module-level `ssh` global for reuse by ssh_to_db().
    """
    global ssh
    init_config()
    ssh = paramiko.SSHClient()
    # Accept unknown host keys automatically (no known_hosts maintenance).
    ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
    ssh.connect(ip, username=usr, key_filename=key_path)
    print 'SSH Connection established'
def bit_to_power(bit, **kwargs):
    """Converts ADC readings to power in Watt.

    Args:
        bit (list): two ADC readings [channel0, channel1] in raw counts.
        **kwargs: `precision` -- full-scale ADC count (default 4096, 12 bit);
            `conversion` -- Watts at full scale (default 3400).

    Returns:
        list: [power0, power1] in Watt, truncated to int.
    """
    conversion = kwargs.get('conversion', 3400)
    precision = kwargs.get('precision', 4096)
    # Linear scaling: full-scale ADC count corresponds to `conversion` Watt.
    return [int(reading * conversion / precision) for reading in bit[:2]]
def average_bit():
    """Returns average bit value of current 'average'-list.

    Despite the name, the central tendency used is numpy.median (robust
    against sampling spikes). An empty channel contributes 0.

    Returns:
        list: [median(channel0), median(channel1)] in raw ADC counts.
    """
    global average
    if average[0] and average[1]:
        return [numpy.median(average[0]), numpy.median(average[1])]
    elif average[0] and not average[1]:
        return [numpy.median(average[0]), 0]
    elif average[1] and not average[0]:
        return [0, numpy.median(average[1])]
    else:
        return [0, 0]
def send_http_get(power, sleeptime):
    """Submits http get request to server.

    Args:
        power (list): [circuit1_W, circuit2_W] values to report.
        sleeptime (float): pause after submission, in seconds.
    """
    global average
    # Timestamp with 10 ms resolution (microseconds truncated by [:-4]).
    timenow = int(datetime.datetime.now().strftime('%Y%m%d%H%M%S%f')[:-4])
    sites = ['http://redlab.colo.hawaii.edu/dirgtrams+'+str(power[0])+'+'+str(power[1])+'+'+str(timenow)]
    print sites
    # Reset sample buffers so the ADC threads start a fresh window.
    average = [], []
    multi_get(sites, timeout=1)
    time.sleep(sleeptime)
    print 'data submitted: ' + str(power) + 'W, list length: ' + str(len(average[0]))
    display_power(power)
def ssh_to_db(power, sleeptime):
    """Uses Paramiko to ssh to server and insert to mongoDB.

    Args:
        power (list): [circuit1_W, circuit2_W] values to store.
        sleeptime (float): pause after submission, in seconds.
    """
    global average
    timenow = int(time.time())
    # Serialize raw ADC samples as comma-separated strings for storage.
    phase_data_1 = str(average[0]).replace('[','').replace(']','').replace(' ','')
    phase_data_2 = str(average[1]).replace('[','').replace(']','').replace(' ','')
    try:
        # One mongo insert per circuit, executed remotely over SSH.
        ssh.exec_command('mongo '+db_name+' --eval "db.nodes.insert({'
                         'node: '+node+''
                         ', timestamp: '+str(timenow)+''
                         ', variable: \'circuit_1\''
                         ', value: '+str(power[0])+''
                         ', raw_adc_data: \''+phase_data_1+'\'})"')
        ssh.exec_command('mongo '+db_name+' --eval "db.nodes.insert({'
                         'node: '+node+''
                         ', timestamp: '+str(timenow)+''
                         ', variable: \'circuit_2\''
                         ', value: '+str(power[1])+''
                         ', raw_adc_data: \''+phase_data_2+'\'})"')
    except socket.error as e:
        # Connection dropped: re-establish SSH. This sample is not retried.
        init_ssh()
    # Reset sample buffers so the ADC threads start a fresh window.
    average = [], []
    time.sleep(sleeptime)
    print 'data sent: '+str(power[0])+'W and '+str(power[1])+'W'
    display_power(power)
def submit_data_thread(d, **kwargs):
    """Initializes power data submission.

    Runs forever: converts the current sample medians to Watt and ships
    them to the server once per loop.

    Args:
        d: unused placeholder (thread.start_new_thread requires an args tuple).
        **kwargs: `sleeptime`, `precision`, `conversion` overrides.
    """
    conversion = kwargs.get('conversion', 3400)
    precision = kwargs.get('precision', 4096)
    sleeptime = kwargs.get('sleeptime', 0.5)
    while True:
        power = bit_to_power(average_bit(), precision=precision, conversion=conversion)
        #send_http_get(power, sleeptime)
        ssh_to_db(power, sleeptime)
def adcread_W(channel, **kwargs):
    """Reads voltage signal of on-board Odroid W ADC.

    Samples forever and appends detected waveform peaks to `average[channel]`.

    Args:
        channel (int): index into the global `average` buffers (0 or 1).
        **kwargs: `adc_port` -- on-board ADC port to read (default 1).
    """
    adc_port = kwargs.get('adc_port', 1)
    # Sliding window of the last four samples, newest first.
    sig = [0, 0, 0, 0]
    while True:
        signal = wiringpi2.analogRead(adc_port)
        # Peak detection: the middle of the window is a local maximum.
        if signal < sig[0] <= sig[1] >= sig[2] > sig[3]:
            average[channel].append(signal)
            time.sleep(0.01)
        sig = [signal] + sig
        del sig[-1]
def adcread_MCP3208(channel0, channel1):
    """Reads voltage signal of external MCP3208 ADC.

    Samples both channels forever over SPI and appends detected waveform
    peaks to the global `average` buffers. Never returns.

    Args:
        channel0 (int): first MCP3208 input channel (0-7).
        channel1 (int): second MCP3208 input channel (0-7).
    """
    # Sliding windows of the last four 12-bit samples, newest first.
    sig1 = [0, 0, 0, 0]
    sig2 = [0, 0, 0, 0]
    while True:
        # MCP3208 single-ended read: start bit (4) | single/diff (2) | channel bits.
        r1 = spi.xfer2([4 | 2 | (channel0 >> 2), (channel0 & 3) << 6, 0])
        # Assemble the 12-bit result from the low nibble of byte 1 plus byte 2.
        signal1 = ((r1[1] & 15) << 8) + r1[2]
        # Peak detection: the middle of the window is a local maximum.
        if signal1 < sig1[0] <= sig1[1] >= sig1[2] > sig1[3]:
            average[channel0].append(sig1[1])
            #time.sleep(0.00001)
            #print str(signal1)+' '+str(sig1[0])+' '+str(sig1[1])+' '+str(sig1[2])+' '+str(sig1[3])
        r2 = spi.xfer2([4 | 2 | (channel1 >> 2), (channel1 & 3) << 6, 0])
        signal2 = ((r2[1] & 15) << 8) + r2[2]
        if signal2 < sig2[0] <= sig2[1] >= sig2[2] > sig2[3]:
            average[channel1].append(sig2[1])
            #time.sleep(0.00001)
            #print str(signal2)+' '+str(sig2[0])+' '+str(sig2[1])+' '+str(sig2[2])+' '+str(sig2[3])
        sig1 = [signal1] + sig1
        del sig1[-1]
        sig2 = [signal2] + sig2
        del sig2[-1]
def multi_get(uris, timeout=1.0):
    """Fetch several URLs concurrently via URLThread workers.

    Args:
        uris (list): URLs to request.
        timeout (float): overall wait budget in seconds.

    Returns:
        list: (url, response_body_or_None) per thread; None if still pending
        when the timeout expired.
    """
    def alive_count(lst):
        # Number of worker threads still running (Python 2 map/reduce style).
        alive = map(lambda x: 1 if x.isAlive() else 0, lst)
        return reduce(lambda a, b: a + b, alive)
    threads = [ URLThread(uri) for uri in uris ]
    for thread in threads:
        thread.start()
    # Busy-wait in UPDATE_INTERVAL steps until all done or budget exhausted.
    while alive_count(threads) > 0 and timeout > 0.0:
        timeout = timeout - UPDATE_INTERVAL
        sleep(UPDATE_INTERVAL)
    return [(x.url, x.response) for x in threads ]
def display_power(power):
    """Uses pygame to display real-time power data on LCD screen.

    ``power`` is a two-element sequence of watt values (circuit 1 and 2).
    Assumes the module-level pygame ``screen`` surface is initialized.
    """
    # Clear the screen to black before redrawing all labels.
    screen.fill((0,0,0))
    smallfont = pygame.font.SysFont("Monofonto", 30)
    descr = smallfont.render("Current Power Consumption:", 1, (255, 255, 255))
    screen.blit(descr, (10, 10))
    c1font = pygame.font.SysFont("Monofonto", 35)
    c1label = c1font.render("Circuit 1:", 1, (255, 255, 0))
    screen.blit(c1label, (40, 45))
    myfont = pygame.font.SysFont("Monofonto", 100)
    powerlabel1 = myfont.render(str(power[0])+"W", 1, (0, 255, 30))
    screen.blit(powerlabel1, (40, 65))
    c2label = c1font.render("Circuit 2:", 1, (255, 255, 0))
    screen.blit(c2label, (40, 140))
    powerlabel2 = myfont.render(str(power[1])+"W", 1, (0, 255, 30))
    screen.blit(powerlabel2, (40, 160))
    # Push the frame to the display.
    pygame.display.update()
class URLThread(Thread):
    """Worker thread that fetches a single URL.

    The body is left in ``self.response``, which stays None until the
    fetch completes.  NOTE(review): exceptions from urlopen() are not
    caught, so a failed fetch kills the thread and leaves response None.
    """
    def __init__(self, url):
        super(URLThread, self).__init__()
        self.url = url
        self.response = None
    def run(self):
        # Blocking fetch; multi_get() polls thread liveness and reads
        # url/response once the thread finishes.
        self.request = urlopen(self.url)
        self.response = self.request.read()
if __name__ == "__main__":
    # One-time hardware / connectivity setup.
    init_wiringpi2()
    init_spidev()
    init_tft()
    init_ssh()
    # Push power readings to the database from a background thread.
    thread.start_new_thread(submit_data_thread, (1,), {'sleeptime': 0.9, 'conversion': 7400})
    # BUG FIX: the old line `thread.start_new_thread(adcread_MCP3208(0, 1))`
    # misused the API -- start_new_thread() takes (function, args_tuple),
    # but it was handed the *result* of calling adcread_MCP3208 inline.
    # Since adcread_MCP3208 loops forever, running it directly in the main
    # thread is the actual behavior and also keeps the process alive.
    adcread_MCP3208(0, 1)
| gpl-2.0 |
GoogleChrome/chromium-dashboard | pages/schedule.py | 1 | 3561 | from __future__ import division
from __future__ import print_function
# -*- coding: utf-8 -*-
# Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License")
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
__author__ = 'ericbidelman@chromium.org (Eric Bidelman)'
import json
import logging
import os
from framework import permissions
from framework import ramcache
import requests
# from google.appengine.api import users
from framework import users
from framework import basehandlers
from internals import models
import settings
from internals import fetchchannels
SCHEDULE_CACHE_TIME = 60 * 60 # 1 hour
# TODO(shivamag00): remove these methods and use Channels API instead
def fetch_chrome_release_info(version):
  """Return the release schedule info for one Chrome milestone.

  Fetches the milestone schedule from chromiumdash and caches it in
  ramcache for SCHEDULE_CACHE_TIME.  On any failure (non-200, malformed
  JSON, missing 'mstones'), returns an uncached placeholder dict with
  None dates so callers always get the same shape.
  """
  key = 'chromerelease|%s' % version
  data = ramcache.get(key)
  if data is None:
    url = ('https://chromiumdash.appspot.com/fetch_milestone_schedule?'
           'mstone=%s' % version)
    result = requests.get(url, timeout=60)
    if result.status_code == 200:
      try:
        logging.info('result.content is:\n%s', result.content)
        result_json = json.loads(result.content)
        if 'mstones' in result_json:
          data = result_json['mstones'][0]
          # Strip fields the UI does not need.  Use pop() with a default:
          # the old bare `del data[...]` raised KeyError whenever
          # chromiumdash omitted a field, and KeyError is not caught by
          # the ValueError handler below.
          data.pop('owners', None)
          data.pop('feature_freeze', None)
          data.pop('ldaps', None)
          ramcache.set(key, data, time=SCHEDULE_CACHE_TIME)
      except ValueError:
        pass  # Handled by next statement
  if not data:
    data = {
        'stable_date': None,
        'earliest_beta': None,
        'latest_beta': None,
        'mstone': version,
        'version': version,
    }
    # Note: we don't put placeholder data into ramcache.
  return data
def construct_chrome_channels_details():
  """Build a {channel_name: schedule_info_dict} mapping from Omaha data.

  Uses the Windows version list from the Omaha feed; each channel entry is
  the milestone schedule (see fetch_chrome_release_info) plus a 'version'
  key with the major version number.
  NOTE(review): assumes 'stable' and 'beta' channels are always present in
  the Omaha data -- a KeyError would escape otherwise; confirm upstream.
  """
  omaha_data = fetchchannels.get_omaha_data()
  channels = {}
  win_versions = omaha_data[0]['versions']
  for v in win_versions:
    channel = v['channel']
    # Major version is the first dotted component, e.g. '96' of '96.0.4664.45'.
    major_version = int(v['version'].split('.')[0])
    channels[channel] = fetch_chrome_release_info(major_version)
    channels[channel]['version'] = major_version
  # Adjust for the brief period after the next milestone gets promoted to
  # the stable/beta channel and their major versions are the same: shift
  # beta and dev forward by one milestone each.
  if channels['stable']['version'] == channels['beta']['version']:
    new_beta_version = channels['stable']['version'] + 1
    channels['beta'] = fetch_chrome_release_info(new_beta_version)
    channels['beta']['version'] = new_beta_version
    new_dev_version = channels['beta']['version'] + 1
    channels['dev'] = fetch_chrome_release_info(new_dev_version)
    channels['dev']['version'] = new_dev_version
  return channels
class ScheduleHandler(basehandlers.FlaskHandler):
  """Renders the Chrome release schedule page."""

  TEMPLATE_PATH = 'schedule.html'

  # TODO(shivamag00): fetch data from Channels API and Features API using JS
  # instead of passing it here
  def get_template_data(self):
    """Return the template context: a pretty-printed channels JSON blob."""
    channels_json = json.dumps(
        construct_chrome_channels_details(), indent=4)
    return {'channels': channels_json}
# Module-level WSGI application: routes /features/schedule to ScheduleHandler.
app = basehandlers.FlaskApplication([
  ('/features/schedule', ScheduleHandler),
], debug=settings.DEBUG)
| apache-2.0 |
zstackorg/zstack-woodpecker | integrationtest/vm/multihosts/volumes/test_volumes_robot_2h_wfairly.py | 4 | 2395 | '''
Robot testing for test volume operations for 2 hours. Will use weight fairly
strategy.
@author: Youyk
'''
import zstackwoodpecker.action_select as action_select
import zstackwoodpecker.test_util as test_util
import zstackwoodpecker.test_state as test_state
import zstackwoodpecker.test_lib as test_lib
import zstackwoodpecker.header.vm as vm_header
import time
# Woodpecker test-case configuration: overall timeout (seconds) and whether
# the case must run exclusively (noparallel=False allows parallel runs).
_config_ = {
        'timeout' : 9000,
        'noparallel' : False
        }
# Shared helper object and state dict used by test() and error_cleanup().
test_stub = test_lib.lib_get_test_stub()
test_dict = test_state.TestStateDict()
def test():
    """Run randomized VM/volume operations for up to two hours.

    Exercises vm create/stop/start/reboot/destroy and volume
    create/attach/detach/delete via the woodpecker robot framework with a
    weight-fair selection strategy.  SG, VIP and snapshot actions are
    excluded; volume actions get double weight.  Passes when the 2-hour
    budget is exhausted with all status checks green.
    """
    test_util.test_dsc('''
    Will doing random test operations, including vm create/stop/start/reboot
    /destroy, volume create/attach/detach/delete. It doesn't include SG
    VIP and snapshots operations. If reach max 4 coexisting running vm,
    testing will success and quit.
    ''')
    test_util.test_dsc('Random Test Begin. Test target: 4 coexisting running VM (not include VR).')
    robot_test_obj = test_util.Robot_Test_Object()
    robot_test_obj.set_test_dict(test_dict)
    # Exclude security-group, VIP and snapshot actions from the pool.
    robot_test_obj.set_exclusive_actions_list(\
            test_state.TestAction.sg_actions \
            + test_state.TestAction.vip_actions \
            + test_state.TestAction.snapshot_actions)
    # Double the weight of volume actions so they are picked more often.
    priority_actions = test_state.TestAction.volume_actions * 2
    priority_action_obj = action_select.ActionPriority()
    priority_action_obj.add_priority_action_list(priority_actions)
    robot_test_obj.set_priority_actions(priority_action_obj)
    robot_test_obj.set_random_type(action_select.weight_fair_strategy)
    rounds = 1
    current_time = time.time()
    timeout_time = current_time + 7200  # 2-hour wall-clock budget
    while time.time() <= timeout_time:
        # print() form works on both Python 2 and 3; the original used a
        # Python-2-only print statement here.
        print("test_dict: %s" % test_dict)
        test_util.test_dsc('New round %s starts: random operation pickup.' % rounds)
        test_lib.lib_vm_random_operation(robot_test_obj)
        test_util.test_dsc('Round %s finished. Begin status checking.' % rounds)
        rounds += 1
        test_lib.lib_robot_status_check(test_dict)
    test_util.test_dsc('Reach test pass exit criterial.')
    test_lib.lib_robot_cleanup(test_dict)
    test_util.test_pass('Create random VM Test Success')
# Called by the woodpecker framework only if test() raised an exception;
# tears down any resources recorded in the shared test_dict.
def error_cleanup():
    test_lib.lib_error_cleanup(test_dict)
| apache-2.0 |
alex-dot/upwdchg | setup.py | 2 | 3635 | #!/usr/bin/env python
# -*- mode:python; tab-width:4; c-basic-offset:4; intent-tabs-mode:nil; -*-
# ex: filetype=python tabstop=4 softtabstop=4 shiftwidth=4 expandtab autoindent smartindent
# Modules
from distutils.core import setup
import os
# Helpers
def filesInDir( sDirectory ):
    """Return the regular files directly inside *sDirectory*.

    Entries come back as '<sDirectory>/<name>' paths (any trailing
    separators on sDirectory are collapsed); subdirectories and other
    non-file entries are skipped.  Order follows os.listdir().
    """
    base = sDirectory.rstrip( os.sep )
    return [base+os.sep+name
            for name in os.listdir( sDirectory )
            if os.path.isfile( base+os.sep+name )]
# Setup
# Standard distutils packaging metadata for the UPwdChg backend.
setup(
    # FIX: was 'upwdpkg' -- a typo; every other reference in this file
    # (description, packages, url, download_url) uses the UPwdChg name.
    name = 'upwdchg',
    description = 'Universal Password Changer (UPwdChg)',
    long_description = \
"""
The Universal Password Changer (UPwdChg) allows one to synchronize passwords
between multiple and different user directory systems - LDAP, MIT Kerberos,
Microsoft Active Directory, etc. - with an emphasis on flexibility, customiz-
ability and untrusted frontends security.
The Universal Password Changer (UPwdChg) is split in two parts:
 - a frontend, running on any user-accessible (untrusted) host, which allows
   users to request password changes
 - a backend, running on a (trusted) management host, where password change
   requests are processed
In order to deal with the lower security of the frontend host, public key
cryptography is used:
 - on the frontend, password change requests are encrypted as password
   change tokens, using the RSA public key of the processing backend
 - password change tokens are saved in a storage location shared between
   the frontend and the backend (e.g. NFS, CIFS, SSHFS, rsync, etc.)
 - on the backend, password change tokens are decrypted using the backend
   RSA private key, and processed through customizable plugins
Password change tokens are actually made of:
 - the password change data - request timestamp, username, old and new
   passwords - along corresponding SHA-256 digest, encrypted using
   AES-256-CBC symetric cipher and base64 encoded
 - the symetric cipher key and initialization vector (IV), encrypted with
   the supplied RSA public key and base64-encoded
Once decrypted, password change tokens/requests are processed through various
user-customizable plugins:
 - validation plugins, checking credentials validity, password policies
   compliance, etc.
 - actual password change plugins, performing the requested password change
   on multiple and different backends, such as LDAP, MIT Kerberos, Microsoft
   Active Directory, etc.
 - any other tasks that may be required as part of a password change operation
""",
    # NOTE(review): version is None when the VERSION env variable is unset;
    # the build scripts are expected to export it.
    version = os.environ.get('VERSION'),
    author = 'Cedric Dufour',
    author_email = 'http://cedric.dufour.name',
    license = 'GPL-3',
    url = 'http://cedric.dufour.name/software/upwdchg',
    download_url = 'https://github.com/cedric-dufour/upwdchg',
    packages = [ 'UPwdChg', 'UPwdChg.Util' ],
    package_dir = { '': 'backend' },
    requires = [ 'M2Crypto', 'argparse', 'configobj', 'daemon', 'ldap' ],
    scripts = [ 'backend/upwdchg-token', 'backend/upwdchg-process', 'backend/upwdchg-daemon' ],
    data_files = [
        ( 'share/upwdchg/backend/plugins', filesInDir( 'backend/plugins' ) ),
        ( 'share/upwdchg/backend/plugins/2to3', filesInDir( 'backend/plugins/2to3' ) ),
        ( 'share/upwdchg/backend', [ 'backend/upwdchg.conf.spec', 'backend/upwdchg.conf.sample' ] ),
    ],
)
| gpl-3.0 |
repotvsupertuga/tvsupertuga.repository | script.module.beautifulsoup4/lib/bs4/builder/_lxml.py | 446 | 8661 | __all__ = [
'LXMLTreeBuilderForXML',
'LXMLTreeBuilder',
]
from io import BytesIO
from StringIO import StringIO
import collections
from lxml import etree
from bs4.element import Comment, Doctype, NamespacedAttribute
from bs4.builder import (
FAST,
HTML,
HTMLTreeBuilder,
PERMISSIVE,
ParserRejectedMarkup,
TreeBuilder,
XML)
from bs4.dammit import EncodingDetector
LXML = 'lxml'
class LXMLTreeBuilderForXML(TreeBuilder):
    """lxml-based tree builder for XML documents (Python 2 era bs4).

    Acts as an lxml parser *target*: lxml calls start()/end()/data()/...
    below, and those callbacks forward into the BeautifulSoup object.
    """
    DEFAULT_PARSER_CLASS = etree.XMLParser
    is_xml = True
    # Well, it's permissive by XML parser standards.
    features = [LXML, XML, FAST, PERMISSIVE]
    # Number of bytes handed to lxml per feed() iteration.
    CHUNK_SIZE = 512
    # This namespace mapping is specified in the XML Namespace
    # standard.
    DEFAULT_NSMAPS = {'http://www.w3.org/XML/1998/namespace' : "xml"}
    def default_parser(self, encoding):
        """Return the parser object or class to use for *encoding*."""
        # This can either return a parser object or a class, which
        # will be instantiated with default arguments.
        if self._default_parser is not None:
            return self._default_parser
        return etree.XMLParser(
            target=self, strip_cdata=False, recover=True, encoding=encoding)
    def parser_for(self, encoding):
        """Return a parser *instance* configured for *encoding*."""
        # Use the default parser.
        parser = self.default_parser(encoding)
        if isinstance(parser, collections.Callable):
            # Instantiate the parser with default arguments
            parser = parser(target=self, strip_cdata=False, encoding=encoding)
        return parser
    def __init__(self, parser=None, empty_element_tags=None):
        # TODO: Issue a warning if parser is present but not a
        # callable, since that means there's no way to create new
        # parsers for different encodings.
        self._default_parser = parser
        if empty_element_tags is not None:
            self.empty_element_tags = set(empty_element_tags)
        self.soup = None
        # Stack of inverted namespace maps; None entries mark tags that
        # introduced no new namespaces (see start()/end()).
        self.nsmaps = [self.DEFAULT_NSMAPS]
    def _getNsTag(self, tag):
        # Split the namespace URL out of a fully-qualified lxml tag
        # name. Copied from lxml's src/lxml/sax.py.
        if tag[0] == '{':
            return tuple(tag[1:].split('}', 1))
        else:
            return (None, tag)
    def prepare_markup(self, markup, user_specified_encoding=None,
                      document_declared_encoding=None):
        """
        :yield: A series of 4-tuples.
         (markup, encoding, declared encoding,
          has undergone character replacement)
        Each 4-tuple represents a strategy for parsing the document.
        """
        if isinstance(markup, unicode):
            # We were given Unicode. Maybe lxml can parse Unicode on
            # this system?
            yield markup, None, document_declared_encoding, False
        if isinstance(markup, unicode):
            # No, apparently not. Convert the Unicode to UTF-8 and
            # tell lxml to parse it as UTF-8.
            yield (markup.encode("utf8"), "utf8",
                  document_declared_encoding, False)
        # Instead of using UnicodeDammit to convert the bytestring to
        # Unicode using different encodings, use EncodingDetector to
        # iterate over the encodings, and tell lxml to try to parse
        # the document as each one in turn.
        is_html = not self.is_xml
        try_encodings = [user_specified_encoding, document_declared_encoding]
        detector = EncodingDetector(markup, try_encodings, is_html)
        for encoding in detector.encodings:
            yield (detector.markup, encoding, document_declared_encoding, False)
    def feed(self, markup):
        """Feed *markup* (bytes, unicode or file-like) to lxml in chunks."""
        if isinstance(markup, bytes):
            markup = BytesIO(markup)
        elif isinstance(markup, unicode):
            markup = StringIO(markup)
        # Call feed() at least once, even if the markup is empty,
        # or the parser won't be initialized.
        data = markup.read(self.CHUNK_SIZE)
        try:
            self.parser = self.parser_for(self.soup.original_encoding)
            self.parser.feed(data)
            while len(data) != 0:
                # Now call feed() on the rest of the data, chunk by chunk.
                data = markup.read(self.CHUNK_SIZE)
                if len(data) != 0:
                    self.parser.feed(data)
            self.parser.close()
        except (UnicodeDecodeError, LookupError, etree.ParserError), e:
            raise ParserRejectedMarkup(str(e))
    def close(self):
        # Reset namespace state so the builder can be reused.
        self.nsmaps = [self.DEFAULT_NSMAPS]
    def start(self, name, attrs, nsmap={}):
        """lxml target callback: an element has started.

        NOTE: the mutable default ``nsmap={}`` is safe here because it is
        only read, never modified.
        """
        # Make sure attrs is a mutable dict--lxml may send an immutable dictproxy.
        attrs = dict(attrs)
        nsprefix = None
        # Invert each namespace map as it comes in.
        if len(self.nsmaps) > 1:
            # There are no new namespaces for this tag, but
            # non-default namespaces are in play, so we need a
            # separate tag stack to know when they end.
            self.nsmaps.append(None)
        elif len(nsmap) > 0:
            # A new namespace mapping has come into play.
            inverted_nsmap = dict((value, key) for key, value in nsmap.items())
            self.nsmaps.append(inverted_nsmap)
            # Also treat the namespace mapping as a set of attributes on the
            # tag, so we can recreate it later.
            attrs = attrs.copy()
            for prefix, namespace in nsmap.items():
                attribute = NamespacedAttribute(
                    "xmlns", prefix, "http://www.w3.org/2000/xmlns/")
                attrs[attribute] = namespace
        # Namespaces are in play. Find any attributes that came in
        # from lxml with namespaces attached to their names, and
        # turn then into NamespacedAttribute objects.
        new_attrs = {}
        for attr, value in attrs.items():
            namespace, attr = self._getNsTag(attr)
            if namespace is None:
                new_attrs[attr] = value
            else:
                nsprefix = self._prefix_for_namespace(namespace)
                attr = NamespacedAttribute(nsprefix, attr, namespace)
                new_attrs[attr] = value
        attrs = new_attrs
        namespace, name = self._getNsTag(name)
        nsprefix = self._prefix_for_namespace(namespace)
        self.soup.handle_starttag(name, namespace, nsprefix, attrs)
    def _prefix_for_namespace(self, namespace):
        """Find the currently active prefix for the given namespace."""
        if namespace is None:
            return None
        for inverted_nsmap in reversed(self.nsmaps):
            if inverted_nsmap is not None and namespace in inverted_nsmap:
                return inverted_nsmap[namespace]
        return None
    def end(self, name):
        """lxml target callback: an element has ended."""
        self.soup.endData()
        completed_tag = self.soup.tagStack[-1]
        namespace, name = self._getNsTag(name)
        nsprefix = None
        if namespace is not None:
            for inverted_nsmap in reversed(self.nsmaps):
                if inverted_nsmap is not None and namespace in inverted_nsmap:
                    nsprefix = inverted_nsmap[namespace]
                    break
        self.soup.handle_endtag(name, nsprefix)
        if len(self.nsmaps) > 1:
            # This tag, or one of its parents, introduced a namespace
            # mapping, so pop it off the stack.
            self.nsmaps.pop()
    def pi(self, target, data):
        # Processing instructions are deliberately ignored.
        pass
    def data(self, content):
        """lxml target callback: character data."""
        self.soup.handle_data(content)
    def doctype(self, name, pubid, system):
        """lxml target callback: a doctype declaration."""
        self.soup.endData()
        doctype = Doctype.for_name_and_ids(name, pubid, system)
        self.soup.object_was_parsed(doctype)
    def comment(self, content):
        "Handle comments as Comment objects."
        self.soup.endData()
        self.soup.handle_data(content)
        self.soup.endData(Comment)
    def test_fragment_to_document(self, fragment):
        """See `TreeBuilder`."""
        return u'<?xml version="1.0" encoding="utf-8"?>\n%s' % fragment
class LXMLTreeBuilder(HTMLTreeBuilder, LXMLTreeBuilderForXML):
    """lxml-based tree builder for HTML, using lxml's recovering HTML parser."""
    features = [LXML, HTML, FAST, PERMISSIVE]
    is_xml = False
    def default_parser(self, encoding):
        # Returns the class (not an instance); parser_for() instantiates it.
        return etree.HTMLParser
    def feed(self, markup):
        """Feed the whole markup to lxml's HTML parser in one call."""
        encoding = self.soup.original_encoding
        try:
            self.parser = self.parser_for(encoding)
            self.parser.feed(markup)
            self.parser.close()
        except (UnicodeDecodeError, LookupError, etree.ParserError), e:
            raise ParserRejectedMarkup(str(e))
    def test_fragment_to_document(self, fragment):
        """See `TreeBuilder`."""
        return u'<html><body>%s</body></html>' % fragment
| gpl-2.0 |
exelearning/iteexe | twisted/internet/_pollingfile.py | 14 | 7804 | # -*- test-case-name: twisted.web2.test -*-
"""
Implements a simple polling interface for file descriptors that don't work with
select() - this is pretty much only useful on Windows.
"""
from zope.interface import implements
from twisted.internet.interfaces import IConsumer, IProducer
MIN_TIMEOUT = 0.000000001
MAX_TIMEOUT = 0.1
class _PollableResource:
active = True
def activate(self):
self.active = True
def deactivate(self):
self.active = False
class _PollingTimer:
    """Adaptive polling loop driven by reactor.callLater.

    Polls every registered _PollableResource; the delay between polls
    shrinks (down to MIN_TIMEOUT) while work is being done and doubles
    (up to MAX_TIMEOUT) while the resources are idle.
    """
    # Everything is private here because it is really an implementation detail.
    def __init__(self, reactor):
        self.reactor = reactor
        self._resources = []
        self._pollTimer = None
        self._currentTimeout = MAX_TIMEOUT
        self._paused = False
    def _addPollableResource(self, res):
        # Register a resource and start polling if it is active.
        self._resources.append(res)
        self._checkPollingState()
    def _checkPollingState(self):
        # Poll only while at least one resource is active.
        for resource in self._resources:
            if resource.active:
                self._startPolling()
                break
        else:
            self._stopPolling()
    def _startPolling(self):
        if self._pollTimer is None:
            self._pollTimer = self._reschedule()
    def _stopPolling(self):
        if self._pollTimer is not None:
            self._pollTimer.cancel()
            self._pollTimer = None
    def _pause(self):
        self._paused = True
    def _unpause(self):
        self._paused = False
        self._checkPollingState()
    def _reschedule(self):
        # Returns the IDelayedCall, or None while paused.
        if not self._paused:
            return self.reactor.callLater(self._currentTimeout, self._pollEvent)
    def _pollEvent(self):
        # One poll pass: let every active resource do work, then adapt the
        # timeout based on how much work was reported.
        workUnits = 0.
        anyActive = []
        for resource in self._resources:
            if resource.active:
                workUnits += resource.checkWork()
                # Check AFTER work has been done
                if resource.active:
                    anyActive.append(resource)
        newTimeout = self._currentTimeout
        if workUnits:
            # Busy: poll faster, proportionally to the amount of work.
            newTimeout = self._currentTimeout / (workUnits + 1.)
            if newTimeout < MIN_TIMEOUT:
                newTimeout = MIN_TIMEOUT
        else:
            # Idle: back off exponentially.
            newTimeout = self._currentTimeout * 2.
            if newTimeout > MAX_TIMEOUT:
                newTimeout = MAX_TIMEOUT
        self._currentTimeout = newTimeout
        if anyActive:
            self._pollTimer = self._reschedule()
# If we ever (let's hope not) need the above functionality on UNIX, this could
# be factored into a different module.
import win32pipe
import win32file
import win32api
import pywintypes
class _PollableReadPipe(_PollableResource):
    """Polled reader around a win32 pipe handle.

    Each checkWork() call drains whatever is available on the pipe and
    hands it to receivedCallback; lostCallback fires once the pipe dies.
    """
    implements(IProducer)
    def __init__(self, pipe, receivedCallback, lostCallback):
        # pipe: win32 HANDLE to read from.
        self.pipe = pipe
        self.receivedCallback = receivedCallback
        self.lostCallback = lostCallback
    def checkWork(self):
        """Drain the pipe; return the number of bytes read (work units)."""
        numBytesRead = 0
        finished = 0
        while 1:
            try:
                # Peek first so ReadFile never blocks.
                buffer, bytesToRead, result = win32pipe.PeekNamedPipe(self.pipe, 1)
                # finished = (result == -1)
                if not bytesToRead:
                    break
                hr, data = win32file.ReadFile(self.pipe, bytesToRead, None)
                numBytesRead += len(data)
                self.receivedCallback(data)
            except win32api.error:
                # Peek/Read failing means the other end closed the pipe.
                finished = 1
                break
        if finished:
            self.cleanup()
        return numBytesRead
    def cleanup(self):
        # Stop being polled and notify the owner exactly once.
        self.deactivate()
        self.lostCallback()
    def close(self):
        try:
            win32api.CloseHandle(self.pipe)
        except pywintypes.error:
            # You can't close std handles...?
            pass
FULL_BUFFER_SIZE = 64 * 1024
class _PollableWritePipe(_PollableResource):
    """Polled writer around a win32 pipe handle.

    write() appends to an output queue; each checkWork() poll flushes as
    much as the (non-blocking) pipe accepts.  Implements the IConsumer
    producer/consumer flow-control protocol.
    """

    implements(IConsumer)

    def __init__(self, writePipe, lostCallback):
        self.disconnecting = False
        self.producer = None
        self.producerPaused = 0
        self.streamingProducer = 0
        self.outQueue = []
        self.writePipe = writePipe
        self.lostCallback = lostCallback
        try:
            # Put the pipe into non-blocking mode so WriteFile never stalls.
            win32pipe.SetNamedPipeHandleState(writePipe,
                                              win32pipe.PIPE_NOWAIT,
                                              None,
                                              None)
        except pywintypes.error:
            # Maybe it's an invalid handle.  Who knows.
            pass

    def close(self):
        # Flush-then-close: checkWork() calls writeConnectionLost() once
        # the queue drains.
        self.disconnecting = True

    def bufferFull(self):
        if self.producer is not None:
            self.producerPaused = 1
            self.producer.pauseProducing()

    def bufferEmpty(self):
        """Resume a paused producer; return True if one was resumed."""
        if self.producer is not None and ((not self.streamingProducer) or
                                          self.producerPaused):
            # BUG FIX: the original did `self.producer.producerPaused = 0`,
            # setting a stray attribute on the *producer* and never clearing
            # this consumer's own pause flag (fixed the same way upstream in
            # Twisted's _pollingfile).
            self.producerPaused = 0
            self.producer.resumeProducing()
            return True
        return False

    # almost-but-not-quite-exact copy-paste from abstract.FileDescriptor... ugh

    def registerProducer(self, producer, streaming):
        """Register to receive data from a producer.
        This sets this selectable to be a consumer for a producer.  When this
        selectable runs out of data on a write() call, it will ask the producer
        to resumeProducing(). A producer should implement the IProducer
        interface.
        FileDescriptor provides some infrastructure for producer methods.
        """
        if self.producer is not None:
            raise RuntimeError("Cannot register producer %s, because producer %s was never unregistered." % (producer, self.producer))
        if not self.active:
            producer.stopProducing()
        else:
            self.producer = producer
            self.streamingProducer = streaming
            if not streaming:
                producer.resumeProducing()

    def unregisterProducer(self):
        """Stop consuming data from a producer, without disconnecting.
        """
        self.producer = None

    def writeConnectionLost(self):
        # Stop polling, close the handle, notify the owner.
        self.deactivate()
        try:
            win32api.CloseHandle(self.writePipe)
        except pywintypes.error:
            # OMG what
            pass
        self.lostCallback()

    def writeSequence(self, seq):
        self.outQueue.extend(seq)

    def write(self, data):
        if self.disconnecting:
            return
        self.outQueue.append(data)
        if sum(map(len, self.outQueue)) > FULL_BUFFER_SIZE:
            self.bufferFull()

    def checkWork(self):
        """Flush the queue to the pipe; return bytes written (work units)."""
        numBytesWritten = 0
        if not self.outQueue:
            if self.disconnecting:
                self.writeConnectionLost()
                return 0
            try:
                # Probe with a zero-byte write to detect a dead pipe.
                win32file.WriteFile(self.writePipe, '', None)
            except pywintypes.error:
                self.writeConnectionLost()
                return numBytesWritten
        while self.outQueue:
            data = self.outQueue.pop(0)
            errCode = 0
            try:
                errCode, nBytesWritten = win32file.WriteFile(self.writePipe,
                                                             data, None)
            except win32api.error:
                self.writeConnectionLost()
                break
            else:
                # assert not errCode, "wtf an error code???"
                numBytesWritten += nBytesWritten
                if len(data) > nBytesWritten:
                    # Partial write: requeue the unwritten tail and stop.
                    self.outQueue.insert(0, data[nBytesWritten:])
                    break
        else:
            resumed = self.bufferEmpty()
            if not resumed and self.disconnecting:
                self.writeConnectionLost()
        return numBytesWritten
| gpl-2.0 |
TheBigW/DRC | DRCFileTool.py | 1 | 11014 | # DRC.py
# Copyright (C) 2013 - Tobias Wenig
# tobiaswenig@yahoo.com>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>
import os
import subprocess
import struct
import math
from array import array
class WaveParams:
    """Per-file audio parameters plus per-channel sample data and peak stats."""

    def __init__(self, numChannels=None):
        # Default to stereo when the caller does not specify a channel count.
        self.numChannels = 2 if numChannels is None else numChannels
        self.DataOffset = 0       # byte offset of the sample data in the file
        self.sampleByteSize = 4   # bytes per sample (32-bit float)
        count = self.numChannels
        # One independent sample list per channel.
        self.data = [[] for _ in range(count)]
        # Running extrema per channel, initialized so any real sample wins.
        self.maxSampleValue = [-1] * count
        self.minSampleValue = [1] * count
        # Sample indices where the extrema were seen (-1 = none yet).
        self.maxSampleValuePos = [-1] * count
        self.minSampleValuePos = [-1] * count
# WavHeader.py
#  Extract basic header information from a WAV file
# ...taken from http://blog.theroyweb.com/extracting-wav-file-header
# -information-using-a-python-script
# slightly modified to pass out parse result
def PrintWavHeader(strWAVFile):
    """ Extracts data in the first 44 bytes in a WAV file and writes it
    out in a human-readable format

    Returns a WaveParams populated with the channel count, the byte
    offset of the 'data' chunk payload and the per-sample byte size.
    NOTE(review): returns None when the RIFF/fmt markers are missing --
    callers assume a WaveParams; confirm inputs are always WAV.
    """
    def DumpHeaderOutput(structHeaderFields):
        # Print each parsed header field for diagnostics.
        for key in structHeaderFields.keys():
            print(("%s: " % (key), structHeaderFields[key]))
        # end for
    # Open file
    fileIn = open(strWAVFile, 'rb')
    # end try
    # Read in all data
    bufHeader = fileIn.read(38)
    # Verify that the correct identifiers are present
    # print( "bufHeader[0:4]", bufHeader[0:4].decode("utf-8"), " : ",
    #   str(bufHeader[0:4].decode("utf-8")) == 'RIFF' )
    if (bufHeader[0:4].decode("utf-8") != "RIFF") or \
       (bufHeader[12:16].decode("utf-8") != "fmt "):
        print("Input file not a standard WAV file")
        return
    # endif
    stHeaderFields = {'ChunkSize': 0, 'Format': '',
                      'Subchunk1Size': 0, 'AudioFormat': 0,
                      'NumChannels': 0, 'SampleRate': 0,
                      'ByteRate': 0, 'BlockAlign': 0,
                      'BitsPerSample': 0, 'Filename': ''}
    # Parse fields (little-endian, per the RIFF/WAVE spec layout).
    stHeaderFields['ChunkSize'] = struct.unpack('<L', bufHeader[4:8])[0]
    stHeaderFields['Format'] = bufHeader[8:12]
    stHeaderFields['Subchunk1Size'] = struct.unpack('<L', bufHeader[16:20])[0]
    stHeaderFields['AudioFormat'] = struct.unpack('<H', bufHeader[20:22])[0]
    stHeaderFields['NumChannels'] = struct.unpack('<H', bufHeader[22:24])[0]
    stHeaderFields['SampleRate'] = struct.unpack('<L', bufHeader[24:28])[0]
    stHeaderFields['ByteRate'] = struct.unpack('<L', bufHeader[28:32])[0]
    stHeaderFields['BlockAlign'] = struct.unpack('<H', bufHeader[32:34])[0]
    stHeaderFields['BitsPerSample'] = struct.unpack('<H', bufHeader[34:36])[0]
    # Locate & read data chunk
    chunksList = []
    dataChunkLocation = 0
    fileIn.seek(0, 2) # Seek to end of file
    inputFileSize = fileIn.tell()
    nextChunkLocation = 12 # skip the RIFF header
    while 1:
        # Read subchunk header
        fileIn.seek(nextChunkLocation)
        bufHeader = fileIn.read(8)
        if bufHeader[0:4].decode("utf-8") == "data":
            print(("data section found at : ", fileIn.tell()))
            dataChunkLocation = nextChunkLocation
        # endif
        # Advance past this chunk: 8-byte header + payload size.
        # NOTE(review): a corrupt zero-size chunk would loop forever here.
        nextChunkLocation += (8 + struct.unpack('<L', bufHeader[4:8])[0])
        chunksList.append(bufHeader[0:4])
        if nextChunkLocation >= inputFileSize:
            break
        # endif
    # end while
    # Dump subchunk list
    print("Subchunks Found: ")
    for chunkName in chunksList:
        print(("%s, " % (chunkName), ))
    # end for
    print("\n")
    # Dump data chunk information
    if dataChunkLocation != 0:
        fileIn.seek(dataChunkLocation)
        bufHeader = fileIn.read(8)
        print(("Data Chunk located at offset [%s] of data length [%s] bytes" %
               (dataChunkLocation, struct.unpack('<L', bufHeader[4:8])[0])))
    # endif
    # Print output
    stHeaderFields['Filename'] = os.path.basename(strWAVFile)
    DumpHeaderOutput(stHeaderFields)
    # Close file
    fileIn.close()
    # Package the interesting bits for the loaders below.
    params = WaveParams(int(stHeaderFields['NumChannels']))
    params.DataOffset = int(dataChunkLocation) + 8
    params.sampleByteSize = int(stHeaderFields['BitsPerSample'] / 8)
    return params
def getNumChannels(filename):
    """Return the channel count parsed from *filename*'s WAV header."""
    return PrintWavHeader(filename).numChannels
def dumpSoundDataToFile(data, filename, writeAsText=False):
    """Write *data* (an iterable of floats) to *filename* as raw 32-bit floats.

    When *writeAsText* is true, additionally write a human-readable dump
    (one sample per line) to '<filename>.txt'.
    """
    print("convert to array")
    float_array = array('f', data)
    print("writing to file...")
    # 'with' guarantees the handle is closed even if a write fails; the
    # original open()/close() pairs leaked the handle on exceptions.
    with open(filename, 'wb') as f:
        float_array.tofile(f)
    # dump textual representation too
    if writeAsText:
        with open(filename + '.txt', 'w') as theFile:
            for item in data:
                theFile.write("%s\n" % item)
def LoadRawFile(filename, params):
    """Load interleaved float samples from *filename* into *params*.

    Reads params.sampleByteSize bytes per sample starting at
    params.DataOffset, de-interleaving into params.data[channel] and
    tracking per-channel min/max values and their positions.  NaN samples
    are replaced by 0.  Returns the same *params* object.
    NOTE(review): struct.unpack('f', ...) requires exactly 4 bytes, so
    this only works for sampleByteSize == 4 -- confirm other sizes never
    reach this path.
    """
    print(("LoadRawFile : ", filename, " numChanels : ", params.numChannels, "sampleByteSize", params.sampleByteSize))
    filterFile = open(filename, "rb")
    filterFile.seek(params.DataOffset)
    readData = filterFile.read(params.sampleByteSize)
    while len(readData) == params.sampleByteSize:
        # Samples are interleaved: one sample per channel per frame.
        for chanel in range(0, params.numChannels):
            if len(readData) == params.sampleByteSize:
                floatSample = float(struct.unpack('f', readData)[0])
                readData = filterFile.read(params.sampleByteSize)
                #print ("floatSample : ", floatSample)
                if math.isnan(floatSample):
                    print(("value is: ", floatSample, "resetting to 0"))
                    floatSample = float(0.0)
                # Track running extrema and where they occurred.
                if params.maxSampleValue[chanel] < floatSample:
                    params.maxSampleValue[chanel] = floatSample
                    params.maxSampleValuePos[chanel] = len(params.data[chanel])
                    #print("found max : ",params.maxSampleValue,
                    # params.maxSampleValuePos)
                if params.minSampleValue[chanel] > floatSample:
                    params.minSampleValue[chanel] = floatSample
                    params.minSampleValuePos[chanel] = len(params.data[chanel])
                params.data[chanel].append(floatSample)
            else:
                # File ended mid-frame: pad with silence so all channels
                # keep the same length.
                print("buffer underrun for chanel : ", chanel, "at sample:", len(params.data[chanel]) )
                print("addin 0 sample to compensate and keep length consistent across chanels")
                params.data[chanel].append(0.0)
    # dump the filter to check
    if params.numChannels > 1:
        print(("loaded r/l: " + str(len(params.data[0])) + "/" +
               str(len(params.data[1])) + " samples per channel successfully"))
    else:
        print(("loaded one chanel filter: " + str(len(params.data[0])) +
               " with samples successfully"))
    print(("numChanels : ", params.numChannels, "maxPos : ",
           str(params.maxSampleValuePos), "maxValue : ",
           str(params.maxSampleValue), "file: ", filename))
    #dumpSoundDataToFile(params.data[0], /tmp/filterdmp.pcm, True)
    return params
def WriteWaveFile(params, outFileName):
    """Write params.data out as a WAV file via sox.

    Dumps each channel to a temporary raw float PCM file in /tmp, then
    invokes sox to merge/convert them into *outFileName*.
    """
    #poor mans way to write a wave file - we write 2 temporary pcm files
    #and convert to wav using sox :)
    commandLine = ['sox', '-M']
    pcmParams = ['-traw', '-c1', '-r41100', '-efloat', '-b32']
    #TODO : do properly once prototype works
    for chanel in range(0, params.numChannels):
        strFileName = "/tmp/channel_" + str(chanel) + ".pcm"
        commandLine.extend(pcmParams)
        print(("numChanels : ", params.numChannels, "chanel: ", chanel))
        dumpSoundDataToFile(params.data[chanel], strFileName)
        commandLine.append(strFileName)
    if params.numChannels > 1:
        commandLine.extend(['-twav'])
    else:
        # Mono: rebuild the command without the -M merge flag.
        # NOTE(review): relies on strFileName still holding the single
        # channel's path from the loop above; confirm numChannels >= 1.
        commandLine = ['sox', '-traw', '-c1', '-r41100',
                       '-efloat', '-b32']
        commandLine.append(strFileName)
        commandLine.extend(['-twav'])
    commandLine.append(outFileName)
    print(("executing sox to create wave file : " + str(commandLine)))
    p = subprocess.Popen(commandLine, 0, None, None, subprocess.PIPE,
                         subprocess.PIPE)
    (out, err) = p.communicate()
    print(("output from sox conversion : " + str(out) + " error : " + str(err)))
def LoadWaveFile(filename):
    """Load a WAV file by first normalizing it with sox, then parsing it.

    sox rewrites the input as a 44.1 kHz WAV in /tmp, which guarantees a
    layout our limited header parser understands; the samples are then
    loaded with LoadRawFile.  Returns the populated WaveParams.
    """
    #we have some trouble with or very limited wavefile handling
    #as we can load sox creaed wave files we use sox to make sure that we have a format we can handle
    strTmpWaveFileName = '/tmp/tmpWaveFile.wav'
    commandLine = ['sox']
    commandLine.append(filename)
    # NOTE(review): '-r 44100' is passed as a single argv token containing
    # a space -- sox appears to accept it, but '-r', '44100' would be the
    # canonical form; confirm before changing.
    commandLine.extend(['-twav', '-r 44100'])
    commandLine.append( strTmpWaveFileName )
    print(("executing sox to create tmp wave file for loading: " + str(commandLine)))
    p = subprocess.Popen(commandLine, 0, None, None, subprocess.PIPE,
                         subprocess.PIPE)
    (out, err) = p.communicate()
    print(("output from sox conversion : " + str(out) + " error : " + str(err)))
    params = PrintWavHeader(strTmpWaveFileName)
    print(("LoadWaveFile: numChannels : ", params.numChannels))
    params = LoadRawFile(strTmpWaveFileName, params)
    return params
def fillTestFilter(filter_kernel):
    """Mirror a half filter kernel into a symmetric full kernel, in place.

    Given [a0, a1, ..., an], the list becomes
    [an, ..., a1, a0, a1, ..., an]; the (mutated) input list is returned.
    """
    filter_array = filter_kernel
    # Prepend the reversed tail in one slice assignment.  Unlike the old
    # iterate-while-inserting version, this also handles an empty kernel
    # (the old code raised StopIteration on []) and avoids mutating the
    # list while iterating over it.
    filter_array[:0] = filter_array[:0:-1]
    print("test filter : " + str(filter_array))
    return filter_array
def LoadAudioFile(filename, numChannels):
    """Load an audio file, dispatching on extension.

    '.wav' files go through LoadWaveFile (channel count read from the
    header); everything else is treated as raw float PCM with
    *numChannels* channels.  NOTE(review): returns None when *filename*
    is the empty string -- callers appear to assume a WaveParams.
    """
    # return fillTestFilter( [0.25, 0.23, 0.15, 0.06, 0, -0.06, -0.06,
    # -0.02, 0.0, 0.01, 0.01, 0] ) + fillTestFilter([0.25, 0.06, 0, -0.06,
    # -0.06, -0.02, 0, 0.01, 0.01, 0.0, 0.0, 0.0])
    print(("LoadAudioFile: ", filename, " numChanels : ", numChannels))
    if filename != '':
        fileExt = os.path.splitext(filename)[-1]
        print("ext = " + fileExt)
        if fileExt == ".wav":
            return LoadWaveFile(filename)
        params = WaveParams(numChannels)
        return LoadRawFile(filename, params)
def LoadAudioFileStereoChannels(filename, numChannels, numChannelsToLoad):
data = LoadAudioFile(filename, numChannels)
if data.numChannels > 2:
#TODO extract first 2 channels and re-package
print(("work in progress ... "))
return data
| gpl-3.0 |
kkdd/arangodb | 3rdParty/V8-4.3.61/third_party/python_26/Lib/encodings/cp1253.py | 593 | 13350 | """ Python Character Mapping Codec cp1253 generated from 'MAPPINGS/VENDORS/MICSFT/WINDOWS/CP1253.TXT' with gencodec.py.
"""#"
import codecs
### Codec APIs
class Codec(codecs.Codec):
def encode(self,input,errors='strict'):
return codecs.charmap_encode(input,errors,encoding_table)
def decode(self,input,errors='strict'):
return codecs.charmap_decode(input,errors,decoding_table)
class IncrementalEncoder(codecs.IncrementalEncoder):
def encode(self, input, final=False):
return codecs.charmap_encode(input,self.errors,encoding_table)[0]
class IncrementalDecoder(codecs.IncrementalDecoder):
def decode(self, input, final=False):
return codecs.charmap_decode(input,self.errors,decoding_table)[0]
class StreamWriter(Codec,codecs.StreamWriter):
pass
class StreamReader(Codec,codecs.StreamReader):
pass
### encodings module API
def getregentry():
return codecs.CodecInfo(
name='cp1253',
encode=Codec().encode,
decode=Codec().decode,
incrementalencoder=IncrementalEncoder,
incrementaldecoder=IncrementalDecoder,
streamreader=StreamReader,
streamwriter=StreamWriter,
)
### Decoding Table
decoding_table = (
u'\x00' # 0x00 -> NULL
u'\x01' # 0x01 -> START OF HEADING
u'\x02' # 0x02 -> START OF TEXT
u'\x03' # 0x03 -> END OF TEXT
u'\x04' # 0x04 -> END OF TRANSMISSION
u'\x05' # 0x05 -> ENQUIRY
u'\x06' # 0x06 -> ACKNOWLEDGE
u'\x07' # 0x07 -> BELL
u'\x08' # 0x08 -> BACKSPACE
u'\t' # 0x09 -> HORIZONTAL TABULATION
u'\n' # 0x0A -> LINE FEED
u'\x0b' # 0x0B -> VERTICAL TABULATION
u'\x0c' # 0x0C -> FORM FEED
u'\r' # 0x0D -> CARRIAGE RETURN
u'\x0e' # 0x0E -> SHIFT OUT
u'\x0f' # 0x0F -> SHIFT IN
u'\x10' # 0x10 -> DATA LINK ESCAPE
u'\x11' # 0x11 -> DEVICE CONTROL ONE
u'\x12' # 0x12 -> DEVICE CONTROL TWO
u'\x13' # 0x13 -> DEVICE CONTROL THREE
u'\x14' # 0x14 -> DEVICE CONTROL FOUR
u'\x15' # 0x15 -> NEGATIVE ACKNOWLEDGE
u'\x16' # 0x16 -> SYNCHRONOUS IDLE
u'\x17' # 0x17 -> END OF TRANSMISSION BLOCK
u'\x18' # 0x18 -> CANCEL
u'\x19' # 0x19 -> END OF MEDIUM
u'\x1a' # 0x1A -> SUBSTITUTE
u'\x1b' # 0x1B -> ESCAPE
u'\x1c' # 0x1C -> FILE SEPARATOR
u'\x1d' # 0x1D -> GROUP SEPARATOR
u'\x1e' # 0x1E -> RECORD SEPARATOR
u'\x1f' # 0x1F -> UNIT SEPARATOR
u' ' # 0x20 -> SPACE
u'!' # 0x21 -> EXCLAMATION MARK
u'"' # 0x22 -> QUOTATION MARK
u'#' # 0x23 -> NUMBER SIGN
u'$' # 0x24 -> DOLLAR SIGN
u'%' # 0x25 -> PERCENT SIGN
u'&' # 0x26 -> AMPERSAND
u"'" # 0x27 -> APOSTROPHE
u'(' # 0x28 -> LEFT PARENTHESIS
u')' # 0x29 -> RIGHT PARENTHESIS
u'*' # 0x2A -> ASTERISK
u'+' # 0x2B -> PLUS SIGN
u',' # 0x2C -> COMMA
u'-' # 0x2D -> HYPHEN-MINUS
u'.' # 0x2E -> FULL STOP
u'/' # 0x2F -> SOLIDUS
u'0' # 0x30 -> DIGIT ZERO
u'1' # 0x31 -> DIGIT ONE
u'2' # 0x32 -> DIGIT TWO
u'3' # 0x33 -> DIGIT THREE
u'4' # 0x34 -> DIGIT FOUR
u'5' # 0x35 -> DIGIT FIVE
u'6' # 0x36 -> DIGIT SIX
u'7' # 0x37 -> DIGIT SEVEN
u'8' # 0x38 -> DIGIT EIGHT
u'9' # 0x39 -> DIGIT NINE
u':' # 0x3A -> COLON
u';' # 0x3B -> SEMICOLON
u'<' # 0x3C -> LESS-THAN SIGN
u'=' # 0x3D -> EQUALS SIGN
u'>' # 0x3E -> GREATER-THAN SIGN
u'?' # 0x3F -> QUESTION MARK
u'@' # 0x40 -> COMMERCIAL AT
u'A' # 0x41 -> LATIN CAPITAL LETTER A
u'B' # 0x42 -> LATIN CAPITAL LETTER B
u'C' # 0x43 -> LATIN CAPITAL LETTER C
u'D' # 0x44 -> LATIN CAPITAL LETTER D
u'E' # 0x45 -> LATIN CAPITAL LETTER E
u'F' # 0x46 -> LATIN CAPITAL LETTER F
u'G' # 0x47 -> LATIN CAPITAL LETTER G
u'H' # 0x48 -> LATIN CAPITAL LETTER H
u'I' # 0x49 -> LATIN CAPITAL LETTER I
u'J' # 0x4A -> LATIN CAPITAL LETTER J
u'K' # 0x4B -> LATIN CAPITAL LETTER K
u'L' # 0x4C -> LATIN CAPITAL LETTER L
u'M' # 0x4D -> LATIN CAPITAL LETTER M
u'N' # 0x4E -> LATIN CAPITAL LETTER N
u'O' # 0x4F -> LATIN CAPITAL LETTER O
u'P' # 0x50 -> LATIN CAPITAL LETTER P
u'Q' # 0x51 -> LATIN CAPITAL LETTER Q
u'R' # 0x52 -> LATIN CAPITAL LETTER R
u'S' # 0x53 -> LATIN CAPITAL LETTER S
u'T' # 0x54 -> LATIN CAPITAL LETTER T
u'U' # 0x55 -> LATIN CAPITAL LETTER U
u'V' # 0x56 -> LATIN CAPITAL LETTER V
u'W' # 0x57 -> LATIN CAPITAL LETTER W
u'X' # 0x58 -> LATIN CAPITAL LETTER X
u'Y' # 0x59 -> LATIN CAPITAL LETTER Y
u'Z' # 0x5A -> LATIN CAPITAL LETTER Z
u'[' # 0x5B -> LEFT SQUARE BRACKET
u'\\' # 0x5C -> REVERSE SOLIDUS
u']' # 0x5D -> RIGHT SQUARE BRACKET
u'^' # 0x5E -> CIRCUMFLEX ACCENT
u'_' # 0x5F -> LOW LINE
u'`' # 0x60 -> GRAVE ACCENT
u'a' # 0x61 -> LATIN SMALL LETTER A
u'b' # 0x62 -> LATIN SMALL LETTER B
u'c' # 0x63 -> LATIN SMALL LETTER C
u'd' # 0x64 -> LATIN SMALL LETTER D
u'e' # 0x65 -> LATIN SMALL LETTER E
u'f' # 0x66 -> LATIN SMALL LETTER F
u'g' # 0x67 -> LATIN SMALL LETTER G
u'h' # 0x68 -> LATIN SMALL LETTER H
u'i' # 0x69 -> LATIN SMALL LETTER I
u'j' # 0x6A -> LATIN SMALL LETTER J
u'k' # 0x6B -> LATIN SMALL LETTER K
u'l' # 0x6C -> LATIN SMALL LETTER L
u'm' # 0x6D -> LATIN SMALL LETTER M
u'n' # 0x6E -> LATIN SMALL LETTER N
u'o' # 0x6F -> LATIN SMALL LETTER O
u'p' # 0x70 -> LATIN SMALL LETTER P
u'q' # 0x71 -> LATIN SMALL LETTER Q
u'r' # 0x72 -> LATIN SMALL LETTER R
u's' # 0x73 -> LATIN SMALL LETTER S
u't' # 0x74 -> LATIN SMALL LETTER T
u'u' # 0x75 -> LATIN SMALL LETTER U
u'v' # 0x76 -> LATIN SMALL LETTER V
u'w' # 0x77 -> LATIN SMALL LETTER W
u'x' # 0x78 -> LATIN SMALL LETTER X
u'y' # 0x79 -> LATIN SMALL LETTER Y
u'z' # 0x7A -> LATIN SMALL LETTER Z
u'{' # 0x7B -> LEFT CURLY BRACKET
u'|' # 0x7C -> VERTICAL LINE
u'}' # 0x7D -> RIGHT CURLY BRACKET
u'~' # 0x7E -> TILDE
u'\x7f' # 0x7F -> DELETE
u'\u20ac' # 0x80 -> EURO SIGN
u'\ufffe' # 0x81 -> UNDEFINED
u'\u201a' # 0x82 -> SINGLE LOW-9 QUOTATION MARK
u'\u0192' # 0x83 -> LATIN SMALL LETTER F WITH HOOK
u'\u201e' # 0x84 -> DOUBLE LOW-9 QUOTATION MARK
u'\u2026' # 0x85 -> HORIZONTAL ELLIPSIS
u'\u2020' # 0x86 -> DAGGER
u'\u2021' # 0x87 -> DOUBLE DAGGER
u'\ufffe' # 0x88 -> UNDEFINED
u'\u2030' # 0x89 -> PER MILLE SIGN
u'\ufffe' # 0x8A -> UNDEFINED
u'\u2039' # 0x8B -> SINGLE LEFT-POINTING ANGLE QUOTATION MARK
u'\ufffe' # 0x8C -> UNDEFINED
u'\ufffe' # 0x8D -> UNDEFINED
u'\ufffe' # 0x8E -> UNDEFINED
u'\ufffe' # 0x8F -> UNDEFINED
u'\ufffe' # 0x90 -> UNDEFINED
u'\u2018' # 0x91 -> LEFT SINGLE QUOTATION MARK
u'\u2019' # 0x92 -> RIGHT SINGLE QUOTATION MARK
u'\u201c' # 0x93 -> LEFT DOUBLE QUOTATION MARK
u'\u201d' # 0x94 -> RIGHT DOUBLE QUOTATION MARK
u'\u2022' # 0x95 -> BULLET
u'\u2013' # 0x96 -> EN DASH
u'\u2014' # 0x97 -> EM DASH
u'\ufffe' # 0x98 -> UNDEFINED
u'\u2122' # 0x99 -> TRADE MARK SIGN
u'\ufffe' # 0x9A -> UNDEFINED
u'\u203a' # 0x9B -> SINGLE RIGHT-POINTING ANGLE QUOTATION MARK
u'\ufffe' # 0x9C -> UNDEFINED
u'\ufffe' # 0x9D -> UNDEFINED
u'\ufffe' # 0x9E -> UNDEFINED
u'\ufffe' # 0x9F -> UNDEFINED
u'\xa0' # 0xA0 -> NO-BREAK SPACE
u'\u0385' # 0xA1 -> GREEK DIALYTIKA TONOS
u'\u0386' # 0xA2 -> GREEK CAPITAL LETTER ALPHA WITH TONOS
u'\xa3' # 0xA3 -> POUND SIGN
u'\xa4' # 0xA4 -> CURRENCY SIGN
u'\xa5' # 0xA5 -> YEN SIGN
u'\xa6' # 0xA6 -> BROKEN BAR
u'\xa7' # 0xA7 -> SECTION SIGN
u'\xa8' # 0xA8 -> DIAERESIS
u'\xa9' # 0xA9 -> COPYRIGHT SIGN
u'\ufffe' # 0xAA -> UNDEFINED
u'\xab' # 0xAB -> LEFT-POINTING DOUBLE ANGLE QUOTATION MARK
u'\xac' # 0xAC -> NOT SIGN
u'\xad' # 0xAD -> SOFT HYPHEN
u'\xae' # 0xAE -> REGISTERED SIGN
u'\u2015' # 0xAF -> HORIZONTAL BAR
u'\xb0' # 0xB0 -> DEGREE SIGN
u'\xb1' # 0xB1 -> PLUS-MINUS SIGN
u'\xb2' # 0xB2 -> SUPERSCRIPT TWO
u'\xb3' # 0xB3 -> SUPERSCRIPT THREE
u'\u0384' # 0xB4 -> GREEK TONOS
u'\xb5' # 0xB5 -> MICRO SIGN
u'\xb6' # 0xB6 -> PILCROW SIGN
u'\xb7' # 0xB7 -> MIDDLE DOT
u'\u0388' # 0xB8 -> GREEK CAPITAL LETTER EPSILON WITH TONOS
u'\u0389' # 0xB9 -> GREEK CAPITAL LETTER ETA WITH TONOS
u'\u038a' # 0xBA -> GREEK CAPITAL LETTER IOTA WITH TONOS
u'\xbb' # 0xBB -> RIGHT-POINTING DOUBLE ANGLE QUOTATION MARK
u'\u038c' # 0xBC -> GREEK CAPITAL LETTER OMICRON WITH TONOS
u'\xbd' # 0xBD -> VULGAR FRACTION ONE HALF
u'\u038e' # 0xBE -> GREEK CAPITAL LETTER UPSILON WITH TONOS
u'\u038f' # 0xBF -> GREEK CAPITAL LETTER OMEGA WITH TONOS
u'\u0390' # 0xC0 -> GREEK SMALL LETTER IOTA WITH DIALYTIKA AND TONOS
u'\u0391' # 0xC1 -> GREEK CAPITAL LETTER ALPHA
u'\u0392' # 0xC2 -> GREEK CAPITAL LETTER BETA
u'\u0393' # 0xC3 -> GREEK CAPITAL LETTER GAMMA
u'\u0394' # 0xC4 -> GREEK CAPITAL LETTER DELTA
u'\u0395' # 0xC5 -> GREEK CAPITAL LETTER EPSILON
u'\u0396' # 0xC6 -> GREEK CAPITAL LETTER ZETA
u'\u0397' # 0xC7 -> GREEK CAPITAL LETTER ETA
u'\u0398' # 0xC8 -> GREEK CAPITAL LETTER THETA
u'\u0399' # 0xC9 -> GREEK CAPITAL LETTER IOTA
u'\u039a' # 0xCA -> GREEK CAPITAL LETTER KAPPA
u'\u039b' # 0xCB -> GREEK CAPITAL LETTER LAMDA
u'\u039c' # 0xCC -> GREEK CAPITAL LETTER MU
u'\u039d' # 0xCD -> GREEK CAPITAL LETTER NU
u'\u039e' # 0xCE -> GREEK CAPITAL LETTER XI
u'\u039f' # 0xCF -> GREEK CAPITAL LETTER OMICRON
u'\u03a0' # 0xD0 -> GREEK CAPITAL LETTER PI
u'\u03a1' # 0xD1 -> GREEK CAPITAL LETTER RHO
u'\ufffe' # 0xD2 -> UNDEFINED
u'\u03a3' # 0xD3 -> GREEK CAPITAL LETTER SIGMA
u'\u03a4' # 0xD4 -> GREEK CAPITAL LETTER TAU
u'\u03a5' # 0xD5 -> GREEK CAPITAL LETTER UPSILON
u'\u03a6' # 0xD6 -> GREEK CAPITAL LETTER PHI
u'\u03a7' # 0xD7 -> GREEK CAPITAL LETTER CHI
u'\u03a8' # 0xD8 -> GREEK CAPITAL LETTER PSI
u'\u03a9' # 0xD9 -> GREEK CAPITAL LETTER OMEGA
u'\u03aa' # 0xDA -> GREEK CAPITAL LETTER IOTA WITH DIALYTIKA
u'\u03ab' # 0xDB -> GREEK CAPITAL LETTER UPSILON WITH DIALYTIKA
u'\u03ac' # 0xDC -> GREEK SMALL LETTER ALPHA WITH TONOS
u'\u03ad' # 0xDD -> GREEK SMALL LETTER EPSILON WITH TONOS
u'\u03ae' # 0xDE -> GREEK SMALL LETTER ETA WITH TONOS
u'\u03af' # 0xDF -> GREEK SMALL LETTER IOTA WITH TONOS
u'\u03b0' # 0xE0 -> GREEK SMALL LETTER UPSILON WITH DIALYTIKA AND TONOS
u'\u03b1' # 0xE1 -> GREEK SMALL LETTER ALPHA
u'\u03b2' # 0xE2 -> GREEK SMALL LETTER BETA
u'\u03b3' # 0xE3 -> GREEK SMALL LETTER GAMMA
u'\u03b4' # 0xE4 -> GREEK SMALL LETTER DELTA
u'\u03b5' # 0xE5 -> GREEK SMALL LETTER EPSILON
u'\u03b6' # 0xE6 -> GREEK SMALL LETTER ZETA
u'\u03b7' # 0xE7 -> GREEK SMALL LETTER ETA
u'\u03b8' # 0xE8 -> GREEK SMALL LETTER THETA
u'\u03b9' # 0xE9 -> GREEK SMALL LETTER IOTA
u'\u03ba' # 0xEA -> GREEK SMALL LETTER KAPPA
u'\u03bb' # 0xEB -> GREEK SMALL LETTER LAMDA
u'\u03bc' # 0xEC -> GREEK SMALL LETTER MU
u'\u03bd' # 0xED -> GREEK SMALL LETTER NU
u'\u03be' # 0xEE -> GREEK SMALL LETTER XI
u'\u03bf' # 0xEF -> GREEK SMALL LETTER OMICRON
u'\u03c0' # 0xF0 -> GREEK SMALL LETTER PI
u'\u03c1' # 0xF1 -> GREEK SMALL LETTER RHO
u'\u03c2' # 0xF2 -> GREEK SMALL LETTER FINAL SIGMA
u'\u03c3' # 0xF3 -> GREEK SMALL LETTER SIGMA
u'\u03c4' # 0xF4 -> GREEK SMALL LETTER TAU
u'\u03c5' # 0xF5 -> GREEK SMALL LETTER UPSILON
u'\u03c6' # 0xF6 -> GREEK SMALL LETTER PHI
u'\u03c7' # 0xF7 -> GREEK SMALL LETTER CHI
u'\u03c8' # 0xF8 -> GREEK SMALL LETTER PSI
u'\u03c9' # 0xF9 -> GREEK SMALL LETTER OMEGA
u'\u03ca' # 0xFA -> GREEK SMALL LETTER IOTA WITH DIALYTIKA
u'\u03cb' # 0xFB -> GREEK SMALL LETTER UPSILON WITH DIALYTIKA
u'\u03cc' # 0xFC -> GREEK SMALL LETTER OMICRON WITH TONOS
u'\u03cd' # 0xFD -> GREEK SMALL LETTER UPSILON WITH TONOS
u'\u03ce' # 0xFE -> GREEK SMALL LETTER OMEGA WITH TONOS
u'\ufffe' # 0xFF -> UNDEFINED
)
### Encoding table
encoding_table=codecs.charmap_build(decoding_table)
| apache-2.0 |
rexshihaoren/scikit-learn | sklearn/pipeline.py | 162 | 21103 | """
The :mod:`sklearn.pipeline` module implements utilities to build a composite
estimator, as a chain of transforms and estimators.
"""
# Author: Edouard Duchesnay
# Gael Varoquaux
# Virgile Fritsch
# Alexandre Gramfort
# Lars Buitinck
# Licence: BSD
from collections import defaultdict
import numpy as np
from scipy import sparse
from .base import BaseEstimator, TransformerMixin
from .externals.joblib import Parallel, delayed
from .externals import six
from .utils import tosequence
from .utils.metaestimators import if_delegate_has_method
from .externals.six import iteritems
__all__ = ['Pipeline', 'FeatureUnion']
class Pipeline(BaseEstimator):
"""Pipeline of transforms with a final estimator.
Sequentially apply a list of transforms and a final estimator.
Intermediate steps of the pipeline must be 'transforms', that is, they
must implement fit and transform methods.
The final estimator only needs to implement fit.
The purpose of the pipeline is to assemble several steps that can be
cross-validated together while setting different parameters.
For this, it enables setting parameters of the various steps using their
names and the parameter name separated by a '__', as in the example below.
Read more in the :ref:`User Guide <pipeline>`.
Parameters
----------
steps : list
List of (name, transform) tuples (implementing fit/transform) that are
chained, in the order in which they are chained, with the last object
an estimator.
Attributes
----------
named_steps : dict
Read-only attribute to access any step parameter by user given name.
Keys are step names and values are steps parameters.
Examples
--------
>>> from sklearn import svm
>>> from sklearn.datasets import samples_generator
>>> from sklearn.feature_selection import SelectKBest
>>> from sklearn.feature_selection import f_regression
>>> from sklearn.pipeline import Pipeline
>>> # generate some data to play with
>>> X, y = samples_generator.make_classification(
... n_informative=5, n_redundant=0, random_state=42)
>>> # ANOVA SVM-C
>>> anova_filter = SelectKBest(f_regression, k=5)
>>> clf = svm.SVC(kernel='linear')
>>> anova_svm = Pipeline([('anova', anova_filter), ('svc', clf)])
>>> # You can set the parameters using the names issued
>>> # For instance, fit using a k of 10 in the SelectKBest
>>> # and a parameter 'C' of the svm
>>> anova_svm.set_params(anova__k=10, svc__C=.1).fit(X, y)
... # doctest: +ELLIPSIS
Pipeline(steps=[...])
>>> prediction = anova_svm.predict(X)
>>> anova_svm.score(X, y) # doctest: +ELLIPSIS
0.77...
>>> # getting the selected features chosen by anova_filter
>>> anova_svm.named_steps['anova'].get_support()
... # doctest: +NORMALIZE_WHITESPACE
array([ True, True, True, False, False, True, False, True, True, True,
False, False, True, False, True, False, False, False, False,
True], dtype=bool)
"""
# BaseEstimator interface
def __init__(self, steps):
names, estimators = zip(*steps)
if len(dict(steps)) != len(steps):
raise ValueError("Provided step names are not unique: %s" % (names,))
# shallow copy of steps
self.steps = tosequence(steps)
transforms = estimators[:-1]
estimator = estimators[-1]
for t in transforms:
if (not (hasattr(t, "fit") or hasattr(t, "fit_transform")) or not
hasattr(t, "transform")):
raise TypeError("All intermediate steps of the chain should "
"be transforms and implement fit and transform"
" '%s' (type %s) doesn't)" % (t, type(t)))
if not hasattr(estimator, "fit"):
raise TypeError("Last step of chain should implement fit "
"'%s' (type %s) doesn't)"
% (estimator, type(estimator)))
@property
def _estimator_type(self):
return self.steps[-1][1]._estimator_type
def get_params(self, deep=True):
if not deep:
return super(Pipeline, self).get_params(deep=False)
else:
out = self.named_steps
for name, step in six.iteritems(self.named_steps):
for key, value in six.iteritems(step.get_params(deep=True)):
out['%s__%s' % (name, key)] = value
out.update(super(Pipeline, self).get_params(deep=False))
return out
@property
def named_steps(self):
return dict(self.steps)
@property
def _final_estimator(self):
return self.steps[-1][1]
# Estimator interface
def _pre_transform(self, X, y=None, **fit_params):
fit_params_steps = dict((step, {}) for step, _ in self.steps)
for pname, pval in six.iteritems(fit_params):
step, param = pname.split('__', 1)
fit_params_steps[step][param] = pval
Xt = X
for name, transform in self.steps[:-1]:
if hasattr(transform, "fit_transform"):
Xt = transform.fit_transform(Xt, y, **fit_params_steps[name])
else:
Xt = transform.fit(Xt, y, **fit_params_steps[name]) \
.transform(Xt)
return Xt, fit_params_steps[self.steps[-1][0]]
def fit(self, X, y=None, **fit_params):
"""Fit all the transforms one after the other and transform the
data, then fit the transformed data using the final estimator.
Parameters
----------
X : iterable
Training data. Must fulfill input requirements of first step of the
pipeline.
y : iterable, default=None
Training targets. Must fulfill label requirements for all steps of
the pipeline.
"""
Xt, fit_params = self._pre_transform(X, y, **fit_params)
self.steps[-1][-1].fit(Xt, y, **fit_params)
return self
def fit_transform(self, X, y=None, **fit_params):
"""Fit all the transforms one after the other and transform the
data, then use fit_transform on transformed data using the final
estimator.
Parameters
----------
X : iterable
Training data. Must fulfill input requirements of first step of the
pipeline.
y : iterable, default=None
Training targets. Must fulfill label requirements for all steps of
the pipeline.
"""
Xt, fit_params = self._pre_transform(X, y, **fit_params)
if hasattr(self.steps[-1][-1], 'fit_transform'):
return self.steps[-1][-1].fit_transform(Xt, y, **fit_params)
else:
return self.steps[-1][-1].fit(Xt, y, **fit_params).transform(Xt)
@if_delegate_has_method(delegate='_final_estimator')
def predict(self, X):
"""Applies transforms to the data, and the predict method of the
final estimator. Valid only if the final estimator implements
predict.
Parameters
----------
X : iterable
Data to predict on. Must fulfill input requirements of first step of
the pipeline.
"""
Xt = X
for name, transform in self.steps[:-1]:
Xt = transform.transform(Xt)
return self.steps[-1][-1].predict(Xt)
@if_delegate_has_method(delegate='_final_estimator')
def fit_predict(self, X, y=None, **fit_params):
"""Applies fit_predict of last step in pipeline after transforms.
Applies fit_transforms of a pipeline to the data, followed by the
fit_predict method of the final estimator in the pipeline. Valid
only if the final estimator implements fit_predict.
Parameters
----------
X : iterable
Training data. Must fulfill input requirements of first step of
the pipeline.
y : iterable, default=None
Training targets. Must fulfill label requirements for all steps
of the pipeline.
"""
Xt, fit_params = self._pre_transform(X, y, **fit_params)
return self.steps[-1][-1].fit_predict(Xt, y, **fit_params)
@if_delegate_has_method(delegate='_final_estimator')
def predict_proba(self, X):
"""Applies transforms to the data, and the predict_proba method of the
final estimator. Valid only if the final estimator implements
predict_proba.
Parameters
----------
X : iterable
Data to predict on. Must fulfill input requirements of first step of
the pipeline.
"""
Xt = X
for name, transform in self.steps[:-1]:
Xt = transform.transform(Xt)
return self.steps[-1][-1].predict_proba(Xt)
@if_delegate_has_method(delegate='_final_estimator')
def decision_function(self, X):
"""Applies transforms to the data, and the decision_function method of
the final estimator. Valid only if the final estimator implements
decision_function.
Parameters
----------
X : iterable
Data to predict on. Must fulfill input requirements of first step of
the pipeline.
"""
Xt = X
for name, transform in self.steps[:-1]:
Xt = transform.transform(Xt)
return self.steps[-1][-1].decision_function(Xt)
@if_delegate_has_method(delegate='_final_estimator')
def predict_log_proba(self, X):
"""Applies transforms to the data, and the predict_log_proba method of
the final estimator. Valid only if the final estimator implements
predict_log_proba.
Parameters
----------
X : iterable
Data to predict on. Must fulfill input requirements of first step of
the pipeline.
"""
Xt = X
for name, transform in self.steps[:-1]:
Xt = transform.transform(Xt)
return self.steps[-1][-1].predict_log_proba(Xt)
@if_delegate_has_method(delegate='_final_estimator')
def transform(self, X):
"""Applies transforms to the data, and the transform method of the
final estimator. Valid only if the final estimator implements
transform.
Parameters
----------
X : iterable
Data to predict on. Must fulfill input requirements of first step of
the pipeline.
"""
Xt = X
for name, transform in self.steps:
Xt = transform.transform(Xt)
return Xt
@if_delegate_has_method(delegate='_final_estimator')
def inverse_transform(self, X):
"""Applies inverse transform to the data.
Starts with the last step of the pipeline and applies ``inverse_transform`` in
inverse order of the pipeline steps.
Valid only if all steps of the pipeline implement inverse_transform.
Parameters
----------
X : iterable
Data to inverse transform. Must fulfill output requirements of the
last step of the pipeline.
"""
if X.ndim == 1:
X = X[None, :]
Xt = X
for name, step in self.steps[::-1]:
Xt = step.inverse_transform(Xt)
return Xt
@if_delegate_has_method(delegate='_final_estimator')
def score(self, X, y=None):
"""Applies transforms to the data, and the score method of the
final estimator. Valid only if the final estimator implements
score.
Parameters
----------
X : iterable
Data to score. Must fulfill input requirements of first step of the
pipeline.
y : iterable, default=None
Targets used for scoring. Must fulfill label requirements for all steps of
the pipeline.
"""
Xt = X
for name, transform in self.steps[:-1]:
Xt = transform.transform(Xt)
return self.steps[-1][-1].score(Xt, y)
@property
def classes_(self):
return self.steps[-1][-1].classes_
@property
def _pairwise(self):
# check if first estimator expects pairwise input
return getattr(self.steps[0][1], '_pairwise', False)
def _name_estimators(estimators):
"""Generate names for estimators."""
names = [type(estimator).__name__.lower() for estimator in estimators]
namecount = defaultdict(int)
for est, name in zip(estimators, names):
namecount[name] += 1
for k, v in list(six.iteritems(namecount)):
if v == 1:
del namecount[k]
for i in reversed(range(len(estimators))):
name = names[i]
if name in namecount:
names[i] += "-%d" % namecount[name]
namecount[name] -= 1
return list(zip(names, estimators))
def make_pipeline(*steps):
"""Construct a Pipeline from the given estimators.
This is a shorthand for the Pipeline constructor; it does not require, and
does not permit, naming the estimators. Instead, they will be given names
automatically based on their types.
Examples
--------
>>> from sklearn.naive_bayes import GaussianNB
>>> from sklearn.preprocessing import StandardScaler
>>> make_pipeline(StandardScaler(), GaussianNB()) # doctest: +NORMALIZE_WHITESPACE
Pipeline(steps=[('standardscaler',
StandardScaler(copy=True, with_mean=True, with_std=True)),
('gaussiannb', GaussianNB())])
Returns
-------
p : Pipeline
"""
return Pipeline(_name_estimators(steps))
def _fit_one_transformer(transformer, X, y):
return transformer.fit(X, y)
def _transform_one(transformer, name, X, transformer_weights):
if transformer_weights is not None and name in transformer_weights:
# if we have a weight for this transformer, muliply output
return transformer.transform(X) * transformer_weights[name]
return transformer.transform(X)
def _fit_transform_one(transformer, name, X, y, transformer_weights,
**fit_params):
if transformer_weights is not None and name in transformer_weights:
# if we have a weight for this transformer, muliply output
if hasattr(transformer, 'fit_transform'):
X_transformed = transformer.fit_transform(X, y, **fit_params)
return X_transformed * transformer_weights[name], transformer
else:
X_transformed = transformer.fit(X, y, **fit_params).transform(X)
return X_transformed * transformer_weights[name], transformer
if hasattr(transformer, 'fit_transform'):
X_transformed = transformer.fit_transform(X, y, **fit_params)
return X_transformed, transformer
else:
X_transformed = transformer.fit(X, y, **fit_params).transform(X)
return X_transformed, transformer
class FeatureUnion(BaseEstimator, TransformerMixin):
"""Concatenates results of multiple transformer objects.
This estimator applies a list of transformer objects in parallel to the
input data, then concatenates the results. This is useful to combine
several feature extraction mechanisms into a single transformer.
Read more in the :ref:`User Guide <feature_union>`.
Parameters
----------
transformer_list: list of (string, transformer) tuples
List of transformer objects to be applied to the data. The first
half of each tuple is the name of the transformer.
n_jobs: int, optional
Number of jobs to run in parallel (default 1).
transformer_weights: dict, optional
Multiplicative weights for features per transformer.
Keys are transformer names, values the weights.
"""
def __init__(self, transformer_list, n_jobs=1, transformer_weights=None):
self.transformer_list = transformer_list
self.n_jobs = n_jobs
self.transformer_weights = transformer_weights
def get_feature_names(self):
"""Get feature names from all transformers.
Returns
-------
feature_names : list of strings
Names of the features produced by transform.
"""
feature_names = []
for name, trans in self.transformer_list:
if not hasattr(trans, 'get_feature_names'):
raise AttributeError("Transformer %s does not provide"
" get_feature_names." % str(name))
feature_names.extend([name + "__" + f for f in
trans.get_feature_names()])
return feature_names
def fit(self, X, y=None):
"""Fit all transformers using X.
Parameters
----------
X : array-like or sparse matrix, shape (n_samples, n_features)
Input data, used to fit transformers.
"""
transformers = Parallel(n_jobs=self.n_jobs)(
delayed(_fit_one_transformer)(trans, X, y)
for name, trans in self.transformer_list)
self._update_transformer_list(transformers)
return self
def fit_transform(self, X, y=None, **fit_params):
"""Fit all transformers using X, transform the data and concatenate
results.
Parameters
----------
X : array-like or sparse matrix, shape (n_samples, n_features)
Input data to be transformed.
Returns
-------
X_t : array-like or sparse matrix, shape (n_samples, sum_n_components)
hstack of results of transformers. sum_n_components is the
sum of n_components (output dimension) over transformers.
"""
result = Parallel(n_jobs=self.n_jobs)(
delayed(_fit_transform_one)(trans, name, X, y,
self.transformer_weights, **fit_params)
for name, trans in self.transformer_list)
Xs, transformers = zip(*result)
self._update_transformer_list(transformers)
if any(sparse.issparse(f) for f in Xs):
Xs = sparse.hstack(Xs).tocsr()
else:
Xs = np.hstack(Xs)
return Xs
def transform(self, X):
"""Transform X separately by each transformer, concatenate results.
Parameters
----------
X : array-like or sparse matrix, shape (n_samples, n_features)
Input data to be transformed.
Returns
-------
X_t : array-like or sparse matrix, shape (n_samples, sum_n_components)
hstack of results of transformers. sum_n_components is the
sum of n_components (output dimension) over transformers.
"""
Xs = Parallel(n_jobs=self.n_jobs)(
delayed(_transform_one)(trans, name, X, self.transformer_weights)
for name, trans in self.transformer_list)
if any(sparse.issparse(f) for f in Xs):
Xs = sparse.hstack(Xs).tocsr()
else:
Xs = np.hstack(Xs)
return Xs
def get_params(self, deep=True):
if not deep:
return super(FeatureUnion, self).get_params(deep=False)
else:
out = dict(self.transformer_list)
for name, trans in self.transformer_list:
for key, value in iteritems(trans.get_params(deep=True)):
out['%s__%s' % (name, key)] = value
out.update(super(FeatureUnion, self).get_params(deep=False))
return out
def _update_transformer_list(self, transformers):
self.transformer_list[:] = [
(name, new)
for ((name, old), new) in zip(self.transformer_list, transformers)
]
# XXX it would be nice to have a keyword-only n_jobs argument to this function,
# but that's not allowed in Python 2.x.
def make_union(*transformers):
"""Construct a FeatureUnion from the given transformers.
This is a shorthand for the FeatureUnion constructor; it does not require,
and does not permit, naming the transformers. Instead, they will be given
names automatically based on their types. It also does not allow weighting.
Examples
--------
>>> from sklearn.decomposition import PCA, TruncatedSVD
>>> make_union(PCA(), TruncatedSVD()) # doctest: +NORMALIZE_WHITESPACE
FeatureUnion(n_jobs=1,
transformer_list=[('pca', PCA(copy=True, n_components=None,
whiten=False)),
('truncatedsvd',
TruncatedSVD(algorithm='randomized',
n_components=2, n_iter=5,
random_state=None, tol=0.0))],
transformer_weights=None)
Returns
-------
f : FeatureUnion
"""
return FeatureUnion(_name_estimators(transformers))
| bsd-3-clause |
nhippenmeyer/django | tests/utils_tests/test_baseconv.py | 326 | 1787 | from unittest import TestCase
from django.utils.baseconv import (
BaseConverter, base2, base16, base36, base56, base62, base64,
)
from django.utils.six.moves import range
class TestBaseConv(TestCase):
def test_baseconv(self):
nums = [-10 ** 10, 10 ** 10] + list(range(-100, 100))
for converter in [base2, base16, base36, base56, base62, base64]:
for i in nums:
self.assertEqual(i, converter.decode(converter.encode(i)))
def test_base11(self):
base11 = BaseConverter('0123456789-', sign='$')
self.assertEqual(base11.encode(1234), '-22')
self.assertEqual(base11.decode('-22'), 1234)
self.assertEqual(base11.encode(-1234), '$-22')
self.assertEqual(base11.decode('$-22'), -1234)
def test_base20(self):
base20 = BaseConverter('0123456789abcdefghij')
self.assertEqual(base20.encode(1234), '31e')
self.assertEqual(base20.decode('31e'), 1234)
self.assertEqual(base20.encode(-1234), '-31e')
self.assertEqual(base20.decode('-31e'), -1234)
def test_base64(self):
self.assertEqual(base64.encode(1234), 'JI')
self.assertEqual(base64.decode('JI'), 1234)
self.assertEqual(base64.encode(-1234), '$JI')
self.assertEqual(base64.decode('$JI'), -1234)
def test_base7(self):
base7 = BaseConverter('cjdhel3', sign='g')
self.assertEqual(base7.encode(1234), 'hejd')
self.assertEqual(base7.decode('hejd'), 1234)
self.assertEqual(base7.encode(-1234), 'ghejd')
self.assertEqual(base7.decode('ghejd'), -1234)
def test_exception(self):
self.assertRaises(ValueError, BaseConverter, 'abc', sign='a')
self.assertIsInstance(BaseConverter('abc', sign='d'), BaseConverter)
| bsd-3-clause |
seyko2/openvz_rhel6_kernel_mirror | tools/perf/scripts/python/syscall-counts-by-pid.py | 34 | 1923 | # system call counts, by pid
# (c) 2010, Tom Zanussi <tzanussi@gmail.com>
# Licensed under the terms of the GNU GPL License version 2
#
# Displays system-wide system call totals, broken down by syscall.
# If a [comm] arg is specified, only syscalls called by [comm] are displayed.
import os, sys
sys.path.append(os.environ['PERF_EXEC_PATH'] + \
'/scripts/python/Perf-Trace-Util/lib/Perf/Trace')
from perf_trace_context import *
from Core import *
from Util import syscall_name
usage = "perf script -s syscall-counts-by-pid.py [comm]\n";
for_comm = None
for_pid = None
if len(sys.argv) > 2:
sys.exit(usage)
if len(sys.argv) > 1:
try:
for_pid = int(sys.argv[1])
except:
for_comm = sys.argv[1]
syscalls = autodict()
def trace_begin():
print "Press control+C to stop and show the summary"
def trace_end():
print_syscall_totals()
def syscalls__sys_enter(event_name, context, common_cpu,
common_secs, common_nsecs, common_pid, common_comm,
id, args):
if (for_comm and common_comm != for_comm) or \
(for_pid and common_pid != for_pid ):
return
try:
syscalls[common_comm][common_pid][id] += 1
except TypeError:
syscalls[common_comm][common_pid][id] = 1
def print_syscall_totals():
if for_comm is not None:
print "\nsyscall events for %s:\n\n" % (for_comm),
else:
print "\nsyscall events by comm/pid:\n\n",
print "%-40s %10s\n" % ("comm [pid]/syscalls", "count"),
print "%-40s %10s\n" % ("----------------------------------------", \
"----------"),
comm_keys = syscalls.keys()
for comm in comm_keys:
pid_keys = syscalls[comm].keys()
for pid in pid_keys:
print "\n%s [%d]\n" % (comm, pid),
id_keys = syscalls[comm][pid].keys()
for id, val in sorted(syscalls[comm][pid].iteritems(), \
key = lambda(k, v): (v, k), reverse = True):
print " %-38s %10d\n" % (syscall_name(id), val),
| gpl-2.0 |
popazerty/dvbapp-gui2 | lib/python/Components/Converter/ServiceInfo.py | 2 | 9104 | from Components.Converter.Converter import Converter
from enigma import iServiceInformation, iPlayableService
from Components.Element import cached
from os import path
# sAspect values that this module treats as widescreen (used by the
# IS_WIDESCREEN and *_AND_*WIDESCREEN checks in ServiceInfo).
WIDESCREEN = [3, 4, 7, 8, 0xB, 0xC, 0xF, 0x10]
class ServiceInfo(Converter, object):
	"""Skin converter exposing properties of the currently playing service.

	Depending on the ``type`` string given in the skin, this converter
	reports a boolean flag (crypted, widescreen, ...), a text value
	(PIDs, resolution, framerate, ...) or a raw value.  Video geometry
	is read from /proc/stb/vmpeg/0/* when available, falling back to the
	service information otherwise.
	"""

	HAS_TELETEXT = 0
	IS_MULTICHANNEL = 1
	IS_CRYPTED = 2
	IS_WIDESCREEN = 3
	SUBSERVICES_AVAILABLE = 4
	XRES = 5
	YRES = 6
	APID = 7
	VPID = 8
	PCRPID = 9
	PMTPID = 10
	TXTPID = 11
	TSID = 12
	ONID = 13
	SID = 14
	FRAMERATE = 15
	TRANSFERBPS = 16
	HAS_HBBTV = 17
	AUDIOTRACKS_AVAILABLE = 18
	SUBTITLES_AVAILABLE = 19
	EDITMODE = 20
	IS_STREAM = 21
	IS_SD = 22
	IS_HD = 23
	IS_SD_AND_WIDESCREEN = 24
	IS_SD_AND_NOT_WIDESCREEN = 25

	def __init__(self, type):
		Converter.__init__(self, type)
		# Map the skin-supplied type string to (type id, events that
		# should trigger a re-evaluation of this converter).
		self.type, self.interesting_events = {
			"HasTelext": (self.HAS_TELETEXT, (iPlayableService.evUpdatedInfo,)),
			"IsMultichannel": (self.IS_MULTICHANNEL, (iPlayableService.evUpdatedInfo,)),
			"IsCrypted": (self.IS_CRYPTED, (iPlayableService.evUpdatedInfo,)),
			"IsWidescreen": (self.IS_WIDESCREEN, (iPlayableService.evVideoSizeChanged,)),
			"SubservicesAvailable": (self.SUBSERVICES_AVAILABLE, (iPlayableService.evUpdatedEventInfo,)),
			"VideoWidth": (self.XRES, (iPlayableService.evVideoSizeChanged,)),
			"VideoHeight": (self.YRES, (iPlayableService.evVideoSizeChanged,)),
			"AudioPid": (self.APID, (iPlayableService.evUpdatedInfo,)),
			"VideoPid": (self.VPID, (iPlayableService.evUpdatedInfo,)),
			"PcrPid": (self.PCRPID, (iPlayableService.evUpdatedInfo,)),
			"PmtPid": (self.PMTPID, (iPlayableService.evUpdatedInfo,)),
			"TxtPid": (self.TXTPID, (iPlayableService.evUpdatedInfo,)),
			"TsId": (self.TSID, (iPlayableService.evUpdatedInfo,)),
			"OnId": (self.ONID, (iPlayableService.evUpdatedInfo,)),
			"Sid": (self.SID, (iPlayableService.evUpdatedInfo,)),
			"Framerate": (self.FRAMERATE, (iPlayableService.evVideoSizeChanged,iPlayableService.evUpdatedInfo,)),
			"TransferBPS": (self.TRANSFERBPS, (iPlayableService.evUpdatedInfo,)),
			"HasHBBTV": (self.HAS_HBBTV, (iPlayableService.evUpdatedInfo,iPlayableService.evHBBTVInfo,)),
			"AudioTracksAvailable": (self.AUDIOTRACKS_AVAILABLE, (iPlayableService.evUpdatedInfo,)),
			"SubtitlesAvailable": (self.SUBTITLES_AVAILABLE, (iPlayableService.evUpdatedInfo,)),
			"Editmode": (self.EDITMODE, (iPlayableService.evUpdatedInfo,)),
			"IsStream": (self.IS_STREAM, (iPlayableService.evUpdatedInfo,)),
			"IsSD": (self.IS_SD, (iPlayableService.evVideoSizeChanged,)),
			"IsHD": (self.IS_HD, (iPlayableService.evVideoSizeChanged,)),
			"IsSDAndWidescreen": (self.IS_SD_AND_WIDESCREEN, (iPlayableService.evVideoSizeChanged,)),
			"IsSDAndNotWidescreen": (self.IS_SD_AND_NOT_WIDESCREEN, (iPlayableService.evVideoSizeChanged,)),
		}[type]

	@staticmethod
	def _readProcInt(procentry, base=10):
		# Read an integer from a /proc/stb entry, or return None when the
		# entry does not exist (receivers without that interface).
		# xres/yres entries are hexadecimal, hence the 'base' parameter.
		if not path.exists(procentry):
			return None
		f = open(procentry, "r")
		try:
			return int(f.read(), base)
		finally:
			# Close the handle even if the content fails to parse.
			f.close()

	def getServiceInfoString(self, info, what, convert = lambda x: "%d" % x):
		"""Fetch 'what' from the service info, rendered as a string.

		-1 means "not available"; -2 means the value must be fetched as a
		string via getInfoString(); anything else is formatted by 'convert'.
		"""
		v = info.getInfo(what)
		if v == -1:
			return "N/A"
		if v == -2:
			return info.getInfoString(what)
		return convert(v)

	@cached
	def getBoolean(self):
		service = self.source.service
		info = service and service.info()
		if not info:
			return False

		video_height = None
		video_aspect = None
		if self.type in (self.IS_SD, self.IS_HD, self.IS_SD_AND_WIDESCREEN, self.IS_SD_AND_NOT_WIDESCREEN):
			# Prefer the decoder-reported geometry; fall back to the
			# service information when /proc is missing or reports 0.
			video_height = self._readProcInt("/proc/stb/vmpeg/0/yres", 16)
			video_aspect = self._readProcInt("/proc/stb/vmpeg/0/aspect")
			if not video_height:
				video_height = info.getInfo(iServiceInformation.sVideoHeight)
			if not video_aspect:
				video_aspect = info.getInfo(iServiceInformation.sAspect)

		if self.type == self.HAS_TELETEXT:
			tpid = info.getInfo(iServiceInformation.sTXTPID)
			return tpid != -1
		elif self.type == self.IS_MULTICHANNEL:
			# FIXME. but currently iAudioTrackInfo doesn't provide more information.
			audio = service.audioTracks()
			if audio:
				n = audio.getNumberOfTracks()
				idx = 0
				while idx < n:
					i = audio.getTrackInfo(idx)
					description = i.getDescription()
					if "AC3" in description or "AC-3" in description or "DTS" in description:
						return True
					idx += 1
			return False
		elif self.type == self.IS_CRYPTED:
			return info.getInfo(iServiceInformation.sIsCrypted) == 1
		elif self.type == self.IS_WIDESCREEN:
			return info.getInfo(iServiceInformation.sAspect) in WIDESCREEN
		elif self.type == self.SUBSERVICES_AVAILABLE:
			subservices = service.subServices()
			return subservices and subservices.getNumberOfSubservices() > 0
		elif self.type == self.HAS_HBBTV:
			return info.getInfoString(iServiceInformation.sHBBTVUrl) != ""
		elif self.type == self.AUDIOTRACKS_AVAILABLE:
			# Note: "> 1" means a single audio track does not count as a
			# choice worth surfacing in the skin.
			audio = service.audioTracks()
			return audio and audio.getNumberOfTracks() > 1
		elif self.type == self.SUBTITLES_AVAILABLE:
			subtitle = service and service.subtitle()
			subtitlelist = subtitle and subtitle.getSubtitleList()
			if subtitlelist:
				return len(subtitlelist) > 0
			return False
		elif self.type == self.EDITMODE:
			return hasattr(self.source, "editmode") and not not self.source.editmode
		elif self.type == self.IS_STREAM:
			return service.streamed() is not None
		elif self.type == self.IS_SD:
			return video_height < 720
		elif self.type == self.IS_HD:
			return video_height >= 720
		elif self.type == self.IS_SD_AND_WIDESCREEN:
			return video_height < 720 and video_aspect in WIDESCREEN
		elif self.type == self.IS_SD_AND_NOT_WIDESCREEN:
			return video_height < 720 and video_aspect not in WIDESCREEN
		return False

	boolean = property(getBoolean)

	@cached
	def getText(self):
		service = self.source.service
		info = service and service.info()
		if not info:
			return ""

		if self.type == self.XRES:
			video_width = self._readProcInt("/proc/stb/vmpeg/0/xres", 16)
			if not video_width:
				# Fall back to the service info; this may yield "N/A",
				# so return the string directly (previously it was fed
				# through "%d" %, raising TypeError for string values).
				return self.getServiceInfoString(info, iServiceInformation.sVideoWidth)
			return "%d" % video_width
		elif self.type == self.YRES:
			video_height = self._readProcInt("/proc/stb/vmpeg/0/yres", 16)
			if not video_height:
				return self.getServiceInfoString(info, iServiceInformation.sVideoHeight)
			return "%d" % video_height
		elif self.type == self.APID:
			return self.getServiceInfoString(info, iServiceInformation.sAudioPID)
		elif self.type == self.VPID:
			return self.getServiceInfoString(info, iServiceInformation.sVideoPID)
		elif self.type == self.PCRPID:
			return self.getServiceInfoString(info, iServiceInformation.sPCRPID)
		elif self.type == self.PMTPID:
			return self.getServiceInfoString(info, iServiceInformation.sPMTPID)
		elif self.type == self.TXTPID:
			return self.getServiceInfoString(info, iServiceInformation.sTXTPID)
		elif self.type == self.TSID:
			return self.getServiceInfoString(info, iServiceInformation.sTSID)
		elif self.type == self.ONID:
			return self.getServiceInfoString(info, iServiceInformation.sONID)
		elif self.type == self.SID:
			return self.getServiceInfoString(info, iServiceInformation.sSID)
		elif self.type == self.FRAMERATE:
			# Rate value is scaled by 1000 (e.g. 50000 -> "50 fps");
			# rounding via +500 before the integer division.
			convert = lambda x: "%d fps" % ((x + 500) / 1000)
			video_rate = self._readProcInt("/proc/stb/vmpeg/0/framerate")
			if not video_rate:
				# BUGFIX: this branch used to return a (value, lambda)
				# tuple instead of a formatted string.
				return self.getServiceInfoString(info, iServiceInformation.sFrameRate, convert)
			return convert(video_rate)
		elif self.type == self.TRANSFERBPS:
			return self.getServiceInfoString(info, iServiceInformation.sTransferBPS, lambda x: "%d kB/s" % (x/1024))
		elif self.type == self.HAS_HBBTV:
			return info.getInfoString(iServiceInformation.sHBBTVUrl)
		return ""

	text = property(getText)

	@cached
	def getValue(self):
		"""Return the raw value for XRES/YRES/FRAMERATE, or -1.

		Historical quirk kept for skin compatibility: the value is
		returned as a string (str(...)), not as an int.
		"""
		service = self.source.service
		info = service and service.info()
		if not info:
			return -1

		if self.type == self.XRES:
			video_width = self._readProcInt("/proc/stb/vmpeg/0/xres", 16)
			if not video_width:
				video_width = info.getInfo(iServiceInformation.sVideoWidth)
			return str(video_width)
		elif self.type == self.YRES:
			video_height = self._readProcInt("/proc/stb/vmpeg/0/yres", 16)
			if not video_height:
				video_height = info.getInfo(iServiceInformation.sVideoHeight)
			return str(video_height)
		elif self.type == self.FRAMERATE:
			video_rate = self._readProcInt("/proc/stb/vmpeg/0/framerate")
			if not video_rate:
				video_rate = info.getInfo(iServiceInformation.sFrameRate)
			return str(video_rate)
		return -1

	value = property(getValue)

	def changed(self, what):
		# Only re-evaluate on the events this converter subscribed to.
		if what[0] != self.CHANGED_SPECIFIC or what[1] in self.interesting_events:
			Converter.changed(self, what)
| gpl-2.0 |
dstanek/keystone | keystone/contrib/federation/schema.py | 14 | 2401 | # Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from keystone.common import validation
from keystone.common.validation import parameter_types
# Reusable JSON-schema fragment: an object carrying exactly one string 'id'.
basic_property_id = {
    'type': 'object',
    'properties': {
        'id': {
            'type': 'string'
        }
    },
    'required': ['id'],
    'additionalProperties': False
}

# Request body schema for creating a SAML assertion: an 'identity' block
# (token reference, optional auth methods list) plus a 'scope' naming the
# target service provider.  Both parts are mandatory.
saml_create = {
    'type': 'object',
    'properties': {
        'identity': {
            'type': 'object',
            'properties': {
                'token': basic_property_id,
                'methods': {
                    'type': 'array'
                }
            },
            'required': ['token'],
            'additionalProperties': False
        },
        'scope': {
            'type': 'object',
            'properties': {
                'service_provider': basic_property_id
            },
            'required': ['service_provider'],
            'additionalProperties': False
        },
    },
    'required': ['identity', 'scope'],
    'additionalProperties': False
}

# Property set shared by the service-provider create and update schemas.
_service_provider_properties = {
    # NOTE(rodrigods): The database accepts URLs with 256 as max length,
    # but parameter_types.url uses 225 as max length.
    'auth_url': parameter_types.url,
    'sp_url': parameter_types.url,
    'description': validation.nullable(parameter_types.description),
    'enabled': parameter_types.boolean,
    'relay_state_prefix': validation.nullable(parameter_types.description)
}

service_provider_create = {
    'type': 'object',
    'properties': _service_provider_properties,
    # NOTE(rodrigods): 'id' is not required since it is passed in the URL
    'required': ['auth_url', 'sp_url'],
    'additionalProperties': False
}

service_provider_update = {
    'type': 'object',
    'properties': _service_provider_properties,
    # Make sure at least one property is being updated
    'minProperties': 1,
    'additionalProperties': False
}
| apache-2.0 |
bhansa/fireball | pyvenv/Lib/site-packages/wheel/wininst2wheel.py | 62 | 7772 | #!/usr/bin/env python
import distutils.dist
import os.path
import re
import sys
import tempfile
import zipfile
from argparse import ArgumentParser
from glob import iglob
from shutil import rmtree
import wheel.bdist_wheel
from wheel.archive import archive_wheelfile
# Matches an embedded egg-info directory name of the form:
#   name-ver(-pyver)(-arch).egg-info
# BUGFIX: the '.' before 'egg-info' is now escaped; previously it matched
# any character (e.g. "fooXegg-info"), re.VERBOSE does not change that.
egg_info_re = re.compile(r'''(^|/)(?P<name>[^/]+?)-(?P<ver>.+?)
    (-(?P<pyver>.+?))?(-(?P<arch>.+?))?\.egg-info(/|$)''', re.VERBOSE)


def parse_info(wininfo_name, egginfo_name):
    """Extract metadata from filenames.

    Extracts the 4 metadata items needed (name, version, pyversion, arch)
    from the installer filename and the name of the egg-info directory
    embedded in the zipfile (if any).

    The egginfo filename has the format::

        name-ver(-pyver)(-arch).egg-info

    The installer filename has the format::

        name-ver.arch(-pyver).exe

    Some things to note:

    1. The installer filename is not definitive. An installer can be renamed
       and work perfectly well as an installer. So more reliable data should
       be used whenever possible.
    2. The egg-info data should be preferred for the name and version, because
       these come straight from the distutils metadata, and are mandatory.
    3. The pyver from the egg-info data should be ignored, as it is
       constructed from the version of Python used to build the installer,
       which is irrelevant - the installer filename is correct here (even to
       the point that when it's not there, any version is implied).
    4. The architecture must be taken from the installer filename, as it is
       not included in the egg-info data.
    5. Architecture-neutral installers still have an architecture because the
       installer format itself (being executable) is architecture-specific. We
       should therefore ignore the architecture if the content is pure-python.

    :raises ValueError: if either filename cannot be parsed.
    """
    egginfo = None
    if egginfo_name:
        egginfo = egg_info_re.search(egginfo_name)
        if not egginfo:
            raise ValueError("Egg info filename %s is not valid" % (egginfo_name,))

    # Parse the wininst filename
    # 1. Distribution name (up to the first '-')
    w_name, sep, rest = wininfo_name.partition('-')
    if not sep:
        raise ValueError("Installer filename %s is not valid" % (wininfo_name,))

    # Strip '.exe'
    rest = rest[:-4]
    # 2. Python version (from the last '-', must start with 'py')
    rest2, sep, w_pyver = rest.rpartition('-')
    if sep and w_pyver.startswith('py'):
        rest = rest2
        w_pyver = w_pyver.replace('.', '')
    else:
        # Not version specific - use py2.py3. While it is possible that
        # pure-Python code is not compatible with both Python 2 and 3, there
        # is no way of knowing from the wininst format, so we assume the best
        # here (the user can always manually rename the wheel to be more
        # restrictive if needed).
        w_pyver = 'py2.py3'

    # 3. Version and architecture
    w_ver, sep, w_arch = rest.rpartition('.')
    if not sep:
        raise ValueError("Installer filename %s is not valid" % (wininfo_name,))

    # The egg-info metadata is authoritative for name/version (note 2 above).
    if egginfo:
        w_name = egginfo.group('name')
        w_ver = egginfo.group('ver')

    return dict(name=w_name, ver=w_ver, arch=w_arch, pyver=w_pyver)
def bdist_wininst2wheel(path, dest_dir=os.path.curdir):
    """Convert the bdist_wininst installer at *path* into a wheel in *dest_dir*.

    The wininst .exe is opened as a zip archive, its members are re-rooted
    into wheel layout, extracted to a temp directory, and then repackaged
    using bdist_wheel's metadata machinery.
    """
    bdw = zipfile.ZipFile(path)

    # Search for egg-info in the archive
    egginfo_name = None
    for filename in bdw.namelist():
        if '.egg-info' in filename:
            egginfo_name = filename
            break

    info = parse_info(os.path.basename(path), egginfo_name)

    # A top-level PLATLIB directory marks platform-specific content.
    root_is_purelib = True
    for zipinfo in bdw.infolist():
        if zipinfo.filename.startswith('PLATLIB'):
            root_is_purelib = False
            break
    if root_is_purelib:
        paths = {'purelib': ''}
    else:
        paths = {'platlib': ''}

    dist_info = "%(name)s-%(ver)s" % info
    datadir = "%s.data/" % dist_info

    # rewrite paths to trick ZipFile into extracting an egg
    # XXX grab wininst .ini - between .exe, padding, and first zip file.
    members = []
    egginfo_name = ''
    for zipinfo in bdw.infolist():
        key, basename = zipinfo.filename.split('/', 1)
        key = key.lower()
        # The purelib/platlib root maps to '', everything else (SCRIPTS,
        # DATA, ...) lands under the <dist>.data/<key>/ directory.
        basepath = paths.get(key, None)
        if basepath is None:
            basepath = datadir + key.lower() + '/'
        oldname = zipinfo.filename
        newname = basepath + basename
        zipinfo.filename = newname
        # Patch ZipFile's internal name index so extractall() below sees
        # the rewritten paths.  NOTE(review): relies on the undocumented
        # NameToInfo attribute of zipfile.ZipFile.
        del bdw.NameToInfo[oldname]
        bdw.NameToInfo[newname] = zipinfo
        # Collect member names, but omit '' (from an entry like "PLATLIB/"
        if newname:
            members.append(newname)
        # Remember egg-info name for the egg2dist call below
        if not egginfo_name:
            if newname.endswith('.egg-info'):
                egginfo_name = newname
            elif '.egg-info/' in newname:
                egginfo_name, sep, _ = newname.rpartition('/')

    dir = tempfile.mkdtemp(suffix="_b2w")
    bdw.extractall(dir, members)

    # egg2wheel
    abi = 'none'
    pyver = info['pyver']
    arch = (info['arch'] or 'any').replace('.', '_').replace('-', '_')
    # Wininst installers always have arch even if they are not
    # architecture-specific (because the format itself is).
    # So, assume the content is architecture-neutral if root is purelib.
    if root_is_purelib:
        arch = 'any'
    # If the installer is architecture-specific, it's almost certainly also
    # CPython-specific.
    if arch != 'any':
        pyver = pyver.replace('py', 'cp')
    wheel_name = '-'.join((
        dist_info,
        pyver,
        abi,
        arch
    ))
    # Architecture-specific content needs the tag-overriding subclass;
    # pure-Python content can use stock bdist_wheel.
    if root_is_purelib:
        bw = wheel.bdist_wheel.bdist_wheel(distutils.dist.Distribution())
    else:
        bw = _bdist_wheel_tag(distutils.dist.Distribution())

    bw.root_is_pure = root_is_purelib
    bw.python_tag = pyver
    bw.plat_name_supplied = True
    bw.plat_name = info['arch'] or 'any'

    if not root_is_purelib:
        bw.full_tag_supplied = True
        bw.full_tag = (pyver, abi, arch)

    # Convert the extracted egg-info into .dist-info, write the WHEEL and
    # RECORD files, then zip everything up as <wheel_name>.whl.
    dist_info_dir = os.path.join(dir, '%s.dist-info' % dist_info)
    bw.egg2dist(os.path.join(dir, egginfo_name), dist_info_dir)
    bw.write_wheelfile(dist_info_dir, generator='wininst2wheel')
    bw.write_record(dir, dist_info_dir)

    archive_wheelfile(os.path.join(dest_dir, wheel_name), dir)
    rmtree(dir)
class _bdist_wheel_tag(wheel.bdist_wheel.bdist_wheel):
    """bdist_wheel subclass whose wheel tag can be forced by the caller.

    The default bdist_wheel implementation derives the python and abi tags
    from the running interpreter, which is not suitable when repackaging
    prebuilt binaries; this subclass lets the client supply the full
    (pytag, soabitag, plattag) triple explicitly.
    """

    full_tag_supplied = False
    full_tag = None  # None or a (pytag, soabitag, plattag) triple

    def get_tag(self):
        # Use the stock implementation unless an explicit tag was supplied.
        if not self.full_tag_supplied or self.full_tag is None:
            return super(_bdist_wheel_tag, self).get_tag()
        return self.full_tag
def _convert_one(installer, dest_dir, verbose):
    # Convert a single installer file, with optional progress output.
    if verbose:
        sys.stdout.write("{0}... ".format(installer))
    bdist_wininst2wheel(installer, dest_dir)
    if verbose:
        sys.stdout.write("OK\n")


def main():
    """Command-line entry point: convert wininst installers into wheels."""
    parser = ArgumentParser()
    parser.add_argument('installers', nargs='*', help="Installers to convert")
    parser.add_argument('--dest-dir', '-d', default=os.path.curdir,
                        help="Directory to store wheels (default %(default)s)")
    parser.add_argument('--verbose', '-v', action='store_true')
    args = parser.parse_args()
    # Each positional argument is treated as a glob pattern and expanded.
    for pattern in args.installers:
        for installer in iglob(pattern):
            _convert_one(installer, args.dest_dir, args.verbose)


if __name__ == "__main__":
    main()
| gpl-3.0 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.