repo_name stringlengths 5 100 | path stringlengths 4 375 | copies stringclasses 991 values | size stringlengths 4 7 | content stringlengths 666 1M | license stringclasses 15 values |
|---|---|---|---|---|---|
pinterb/st2 | st2common/st2common/persistence/trigger.py | 2 | 1627 | # Licensed to the StackStorm, Inc ('StackStorm') under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from oslo.config import cfg
from st2common import transport
from st2common.models.db.trigger import triggertype_access, trigger_access, triggerinstance_access
from st2common.persistence.base import (Access, ContentPackResource)
class TriggerType(ContentPackResource):
    """Persistence access layer for TriggerType documents."""

    impl = triggertype_access

    @classmethod
    def _get_impl(cls):
        """Return the database access object backing this resource."""
        return cls.impl
class Trigger(ContentPackResource):
    """Persistence access layer for Trigger documents.

    Also owns a lazily-created publisher used to announce trigger
    create/update/delete events on the message bus.
    """

    impl = trigger_access
    publisher = None

    @classmethod
    def _get_impl(cls):
        """Return the database access object backing this resource."""
        return cls.impl

    @classmethod
    def _get_publisher(cls):
        """Create the CUD publisher on first use and cache it on the class."""
        if cls.publisher is None:
            cls.publisher = transport.reactor.TriggerCUDPublisher(
                cfg.CONF.messaging.url)
        return cls.publisher
class TriggerInstance(Access):
    """Persistence access layer for TriggerInstance documents."""

    impl = triggerinstance_access

    @classmethod
    def _get_impl(cls):
        """Return the database access object backing this resource."""
        return cls.impl
| apache-2.0 |
mhvk/numpy | numpy/f2py/tests/test_array_from_pyobj.py | 7 | 22810 | import os
import sys
import copy
import platform
import pytest
import numpy as np
from numpy.testing import assert_, assert_equal
from numpy.core.multiarray import typeinfo
from . import util
wrap = None  # handle to the compiled test extension; populated by setup_module()


def setup_module():
    """
    Build the required testing extension module
    """
    global wrap

    # Check compiler availability first
    if not util.has_c_compiler():
        pytest.skip("No C compiler available")

    if wrap is None:
        # numpy.distutils configuration snippet executed by the build helper;
        # its content is runtime data, not test code.
        config_code = """
        config.add_extension('test_array_from_pyobj_ext',
                             sources=['wrapmodule.c', 'fortranobject.c'],
                             define_macros=[])
        """
        d = os.path.dirname(__file__)
        src = [os.path.join(d, 'src', 'array_from_pyobj', 'wrapmodule.c'),
               os.path.join(d, '..', 'src', 'fortranobject.c'),
               os.path.join(d, '..', 'src', 'fortranobject.h')]
        # Compile the C sources into an importable extension module.
        wrap = util.build_module_distutils(src, config_code,
                                           'test_array_from_pyobj_ext')
def flags_info(arr):
    """Return the names of the NPY flags set on *arr* (queried via wrap)."""
    return flags2names(wrap.array_attrs(arr)[6])
def flags2names(flags):
    """Translate a flags bitmask into the list of matching flag names."""
    candidates = ('CONTIGUOUS', 'FORTRAN', 'OWNDATA', 'ENSURECOPY',
                  'ENSUREARRAY', 'ALIGNED', 'NOTSWAPPED', 'WRITEABLE',
                  'WRITEBACKIFCOPY', 'UPDATEIFCOPY', 'BEHAVED', 'BEHAVED_RO',
                  'CARRAY', 'FARRAY')
    # Flags unknown to the wrapper default to 0 and therefore never match.
    return [name for name in candidates
            if abs(flags) & getattr(wrap, name, 0)]
class Intent:
def __init__(self, intent_list=[]):
self.intent_list = intent_list[:]
flags = 0
for i in intent_list:
if i == 'optional':
flags |= wrap.F2PY_OPTIONAL
else:
flags |= getattr(wrap, 'F2PY_INTENT_' + i.upper())
self.flags = flags
def __getattr__(self, name):
name = name.lower()
if name == 'in_':
name = 'in'
return self.__class__(self.intent_list + [name])
def __str__(self):
return 'intent(%s)' % (','.join(self.intent_list))
def __repr__(self):
return 'Intent(%r)' % (self.intent_list)
def is_intent(self, *names):
for name in names:
if name not in self.intent_list:
return False
return True
def is_intent_exact(self, *names):
return len(self.intent_list) == len(names) and self.is_intent(*names)
# Singleton used by the tests to build intent specs fluently,
# e.g. ``intent.in_.c.copy``.
intent = Intent()

# Scalar type names always exercised by the tests (extended below on
# platforms where long-double types behave).
_type_names = ['BOOL', 'BYTE', 'UBYTE', 'SHORT', 'USHORT', 'INT', 'UINT',
               'LONG', 'ULONG', 'LONGLONG', 'ULONGLONG',
               'FLOAT', 'DOUBLE', 'CFLOAT']

# Map each type name to the names of types that may safely cast to it.
_cast_dict = {'BOOL': ['BOOL']}
_cast_dict['BYTE'] = _cast_dict['BOOL'] + ['BYTE']
_cast_dict['UBYTE'] = _cast_dict['BOOL'] + ['UBYTE']
# NOTE(review): the two assignments below immediately overwrite the entries
# above, dropping BOOL from the BYTE/UBYTE cast lists — present as-is
# upstream, but worth confirming it is intentional.
_cast_dict['BYTE'] = ['BYTE']
_cast_dict['UBYTE'] = ['UBYTE']
_cast_dict['SHORT'] = _cast_dict['BYTE'] + ['UBYTE', 'SHORT']
_cast_dict['USHORT'] = _cast_dict['UBYTE'] + ['BYTE', 'USHORT']
_cast_dict['INT'] = _cast_dict['SHORT'] + ['USHORT', 'INT']
_cast_dict['UINT'] = _cast_dict['USHORT'] + ['SHORT', 'UINT']
_cast_dict['LONG'] = _cast_dict['INT'] + ['LONG']
_cast_dict['ULONG'] = _cast_dict['UINT'] + ['ULONG']
_cast_dict['LONGLONG'] = _cast_dict['LONG'] + ['LONGLONG']
_cast_dict['ULONGLONG'] = _cast_dict['ULONG'] + ['ULONGLONG']
_cast_dict['FLOAT'] = _cast_dict['SHORT'] + ['USHORT', 'FLOAT']
_cast_dict['DOUBLE'] = _cast_dict['INT'] + ['UINT', 'FLOAT', 'DOUBLE']
_cast_dict['CFLOAT'] = _cast_dict['FLOAT'] + ['CFLOAT']

# 32 bit system malloc typically does not provide the alignment required by
# 16 byte long double types this means the inout intent cannot be satisfied
# and several tests fail as the alignment flag can be randomly true or false
# when numpy gains an aligned allocator the tests could be enabled again
#
# Furthermore, on macOS ARM64, LONGDOUBLE is an alias for DOUBLE.
if ((np.intp().dtype.itemsize != 4 or np.clongdouble().dtype.alignment <= 8) and
        sys.platform != 'win32' and
        (platform.system(), platform.processor()) != ('Darwin', 'arm')):
    _type_names.extend(['LONGDOUBLE', 'CDOUBLE', 'CLONGDOUBLE'])
    _cast_dict['LONGDOUBLE'] = _cast_dict['LONG'] + \
        ['ULONG', 'FLOAT', 'DOUBLE', 'LONGDOUBLE']
    _cast_dict['CLONGDOUBLE'] = _cast_dict['LONGDOUBLE'] + \
        ['CFLOAT', 'CDOUBLE', 'CLONGDOUBLE']
    _cast_dict['CDOUBLE'] = _cast_dict['DOUBLE'] + ['CFLOAT', 'CDOUBLE']
class Type:
    """Wrapper describing one NumPy scalar type, interned by name.

    ``Type(name_or_dtype)`` returns a cached instance per upper-cased type
    name.  Exposed attributes: ``NAME``, ``type_num`` (NPY_* enum value),
    ``dtype``, ``type`` (scalar class), ``elsize`` (element size in bytes)
    and ``dtypechar``.
    """

    _type_cache = {}

    def __new__(cls, name):
        if isinstance(name, np.dtype):
            # Map a dtype back to its canonical typeinfo name.
            dtype0 = name
            name = None
            for n, i in typeinfo.items():
                if not isinstance(i, type) and dtype0.type is i.type:
                    name = n
                    break
        obj = cls._type_cache.get(name.upper(), None)
        if obj is not None:
            return obj
        obj = object.__new__(cls)
        obj._init(name)
        cls._type_cache[name.upper()] = obj
        return obj

    def _init(self, name):
        self.NAME = name.upper()
        info = typeinfo[self.NAME]
        self.type_num = getattr(wrap, 'NPY_' + self.NAME)
        assert_equal(self.type_num, info.num)
        self.dtype = np.dtype(info.type)
        self.type = info.type
        # Integer division: element sizes are whole bytes.  True division
        # would make elsize a float and break strict int comparisons.
        self.elsize = info.bits // 8
        self.dtypechar = info.char

    def cast_types(self):
        """Types this one can safely cast to/from (see _cast_dict)."""
        return [self.__class__(_m) for _m in _cast_dict[self.NAME]]

    def all_types(self):
        """All types enabled on this platform."""
        return [self.__class__(_m) for _m in _type_names]

    def smaller_types(self):
        """Types with strictly smaller alignment than this one."""
        bits = typeinfo[self.NAME].alignment
        return [Type(name) for name in _type_names
                if typeinfo[name].alignment < bits]

    def equal_types(self):
        """Other types sharing this type's alignment."""
        bits = typeinfo[self.NAME].alignment
        return [Type(name) for name in _type_names
                if name != self.NAME and typeinfo[name].alignment == bits]

    def larger_types(self):
        """Types with strictly larger alignment than this one."""
        bits = typeinfo[self.NAME].alignment
        return [Type(name) for name in _type_names
                if typeinfo[name].alignment > bits]
class Array:
    """Build an array through ``wrap.call`` and cross-check every attribute
    against an equivalent array constructed with plain NumPy calls.
    Raises via ``assert_`` on any mismatch between the two.
    """
    def __init__(self, typ, dims, intent, obj):
        self.type = typ
        self.dims = dims
        self.intent = intent
        self.obj_copy = copy.deepcopy(obj)  # pristine copy of the input object
        self.obj = obj

        # arr.dtypechar may be different from typ.dtypechar
        self.arr = wrap.call(typ.type_num, dims, intent.flags, obj)

        assert_(isinstance(self.arr, np.ndarray), repr(type(self.arr)))

        self.arr_attr = wrap.array_attrs(self.arr)

        if len(dims) > 1:
            # For multi-dimensional arrays the memory order must follow the
            # intent: C order for intent(c), Fortran order otherwise.
            if self.intent.is_intent('c'):
                assert_(intent.flags & wrap.F2PY_INTENT_C)
                assert_(not self.arr.flags['FORTRAN'],
                        repr((self.arr.flags, getattr(obj, 'flags', None))))
                assert_(self.arr.flags['CONTIGUOUS'])
                assert_(not self.arr_attr[6] & wrap.FORTRAN)
            else:
                assert_(not intent.flags & wrap.F2PY_INTENT_C)
                assert_(self.arr.flags['FORTRAN'])
                assert_(not self.arr.flags['CONTIGUOUS'])
                assert_(self.arr_attr[6] & wrap.FORTRAN)

        if obj is None:
            # Hidden/cache-only arrays have no Python-side reference array.
            self.pyarr = None
            self.pyarr_attr = None
            return

        if intent.is_intent('cache'):
            assert_(isinstance(obj, np.ndarray), repr(type(obj)))
            self.pyarr = np.array(obj).reshape(*dims).copy()
        else:
            self.pyarr = np.array(
                np.array(obj, dtype=typ.dtypechar).reshape(*dims),
                order=self.intent.is_intent('c') and 'C' or 'F')
            assert_(self.pyarr.dtype == typ,
                    repr((self.pyarr.dtype, typ)))
        self.pyarr.setflags(write=self.arr.flags['WRITEABLE'])
        assert_(self.pyarr.flags['OWNDATA'], (obj, intent))
        self.pyarr_attr = wrap.array_attrs(self.pyarr)

        if len(dims) > 1:
            if self.intent.is_intent('c'):
                assert_(not self.pyarr.flags['FORTRAN'])
                assert_(self.pyarr.flags['CONTIGUOUS'])
                assert_(not self.pyarr_attr[6] & wrap.FORTRAN)
            else:
                assert_(self.pyarr.flags['FORTRAN'])
                assert_(not self.pyarr.flags['CONTIGUOUS'])
                assert_(self.pyarr_attr[6] & wrap.FORTRAN)

        assert_(self.arr_attr[1] == self.pyarr_attr[1])  # nd
        assert_(self.arr_attr[2] == self.pyarr_attr[2])  # dimensions
        if self.arr_attr[1] <= 1:
            assert_(self.arr_attr[3] == self.pyarr_attr[3],
                    repr((self.arr_attr[3], self.pyarr_attr[3],
                          self.arr.tobytes(), self.pyarr.tobytes())))  # strides
        assert_(self.arr_attr[5][-2:] == self.pyarr_attr[5][-2:],
                repr((self.arr_attr[5], self.pyarr_attr[5])))  # descr
        assert_(self.arr_attr[6] == self.pyarr_attr[6],
                repr((self.arr_attr[6], self.pyarr_attr[6],
                      flags2names(0 * self.arr_attr[6] - self.pyarr_attr[6]),
                      flags2names(self.arr_attr[6]), intent)))  # flags

        if intent.is_intent('cache'):
            # Cache arrays only guarantee *at least* elsize bytes per item.
            assert_(self.arr_attr[5][3] >= self.type.elsize,
                    repr((self.arr_attr[5][3], self.type.elsize)))
        else:
            assert_(self.arr_attr[5][3] == self.type.elsize,
                    repr((self.arr_attr[5][3], self.type.elsize)))
            assert_(self.arr_equal(self.pyarr, self.arr))

            if isinstance(self.obj, np.ndarray):
                if typ.elsize == Type(obj.dtype).elsize:
                    if not intent.is_intent('copy') and self.arr_attr[1] <= 1:
                        assert_(self.has_shared_memory())

    def arr_equal(self, arr1, arr2):
        # Element-wise equality; shapes must match first.
        if arr1.shape != arr2.shape:
            return False
        return (arr1 == arr2).all()

    def __str__(self):
        return str(self.arr)

    def has_shared_memory(self):
        """Check that created array shares data with input array.
        """
        if self.obj is self.arr:
            return True
        if not isinstance(self.obj, np.ndarray):
            return False
        obj_attr = wrap.array_attrs(self.obj)
        # attr[0] is the data pointer reported by the wrapper.
        return obj_attr[0] == self.arr_attr[0]
class TestIntent:
    """Unit tests for the fluent Intent helper."""

    def test_in_out(self):
        assert_equal(str(intent.in_.out), 'intent(in,out)')
        in_c = intent.in_.c
        assert_(in_c.is_intent('c'))
        assert_(not in_c.is_intent_exact('c'))
        assert_(in_c.is_intent_exact('c', 'in'))
        assert_(in_c.is_intent_exact('in', 'c'))
        assert_(not intent.in_.is_intent('c'))
class TestSharedMemory:
    """For every supported scalar type (class-scoped fixture), check that
    arrays passed through ``wrap.call`` share or copy memory exactly as the
    given intent specification demands.
    """
    num2seq = [1, 2]                      # 1-D sample data
    num23seq = [[1, 2, 3], [4, 5, 6]]     # 2x3 sample data

    @pytest.fixture(autouse=True, scope='class', params=_type_names)
    def setup_type(self, request):
        # Bind the parametrized type and an Array factory onto the class.
        request.cls.type = Type(request.param)
        request.cls.array = lambda self, dims, intent, obj: \
            Array(Type(request.param), dims, intent, obj)

    def test_in_from_2seq(self):
        # Plain sequences must always be copied.
        a = self.array([2], intent.in_, self.num2seq)
        assert_(not a.has_shared_memory())

    def test_in_from_2casttype(self):
        for t in self.type.cast_types():
            obj = np.array(self.num2seq, dtype=t.dtype)
            a = self.array([len(self.num2seq)], intent.in_, obj)
            if t.elsize == self.type.elsize:
                # Same element size: no copy needed.
                assert_(
                    a.has_shared_memory(), repr((self.type.dtype, t.dtype)))
            else:
                assert_(not a.has_shared_memory(), repr(t.dtype))

    @pytest.mark.parametrize('write', ['w', 'ro'])
    @pytest.mark.parametrize('order', ['C', 'F'])
    @pytest.mark.parametrize('inp', ['2seq', '23seq'])
    def test_in_nocopy(self, write, order, inp):
        """Test if intent(in) array can be passed without copies
        """
        seq = getattr(self, 'num' + inp)
        obj = np.array(seq, dtype=self.type.dtype, order=order)
        obj.setflags(write=(write == 'w'))
        a = self.array(obj.shape, ((order=='C' and intent.in_.c) or intent.in_), obj)
        assert a.has_shared_memory()

    def test_inout_2seq(self):
        obj = np.array(self.num2seq, dtype=self.type.dtype)
        a = self.array([len(self.num2seq)], intent.inout, obj)
        assert_(a.has_shared_memory())

        # intent(inout) requires a proper ndarray, not a plain sequence.
        try:
            a = self.array([2], intent.in_.inout, self.num2seq)
        except TypeError as msg:
            if not str(msg).startswith('failed to initialize intent'
                                       '(inout|inplace|cache) array'):
                raise
        else:
            raise SystemError('intent(inout) should have failed on sequence')

    def test_f_inout_23seq(self):
        obj = np.array(self.num23seq, dtype=self.type.dtype, order='F')
        shape = (len(self.num23seq), len(self.num23seq[0]))
        a = self.array(shape, intent.in_.inout, obj)
        assert_(a.has_shared_memory())

        # A C-ordered input cannot satisfy Fortran-order inout.
        obj = np.array(self.num23seq, dtype=self.type.dtype, order='C')
        shape = (len(self.num23seq), len(self.num23seq[0]))
        try:
            a = self.array(shape, intent.in_.inout, obj)
        except ValueError as msg:
            if not str(msg).startswith('failed to initialize intent'
                                       '(inout) array'):
                raise
        else:
            raise SystemError(
                'intent(inout) should have failed on improper array')

    def test_c_inout_23seq(self):
        obj = np.array(self.num23seq, dtype=self.type.dtype)
        shape = (len(self.num23seq), len(self.num23seq[0]))
        a = self.array(shape, intent.in_.c.inout, obj)
        assert_(a.has_shared_memory())

    def test_in_copy_from_2casttype(self):
        # intent(copy) must always produce a fresh buffer.
        for t in self.type.cast_types():
            obj = np.array(self.num2seq, dtype=t.dtype)
            a = self.array([len(self.num2seq)], intent.in_.copy, obj)
            assert_(not a.has_shared_memory(), repr(t.dtype))

    def test_c_in_from_23seq(self):
        a = self.array([len(self.num23seq), len(self.num23seq[0])],
                       intent.in_, self.num23seq)
        assert_(not a.has_shared_memory())

    def test_in_from_23casttype(self):
        # 2-D C-ordered input with Fortran-order intent: always copied.
        for t in self.type.cast_types():
            obj = np.array(self.num23seq, dtype=t.dtype)
            a = self.array([len(self.num23seq), len(self.num23seq[0])],
                           intent.in_, obj)
            assert_(not a.has_shared_memory(), repr(t.dtype))

    def test_f_in_from_23casttype(self):
        for t in self.type.cast_types():
            obj = np.array(self.num23seq, dtype=t.dtype, order='F')
            a = self.array([len(self.num23seq), len(self.num23seq[0])],
                           intent.in_, obj)
            if t.elsize == self.type.elsize:
                assert_(a.has_shared_memory(), repr(t.dtype))
            else:
                assert_(not a.has_shared_memory(), repr(t.dtype))

    def test_c_in_from_23casttype(self):
        for t in self.type.cast_types():
            obj = np.array(self.num23seq, dtype=t.dtype)
            a = self.array([len(self.num23seq), len(self.num23seq[0])],
                           intent.in_.c, obj)
            if t.elsize == self.type.elsize:
                assert_(a.has_shared_memory(), repr(t.dtype))
            else:
                assert_(not a.has_shared_memory(), repr(t.dtype))

    def test_f_copy_in_from_23casttype(self):
        for t in self.type.cast_types():
            obj = np.array(self.num23seq, dtype=t.dtype, order='F')
            a = self.array([len(self.num23seq), len(self.num23seq[0])],
                           intent.in_.copy, obj)
            assert_(not a.has_shared_memory(), repr(t.dtype))

    def test_c_copy_in_from_23casttype(self):
        for t in self.type.cast_types():
            obj = np.array(self.num23seq, dtype=t.dtype)
            a = self.array([len(self.num23seq), len(self.num23seq[0])],
                           intent.in_.c.copy, obj)
            assert_(not a.has_shared_memory(), repr(t.dtype))

    def test_in_cache_from_2casttype(self):
        for t in self.type.all_types():
            if t.elsize != self.type.elsize:
                continue
            obj = np.array(self.num2seq, dtype=t.dtype)
            shape = (len(self.num2seq),)
            a = self.array(shape, intent.in_.c.cache, obj)
            assert_(a.has_shared_memory(), repr(t.dtype))

            a = self.array(shape, intent.in_.cache, obj)
            assert_(a.has_shared_memory(), repr(t.dtype))

            obj = np.array(self.num2seq, dtype=t.dtype, order='F')
            a = self.array(shape, intent.in_.c.cache, obj)
            assert_(a.has_shared_memory(), repr(t.dtype))

            a = self.array(shape, intent.in_.cache, obj)
            assert_(a.has_shared_memory(), repr(t.dtype))

            # A reversed view is not a single contiguous segment.
            try:
                a = self.array(shape, intent.in_.cache, obj[::-1])
            except ValueError as msg:
                if not str(msg).startswith('failed to initialize'
                                           ' intent(cache) array'):
                    raise
            else:
                raise SystemError(
                    'intent(cache) should have failed on multisegmented array')

    def test_in_cache_from_2casttype_failure(self):
        for t in self.type.all_types():
            if t.elsize >= self.type.elsize:
                continue
            obj = np.array(self.num2seq, dtype=t.dtype)
            shape = (len(self.num2seq),)
            try:
                # input buffer too small for the cache: must raise ValueError
                self.array(shape, intent.in_.cache, obj)
            except ValueError as msg:
                if not str(msg).startswith('failed to initialize'
                                           ' intent(cache) array'):
                    raise
            else:
                raise SystemError(
                    'intent(cache) should have failed on smaller array')

    def test_cache_hidden(self):
        shape = (2,)
        a = self.array(shape, intent.cache.hide, None)
        assert_(a.arr.shape == shape)

        shape = (2, 3)
        a = self.array(shape, intent.cache.hide, None)
        assert_(a.arr.shape == shape)

        # Undefined (-1) dimensions cannot be hidden-allocated.
        shape = (-1, 3)
        try:
            a = self.array(shape, intent.cache.hide, None)
        except ValueError as msg:
            if not str(msg).startswith('failed to create intent'
                                       '(cache|hide)|optional array'):
                raise
        else:
            raise SystemError(
                'intent(cache) should have failed on undefined dimensions')

    def test_hidden(self):
        shape = (2,)
        a = self.array(shape, intent.hide, None)
        assert_(a.arr.shape == shape)
        assert_(a.arr_equal(a.arr, np.zeros(shape, dtype=self.type.dtype)))

        shape = (2, 3)
        a = self.array(shape, intent.hide, None)
        assert_(a.arr.shape == shape)
        assert_(a.arr_equal(a.arr, np.zeros(shape, dtype=self.type.dtype)))
        assert_(a.arr.flags['FORTRAN'] and not a.arr.flags['CONTIGUOUS'])

        shape = (2, 3)
        a = self.array(shape, intent.c.hide, None)
        assert_(a.arr.shape == shape)
        assert_(a.arr_equal(a.arr, np.zeros(shape, dtype=self.type.dtype)))
        assert_(not a.arr.flags['FORTRAN'] and a.arr.flags['CONTIGUOUS'])

        shape = (-1, 3)
        try:
            a = self.array(shape, intent.hide, None)
        except ValueError as msg:
            if not str(msg).startswith('failed to create intent'
                                       '(cache|hide)|optional array'):
                raise
        else:
            raise SystemError('intent(hide) should have failed'
                              ' on undefined dimensions')

    def test_optional_none(self):
        # Omitted optional arguments are allocated and zero-filled.
        shape = (2,)
        a = self.array(shape, intent.optional, None)
        assert_(a.arr.shape == shape)
        assert_(a.arr_equal(a.arr, np.zeros(shape, dtype=self.type.dtype)))

        shape = (2, 3)
        a = self.array(shape, intent.optional, None)
        assert_(a.arr.shape == shape)
        assert_(a.arr_equal(a.arr, np.zeros(shape, dtype=self.type.dtype)))
        assert_(a.arr.flags['FORTRAN'] and not a.arr.flags['CONTIGUOUS'])

        shape = (2, 3)
        a = self.array(shape, intent.c.optional, None)
        assert_(a.arr.shape == shape)
        assert_(a.arr_equal(a.arr, np.zeros(shape, dtype=self.type.dtype)))
        assert_(not a.arr.flags['FORTRAN'] and a.arr.flags['CONTIGUOUS'])

    def test_optional_from_2seq(self):
        obj = self.num2seq
        shape = (len(obj),)
        a = self.array(shape, intent.optional, obj)
        assert_(a.arr.shape == shape)
        assert_(not a.has_shared_memory())

    def test_optional_from_23seq(self):
        obj = self.num23seq
        shape = (len(obj), len(obj[0]))
        a = self.array(shape, intent.optional, obj)
        assert_(a.arr.shape == shape)
        assert_(not a.has_shared_memory())

        a = self.array(shape, intent.optional.c, obj)
        assert_(a.arr.shape == shape)
        assert_(not a.has_shared_memory())

    def test_inplace(self):
        obj = np.array(self.num23seq, dtype=self.type.dtype)
        assert_(not obj.flags['FORTRAN'] and obj.flags['CONTIGUOUS'])
        shape = obj.shape
        a = self.array(shape, intent.inplace, obj)
        assert_(obj[1][2] == a.arr[1][2], repr((obj, a.arr)))
        a.arr[1][2] = 54
        assert_(obj[1][2] == a.arr[1][2] ==
                np.array(54, dtype=self.type.dtype), repr((obj, a.arr)))
        assert_(a.arr is obj)
        assert_(obj.flags['FORTRAN'])  # obj attributes are changed inplace!
        assert_(not obj.flags['CONTIGUOUS'])

    def test_inplace_from_casttype(self):
        for t in self.type.cast_types():
            if t is self.type:
                continue
            obj = np.array(self.num23seq, dtype=t.dtype)
            assert_(obj.dtype.type == t.type)
            assert_(obj.dtype.type is not self.type.type)
            assert_(not obj.flags['FORTRAN'] and obj.flags['CONTIGUOUS'])
            shape = obj.shape
            a = self.array(shape, intent.inplace, obj)
            assert_(obj[1][2] == a.arr[1][2], repr((obj, a.arr)))
            a.arr[1][2] = 54
            assert_(obj[1][2] == a.arr[1][2] ==
                    np.array(54, dtype=self.type.dtype), repr((obj, a.arr)))
            assert_(a.arr is obj)
            assert_(obj.flags['FORTRAN'])  # obj attributes changed inplace!
            assert_(not obj.flags['CONTIGUOUS'])
            assert_(obj.dtype.type is self.type.type)  # obj changed inplace!
| bsd-3-clause |
rvmoura96/projeto-almoxarifado | myvenv/Lib/site-packages/pip/baseparser.py | 339 | 10465 | """Base option parser setup"""
from __future__ import absolute_import
import sys
import optparse
import os
import re
import textwrap
from distutils.util import strtobool
from pip._vendor.six import string_types
from pip._vendor.six.moves import configparser
from pip.locations import (
legacy_config_file, config_basename, running_under_virtualenv,
site_config_files
)
from pip.utils import appdirs, get_terminal_size
_environ_prefix_re = re.compile(r"^PIP_", re.I)
class PrettyHelpFormatter(optparse.IndentedHelpFormatter):
    """A prettier/less verbose help formatter for optparse."""

    def __init__(self, *args, **kwargs):
        # help position must be aligned with __init__.parseopts.description
        kwargs.update(
            max_help_position=30,
            indent_increment=1,
            width=get_terminal_size()[0] - 2,
        )
        optparse.IndentedHelpFormatter.__init__(self, *args, **kwargs)

    def format_option_strings(self, option):
        return self._format_option_strings(option, ' <%s>', ', ')

    def _format_option_strings(self, option, mvarfmt=' <%s>', optsep=', '):
        """
        Return a comma-separated list of option strings and metavars.

        :param option: tuple of (short opt, long opt), e.g: ('-f', '--format')
        :param mvarfmt: metavar format string - evaluated as mvarfmt % metavar
        :param optsep: separator
        """
        pieces = []
        if option._short_opts:
            pieces.append(option._short_opts[0])
        if option._long_opts:
            pieces.append(option._long_opts[0])
        if len(pieces) > 1:
            # Both variants present: separate them with optsep.
            pieces.insert(1, optsep)
        if option.takes_value():
            metavar = option.metavar or option.dest.lower()
            pieces.append(mvarfmt % metavar.lower())
        return ''.join(pieces)

    def format_heading(self, heading):
        # The generic "Options" heading adds no information; drop it.
        return '' if heading == 'Options' else heading + ':\n'

    def format_usage(self, usage):
        """
        Ensure there is only one newline between usage and the first heading
        if there is no description.
        """
        return '\nUsage: %s\n' % self.indent_lines(textwrap.dedent(usage), "  ")

    def format_description(self, description):
        # leave full control over description to us
        if not description:
            return ''
        label = 'Commands' if hasattr(self.parser, 'main') else 'Description'
        # Strip stray leading newlines / trailing whitespace, then re-indent.
        body = self.indent_lines(
            textwrap.dedent(description.lstrip('\n').rstrip()), "  ")
        return '%s:\n%s\n' % (label, body)

    def format_epilog(self, epilog):
        # leave full control over epilog to us
        return epilog if epilog else ''

    def indent_lines(self, text, indent):
        # Unlike textwrap.indent, this also prefixes empty lines.
        return "\n".join(indent + line for line in text.split('\n'))
class UpdatingDefaultsHelpFormatter(PrettyHelpFormatter):
    """Custom help formatter for use in ConfigOptionParser.

    Refreshes the parser defaults before expanding them, so values coming
    from config files / environment show up correctly in the help listing.
    """

    def expand_default(self, option):
        parser = self.parser
        if parser is not None:
            # Pull in config-file/environment overrides before expansion.
            parser._update_defaults(parser.defaults)
        return optparse.IndentedHelpFormatter.expand_default(self, option)
class CustomOptionParser(optparse.OptionParser):
    """OptionParser extended with positional group insertion and a flat
    view over all options."""

    def insert_option_group(self, idx, *args, **kwargs):
        """Insert an OptionGroup at a given position."""
        group = self.add_option_group(*args, **kwargs)
        # add_option_group appends; move the new group to the wanted slot.
        self.option_groups.pop()
        self.option_groups.insert(idx, group)
        return group

    @property
    def option_list_all(self):
        """Get a list of all options, including those in option groups."""
        all_options = list(self.option_list)
        for group in self.option_groups:
            all_options.extend(group.option_list)
        return all_options
class ConfigOptionParser(CustomOptionParser):
    """Custom option parser which updates its defaults by checking the
    configuration files and environmental variables"""

    isolated = False  # when True, skip per-user config files and PIP_* env vars

    def __init__(self, *args, **kwargs):
        self.config = configparser.RawConfigParser()
        self.name = kwargs.pop('name')
        self.isolated = kwargs.pop("isolated", False)
        self.files = self.get_config_files()
        if self.files:
            self.config.read(self.files)
        assert self.name
        optparse.OptionParser.__init__(self, *args, **kwargs)

    def get_config_files(self):
        # the files returned by this method will be parsed in order with the
        # first files listed being overridden by later files in standard
        # ConfigParser fashion
        config_file = os.environ.get('PIP_CONFIG_FILE', False)
        if config_file == os.devnull:
            # os.devnull disables config-file loading entirely.
            return []

        # at the base we have any site-wide configuration
        files = list(site_config_files)

        # per-user configuration next
        if not self.isolated:
            if config_file and os.path.exists(config_file):
                files.append(config_file)
            else:
                # This is the legacy config file, we consider it to be a lower
                # priority than the new file location.
                files.append(legacy_config_file)

                # This is the new config file, we consider it to be a higher
                # priority than the legacy file.
                files.append(
                    os.path.join(
                        appdirs.user_config_dir("pip"),
                        config_basename,
                    )
                )

        # finally virtualenv configuration first trumping others
        if running_under_virtualenv():
            venv_config_file = os.path.join(
                sys.prefix,
                config_basename,
            )
            if os.path.exists(venv_config_file):
                files.append(venv_config_file)

        return files

    def check_default(self, option, key, val):
        # Validate a config-sourced value; exit with a message if invalid.
        try:
            return option.check_value(key, val)
        except optparse.OptionValueError as exc:
            print("An error occurred during configuration: %s" % exc)
            sys.exit(3)

    def _update_defaults(self, defaults):
        """Updates the given defaults with values from the config files and
        the environ. Does a little special handling for certain types of
        options (lists)."""
        # Then go and look for the other sources of configuration:
        config = {}
        # 1. config files
        for section in ('global', self.name):
            config.update(
                self.normalize_keys(self.get_config_section(section))
            )
        # 2. environmental variables
        if not self.isolated:
            config.update(self.normalize_keys(self.get_environ_vars()))
        # Accumulate complex default state.
        self.values = optparse.Values(self.defaults)
        late_eval = set()
        # Then set the options with those values
        for key, val in config.items():
            # ignore empty values
            if not val:
                continue

            option = self.get_option(key)
            # Ignore options not present in this parser. E.g. non-globals put
            # in [global] by users that want them to apply to all applicable
            # commands.
            if option is None:
                continue

            if option.action in ('store_true', 'store_false', 'count'):
                # NOTE(review): distutils.util.strtobool is deprecated in
                # newer Pythons; vendored pip still relies on it here.
                val = strtobool(val)
            elif option.action == 'append':
                val = val.split()
                val = [self.check_default(option, key, v) for v in val]
            elif option.action == 'callback':
                # Callback options are evaluated late, after all defaults
                # are gathered (see the late_eval pass below).
                late_eval.add(option.dest)
                opt_str = option.get_opt_string()
                val = option.convert_value(opt_str, val)
                # From take_action
                args = option.callback_args or ()
                kwargs = option.callback_kwargs or {}
                option.callback(option, opt_str, val, self, *args, **kwargs)
            else:
                val = self.check_default(option, key, val)

            defaults[option.dest] = val

        for key in late_eval:
            defaults[key] = getattr(self.values, key)
        self.values = None
        return defaults

    def normalize_keys(self, items):
        """Return a config dictionary with normalized keys regardless of
        whether the keys were specified in environment variables or in config
        files"""
        normalized = {}
        for key, val in items:
            key = key.replace('_', '-')
            if not key.startswith('--'):
                key = '--%s' % key  # only prefer long opts
            normalized[key] = val
        return normalized

    def get_config_section(self, name):
        """Get a section of a configuration"""
        if self.config.has_section(name):
            return self.config.items(name)
        return []

    def get_environ_vars(self):
        """Returns a generator with all environmental vars with prefix PIP_"""
        for key, val in os.environ.items():
            if _environ_prefix_re.search(key):
                yield (_environ_prefix_re.sub("", key).lower(), val)

    def get_default_values(self):
        """Overriding to make updating the defaults after instantiation of
        the option parser possible, _update_defaults() does the dirty work."""
        if not self.process_default_values:
            # Old, pre-Optik 1.5 behaviour.
            return optparse.Values(self.defaults)

        defaults = self._update_defaults(self.defaults.copy())  # ours
        for option in self._get_all_options():
            default = defaults.get(option.dest)
            if isinstance(default, string_types):
                opt_str = option.get_opt_string()
                defaults[option.dest] = option.check_value(opt_str, default)
        return optparse.Values(defaults)

    def error(self, msg):
        self.print_usage(sys.stderr)
        self.exit(2, "%s\n" % msg)
| mit |
vaginessa/python-for-android | src/buildlib/jinja2.egg/jinja2/tests.py | 285 | 3313 | # -*- coding: utf-8 -*-
"""
jinja2.tests
~~~~~~~~~~~~
Jinja test functions. Used with the "is" operator.
:copyright: (c) 2010 by the Jinja Team.
:license: BSD, see LICENSE for more details.
"""
import re
from jinja2.runtime import Undefined
# nose, nothing here to test
__test__ = False

# Matches plain integral and decimal numbers, optionally negative.
number_re = re.compile(r'^-?\d+(\.\d+)?$')
regex_type = type(number_re)

# `callable` was absent in early Python 3 releases; fall back to a
# duck-typed check when the builtin is missing.
try:
    test_callable = callable
except NameError:
    def test_callable(x):
        return hasattr(x, '__call__')
def test_odd(value):
"""Return true if the variable is odd."""
return value % 2 == 1
def test_even(value):
"""Return true if the variable is even."""
return value % 2 == 0
def test_divisibleby(value, num):
"""Check if a variable is divisible by a number."""
return value % num == 0
def test_defined(value):
    """Return true if the variable is defined:

    .. sourcecode:: jinja

        {% if variable is defined %}
            value of variable: {{ variable }}
        {% else %}
            variable is not defined
        {% endif %}

    See the :func:`default` filter for a simple way to set undefined
    variables.
    """
    # Defined is simply the negation of the undefined test.
    return not test_undefined(value)
def test_undefined(value):
    """Like :func:`defined` but the other way round."""
    # Undefined instances are Jinja's placeholder for missing names.
    return isinstance(value, Undefined)
def test_none(value):
"""Return true if the variable is none."""
return value is None
def test_lower(value):
    """Return true if the variable is lowercased."""
    text = unicode(value)
    return text.islower()
def test_upper(value):
    """Return true if the variable is uppercased."""
    text = unicode(value)
    return text.isupper()
def test_string(value):
    """Return true if the object is a string."""
    # basestring covers both str and unicode on Python 2.
    return isinstance(value, basestring)
def test_number(value):
    """Return true if the variable is a number."""
    numeric_types = (int, long, float, complex)
    return isinstance(value, numeric_types)
def test_sequence(value):
"""Return true if the variable is a sequence. Sequences are variables
that are iterable.
"""
try:
len(value)
value.__getitem__
except:
return False
return True
def test_sameas(value, other):
"""Check if an object points to the same memory address than another
object:
.. sourcecode:: jinja
{% if foo.attribute is sameas false %}
the foo attribute really is the `False` singleton
{% endif %}
"""
return value is other
def test_iterable(value):
"""Check if it's possible to iterate over an object."""
try:
iter(value)
except TypeError:
return False
return True
def test_escaped(value):
"""Check if the value is escaped."""
return hasattr(value, '__html__')
# Registry mapping test names (used after Jinja's `is` operator) to their
# implementations; the environment looks tests up in this dict.
TESTS = {
    'odd': test_odd,
    'even': test_even,
    'divisibleby': test_divisibleby,
    'defined': test_defined,
    'undefined': test_undefined,
    'none': test_none,
    'lower': test_lower,
    'upper': test_upper,
    'string': test_string,
    'number': test_number,
    'sequence': test_sequence,
    'iterable': test_iterable,
    'callable': test_callable,
    'sameas': test_sameas,
    'escaped': test_escaped
}
| lgpl-2.1 |
ticklemepierce/osf.io | website/files/models/ext.py | 39 | 1578 | """website.files.models.ext is home to subclasses of FileNode that provide
additional functionality and have no place in website.files.models.base
"""
import os
from website.files.models.base import FileNode
class PathFollowingFileNode(FileNode):
    """A helper class that will attempt to track its file through changes
    in the parent addon's settings, e.g. moving a Dropbox directory up or
    down X levels.

    ``stored_object.path`` always holds the full path from the provider's
    root directory.
    """

    # Name of the node-settings attribute holding the configured folder.
    FOLDER_ATTR_NAME = 'folder'

    @classmethod
    def get_or_create(cls, node, path):
        """Forces path to extend to the add-on's root directory
        """
        node_settings = node.get_addon(cls.provider)
        root = getattr(node_settings, cls.FOLDER_ATTR_NAME).strip('/')
        full_path = os.path.join(root, path.lstrip('/'))
        return super(PathFollowingFileNode, cls).get_or_create(
            node, '/' + full_path)

    @property
    def path(self):
        """Mutates the underlying stored_object's path to be relative to
        _get_connected_path
        """
        relative = self.stored_object.path.replace(
            self._get_connected_path(), '', 1)
        return '/' + relative.lstrip('/')

    def _get_connected_path(self):
        """Returns the path of the connected provider add-on

        >>> pffn._get_connected_path()  # /MyDropbox/FolderImSharingOnTheOsf
        """
        node_settings = self.node.get_addon(self.provider)
        assert node_settings is not None, \
            'Connected node has no {} account'.format(self.provider)
        return getattr(node_settings, self.FOLDER_ATTR_NAME).strip('/')
| apache-2.0 |
crisely09/horton | horton/meanfield/scf_diis.py | 1 | 18828 | # -*- coding: utf-8 -*-
# HORTON: Helpful Open-source Research TOol for N-fermion systems.
# Copyright (C) 2011-2016 The HORTON Development Team
#
# This file is part of HORTON.
#
# HORTON is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 3
# of the License, or (at your option) any later version.
#
# HORTON is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, see <http://www.gnu.org/licenses/>
#
# --
'''Abstract DIIS code used by the different DIIS implementations'''
import numpy as np
from horton.log import log, timer
from horton.exceptions import NoSCFConvergence
from horton.meanfield.utils import compute_commutator, check_dm
from horton.meanfield.convergence import convergence_error_commutator
__all__ = []
class DIISSCFSolver(object):
'''Base class for all DIIS SCF solvers'''
kind = 'dm' # input/output variable is the density matrix
def __init__(self, DIISHistoryClass, threshold=1e-6, maxiter=128, nvector=6, skip_energy=False, prune_old_states=False):
'''
**Arguments:**
DIISHistoryClass
A DIIS history class.
**Optional arguments:**
maxiter
The maximum number of iterations. When set to None, the SCF loop
will go one until convergence is reached.
threshold
The convergence threshold for the wavefunction
skip_energy
When set to True, the final energy is not computed. Note that some
DIIS variants need to compute the energy anyway. for these methods
this option is irrelevant.
prune_old_states
When set to True, old states are pruned from the history when their
coefficient is zero. Pruning starts at the oldest state and stops
as soon as a state is encountered with a non-zero coefficient. Even
if some newer states have a zero coefficient.
'''
self.DIISHistoryClass = DIISHistoryClass
self.threshold = threshold
self.maxiter = maxiter
self.nvector = nvector
self.skip_energy = skip_energy
self.prune_old_states = prune_old_states
@timer.with_section('SCF')
def __call__(self, ham, lf, overlap, occ_model, *dms):
'''Find a self-consistent set of density matrices.
**Arguments:**
ham
An effective Hamiltonian.
lf
The linalg factory to be used.
overlap
The overlap operator.
occ_model
Model for the orbital occupations.
dm1, dm2, ...
The initial density matrices. The number of dms must match
ham.ndm.
'''
# Some type checking
if ham.ndm != len(dms):
raise TypeError('The number of initial density matrices does not match the Hamiltonian.')
# Check input density matrices.
for i in xrange(ham.ndm):
check_dm(dms[i], overlap, lf)
occ_model.check_dms(overlap, *dms)
# keep local variables as attributes for inspection/debugging by caller
self._history = self.DIISHistoryClass(lf, self.nvector, ham.ndm, ham.deriv_scale, overlap)
self._focks = [lf.create_two_index() for i in xrange(ham.ndm)]
self._exps = [lf.create_expansion() for i in xrange(ham.ndm)]
if log.do_medium:
log('Starting restricted closed-shell %s-SCF' % self._history.name)
log.hline()
log('Iter Error CN Last nv Method Energy Change')
log.hline()
converged = False
counter = 0
while self.maxiter is None or counter < self.maxiter:
# Construct the Fock operator from scratch if the history is empty:
if self._history.nused == 0:
# feed the latest density matrices in the hamiltonian
ham.reset(*dms)
# Construct the Fock operators
ham.compute_fock(*self._focks)
# Compute the energy if needed by the history
energy = ham.compute_energy() if self._history.need_energy \
else None
# Add the current fock+dm pair to the history
error = self._history.add(energy, dms, self._focks)
# Screen logging
if log.do_high:
log(' DIIS add')
if error < self.threshold:
converged = True
break
if log.do_high:
log.blank()
if log.do_medium:
energy_str = ' '*20 if energy is None else '% 20.13f' % energy
log('%4i %12.5e %2i %20s' % (
counter, error, self._history.nused, energy_str
))
if log.do_high:
log.blank()
fock_interpolated = False
else:
energy = None
fock_interpolated = True
# Take a regular SCF step using the current fock matrix. Then
# construct a new density matrix and fock matrix.
for i in xrange(ham.ndm):
self._exps[i].from_fock(self._focks[i], overlap)
occ_model.assign(*self._exps)
for i in xrange(ham.ndm):
self._exps[i].to_dm(dms[i])
ham.reset(*dms)
energy = ham.compute_energy() if self._history.need_energy else None
ham.compute_fock(*self._focks)
# Add the current (dm, fock) pair to the history
if log.do_high:
log(' DIIS add')
error = self._history.add(energy, dms, self._focks)
# break when converged
if error < self.threshold:
converged = True
break
# Screen logging
if log.do_high:
log.blank()
if log.do_medium:
energy_str = ' '*20 if energy is None else '% 20.13f' % energy
log('%4i %12.5e %2i %20s' % (
counter, error, self._history.nused, energy_str
))
if log.do_high:
log.blank()
# get extra/intra-polated Fock matrix
while True:
# The following method writes the interpolated dms and focks
# in-place.
energy_approx, coeffs, cn, method, error = self._history.solve(dms, self._focks)
# if the error is small on the interpolated state, we have
# converged to a solution that may have fractional occupation
# numbers.
if error < self.threshold:
converged = True
break
#if coeffs[coeffs<0].sum() < -1:
# if log.do_high:
# log(' DIIS (coeffs too negative) -> drop %i and retry' % self._history.stack[0].identity)
# self._history.shrink()
if self._history.nused <= 2:
break
if coeffs[-1] == 0.0:
if log.do_high:
log(' DIIS (last coeff zero) -> drop %i and retry' % self._history.stack[0].identity)
self._history.shrink()
else:
break
if False and len(coeffs) == 2:
dms_tmp = [dm.copy() for dm in dms]
import matplotlib.pyplot as pt
xs = np.linspace(0.0, 1.0, 25)
a, b = self._history._setup_equations()
energies1 = []
energies2 = []
for x in xs:
x_coeffs = np.array([1-x, x])
energies1.append(np.dot(x_coeffs, 0.5*np.dot(a, x_coeffs) - b))
self._history._build_combinations(x_coeffs, dms_tmp, None)
ham.reset(*dms_tmp)
energies2.append(ham.compute_energy())
print x, energies1[-1], energies2[-1]
pt.clf()
pt.plot(xs, energies1, label='est')
pt.plot(xs, energies2, label='ref')
pt.axvline(coeffs[1], color='k')
pt.legend(loc=0)
pt.savefig('diis_test_%05i.png' % counter)
if energy_approx is not None:
energy_change = energy_approx - min(state.energy for state in self._history.stack)
else:
energy_change = None
# log
if log.do_high:
self._history.log(coeffs)
if log.do_medium:
change_str = ' '*10 if energy_change is None else '% 12.7f' % energy_change
log('%4i %10.3e %12.7f %2i %s %12s' % (
counter, cn, coeffs[-1], self._history.nused, method,
change_str
))
if log.do_high:
log.blank()
if self.prune_old_states:
# get rid of old states with zero coeff
for i in xrange(self._history.nused):
if coeffs[i] == 0.0:
if log.do_high:
log(' DIIS insignificant -> drop %i' % self._history.stack[0].identity)
self._history.shrink()
else:
break
# counter
counter += 1
if log.do_medium:
if converged:
log('%4i %12.5e (converged)' % (counter, error))
log.blank()
if not self.skip_energy or self._history.need_energy:
if not self._history.need_energy:
ham.compute_energy()
if log.do_medium:
ham.log()
if not converged:
raise NoSCFConvergence
return counter
def error(self, ham, lf, overlap, *dms):
return convergence_error_commutator(ham, lf, overlap, *dms)
class DIISState(object):
'''A single record (vector) in a DIIS history object.'''
def __init__(self, lf, ndm, work, overlap):
'''
**Arguments:**
lf
The LinalgFactor used to create the two-index operators.
ndm
The number of density matrices (and fock matrices) in one
state.
work
A two index operator to be used as a temporary variable. This
object is allocated by the history object.
overlap
The overlap matrix.
'''
# Not all of these need to be used.
self.ndm = ndm
self.work = work
self.overlap = overlap
self.energy = np.nan
self.normsq = np.nan
self.dms = [lf.create_two_index() for i in xrange(self.ndm)]
self.focks = [lf.create_two_index() for i in xrange(self.ndm)]
self.commutators = [lf.create_two_index() for i in xrange(self.ndm)]
self.identity = None # every state has a different id.
def clear(self):
'''Reset this record.'''
self.energy = np.nan
self.normsq = np.nan
for i in xrange(self.ndm):
self.dms[i].clear()
self.focks[i].clear()
self.commutators[i].clear()
def assign(self, identity, energy, dms, focks):
'''Assign a new state.
**Arguments:**
identity
A unique id for the new state.
energy
The energy of the new state.
dm
The density matrix of the new state.
fock
The Fock matrix of the new state.
'''
self.identity = identity
self.energy = energy
self.normsq = 0.0
for i in xrange(self.ndm):
self.dms[i].assign(dms[i])
self.focks[i].assign(focks[i])
compute_commutator(dms[i], focks[i], self.overlap, self.work, self.commutators[i])
self.normsq += self.commutators[i].contract_two('ab,ab', self.commutators[i])
class DIISHistory(object):
'''A base class of DIIS histories'''
name = None
need_energy = None
def __init__(self, lf, nvector, ndm, deriv_scale, overlap, dots_matrices):
'''
**Arguments:**
lf
The LinalgFactor used to create the two-index operators.
nvector
The maximum size of the history.
ndm
The number of density matrices (and fock matrices) in one
state.
deriv_scale
The deriv_scale attribute of the Effective Hamiltonian
overlap
The overlap matrix.
dots_matrices
Matrices in which dot products will be stored
**Useful attributes:**
used
The actual number of vectors in the history.
'''
self.work = lf.create_two_index()
self.stack = [DIISState(lf, ndm, self.work, overlap) for i in xrange(nvector)]
self.ndm = ndm
self.deriv_scale = deriv_scale
self.overlap = overlap
self.dots_matrices = dots_matrices
self.nused = 0
self.idcounter = 0
self.commutator = lf.create_two_index()
def _get_nvector(self):
'''The maximum size of the history'''
return len(self.stack)
nvector = property(_get_nvector)
def log(self, coeffs):
eref = min(state.energy for state in self.stack[:self.nused])
if eref is None:
log(' DIIS history normsq coeff id')
for i in xrange(self.nused):
state = self.stack[i]
log(' DIIS history %12.5e %12.7f %8i' % (state.normsq, coeffs[i], state.identity))
else:
log(' DIIS history normsq energy coeff id')
for i in xrange(self.nused):
state = self.stack[i]
log(' DIIS history %12.5e %12.5e %12.7f %8i' % (state.normsq, state.energy-eref, coeffs[i], state.identity))
log.blank()
def solve(self, dms_output, focks_output):
'''Inter- or extrapolate new density and/or fock matrices.
**Arguments:**
dms_output
The output for the density matrices. If set to None, this is
argument is ignored.
focks_output
The output for the Fock matrices. If set to None, this is
argument is ignored.
'''
raise NotImplementedError
def shrink(self):
'''Remove the oldest item from the history'''
self.nused -= 1
state = self.stack.pop(0)
state.clear()
self.stack.append(state)
for dots in self.dots_matrices:
dots[:-1] = dots[1:]
dots[:,:-1] = dots[:,1:]
dots[-1] = np.nan
dots[:,-1] = np.nan
def add(self, energy, dms, focks):
'''Add new state to the history.
**Arguments:**
energy
The energy of the new state.
dms
A list of density matrices of the new state.
focks
A list of Fock matrix of the new state.
**Returns**: the square root of commutator error for the given pairs
of density and Fock matrices.
'''
if len(dms) != self.ndm or len(focks) != self.ndm:
raise TypeError('The number of density and Fock matrices must match the ndm parameter.')
# There must be a free spot. If needed, make one.
if self.nused == self.nvector:
self.shrink()
# assign dm and fock
state = self.stack[self.nused]
state.assign(self.idcounter, energy, dms, focks)
self.idcounter += 1
# prepare for next iteration
self.nused += 1
return np.sqrt(state.normsq)
def _build_combinations(self, coeffs, dms_output, focks_output):
'''Construct a linear combination of density/fock matrices
**Arguments:**
coeffs
The linear mixing coefficients for the previous SCF states.
dms_output
A list of output density matrices. (Ignored if None)
focks_output
A list of output density matrices. (Ignored if None)
**Returns:** the commutator error, only when both dms_output and
focks_output are given.
'''
if dms_output is not None:
if len(dms_output) != self.ndm:
raise TypeError('The number of density matrices must match the ndm parameter.')
for i in xrange(self.ndm):
dms_stack = [self.stack[j].dms[i] for j in xrange(self.nused)]
self._linear_combination(coeffs, dms_stack, dms_output[i])
if focks_output is not None:
if len(focks_output) != self.ndm:
raise TypeError('The number of Fock matrices must match the ndm parameter.')
for i in xrange(self.ndm):
focks_stack = [self.stack[j].focks[i] for j in xrange(self.nused)]
self._linear_combination(coeffs, focks_stack, focks_output[i])
if not (dms_output is None or focks_output is None):
errorsq = 0.0
for i in xrange(self.ndm):
compute_commutator(dms_output[i], focks_output[i], self.overlap, self.work, self.commutator)
errorsq += self.commutator.contract_two('ab,ab', self.commutator)
return errorsq**0.5
def _linear_combination(self, coeffs, ops, output):
'''Make a linear combination of two-index objects
**Arguments:**
coeffs
The linear mixing coefficients for the previous SCF states.
ops
A list of input operators.
output
The output operator.
'''
output.clear()
for i in xrange(self.nused):
output.iadd(ops[i], factor=coeffs[i])
| gpl-3.0 |
AndersHoglund/betaflight | src/utils/dfuse-pack.py | 8 | 8243 | #!/usr/bin/python
# Written by Antonio Galea - 2010/11/18
# Distributed under Gnu LGPL 3.0
# see http://www.gnu.org/licenses/lgpl-3.0.txt
#
# based on a modified version of this script from https://sourceforge.net/p/dfu-util/tickets/35/#357c
# with the patch supplied in https://sourceforge.net/p/dfu-util/tickets/35/#a2b6
import sys,struct,zlib,os
import binascii
from optparse import OptionParser
from intelhex import IntelHex
DEFAULT_DEVICE="0x0483:0xdf11"
DEFAULT_NAME=b'ST...'
def named(tuple,names):
return dict(list(zip(names.split(),tuple)))
def consume(fmt,data,names):
n = struct.calcsize(fmt)
return named(struct.unpack(fmt,data[:n]),names),data[n:]
def cstring(bytestring):
return bytestring.partition(b'\0')[0]
def compute_crc(data):
return 0xFFFFFFFF & -zlib.crc32(data) -1
def parse(file,dump_images=False):
print('File: "%s"' % file)
data = open(file,'rb').read()
crc = compute_crc(data[:-4])
prefix, data = consume('<5sBIB',data,'signature version size targets')
print('%(signature)s v%(version)d, image size: %(size)d, targets: %(targets)d' % prefix)
for t in range(prefix['targets']):
tprefix, data = consume('<6sBI255s2I',data,'signature altsetting named name size elements')
tprefix['num'] = t
if tprefix['named']:
tprefix['name'] = cstring(tprefix['name'])
else:
tprefix['name'] = ''
print('%(signature)s %(num)d, alt setting: %(altsetting)s, name: "%(name)s", size: %(size)d, elements: %(elements)d' % tprefix)
tsize = tprefix['size']
target, data = data[:tsize], data[tsize:]
for e in range(tprefix['elements']):
eprefix, target = consume('<2I',target,'address size')
eprefix['num'] = e
print(' %(num)d, address: 0x%(address)08x, size: %(size)d' % eprefix)
esize = eprefix['size']
image, target = target[:esize], target[esize:]
if dump_images:
out = '%s.target%d.image%d.bin' % (file,t,e)
open(out,'wb').write(image)
print(' DUMPED IMAGE TO "%s"' % out)
if len(target):
print("target %d: PARSE ERROR" % t)
suffix = named(struct.unpack('<4H3sBI',data[:16]),'device product vendor dfu ufd len crc')
print('usb: %(vendor)04x:%(product)04x, device: 0x%(device)04x, dfu: 0x%(dfu)04x, %(ufd)s, %(len)d, 0x%(crc)08x' % suffix)
if crc != suffix['crc']:
print("CRC ERROR: computed crc32 is 0x%08x" % crc)
data = data[16:]
if data:
print("PARSE ERROR")
def checkbin(binfile):
data = open(binfile,'rb').read()
if (len(data) < 16):
return
crc = compute_crc(data[:-4])
suffix = named(struct.unpack('<4H3sBI',data[-16:]),'device product vendor dfu ufd len crc')
if crc == suffix['crc'] and suffix['ufd'] == b'UFD':
print('usb: %(vendor)04x:%(product)04x, device: 0x%(device)04x, dfu: 0x%(dfu)04x, %(ufd)s, %(len)d, 0x%(crc)08x' % suffix)
print("It looks like the file %s has a DFU suffix!" % binfile)
print("Please remove any DFU suffix and retry.")
sys.exit(1)
def build(file,targets,name=DEFAULT_NAME,device=DEFAULT_DEVICE):
data = b''
for t,target in enumerate(targets):
tdata = b''
for image in target:
tdata += struct.pack('<2I',image['address'],len(image['data']))+image['data']
tdata = struct.pack('<6sBI255s2I',b'Target',0,1,name,len(tdata),len(target)) + tdata
data += tdata
data = struct.pack('<5sBIB',b'DfuSe',1,len(data)+11,len(targets)) + data
v,d=[int(x,0) & 0xFFFF for x in device.split(':',1)]
data += struct.pack('<4H3sB',0x2200,d,v,0x011a,b'UFD',16)
crc = compute_crc(data)
data += struct.pack('<I',crc)
open(file,'wb').write(data)
if __name__=="__main__":
usage = """
%prog [-d|--dump] infile.dfu
%prog {-b|--build} address:file.bin [-b address:file.bin ...] [{-D|--device}=vendor:device] outfile.dfu
%prog {-s|--build-s19} file.s19 [{-D|--device}=vendor:device] outfile.dfu
%prog {-i|--ihex} file.hex [-i file.hex ...] [{-D|--device}=vendor:device] outfile.dfu"""
parser = OptionParser(usage=usage)
parser.add_option("-b", "--build", action="append", dest="binfiles",
help="build a DFU file from given BINFILES. Note that the BINFILES must not have any DFU suffix!", metavar="BINFILES")
parser.add_option("-i", "--ihex", action="append", dest="hexfiles",
help="build a DFU file from given HEXFILES", metavar="HEXFILES")
parser.add_option("-s", "--build-s19", type="string", dest="s19files",
help="build a DFU file from given S19 S-record file.", metavar="S19FILE")
parser.add_option("-D", "--device", action="store", dest="device",
help="build for DEVICE, defaults to %s" % DEFAULT_DEVICE, metavar="DEVICE")
parser.add_option("-d", "--dump", action="store_true", dest="dump_images",
default=False, help="dump contained images to current directory")
(options, args) = parser.parse_args()
if (options.binfiles or options.hexfiles) and len(args)==1:
target = []
if options.binfiles:
for arg in options.binfiles:
try:
address,binfile = arg.split(':',1)
except ValueError:
print("Address:file couple '%s' invalid." % arg)
sys.exit(1)
try:
address = int(address,0) & 0xFFFFFFFF
except ValueError:
print("Address %s invalid." % address)
sys.exit(1)
if not os.path.isfile(binfile):
print("Unreadable file '%s'." % binfile)
sys.exit(1)
checkbin(binfile)
target.append({ 'address': address, 'data': open(binfile,'rb').read() })
if options.hexfiles:
for hex in options.hexfiles:
ih = IntelHex(hex)
for (address,end) in ih.segments():
try:
address = address & 0xFFFFFFFF
except ValueError:
print("Address %s invalid." % address)
sys.exit(1)
target.append({ 'address': address, 'data': ih.tobinstr(start=address, end=end-1)})
outfile = args[0]
device = DEFAULT_DEVICE
if options.device:
device=options.device
try:
v,d=[int(x,0) & 0xFFFF for x in device.split(':',1)]
except:
print("Invalid device '%s'." % device)
sys.exit(1)
build(outfile,[target],DEFAULT_NAME,device)
elif options.s19files and len(args)==1:
address = 0
data = ""
target = []
name = DEFAULT_NAME
with open(options.s19files) as f:
lines = f.readlines()
for line in lines:
curaddress = 0
curdata = ""
line = line.rstrip()
if line.startswith ( "S0" ):
name = binascii.a2b_hex(line[8:len(line) - 2]).replace(".s19", "")
elif line.startswith ( "S3" ):
try:
curaddress = int(line[4:12], 16) & 0xFFFFFFFF
except ValueError:
print("Address %s invalid." % address)
sys.exit(1)
curdata = binascii.unhexlify(line[12:-2])
elif line.startswith ( "S2" ):
try:
curaddress = int(line[4:10], 16) & 0xFFFFFFFF
except ValueError:
print("Address %s invalid." % address)
sys.exit(1)
curdata = binascii.unhexlify(line[10:-2])
elif line.startswith ( "S1" ):
try:
curaddress = int(line[4:8], 16) & 0xFFFFFFFF
except ValueError:
print("Address %s invalid." % address)
sys.exit(1)
curdata = binascii.unhexlify(line[8:-2])
if address == 0:
address = curaddress
data = curdata
elif address + len(data) != curaddress:
target.append({ 'address': address, 'data': data })
address = curaddress
data = curdata
else:
data += curdata
outfile = args[0]
device = DEFAULT_DEVICE
if options.device:
device=options.device
try:
v,d=[int(x,0) & 0xFFFF for x in device.split(':',1)]
except:
print("Invalid device '%s'." % device)
sys.exit(1)
build(outfile,[target],name,device)
elif len(args)==1:
infile = args[0]
if not os.path.isfile(infile):
print("Unreadable file '%s'." % infile)
sys.exit(1)
parse(infile, dump_images=options.dump_images)
else:
parser.print_help()
sys.exit(1)
| gpl-3.0 |
alanjw/GreenOpenERP-Win-X86 | python/Lib/site-packages/pychart/afm/NewCenturySchlbk_Bold.py | 12 | 1501 | # AFM font NewCenturySchlbk-Bold (path: /usr/share/fonts/afms/adobe/pncb8a.afm).
# Derived from Ghostscript distribution.
# Go to www.cs.wisc.edu/~ghost to get the Ghostcript source code.
import dir
dir.afm["NewCenturySchlbk-Bold"] = (500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 287, 296, 333, 574, 574, 833, 852, 241, 389, 389, 500, 606, 278, 333, 278, 278, 574, 574, 574, 574, 574, 574, 574, 574, 574, 574, 278, 278, 606, 606, 606, 500, 747, 759, 778, 778, 833, 759, 722, 833, 870, 444, 648, 815, 722, 981, 833, 833, 759, 833, 815, 667, 722, 833, 759, 981, 722, 722, 667, 389, 606, 389, 606, 500, 241, 611, 648, 556, 667, 574, 389, 611, 685, 370, 352, 667, 352, 963, 685, 611, 667, 648, 519, 500, 426, 685, 611, 889, 611, 611, 537, 389, 606, 389, 606, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 296, 574, 574, 167, 574, 574, 500, 574, 241, 481, 500, 333, 333, 685, 685, 500, 500, 500, 500, 278, 500, 747, 606, 241, 481, 481, 500, 1000, 1000, 500, 500, 500, 333, 333, 333, 333, 333, 333, 333, 333, 500, 333, 333, 500, 333, 333, 333, 1000, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 981, 500, 367, 500, 500, 500, 500, 722, 833, 1000, 367, 500, 500, 500, 500, 500, 870, 500, 500, 500, 370, 500, 500, 352, 611, 907, 611, )
| agpl-3.0 |
louisLouL/pair_trading | capstone_env/lib/python3.6/site-packages/pandas/tests/test_lib.py | 6 | 9172 | # -*- coding: utf-8 -*-
import pytest
import numpy as np
import pandas as pd
import pandas._libs.lib as lib
import pandas.util.testing as tm
class TestMisc(object):
def test_max_len_string_array(self):
arr = a = np.array(['foo', 'b', np.nan], dtype='object')
assert lib.max_len_string_array(arr), 3
# unicode
arr = a.astype('U').astype(object)
assert lib.max_len_string_array(arr), 3
# bytes for python3
arr = a.astype('S').astype(object)
assert lib.max_len_string_array(arr), 3
# raises
pytest.raises(TypeError,
lambda: lib.max_len_string_array(arr.astype('U')))
def test_fast_unique_multiple_list_gen_sort(self):
keys = [['p', 'a'], ['n', 'd'], ['a', 's']]
gen = (key for key in keys)
expected = np.array(['a', 'd', 'n', 'p', 's'])
out = lib.fast_unique_multiple_list_gen(gen, sort=True)
tm.assert_numpy_array_equal(np.array(out), expected)
gen = (key for key in keys)
expected = np.array(['p', 'a', 'n', 'd', 's'])
out = lib.fast_unique_multiple_list_gen(gen, sort=False)
tm.assert_numpy_array_equal(np.array(out), expected)
class TestIndexing(object):
def test_maybe_indices_to_slice_left_edge(self):
target = np.arange(100)
# slice
indices = np.array([], dtype=np.int64)
maybe_slice = lib.maybe_indices_to_slice(indices, len(target))
assert isinstance(maybe_slice, slice)
tm.assert_numpy_array_equal(target[indices], target[maybe_slice])
for end in [1, 2, 5, 20, 99]:
for step in [1, 2, 4]:
indices = np.arange(0, end, step, dtype=np.int64)
maybe_slice = lib.maybe_indices_to_slice(indices, len(target))
assert isinstance(maybe_slice, slice)
tm.assert_numpy_array_equal(target[indices],
target[maybe_slice])
# reverse
indices = indices[::-1]
maybe_slice = lib.maybe_indices_to_slice(indices, len(target))
assert isinstance(maybe_slice, slice)
tm.assert_numpy_array_equal(target[indices],
target[maybe_slice])
# not slice
for case in [[2, 1, 2, 0], [2, 2, 1, 0], [0, 1, 2, 1], [-2, 0, 2],
[2, 0, -2]]:
indices = np.array(case, dtype=np.int64)
maybe_slice = lib.maybe_indices_to_slice(indices, len(target))
assert not isinstance(maybe_slice, slice)
tm.assert_numpy_array_equal(maybe_slice, indices)
tm.assert_numpy_array_equal(target[indices], target[maybe_slice])
def test_maybe_indices_to_slice_right_edge(self):
target = np.arange(100)
# slice
for start in [0, 2, 5, 20, 97, 98]:
for step in [1, 2, 4]:
indices = np.arange(start, 99, step, dtype=np.int64)
maybe_slice = lib.maybe_indices_to_slice(indices, len(target))
assert isinstance(maybe_slice, slice)
tm.assert_numpy_array_equal(target[indices],
target[maybe_slice])
# reverse
indices = indices[::-1]
maybe_slice = lib.maybe_indices_to_slice(indices, len(target))
assert isinstance(maybe_slice, slice)
tm.assert_numpy_array_equal(target[indices],
target[maybe_slice])
# not slice
indices = np.array([97, 98, 99, 100], dtype=np.int64)
maybe_slice = lib.maybe_indices_to_slice(indices, len(target))
assert not isinstance(maybe_slice, slice)
tm.assert_numpy_array_equal(maybe_slice, indices)
with pytest.raises(IndexError):
target[indices]
with pytest.raises(IndexError):
target[maybe_slice]
indices = np.array([100, 99, 98, 97], dtype=np.int64)
maybe_slice = lib.maybe_indices_to_slice(indices, len(target))
assert not isinstance(maybe_slice, slice)
tm.assert_numpy_array_equal(maybe_slice, indices)
with pytest.raises(IndexError):
target[indices]
with pytest.raises(IndexError):
target[maybe_slice]
for case in [[99, 97, 99, 96], [99, 99, 98, 97], [98, 98, 97, 96]]:
indices = np.array(case, dtype=np.int64)
maybe_slice = lib.maybe_indices_to_slice(indices, len(target))
assert not isinstance(maybe_slice, slice)
tm.assert_numpy_array_equal(maybe_slice, indices)
tm.assert_numpy_array_equal(target[indices], target[maybe_slice])
def test_maybe_indices_to_slice_both_edges(self):
target = np.arange(10)
# slice
for step in [1, 2, 4, 5, 8, 9]:
indices = np.arange(0, 9, step, dtype=np.int64)
maybe_slice = lib.maybe_indices_to_slice(indices, len(target))
assert isinstance(maybe_slice, slice)
tm.assert_numpy_array_equal(target[indices], target[maybe_slice])
# reverse
indices = indices[::-1]
maybe_slice = lib.maybe_indices_to_slice(indices, len(target))
assert isinstance(maybe_slice, slice)
tm.assert_numpy_array_equal(target[indices], target[maybe_slice])
# not slice
for case in [[4, 2, 0, -2], [2, 2, 1, 0], [0, 1, 2, 1]]:
indices = np.array(case, dtype=np.int64)
maybe_slice = lib.maybe_indices_to_slice(indices, len(target))
assert not isinstance(maybe_slice, slice)
tm.assert_numpy_array_equal(maybe_slice, indices)
tm.assert_numpy_array_equal(target[indices], target[maybe_slice])
def test_maybe_indices_to_slice_middle(self):
target = np.arange(100)
# slice
for start, end in [(2, 10), (5, 25), (65, 97)]:
for step in [1, 2, 4, 20]:
indices = np.arange(start, end, step, dtype=np.int64)
maybe_slice = lib.maybe_indices_to_slice(indices, len(target))
assert isinstance(maybe_slice, slice)
tm.assert_numpy_array_equal(target[indices],
target[maybe_slice])
# reverse
indices = indices[::-1]
maybe_slice = lib.maybe_indices_to_slice(indices, len(target))
assert isinstance(maybe_slice, slice)
tm.assert_numpy_array_equal(target[indices],
target[maybe_slice])
# not slice
for case in [[14, 12, 10, 12], [12, 12, 11, 10], [10, 11, 12, 11]]:
indices = np.array(case, dtype=np.int64)
maybe_slice = lib.maybe_indices_to_slice(indices, len(target))
assert not isinstance(maybe_slice, slice)
tm.assert_numpy_array_equal(maybe_slice, indices)
tm.assert_numpy_array_equal(target[indices], target[maybe_slice])
def test_maybe_booleans_to_slice(self):
arr = np.array([0, 0, 1, 1, 1, 0, 1], dtype=np.uint8)
result = lib.maybe_booleans_to_slice(arr)
assert result.dtype == np.bool_
result = lib.maybe_booleans_to_slice(arr[:0])
assert result == slice(0, 0)
def test_get_reverse_indexer(self):
indexer = np.array([-1, -1, 1, 2, 0, -1, 3, 4], dtype=np.int64)
result = lib.get_reverse_indexer(indexer, 5)
expected = np.array([4, 2, 3, 6, 7], dtype=np.int64)
assert np.array_equal(result, expected)
class TestNullObj(object):
_1d_methods = ['isnullobj', 'isnullobj_old']
_2d_methods = ['isnullobj2d', 'isnullobj2d_old']
def _check_behavior(self, arr, expected):
for method in TestNullObj._1d_methods:
result = getattr(lib, method)(arr)
tm.assert_numpy_array_equal(result, expected)
arr = np.atleast_2d(arr)
expected = np.atleast_2d(expected)
for method in TestNullObj._2d_methods:
result = getattr(lib, method)(arr)
tm.assert_numpy_array_equal(result, expected)
def test_basic(self):
arr = np.array([1, None, 'foo', -5.1, pd.NaT, np.nan])
expected = np.array([False, True, False, False, True, True])
self._check_behavior(arr, expected)
def test_non_obj_dtype(self):
arr = np.array([1, 3, np.nan, 5], dtype=float)
expected = np.array([False, False, True, False])
self._check_behavior(arr, expected)
def test_empty_arr(self):
arr = np.array([])
expected = np.array([], dtype=bool)
self._check_behavior(arr, expected)
def test_empty_str_inp(self):
arr = np.array([""]) # empty but not null
expected = np.array([False])
self._check_behavior(arr, expected)
def test_empty_like(self):
# see gh-13717: no segfaults!
arr = np.empty_like([None])
expected = np.array([True])
self._check_behavior(arr, expected)
| mit |
1stvamp/pip | tests/lib/test_lib.py | 7 | 1699 | """Test the test support."""
from __future__ import absolute_import
import filecmp
import re
from os.path import join, isdir
from tests.lib import SRC_DIR
def test_tmp_dir_exists_in_env(script):
"""
Test that $TMPDIR == env.temp_path and path exists and env.assert_no_temp()
passes (in fast env)
"""
# need these tests to ensure the assert_no_temp feature of scripttest is
# working
script.assert_no_temp() # this fails if env.tmp_path doesn't exist
assert script.environ['TMPDIR'] == script.temp_path
assert isdir(script.temp_path)
def test_correct_pip_version(script):
"""
Check we are running proper version of pip in run_pip.
"""
# output is like:
# pip PIPVERSION from PIPDIRECTORY (python PYVERSION)
result = script.pip('--version')
# compare the directory tree of the invoked pip with that of this source
# distribution
dir = re.match(
r'pip \d(\.[\d])+(\.?(rc|dev|pre|post)\d+)? from (.*) '
r'\(python \d(.[\d])+\)$',
result.stdout
).group(4)
pip_folder = join(SRC_DIR, 'pip')
pip_folder_outputed = join(dir, 'pip')
diffs = filecmp.dircmp(pip_folder, pip_folder_outputed)
# If any non-matching .py files exist, we have a problem: run_pip
# is picking up some other version! N.B. if this project acquires
# primary resources other than .py files, this code will need
# maintenance
mismatch_py = [
x for x in diffs.left_only + diffs.right_only + diffs.diff_files
if x.endswith('.py')
]
assert not mismatch_py, (
'mismatched source files in %r and %r: %r' %
(pip_folder, pip_folder_outputed, mismatch_py)
)
| mit |
goliate/sarakha63-persomov | libs/html5lib/ihatexml.py | 1727 | 16581 | from __future__ import absolute_import, division, unicode_literals
import re
import warnings
from .constants import DataLossWarning
baseChar = """
[#x0041-#x005A] | [#x0061-#x007A] | [#x00C0-#x00D6] | [#x00D8-#x00F6] |
[#x00F8-#x00FF] | [#x0100-#x0131] | [#x0134-#x013E] | [#x0141-#x0148] |
[#x014A-#x017E] | [#x0180-#x01C3] | [#x01CD-#x01F0] | [#x01F4-#x01F5] |
[#x01FA-#x0217] | [#x0250-#x02A8] | [#x02BB-#x02C1] | #x0386 |
[#x0388-#x038A] | #x038C | [#x038E-#x03A1] | [#x03A3-#x03CE] |
[#x03D0-#x03D6] | #x03DA | #x03DC | #x03DE | #x03E0 | [#x03E2-#x03F3] |
[#x0401-#x040C] | [#x040E-#x044F] | [#x0451-#x045C] | [#x045E-#x0481] |
[#x0490-#x04C4] | [#x04C7-#x04C8] | [#x04CB-#x04CC] | [#x04D0-#x04EB] |
[#x04EE-#x04F5] | [#x04F8-#x04F9] | [#x0531-#x0556] | #x0559 |
[#x0561-#x0586] | [#x05D0-#x05EA] | [#x05F0-#x05F2] | [#x0621-#x063A] |
[#x0641-#x064A] | [#x0671-#x06B7] | [#x06BA-#x06BE] | [#x06C0-#x06CE] |
[#x06D0-#x06D3] | #x06D5 | [#x06E5-#x06E6] | [#x0905-#x0939] | #x093D |
[#x0958-#x0961] | [#x0985-#x098C] | [#x098F-#x0990] | [#x0993-#x09A8] |
[#x09AA-#x09B0] | #x09B2 | [#x09B6-#x09B9] | [#x09DC-#x09DD] |
[#x09DF-#x09E1] | [#x09F0-#x09F1] | [#x0A05-#x0A0A] | [#x0A0F-#x0A10] |
[#x0A13-#x0A28] | [#x0A2A-#x0A30] | [#x0A32-#x0A33] | [#x0A35-#x0A36] |
[#x0A38-#x0A39] | [#x0A59-#x0A5C] | #x0A5E | [#x0A72-#x0A74] |
[#x0A85-#x0A8B] | #x0A8D | [#x0A8F-#x0A91] | [#x0A93-#x0AA8] |
[#x0AAA-#x0AB0] | [#x0AB2-#x0AB3] | [#x0AB5-#x0AB9] | #x0ABD | #x0AE0 |
[#x0B05-#x0B0C] | [#x0B0F-#x0B10] | [#x0B13-#x0B28] | [#x0B2A-#x0B30] |
[#x0B32-#x0B33] | [#x0B36-#x0B39] | #x0B3D | [#x0B5C-#x0B5D] |
[#x0B5F-#x0B61] | [#x0B85-#x0B8A] | [#x0B8E-#x0B90] | [#x0B92-#x0B95] |
[#x0B99-#x0B9A] | #x0B9C | [#x0B9E-#x0B9F] | [#x0BA3-#x0BA4] |
[#x0BA8-#x0BAA] | [#x0BAE-#x0BB5] | [#x0BB7-#x0BB9] | [#x0C05-#x0C0C] |
[#x0C0E-#x0C10] | [#x0C12-#x0C28] | [#x0C2A-#x0C33] | [#x0C35-#x0C39] |
[#x0C60-#x0C61] | [#x0C85-#x0C8C] | [#x0C8E-#x0C90] | [#x0C92-#x0CA8] |
[#x0CAA-#x0CB3] | [#x0CB5-#x0CB9] | #x0CDE | [#x0CE0-#x0CE1] |
[#x0D05-#x0D0C] | [#x0D0E-#x0D10] | [#x0D12-#x0D28] | [#x0D2A-#x0D39] |
[#x0D60-#x0D61] | [#x0E01-#x0E2E] | #x0E30 | [#x0E32-#x0E33] |
[#x0E40-#x0E45] | [#x0E81-#x0E82] | #x0E84 | [#x0E87-#x0E88] | #x0E8A |
#x0E8D | [#x0E94-#x0E97] | [#x0E99-#x0E9F] | [#x0EA1-#x0EA3] | #x0EA5 |
#x0EA7 | [#x0EAA-#x0EAB] | [#x0EAD-#x0EAE] | #x0EB0 | [#x0EB2-#x0EB3] |
#x0EBD | [#x0EC0-#x0EC4] | [#x0F40-#x0F47] | [#x0F49-#x0F69] |
[#x10A0-#x10C5] | [#x10D0-#x10F6] | #x1100 | [#x1102-#x1103] |
[#x1105-#x1107] | #x1109 | [#x110B-#x110C] | [#x110E-#x1112] | #x113C |
#x113E | #x1140 | #x114C | #x114E | #x1150 | [#x1154-#x1155] | #x1159 |
[#x115F-#x1161] | #x1163 | #x1165 | #x1167 | #x1169 | [#x116D-#x116E] |
[#x1172-#x1173] | #x1175 | #x119E | #x11A8 | #x11AB | [#x11AE-#x11AF] |
[#x11B7-#x11B8] | #x11BA | [#x11BC-#x11C2] | #x11EB | #x11F0 | #x11F9 |
[#x1E00-#x1E9B] | [#x1EA0-#x1EF9] | [#x1F00-#x1F15] | [#x1F18-#x1F1D] |
[#x1F20-#x1F45] | [#x1F48-#x1F4D] | [#x1F50-#x1F57] | #x1F59 | #x1F5B |
#x1F5D | [#x1F5F-#x1F7D] | [#x1F80-#x1FB4] | [#x1FB6-#x1FBC] | #x1FBE |
[#x1FC2-#x1FC4] | [#x1FC6-#x1FCC] | [#x1FD0-#x1FD3] | [#x1FD6-#x1FDB] |
[#x1FE0-#x1FEC] | [#x1FF2-#x1FF4] | [#x1FF6-#x1FFC] | #x2126 |
[#x212A-#x212B] | #x212E | [#x2180-#x2182] | [#x3041-#x3094] |
[#x30A1-#x30FA] | [#x3105-#x312C] | [#xAC00-#xD7A3]"""
ideographic = """[#x4E00-#x9FA5] | #x3007 | [#x3021-#x3029]"""
combiningCharacter = """
[#x0300-#x0345] | [#x0360-#x0361] | [#x0483-#x0486] | [#x0591-#x05A1] |
[#x05A3-#x05B9] | [#x05BB-#x05BD] | #x05BF | [#x05C1-#x05C2] | #x05C4 |
[#x064B-#x0652] | #x0670 | [#x06D6-#x06DC] | [#x06DD-#x06DF] |
[#x06E0-#x06E4] | [#x06E7-#x06E8] | [#x06EA-#x06ED] | [#x0901-#x0903] |
#x093C | [#x093E-#x094C] | #x094D | [#x0951-#x0954] | [#x0962-#x0963] |
[#x0981-#x0983] | #x09BC | #x09BE | #x09BF | [#x09C0-#x09C4] |
[#x09C7-#x09C8] | [#x09CB-#x09CD] | #x09D7 | [#x09E2-#x09E3] | #x0A02 |
#x0A3C | #x0A3E | #x0A3F | [#x0A40-#x0A42] | [#x0A47-#x0A48] |
[#x0A4B-#x0A4D] | [#x0A70-#x0A71] | [#x0A81-#x0A83] | #x0ABC |
[#x0ABE-#x0AC5] | [#x0AC7-#x0AC9] | [#x0ACB-#x0ACD] | [#x0B01-#x0B03] |
#x0B3C | [#x0B3E-#x0B43] | [#x0B47-#x0B48] | [#x0B4B-#x0B4D] |
[#x0B56-#x0B57] | [#x0B82-#x0B83] | [#x0BBE-#x0BC2] | [#x0BC6-#x0BC8] |
[#x0BCA-#x0BCD] | #x0BD7 | [#x0C01-#x0C03] | [#x0C3E-#x0C44] |
[#x0C46-#x0C48] | [#x0C4A-#x0C4D] | [#x0C55-#x0C56] | [#x0C82-#x0C83] |
[#x0CBE-#x0CC4] | [#x0CC6-#x0CC8] | [#x0CCA-#x0CCD] | [#x0CD5-#x0CD6] |
[#x0D02-#x0D03] | [#x0D3E-#x0D43] | [#x0D46-#x0D48] | [#x0D4A-#x0D4D] |
#x0D57 | #x0E31 | [#x0E34-#x0E3A] | [#x0E47-#x0E4E] | #x0EB1 |
[#x0EB4-#x0EB9] | [#x0EBB-#x0EBC] | [#x0EC8-#x0ECD] | [#x0F18-#x0F19] |
#x0F35 | #x0F37 | #x0F39 | #x0F3E | #x0F3F | [#x0F71-#x0F84] |
[#x0F86-#x0F8B] | [#x0F90-#x0F95] | #x0F97 | [#x0F99-#x0FAD] |
[#x0FB1-#x0FB7] | #x0FB9 | [#x20D0-#x20DC] | #x20E1 | [#x302A-#x302F] |
#x3099 | #x309A"""
digit = """
[#x0030-#x0039] | [#x0660-#x0669] | [#x06F0-#x06F9] | [#x0966-#x096F] |
[#x09E6-#x09EF] | [#x0A66-#x0A6F] | [#x0AE6-#x0AEF] | [#x0B66-#x0B6F] |
[#x0BE7-#x0BEF] | [#x0C66-#x0C6F] | [#x0CE6-#x0CEF] | [#x0D66-#x0D6F] |
[#x0E50-#x0E59] | [#x0ED0-#x0ED9] | [#x0F20-#x0F29]"""
extender = """
#x00B7 | #x02D0 | #x02D1 | #x0387 | #x0640 | #x0E46 | #x0EC6 | #x3005 |
#[#x3031-#x3035] | [#x309D-#x309E] | [#x30FC-#x30FE]"""
# Derived XML 1.0 character classes, still in the textual spec format above.
letter = " | ".join([baseChar, ideographic])

# Without the
name = " | ".join([letter, digit, ".", "-", "_", combiningCharacter,
                   extender])
nameFirst = " | ".join([letter, "_"])

# Parse "#xNNNN" and "[#xNNNN-#xNNNN]" tokens from the spec strings.
# NOTE(review): the '|' inside [\d|A-F] is matched literally; it looks
# unintentional but is harmless for well-formed "#xNNNN" input.
reChar = re.compile(r"#x([\d|A-F]{4,4})")
reCharRange = re.compile(r"\[#x([\d|A-F]{4,4})-#x([\d|A-F]{4,4})\]")
def charStringToList(chars):
    """Parse a ' | '-separated XML character-class spec string into a
    sorted, merged list of ``[low, high]`` codepoint ranges.
    """
    ranges = []
    for token in (part.strip() for part in chars.split(" | ")):
        matched = False
        for pattern in (reChar, reCharRange):
            m = pattern.match(token)
            if m is not None:
                bounds = [hexToInt(group) for group in m.groups()]
                if len(bounds) == 1:
                    # A single "#xNNNN" becomes a degenerate [n, n] range.
                    bounds = bounds * 2
                ranges.append(bounds)
                matched = True
                break
        if not matched:
            # Bare literal characters (".", "-", "_") from the spec string.
            assert len(token) == 1
            ranges.append([ord(token)] * 2)
    return normaliseCharList(ranges)
def normaliseCharList(charList):
    """Sort a list of ``[low, high]`` codepoint ranges and merge every pair
    of ranges that overlap or are adjacent.

    The input sub-lists are reused (and may be mutated in place) in the
    returned list, matching the original behaviour.
    """
    charList = sorted(charList)
    for item in charList:
        assert item[1] >= item[0]
    rv = []
    i = 0
    while i < len(charList):
        j = 1
        rv.append(charList[i])
        while i + j < len(charList) and charList[i + j][0] <= rv[-1][1] + 1:
            # Bug fix: take the max of the two upper bounds.  The old code
            # overwrote the bound unconditionally, so a range fully
            # contained in the previous one *shrank* the merged range
            # (e.g. [[0, 10], [2, 3]] -> [[0, 3]]).
            rv[-1][1] = max(rv[-1][1], charList[i + j][1])
            j += 1
        i += j
    return rv
# We don't really support characters above the BMP :(
# Every range computation below is clamped to the Basic Multilingual Plane.
max_unicode = int("FFFF", 16)
def missingRanges(charList, max_char=0xFFFF):
    """Return the ``[low, high]`` ranges *not* covered by ``charList``.

    ``charList`` must be a normalised (sorted, merged) list of
    ``[low, high]`` pairs.  ``max_char`` is the highest codepoint
    considered; the default mirrors the module-level ``max_unicode``
    (the BMP ceiling).
    """
    rv = []
    # Bug fix: compare the first range's *lower bound* to 0.  The old code
    # compared the [low, high] pair itself to the integer 0, which is
    # always unequal, so a bogus [0, -1] range was emitted whenever
    # coverage actually started at codepoint 0.
    if charList[0][0] != 0:
        rv.append([0, charList[0][0] - 1])
    for i, item in enumerate(charList[:-1]):
        # Gap between the end of this range and the start of the next.
        rv.append([item[1] + 1, charList[i + 1][0] - 1])
    if charList[-1][1] != max_char:
        rv.append([charList[-1][1] + 1, max_char])
    return rv
def listToRegexpStr(charList):
    """Render a list of ``[low, high]`` codepoint ranges as a regexp
    character class, escaping characters that are special inside one.
    """
    pieces = []
    for low, high in charList:
        if low == high:
            pieces.append(escapeRegexp(chr(low)))
        else:
            pieces.append(escapeRegexp(chr(low)) + "-" +
                          escapeRegexp(chr(high)))
    return "[%s]" % "".join(pieces)
def hexToInt(hex_str):
    """Parse a hexadecimal string (no prefix) into an int."""
    return int(hex_str, base=16)
def escapeRegexp(string):
    """Backslash-escape the characters that are special in a regexp
    character class (same fixed set as the original implementation)."""
    special = {".", "^", "$", "*", "+", "?", "{", "}",
               "[", "]", "|", "(", ")", "-"}
    return "".join("\\" + ch if ch in special else ch for ch in string)
# output from the above
nonXmlNameBMPRegexp = re.compile('[\x00-,/:-@\\[-\\^`\\{-\xb6\xb8-\xbf\xd7\xf7\u0132-\u0133\u013f-\u0140\u0149\u017f\u01c4-\u01cc\u01f1-\u01f3\u01f6-\u01f9\u0218-\u024f\u02a9-\u02ba\u02c2-\u02cf\u02d2-\u02ff\u0346-\u035f\u0362-\u0385\u038b\u038d\u03a2\u03cf\u03d7-\u03d9\u03db\u03dd\u03df\u03e1\u03f4-\u0400\u040d\u0450\u045d\u0482\u0487-\u048f\u04c5-\u04c6\u04c9-\u04ca\u04cd-\u04cf\u04ec-\u04ed\u04f6-\u04f7\u04fa-\u0530\u0557-\u0558\u055a-\u0560\u0587-\u0590\u05a2\u05ba\u05be\u05c0\u05c3\u05c5-\u05cf\u05eb-\u05ef\u05f3-\u0620\u063b-\u063f\u0653-\u065f\u066a-\u066f\u06b8-\u06b9\u06bf\u06cf\u06d4\u06e9\u06ee-\u06ef\u06fa-\u0900\u0904\u093a-\u093b\u094e-\u0950\u0955-\u0957\u0964-\u0965\u0970-\u0980\u0984\u098d-\u098e\u0991-\u0992\u09a9\u09b1\u09b3-\u09b5\u09ba-\u09bb\u09bd\u09c5-\u09c6\u09c9-\u09ca\u09ce-\u09d6\u09d8-\u09db\u09de\u09e4-\u09e5\u09f2-\u0a01\u0a03-\u0a04\u0a0b-\u0a0e\u0a11-\u0a12\u0a29\u0a31\u0a34\u0a37\u0a3a-\u0a3b\u0a3d\u0a43-\u0a46\u0a49-\u0a4a\u0a4e-\u0a58\u0a5d\u0a5f-\u0a65\u0a75-\u0a80\u0a84\u0a8c\u0a8e\u0a92\u0aa9\u0ab1\u0ab4\u0aba-\u0abb\u0ac6\u0aca\u0ace-\u0adf\u0ae1-\u0ae5\u0af0-\u0b00\u0b04\u0b0d-\u0b0e\u0b11-\u0b12\u0b29\u0b31\u0b34-\u0b35\u0b3a-\u0b3b\u0b44-\u0b46\u0b49-\u0b4a\u0b4e-\u0b55\u0b58-\u0b5b\u0b5e\u0b62-\u0b65\u0b70-\u0b81\u0b84\u0b8b-\u0b8d\u0b91\u0b96-\u0b98\u0b9b\u0b9d\u0ba0-\u0ba2\u0ba5-\u0ba7\u0bab-\u0bad\u0bb6\u0bba-\u0bbd\u0bc3-\u0bc5\u0bc9\u0bce-\u0bd6\u0bd8-\u0be6\u0bf0-\u0c00\u0c04\u0c0d\u0c11\u0c29\u0c34\u0c3a-\u0c3d\u0c45\u0c49\u0c4e-\u0c54\u0c57-\u0c5f\u0c62-\u0c65\u0c70-\u0c81\u0c84\u0c8d\u0c91\u0ca9\u0cb4\u0cba-\u0cbd\u0cc5\u0cc9\u0cce-\u0cd4\u0cd7-\u0cdd\u0cdf\u0ce2-\u0ce5\u0cf0-\u0d01\u0d04\u0d0d\u0d11\u0d29\u0d3a-\u0d3d\u0d44-\u0d45\u0d49\u0d4e-\u0d56\u0d58-\u0d5f\u0d62-\u0d65\u0d70-\u0e00\u0e2f\u0e3b-\u0e3f\u0e4f\u0e5a-\u0e80\u0e83\u0e85-\u0e86\u0e89\u0e8b-\u0e8c\u0e8e-\u0e93\u0e98\u0ea0\u0ea4\u0ea6\u0ea8-\u0ea9\u0eac\u0eaf\u0eba\u0ebe-\u0ebf\u0ec5\u0ec7\u0ece-\u0ecf\u0eda-\u0f17\u0f1a-\u0f1f\u0f2a-\u0f34\u0f36\u0
f38\u0f3a-\u0f3d\u0f48\u0f6a-\u0f70\u0f85\u0f8c-\u0f8f\u0f96\u0f98\u0fae-\u0fb0\u0fb8\u0fba-\u109f\u10c6-\u10cf\u10f7-\u10ff\u1101\u1104\u1108\u110a\u110d\u1113-\u113b\u113d\u113f\u1141-\u114b\u114d\u114f\u1151-\u1153\u1156-\u1158\u115a-\u115e\u1162\u1164\u1166\u1168\u116a-\u116c\u116f-\u1171\u1174\u1176-\u119d\u119f-\u11a7\u11a9-\u11aa\u11ac-\u11ad\u11b0-\u11b6\u11b9\u11bb\u11c3-\u11ea\u11ec-\u11ef\u11f1-\u11f8\u11fa-\u1dff\u1e9c-\u1e9f\u1efa-\u1eff\u1f16-\u1f17\u1f1e-\u1f1f\u1f46-\u1f47\u1f4e-\u1f4f\u1f58\u1f5a\u1f5c\u1f5e\u1f7e-\u1f7f\u1fb5\u1fbd\u1fbf-\u1fc1\u1fc5\u1fcd-\u1fcf\u1fd4-\u1fd5\u1fdc-\u1fdf\u1fed-\u1ff1\u1ff5\u1ffd-\u20cf\u20dd-\u20e0\u20e2-\u2125\u2127-\u2129\u212c-\u212d\u212f-\u217f\u2183-\u3004\u3006\u3008-\u3020\u3030\u3036-\u3040\u3095-\u3098\u309b-\u309c\u309f-\u30a0\u30fb\u30ff-\u3104\u312d-\u4dff\u9fa6-\uabff\ud7a4-\uffff]')
nonXmlNameFirstBMPRegexp = re.compile('[\x00-@\\[-\\^`\\{-\xbf\xd7\xf7\u0132-\u0133\u013f-\u0140\u0149\u017f\u01c4-\u01cc\u01f1-\u01f3\u01f6-\u01f9\u0218-\u024f\u02a9-\u02ba\u02c2-\u0385\u0387\u038b\u038d\u03a2\u03cf\u03d7-\u03d9\u03db\u03dd\u03df\u03e1\u03f4-\u0400\u040d\u0450\u045d\u0482-\u048f\u04c5-\u04c6\u04c9-\u04ca\u04cd-\u04cf\u04ec-\u04ed\u04f6-\u04f7\u04fa-\u0530\u0557-\u0558\u055a-\u0560\u0587-\u05cf\u05eb-\u05ef\u05f3-\u0620\u063b-\u0640\u064b-\u0670\u06b8-\u06b9\u06bf\u06cf\u06d4\u06d6-\u06e4\u06e7-\u0904\u093a-\u093c\u093e-\u0957\u0962-\u0984\u098d-\u098e\u0991-\u0992\u09a9\u09b1\u09b3-\u09b5\u09ba-\u09db\u09de\u09e2-\u09ef\u09f2-\u0a04\u0a0b-\u0a0e\u0a11-\u0a12\u0a29\u0a31\u0a34\u0a37\u0a3a-\u0a58\u0a5d\u0a5f-\u0a71\u0a75-\u0a84\u0a8c\u0a8e\u0a92\u0aa9\u0ab1\u0ab4\u0aba-\u0abc\u0abe-\u0adf\u0ae1-\u0b04\u0b0d-\u0b0e\u0b11-\u0b12\u0b29\u0b31\u0b34-\u0b35\u0b3a-\u0b3c\u0b3e-\u0b5b\u0b5e\u0b62-\u0b84\u0b8b-\u0b8d\u0b91\u0b96-\u0b98\u0b9b\u0b9d\u0ba0-\u0ba2\u0ba5-\u0ba7\u0bab-\u0bad\u0bb6\u0bba-\u0c04\u0c0d\u0c11\u0c29\u0c34\u0c3a-\u0c5f\u0c62-\u0c84\u0c8d\u0c91\u0ca9\u0cb4\u0cba-\u0cdd\u0cdf\u0ce2-\u0d04\u0d0d\u0d11\u0d29\u0d3a-\u0d5f\u0d62-\u0e00\u0e2f\u0e31\u0e34-\u0e3f\u0e46-\u0e80\u0e83\u0e85-\u0e86\u0e89\u0e8b-\u0e8c\u0e8e-\u0e93\u0e98\u0ea0\u0ea4\u0ea6\u0ea8-\u0ea9\u0eac\u0eaf\u0eb1\u0eb4-\u0ebc\u0ebe-\u0ebf\u0ec5-\u0f3f\u0f48\u0f6a-\u109f\u10c6-\u10cf\u10f7-\u10ff\u1101\u1104\u1108\u110a\u110d\u1113-\u113b\u113d\u113f\u1141-\u114b\u114d\u114f\u1151-\u1153\u1156-\u1158\u115a-\u115e\u1162\u1164\u1166\u1168\u116a-\u116c\u116f-\u1171\u1174\u1176-\u119d\u119f-\u11a7\u11a9-\u11aa\u11ac-\u11ad\u11b0-\u11b6\u11b9\u11bb\u11c3-\u11ea\u11ec-\u11ef\u11f1-\u11f8\u11fa-\u1dff\u1e9c-\u1e9f\u1efa-\u1eff\u1f16-\u1f17\u1f1e-\u1f1f\u1f46-\u1f47\u1f4e-\u1f4f\u1f58\u1f5a\u1f5c\u1f5e\u1f7e-\u1f7f\u1fb5\u1fbd\u1fbf-\u1fc1\u1fc5\u1fcd-\u1fcf\u1fd4-\u1fd5\u1fdc-\u1fdf\u1fed-\u1ff1\u1ff5\u1ffd-\u2125\u2127-\u2129\u212c-\u212d\u212f-\u217f\u2183-\u3006\u3008-\u3020\u302a-\u3
040\u3095-\u30a0\u30fb-\u3104\u312d-\u4dff\u9fa6-\uabff\ud7a4-\uffff]')
# Simpler things
# Characters that are NOT allowed in an XML public identifier.
# NOTE(review): this looks like the complement of the XML PubidChar
# production (minus the double quote) — verify against the XML spec.
nonPubidCharRegexp = re.compile("[^\x20\x0D\x0Aa-zA-Z0-9\-\'()+,./:=?;!*#@$_%]")
class InfosetFilter(object):
    """Coerce HTML infoset items (names, comments, character data,
    public identifiers) into forms that are legal in XML, emitting a
    DataLossWarning whenever the data has to be altered.
    """

    # Matches the "UXXXXX" tokens produced by escapeChar(), so that
    # fromXmlName() can reverse the coercion.
    replacementRegexp = re.compile(r"U[\dA-F]{5,5}")

    def __init__(self, replaceChars=None,
                 dropXmlnsLocalName=False,
                 dropXmlnsAttrNs=False,
                 preventDoubleDashComments=False,
                 preventDashAtCommentEnd=False,
                 replaceFormFeedCharacters=True,
                 preventSingleQuotePubid=False):
        # NOTE(review): ``replaceChars`` is accepted but never used in this
        # class — presumably a leftover or forward-compatibility hook;
        # confirm before removing.
        self.dropXmlnsLocalName = dropXmlnsLocalName
        self.dropXmlnsAttrNs = dropXmlnsAttrNs

        self.preventDoubleDashComments = preventDoubleDashComments
        self.preventDashAtCommentEnd = preventDashAtCommentEnd

        self.replaceFormFeedCharacters = replaceFormFeedCharacters

        self.preventSingleQuotePubid = preventSingleQuotePubid

        # Cache mapping a single character to its "UXXXXX" replacement,
        # shared by all coercion methods (see escapeChar).
        self.replaceCache = {}

    def coerceAttribute(self, name, namespace=None):
        # Attribute names may be dropped entirely (None is returned) when
        # they cannot be represented in XML at all.
        if self.dropXmlnsLocalName and name.startswith("xmlns:"):
            warnings.warn("Attributes cannot begin with xmlns", DataLossWarning)
            return None
        elif (self.dropXmlnsAttrNs and
              namespace == "http://www.w3.org/2000/xmlns/"):
            warnings.warn("Attributes cannot be in the xml namespace", DataLossWarning)
            return None
        else:
            return self.toXmlName(name)

    def coerceElement(self, name, namespace=None):
        return self.toXmlName(name)

    def coerceComment(self, data):
        if self.preventDoubleDashComments:
            # XML forbids "--" inside comments; break up every occurrence,
            # warning once per replacement pass.
            while "--" in data:
                warnings.warn("Comments cannot contain adjacent dashes", DataLossWarning)
                data = data.replace("--", "- -")
        return data

    def coerceCharacters(self, data):
        if self.replaceFormFeedCharacters:
            # One warning per occurrence, then a single bulk replace.
            for i in range(data.count("\x0C")):
                warnings.warn("Text cannot contain U+000C", DataLossWarning)
            data = data.replace("\x0C", " ")
        # Other non-xml characters
        return data

    def coercePubid(self, data):
        dataOutput = data
        # Replace each character that is illegal in a public identifier
        # with its "UXXXXX" escape (one warning per found occurrence).
        for char in nonPubidCharRegexp.findall(data):
            warnings.warn("Coercing non-XML pubid", DataLossWarning)
            replacement = self.getReplacementCharacter(char)
            dataOutput = dataOutput.replace(char, replacement)
        if self.preventSingleQuotePubid and dataOutput.find("'") >= 0:
            warnings.warn("Pubid cannot contain single quote", DataLossWarning)
            dataOutput = dataOutput.replace("'", self.getReplacementCharacter("'"))
        return dataOutput

    def toXmlName(self, name):
        # The first character obeys a stricter production (NameStartChar)
        # than the rest, so the two parts are checked against different
        # regexps.
        nameFirst = name[0]
        nameRest = name[1:]
        m = nonXmlNameFirstBMPRegexp.match(nameFirst)
        if m:
            warnings.warn("Coercing non-XML name", DataLossWarning)
            nameFirstOutput = self.getReplacementCharacter(nameFirst)
        else:
            nameFirstOutput = nameFirst

        nameRestOutput = nameRest
        replaceChars = set(nonXmlNameBMPRegexp.findall(nameRest))
        for char in replaceChars:
            warnings.warn("Coercing non-XML name", DataLossWarning)
            replacement = self.getReplacementCharacter(char)
            nameRestOutput = nameRestOutput.replace(char, replacement)
        return nameFirstOutput + nameRestOutput

    def getReplacementCharacter(self, char):
        # escapeChar() populates self.replaceCache on first sight, so
        # subsequent lookups for the same character hit the cache.
        if char in self.replaceCache:
            replacement = self.replaceCache[char]
        else:
            replacement = self.escapeChar(char)
        return replacement

    def fromXmlName(self, name):
        # Reverse of toXmlName(): expand every "UXXXXX" token back to its
        # original character.
        for item in set(self.replacementRegexp.findall(name)):
            name = name.replace(item, self.unescapeChar(item))
        return name

    def escapeChar(self, char):
        replacement = "U%05X" % ord(char)
        self.replaceCache[char] = replacement
        return replacement

    def unescapeChar(self, charcode):
        return chr(int(charcode[1:], 16))
| gpl-3.0 |
phektus/Django-Google-AppEngine-OpenId-Auth | django/contrib/comments/__init__.py | 423 | 3333 | from django.conf import settings
from django.core import urlresolvers
from django.core.exceptions import ImproperlyConfigured
from django.contrib.comments.models import Comment
from django.contrib.comments.forms import CommentForm
from django.utils.importlib import import_module
# Fallback comments app used when settings.COMMENTS_APP is not set.
DEFAULT_COMMENTS_APP = 'django.contrib.comments'
def get_comment_app():
    """
    Get the comment app (i.e. "django.contrib.comments") as defined in the settings

    Raises ImproperlyConfigured when the configured app is not installed
    or cannot be imported.
    """
    app_name = get_comment_app_name()

    # Make sure the app's in INSTALLED_APPS
    if app_name not in settings.INSTALLED_APPS:
        raise ImproperlyConfigured("The COMMENTS_APP (%r) "
                                   "must be in INSTALLED_APPS" % settings.COMMENTS_APP)

    # Try to import the package
    try:
        return import_module(app_name)
    except ImportError:
        raise ImproperlyConfigured("The COMMENTS_APP setting refers to "
                                   "a non-existing package.")
def get_comment_app_name():
    """Return the configured comments app name (settings.COMMENTS_APP),
    falling back to the default app when the setting is absent."""
    return getattr(settings, 'COMMENTS_APP', DEFAULT_COMMENTS_APP)
def get_model():
    """Return the comment model class, honouring a custom comments app."""
    uses_custom_app = get_comment_app_name() != DEFAULT_COMMENTS_APP
    if uses_custom_app and hasattr(get_comment_app(), "get_model"):
        return get_comment_app().get_model()
    return Comment
def get_form():
    """Return the comment ModelForm class, honouring a custom comments app."""
    uses_custom_app = get_comment_app_name() != DEFAULT_COMMENTS_APP
    if uses_custom_app and hasattr(get_comment_app(), "get_form"):
        return get_comment_app().get_form()
    return CommentForm
def get_form_target():
    """Return the URL comment forms should POST to, honouring a custom
    comments app."""
    uses_custom_app = get_comment_app_name() != DEFAULT_COMMENTS_APP
    if uses_custom_app and hasattr(get_comment_app(), "get_form_target"):
        return get_comment_app().get_form_target()
    return urlresolvers.reverse("django.contrib.comments.views.comments.post_comment")
def get_flag_url(comment):
    """Return the URL of the "flag this comment" view for *comment*."""
    uses_custom_app = get_comment_app_name() != DEFAULT_COMMENTS_APP
    if uses_custom_app and hasattr(get_comment_app(), "get_flag_url"):
        return get_comment_app().get_flag_url(comment)
    return urlresolvers.reverse("django.contrib.comments.views.moderation.flag",
                                args=(comment.id,))
def get_delete_url(comment):
    """Return the URL of the "delete this comment" view for *comment*."""
    uses_custom_app = get_comment_app_name() != DEFAULT_COMMENTS_APP
    if uses_custom_app and hasattr(get_comment_app(), "get_delete_url"):
        return get_comment_app().get_delete_url(comment)
    return urlresolvers.reverse("django.contrib.comments.views.moderation.delete",
                                args=(comment.id,))
def get_approve_url(comment):
    """Return the URL of the "approve this comment from moderation" view
    for *comment*."""
    uses_custom_app = get_comment_app_name() != DEFAULT_COMMENTS_APP
    if uses_custom_app and hasattr(get_comment_app(), "get_approve_url"):
        return get_comment_app().get_approve_url(comment)
    return urlresolvers.reverse("django.contrib.comments.views.moderation.approve",
                                args=(comment.id,))
| bsd-3-clause |
exiahuang/SalesforceXyTools | requests/adapters.py | 22 | 17491 | # -*- coding: utf-8 -*-
"""
requests.adapters
~~~~~~~~~~~~~~~~~
This module contains the transport adapters that Requests uses to define
and maintain connections.
"""
import os.path
import socket
from .models import Response
from .packages.urllib3.poolmanager import PoolManager, proxy_from_url
from .packages.urllib3.response import HTTPResponse
from .packages.urllib3.util import Timeout as TimeoutSauce
from .packages.urllib3.util.retry import Retry
from .compat import urlparse, basestring
from .utils import (DEFAULT_CA_BUNDLE_PATH, get_encoding_from_headers,
prepend_scheme_if_needed, get_auth_from_url, urldefragauth,
select_proxy)
from .structures import CaseInsensitiveDict
from .packages.urllib3.exceptions import ClosedPoolError
from .packages.urllib3.exceptions import ConnectTimeoutError
from .packages.urllib3.exceptions import HTTPError as _HTTPError
from .packages.urllib3.exceptions import MaxRetryError
from .packages.urllib3.exceptions import NewConnectionError
from .packages.urllib3.exceptions import ProxyError as _ProxyError
from .packages.urllib3.exceptions import ProtocolError
from .packages.urllib3.exceptions import ReadTimeoutError
from .packages.urllib3.exceptions import SSLError as _SSLError
from .packages.urllib3.exceptions import ResponseError
from .cookies import extract_cookies_to_jar
from .exceptions import (ConnectionError, ConnectTimeout, ReadTimeout, SSLError,
ProxyError, RetryError)
from .auth import _basic_auth_str
# Connection-pool and retry defaults shared by every HTTPAdapter instance.
DEFAULT_POOLBLOCK = False
DEFAULT_POOLSIZE = 10
DEFAULT_RETRIES = 0
DEFAULT_POOL_TIMEOUT = None
class BaseAdapter(object):
    """The Base Transport Adapter.

    Subclasses must implement :meth:`send` and :meth:`close`.  ``send`` is
    invoked by :class:`Session <Session>` with the full argument set below
    (see :meth:`HTTPAdapter.send`), so the abstract signature now mirrors
    it instead of taking no arguments — the old zero-argument stub never
    matched how adapters are actually called.  Existing subclasses that
    override ``send`` with the full signature are unaffected.
    """

    def __init__(self):
        super(BaseAdapter, self).__init__()

    def send(self, request, stream=False, timeout=None, verify=True,
             cert=None, proxies=None):
        """Send a PreparedRequest object. Must be overridden by subclasses.

        :param request: The :class:`PreparedRequest <PreparedRequest>` being sent.
        :param stream: (optional) Whether to stream the request content.
        :param timeout: (optional) How long to wait for the server to send data.
        :param verify: (optional) Whether to verify SSL certificates.
        :param cert: (optional) Any user-provided SSL certificate to be trusted.
        :param proxies: (optional) The proxies dictionary to apply to the request.
        """
        raise NotImplementedError

    def close(self):
        """Clean up adapter specific items. Must be overridden by subclasses."""
        raise NotImplementedError
class HTTPAdapter(BaseAdapter):
    """The built-in HTTP Adapter for urllib3.

    Provides a general-case interface for Requests sessions to contact HTTP and
    HTTPS urls by implementing the Transport Adapter interface. This class will
    usually be created by the :class:`Session <Session>` class under the
    covers.

    :param pool_connections: The number of urllib3 connection pools to cache.
    :param pool_maxsize: The maximum number of connections to save in the pool.
    :param max_retries: The maximum number of retries each connection
        should attempt. Note, this applies only to failed DNS lookups, socket
        connections and connection timeouts, never to requests where data has
        made it to the server. By default, Requests does not retry failed
        connections. If you need granular control over the conditions under
        which we retry a request, import urllib3's ``Retry`` class and pass
        that instead.
    :param pool_block: Whether the connection pool should block for connections.

    Usage::

      >>> import requests
      >>> s = requests.Session()
      >>> a = requests.adapters.HTTPAdapter(max_retries=3)
      >>> s.mount('http://', a)
    """
    # Attributes that survive pickling (see __getstate__/__setstate__).
    __attrs__ = ['max_retries', 'config', '_pool_connections', '_pool_maxsize',
                 '_pool_block']

    def __init__(self, pool_connections=DEFAULT_POOLSIZE,
                 pool_maxsize=DEFAULT_POOLSIZE, max_retries=DEFAULT_RETRIES,
                 pool_block=DEFAULT_POOLBLOCK):
        if max_retries == DEFAULT_RETRIES:
            # Default: retry connects but never reads, so a request is
            # never re-sent once data may have reached the server.
            self.max_retries = Retry(0, read=False)
        else:
            self.max_retries = Retry.from_int(max_retries)
        self.config = {}
        self.proxy_manager = {}

        super(HTTPAdapter, self).__init__()

        # Saved so the pool manager can be rebuilt after unpickling.
        self._pool_connections = pool_connections
        self._pool_maxsize = pool_maxsize
        self._pool_block = pool_block

        self.init_poolmanager(pool_connections, pool_maxsize, block=pool_block)

    def __getstate__(self):
        return dict((attr, getattr(self, attr, None)) for attr in
                    self.__attrs__)

    def __setstate__(self, state):
        # Can't handle by adding 'proxy_manager' to self.__attrs__ because
        # self.poolmanager uses a lambda function, which isn't pickleable.
        self.proxy_manager = {}
        self.config = {}

        for attr, value in state.items():
            setattr(self, attr, value)

        self.init_poolmanager(self._pool_connections, self._pool_maxsize,
                              block=self._pool_block)

    def init_poolmanager(self, connections, maxsize, block=DEFAULT_POOLBLOCK, **pool_kwargs):
        """Initializes a urllib3 PoolManager.

        This method should not be called from user code, and is only
        exposed for use when subclassing the
        :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.

        :param connections: The number of urllib3 connection pools to cache.
        :param maxsize: The maximum number of connections to save in the pool.
        :param block: Block when no free connections are available.
        :param pool_kwargs: Extra keyword arguments used to initialize the Pool Manager.
        """
        # save these values for pickling
        self._pool_connections = connections
        self._pool_maxsize = maxsize
        self._pool_block = block

        self.poolmanager = PoolManager(num_pools=connections, maxsize=maxsize,
                                       block=block, strict=True, **pool_kwargs)

    def proxy_manager_for(self, proxy, **proxy_kwargs):
        """Return urllib3 ProxyManager for the given proxy.

        This method should not be called from user code, and is only
        exposed for use when subclassing the
        :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.

        :param proxy: The proxy to return a urllib3 ProxyManager for.
        :param proxy_kwargs: Extra keyword arguments used to configure the Proxy Manager.
        :returns: ProxyManager
        """
        # One lazily-created ProxyManager per proxy URL, cached for reuse.
        if not proxy in self.proxy_manager:
            proxy_headers = self.proxy_headers(proxy)
            self.proxy_manager[proxy] = proxy_from_url(
                proxy,
                proxy_headers=proxy_headers,
                num_pools=self._pool_connections,
                maxsize=self._pool_maxsize,
                block=self._pool_block,
                **proxy_kwargs)

        return self.proxy_manager[proxy]

    def cert_verify(self, conn, url, verify, cert):
        """Verify a SSL certificate. This method should not be called from user
        code, and is only exposed for use when subclassing the
        :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.

        :param conn: The urllib3 connection object associated with the cert.
        :param url: The requested URL.
        :param verify: Whether we should actually verify the certificate.
        :param cert: The SSL certificate to verify.
        """
        if url.lower().startswith('https') and verify:

            cert_loc = None

            # Allow self-specified cert location.
            # (verify may be True, or a path to a CA bundle/directory.)
            if verify is not True:
                cert_loc = verify

            if not cert_loc:
                cert_loc = DEFAULT_CA_BUNDLE_PATH

            if not cert_loc:
                raise Exception("Could not find a suitable SSL CA certificate bundle.")

            conn.cert_reqs = 'CERT_REQUIRED'

            # A file path is a CA bundle; a directory is a CA cert dir.
            if not os.path.isdir(cert_loc):
                conn.ca_certs = cert_loc
            else:
                conn.ca_cert_dir = cert_loc
        else:
            conn.cert_reqs = 'CERT_NONE'
            conn.ca_certs = None
            conn.ca_cert_dir = None

        if cert:
            # cert may be a (certfile, keyfile) pair or a single path.
            if not isinstance(cert, basestring):
                conn.cert_file = cert[0]
                conn.key_file = cert[1]
            else:
                conn.cert_file = cert

    def build_response(self, req, resp):
        """Builds a :class:`Response <requests.Response>` object from a urllib3
        response. This should not be called from user code, and is only exposed
        for use when subclassing the
        :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`

        :param req: The :class:`PreparedRequest <PreparedRequest>` used to generate the response.
        :param resp: The urllib3 response object.
        """
        response = Response()

        # Fallback to None if there's no status_code, for whatever reason.
        response.status_code = getattr(resp, 'status', None)

        # Make headers case-insensitive.
        response.headers = CaseInsensitiveDict(getattr(resp, 'headers', {}))

        # Set encoding.
        response.encoding = get_encoding_from_headers(response.headers)
        response.raw = resp
        response.reason = response.raw.reason

        if isinstance(req.url, bytes):
            response.url = req.url.decode('utf-8')
        else:
            response.url = req.url

        # Add new cookies from the server.
        extract_cookies_to_jar(response.cookies, req, resp)

        # Give the Response some context.
        response.request = req
        response.connection = self

        return response

    def get_connection(self, url, proxies=None):
        """Returns a urllib3 connection for the given URL. This should not be
        called from user code, and is only exposed for use when subclassing the
        :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.

        :param url: The URL to connect to.
        :param proxies: (optional) A Requests-style dictionary of proxies used on this request.
        """
        proxy = select_proxy(url, proxies)

        if proxy:
            proxy = prepend_scheme_if_needed(proxy, 'http')
            proxy_manager = self.proxy_manager_for(proxy)
            conn = proxy_manager.connection_from_url(url)
        else:
            # Only scheme should be lower case
            parsed = urlparse(url)
            url = parsed.geturl()
            conn = self.poolmanager.connection_from_url(url)

        return conn

    def close(self):
        """Disposes of any internal state.

        Currently, this just closes the PoolManager, which closes pooled
        connections.
        """
        self.poolmanager.clear()

    def request_url(self, request, proxies):
        """Obtain the url to use when making the final request.

        If the message is being sent through a HTTP proxy, the full URL has to
        be used. Otherwise, we should only use the path portion of the URL.

        This should not be called from user code, and is only exposed for use
        when subclassing the
        :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.

        :param request: The :class:`PreparedRequest <PreparedRequest>` being sent.
        :param proxies: A dictionary of schemes or schemes and hosts to proxy URLs.
        """
        proxy = select_proxy(request.url, proxies)
        scheme = urlparse(request.url).scheme
        if proxy and scheme != 'https':
            # HTTP proxying requires the absolute URL in the request line;
            # HTTPS goes through CONNECT and keeps the path form.
            url = urldefragauth(request.url)
        else:
            url = request.path_url

        return url

    def add_headers(self, request, **kwargs):
        """Add any headers needed by the connection. As of v2.0 this does
        nothing by default, but is left for overriding by users that subclass
        the :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.

        This should not be called from user code, and is only exposed for use
        when subclassing the
        :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.

        :param request: The :class:`PreparedRequest <PreparedRequest>` to add headers to.
        :param kwargs: The keyword arguments from the call to send().
        """
        pass

    def proxy_headers(self, proxy):
        """Returns a dictionary of the headers to add to any request sent
        through a proxy. This works with urllib3 magic to ensure that they are
        correctly sent to the proxy, rather than in a tunnelled request if
        CONNECT is being used.

        This should not be called from user code, and is only exposed for use
        when subclassing the
        :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.

        :param proxy: The url of the proxy being used for this request.
        """
        headers = {}
        username, password = get_auth_from_url(proxy)

        if username and password:
            headers['Proxy-Authorization'] = _basic_auth_str(username,
                                                             password)

        return headers

    def send(self, request, stream=False, timeout=None, verify=True, cert=None, proxies=None):
        """Sends PreparedRequest object. Returns Response object.

        :param request: The :class:`PreparedRequest <PreparedRequest>` being sent.
        :param stream: (optional) Whether to stream the request content.
        :param timeout: (optional) How long to wait for the server to send
            data before giving up, as a float, or a :ref:`(connect timeout,
            read timeout) <timeouts>` tuple.
        :type timeout: float or tuple
        :param verify: (optional) Whether to verify SSL certificates.
        :param cert: (optional) Any user-provided SSL certificate to be trusted.
        :param proxies: (optional) The proxies dictionary to apply to the request.
        """
        conn = self.get_connection(request.url, proxies)

        self.cert_verify(conn, request.url, verify, cert)
        url = self.request_url(request, proxies)
        self.add_headers(request)

        # Chunked transfer encoding is used whenever a body is present
        # without an explicit Content-Length header.
        chunked = not (request.body is None or 'Content-Length' in request.headers)

        if isinstance(timeout, tuple):
            try:
                connect, read = timeout
                timeout = TimeoutSauce(connect=connect, read=read)
            except ValueError as e:
                # this may raise a string formatting error.
                err = ("Invalid timeout {0}. Pass a (connect, read) "
                       "timeout tuple, or a single float to set "
                       "both timeouts to the same value".format(timeout))
                raise ValueError(err)
        else:
            timeout = TimeoutSauce(connect=timeout, read=timeout)

        try:
            if not chunked:
                # Simple path: urllib3 handles the whole exchange.
                resp = conn.urlopen(
                    method=request.method,
                    url=url,
                    body=request.body,
                    headers=request.headers,
                    redirect=False,
                    assert_same_host=False,
                    preload_content=False,
                    decode_content=False,
                    retries=self.max_retries,
                    timeout=timeout
                )

            # Send the request.
            else:
                # Chunked path: drive a low-level connection by hand so the
                # body can be streamed with chunked transfer encoding.
                if hasattr(conn, 'proxy_pool'):
                    conn = conn.proxy_pool

                low_conn = conn._get_conn(timeout=DEFAULT_POOL_TIMEOUT)

                try:
                    low_conn.putrequest(request.method,
                                        url,
                                        skip_accept_encoding=True)

                    for header, value in request.headers.items():
                        low_conn.putheader(header, value)

                    low_conn.endheaders()

                    # Emit each body chunk in wire format:
                    # hex length, CRLF, data, CRLF; then the terminator.
                    for i in request.body:
                        low_conn.send(hex(len(i))[2:].encode('utf-8'))
                        low_conn.send(b'\r\n')
                        low_conn.send(i)
                        low_conn.send(b'\r\n')
                    low_conn.send(b'0\r\n\r\n')

                    # Receive the response from the server
                    try:
                        # For Python 2.7+ versions, use buffering of HTTP
                        # responses
                        r = low_conn.getresponse(buffering=True)
                    except TypeError:
                        # For compatibility with Python 2.6 versions and back
                        r = low_conn.getresponse()

                    resp = HTTPResponse.from_httplib(
                        r,
                        pool=conn,
                        connection=low_conn,
                        preload_content=False,
                        decode_content=False
                    )
                except:
                    # If we hit any problems here, clean up the connection.
                    # Then, reraise so that we can handle the actual exception.
                    low_conn.close()
                    raise

        except (ProtocolError, socket.error) as err:
            raise ConnectionError(err, request=request)

        except MaxRetryError as e:
            if isinstance(e.reason, ConnectTimeoutError):
                # TODO: Remove this in 3.0.0: see #2811
                if not isinstance(e.reason, NewConnectionError):
                    raise ConnectTimeout(e, request=request)

            if isinstance(e.reason, ResponseError):
                raise RetryError(e, request=request)

            raise ConnectionError(e, request=request)

        except ClosedPoolError as e:
            raise ConnectionError(e, request=request)

        except _ProxyError as e:
            raise ProxyError(e)

        except (_SSLError, _HTTPError) as e:
            if isinstance(e, _SSLError):
                raise SSLError(e, request=request)
            elif isinstance(e, ReadTimeoutError):
                raise ReadTimeout(e, request=request)
            else:
                raise

        return self.build_response(request, resp)
| apache-2.0 |
bejar/kemlglearn | kemlglearn/time_series/smoothing/Smoothing.py | 1 | 10480 | """
.. module:: Smoothing
Smoothing
*************
:Description: Smoothing
:Authors: bejar
:Version:
:Created on: 21/07/2016 8:14
"""
__author__ = 'bejar'
from scipy.sparse.linalg import spsolve
import numpy as np
import scipy as scp
from scipy import sparse
from scipy.sparse import linalg
def numpy_smoothing(x, window_len=11, window='hanning'):
    """Smooth a 1-D signal by convolving it with a normalized window.

    ``window_len - 1`` reflected samples are prepended and appended so that
    transients at both ends are minimized, and the convolution result is
    trimmed back so that ``len(output) == len(x)``.

    :param x: 1-D input signal (numpy array).
    :param window_len: size of the smoothing window; values < 3 return ``x``
        unchanged. Should normally be an odd integer.
    :param window: one of 'flat' (moving average), 'hanning', 'hamming',
        'bartlett', 'blackman'.
    :return: the smoothed signal, same length as ``x``.
    :raises ValueError: if ``x`` is not 1-D, is shorter than ``window_len``,
        or ``window`` is not a recognized window name.

    Example:
        t = np.linspace(-2, 2, 41)
        y = numpy_smoothing(np.sin(t) + np.random.randn(len(t)) * 0.1)

    See also: numpy.hanning, numpy.hamming, numpy.bartlett, numpy.blackman,
    numpy.convolve, scipy.signal.lfilter.
    """
    if x.ndim != 1:
        raise ValueError("smooth only accepts 1 dimension arrays.")
    if x.size < window_len:
        raise ValueError("Input vector needs to be bigger than window size.")
    if window_len < 3:
        return x
    if window not in ('flat', 'hanning', 'hamming', 'bartlett', 'blackman'):
        # BUG FIX: message said "is on of"; also use the idiomatic `not in`.
        raise ValueError("Window is one of 'flat', 'hanning', 'hamming', 'bartlett', 'blackman'")

    # Reflect window_len - 1 samples at each end to damp boundary transients.
    s = np.r_[x[window_len - 1:0:-1], x, x[-1:-window_len:-1]]
    if window == 'flat':  # moving average
        w = np.ones(window_len, 'd')
    else:
        # BUG FIX: replaced `eval('np.' + window + '(window_len)')` with
        # getattr -- identical behavior for the whitelisted names, without
        # evaluating constructed source code.
        w = getattr(np, window)(window_len)
    y = np.convolve(w / w.sum(), s, mode='valid')
    # BUG FIX: the trim previously used `int(window_len / 2)` /
    # `int(-window_len / 2) + 1`. Under Python 3 true division, int()
    # truncates toward zero, so odd window lengths returned len(x) + 1
    # samples. Floor division reproduces the original Python 2 semantics
    # and yields exactly len(x) samples for both odd and even lengths.
    return y[window_len // 2:(-window_len) // 2 + 1]
def ALS_smoothing(y, lam, p, niter=10):
    """Smooth *y* with Eilers & Boelens' Asymmetric Least Squares.

    Each pass solves (W + lam * D D^T) z = W y, where D is the second-order
    difference operator and W a diagonal weight matrix, then re-weights the
    points asymmetrically depending on whether they lie above or below the
    fit. Also useful for baseline correction.

    Reference: Eilers & Boelens, "Baseline correction with asymmetric least
    squares smoothing", Leiden University Medical Centre Report, 2005.

    :param y: 1-D signal.
    :param lam: smoothness penalty; usual values 10^2 - 10^9.
    :param p: asymmetry; 0.001 - 0.1 for baseline removal, close to 0.9
        for plain smoothing.
    :param niter: number of re-weighting iterations (must be >= 1).
    :return: the smoothed signal.
    """
    n_points = len(y)
    second_diff = sparse.csc_matrix(np.diff(np.eye(n_points), 2))
    weights = np.ones(n_points)
    for _ in range(niter):
        weight_diag = sparse.spdiags(weights, 0, n_points, n_points)
        system = weight_diag + lam * second_diff.dot(second_diff.transpose())
        fitted = spsolve(system, weights * y)
        weights = p * (y > fitted) + (1 - p) * (y < fitted)
    return fitted
def tvdiplmax(y):
    """Return the smallest lambda for which TVDIP yields a constant output.

    For any lambda >= this value, the total-variation-denoising functional
    solved by ``tvdip`` is minimized by the trivial solution x = mean(y),
    so the returned value bounds the useful range of regularization
    parameters.

    Args:
        y: Original signal to denoise, size N x 1.

    Returns:
        lambdamax: Value of lambda at which x = mean(y) is the output of
            the TVDIP function.
    """
    n_samples = y.size
    m = n_samples - 1

    # Build the sparse first-difference operator D (m x n_samples).
    eye_m = sparse.eye(m)
    zero_col = sparse.dia_matrix((m, 1))
    diff_op = sparse.hstack([eye_m, zero_col]) - sparse.hstack([zero_col, eye_m])

    gram = diff_op.dot(diff_op.conj().T)
    dy = diff_op.dot(y)
    return np.absolute(linalg.spsolve(gram, dy)).max(0)
def tvdip(y, lambdas, display=1, stoptol=1e-3, maxiter=60):
    """Performs discrete total variation denoising (TVD) using a primal-dual
    interior-point solver. It minimizes the following discrete functional:

        E=(1/2)||y-x||_2^2+lambda*||Dx||_1

    over the variable x, given the input signal y, according to each value of
    the regularization parameter lambda > 0. D is the first difference matrix.
    Uses hot-restarts from each value of lambda to speed up convergence for
    subsequent values: best use of the feature is made by ensuring that the
    chosen lambda values are close to each other.

    Args:
        y: Original signal to denoise, size N x 1.
        lambdas: A vector of positive regularization parameters, size L x 1.
            TVD will be applied to each value in the vector.
        display: (Optional) Set to 0 to turn off progress display, 1 to turn
            on. Defaults to 1.
        stoptol: (Optional) Precision as determined by duality gap tolerance,
            if not specified defaults to 1e-3.
        maxiter: (Optional) Maximum interior-point iterations, if not
            specified defaults to 60.

    Returns:
        x: Denoised output signal for each value of lambda, size N x L.
        E: Objective functional at minimum for each lambda, size L x 1.
        s: Optimization result, 1 = solved, 0 = maximum iterations
            exceeded before reaching duality gap tolerance, size L x 1.
        lambdamax: Maximum value of lambda for the given y. If
            lambda >= lambdamax, the output is the trivial constant solution
            x = mean(y).
    """
    # Search tuning parameters
    ALPHA = 0.01  # Backtracking linesearch parameter (0,0.5]
    BETA = 0.5  # Backtracking linesearch parameter (0,1)
    MAXLSITER = 20  # Max iterations of backtracking linesearch
    MU = 2  # t update

    N = y.size  # Length of input signal y
    M = N - 1  # Size of Dx

    # Construct sparse operator matrices (first-difference operator D)
    I1 = sparse.eye(M)
    O1 = sparse.dia_matrix((M, 1))
    D = sparse.hstack([I1, O1]) - sparse.hstack([O1, I1])

    DDT = D.dot(D.conj().T)
    Dy = D.dot(y)

    # Find max value of lambda
    lambdamax = (np.absolute(linalg.spsolve(DDT, Dy))).max(0)

    if display:
        print("lambda_max=%5.2e" % lambdamax)

    L = lambdas.size
    x = np.zeros((N, L))
    s = np.zeros((L, 1))
    E = np.zeros((L, 1))

    # Optimization variables set up once at the start and reused across
    # lambdas (hot restart).
    z = np.zeros((M, 1))
    mu1 = np.ones((M, 1))
    mu2 = np.ones((M, 1))

    # Work through each value of lambda, with hot-restart on optimization
    # variables
    for idx, l in enumerate(lambdas):
        t = 1e-10
        step = np.inf
        f1 = z - l
        f2 = -z - l

        if display:
            print("Solving for lambda={0:5.2e}, lambda/lambda_max={1:5.2e}".format(l, l / lambdamax))
            print("Iter# primal Dual Gap")

        # Main optimization loop
        for iters in range(maxiter):
            DTz = (z.conj().T * D).conj().T
            DDTz = D.dot(DTz)
            w = Dy - (mu1 - mu2)

            # Calculate objectives and primal-dual gap
            pobj1 = 0.5 * w.conj().T.dot(linalg.spsolve(DDT, w)) + l * (np.sum(mu1 + mu2))
            pobj2 = 0.5 * DTz.conj().T.dot(DTz) + l * np.sum(np.absolute(Dy - DDTz))
            pobj = np.minimum(pobj1, pobj2)
            dobj = -0.5 * DTz.conj().T.dot(DTz) + Dy.conj().T.dot(z)
            gap = pobj - dobj

            if display:
                print("{:5d} {:7.2e} {:7.2e} {:7.2e}".format(iters, pobj[0, 0],
                                                             dobj[0, 0],
                                                             gap[0, 0]))

            # Test duality gap stopping criterion
            if np.all(gap <= stoptol):
                s[idx] = 1
                break

            if step >= 0.2:
                t = np.maximum(2 * M * MU / gap, 1.2 * t)

            # Do Newton step
            rz = DDTz - w
            Sdata = (mu1 / f1 + mu2 / f2)
            S = DDT - sparse.csc_matrix((Sdata.reshape(Sdata.size),
                                         (np.arange(M), np.arange(M))))
            r = -DDTz + Dy + (1 / t) / f1 - (1 / t) / f2
            dz = linalg.spsolve(S, r).reshape(r.size, 1)
            dmu1 = -(mu1 + ((1 / t) + dz * mu1) / f1)
            dmu2 = -(mu2 + ((1 / t) - dz * mu2) / f2)

            resDual = rz.copy()
            resCent = np.vstack((-mu1 * f1 - 1 / t, -mu2 * f2 - 1 / t))
            residual = np.vstack((resDual, resCent))

            # Perform backtracking linesearch
            negIdx1 = dmu1 < 0
            negIdx2 = dmu2 < 0
            step = 1
            if np.any(negIdx1):
                step = np.minimum(step,
                                  0.99 * (-mu1[negIdx1] / dmu1[negIdx1]).min(0))
            if np.any(negIdx2):
                step = np.minimum(step,
                                  0.99 * (-mu2[negIdx2] / dmu2[negIdx2]).min(0))

            for _ in range(MAXLSITER):
                newz = z + step * dz
                newmu1 = mu1 + step * dmu1
                newmu2 = mu2 + step * dmu2
                newf1 = newz - l
                newf2 = -newz - l

                # Update residuals
                newResDual = DDT.dot(newz) - Dy + newmu1 - newmu2
                newResCent = np.vstack((-newmu1 * newf1 - 1 / t, -newmu2 * newf2 - 1 / t))
                newResidual = np.vstack((newResDual, newResCent))

                if (np.maximum(newf1.max(0), newf2.max(0)) < 0
                        and (scp.linalg.norm(newResidual) <=
                             (1 - ALPHA * step) * scp.linalg.norm(residual))):
                    break
                step = BETA * step

            # Update primal and dual optimization parameters
            z = newz
            mu1 = newmu1
            mu2 = newmu2
            f1 = newf1
            f2 = newf2
        else:
            # BUG FIX: the original tested `if iters >= maxiter` after the
            # loop, but `range(maxiter)` never lets `iters` reach maxiter,
            # so non-convergence was never recorded. The for/else branch
            # runs exactly when the loop exhausts without hitting the
            # duality-gap break above.
            s[idx] = 0

        x[:, idx] = (y - D.conj().T.dot(z)).reshape(x.shape[0])
        xval = x[:, idx].reshape(x.shape[0], 1)
        E[idx] = 0.5 * np.sum((y - xval) ** 2) + l * np.sum(np.absolute(D.dot(xval)))

        if display:
            if s[idx]:
                # BUG FIX: was `print("...%5.2e") % gap`, i.e. None % gap,
                # which raised TypeError whenever convergence was reported.
                # Index [0, 0] matches the per-iteration display line above.
                print("Solved to precision of duality gap %5.2e" % gap[0, 0])
            else:
                print("Max iterations exceeded - solution may be inaccurate")
    return x, E, s, lambdamax
| mit |
SoberPeng/isaws | src/third_part/googletest/googletest/test/gtest_env_var_test.py | 343 | 4036 | #!/usr/bin/env python
#
# Copyright 2008, Google Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Verifies that Google Test correctly parses environment variables."""
__author__ = 'wan@google.com (Zhanyong Wan)'
import os
import gtest_test_utils
IS_WINDOWS = os.name == 'nt'
IS_LINUX = os.name == 'posix' and os.uname()[0] == 'Linux'
COMMAND = gtest_test_utils.GetTestExecutablePath('gtest_env_var_test_')
environ = os.environ.copy()
def AssertEq(expected, actual):
  """Raises AssertionError, after printing both values, unless they match."""
  if expected == actual:
    return
  print('Expected: %s' % (expected,))
  print(' Actual: %s' % (actual,))
  raise AssertionError
def SetEnvVar(env_var, value):
  """Sets the env variable to 'value'; unsets it when 'value' is None."""
  if value is None:
    # Remove the variable if present; a missing key is a no-op.
    environ.pop(env_var, None)
  else:
    environ[env_var] = value
def GetFlag(flag):
  """Runs gtest_env_var_test_ and returns its output."""
  # Pass the flag through only when one was given.
  args = [COMMAND] if flag is None else [COMMAND, flag]
  return gtest_test_utils.Subprocess(args, env=environ).output
def TestFlag(flag, test_val, default_val):
  """Verifies that the given flag is affected by the corresponding env var."""
  var_name = 'GTEST_' + flag.upper()
  # First check the overridden value, then unset the variable and check
  # that the flag falls back to its default.
  for value, expected in ((test_val, test_val), (None, default_val)):
    SetEnvVar(var_name, value)
    AssertEq(expected, GetFlag(flag))
class GTestEnvVarTest(gtest_test_utils.TestCase):
  """Exercises GTEST_* environment variable handling in the test binary."""

  def testEnvVarAffectsFlag(self):
    """Tests that environment variable should affect the corresponding flag."""
    for flag, test_val, default_val in (
        ('break_on_failure', '1', '0'),
        ('color', 'yes', 'auto'),
        ('filter', 'FooTest.Bar', '*'),
        ):
      TestFlag(flag, test_val, default_val)
    SetEnvVar('XML_OUTPUT_FILE', None)  # For 'output' test
    for flag, test_val, default_val in (
        ('output', 'xml:tmp/foo.xml', ''),
        ('print_time', '0', '1'),
        ('repeat', '999', '1'),
        ('throw_on_failure', '1', '0'),
        ('death_test_style', 'threadsafe', 'fast'),
        ('catch_exceptions', '0', '1'),
        ):
      TestFlag(flag, test_val, default_val)
    if IS_LINUX:
      TestFlag('death_test_use_fork', '1', '0')
      TestFlag('stack_trace_depth', '0', '100')

  def testXmlOutputFile(self):
    """Tests that $XML_OUTPUT_FILE affects the output flag."""
    SetEnvVar('GTEST_OUTPUT', None)
    SetEnvVar('XML_OUTPUT_FILE', 'tmp/bar.xml')
    AssertEq('xml:tmp/bar.xml', GetFlag('output'))

  def testXmlOutputFileOverride(self):
    """Tests that $XML_OUTPUT_FILE is overridden by $GTEST_OUTPUT"""
    SetEnvVar('GTEST_OUTPUT', 'xml:tmp/foo.xml')
    SetEnvVar('XML_OUTPUT_FILE', 'tmp/bar.xml')
    AssertEq('xml:tmp/foo.xml', GetFlag('output'))
# Entry point: delegate to Google Test's Python test runner.
if __name__ == '__main__':
  gtest_test_utils.Main()
| gpl-3.0 |
Microvellum/Fluid-Designer | win64-vc/2.78/python/lib/reportlab/lib/abag.py | 32 | 1140 | #Copyright ReportLab Europe Ltd. 2000-2012
#see license.txt for license details
#history http://www.reportlab.co.uk/cgi-bin/viewcvs.cgi/public/reportlab/trunk/reportlab/lib/abag.py
__version__=''' $Id$ '''
__doc__='''Data structure to hold a collection of attributes, used by styles.'''
class ABag:
    """
    'Attribute Bag' - a trivial BAG class for holding attributes.

    Keyword arguments become instance attributes:

        a = ABag(k0=v0, ..., kx=vx)  ==>  getattr(a, 'kx') == vx
        c = a.clone(ak0=av0, ...)    copy with optional extra attributes.

    The repr lists attributes sorted by name, e.g. ``ABag(a=1, b=2)``.
    """

    def __init__(self, **attr):
        self.__dict__.update(attr)

    def clone(self, **attr):
        """Return a shallow copy, optionally overriding/adding attributes."""
        dup = self.__class__(**self.__dict__)
        if attr:
            dup.__dict__.update(attr)
        return dup

    def __repr__(self):
        pairs = sorted(self.__dict__.items())
        body = ', '.join('%s=%r' % pair for pair in pairs)
        return '%s(%s)' % (self.__class__.__name__, body)
# Smoke-test demo: build a bag, clone it, and print both.
if __name__=="__main__":
    AB = ABag(a=1, c="hello")
    CD = AB.clone()
    print(AB)
    print(CD)
| gpl-3.0 |
apache/libcloud | libcloud/common/worldwidedns.py | 28 | 7244 | # Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import re
from libcloud.common.base import ConnectionUserAndKey
from libcloud.common.base import Response
from libcloud.common.types import ProviderError
OK_CODES = ['200', '211', '212', '213']
ERROR_CODES = ['401', '403', '405', '406', '407', '408', '409', '410', '411',
'412', '413', '414', '450', '451']
class WorldWideDNSException(ProviderError):
    # Base class for all WorldWideDNS API errors. `code` keeps the numeric
    # provider status code (one of the 4xx values listed in ERROR_CODES).
    def __init__(self, value, http_code, code, driver=None):
        self.code = code
        super(WorldWideDNSException, self).__init__(value, http_code, driver)
class SuspendedAccount(WorldWideDNSException):
    """Provider error 401: the account is suspended and must be renewed."""

    def __init__(self, http_code, driver=None):
        message = ("Login ID you supplied is SUSPENDED, you need to renew"
                   " your account")
        super(SuspendedAccount, self).__init__(message, http_code, 401,
                                               driver)
class LoginOrPasswordNotMatch(WorldWideDNSException):
    """Provider error 403: unknown login ID or wrong password."""

    def __init__(self, http_code, driver=None):
        message = ("Login ID and/or Password you supplied is not on file or"
                   " does not match")
        super(LoginOrPasswordNotMatch, self).__init__(message, http_code, 403,
                                                      driver)
class NonExistentDomain(WorldWideDNSException):
    """Provider error 405: the domain is not part of this account."""

    def __init__(self, http_code, driver=None):
        super(NonExistentDomain, self).__init__(
            "Domain name supplied is not in your account", http_code, 405,
            driver)
class CouldntRemoveDomain(WorldWideDNSException):
    """Provider error 406: removing the domain from the name server failed."""

    def __init__(self, http_code, driver=None):
        super(CouldntRemoveDomain, self).__init__(
            "Error occured removing domain from name server, try again",
            http_code, 406, driver)
class LimitExceeded(WorldWideDNSException):
    """Provider error 407: the account limit was exceeded."""

    def __init__(self, http_code, driver=None):
        super(LimitExceeded, self).__init__(
            "Your limit was exceeded, you need to upgrade your account",
            http_code, 407, driver)
class ExistentDomain(WorldWideDNSException):
    """Provider error 408: the domain already exists on the servers."""

    def __init__(self, http_code, driver=None):
        super(ExistentDomain, self).__init__(
            "Domain already exists on our servers", http_code, 408,
            driver)
class DomainBanned(WorldWideDNSException):
    """Provider error 409: the domain is DNSBL-listed and banned."""

    def __init__(self, http_code, driver=None):
        super(DomainBanned, self).__init__(
            "Domain is listed in DNSBL and is banned from our servers",
            http_code, 409, driver)
class InvalidDomainName(WorldWideDNSException):
    """Provider error 410: the supplied domain name is invalid."""

    def __init__(self, http_code, driver=None):
        super(InvalidDomainName, self).__init__(
            "Invalid domain name", http_code, 410, driver)
class ErrorOnReloadInNameServer(WorldWideDNSException):
    """Provider errors 411-413: name server #N errored while reloading."""

    def __init__(self, server, http_code, driver=None):
        template = "Name server #%d kicked an error on reload, contact support"
        # Servers 1-3 map to codes 411-413; any other value leaves the
        # locals unbound and fails at the super() call, as before.
        if server == 1:
            message, code = template % 1, 411
        elif server == 2:
            message, code = template % 2, 412
        elif server == 3:
            message, code = template % 3, 413
        super(ErrorOnReloadInNameServer, self).__init__(message, http_code,
                                                        code, driver)
class NewUserNotValid(WorldWideDNSException):
    """Provider error 414: the new user id is not valid."""

    def __init__(self, http_code, driver=None):
        super(NewUserNotValid, self).__init__(
            "New userid is not valid", http_code, 414, driver)
class CouldntReachNameServer(WorldWideDNSException):
    """Provider error 450: the name server could not be reached."""

    def __init__(self, http_code, driver=None):
        super(CouldntReachNameServer, self).__init__(
            "Couldn't reach the name server, try again later", http_code,
            450, driver)
class NoZoneFile(WorldWideDNSException):
    """Provider error 451: the queried name server has no zone file."""

    def __init__(self, http_code, driver=None):
        super(NoZoneFile, self).__init__(
            "No zone file in the name server queried", http_code, 451,
            driver)
ERROR_CODE_TO_EXCEPTION_CLS = {
'401': SuspendedAccount,
'403': LoginOrPasswordNotMatch,
'405': NonExistentDomain,
'406': CouldntRemoveDomain,
'407': LimitExceeded,
'408': ExistentDomain,
'409': DomainBanned,
'410': InvalidDomainName,
'411': ErrorOnReloadInNameServer,
'412': ErrorOnReloadInNameServer,
'413': ErrorOnReloadInNameServer,
'414': NewUserNotValid,
'450': CouldntReachNameServer,
'451': NoZoneFile,
}
class WorldWideDNSResponse(Response):
    """Response wrapper that maps WorldWideDNS status codes to exceptions."""

    def parse_body(self):
        """
        Parse response body.

        :return: Parsed body.
        :rtype: ``str``
        """
        if self._code_response(self.body):
            for status_code in re.split('\r?\n', self.body):
                if status_code in OK_CODES:
                    continue
                if status_code in ERROR_CODES:
                    exception_cls = ERROR_CODE_TO_EXCEPTION_CLS.get(status_code)
                    if status_code in ('411', '412', '413'):
                        # The third digit identifies the failing name server.
                        raise exception_cls(int(status_code[2]), self.status)
                    raise exception_cls(self.status)
        return self.body

    def _code_response(self, body):
        """
        Checks if the response body contains code status.

        :rtype: ``bool``
        """
        first_line = re.split('\r?\n', body)[0]
        return first_line in OK_CODES + ERROR_CODES
class WorldWideDNSConnection(ConnectionUserAndKey):
    host = 'www.worldwidedns.net'
    responseCls = WorldWideDNSResponse

    def add_default_params(self, params):
        """
        Add parameters that are necessary for every request.

        Injects the ``NAME``/``PASSWORD`` credentials and, when acting on
        behalf of a reseller, the ``ID`` parameter.
        """
        params["NAME"] = self.user_id
        params["PASSWORD"] = self.key
        reseller = getattr(self, 'reseller_id', None)
        if reseller:
            params["ID"] = reseller
        return params
| apache-2.0 |
nwchandler/ansible | lib/ansible/modules/remote_management/hpilo/hpilo_boot.py | 9 | 7149 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# Copyright 2012 Dag Wieers <dag@wieers.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
ANSIBLE_METADATA = {'metadata_version': '1.0',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: hpilo_boot
version_added: "2.3"
author: Dag Wieers (@dagwieers)
short_description: Boot system using specific media through HP iLO interface
description:
- "This module boots a system through its HP iLO interface. The boot media
can be one of: cdrom, floppy, hdd, network or usb."
- This module requires the hpilo python module.
options:
host:
description:
- The HP iLO hostname/address that is linked to the physical system.
required: true
login:
description:
- The login name to authenticate to the HP iLO interface.
default: Administrator
password:
description:
- The password to authenticate to the HP iLO interface.
default: admin
media:
description:
- The boot media to boot the system from
default: network
choices: [ "cdrom", "floppy", "hdd", "network", "normal", "usb" ]
image:
description:
- The URL of a cdrom, floppy or usb boot media image.
protocol://username:password@hostname:port/filename
- protocol is either 'http' or 'https'
- username:password is optional
- port is optional
state:
description:
- The state of the boot media.
- "no_boot: Do not boot from the device"
- "boot_once: Boot from the device once and then notthereafter"
- "boot_always: Boot from the device each time the serveris rebooted"
- "connect: Connect the virtual media device and set to boot_always"
- "disconnect: Disconnects the virtual media device and set to no_boot"
- "poweroff: Power off the server"
default: boot_once
choices: [ "boot_always", "boot_once", "connect", "disconnect", "no_boot", "poweroff" ]
force:
description:
- Whether to force a reboot (even when the system is already booted).
- As a safeguard, without force, hpilo_boot will refuse to reboot a server that is already running.
default: no
choices: [ "yes", "no" ]
ssl_version:
description:
- Change the ssl_version used.
default: TLSv1
choices: [ "SSLv3", "SSLv23", "TLSv1", "TLSv1_1", "TLSv1_2" ]
version_added: '2.4'
requirements:
- hpilo
notes:
- To use a USB key image you need to specify floppy as boot media.
- This module ought to be run from a system that can access the HP iLO
interface directly, either by using C(local_action) or using C(delegate_to).
'''
EXAMPLES = r'''
- name: Task to boot a system using an ISO from an HP iLO interface only if the system is an HP server
hpilo_boot:
host: YOUR_ILO_ADDRESS
login: YOUR_ILO_LOGIN
password: YOUR_ILO_PASSWORD
media: cdrom
image: http://some-web-server/iso/boot.iso
when: cmdb_hwmodel.startswith('HP ')
delegate_to: localhost
- name: Power off a server
hpilo_boot:
host: YOUR_ILO_HOST
login: YOUR_ILO_LOGIN
password: YOUR_ILO_PASSWORD
state: poweroff
delegate_to: localhost
'''
RETURN = '''
# Default return values
'''
import time
import warnings
from ansible.module_utils.basic import AnsibleModule
try:
import hpilo
HAS_HPILO = True
except ImportError:
HAS_HPILO = False
# Suppress warnings from hpilo
warnings.simplefilter('ignore')
def main():
    """Ansible entry point: configure HP iLO boot media and power state."""
    module = AnsibleModule(
        argument_spec=dict(
            host=dict(required=True, type='str'),
            login=dict(default='Administrator', type='str'),
            password=dict(default='admin', type='str', no_log=True),
            media=dict(default=None, type='str',
                       choices=['cdrom', 'floppy', 'rbsu', 'hdd', 'network', 'normal', 'usb']),
            image=dict(default=None, type='str'),
            state=dict(default='boot_once', type='str',
                       choices=['boot_always', 'boot_once', 'connect', 'disconnect', 'no_boot', 'poweroff']),
            force=dict(default=False, type='bool'),
            ssl_version=dict(default='TLSv1', choices=['SSLv3', 'SSLv23', 'TLSv1', 'TLSv1_1', 'TLSv1_2']),
        )
    )

    if not HAS_HPILO:
        module.fail_json(msg='The hpilo python module is required')

    host = module.params['host']
    login = module.params['login']
    password = module.params['password']
    media = module.params['media']
    image = module.params['image']
    state = module.params['state']
    force = module.params['force']

    # Map e.g. 'TLSV1_1' to ssl.PROTOCOL_TLSv1_1 (note the lowercase 'v').
    ssl_version = getattr(hpilo.ssl, 'PROTOCOL_' + module.params.get('ssl_version').upper().replace('V', 'v'))

    ilo = hpilo.Ilo(host, login=login, password=password, ssl_version=ssl_version)

    changed = False
    status = {}
    power_status = 'UNKNOWN'

    if media and state in ('boot_always', 'boot_once', 'connect', 'disconnect', 'no_boot'):
        # Workaround for: Error communicating with iLO: Problem manipulating EV
        try:
            ilo.set_one_time_boot(media)
        except hpilo.IloError:
            # Retry once after a pause; the firmware is sometimes slow to settle.
            time.sleep(60)
            ilo.set_one_time_boot(media)

        # TODO: Verify if image URL exists/works
        if image:
            ilo.insert_virtual_media(media, image)
            changed = True

        if media == 'cdrom':
            ilo.set_vm_status('cdrom', state, True)
            status = ilo.get_vm_status()
            changed = True
        elif media in ('floppy', 'usb'):
            ilo.set_vf_status(state, True)
            status = ilo.get_vf_status()
            changed = True

    # Only perform a boot when state is boot_once or boot_always, or in case we want to force a reboot
    if state in ('boot_once', 'boot_always') or force:
        power_status = ilo.get_host_power_status()

        if not force and power_status == 'ON':
            module.fail_json(msg='HP iLO (%s) reports that the server is already powered on !' % host)

        if power_status == 'ON':
            # ilo.cold_boot_server()
            ilo.warm_boot_server()
            changed = True
        else:
            ilo.press_pwr_btn()
            # ilo.reset_server()
            # ilo.set_host_power(host_power=True)
            changed = True

    elif state == 'poweroff':
        # BUG FIX: was `state in ('poweroff')` -- parentheses without a comma
        # make a plain string, so the check was a substring test rather than
        # tuple membership.
        power_status = ilo.get_host_power_status()

        if not power_status == 'OFF':
            ilo.hold_pwr_btn()
            # ilo.set_host_power(host_power=False)
            changed = True

    module.exit_json(changed=changed, power=power_status, **status)
# Standard Ansible module entry point.
if __name__ == '__main__':
    main()
| gpl-3.0 |
jesramirez/odoo | addons/sale/res_config.py | 148 | 8754 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Business Applications
# Copyright (C) 2004-2012 OpenERP S.A. (<http://openerp.com>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import logging
from openerp.osv import fields, osv
from openerp.tools.translate import _
_logger = logging.getLogger(__name__)
class sale_configuration(osv.TransientModel):
    # Transient wizard backing the "Settings > Sales" configuration screen.
    _inherit = 'sale.config.settings'

    # Each boolean either toggles a res.groups entry (via implied_group) or
    # requests (un)installation of the module named after the `module_`
    # prefix when the settings are applied by the framework.
    _columns = {
        'group_invoice_so_lines': fields.boolean('Generate invoices based on the sales order lines',
            implied_group='sale.group_invoice_so_lines',
            help="To allow your salesman to make invoices for sales order lines using the menu 'Lines to Invoice'."),
        'timesheet': fields.boolean('Prepare invoices based on timesheets',
            help='For modifying account analytic view to show important data to project manager of services companies.'
                 'You can also view the report of account analytic summary user-wise as well as month wise.\n'
                 '-This installs the module account_analytic_analysis.'),
        'module_account_analytic_analysis': fields.boolean('Use contracts management',
            help='Allows to define your customer contracts conditions: invoicing '
                 'method (fixed price, on timesheet, advance invoice), the exact pricing '
                 '(650€/day for a developer), the duration (one year support contract).\n'
                 'You will be able to follow the progress of the contract and invoice automatically.\n'
                 '-It installs the account_analytic_analysis module.'),
        'time_unit': fields.many2one('product.uom', 'The default working time unit for services is'),
        'group_sale_pricelist': fields.boolean("Use pricelists to adapt your price per customers",
            implied_group='product.group_sale_pricelist',
            help="""Allows to manage different prices based on rules per category of customers.
Example: 10% for retailers, promotion of 5 EUR on this product, etc."""),
        'group_uom': fields.boolean("Allow using different units of measure",
            implied_group='product.group_uom',
            help="""Allows you to select and maintain different units of measure for products."""),
        'group_discount_per_so_line': fields.boolean("Allow setting a discount on the sales order lines",
            implied_group='sale.group_discount_per_so_line',
            help="Allows you to apply some discount per sales order line."),
        'module_warning': fields.boolean("Allow configuring alerts by customer or products",
            help='Allow to configure notification on products and trigger them when a user wants to sell a given product or a given customer.\n'
                 'Example: Product: this product is deprecated, do not purchase more than 5.\n'
                 'Supplier: don\'t forget to ask for an express delivery.'),
        'module_sale_margin': fields.boolean("Display margins on sales orders",
            help='This adds the \'Margin\' on sales order.\n'
                 'This gives the profitability by calculating the difference between the Unit Price and Cost Price.\n'
                 '-This installs the module sale_margin.'),
        'module_website_quote': fields.boolean("Allow online quotations and templates",
            help='This adds the online quotation'),
        'module_sale_journal': fields.boolean("Allow batch invoicing of delivery orders through journals",
            help='Allows you to categorize your sales and deliveries (picking lists) between different journals, '
                 'and perform batch operations on journals.\n'
                 '-This installs the module sale_journal.'),
        'module_analytic_user_function': fields.boolean("One employee can have different roles per contract",
            help='Allows you to define what is the default function of a specific user on a given account.\n'
                 'This is mostly used when a user encodes his timesheet. The values are retrieved and the fields are auto-filled. '
                 'But the possibility to change these values is still available.\n'
                 '-This installs the module analytic_user_function.'),
        'module_project': fields.boolean("Project"),
        'module_sale_stock': fields.boolean("Trigger delivery orders automatically from sales orders",
            help='Allows you to Make Quotation, Sale Order using different Order policy and Manage Related Stock.\n'
                 '-This installs the module sale_stock.'),
        'group_sale_delivery_address': fields.boolean("Allow a different address for delivery and invoicing ",
            implied_group='sale.group_delivery_invoice_address',
            help="Allows you to specify different delivery and invoice addresses on a sales order."),
    }

    def default_get(self, cr, uid, fields, context=None):
        # Pre-fill the wizard: take the service time unit from the current
        # user's company (when the project module is installed) or from the
        # demo consultant product otherwise, and mirror the contracts-module
        # flag into the 'timesheet' checkbox.
        ir_model_data = self.pool.get('ir.model.data')
        res = super(sale_configuration, self).default_get(cr, uid, fields, context)
        if res.get('module_project'):
            user = self.pool.get('res.users').browse(cr, uid, uid, context)
            res['time_unit'] = user.company_id.project_time_mode_id.id
        else:
            product = ir_model_data.xmlid_to_object(cr, uid, 'product.product_product_consultant')
            if product and product.exists():
                res['time_unit'] = product.uom_id.id
        res['timesheet'] = res.get('module_account_analytic_analysis')
        return res

    def _get_default_time_unit(self, cr, uid, context=None):
        # Default to the UoM whose (translated) name is 'Hour', if any.
        ids = self.pool.get('product.uom').search(cr, uid, [('name', '=', _('Hour'))], context=context)
        return ids and ids[0] or False

    _defaults = {
        'time_unit': _get_default_time_unit,
    }

    def set_sale_defaults(self, cr, uid, ids, context=None):
        # Persist the chosen time unit on the consultant demo product and,
        # when the project module is enabled, on the company's project time
        # mode. Called by the framework when the settings form is applied.
        ir_model_data = self.pool.get('ir.model.data')
        wizard = self.browse(cr, uid, ids)[0]
        if wizard.time_unit:
            product = ir_model_data.xmlid_to_object(cr, uid, 'product.product_product_consultant')
            if product and product.exists():
                product.write({'uom_id': wizard.time_unit.id, 'uom_po_id': wizard.time_unit.id})
            else:
                _logger.warning("Product with xml_id 'product.product_product_consultant' not found, UoMs not updated!")
        if wizard.module_project and wizard.time_unit:
            user = self.pool.get('res.users').browse(cr, uid, uid, context)
            user.company_id.write({'project_time_mode_id': wizard.time_unit.id})
        return {}

    def onchange_task_work(self, cr, uid, ids, task_work, context=None):
        # Keep the two task-work related modules in sync with the checkbox.
        return {'value': {
            'module_project_timesheet': task_work,
            'module_sale_service': task_work,
        }}

    def onchange_timesheet(self, cr, uid, ids, timesheet, context=None):
        # Invoicing on timesheets requires the contracts module.
        return {'value': {
            'timesheet': timesheet,
            'module_account_analytic_analysis': timesheet,
        }}
class account_config_settings(osv.osv_memory):
    """Extend the accounting settings wizard with sale-related options."""
    _inherit = 'account.config.settings'
    _columns = {
        'module_sale_analytic_plans': fields.boolean('Use multiple analytic accounts on sales',
            help="""This allows install module sale_analytic_plans."""),
        'group_analytic_account_for_sales': fields.boolean('Analytic accounting for sales',
            implied_group='sale.group_analytic_accounting',
            help="Allows you to specify an analytic account on sales orders."),
    }
    def onchange_sale_analytic_plans(self, cr, uid, ids, module_sale_analytic_plans, context=None):
        """ change group_analytic_account_for_sales following module_sale_analytic_plans """
        # Installing the module implies enabling the analytic group; turning
        # the module off leaves the group untouched.
        if not module_sale_analytic_plans:
            return {}
        return {'value': {'group_analytic_account_for_sales': module_sale_analytic_plans}}
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
orlandi/connectomicsPerspectivesPaper | participants_codes/konnectomics/spikedetect/test_spikedetect.py | 2 | 1283 | # Copyright 2014 Alistair Muldal <alistair.muldal@pharm.ox.ac.uk>
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import numpy as np
from scipy.optimize import anneal, brute
def get_thresh_from_rate(n, dt=0.02, expected_rate=0.33, tmin=0.001, tmax=0.2,
tstep=1E-2):
n = np.atleast_2d(n)
nc, nt = n.shape
expected_nspikes = expected_rate * nc * nt * dt
def loss(thresh, n, expected_nspikes):
nspikes = (n // thresh).sum()
return (nspikes - expected_nspikes) ** 2
ranges = slice(tmin, tmax, tstep)
tmin = brute(loss, args=(n, expected_nspikes), ranges=(ranges,))
Jmin = loss(tmin, n, expected_nspikes)
return tmin, Jmin | mit |
perseas/Pyrseas | tests/dbobject/test_function.py | 1 | 24598 | # -*- coding: utf-8 -*-
"""Test functions"""
import pytest
from inspect import cleandoc
from pyrseas.testutils import DatabaseToMapTestCase
from pyrseas.testutils import InputMapToSqlTestCase, fix_indent
# Shared fixtures for the tests below: each SOURCEn is a SQL function body and
# each CREATE_STMTn is the canonical CREATE FUNCTION statement that the
# to_map()/to_sql() round-trips are checked against.
SOURCE1 = "SELECT 'dummy'::text"
CREATE_STMT1 = "CREATE FUNCTION sd.f1() RETURNS text LANGUAGE sql IMMUTABLE " \
    "AS $_$%s$_$" % SOURCE1
SOURCE2 = "SELECT GREATEST($1, $2)"
CREATE_STMT2 = "CREATE FUNCTION sd.f1(integer, integer) RETURNS integer " \
    "LANGUAGE sql IMMUTABLE AS $_$%s$_$" % SOURCE2
COMMENT_STMT = "COMMENT ON FUNCTION sd.f1(integer, integer) IS " \
    "'Test function f1'"
SOURCE3 = "SELECT * FROM generate_series($1, $2)"
CREATE_STMT3 = "CREATE FUNCTION f2(integer, integer) RETURNS SETOF integer " \
    "ROWS 20 LANGUAGE sql IMMUTABLE AS $_$%s$_$" % SOURCE3
SOURCE4 = "SELECT $1 + $2"
CREATE_STMT4 = "CREATE FUNCTION fadd(integer, integer) RETURNS integer " \
    "LANGUAGE sql IMMUTABLE AS $_$%s$_$" % SOURCE4
SOURCE5 = "SELECT $1 - $2"
CREATE_STMT5 = "CREATE FUNCTION fsub(integer, integer) RETURNS integer " \
    "LANGUAGE sql IMMUTABLE AS $_$%s$_$" % SOURCE5
class FunctionToMapTestCase(DatabaseToMapTestCase):
    """Test mapping of existing functions.

    Each test creates one or more functions in a live database and checks
    the YAML-style map produced by to_map() for the 'sd' schema.
    """
    def test_map_function1(self):
        "Map a very simple function with no arguments"
        dbmap = self.to_map([CREATE_STMT1])
        expmap = {'language': 'sql', 'returns': 'text',
                  'source': SOURCE1, 'volatility': 'immutable'}
        assert dbmap['schema sd']['function f1()'] == expmap
    def test_map_function_with_args(self):
        "Map a function with two arguments"
        stmts = ["CREATE FUNCTION f1(integer, integer) RETURNS integer "
                 "LANGUAGE sql AS $_$%s$_$" % SOURCE2]
        dbmap = self.to_map(stmts)
        assert dbmap['schema sd']['function f1(integer, integer)'] == \
            {'language': 'sql', 'returns': 'integer', 'source': SOURCE2}
    def test_map_function_default_args(self):
        "Map a function with default arguments"
        # NOTE(review): identical to test_map_function_with_args — the
        # statement declares no DEFAULT arguments; confirm intent upstream.
        stmts = ["CREATE FUNCTION f1(integer, integer) RETURNS integer "
                 "LANGUAGE sql AS $_$%s$_$" % SOURCE2]
        dbmap = self.to_map(stmts)
        assert dbmap['schema sd']['function f1(integer, integer)'] == \
            {'language': 'sql', 'returns': 'integer', 'source': SOURCE2}
    def test_map_void_function(self):
        "Map a function returning void"
        stmts = ["CREATE TABLE t1 (c1 integer, c2 text)",
                 "CREATE FUNCTION f1() RETURNS void LANGUAGE sql AS "
                 "$_$INSERT INTO t1 VALUES (1, 'dummy')$_$"]
        dbmap = self.to_map(stmts)
        expmap = {'language': 'sql', 'returns': 'void',
                  'source': "INSERT INTO t1 VALUES (1, 'dummy')"}
        assert dbmap['schema sd']['function f1()'] == expmap
    def test_map_setof_row_function(self):
        "Map a function returning a set of rows"
        stmts = ["CREATE TABLE t1 (c1 integer, c2 text)",
                 "CREATE FUNCTION f1() RETURNS SETOF t1 LANGUAGE sql AS "
                 "$_$SELECT * FROM t1$_$"]
        dbmap = self.to_map(stmts)
        # The return type is schema-qualified in the map ('SETOF sd.t1').
        expmap = {'language': 'sql', 'returns': 'SETOF sd.t1',
                  'source': "SELECT * FROM t1"}
        assert dbmap['schema sd']['function f1()'] == expmap
    def test_map_security_definer_function(self):
        "Map a function that is SECURITY DEFINER"
        stmts = ["CREATE FUNCTION f1() RETURNS text LANGUAGE sql "
                 "SECURITY DEFINER AS $_$%s$_$" % SOURCE1]
        dbmap = self.to_map(stmts)
        expmap = {'language': 'sql', 'returns': 'text',
                  'source': SOURCE1, 'security_definer': True}
        assert dbmap['schema sd']['function f1()'] == expmap
    def test_map_c_lang_function(self):
        "Map a dynamically loaded C language function"
        # NOTE 1: Needs contrib/spi module to be available
        # NOTE 2: Needs superuser privilege
        stmts = ["CREATE FUNCTION autoinc() RETURNS trigger "
                 "AS '$libdir/autoinc' LANGUAGE c"]
        dbmap = self.to_map(stmts, superuser=True)
        expmap = {'language': 'c', 'obj_file': '$libdir/autoinc',
                  'link_symbol': 'autoinc', 'returns': 'trigger'}
        assert dbmap['schema sd']['function autoinc()'] == expmap
    def test_map_function_config(self):
        "Map a function with a configuration parameter"
        stmts = ["CREATE FUNCTION f1() RETURNS date LANGUAGE sql SET "
                 "datestyle to postgres, dmy AS $_$SELECT CURRENT_DATE$_$"]
        dbmap = self.to_map(stmts)
        expmap = {'language': 'sql', 'returns': 'date',
                  'configuration': ['DateStyle=postgres, dmy'],
                  'source': "SELECT CURRENT_DATE"}
        assert dbmap['schema sd']['function f1()'] == expmap
    def test_map_function_comment(self):
        "Map a function comment"
        dbmap = self.to_map([CREATE_STMT2, COMMENT_STMT])
        assert dbmap['schema sd']['function f1(integer, integer)'][
            'description'] == 'Test function f1'
    def test_map_function_rows(self):
        "Map a function rows"
        dbmap = self.to_map([CREATE_STMT3])
        assert dbmap['schema sd']['function f2(integer, integer)'][
            'rows'] == 20
    def test_map_function_leakproof(self):
        "Map a function with LEAKPROOF qualifier"
        # LEAKPROOF requires superuser to create.
        stmt = CREATE_STMT4.replace("IMMUTABLE", "IMMUTABLE LEAKPROOF")
        dbmap = self.to_map([stmt], superuser=True)
        expmap = {'language': 'sql', 'returns': 'integer', 'leakproof': True,
                  'source': SOURCE4, 'volatility': 'immutable'}
        assert dbmap['schema sd']['function fadd(integer, integer)'] == \
            expmap
class FunctionToSqlTestCase(InputMapToSqlTestCase):
    """Test SQL generation from input functions.

    Each test feeds an input map to to_sql() and checks the generated DDL.
    Index 0 of the returned list is typically a SET/schema statement, so
    most assertions look at sql[1] and beyond.
    """
    def test_create_function1(self):
        "Create a very simple function with no arguments"
        inmap = self.std_map()
        inmap['schema sd'].update({'function f1()': {
            'language': 'sql', 'returns': 'text', 'source': SOURCE1,
            'volatility': 'immutable'}})
        sql = self.to_sql(inmap)
        assert fix_indent(sql[1]) == CREATE_STMT1
    def test_create_function_with_args(self):
        "Create a function with two arguments"
        inmap = self.std_map()
        inmap['schema sd'].update({
            'function f1(integer, integer)': {
                'language': 'sql', 'returns': 'integer', 'source': SOURCE2}})
        sql = self.to_sql(inmap)
        assert fix_indent(sql[1]) == "CREATE FUNCTION sd.f1(integer, integer)"\
            " RETURNS integer LANGUAGE sql AS $_$%s$_$" % SOURCE2
    def test_create_setof_row_function(self):
        "Create a function returning a set of rows"
        inmap = self.std_map()
        inmap['schema sd'].update({'table t1': {
            'columns': [{'c1': {'type': 'integer'}},
                        {'c2': {'type': 'text'}}]}})
        inmap['schema sd'].update({
            'function f1()': {'language': 'sql', 'returns': 'SETOF t1',
                              'source': "SELECT * FROM t1"}})
        sql = self.to_sql(inmap)
        assert fix_indent(sql[2]) == "CREATE FUNCTION sd.f1() RETURNS " \
            "SETOF t1 LANGUAGE sql AS $_$SELECT * FROM t1$_$"
    def test_create_setof_row_function_rows(self):
        "Create a function returning a set of rows with suggested number"
        inmap = self.std_map()
        inmap['schema sd'].update({'table t1': {
            'columns': [{'c1': {'type': 'integer'}},
                        {'c2': {'type': 'text'}}]}})
        inmap['schema sd'].update({
            'function f1()': {'language': 'sql', 'returns': 'SETOF t1',
                              'source': "SELECT * FROM t1", 'rows': 50}})
        sql = self.to_sql(inmap)
        assert fix_indent(sql[2]) == "CREATE FUNCTION sd.f1() RETURNS SETOF " \
            "t1 LANGUAGE sql ROWS 50 AS $_$SELECT * FROM t1$_$"
    def test_create_security_definer_function(self):
        "Create a SECURITY DEFINER function"
        inmap = self.std_map()
        inmap['schema sd'].update({'function f1()': {
            'language': 'sql', 'returns': 'text', 'source': SOURCE1,
            'security_definer': True}})
        sql = self.to_sql(inmap)
        assert fix_indent(sql[1]) == "CREATE FUNCTION sd.f1() RETURNS text " \
            "LANGUAGE sql SECURITY DEFINER AS $_$%s$_$" % SOURCE1
    def test_create_c_lang_function(self):
        "Create a dynamically loaded C language function"
        # NOTE 1: Needs contrib/spi module to be available
        # NOTE 2: Needs superuser privilege
        inmap = self.std_map()
        inmap['schema sd'].update({'function autoinc()': {
            'language': 'c', 'returns': 'trigger',
            'obj_file': '$libdir/autoinc'}})
        sql = self.to_sql(inmap)
        assert fix_indent(sql[0]) == "CREATE FUNCTION sd.autoinc() " \
            "RETURNS trigger LANGUAGE c AS '$libdir/autoinc', 'autoinc'"
    def test_create_function_config(self):
        "Create a function with a configuration parameter"
        inmap = self.std_map()
        inmap['schema sd'].update({'function f1()': {
            'language': 'sql', 'returns': 'date',
            'configuration': ['DateStyle=postgres, dmy'],
            'source': "SELECT CURRENT_DATE"}})
        sql = self.to_sql(inmap)
        assert fix_indent(sql[1]) == "CREATE FUNCTION sd.f1() RETURNS date " \
            "LANGUAGE sql SET DateStyle=postgres, dmy AS " \
            "$_$SELECT CURRENT_DATE$_$"
    def test_create_function_in_schema(self):
        "Create a function within a non-default schema"
        inmap = self.std_map()
        inmap.update({'schema s1': {'function f1()': {
            'language': 'sql', 'returns': 'text', 'source': SOURCE1,
            'volatility': 'immutable'}}})
        sql = self.to_sql(inmap, ["CREATE SCHEMA s1"])
        assert fix_indent(sql[1]) == "CREATE FUNCTION s1.f1() RETURNS text " \
            "LANGUAGE sql IMMUTABLE AS $_$%s$_$" % SOURCE1
    def test_bad_function_map(self):
        "Error creating a function with a bad map"
        # Key lacks the 'function ' prefix, so to_sql must reject it.
        inmap = self.std_map()
        inmap['schema sd'].update({'f1()': {
            'language': 'sql', 'returns': 'text', 'source': SOURCE1}})
        with pytest.raises(KeyError):
            self.to_sql(inmap)
    def test_drop_function1(self):
        "Drop an existing function with no arguments"
        sql = self.to_sql(self.std_map(), [CREATE_STMT1])
        assert sql == ["DROP FUNCTION sd.f1()"]
    def test_drop_function_with_args(self):
        "Drop an existing function which has arguments"
        sql = self.to_sql(self.std_map(), [CREATE_STMT2])
        assert sql == ["DROP FUNCTION sd.f1(integer, integer)"]
    def test_change_function_defn(self):
        "Change function definition"
        inmap = self.std_map()
        inmap['schema sd'].update({'function f1()': {
            'language': 'sql', 'returns': 'text',
            'source': "SELECT 'example'::text", 'volatility': 'immutable'}})
        sql = self.to_sql(inmap, [CREATE_STMT1])
        assert fix_indent(sql[1]) == "CREATE OR REPLACE FUNCTION sd.f1() " \
            "RETURNS text LANGUAGE sql IMMUTABLE AS " \
            "$_$SELECT 'example'::text$_$"
    def test_function_with_comment(self):
        "Create a function with a comment"
        inmap = self.std_map()
        inmap['schema sd'].update({
            'function f1(integer, integer)': {
                'description': 'Test function f1', 'language': 'sql',
                'returns': 'integer', 'source': SOURCE2}})
        sql = self.to_sql(inmap)
        assert fix_indent(sql[1]) == "CREATE FUNCTION sd.f1(integer, integer)"\
            " RETURNS integer LANGUAGE sql AS $_$%s$_$" % SOURCE2
        assert sql[2] == COMMENT_STMT
    def test_comment_on_function(self):
        "Create a comment for an existing function"
        inmap = self.std_map()
        inmap['schema sd'].update({
            'function f1(integer, integer)': {
                'description': 'Test function f1', 'language': 'sql',
                'returns': 'integer', 'source': SOURCE2}})
        sql = self.to_sql(inmap, [CREATE_STMT2])
        assert sql == [COMMENT_STMT]
    def test_drop_function_comment(self):
        "Drop a comment on an existing function"
        stmts = [CREATE_STMT2, COMMENT_STMT]
        inmap = self.std_map()
        inmap['schema sd'].update({
            'function f1(integer, integer)': {
                'language': 'sql', 'returns': 'integer', 'source': SOURCE2}})
        sql = self.to_sql(inmap, stmts)
        assert sql == ["COMMENT ON FUNCTION sd.f1(integer, integer) IS NULL"]
    def test_change_function_comment(self):
        "Change existing comment on a function"
        stmts = [CREATE_STMT2, COMMENT_STMT]
        inmap = self.std_map()
        inmap['schema sd'].update({
            'function f1(integer, integer)': {
                'description': 'Changed function f1', 'language': 'sql',
                'returns': 'integer', 'source': SOURCE2}})
        sql = self.to_sql(inmap, stmts)
        assert sql == ["COMMENT ON FUNCTION sd.f1(integer, integer) IS "
                       "'Changed function f1'"]
    def test_function_leakproof(self):
        "Create a function with LEAKPROOF qualifier"
        inmap = self.std_map()
        inmap['schema sd'].update({
            'function f1(integer, integer)': {
                'language': 'sql', 'returns': 'integer', 'leakproof': True,
                'source': SOURCE4, 'volatility': 'immutable'}})
        sql = self.to_sql(inmap, superuser=True)
        assert fix_indent(sql[1]) == "CREATE FUNCTION sd.f1(integer, integer)"\
            " RETURNS integer LANGUAGE sql IMMUTABLE LEAKPROOF AS " \
            "$_$%s$_$" % SOURCE4
    def test_alter_function_leakproof(self):
        "Change a function with LEAKPROOF qualifier"
        inmap = self.std_map()
        inmap['schema sd'].update({
            'function fadd(integer, integer)': {
                'language': 'sql', 'returns': 'integer',
                'source': SOURCE4, 'volatility': 'immutable'}})
        stmt = CREATE_STMT4.replace("IMMUTABLE", "IMMUTABLE LEAKPROOF")
        sql = self.to_sql(inmap, [stmt], superuser=True)
        assert fix_indent(sql[0]) == \
            "ALTER FUNCTION sd.fadd(integer, integer) NOT LEAKPROOF"
    def test_change_function_return_type(self):
        # Changing a return type requires DROP + CREATE; verify the final
        # CREATE matches the statement for the new type.
        source = lambda rtype: "SELECT '127.0.0.1'::{}".format(rtype)
        old_type = 'text'
        new_type = 'inet'
        statement = lambda rtype: cleandoc("""
            CREATE OR REPLACE FUNCTION sd.fget_addr()
            RETURNS {rtype}
            LANGUAGE sql
            IMMUTABLE
            AS $_${body}$_$"""
        ).format(
            rtype=rtype,
            body=source(rtype),
        ).replace('\n', ' ')
        inmap = self.std_map()
        inmap['schema sd'].update({
            'function fget_addr()': {
                'language': 'sql',
                'returns': new_type,
                'source': source(new_type),
            }
        })
        sql = self.to_sql(inmap, [statement(old_type)])
        assert statement(new_type) == fix_indent(sql[1])
class AggregateToMapTestCase(DatabaseToMapTestCase):
    """Test mapping of existing aggregates.

    Tests create a state-transition function plus an aggregate, then check
    both objects in the map produced by to_map().
    """
    def test_map_aggregate_simple(self):
        "Map a simple aggregate"
        stmts = [CREATE_STMT2, "CREATE AGGREGATE a1 (integer) ("
                 "SFUNC = f1, STYPE = integer)"]
        dbmap = self.to_map(stmts)
        expmap = {'sfunc': 'f1', 'stype': 'integer'}
        assert dbmap['schema sd']['function f1(integer, integer)'] == \
            {'language': 'sql', 'returns': 'integer', 'source': SOURCE2,
             'volatility': 'immutable'}
        assert dbmap['schema sd']['aggregate a1(integer)'] == expmap
    def test_map_aggregate_init_final(self):
        "Map an aggregate with an INITCOND and a FINALFUNC"
        stmts = [CREATE_STMT2,
                 "CREATE FUNCTION f2(integer) RETURNS float "
                 "LANGUAGE sql AS $_$SELECT $1::float$_$ IMMUTABLE",
                 "CREATE AGGREGATE a1 (integer) (SFUNC = f1, STYPE = integer, "
                 "FINALFUNC = f2, INITCOND = '-1')"]
        dbmap = self.to_map(stmts)
        expmap = {'sfunc': 'f1', 'stype': 'integer',
                  'initcond': '-1', 'finalfunc': 'f2'}
        assert dbmap['schema sd']['function f1(integer, integer)'] == \
            {'language': 'sql', 'returns': 'integer', 'source': SOURCE2,
             'volatility': 'immutable'}
        # 'float' is mapped back as the canonical 'double precision'.
        assert dbmap['schema sd']['function f2(integer)'] == \
            {'language': 'sql', 'returns': 'double precision',
             'source': "SELECT $1::float", 'volatility': 'immutable'}
        assert dbmap['schema sd']['aggregate a1(integer)'] == expmap
    def test_map_aggregate_sortop(self):
        "Map an aggregate with a SORTOP"
        stmts = [CREATE_STMT2, "CREATE AGGREGATE a1 (integer) ("
                 "SFUNC = f1, STYPE = integer, SORTOP = >)"]
        dbmap = self.to_map(stmts)
        expmap = {'sfunc': 'f1', 'stype': 'integer',
                  'sortop': 'pg_catalog.>'}
        assert dbmap['schema sd']['aggregate a1(integer)'] == expmap
    def test_map_moving_aggregate(self):
        "Map a moving-aggregate mode function"
        if self.db.version < 90400:
            self.skipTest('Only available on PG 9.4 and later')
        stmts = [CREATE_STMT4, CREATE_STMT5,
                 "CREATE AGGREGATE a1 (integer) (sfunc = fadd, "
                 "stype = integer, initcond = '0', msfunc = fadd, "
                 "minvfunc = fsub, mstype = integer, minitcond = '0')"]
        dbmap = self.to_map(stmts)
        expmap = {'sfunc': 'fadd', 'stype': 'integer', 'initcond': '0',
                  'msfunc': 'fadd', 'minvfunc': 'fsub', 'mstype': 'integer',
                  'minitcond': '0'}
        assert dbmap['schema sd']['aggregate a1(integer)'] == expmap
    def test_map_ordered_set_aggregate(self):
        "Map an ordered-set aggregate"
        if self.db.version < 90400:
            self.skipTest('Only available on PG 9.4 and later')
        stmts = [CREATE_STMT2, "CREATE AGGREGATE a1 (integer ORDER BY "
                 "integer) (sfunc = f1, stype = integer)"]
        dbmap = self.to_map(stmts)
        expmap = {'sfunc': 'f1', 'stype': 'integer', 'kind': 'ordered'}
        assert dbmap['schema sd'][
            'aggregate a1(integer ORDER BY integer)'] == expmap
    def test_map_aggregate_restricted(self):
        "Map an aggregate with restricted parallel safety"
        if self.db.version < 90600:
            self.skipTest('Only available on PG 9.6 and later')
        stmts = [CREATE_STMT2, "CREATE AGGREGATE a1 (integer) ("
                 "SFUNC = f1, STYPE = integer, PARALLEL = RESTRICTED)"]
        dbmap = self.to_map(stmts)
        expmap = {'sfunc': 'f1', 'stype': 'integer', 'parallel': 'restricted'}
        assert dbmap['schema sd']['aggregate a1(integer)'] == expmap
class AggregateToSqlTestCase(InputMapToSqlTestCase):
    """Test SQL generation from input aggregates.

    to_sql() emits the state-transition functions before the aggregate,
    which is why assertions index into the returned SQL list positionally.
    """
    def test_create_aggregate_simple(self):
        "Create a simple aggregate"
        inmap = self.std_map()
        inmap['schema sd'].update({'function f1(integer, integer)': {
            'language': 'sql', 'returns': 'integer', 'source': SOURCE2,
            'volatility': 'immutable'}})
        inmap['schema sd'].update({'aggregate a1(integer)': {
            'sfunc': 'f1', 'stype': 'integer'}})
        sql = self.to_sql(inmap)
        assert fix_indent(sql[1]) == CREATE_STMT2
        assert fix_indent(sql[2]) == "CREATE AGGREGATE sd.a1(integer) " \
            "(SFUNC = sd.f1, STYPE = integer)"
    def test_create_aggregate_sortop(self):
        "Create an aggregate that specifies a sort operator"
        inmap = self.std_map()
        inmap['schema sd'].update({'function f1(float, float)': {
            'language': 'sql', 'returns': 'float', 'source': SOURCE2,
            'volatility': 'immutable'}})
        inmap['schema sd'].update({'aggregate a1(float)': {
            'sfunc': 'f1', 'stype': 'float', 'sortop': 'pg_catalog.>'}})
        sql = self.to_sql(inmap)
        assert fix_indent(sql[2]) == "CREATE AGGREGATE sd.a1(float) " \
            "(SFUNC = sd.f1, STYPE = float, SORTOP = OPERATOR(pg_catalog.>))"
    def test_create_aggregate_init_final(self):
        "Create an aggregate with an INITCOND and a FINALFUNC"
        inmap = self.std_map()
        inmap['schema sd'].update({'function f1(integer, integer)': {
            'language': 'sql', 'returns': 'integer', 'source': SOURCE2,
            'volatility': 'immutable'}})
        inmap['schema sd'].update({'function f2(integer)': {
            'language': 'sql', 'returns': 'double precision',
            'source': "SELECT $1::float", 'volatility': 'immutable'}})
        inmap['schema sd'].update({'aggregate a1(integer)': {
            'sfunc': 'f1', 'stype': 'integer', 'initcond': '-1',
            'finalfunc': 'f2'}})
        sql = self.to_sql(inmap)
        # The two CREATE FUNCTION statements may come in either order;
        # sort them before comparing.
        funcs = sorted(sql[1:3])
        assert fix_indent(funcs[0]) == CREATE_STMT2
        assert fix_indent(funcs[1]) == "CREATE FUNCTION sd.f2(integer) " \
            "RETURNS double precision LANGUAGE sql IMMUTABLE " \
            "AS $_$SELECT $1::float$_$"
        assert fix_indent(sql[3]) == "CREATE AGGREGATE sd.a1(integer) " \
            "(SFUNC = sd.f1, STYPE = integer, FINALFUNC = sd.f2, " \
            "INITCOND = '-1')"
    def test_drop_aggregate(self):
        "Drop an existing aggregate"
        stmts = [CREATE_STMT2, "CREATE AGGREGATE agg1 (integer) "
                 "(SFUNC = f1, STYPE = integer)"]
        sql = self.to_sql(self.std_map(), stmts)
        # The aggregate must be dropped before its transition function.
        assert sql[0] == "DROP AGGREGATE sd.agg1(integer)"
        assert sql[1] == "DROP FUNCTION sd.f1(integer, integer)"
    def test_create_moving_aggregate(self):
        "Create a moving-aggregate mode function"
        if self.db.version < 90400:
            self.skipTest('Only available on PG 9.4 and later')
        inmap = self.std_map()
        inmap['schema sd'].update(
            {'function fadd(integer, integer)': {
                'language': 'sql', 'returns': 'integer', 'source': SOURCE4,
                'volatility': 'immutable'},
             'function fsub(integer, integer)': {
                 'language': 'sql', 'returns': 'integer', 'source': SOURCE5,
                 'volatility': 'immutable'},
             'aggregate a1(integer)': {
                 'sfunc': 'fadd', 'stype': 'integer', 'initcond': '0',
                 'msfunc': 'fadd', 'minvfunc': 'fsub', 'mstype': 'integer',
                 'minitcond': '0'}})
        sql = self.to_sql(inmap, [CREATE_STMT4, CREATE_STMT5])
        assert fix_indent(sql[0]) == "CREATE AGGREGATE sd.a1(integer) (" \
            "SFUNC = sd.fadd, STYPE = integer, INITCOND = '0', " \
            "MSFUNC = sd.fadd, MINVFUNC = sd.fsub, MSTYPE = integer, " \
            "MINITCOND = '0')"
    def test_create_hypothetical_set_aggregate(self):
        "Create a hypothetical-set aggregate"
        if self.db.version < 90400:
            self.skipTest('Only available on PG 9.4 and later')
        inmap = self.std_map()
        inmap['schema sd'].update({
            'function f1(integer, integer)': {
                'language': 'sql', 'returns': 'integer', 'source': SOURCE2,
                'volatility': 'immutable'},
            'aggregate a1(integer ORDER BY integer)': {
                'kind': 'hypothetical', 'sfunc': 'f1', 'stype': 'integer'}})
        sql = self.to_sql(inmap, [CREATE_STMT2])
        assert fix_indent(sql[0]) == "CREATE AGGREGATE sd.a1(integer " \
            "ORDER BY integer) (SFUNC = sd.f1, STYPE = integer, HYPOTHETICAL)"
    def test_create_aggregate_parallel_safe(self):
        "Create an aggregate with parallel safety"
        if self.db.version < 90600:
            self.skipTest('Only available on PG 9.6 and later')
        inmap = self.std_map()
        inmap['schema sd'].update({
            'function f1(integer, integer)': {
                'language': 'sql', 'returns': 'integer', 'source': SOURCE2,
                'volatility': 'immutable'},
            'aggregate a1(integer ORDER BY integer)': {
                'sfunc': 'f1', 'stype': 'integer', 'parallel': 'safe'}})
        sql = self.to_sql(inmap, [CREATE_STMT2])
        assert fix_indent(sql[0]) == "CREATE AGGREGATE sd.a1(integer " \
            "ORDER BY integer) (SFUNC = sd.f1, STYPE = integer, " \
            "PARALLEL = SAFE)"
| bsd-3-clause |
chylli/phantomjs | src/qt/qtwebkit/Tools/Scripts/webkitpy/port/gtk.py | 113 | 8060 | # Copyright (C) 2010 Google Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the Google name nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import os
import subprocess
from webkitpy.layout_tests.models.test_configuration import TestConfiguration
from webkitpy.port.base import Port
from webkitpy.port.pulseaudio_sanitizer import PulseAudioSanitizer
from webkitpy.port.xvfbdriver import XvfbDriver
class GtkPort(Port):
    """Layout-test port implementation for the WebKitGTK+ build."""
    port_name = "gtk"
    def __init__(self, *args, **kwargs):
        super(GtkPort, self).__init__(*args, **kwargs)
        # Temporarily disables PulseAudio modules during a test run so audio
        # tests behave deterministically; restored in clean_up_test_run().
        self._pulseaudio_sanitizer = PulseAudioSanitizer()
    def warn_if_bug_missing_in_test_expectations(self):
        return not self.get_option('webkit_test_runner')
    def _port_flag_for_scripts(self):
        return "--gtk"
    def _driver_class(self):
        # Tests run under a virtual X server.
        return XvfbDriver
    def default_timeout_ms(self):
        """Return the per-test timeout; Debug builds get twice as long."""
        if self.get_option('configuration') == 'Debug':
            return 12 * 1000
        return 6 * 1000
    def setup_test_run(self):
        super(GtkPort, self).setup_test_run()
        self._pulseaudio_sanitizer.unload_pulseaudio_module()
    def clean_up_test_run(self):
        super(GtkPort, self).clean_up_test_run()
        self._pulseaudio_sanitizer.restore_pulseaudio_module()
    def setup_environ_for_server(self, server_name=None):
        """Extend the server environment with GTK/WebKit-specific settings."""
        environment = super(GtkPort, self).setup_environ_for_server(server_name)
        environment['GTK_MODULES'] = 'gail'
        environment['GSETTINGS_BACKEND'] = 'memory'
        environment['LIBOVERLAY_SCROLLBAR'] = '0'
        environment['TEST_RUNNER_INJECTED_BUNDLE_FILENAME'] = self._build_path('Libraries', 'libTestRunnerInjectedBundle.la')
        environment['TEST_RUNNER_TEST_PLUGIN_PATH'] = self._build_path('TestNetscapePlugin', '.libs')
        environment['WEBKIT_INSPECTOR_PATH'] = self._build_path('Programs', 'resources', 'inspector')
        environment['AUDIO_RESOURCES_PATH'] = self.path_from_webkit_base('Source', 'WebCore', 'platform', 'audio', 'resources')
        self._copy_value_from_environ_if_set(environment, 'WEBKIT_OUTPUTDIR')
        return environment
    def _generate_all_test_configurations(self):
        configurations = []
        for build_type in self.ALL_BUILD_TYPES:
            configurations.append(TestConfiguration(version=self._version, architecture='x86', build_type=build_type))
        return configurations
    def _path_to_driver(self):
        return self._build_path('Programs', self.driver_name())
    def _path_to_image_diff(self):
        return self._build_path('Programs', 'ImageDiff')
    def _path_to_webcore_library(self):
        """Return the first WebKitGTK shared library found in the build, or None."""
        gtk_library_names = [
            "libwebkitgtk-1.0.so",
            "libwebkitgtk-3.0.so",
            "libwebkit2gtk-1.0.so",
        ]
        for library in gtk_library_names:
            full_library = self._build_path(".libs", library)
            if self._filesystem.isfile(full_library):
                return full_library
        return None
    def _search_paths(self):
        """Return baseline search directories, most specific first."""
        search_paths = []
        if self.get_option('webkit_test_runner'):
            search_paths.extend([self.port_name + '-wk2', 'wk2'])
        else:
            search_paths.append(self.port_name + '-wk1')
        search_paths.append(self.port_name)
        search_paths.extend(self.get_option("additional_platform_directory", []))
        return search_paths
    def default_baseline_search_path(self):
        return map(self._webkit_baseline_path, self._search_paths())
    def _port_specific_expectations_files(self):
        return [self._filesystem.join(self._webkit_baseline_path(p), 'TestExpectations') for p in reversed(self._search_paths())]
    # FIXME: We should find a way to share this implmentation with Gtk,
    # or teach run-launcher how to call run-safari and move this down to Port.
    def show_results_html_file(self, results_filename):
        run_launcher_args = ["file://%s" % results_filename]
        if self.get_option('webkit_test_runner'):
            run_launcher_args.append('-2')
        # FIXME: old-run-webkit-tests also added ["-graphicssystem", "raster", "-style", "windows"]
        # FIXME: old-run-webkit-tests converted results_filename path for cygwin.
        self._run_script("run-launcher", run_launcher_args)
    def check_sys_deps(self, needs_http):
        return super(GtkPort, self).check_sys_deps(needs_http) and XvfbDriver.check_xvfb(self)
    def _get_gdb_output(self, coredump_path):
        """Run gdb against a coredump and return (backtrace_text, stderr_lines)."""
        cmd = ['gdb', '-ex', 'thread apply all bt 1024', '--batch', str(self._path_to_driver()), coredump_path]
        proc = subprocess.Popen(cmd, stdin=None, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
        stdout, stderr = proc.communicate()
        errors = [l.strip().decode('utf8', 'ignore') for l in stderr.splitlines()]
        return (stdout.decode('utf8', 'ignore'), errors)
    def _get_crash_log(self, name, pid, stdout, stderr, newer_than):
        """Locate a matching coredump and return (stderr, formatted crash log).

        Coredumps are searched in $WEBKIT_CORE_DUMPS_DIRECTORY; when none is
        found (or the directory is unset), a help message explaining how to
        enable core dumps is returned instead of a backtrace.
        """
        pid_representation = str(pid or '<unknown>')
        log_directory = os.environ.get("WEBKIT_CORE_DUMPS_DIRECTORY")
        errors = []
        crash_log = ''
        expected_crash_dump_filename = "core-pid_%s-_-process_%s" % (pid_representation, name)
        def match_filename(filesystem, directory, filename):
            # With a known pid, require an exact dump-name match; otherwise
            # accept any dump whose name mentions the process.
            if pid:
                return filename == expected_crash_dump_filename
            return filename.find(name) > -1
        if log_directory:
            dumps = self._filesystem.files_under(log_directory, file_filter=match_filename)
            if dumps:
                # Get the most recent coredump matching the pid and/or process name.
                coredump_path = list(reversed(sorted(dumps)))[0]
                if not newer_than or self._filesystem.mtime(coredump_path) > newer_than:
                    crash_log, errors = self._get_gdb_output(coredump_path)
        stderr_lines = errors + (stderr or '<empty>').decode('utf8', 'ignore').splitlines()
        errors_str = '\n'.join(('STDERR: ' + l) for l in stderr_lines)
        if not crash_log:
            if not log_directory:
                log_directory = "/path/to/coredumps"
            core_pattern = os.path.join(log_directory, "core-pid_%p-_-process_%e")
            crash_log = """\
Coredump %(expected_crash_dump_filename)s not found. To enable crash logs:
- run this command as super-user: echo "%(core_pattern)s" > /proc/sys/kernel/core_pattern
- enable core dumps: ulimit -c unlimited
- set the WEBKIT_CORE_DUMPS_DIRECTORY environment variable: export WEBKIT_CORE_DUMPS_DIRECTORY=%(log_directory)s
""" % locals()
        return (stderr, """\
Crash log for %(name)s (pid %(pid_representation)s):
%(crash_log)s
%(errors_str)s""" % locals())
| bsd-3-clause |
nigma/django-twilio-sms | src/utils.py | 1 | 2718 | #-*- coding: utf-8 -*-
from __future__ import unicode_literals
from decimal import Decimal
import logging
from django.conf import settings
from django.core.urlresolvers import reverse
from django.utils.encoding import force_text
from twilio.rest import TwilioRestClient
from .models import OutgoingSMS
logger = logging.getLogger("django-twilio-sms.utils")
def build_callback_url(request, urlname, message):
    """
    Build Twilio callback url for confirming message delivery status

    Prefers the TWILIO_CALLBACK_DOMAIN setting; otherwise derives the
    absolute URL from the request. Raises ValueError when neither is
    available.

    :type message: OutgoingSMS
    """
    location = reverse(urlname, kwargs={"pk": message.pk})
    domain = getattr(settings, "TWILIO_CALLBACK_DOMAIN", None)
    if domain:
        use_https = getattr(settings, "TWILIO_CALLBACK_USE_HTTPS", False)
        scheme = "https" if use_https else "http"
        return "{}://{}{}".format(scheme, domain, location)
    if request is not None:
        return request.build_absolute_uri(location)
    raise ValueError(
        "Unable to build callback url. Configure TWILIO_CALLBACK_DOMAIN "
        "or pass request object to function call"
    )
def send_sms(request, to_number, body, callback_urlname="sms_status_callback"):
    """
    Create :class:`OutgoingSMS` object and send SMS using Twilio.

    The message row is created before sending so its pk can be embedded in
    the delivery-status callback URL. When ``TWILIO_DRY_MODE`` is set, the
    message is only logged and the row is left without Twilio metadata.
    Returns the :class:`OutgoingSMS` instance in both cases.
    """
    client = TwilioRestClient(settings.TWILIO_ACCOUNT_SID, settings.TWILIO_AUTH_TOKEN)
    from_number = settings.TWILIO_PHONE_NUMBER
    message = OutgoingSMS.objects.create(
        from_number=from_number,
        to_number=to_number,
        body=body,
    )
    status_callback = None
    if callback_urlname:
        status_callback = build_callback_url(request, callback_urlname, message)
    logger.debug("Sending SMS message to %s with callback url %s: %s.",
                 to_number, status_callback, body)
    if not getattr(settings, "TWILIO_DRY_MODE", False):
        sent = client.sms.messages.create(
            to=to_number,
            from_=from_number,
            body=body,
            status_callback=status_callback
        )
        logger.debug("SMS message sent: %s", sent.__dict__)
        # Copy Twilio's response metadata back onto the stored message.
        message.sms_sid = sent.sid
        message.account_sid = sent.account_sid
        message.status = sent.status
        message.to_parsed = sent.to
        if sent.price:
            # Price is a string in the API response; store it as a Decimal.
            message.price = Decimal(force_text(sent.price))
            message.price_unit = sent.price_unit
        message.sent_at = sent.date_created
        message.save(update_fields=[
            "sms_sid", "account_sid", "status", "to_parsed",
            "price", "price_unit", "sent_at"
        ])
    else:
        # Dry mode: log instead of hitting the Twilio API.
        logger.info("SMS: from %s to %s: %s", from_number, to_number, body)
    return message
| mit |
MTK6580/walkie-talkie | ALPS.L1.MP6.V2_HEXING6580_WE_L/alps/development/scripts/compare-installed-size.py | 4 | 3442 | #!/usr/bin/python
# Copyright (C) 2013 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""summarize and compare the component sizes in installed-files.txt."""
import sys
bin_size1 = {}
bin_size2 = {}
bin_sizes = [bin_size1, bin_size2]
file_sizes = {}
def PrintUsage():
    """Print command-line usage for this script and exit with status 1.

    Uses parenthesized single-argument print calls, which behave
    identically under Python 2 and Python 3 (the previous py2-only
    ``print "..."`` statements were a SyntaxError on Python 3).
    """
    print("usage: " + sys.argv[0] + " filename [filename2]")
    print("")
    print("  Input file is installed-files.txt from the build output directory.")
    print("  When only one input file is given, it will generate module_0.csv.")
    print("  When two input files are given, in addition it will generate")
    print("  module_1.csv and comparison.csv.")
    print("")
    print("  The module_x.csv file shows the aggregated file size in each module")
    print("  (eg bin, lib, app, ...)")
    print("  The comparison.cvs file shows the individual file sizes side by side")
    print("  from two different builds")
    print("")
    print("  These files can be uploaded to Google Doc for further processing.")
    sys.exit(1)
def ParseFile(install_file, idx):
    """Parse one installed-files.txt and accumulate per-module/per-file sizes.

    Updates the module-level dictionaries ``bin_sizes[idx]`` (total bytes
    per module, e.g. bin/lib/app) and ``file_sizes`` (per-file rows of
    [module, size_build1, size_build2]), then writes the per-module
    summary to module_<idx>.csv.

    install_file: path to installed-files.txt from a build output directory.
    idx: 0 for the first build, 1 for the second build.
    """
    # Files are now opened via context managers so handles are always
    # closed; the unused locals (filename, total) were removed.
    with open(install_file, 'r') as input_stream:
        for line in input_stream:
            # line = "25027208 /system/lib/libchromeview.so"
            line = line.strip()
            # size = "25027208", name = "/system/lib/libchromeview.so"
            size, name = line.split()
            # components = ["", "system", "lib", "libchromeview.so"]
            components = name.split('/')
            # module = "lib"
            module = components[2]
            # Sum up the file sizes by module name.
            if module not in bin_sizes[idx]:
                bin_sizes[idx][module] = int(size)
            else:
                bin_sizes[idx][module] += int(size)
            # Sometimes a file only exists in one build but not the other -
            # use 0 as the default size for the missing side.
            if idx == 0:
                file_sizes[name] = [module, size, 0]
            else:
                if name in file_sizes:
                    file_sizes[name][-1] = size
                else:
                    file_sizes[name] = [module, 0, size]
    # Output the per-module summary csv file.
    with open("module_%d.csv" % idx, 'w') as output:
        for key in bin_sizes[idx]:
            output.write("%s, %d\n" % (key, bin_sizes[idx][key]))
def main():
    """Entry point: parse one or two installed-files.txt and emit csv reports.

    With one input file, writes module_0.csv. With two, additionally writes
    module_1.csv and comparison.csv (side-by-side per-file sizes and delta).
    """
    if len(sys.argv) < 2 or len(sys.argv) > 3:
        PrintUsage()
    # Parse the first installed-files.txt
    ParseFile(sys.argv[1], 0)
    # Parse the second installed-files.txt, if given
    if len(sys.argv) == 3:
        ParseFile(sys.argv[2], 1)
    # comparison.csv has the following columns:
    # filename, module, size1, size2, size2-size1
    # eg: /system/lib/libchromeview.so, lib, 25027208, 33278460, 8251252
    # Use a context manager so the output file is closed even on error.
    with open("comparison.csv", 'w') as output:
        for key in file_sizes:
            output.write("%s, %s, %s, %s, %d\n" %
                         (key, file_sizes[key][0], file_sizes[key][1],
                          file_sizes[key][2],
                          int(file_sizes[key][2]) - int(file_sizes[key][1])))
# Run only when executed as a script, not when imported as a module.
if __name__ == '__main__':
    main()
# vi: ts=2 sw=2
| gpl-3.0 |
nvictus/hic2cool | hic2cool_extractnorms.py | 1 | 13474 | from __future__ import absolute_import, division, print_function, unicode_literals
import sys
import struct
import zlib
import numpy as np
import h5py
import math
import cooler
import pandas as pd
from collections import OrderedDict
version = 'dummy'
NORMS = ['VC', 'VC_SQRT', 'KR']
def readcstr(f):
    """Read a NUL-terminated C string from binary stream *f*.

    Returns the bytes up to (but not including) the first b"\\0",
    decoded as UTF-8 with undecodable bytes ignored.

    Bug fix: f.read(1) returns b"" at end-of-file, which matched neither
    the None check nor b"\\0", so the previous version looped forever on
    truncated input. EOF is now treated like the terminator.
    """
    buf = b""
    while True:
        b = f.read(1)
        # `not b` covers both None and the empty bytes returned at EOF.
        if not b or b == b"\0":
            return buf.decode("utf-8", errors="ignore")
        buf += b
def read_header(infile):
    """
    Takes in a .hic file and returns a dictionary containing information about
    the chromosome. Keys are chromosome index numbers (0 through # of chroms contained
    in file) and values are [chr idx (int), chr name (str), chrom length (str)].
    Returns the masterindex used by the file as well as the open file object.
    """
    req=open(infile, 'rb')
    chrs = {}
    resolutions = []
    # .hic files start with the 3-byte magic string "HIC" plus a NUL.
    magic_string = struct.unpack('<3s', req.read(3))[0]
    req.read(1)
    if (magic_string != b"HIC"):
        print('This does not appear to be a HiC file; magic string is incorrect')
        sys.exit()
    # File format version, stored in the module-level `version` global.
    global version
    version = struct.unpack('<i',req.read(4))[0]
    # Byte offset of the master index (footer) within the file.
    masterindex = struct.unpack('<q',req.read(8))[0]
    # Genome id is a NUL-terminated ASCII string.
    genome = b""
    c=req.read(1)
    while (c != b'\0'):
        genome += c
        c=req.read(1)
    genome = genome.decode('ascii')
    # Skip over the key/value attribute pairs; they are read to advance
    # the file position but their contents are not used here.
    nattributes = struct.unpack('<i',req.read(4))[0]
    for x in range(nattributes):
        key = readcstr(req)
        value = readcstr(req)
    nChrs = struct.unpack('<i',req.read(4))[0]
    for i in range(0, nChrs):
        name = readcstr(req)
        length = struct.unpack('<i',req.read(4))[0]
        if name and length:
            # Normalize names to UCSC-style "chr..." (and "chrMT" -> "chrM"),
            # leaving the special "All" pseudo-chromosome untouched.
            formatted_name = ('chr' + name if ('all' not in name.lower() and
                              'chr' not in name.lower()) else name)
            formatted_name = ('chrM' if formatted_name == 'chrMT' else
                              formatted_name)
            chrs[i] = [i, formatted_name, length]
    # Find bp-delimited resolutions supported by the hic file.
    nBpRes = struct.unpack('<i',req.read(4))[0]
    for x in range(0, nBpRes):
        res = struct.unpack('<i',req.read(4))[0]
        resolutions.append(res)
    return req, chrs, resolutions, masterindex, genome
def read_footer(req, master, norm, unit, resolution):
    """
    Takes in an open hic file and generates two dictionaries. pair_footer_info
    contains the file position of info for any given chromosome pair (formatted
    as a string). Chr_footer_info gives chromosome-level size and position info
    relative to the file. This way, this function only has to run once
    All of the unused read() code is used to find the correct place in the file,
    supposedly. This is code from straw.
    """
    pair_footer_info={}
    chr_footer_info={}
    req.seek(master)
    nBytes = struct.unpack('<i', req.read(4))[0]
    # Master index: maps "c1_c2" chromosome-pair keys to file positions.
    nEntries = struct.unpack('<i', req.read(4))[0]
    found = False
    for i in range(nEntries):
        stri = readcstr(req)
        fpos = struct.unpack('<q', req.read(8))[0]
        sizeinbytes = struct.unpack('<i', req.read(4))[0]
        pair_footer_info[stri] = fpos
    # Skip the expected-value vectors (raw); values are read only to
    # advance the stream to the normalized expected-value section.
    # NOTE(review): loop nesting reconstructed to match upstream straw —
    # confirm against theaidenlab/straw if parsing misbehaves.
    nExpectedValues = struct.unpack('<i',req.read(4))[0]
    for i in range(nExpectedValues):
        str_ = readcstr(req)
        binSize = struct.unpack('<i',req.read(4))[0]
        nValues = struct.unpack('<i',req.read(4))[0]
        for j in range(nValues):
            v = struct.unpack('<d',req.read(8))[0]
        nNormalizationFactors = struct.unpack('<i',req.read(4))[0]
        for j in range(nNormalizationFactors):
            chrIdx = struct.unpack('<i',req.read(4))[0]
            v = struct.unpack('<d',req.read(8))[0]
    # Skip the normalized expected-value vectors (note the extra leading
    # string: normalization type precedes the unit here).
    nExpectedValues = struct.unpack('<i',req.read(4))[0]
    for i in range(nExpectedValues):
        str_ = readcstr(req)
        str_ = readcstr(req)
        binSize = struct.unpack('<i',req.read(4))[0]
        nValues = struct.unpack('<i',req.read(4))[0]
        for j in range(nValues):
            v = struct.unpack('<d',req.read(8))[0]
        nNormalizationFactors = struct.unpack('<i',req.read(4))[0]
        for j in range(nNormalizationFactors):
            chrIdx = struct.unpack('<i',req.read(4))[0]
            v = struct.unpack('<d',req.read(8))[0]
    # Normalization-vector index: keep only entries matching the requested
    # norm type, unit and resolution.
    nEntries = struct.unpack('<i',req.read(4))[0]
    for i in range(nEntries):
        normtype = readcstr(req)
        chrIdx = struct.unpack('<i',req.read(4))[0]
        unit1 = readcstr(req)
        resolution1 = struct.unpack('<i',req.read(4))[0]
        filePosition = struct.unpack('<q',req.read(8))[0]
        sizeInBytes = struct.unpack('<i',req.read(4))[0]
        if (normtype==norm and unit1==unit and resolution1==resolution):
            chr_footer_info[chrIdx] = {'position':filePosition, 'size':sizeInBytes}
    return req, pair_footer_info, chr_footer_info
def read_normalization_vector(req, entry):
    """Read one normalization vector from the open .hic file.

    *entry* is a dict whose 'position' key gives the byte offset of the
    vector, serialized as a little-endian int32 count followed by that
    many little-endian doubles. Returns the values as a list of floats.
    """
    req.seek(entry['position'])
    count = struct.unpack('<i', req.read(4))[0]
    return [struct.unpack('<d', req.read(8))[0] for _ in range(count)]
def parse_norm(norm, req, chr1, chr2, unit, binsize, covered_chr_pairs,
               pair_footer_info, chr_footer_info, chrom_map):
    """
    Adapted from the straw() function in the original straw package.
    Mainly, since all chroms are iterated over, the read_header and read_footer
    functions were placed outside of straw() and made to be reusable across
    any chromosome pair.

    Stores the normalization vectors for chr1 and chr2 (keyed by chromosome
    name) in chrom_map and appends the pair's key to covered_chr_pairs.
    Exits via force_exit() on an invalid norm/unit or a missing pair.

    Bug fix: the invalid-norm/unit branches previously referenced
    warn_string before it was ever assigned, raising NameError instead of
    exiting cleanly; the message is now passed to force_exit directly.
    """
    if norm not in ("VC", "VC_SQRT", "KR"):
        force_exit(
            "Norm specified incorrectly, must be one of <NONE/VC/VC_SQRT/KR>",
            req)
    if unit not in ("BP", "FRAG"):
        force_exit("Unit specified incorrectly, must be one of <BP/FRAG>", req)
    chr1ind = chr1[0]
    chr2ind = chr2[0]
    # Matrices are stored upper-triangular, so the pair key is always
    # (smaller index)_(larger index).
    c1 = min(chr1ind, chr2ind)
    c2 = max(chr1ind, chr2ind)
    chr_key = str(c1) + "_" + str(c2)
    if chr_key not in pair_footer_info:
        warn_string = (
            'ERROR. There is a discrepancy between the chrs declared in the ' +
            'infile header and the actual information it contains.\nThe '
            'intersection between ' + chr1[1] + ' and ' + chr2[1] +
            ' could not be found in the file.')
        force_exit(warn_string, req)
    if norm != "NONE":
        c1Norm = read_normalization_vector(req, chr_footer_info[c1])
        c2Norm = read_normalization_vector(req, chr_footer_info[c2])
        chrom_map[chr1[1]] = c1Norm
        chrom_map[chr2[1]] = c2Norm
    covered_chr_pairs.append(chr_key)
def hic2cool_extractnorms(infile, outfile, resolution=0,
                          exclude_MT=False, command_line=False):
    """
    Main function that coordinates the reading of header and footer from infile
    and uses that information to parse the hic matrix.
    Opens outfile and writes in form of .cool file
    Params:
    <infile> str .hic filename
    <outfile> str .cool output filename (must already be an mcool written by
        cooler; the VC/VC_SQRT/KR columns are appended to its bins tables)
    <resolution> int bp bin size. If 0, use all. Defaults to 0.
    Final .cool structure will change depending on this param (see README)
    <exclude_MT> bool. If True, ignore MT contacts. Defaults to False.
    <command_line> bool. True if executing from run_hic.py. Prompts hic headers
    be printed to stdout.
    """
    # NOTE(review): these shadow the module-level imports; kept as-is.
    from collections import OrderedDict
    import cooler
    unit = 'BP'  # only using base pair unit for now
    resolution = int(resolution)
    req, used_chrs, resolutions, masteridx, genome = read_header(infile)
    # Index 0 is the "All" pseudo-chromosome, hence range starts at 1.
    chromosomes = [used_chrs[i][1] for i in range(1, len(used_chrs))]
    lengths = [used_chrs[i][2] for i in range(1, len(used_chrs))]
    chromsizes = pd.Series(index=chromosomes, data=lengths)
    if command_line:  # print hic header info for command line usage
        chr_names = [used_chrs[key][1] for key in used_chrs.keys()]
        print('################')
        print('### hic2cool ###')
        print('################')
        print('hic file header info:')
        print('Chromosomes: ', chr_names)
        print('Resolutions: ', resolutions)
        print('Genome: ', genome)
    if exclude_MT:  # remove chr25, which is MT, if this flag is set
        used_chrs.pop(25, None)
    # Ensure user input binsize is a resolution supported by the hic file.
    if resolution != 0 and resolution not in resolutions:
        error_str = (
            'ERROR. Given binsize (in bp) is not a supported resolution in ' +
            'this file.\nPlease use 0 (all resolutions) or use one of: ' +
            resolutions)
        force_exit(error_str, req)
    use_resolutions = resolutions if resolution == 0 else [resolution]
    # Map each bin size present in the output mcool to its group path.
    cooler_groups = {}
    for path in cooler.io.ls(outfile):
        binsize = cooler.Cooler(outfile + '::' + path).info['bin-size']
        cooler_groups[binsize] = path
    print('MCOOL contents:')
    print(cooler_groups)
    for norm in NORMS:
        print('Norm:', norm)
        for binsize in use_resolutions:
            chrom_map = {}
            bins = cooler.binnify(chromsizes, binsize)
            # Footer must be re-read per (norm, binsize) combination since
            # chr_footer_info is specific to both.
            req, pair_footer_info, chr_footer_info = read_footer(
                req, masteridx, norm, unit, binsize)
            covered_chr_pairs = []
            for chr_x in used_chrs:
                if used_chrs[chr_x][1].lower() == 'all':
                    continue
                for chr_y in used_chrs:
                    if used_chrs[chr_y][1].lower() == 'all':
                        continue
                    c1 = min(chr_x, chr_y)
                    c2 = max(chr_x, chr_y)
                    # ensure this is true
                    # since matrices are upper triangular, no need to cover
                    # c1-c2 and c2-c1 reciprocally
                    if str(c1) + "_" + str(c2) in covered_chr_pairs:
                        continue
                    parse_norm(
                        norm,
                        req,
                        used_chrs[c1],
                        used_chrs[c2],
                        unit,
                        binsize,
                        covered_chr_pairs,
                        pair_footer_info,
                        chr_footer_info,
                        chrom_map
                    )
            lengths_in_bins = bins.groupby('chrom').size()
            # hic normalization vector lengths have inconsistent lengths...
            # truncate appropriately
            vector = np.concatenate([
                chrom_map[chrom][:lengths_in_bins.loc[chrom]]
                for chrom in chromosomes
            ])
            bins[norm] = vector
            print('Resolution:', binsize)
            print(bins.head())
            print('Writing to cool file...')
            group_path = cooler_groups[binsize]
            cooler.io.append(
                outfile + '::' + group_path,
                'bins',
                {norm: bins[norm].values},
                force=True)
    req.close()
def force_exit(message, req):
    """Abort the program after an unrecoverable error.

    Closes the open input file *req*, writes *message* to stderr, and
    terminates via SystemExit.
    """
    req.close()
    sys.stderr.write(message + "\n")
    sys.exit()
# Command-line entry point; argparse is only imported when run as a script.
if __name__ == '__main__':
    import argparse

    def main():
        """
        Execute the program from the command line
        Args are:
        python hic2cool.py <infile (.hic)> <outfile (.cool)> <resolutions
        desired (defaults to all, optionally bp int)> <normalization type
        (defaults to 'KR', optionally 'NONE', 'VC', or 'VC_SQRT')>
        <exclude MT (default False)>
        """
        parser = argparse.ArgumentParser()
        parser.add_argument("infile", help=".hic input file")
        parser.add_argument("outfile", help=".cool output file")
        parser.add_argument("-r", "--resolution",
                            help="integer bp resolution desired in cooler file. "
                            "Setting to 0 (default) will use all resolutions. "
                            "If all resolutions are used, a multi-res .cool file will be "
                            "created, which has a different hdf5 structure. See the "
                            "README for more info", type=int, default=0)
        parser.add_argument("-e", "--exclude_MT",
                            help="if used, exclude the mitochondria (MT) from the output",
                            action="store_true")
        args = parser.parse_args()
        # these parameters adapted from theaidenlab/straw
        # KR is default normalization type and BP is the unit for binsize
        # (all norms in NORMS are extracted; the normalization arg is unused)
        hic2cool_extractnorms(
            args.infile,
            args.outfile,
            args.resolution,
            #args.normalization,
            args.exclude_MT,
            True)

    main()
| mit |
LukeMurphey/splunk-google-drive | src/bin/google_drive_app/chardet/euckrprober.py | 290 | 1748 | ######################## BEGIN LICENSE BLOCK ########################
# The Original Code is mozilla.org code.
#
# The Initial Developer of the Original Code is
# Netscape Communications Corporation.
# Portions created by the Initial Developer are Copyright (C) 1998
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
# Mark Pilgrim - port to Python
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301 USA
######################### END LICENSE BLOCK #########################
from .mbcharsetprober import MultiByteCharSetProber
from .codingstatemachine import CodingStateMachine
from .chardistribution import EUCKRDistributionAnalysis
from .mbcssm import EUCKR_SM_MODEL
class EUCKRProber(MultiByteCharSetProber):
    """Charset prober for the EUC-KR (Korean) multi-byte encoding.

    Wires the EUC-KR coding state machine and character-distribution
    analyser into the generic MultiByteCharSetProber framework.
    """

    def __init__(self):
        super(EUCKRProber, self).__init__()
        # State machine that validates byte sequences against the EUC-KR model.
        self.coding_sm = CodingStateMachine(EUCKR_SM_MODEL)
        # Scores how typical the observed characters are for Korean text.
        self.distribution_analyzer = EUCKRDistributionAnalysis()
        self.reset()

    @property
    def charset_name(self):
        # Canonical name reported when this prober wins.
        return "EUC-KR"

    @property
    def language(self):
        return "Korean"
def main(request, response):
    """wptserve handler: serve a test page under a "font-src 'self'" CSP.

    Sets the standard Content-Security-Policy header plus the legacy
    X-prefixed variants, then returns an HTML page that applies a
    same-origin @font-face; the manual test passes if the font loads
    (i.e. the policy allows it).
    """
    import simplejson as json
    f = file('config.json')
    source = f.read()
    s = json.JSONDecoder().decode(source)
    # NOTE(review): url1/url2 are computed but unused in this test; the
    # config.json read is kept since sibling tests follow the same pattern.
    url1 = "http://" + s['host'] + ":" + str(s['ports']['http'][1])
    url2 = "http://" + s['host'] + ":" + str(s['ports']['http'][0])
    # Standard header plus prefixed variants for older engines.
    response.headers.set("Content-Security-Policy", "font-src 'self'")
    response.headers.set("X-Content-Security-Policy", "font-src 'self'")
    response.headers.set("X-WebKit-CSP", "font-src 'self'")
    return """<!DOCTYPE html>
<!--
Copyright (c) 2013 Intel Corporation.
Redistribution and use in source and binary forms, with or without modification,
are permitted provided that the following conditions are met:
* Redistributions of works must retain the original copyright notice, this list
of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the original copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.
* Neither the name of Intel Corporation nor the names of its contributors
may be used to endorse or promote products derived from this work without
specific prior written permission.
THIS SOFTWARE IS PROVIDED BY INTEL CORPORATION "AS IS"
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
ARE DISCLAIMED. IN NO EVENT SHALL INTEL CORPORATION BE LIABLE FOR ANY DIRECT,
INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE,
EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
Authors:
Hao, Yunfei <yunfeix.hao@intel.com>
-->
<html>
<head>
<title>CSP Test: csp_font-src_self_allowed</title>
<link rel="author" title="Intel" href="http://www.intel.com"/>
<link rel="help" href="http://www.w3.org/TR/2012/CR-CSP-20121115/#font-src"/>
<meta name="flags" content=""/>
<meta name="assert" content="font-src 'self'"/>
<meta charset="utf-8"/>
<style>
@font-face {
font-family: Canvas;
src: url("support/w3c/CanvasTest.ttf");
}
#test {
font-family: Canvas;
}
</style>
</head>
<body>
<p>Test passes if the two lines are different in font</p>
<div id="test">1234 ABCD</div>
<div>1234 ABCD</div>
</body>
</html> """
| bsd-3-clause |
phobson/bokeh | bokeh/command/subcommands/tests/test_json.py | 13 | 3465 | from __future__ import absolute_import
import argparse
import pytest
import os
import sys
is_python2 = sys.version_info[0] == 2
import bokeh.command.subcommands.json as scjson
from bokeh.command.bootstrap import main
from bokeh.util.testing import TmpDir, WorkingDir, with_directory_contents
from . import basic_scatter_script
def test_create():
    """Constructing scjson.JSON yields a Subcommand instance."""
    import argparse
    from bokeh.command.subcommand import Subcommand

    subcommand = scjson.JSON(parser=argparse.ArgumentParser())
    assert isinstance(subcommand, Subcommand)
def test_name():
    """The subcommand must be registered under the name "json"."""
    expected_name = "json"
    assert scjson.JSON.name == expected_name
def test_help():
    """The subcommand advertises the expected one-line help text."""
    expected_help = "Create JSON files for one or more applications"
    assert scjson.JSON.help == expected_help
def test_args():
    """Pin the exact argparse argument spec exposed by the json subcommand."""
    assert scjson.JSON.args == (
        ('files', dict(
            metavar='DIRECTORY-OR-SCRIPT',
            nargs='+',
            help="The app directories or scripts to generate JSON for",
            default=None
        )),
        ('--indent', dict(
            metavar='LEVEL',
            type=int,
            help="indentation to use when printing",
            default=None
        )),
        (('-o', '--output'), dict(
            metavar='FILENAME',
            action='append',
            type=str,
            help="Name of the output file or - for standard output."
        )),
        # REMAINDER forwards any trailing args to the application handler.
        ('--args', dict(
            metavar='COMMAND-LINE-ARGS',
            nargs=argparse.REMAINDER,
            help="Any command line arguments remaining are passed on to the application handler",
        )),
    )
def test_no_script(capsys):
    """Running `bokeh json` with no script must exit with a usage error."""
    with (TmpDir(prefix="bokeh-json-no-script")) as dirname:
        with WorkingDir(dirname):
            with pytest.raises(SystemExit):
                main(["bokeh", "json"])
    out, err = capsys.readouterr()
    # argparse wording for a missing positional differs between py2 and py3.
    if is_python2:
        too_few = "too few arguments"
    else:
        too_few = "the following arguments are required: DIRECTORY-OR-SCRIPT"
    assert err == """usage: bokeh json [-h] [--indent LEVEL] [-o FILENAME] [--args ...]
DIRECTORY-OR-SCRIPT [DIRECTORY-OR-SCRIPT ...]
bokeh json: error: %s
""" % (too_few)
    assert out == ""
def test_basic_script(capsys):
    """`bokeh json script.py` writes scatter.json silently."""
    def check(dirname):
        with WorkingDir(dirname):
            main(["bokeh", "json", "scatter.py"])
        captured_out, captured_err = capsys.readouterr()
        assert captured_err == ""
        assert captured_out == ""
        assert set(os.listdir(dirname)) == set(["scatter.json", "scatter.py"])

    with_directory_contents({'scatter.py': basic_scatter_script}, check)
def test_basic_script_with_output_after(capsys):
    """--output after the script name controls the JSON filename."""
    def check(dirname):
        with WorkingDir(dirname):
            main(["bokeh", "json", "scatter.py", "--output", "foo.json"])
        captured_out, captured_err = capsys.readouterr()
        assert captured_err == ""
        assert captured_out == ""
        assert set(os.listdir(dirname)) == set(["foo.json", "scatter.py"])

    with_directory_contents({'scatter.py': basic_scatter_script}, check)
def test_basic_script_with_output_before(capsys):
    """--output before the script name works identically."""
    def check(dirname):
        with WorkingDir(dirname):
            main(["bokeh", "json", "--output", "foo.json", "scatter.py"])
        captured_out, captured_err = capsys.readouterr()
        assert captured_err == ""
        assert captured_out == ""
        assert set(os.listdir(dirname)) == set(["foo.json", "scatter.py"])

    with_directory_contents({'scatter.py': basic_scatter_script}, check)
| bsd-3-clause |
GENI-NSF/gram | opsmon/gram_opsmon_populator_v2.py | 1 | 39738 | #----------------------------------------------------------------------
# Copyright (c) 2011-2014 Raytheon BBN Technologies
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and/or hardware specification (the "Work") to
# deal in the Work without restriction, including without limitation the
# rights to use, copy, modify, merge, publish, distribute, sublicense,
# and/or sell copies of the Work, and to permit persons to whom the Work
# is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Work.
#
# THE WORK IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
# HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE WORK OR THE USE OR OTHER DEALINGS
# IN THE WORK.
#----------------------------------------------------------------------
# Module to populate monitoring database with live statistics of
# State of all the compute nodes on this rack
import gram_slice_info
import json
import os
import psutil
import subprocess
import sys
import tempfile
import time
opsmon_path='/home/gram/ops-monitoring'
local_path = opsmon_path + "/local"
common_path = opsmon_path + "/common"
config_path = opsmon_path + "/config"
sys.path.append(opsmon_path)
sys.path.append(local_path)
sys.path.append(common_path)
sys.path.append(config_path)
import table_manager
# Class to generate data files representing current selected states
# on all compute hosts, and populate database tables accordingly
# Maintains a rolling window of data, deleting elements older than a given
# time before present
class OpsMonPopulator:
def __init__(self, config):
    """Initialize the populator from the opsmon *config* dict.

    Reads polling/windowing parameters, database credentials, aggregate
    identity, monitored hosts and the per-node/per-interface measurement
    command specs; builds the rsh command strings used to sample remote
    hosts; and discovers the rack's static node inventory.
    """
    self._frequency_sec = int(config['frequency_sec'])
    self._window_duration_sec = int(config['window_duration_sec'])
    self._database_user = config['database_user']
    self._database_pwd = config['database_pwd']
    self._database_name = config['database_name']
    # External IP addr of Gram rack local datastore
    self._base_url = config['base_address']
    self._aggregate_id = config['aggregate_id']
    self._aggregate_urn = config['aggregate_urn']
    # Aggregate info query url
    self._aggregate_href = self._base_url + "/info/aggregate/" + self._aggregate_id
    # Time-series data measurement reference
    self._measurement_href = self._base_url + "/data/"
    self._hosts = config['hosts']
    self._modules = config['modules']
    self._node_commands = config['node_commands']
    self._interface_commands = config['interface_commands']
    # Per-table cache of previously sampled values (for delta computation).
    self._prev_values = {}
    self._config = config
    self._table_manager = table_manager.TableManager('local', config_path, False)
    for cmd in self._node_commands:
        tablename = cmd['table']
        self._prev_values[tablename] = {}
    self._gram_config = json.loads(open('/etc/gram/config.json').read())
    self._recent_snapshot = None
    self._objects_by_urn = {} # Current objects read by snapshot
    # json-schema URLs stamped onto every row of the corresponding table.
    self._agg_schema = "http://www.gpolab.bbn.com/monitoring/schema/20140501/aggregate#"
    self._authority_schema = "http://www.gpolab.bbn.com/monitoring/schema/20140501/authority#"
    self._node_schema = "http://www.gpolab.bbn.com/monitoring/schema/20140501/node#"
    self._sliver_schema = "http://www.gpolab.bbn.com/monitoring/schema/20140501/sliver#"
    self._slice_schema = "http://www.gpolab.bbn.com/monitoring/schema/20140501/slice#"
    self._interface_schema = "http://www.gpolab.bbn.com/monitoring/schema/20140501/port#"
    self._interfacevlan_schema = "http://www.gpolab.bbn.com/monitoring/schema/20140501/port-vlan#"
    self._link_schema = "http://www.gpolab.bbn.com/monitoring/schema/20140501/link#"
    self._user_schema = "http://www.gpolab.bbn.com/monitoring/schema/20140501/user#"
    # Change ' to " in any expressions (can't parse " in json)
    for cmd in self._node_commands:
        expr = cmd['expression']
        expr_new = expr.replace("'", '\\"')
        cmd['expression'] = expr_new
    # Assemble the remote python one-liners run over rsh on each host:
    # one evaluating all node measurement expressions, one dumping
    # per-NIC I/O counters as JSON.
    imports = ";".join("import %s" % mod for mod in self._modules)
    measurements = ", ".join(c['expression'] for c in self._node_commands)
    self._rsh_command = "python -c %s%s;print %s%s" % \
        ('"', imports, measurements, '"')
    # print "RSH = %s" % self._rsh_command
    self._interface_info_rsh_command = "python -c %s%s%s;print %s%s" % \
        ('"', imports, ';import json', 'json.dumps(psutil.net_io_counters(pernic=True))', '"')
    # print "IFACE RSH = %s" % self._interface_info_rsh_command
    self._external_vlans = self._compute_external_vlans()
    self._internal_vlans = \
        parseVLANs(self._gram_config['internal_vlans'])
    self._nodes = {}
    self._links = {}
    self._initialize_nodes()
# setting the nodes dictionary using dense code format
def _initialize_nodes(self):
    """Populate self._nodes with static per-host info.

    For each configured host, queries the host over rsh for its total
    physical memory (via psutil) and records id/urn/href/schema plus
    the memory total, keyed by opsmon node id.
    """
    for node in self._config['hosts']:
        hostname = node['id']
        node_id = self.get_node_id(hostname)
        node_urn = node['urn']
        node_address = node['address']
        # Remote one-liner: report total RAM in KB (bytes / 1000).
        imports = "import psutil"
        measurements = "psutil.virtual_memory().total/1000"
        rsh_memory_command = "python -c%s%s;print %s%s" % \
            ('"', imports, measurements, '"')
        rsh_command = ['rsh', node_address, rsh_memory_command]
        result = subprocess.check_output(rsh_command)
        mem_total_kb = int(result)
        node_href = self.get_node_href(node_id)
        self._nodes[node_id] = {'id':node_id, 'urn': node_urn,
                                'host' : hostname,
                                'href': node_href, 'mem_total_kb': mem_total_kb,
                                'schema': self._node_schema}
def get_node_id(self, node_name):
return self._aggregate_id + "." + node_name
def get_slice_id(self, slice_urn):
    """Return the flattened slice id: aggregate id + "." + slice urn."""
    return flatten_urn("{0}.{1}".format(self._aggregate_id, slice_urn))
def get_authority_id(self, authority_urn):
    """Return the flattened (URL-safe) form of an authority URN."""
    return flatten_urn(authority_urn)
def get_user_id(self, user_urn):
    """Return the flattened user id: aggregate id + "." + user urn."""
    return flatten_urn("{0}.{1}".format(self._aggregate_id, user_urn))
def get_link_id(self, link_urn):
    """Return the flattened (URL-safe) form of a link URN."""
    return flatten_urn(link_urn)
def get_interface_urn(self, node_urn, iface_name):
    """Return the interface URN: "<node_urn>:<iface_name>"."""
    return "".join([node_urn, ":", iface_name])
def get_interface_id(self, node_urn, interface_name):
    """Return the flattened interface id: "<node_urn>_<interface_name>"."""
    return flatten_urn("".join([node_urn, "_", interface_name]))
def get_interfacevlan_id(self, urn):
    """Return the flattened (URL-safe) form of an interface-vlan URN."""
    return flatten_urn(urn)
def get_interfacevlan_urn(self, tag):
return self._aggregate_urn + "_VLANL_" + str(tag)
def get_node_href(self, node_id):
return "%s/info/node/%s" % (self._base_url, node_id)
def get_link_href(self, link_id):
return "%s/info/link/%s" % (self._base_url, link_id)
def get_interface_href(self, interface_id):
return self._base_url + "/info/interface/" + interface_id
def get_interfacevlan_href(self, interfacevlan_id):
return self._base_url + "/info/interfacevlan/" + interfacevlan_id
def get_slice_href(self, slice_id):
return self._base_url + "/info/slice/" + slice_id
def get_authority_href(self, authority_id):
return self._base_url + "/info/authority/" + authority_id
def get_usr_href(self, user_id):
return self._base_url + "/info/user/" + user_id
def get_sliver_href(self, sliver_id):
return self._base_url + "/info/sliver/" + sliver_id
def get_sliver_resource_href(self, sliver_resource_id):
node_href = \
self._base_url + "/info/sliver_resource/" + sliver_resource_id
def get_internal_link_urn(self):
return self._aggregate_urn+ "_INTERNAL"
# Top-level loop: Generate data file, execute into database and sleep
def run(self):
    """Main polling loop: refresh all monitoring tables, then sleep.

    Runs forever; each iteration reloads the latest GRAM snapshot and
    rewrites the info/data/slice/sliver/aggregate/vlan/switch tables,
    pausing self._frequency_sec between iterations.
    """
    print "GRAM OPSMON process for %s" % self._aggregate_id
    while True:
        # Re-read the most recent GRAM snapshot each cycle so table
        # contents track current slivers/slices.
        self._latest_snapshot = gram_slice_info.find_latest_snapshot()
        self._objects_by_urn = gram_slice_info.parse_snapshot(self._latest_snapshot)
        self.delete_static_entries()
        self.update_info_tables()
        self.update_data_tables()
        self.update_slice_tables()
        self.update_sliver_tables()
        self.update_aggregate_tables()
        self.update_interfacevlan_info()
        self.update_switch_info()
        # data_filename = self.generate_data_file()
        # self.execute_data_file(data_filename)
        # print "FILE = %s" % data_filename
        # os.unlink(data_filename)
        print "Updated OpsMon dynamic data for %s at %d" % (self._aggregate_id, int(time.time()))
        time.sleep(self._frequency_sec)
# Update static H/W config information based on config plus information
# from nodes themselves
def update_info_tables(self):
    """Refresh all static ops_* info tables (aggregate, link, node, port)."""
    self.update_aggregate_info()
    self.update_link_info()
    self.update_node_info()
    self.update_interface_info()
# Update aggregate info tables
def update_aggregate_info(self):
    """Rewrite the ops_aggregate row, purging stale dependent rows first."""
    # Timestamp in microseconds since the epoch, used for purging and as
    # the new row's ts value.
    ts = str(int(time.time()*1000000))
    # Drop sliver/aggregate rows that reference aggregates being replaced.
    purge_old_related_entries(self._table_manager,
                              'ops_aggregate_sliver', 'aggregate_id',
                              'ops_aggregate', 'id', ts)
    purge_old_related_entries(self._table_manager,
                              'ops_sliver', 'aggregate_href',
                              'ops_aggregate', 'selfRef', ts)
    self._table_manager.purge_old_tsdata('ops_aggregate', ts)
    meas_ref = self._measurement_href
    # New fields required by the 20140501 aggregate schema.
    pop_version = 1.0
    op_status = 'Up'
    ip_poolsize = 0
    agg = [self._agg_schema, self._aggregate_id, self._aggregate_href,
           self._aggregate_urn, ts, meas_ref, pop_version, op_status, ip_poolsize]
    info_insert(self._table_manager, 'ops_aggregate', agg)
def update_link_info(self):
    """Rewrite ops_link rows for each NetworkLink in the current snapshot."""
    ts = str(int(time.time()*1000000))
    self._table_manager.purge_old_tsdata('ops_link', ts)
    # NetworkLink objects come from the parsed GRAM snapshot.
    links = [link for link in self._objects_by_urn.values() \
                 if link['__type__'] == 'NetworkLink']
    for link in links:
        link_urn = link['sliver_urn']
        link_id = self.get_link_id(link_urn)
        link_href = self.get_link_href(link_id)
        link_info = [self._link_schema, link_id, link_href, link_urn, ts]
        info_insert(self._table_manager, 'ops_link', link_info)
        # Also register the link as a resource of this aggregate.
        agg_resource_info = [link_id, self._aggregate_id, link_href, link_urn]
        info_insert(self._table_manager, 'ops_aggregate_resource', agg_resource_info)
# Update node info tables
def update_node_info(self):
    """Rewrite ops_node rows and purge all per-node time-series tables."""
    ts = str(int(time.time()*1000000))
    # Purge every per-node metric table whose rows reference ops_node ids
    # about to be replaced.
    purge_old_related_entries(self._table_manager,
                              'ops_node_cpu_util', 'id',
                              'ops_node', 'id', ts)
    purge_old_related_entries(self._table_manager,
                              'ops_node_disk_part_max_used', 'id',
                              'ops_node', 'id', ts)
    purge_old_related_entries(self._table_manager,
                              'ops_node_is_available', 'id',
                              'ops_node', 'id', ts)
    purge_old_related_entries(self._table_manager,
                              'ops_node_mem_used_kb', 'id',
                              'ops_node', 'id', ts)
    purge_old_related_entries(self._table_manager,
                              'ops_node_num_vms_allocated', 'id',
                              'ops_node', 'id', ts)
    purge_old_related_entries(self._table_manager,
                              'ops_node_swap_free', 'id',
                              'ops_node', 'id', ts)
    purge_old_related_entries(self._table_manager,
                              'ops_node_interface', 'node_id',
                              'ops_node', 'id', ts)
    self._table_manager.purge_old_tsdata('ops_node', ts)
    # Re-insert one row per configured host from the static inventory.
    for node_id, nd in self._nodes.items():
        # New fields required by the 20140501 node schema.
        node_type = "NODE"
        virt_type = "VIRT"
        node = [nd['schema'], nd['id'], nd['href'], nd['urn'], ts,
                node_type, nd['mem_total_kb'], virt_type]
        resource = [nd['id'], self._aggregate_id, nd['urn'], nd['href']]
        info_insert(self._table_manager, "ops_node", node)
        info_insert(self._table_manager, 'ops_aggregate_resource', resource)
    # Update interface info tables
    def update_interface_info(self):
        """Refresh ops_interface and ops_node_interface from the configured
        hosts and their interface definitions."""
        ts = str(int(time.time()*1000000))
        # Clear out old interface measurement rows tied to stale interfaces:
        purge_old_related_entries(self._table_manager,
                                  'ops_interface_rx_bps', 'id',
                                  'ops_interface', 'id', ts)
        purge_old_related_entries(self._table_manager,
                                  'ops_interface_rx_dps', 'id',
                                  'ops_interface', 'id', ts)
        purge_old_related_entries(self._table_manager,
                                  'ops_interface_rx_eps', 'id',
                                  'ops_interface', 'id', ts)
        purge_old_related_entries(self._table_manager,
                                  'ops_interface_rx_pps', 'id',
                                  'ops_interface', 'id', ts)
        purge_old_related_entries(self._table_manager,
                                  'ops_interface_tx_bps', 'id',
                                  'ops_interface', 'id', ts)
        purge_old_related_entries(self._table_manager,
                                  'ops_interface_tx_dps', 'id',
                                  'ops_interface', 'id', ts)
        purge_old_related_entries(self._table_manager,
                                  'ops_interface_tx_eps', 'id',
                                  'ops_interface', 'id', ts)
        purge_old_related_entries(self._table_manager,
                                  'ops_interface_tx_frequency', 'id',
                                  'ops_interface', 'id', ts)
        purge_old_related_entries(self._table_manager,
                                  'ops_interface_tx_power', 'id',
                                  'ops_interface', 'id', ts)
        purge_old_related_entries(self._table_manager,
                                  'ops_interface_tx_pps', 'id',
                                  'ops_interface', 'id', ts)
        purge_old_related_entries(self._table_manager,
                                  'ops_interface_wmx_noc', 'id',
                                  'ops_interface', 'id', ts)
        self._table_manager.purge_old_tsdata('ops_interface', ts)
        # Clear out old node/interface join rows
        # NOTE(review): elsewhere self._nodes appears to be keyed by node id
        # (see update_data_tables); the 'node_urn' loop variable name here may
        # be misleading -- confirm the key type.
        for node_urn, node_info in self._nodes.items():
            node_id = self.get_node_id(node_info['id'])
            self._table_manager.delete_stmt('ops_node_interface', node_id)
        # Insert into ops_interface and ops_node_interface per configured host
        for node_info in self._config['hosts']:
            node_urn = node_info['urn']
            node_id = self.get_node_id(node_info['id'])
            for iface_name, iface_data in node_info['interfaces'].items():
                iface_id = self.get_interface_id(node_urn, iface_name)
                iface_urn = self.get_interface_urn(node_urn, iface_name)
                iface_href = self.get_interface_href(iface_id)
                node_interface_info = [iface_id, node_id, iface_urn, iface_href]
                iface_address = iface_data['address']
                iface_role = iface_data['role']
                iface_address_type = iface_data['type']
                iface_max_bps = iface_data['max_bps']
                iface_max_pps = 0  # max packets-per-second is not configured
                interface_info = [self._interface_schema, iface_id,
                                  iface_href, iface_urn,
                                  ts,
                                  # iface_address_type,
                                  # iface_address,
                                  iface_role,
                                  iface_max_bps, iface_max_pps]
                info_insert(self._table_manager, 'ops_interface', interface_info)
                info_insert(self._table_manager, 'ops_node_interface', node_interface_info)
    # Update the ops_link_interface_vlan and ops_interfacevlan
    # tables to reflect current VLAN allocations
    def update_interfacevlan_info(self):
        """Refresh ops_interfacevlan and ops_link_interfacevlan from the
        NetworkInterface/NetworkLink/VirtualMachine objects in the snapshot,
        including stitching VLANs carried on link stitching_info."""
        ts = str(int(time.time()*1000000))
        self._table_manager.purge_old_tsdata('ops_interfacevlan', ts)
        links = [link for link in self._objects_by_urn.values() \
                 if link['__type__'] == 'NetworkLink']
        ifaces = [iface for iface in self._objects_by_urn.values() \
                  if iface['__type__'] == 'NetworkInterface']
        vms = [vm for vm in self._objects_by_urn.values() \
               if vm['__type__'] == 'VirtualMachine']
        data_interface = self._gram_config['data_interface']
        for iface in ifaces:
            ifacevlan_urn = iface['sliver_urn']
            ifacevlan_id = self.get_interfacevlan_id(ifacevlan_urn)
            ifacevlan_href = self.get_interfacevlan_href(ifacevlan_id)
            # The VM of this interface
            # NOTE(review): assumes every interface's VM, host node and link
            # are present in the snapshot; a missing one leaves iface_vm /
            # node_urn / link as None and raises TypeError below -- confirm.
            iface_vm = None
            for vm in vms:
                if vm['sliver_urn'] == iface['virtual_machine']:
                    iface_vm = vm
                    break
            host = iface_vm['host']
            # The node_urn for compute node on which VM resides
            node_urn = None
            for node_id, node_info in self._nodes.items():
                if node_info['host'] == host:
                    node_urn = node_info['urn']
                    break
            # The physical interface on the compute host for this sliver interface
            iface_urn = self.get_interface_urn(node_urn, data_interface)
            iface_id = self.get_interface_id(node_urn, data_interface)
            iface_href = self.get_interface_href(iface_id)
            # Find the link for this interface and grab its VLAN tag
            link_urn = iface['link']
            link = None
            for lnk in links:
                if lnk['sliver_urn'] == link_urn:
                    link = lnk;
                    break
            tag = link['vlan_tag']
            link_id = self.get_link_id(link_urn)
            ifacevlan_info = [self._interfacevlan_schema, ifacevlan_id,
                              ifacevlan_href, ifacevlan_urn, ts, tag,
                              iface_urn, iface_href]
            info_insert(self._table_manager, 'ops_interfacevlan',
                        ifacevlan_info)
            link_ifacevlan_info = [ifacevlan_id, link_id, ifacevlan_urn, ifacevlan_href]
            info_insert(self._table_manager, 'ops_link_interfacevlan',
                        link_ifacevlan_info)
        # Add in stitching interface vlan info
        for link in links:
            if 'stitching_info' in link and \
                    'vlan_tag' in link['stitching_info']:
                vlan_tag = link['stitching_info']['vlan_tag']
                link_urn = link['sliver_urn']
                link_id = self.get_link_id(link_urn)
                ifacevlan_urn = link['stitching_info']['link']
                ifacevlan_id = self.get_interfacevlan_id(ifacevlan_urn)
                ifacevlan_href = self.get_interfacevlan_href(ifacevlan_id)
                link_ifacevlan_info = [ifacevlan_id, link_id, ifacevlan_urn, ifacevlan_href]
                info_insert(self._table_manager, 'ops_link_interfacevlan',
                            link_ifacevlan_info);
                # Map the stitching link back to the egress port it rides on
                iface_urn = self.find_iface_urn_for_link_urn(ifacevlan_urn)
                iface_id = self.get_interface_id(iface_urn, 'EGRESS')
                iface_href = self.get_interface_href(iface_id)
                ifacevlan_info = [self._interfacevlan_schema, ifacevlan_id,
                                  ifacevlan_href, ifacevlan_urn, ts, vlan_tag,
                                  iface_urn, iface_href]
                info_insert(self._table_manager, 'ops_interfacevlan',
                            ifacevlan_info);
# Return the interface port URN for the given stitching link URN
def find_iface_urn_for_link_urn(self, link_urn):
# print "LINK_URN = %s" % link_urn
# print "SI = %s" % self._gram_config['stitching_info']['edge_points']
for ep in self._gram_config['stitching_info']['edge_points']:
if ep['local_link'] == link_urn:
return ep['port']
return None
    # Update information about the switch, its egress ports
    # And associated measurements (pps, bps, etc)
    def update_switch_info(self):
        """Refresh ops_node / ops_interface rows for the stitching switches
        and their egress ports, and record any configured port measurements."""
        ts = str(int(time.time()*1000000))
        # Add entry into ops_node for each switch
        switches = []
        if 'stitching_info' in self._gram_config and \
                'edge_points' in self._gram_config['stitching_info']:
            # Gather all the switches and write one unique entry in ops_node
            for ep in self._gram_config['stitching_info']['edge_points']:
                switch_name = ep['local_switch']
                if switch_name not in switches: switches.append(switch_name)
            for switch_name in switches:
                switch_id = self.get_node_id(switch_name)
                switch_href = self.get_node_href(switch_id)
                # New fields required by the ops_node schema
                node_type = "NODE"
                virt_type = "VIRT"
                switch_node_info = [self._node_schema, switch_id, switch_href,\
                                    switch_name, ts,
                                    node_type, 0, virt_type] # 0 = mem_total_kb
                info_insert(self._table_manager, 'ops_node', switch_node_info)
            # Enter an interface in ops_interface for each egress port,
            # as well as ops_node_interface.  For each end point, grab info
            # from the switch to determine MAC and line speed as well as
            # whatever measurements are configured.
            for ep in self._gram_config['stitching_info']['edge_points']:
                switch_name = ep['local_switch']
                switch_id = self.get_node_id(switch_name)
                iface_urn = ep['port']
                iface_id = self.get_interface_id(iface_urn, 'EGRESS')
                iface_href = self.get_interface_href(iface_id)
                iface_address_type = 'MAC'
                iface_address = 'de:ad:be:ef' # Default if unknown
                iface_role = 'DATA'
                iface_max_bps = 0
                iface_max_pps = 0
                if iface_urn in self._config['ports']:
                    stats_command = self._config['ports'][iface_urn]['command']
                    parser_module = \
                        self._config['ports'][iface_urn]['parser_module']
                    parser = self._config['ports'][iface_urn]['parser']
                    measurements = self._config['ports'][iface_urn]['measurements']
                    # NOTE(security): the command, module name and parser
                    # expression all come straight from the config file and
                    # are run via subprocess/exec/eval -- the config file
                    # must be fully trusted.
                    iface_raw_stats = subprocess.check_output(stats_command)
                    exec('import %s' % parser_module)
                    parse_cmd = "%s(iface_raw_stats)" % parser
                    iface_stats = eval(parse_cmd)
                    iface_max_bps = iface_stats['line_speed']
                    iface_address = iface_stats['mac_address']
                    for meas in measurements:
                        meas_table = meas['table']
                        meas_key = meas['key']
                        meas_change_rate = meas['change_rate']
                        value = iface_stats[meas_key]
                        if meas_change_rate:
                            # Convert cumulative counters to per-second rates
                            value = self._compute_change_rate(value,
                                                              meas_table,
                                                              iface_id)
                        ts_data = [iface_id, ts, value]
                        info_insert(self._table_manager, meas_table,
                                    ts_data)
                # Insert interface and node_interface entries for egress port
                iface_info = [self._interface_schema, iface_id, iface_href,
                              iface_urn, ts,
                              # iface_address_type,
                              # iface_address,
                              iface_role,
                              iface_max_bps, iface_max_pps]
                info_insert(self._table_manager, 'ops_interface', iface_info)
                node_iface_info = [iface_id, switch_id, iface_urn, iface_href]
                info_insert(self._table_manager, 'ops_node_interface',
                            node_iface_info)
# update slice tables based on most recent snapshot
def update_slice_tables(self):
ts = int(time.time()*1000000)
# Clear out old slice/user info
self.delete_all_entries_in_table('ops_slice_user')
self.delete_all_entries_in_table('ops_authority_slice')
self._table_manager.purge_old_tsdata('ops_slice', ts)
self._table_manager.purge_old_tsdata('ops_user', ts)
self._table_manager.purge_old_tsdata('ops_authority', ts)
user_urns = []
authority_urns = []
# Insert into ops_slice, ops_user and ops_slice_user tables
# for each active slice
for object_urn, object_attributes in self._objects_by_urn.items():
if object_attributes['__type__'] not in ['Slice']: continue
user_urn = object_attributes['user_urn']
slice_urn = object_attributes['slice_urn']
slice_uuid = object_attributes['tenant_uuid']
expires = object_attributes['expiration']
authority_urn = getAuthorityURN(slice_urn, 'sa')
authority_id = self.get_authority_id(authority_urn)
authority_href = self.get_authority_href(authority_id)
# Fill in authority table
if authority_urn not in authority_urns:
authority_id = self.get_authority_id(authority_urn)
authority_href = self.get_authority_href(authority_id)
authority_info = [self._authority_schema, authority_id,
authority_href, authority_urn, ts]
info_insert(self._table_manager, 'ops_authority',
authority_info)
authority_urns.append(authority_urn)
created = -1 # *** Can't get this
# Insert into ops_slice table
slice_id = self.get_slice_id(slice_urn)
slice_href = self.get_slice_href(slice_id)
slice_info = [self._slice_schema, slice_id, slice_href,
slice_urn, slice_uuid, ts, authority_urn,
authority_href, created, expires]
info_insert(self._table_manager, 'ops_slice', slice_info)
# If user URN is present, link from slice to user
if user_urn is not None:
if user_urn not in user_urns: user_urns.append(user_urn)
user_id = self.get_user_id(user_urn)
user_href = self.get_user_href(user_id)
role = None # *** Don't know what this is or how to get it
slice_user_info = [user_id, slice_id, user_urn, role, user_href]
info_insert(self._table_manager, 'ops_slice_user', slice_user_info)
# Link from slice to authority
auth_slice_info = [slice_id, authority_id, slice_urn, slice_href]
info_insert(self._table_manager, 'ops_authority_slice',
auth_slice_info)
# Fill in users table
for user_urn in user_urns:
user_id = self.get_user_id(user_urn)
user_href = self.get_user_href(user_id)
authority_urn = getAuthorityURN(user_urn, 'ma')
authority_id = self.get_authority_href(authority_urn)
authority_href = self.get_authority_href(authority_id)
if authority_urn not in authority_urns:
authority_urns.append(authority_urn)
full_name = None # *** Don't have this
email = None # *** Don't have this
user_info = [self._user_schema, user_id, user_href, user_urn, ts,
authority_urn, authority_href, full_name, email]
info_insert(self._table_manager, 'ops_user', user_info)
# update sliver tables based on most recent snapshot
def update_sliver_tables(self):
ts = int(time.time()*1000000)
# Clear out old sliver info:
self._table_manager.purge_old_tsdata('ops_sliver', ts)
# Clear out old sliver resource and aggregate sliver
self.delete_all_entries_in_table('ops_sliver_resource')
self.delete_all_entries_in_table('ops_aggregate_sliver')
# Insert into ops_sliver_resource table and ops_aggregate_sliver table
for object_urn, object_attributes in self._objects_by_urn.items():
if object_attributes['__type__'] not in ['NetworkInterface', 'VirtualMachine']: continue
slice_urn = object_attributes['slice_urn']
sliver_id = flatten_urn(slice_urn) + "_" + object_attributes['name']
# Insert into sliver tables
for object_urn, object_attributes in self._objects_by_urn.items():
if object_attributes['__type__'] not in ['NetworkInterface', 'VirtualMachine']: continue
schema = self._sliver_schema
slice_urn = object_attributes['slice_urn']
sliver_id = flatten_urn(slice_urn) + "_" + object_attributes['name']
sliver_href = self.get_sliver_href(sliver_id)
sliver_urn = object_urn
sliver_uuid = object_attributes['uuid']
slice_urn = object_attributes['slice_urn']
slice_uuid = object_attributes['slice']
creator = object_attributes['user_urn']
created = object_attributes['creation']
expires = object_attributes['expiration']
if expires is None: expires = -1
node_name = object_attributes['host']
node_id = self._aggregate_id + "." + node_name
node_urn = None
for node in self._config['hosts']:
if node['id'] == node_name: node_urn = node['urn']
node_href = self.get_sliver_resource_href(node_id)
# Insert into ops_sliver_table
link_id = ''
sliver_info = [schema, sliver_id, sliver_href, sliver_urn, sliver_uuid, \
ts, self._aggregate_urn, self._aggregate_href, \
slice_urn, slice_uuid, creator, \
created, expires, node_id, link_id]
info_insert(self._table_manager, 'ops_sliver', sliver_info)
# Insert into ops_sliver_resource table
sliver_resource_info = [node_id, sliver_id, node_urn, node_href]
info_insert(self._table_manager, 'ops_sliver_resource', sliver_resource_info)
# Insert into ops_aggregate_sliver table
sliver_aggregate_info = \
[sliver_id, self._aggregate_id, sliver_urn, sliver_href]
info_insert(self._table_manager, 'ops_aggregate_sliver', sliver_aggregate_info)
# Update aggregate measurement tables on most recent snapshot
def update_aggregate_tables(self):
ts = int(time.time()*1000000)
num_vms_table = 'ops_aggregate_num_vms_allocated'
# Clear out old node info
self._table_manager.purge_old_tsdata(num_vms_table, ts)
# Count number of VM's in current snapshot
num_vms = 0
for object_urn, object_attributes in self._objects_by_urn.items():
if object_attributes['__type__'] == 'VirtualMachine':
num_vms = num_vms + 1
# Write a record to the ops_aggregate_num_vms_allocated table
num_vms_info = [self._aggregate_id, ts, num_vms]
info_insert(self._table_manager, num_vms_table, num_vms_info)
    # Update data tables
    # Remove old records
    # Grab new entries from hosts
    # and place in appropriate data tables
    def update_data_tables(self):
        """Poll every host over rsh for node and interface measurements and
        append the samples to the time-series tables, dropping samples older
        than the configured retention window."""
        ts = int(time.time()*1000000)
        window_threshold = ts - (self._window_duration_sec * 1000000)
        # Delete old records from data tables
        for command in self._node_commands + self._interface_commands:
            tablename = command['table']
            self._table_manager.purge_old_tsdata(tablename, window_threshold)
        # For each host, grab the most recent data in a single command
        for host in self._hosts:
            host_id = self.get_node_id(host['id'])
            node_urn = self._nodes[host_id]['urn']
            host_address = host['address']
            rsh_command = ["rsh", host_address, self._rsh_command]
            result = subprocess.check_output(rsh_command)
            # The remote command emits one space-separated value per
            # configured node command, in the same order
            measurements = result.split(' ')
            for i in range(len(measurements)):
                command = self._node_commands[i]
                tablename = command['table']
                change_rate = 'change_rate' in command
                value = int(float(measurements[i]))
                # For metrics that are change rates, keep track of previous value
                # and compute rate of change (change in metric / change in time)
                if change_rate:
                    value = self._compute_change_rate(value, tablename, host_id)
                ts_data = [host_id, ts, value]
                info_insert(self._table_manager, tablename, ts_data)
            interface_info_rsh_command = ['rsh', host_address, self._interface_info_rsh_command]
            interface_info_string = subprocess.check_output(interface_info_rsh_command)
            interface_info = json.loads(interface_info_string)
            for interface_name, interface in host['interfaces'].items():
                interface_id = self.get_interface_id(node_urn, interface_name)
                for interface_command in self._interface_commands:
                    tablename = interface_command['table']
                    expression = interface_command['expression']
                    # NOTE(review): relies on psutil's private snetio layout
                    # to map a field name to its tuple position -- fragile
                    # across psutil versions; confirm before upgrading psutil.
                    expression_index = psutil._common.snetio._fields.index(expression)
                    change_rate = interface_command['change_rate']
                    value = interface_info[interface_name][expression_index]
                    if change_rate:
                        value = self._compute_change_rate(value, tablename, interface_id)
                    ts_data = [interface_id, ts, value]
                    info_insert(self._table_manager, tablename, ts_data)
def _compute_change_rate(self, value, tablename, identifier):
prev_value = value # For first time, make change rate zero
if tablename in self._prev_values and \
identifier in self._prev_values[tablename]:
prev_value = self._prev_values[tablename][identifier]
if tablename not in self._prev_values: self._prev_values[tablename] = {}
self._prev_values[tablename][identifier] = value
value = int(((value - prev_value) / float(self._frequency_sec)))
return value
def _compute_external_vlans(self):
external_vlans = {}
if 'stitching_info' in self._gram_config:
stitching_info = self._gram_config['stitching_info']
for ep in stitching_info['edge_points']:
port = ep['port']
vlans = ep['vlans']
external_vlans[port] = parseVLANs(vlans)
return external_vlans
def delete_static_entries(self):
self.delete_all_entries_in_table('ops_aggregate_resource')
self.delete_all_entries_in_table('ops_link_interfacevlan')
# Delete all entries in a given table
def delete_all_entries_in_table(self, tablename):
ids = self._table_manager.get_all_ids_from_table(tablename)
# print "Deleting all entries in %s %s" % (tablename, ids)
for id in ids:
self._table_manager.delete_stmt(tablename, id)
# Helper functions
# Replace : and + in URN to _
def flatten_urn(urn):
    """Return *urn* with every ':' and '+' replaced by '_' so it can be
    used as a database identifier."""
    flattened = urn
    for character in (':', '+'):
        flattened = flattened.replace(character, '_')
    return flattened
# creates a values string from an ordered array of values
def info_insert(table_manager, table_str, row_arr):
    """Render *row_arr* as a parenthesized, single-quoted SQL VALUES string
    and insert it into *table_str* via the table manager.

    NOTE(security): values are interpolated directly into SQL text with no
    escaping, so callers must only pass trusted data.
    """
    if row_arr:
        val_str = "('" + "','".join(str(cell) for cell in row_arr) + "')"
    else:
        val_str = ")"  # matches the historical output for an empty row
    table_manager.insert_stmt(table_str, val_str)
def purge_old_related_entries(table_manager, related_table, related_index,
                              key_table, key_index, ts):
    """Delete rows of *related_table* whose *related_index* points at a
    *key_table* row with a timestamp older than *ts*."""
    sql = ("delete from %s where %s in "
           "(select %s from %s where ts < %s)") % (related_table,
                                                   related_index,
                                                   key_index,
                                                   key_table,
                                                   ts)
    table_manager.execute_sql(sql)
def main():
    """Script entry point: load the JSON config file named on the command
    line, validate its required fields, and run the populator loop."""
    if len(sys.argv) < 2:
        print "Usage: python gram_opsmon_populator config_filename"
        return
    config_filename = sys.argv[1]
    config = None
    with open(config_filename) as config_file:
        config_data = config_file.read()
        config = json.loads(config_data)
    if config is not None:
        # Fail fast with a clear message if the config is incomplete
        required_fields = ['frequency_sec', 'window_duration_sec',
                           'database_user', 'database_pwd', 'database_name',
                           'hosts', 'modules', 'node_commands',
                           'interface_commands']
        missing_fields = []
        for field in required_fields:
            if field not in config: missing_fields.append(field)
        if len(missing_fields) > 0:
            sys.exit("Missing required fields in config: %s" % missing_fields)
        populator = OpsMonPopulator(config)
        populator.run()
# Parse a comma/hyphen set of sorted tags into a list of tags
def parseVLANs(vlan_spec):
    """Expand a VLAN spec such as "100-102,200" into a flat list of tags
    ([100, 101, 102, 200]).  A bare number is treated as a range of one."""
    tags = []
    for piece in vlan_spec.split(","):
        bounds = piece.split("-")
        low = int(bounds[0])
        high = int(bounds[-1])
        tags.extend(range(low, high + 1))
    return tags
# Turn a urn into the urn of the authority that created it
def getAuthorityURN(urn, authority_type):
    """Derive the authority URN ('sa', 'ma', ...) from an object URN by
    reusing the URN's third colon-separated component as the authority id."""
    authority_id = urn.split(':')[2]
    return 'urn:publicid:%s+authority+%s' % (authority_id, authority_type)
# Script entry point; the process exit status comes from main()
if __name__ == "__main__":
    sys.exit(main())
| mit |
martydill/PyCIL | src/VM.py | 1 | 8870 | from Stack import Stack
import Types
import unittest
from Parser.ParserContext import ParserContext
from MethodDefinition import MethodDefinition
from Parser.MethodParser import MethodParser
from Method import Method
from Instructions.Ret import Ret
from Variable import Variable
class DebugHooks:
    """Symbolic indices for the VM's four debug hook slots."""
    PreMethod = 0
    PostMethod = 1
    PreInstruction = 2
    PostInstruction = 3
class VM:
    """A small CIL interpreter: owns the loaded method definitions, the
    evaluation stack, the debug hooks and the instruction dispatch loop."""

    def __init__(self):
        self.assemblies = []
        self.methods = []               # all loaded MethodDefinitions
        self.currentMethod = None
        self.stack = Stack(8)
        # Hook slots indexed by the DebugHooks constants
        self.hooks = [None, None, None, None]
        self.instructionPointer = 0
        self.protected_blocks = []

    def start(self):
        """Register built-in methods, locate Main and execute it."""
        self.add_builtins()
        md = self.find_method_by_signature(None, 'Main', None, None)
        method = md.get_method()
        self.execute_method(method)
        pass

    def add_builtins(self):
        """Install the built-in System.Object constructor (a bare 'ret')."""
        m = MethodDefinition()
        m.instructions.append(Ret())
        m.name = 'ctor'
        m.namespace = '[mscorlib]System.Object'
        m.returnType = Types.Void
        self.methods.append(m)

    def load(self, fileName):
        """Parse a CIL source file and adopt its method definitions."""
        f = open(fileName, 'r')
        s = f.read()
        p = ParserContext()
        p.parse(s)
        self.methods = p.methods

    def find_instruction_pointer_by_label(self, label):
        """Return the index of the instruction carrying *label* in the
        current method, or -1 if no instruction has that label."""
        for i in range(len(self.current_method().instructions)):
            instruction = self.current_method().instructions[i]
            if instruction.label == label:
                return i
        return -1

    # fixme - check for instance/static
    def find_method_by_signature(self, namespace, name, returnType, params):
        """Return the first loaded method matching the signature components,
        or None.  A None namespace/name/returnType/params acts as a
        wildcard for that component."""
        for m in self.methods:
            # fixme - namespaces not parsed
            if namespace != None and m.namespace != namespace:
                continue
            if returnType != None and m.returnType != returnType:
                continue
            if name != None and m.name != name:
                continue
            if params == None:
                return m # fixme - should always do checks
            if len(m.parameters) == len(params):
                equal = True
                for i in range(len(params)):
                    # support both variables and types
                    methodParameterType = m.parameters[i]
                    if isinstance(m.parameters[i], Variable):
                        methodParameterType = m.parameters[i].type
                    parameterType = params[i]
                    if isinstance(params[i], Variable):
                        parameterType = params[i].type
                    # NOTE(review): also treats parameters as matching when
                    # only their array element types agree -- confirm intended.
                    if parameterType != methodParameterType and parameterType.arrayType != methodParameterType.arrayType:
                        equal = False
                        break
                if equal:
                    return m
        return None
        #raise Exception("method not found: " + name)

    def current_method(self):
        """The method owned by the current stack frame."""
        return self.current_stack_frame().method

    def current_stack_frame(self):
        return self.stack.currentFrame;

    def set_current_method(self, method):
        """Begin a stack frame sized for *method* and make it current."""
        self.stack.beginFrame(method.maxStack, method)
        self.currentMethod = method

    def execute_method(self, method):
        """Run *method* until its frame is replaced (e.g. by 'ret') or it
        runs out of instructions, firing the registered debug hooks around
        the method and around each instruction."""
        self.set_current_method(method)
        if self.hooks[DebugHooks.PreMethod] is not None:
            self.hooks[DebugHooks.PreMethod](method)
        frame = self.current_stack_frame()
        frame.instructionPointer = 0
        # Stop as soon as another frame becomes current (call/return)
        while frame == self.current_stack_frame() and frame.instructionPointer < len(method.instructions):
            instruction = method.instructions[self.current_stack_frame().instructionPointer]
            self.current_stack_frame().instructionPointer += 1
            if self.hooks[DebugHooks.PreInstruction] is not None:
                self.hooks[DebugHooks.PreInstruction](instruction)
            instruction.execute(self)
            if self.hooks[DebugHooks.PostInstruction] is not None:
                self.hooks[DebugHooks.PostInstruction](instruction)
        if self.hooks[DebugHooks.PostMethod] is not None:
            self.hooks[DebugHooks.PostMethod](method)

    def add_hook(self, hookType, method):
        """Register *method* in the hook slot *hookType* (DebugHooks.*)."""
        self.hooks[hookType] = method

    def remove_hook(self, hookType, method):
        # NOTE(review): *method* is ignored; whatever occupies the slot is
        # removed.
        self.hooks[hookType] = None

    def get_instruction_pointer(self):
        return self.current_stack_frame().instructionPointer

    def get_protected_blocks(self):
        return self.protected_blocks
class VMTest(unittest.TestCase):
    """Unit tests for VM method lookup, execution and parsing."""

    def test_find_when_empty_throws_exception(self):
        # BUG FIX: the old assertion passed the *result* of the call to
        # assertRaises instead of a callable, so it only "passed" because
        # unittest then tried to call None and the TypeError matched
        # Exception.  find_method_by_signature no longer raises for a
        # missing method -- it returns None -- so assert exactly that.
        vm = VM()
        self.assertEqual(
            vm.find_method_by_signature(None, 'nonexistent', Types.Int8, []),
            None)

    def test_find_different_params(self):
        """No match when the parameter lists differ."""
        vm = VM()
        method = MethodDefinition()
        method.name = 'hello'
        method.returnType = Types.Int8
        method.parameters = [Types.Int16, Types.Int32]
        vm.methods.append(method)
        m = vm.find_method_by_signature(None, 'hello', Types.Int8, [Types.Int8, Types.Int32])
        self.assertEqual(m, None)

    def test_find_different_return_type(self):
        """No match when the parameter count differs."""
        vm = VM()
        method = MethodDefinition()
        method.name = 'hello'
        method.returnType = Types.Int8
        method.parameters = [Types.Int16, Types.Int32]
        vm.methods.append(method)
        m = vm.find_method_by_signature(None, 'hello', Types.Int8, [])
        self.assertEqual(m, None)

    def test_find_different_name(self):
        """No match when the method name differs."""
        vm = VM()
        method = MethodDefinition()
        method.name = 'hello'
        method.returnType = Types.Int8
        method.parameters = [Types.Int16]
        vm.methods.append(method)
        m = vm.find_method_by_signature(None, 'hello2', Types.Int8, [Types.Int16])
        self.assertEqual(m, None)

    def test_find_match(self):
        """A fully matching signature returns the registered method."""
        vm = VM()
        method = MethodDefinition()
        method.name = 'hello'
        method.returnType = Types.Int8
        method.parameters = [Types.Int16, Types.Int32]
        vm.methods.append(method)
        m = vm.find_method_by_signature(None, 'hello', Types.Int8, [Types.Int16, Types.Int32])
        self.assertEqual(m, method)

    def test_execute_method(self):
        """Executing a method makes it current and sizes its stack frame."""
        vm = VM()
        md = MethodDefinition()
        md.name = 'hello'
        md.returnType = Types.Int8
        md.parameters = [Types.Int16, Types.Int32]
        md.maxStack = 77
        m = md.get_method()
        self.assertEqual(vm.current_method(), None)
        vm.execute_method(m)
        self.assertEqual(vm.current_method(), m)
        self.assertEqual(vm.stack.get_frame_size(), 77)

    def test_recursive_execute_method_each_instance_has_new_instance_variables(self):
        vm = VM()
        md = MethodDefinition()
        md.name = 'hello'
        md.returnType = Types.Int8
        md.parameters = [Types.Int16, Types.Int32]
        md.maxStack = 77
        self.assertEqual(vm.current_method(), None)
        m = md.get_method()
        vm.execute_method(m)
        self.assertEqual(vm.current_method(), m)
        self.assertEqual(vm.stack.get_frame_size(), 77)

    def test_parse_method_ret(self):
        """Parsing a method with locals yields the declared locals and a
        single 'ret' instruction."""
        s = ('.method public void main() {\n '
             '.locals init (int32 first,\n'
             'int32 second,\n'
             'int32 result)\n'
             'ret\n'
             ' }')
        vm = VM()
        p = ParserContext(s)
        mp = MethodParser()
        m = mp.parse(p)
        locals = m.locals
        self.assertEqual(len(locals), 3)
        self.assertEqual(locals[0].name, 'first')
        self.assertEqual(locals[0].alias, None)
        self.assertEqual(locals[0].type, Types.Int32)
        self.assertEqual(locals[1].name, 'second')
        self.assertEqual(locals[1].alias, None)
        self.assertEqual(locals[1].type, Types.Int32)
        self.assertEqual(locals[2].name, 'result')
        self.assertEqual(locals[2].alias, None)
        self.assertEqual(locals[2].type, Types.Int32)
        self.assertEqual(len(m.instructions), 1)
        self.assertEqual('ret', m.instructions[0].name)

    def test_execute_method_add(self):
        """ldc.i4.1 / ldc.i4.5 / add leaves 6 on the stack."""
        s = ('.method public int main() {\n '
             '.maxstack 10\n'
             'ldc.i4.1\n'
             'ldc.i4.5\n'
             'add\n'
             'ret\n'
             '}')
        vm = VM()
        p = ParserContext(s)
        mp = MethodParser()
        m = mp.parse(p)
        vm.execute_method(m.get_method())
        self.assertEqual(vm.stack.count(), 1)
        self.assertEqual(vm.stack.pop().value, 6)
| bsd-2-clause |
grap/OCB | addons/report_webkit/wizard/report_webkit_actions.py | 49 | 6642 | # -*- coding: utf-8 -*-
##############################################################################
#
# Copyright (c) 2010 Camptocamp SA (http://www.camptocamp.com)
# All Right Reserved
#
# Author : Vincent Renaville
#
# WARNING: This program as such is intended to be used by professional
# programmers who take the whole responsability of assessing all potential
# consequences resulting from its eventual inadequacies and bugs
# End users who are looking for a ready-to-use solution with commercial
# garantees and support are strongly adviced to contract a Free Software
# Service Company
#
# This program is Free Software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
#
##############################################################################
from openerp.tools.translate import _
from openerp.osv import fields, osv
from openerp import pooler
class report_webkit_actions(osv.osv_memory):
    """Transient wizard that attaches (or inspects) a sidebar "Print"
    action for a webkit report on its document model."""
    _name = "report.webkit.actions"
    _description = "Webkit Actions"
    _columns = {
       'print_button':fields.boolean('Add print button', help="Check this to add a Print action for this Report in the sidebar of the corresponding document types"),
       'open_action':fields.boolean('Open added action', help="Check this to view the newly added internal print action after creating it (technical view) "),
    }
    _defaults = {
         'print_button': lambda *a: True,
         'open_action': lambda *a: False,
    }

    def fields_view_get(self, cr, uid, view_id=None, view_type='form', context=None, toolbar=False, submenu=False):
        """ Changes the view dynamically
         @param self: The object pointer.
         @param cr: A database cursor
         @param uid: ID of the user currently logged in
         @param context: A standard dictionary
         @return: New arch of view.
        """
        if not context: context = {}
        res = super(report_webkit_actions, self).fields_view_get(cr, uid, view_id=view_id, view_type=view_type, context=context, toolbar=toolbar,submenu=False)
        record_id = context and context.get('active_id', False) or False
        active_model = context.get('active_model')
        # Only meaningful when launched from an ir.actions.report.xml record
        if not record_id or (active_model and active_model != 'ir.actions.report.xml'):
            return res
        report = self.pool.get('ir.actions.report.xml').browse(
                                                    cr,
                                                    uid,
                                                    context.get('active_id'),
                                                    context=context
                                                )
        ir_values_obj = self.pool.get('ir.values')
        # Look for an existing sidebar binding for this report
        ids = ir_values_obj.search(
                            cr,
                            uid,
                            [('value','=',report.type+','+str(context.get('active_id')))]
                        )
        if ids:
            # Binding already exists: replace the form with a notice
            res['arch'] = '''<form string="Add Print Buttons">
                                <label string="Report Action already exist for this report."/>
                            </form>
                            '''
        return res

    def do_action(self, cr, uid, ids, context=None):
        """ This Function Open added Action.
         @param self: The object pointer.
         @param cr: A database cursor
         @param uid: ID of the user currently logged in
         @param ids: List of report.webkit.actions's ID
         @param context: A standard dictionary
         @return: Dictionary of ir.values form.
        """
        if context is None:
            context = {}
        report_obj = self.pool.get('ir.actions.report.xml')
        for current in self.browse(cr, uid, ids, context=context):
            report = report_obj.browse(
                                  cr,
                                  uid,
                                  context.get('active_id'),
                                  context=context
                                )
            if current.print_button:
                # Bind the report to the model's sidebar print menu
                ir_values_obj = pooler.get_pool(cr.dbname).get('ir.values')
                res = ir_values_obj.set(
                                cr,
                                uid,
                                'action',
                                'client_print_multi',
                                report.report_name,
                                [report.model],
                                'ir.actions.report.xml,%d' % context.get('active_id', False),
                                isobject=True
                               )
            else:
                # Bind with key2 res_id 0 (no sidebar button)
                ir_values_obj = pooler.get_pool(cr.dbname).get('ir.values')
                res = ir_values_obj.set(
                                cr,
                                uid,
                                'action',
                                'client_print_multi',
                                report.report_name,
                                [report.model,0],
                                'ir.actions.report.xml,%d' % context.get('active_id', False),
                                isobject=True
                               )
            if res[0]:
                if not current.open_action:
                    return {'type': 'ir.actions.act_window_close'}

                # Open the newly created ir.values record (technical view)
                return {
                    'name': _('Client Actions Connections'),
                    'view_type': 'form',
                    'view_mode': 'form',
                    'res_id' : res[0],
                    'res_model': 'ir.values',
                    'view_id': False,
                    'type': 'ir.actions.act_window',
                }

report_webkit_actions()
report_webkit_actions()  # instantiate to register the wizard with the ORM
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
brianjgeiger/osf.io | osf/models/archive.py | 22 | 6191 | from django.contrib.postgres.fields import ArrayField
from django.utils import timezone
from django.db import models
from osf.utils.fields import NonNaiveDateTimeField
from website import settings
from osf.models.base import BaseModel, ObjectIDMixin
from osf.utils.datetime_aware_jsonfield import DateTimeAwareJSONField
from addons.base.models import BaseStorageAddon
from website.archiver import (
ARCHIVER_INITIATED,
ARCHIVER_SUCCESS,
ARCHIVER_FAILURE,
ARCHIVER_FAILURE_STATUSES
)
class ArchiveTarget(ObjectIDMixin, BaseModel):
    """Stores the results of archiving a single addon
    """
    # addon_short_name of target addon
    name = models.CharField(max_length=2048)
    # archiver status constant for this one addon
    status = models.CharField(max_length=40, default=ARCHIVER_INITIATED)
    # <dict> representation of a website.archiver.AggregateStatResult
    # Format: {
    #     'target_id': <str>,
    #     'target_name': <str>,
    #     'targets': <list>(StatResult | AggregateStatResult),
    #     'num_files': <int>,
    #     'disk_usage': <float>,
    # }
    stat_result = DateTimeAwareJSONField(default=dict, blank=True)
    # human-readable error messages accumulated while archiving this addon
    errors = ArrayField(models.TextField(), default=list, blank=True)

    def __repr__(self):
        """Debug representation showing id, addon name and status."""
        return '<{0}(_id={1}, name={2}, status={3})>'.format(
            self.__class__.__name__,
            self._id,
            self.name,
            self.status
        )
class ArchiveJob(ObjectIDMixin, BaseModel):
    """Tracks the archival of one source node into a destination registration.

    Per-addon progress lives in the related ``ArchiveTarget`` rows
    (``target_addons``); the job's overall status is derived from them in
    ``_post_update_target``.
    """

    # whether or not the ArchiveJob is complete (success or fail)
    done = models.BooleanField(default=False, verbose_name='completed')
    # whether or not emails have been sent for this ArchiveJob
    sent = models.BooleanField(default=False, verbose_name='emails sent')
    # one of the website.archiver ARCHIVER_* state constants
    status = models.CharField(max_length=40, default=ARCHIVER_INITIATED)
    datetime_initiated = NonNaiveDateTimeField(default=timezone.now, verbose_name='initiated at')

    dst_node = models.ForeignKey('Registration', related_name='archive_jobs',
                                 verbose_name='destination node', null=True,
                                 blank=True, on_delete=models.CASCADE)
    src_node = models.ForeignKey('Node', verbose_name='source node', null=True,
                                 blank=True, on_delete=models.CASCADE)
    initiator = models.ForeignKey('OSFUser', null=True, on_delete=models.CASCADE)

    # one ArchiveTarget per addon being archived for this job
    target_addons = models.ManyToManyField('ArchiveTarget')

    def __repr__(self):
        return (
            '<{ClassName}(_id={self._id}, done={self.done}, '
            ' status={self.status}, src_node={self.src_node}, dst_node={self.dst_node})>'
        ).format(ClassName=self.__class__.__name__, self=self)

    @property
    def children(self):
        # ArchiveJobs of the destination node's primary (non-linked) children
        return [node.archive_job for node in self.dst_node.nodes_primary]

    @property
    def parent(self):
        # ArchiveJob of the destination node's parent, or None at the root
        parent_node = self.dst_node.parent_node
        return parent_node.archive_job if parent_node else None

    @property
    def success(self):
        return self.status == ARCHIVER_SUCCESS

    @property
    def pending(self):
        # True while any target has neither succeeded nor failed yet
        return any([
            target for target in self.target_addons.all()
            if target.status not in (ARCHIVER_SUCCESS, ARCHIVER_FAILURE)
        ])

    def info(self):
        """Return the (src_node, dst_node, initiator) triple for this job."""
        return self.src_node, self.dst_node, self.initiator

    def target_info(self):
        """Return a serializable per-target summary (name/status/stats/errors)."""
        return [
            {
                'name': target.name,
                'status': target.status,
                'stat_result': target.stat_result,
                'errors': target.errors
            }
            for target in self.target_addons.all()
        ]

    def archive_tree_finished(self):
        """Return True when this job and every descendant job have finished."""
        if self.pending:
            return False
        if not self.children:
            return True
        # recurse down the registration tree
        return all([
            child.archive_tree_finished()
            for child in self.children
        ])

    def _fail_above(self):
        """Marks all ArchiveJob instances attached to Nodes above this as failed
        """
        parent = self.parent
        if parent:
            parent.status = ARCHIVER_FAILURE
            parent.save()

    def _post_update_target(self):
        """Checks for success or failure if the ArchiveJob on self.dst_node
        is finished
        """
        if self.status == ARCHIVER_FAILURE:
            return
        if not self.pending:
            self.done = True
            # any failed target fails the whole job and propagates upward
            if self.target_addons.filter(status__in=ARCHIVER_FAILURE_STATUSES).exists():
                self.status = ARCHIVER_FAILURE
                self._fail_above()
            else:
                self.status = ARCHIVER_SUCCESS
            self.save()

    def get_target(self, addon_short_name):
        """Return the ArchiveTarget named *addon_short_name*, or None."""
        return self.target_addons.filter(name=addon_short_name).first()

    def _set_target(self, addon_short_name):
        # idempotent: only create a target if one does not already exist
        if self.get_target(addon_short_name):
            return
        target = ArchiveTarget(name=addon_short_name)
        target.save()
        self.target_addons.add(target)

    def set_targets(self):
        """Create an ArchiveTarget for each archivable, fully-configured addon on src_node."""
        addons = []
        for addon in [self.src_node.get_addon(name)
                      for name in settings.ADDONS_ARCHIVABLE
                      if settings.ADDONS_ARCHIVABLE[name] != 'none']:
            if not addon or not isinstance(addon, BaseStorageAddon) or not addon.complete:
                continue
            archive_errors = getattr(addon, 'archive_errors', None)
            if not archive_errors or (archive_errors and not archive_errors()):
                # dataverse archives its draft and published content separately
                if addon.config.short_name == 'dataverse':
                    addons.append(addon.config.short_name + '-draft')
                    addons.append(addon.config.short_name + '-published')
                else:
                    addons.append(addon.config.short_name)
        for addon in addons:
            self._set_target(addon)
        self.save()

    def update_target(self, addon_short_name, status, stat_result=None, errors=None):
        """Record the outcome for one target addon and re-evaluate overall job state."""
        stat_result = stat_result or {}
        errors = errors or []

        target = self.get_target(addon_short_name)
        target.status = status
        target.errors = errors
        target.stat_result = stat_result
        target.save()
        self._post_update_target()
| apache-2.0 |
tanmaythakur/django | tests/null_fk_ordering/models.py | 210 | 1605 | """
Regression tests for proper working of ForeignKey(null=True). Tests these bugs:
 * #7512: including a nullable foreign key reference in Meta ordering has
   unexpected results
"""
from __future__ import unicode_literals
from django.db import models
from django.utils.encoding import python_2_unicode_compatible
# The first two models represent a very simple null FK ordering case.
class Author(models.Model):
    # Target of Article's nullable FK; Article.Meta orders on 'author__name'.
    name = models.CharField(max_length=150)
@python_2_unicode_compatible
class Article(models.Model):
    title = models.CharField(max_length=150)
    # Nullable FK (#7512): Meta.ordering below must work when author is NULL.
    author = models.ForeignKey(Author, models.SET_NULL, null=True)

    def __str__(self):
        return 'Article titled: %s' % (self.title, )

    class Meta:
        # Orders across the nullable FK -- the regression under test.
        ordering = ['author__name', ]
# The following four models represent a far more complex ordering case.
class SystemInfo(models.Model):
    # Top of the Comment -> Post -> Forum -> SystemInfo ordering chain.
    system_name = models.CharField(max_length=32)
class Forum(models.Model):
    # Non-nullable link traversed by Comment's multi-hop ordering.
    system_info = models.ForeignKey(SystemInfo, models.CASCADE)
    forum_name = models.CharField(max_length=32)
@python_2_unicode_compatible
class Post(models.Model):
    # Nullable FK: Comment ordering traverses a possibly-NULL post.
    forum = models.ForeignKey(Forum, models.SET_NULL, null=True)
    title = models.CharField(max_length=32)

    def __str__(self):
        return self.title
@python_2_unicode_compatible
class Comment(models.Model):
    post = models.ForeignKey(Post, models.SET_NULL, null=True)
    comment_text = models.CharField(max_length=250)

    class Meta:
        # Ordering spans three FK hops; post and forum are both nullable.
        ordering = ['post__forum__system_info__system_name', 'comment_text']

    def __str__(self):
        return self.comment_text
| bsd-3-clause |
Freso/picard | test/test_amazon_urls.py | 3 | 1718 | # -*- coding: utf-8 -*-
import unittest
from picard.util import parse_amazon_url
class ParseAmazonUrlTest(unittest.TestCase):
    """Validate ASIN/host extraction from a variety of Amazon URLs."""

    def test_1(self):
        result = parse_amazon_url('http://www.amazon.com/dp/020530902X')
        self.assertEqual(result, {'asin': '020530902X', 'host': 'amazon.com'})

    def test_2(self):
        result = parse_amazon_url('http://ec1.amazon.co.jp/gp/product/020530902X')
        self.assertEqual(result, {'asin': '020530902X', 'host': 'ec1.amazon.co.jp'})

    def test_3(self):
        # bare host plus ASIN embedded in a full product path with query string
        result = parse_amazon_url(
            'http://amazon.com/Dark-Side-Moon-Pink-Floyd/dp/B004ZN9RWK/ref=sr_1_1?s=music&ie=UTF8&qid=1372605047&sr=1-1&keywords=pink+floyd+dark+side+of+the+moon')
        self.assertEqual(result, {'asin': 'B004ZN9RWK', 'host': 'amazon.com'})

    def test_4(self):
        # incorrect ASIN -> no match
        result = parse_amazon_url('http://www.amazon.com/dp/A20530902X')
        self.assertEqual(result, None)

    def test_5(self):
        # incorrect ASIN (lowercase trailing character) -> no match
        result = parse_amazon_url('http://www.amazon.com/dp/020530902x')
        self.assertEqual(result, None)

    def test_6(self):
        result = parse_amazon_url('https://www.amazon.co.jp/gp/product/B00005FMYV')
        self.assertEqual(result, {'asin': 'B00005FMYV', 'host': 'amazon.co.jp'})

    def test_7(self):
        # incorrect URL scheme -> no match
        result = parse_amazon_url('httpsa://www.amazon.co.jp/gp/product/B00005FMYV')
        self.assertEqual(result, None)
shikil/sympy | sympy/assumptions/tests/test_query.py | 29 | 91365 | from sympy.abc import t, w, x, y, z, n, k, m, p, i
from sympy.assumptions import (ask, AssumptionsContext, Q, register_handler,
remove_handler)
from sympy.assumptions.assume import global_assumptions
from sympy.assumptions.ask import compute_known_facts, single_fact_lookup
from sympy.assumptions.handlers import AskHandler
from sympy.core.add import Add
from sympy.core.numbers import (I, Integer, Rational, oo, pi)
from sympy.core.singleton import S
from sympy.core.power import Pow
from sympy.core.symbol import symbols
from sympy.functions.combinatorial.factorials import factorial
from sympy.functions.elementary.complexes import (Abs, im, re, sign)
from sympy.functions.elementary.exponential import (exp, log)
from sympy.functions.elementary.miscellaneous import sqrt
from sympy.functions.elementary.trigonometric import (
acos, acot, asin, atan, cos, cot, sin, tan)
from sympy.logic.boolalg import Equivalent, Implies, Xor, And, to_cnf
from sympy.utilities.pytest import raises, XFAIL, slow, raises
from sympy.assumptions.assume import assuming
from sympy.utilities.exceptions import SymPyDeprecationWarning
def test_int_1():
    """The integer 1: a positive odd unit, neither prime nor composite."""
    z = 1
    holds = (Q.commutative, Q.integer, Q.rational, Q.real, Q.complex,
             Q.positive, Q.odd, Q.finite, Q.hermitian)
    fails = (Q.irrational, Q.imaginary, Q.negative, Q.even, Q.prime,
             Q.composite, Q.antihermitian)
    for predicate in holds:
        assert ask(predicate(z)) is True
    for predicate in fails:
        assert ask(predicate(z)) is False
def test_int_11():
    """The integer 11: positive, odd, and prime."""
    z = 11
    holds = (Q.commutative, Q.integer, Q.rational, Q.real, Q.complex,
             Q.positive, Q.odd, Q.finite, Q.prime, Q.hermitian)
    fails = (Q.irrational, Q.imaginary, Q.negative, Q.even, Q.composite,
             Q.antihermitian)
    for predicate in holds:
        assert ask(predicate(z)) is True
    for predicate in fails:
        assert ask(predicate(z)) is False
def test_int_12():
    """The integer 12: positive, even, and composite."""
    z = 12
    holds = (Q.commutative, Q.integer, Q.rational, Q.real, Q.complex,
             Q.positive, Q.even, Q.finite, Q.composite, Q.hermitian)
    fails = (Q.irrational, Q.imaginary, Q.negative, Q.odd, Q.prime,
             Q.antihermitian)
    for predicate in holds:
        assert ask(predicate(z)) is True
    for predicate in fails:
        assert ask(predicate(z)) is False
def test_float_1():
    """Python floats ask() as positive rationals, never as integers."""
    holds = (Q.commutative, Q.rational, Q.real, Q.complex, Q.positive,
             Q.finite, Q.hermitian)
    fails = (Q.integer, Q.irrational, Q.imaginary, Q.negative, Q.even,
             Q.odd, Q.prime, Q.composite, Q.antihermitian)
    for z in (1.0, 7.2123):
        for predicate in holds:
            assert ask(predicate(z)) is True
        for predicate in fails:
            assert ask(predicate(z)) is False
def test_zero_0():
    """Integer zero: even and rational, but neither positive nor negative."""
    z = Integer(0)
    holds = (Q.zero, Q.commutative, Q.integer, Q.rational, Q.real,
             Q.complex, Q.even, Q.finite, Q.hermitian)
    fails = (Q.nonzero, Q.imaginary, Q.positive, Q.negative, Q.odd,
             Q.prime, Q.composite, Q.antihermitian)
    for predicate in holds:
        assert ask(predicate(z)) is True
    for predicate in fails:
        assert ask(predicate(z)) is False
def test_negativeone():
    """Integer -1: a negative odd unit, neither prime nor composite."""
    z = Integer(-1)
    holds = (Q.nonzero, Q.commutative, Q.integer, Q.rational, Q.real,
             Q.complex, Q.negative, Q.odd, Q.finite, Q.hermitian)
    fails = (Q.zero, Q.irrational, Q.imaginary, Q.positive, Q.even,
             Q.prime, Q.composite, Q.antihermitian)
    for predicate in holds:
        assert ask(predicate(z)) is True
    for predicate in fails:
        assert ask(predicate(z)) is False
def test_infinity():
    """oo is extended-real and positive, but not real, complex, or finite."""
    holds = (Q.commutative, Q.extended_real, Q.positive)
    fails = (Q.integer, Q.rational, Q.algebraic, Q.real, Q.complex,
             Q.irrational, Q.imaginary, Q.negative, Q.even, Q.odd,
             Q.finite, Q.prime, Q.composite, Q.hermitian, Q.antihermitian)
    for predicate in holds:
        assert ask(predicate(oo)) is True
    for predicate in fails:
        assert ask(predicate(oo)) is False
def test_neg_infinity():
    """-oo is extended-real and negative, but not real, complex, or finite."""
    mm = S.NegativeInfinity
    holds = (Q.commutative, Q.extended_real, Q.negative)
    fails = (Q.integer, Q.rational, Q.algebraic, Q.real, Q.complex,
             Q.irrational, Q.imaginary, Q.positive, Q.even, Q.odd,
             Q.finite, Q.prime, Q.composite, Q.hermitian, Q.antihermitian)
    for predicate in holds:
        assert ask(predicate(mm)) is True
    for predicate in fails:
        assert ask(predicate(mm)) is False
def test_nan():
    """NaN: commutative and nonzero, but no other numeric property holds."""
    nan = S.NaN
    holds = (Q.commutative, Q.nonzero)
    fails = (Q.integer, Q.rational, Q.algebraic, Q.real, Q.extended_real,
             Q.complex, Q.irrational, Q.imaginary, Q.positive, Q.zero,
             Q.even, Q.odd, Q.finite, Q.prime, Q.composite, Q.hermitian,
             Q.antihermitian)
    for predicate in holds:
        assert ask(predicate(nan)) is True
    for predicate in fails:
        assert ask(predicate(nan)) is False
def test_Rational_number():
    """Non-integer rationals, plus sign detection on both sides of zero."""
    r = Rational(3, 4)
    holds = (Q.commutative, Q.rational, Q.real, Q.complex, Q.positive,
             Q.finite, Q.hermitian)
    fails = (Q.integer, Q.irrational, Q.imaginary, Q.negative, Q.even,
             Q.odd, Q.prime, Q.composite, Q.antihermitian)
    for predicate in holds:
        assert ask(predicate(r)) is True
    for predicate in fails:
        assert ask(predicate(r)) is False
    # sign checks: positive fractions below and above 1
    for r in (Rational(1, 4), Rational(5, 4), Rational(5, 3)):
        assert ask(Q.positive(r)) is True
        assert ask(Q.negative(r)) is False
    # ...and their negative counterparts
    for r in (Rational(-3, 4), Rational(-1, 4), Rational(-5, 4), Rational(-5, 3)):
        assert ask(Q.positive(r)) is False
        assert ask(Q.negative(r)) is True
def test_sqrt_2():
    """sqrt(2): a positive irrational real."""
    z = sqrt(2)
    holds = (Q.commutative, Q.real, Q.complex, Q.irrational, Q.positive,
             Q.finite, Q.hermitian)
    fails = (Q.integer, Q.rational, Q.imaginary, Q.negative, Q.even,
             Q.odd, Q.prime, Q.composite, Q.antihermitian)
    for predicate in holds:
        assert ask(predicate(z)) is True
    for predicate in fails:
        assert ask(predicate(z)) is False
def test_pi():
    """pi and simple arithmetic combinations of it: positive irrational,
    non-algebraic reals."""
    holds = (Q.commutative, Q.real, Q.complex, Q.irrational, Q.positive,
             Q.finite, Q.hermitian)
    fails = (Q.integer, Q.rational, Q.algebraic, Q.imaginary, Q.negative,
             Q.even, Q.odd, Q.prime, Q.composite, Q.antihermitian)
    for z in (S.Pi, S.Pi + 1, 2*S.Pi, S.Pi ** 2, (1 + S.Pi) ** 2):
        for predicate in holds:
            assert ask(predicate(z)) is True
        for predicate in fails:
            assert ask(predicate(z)) is False
def test_E():
    """E: a positive irrational, non-algebraic real."""
    z = S.Exp1
    holds = (Q.commutative, Q.real, Q.complex, Q.irrational, Q.positive,
             Q.finite, Q.hermitian)
    fails = (Q.integer, Q.rational, Q.algebraic, Q.imaginary, Q.negative,
             Q.even, Q.odd, Q.prime, Q.composite, Q.antihermitian)
    for predicate in holds:
        assert ask(predicate(z)) is True
    for predicate in fails:
        assert ask(predicate(z)) is False
def test_GoldenRatio():
    """GoldenRatio: a positive irrational that IS algebraic."""
    z = S.GoldenRatio
    holds = (Q.commutative, Q.algebraic, Q.real, Q.complex, Q.irrational,
             Q.positive, Q.finite, Q.hermitian)
    fails = (Q.integer, Q.rational, Q.imaginary, Q.negative, Q.even,
             Q.odd, Q.prime, Q.composite, Q.antihermitian)
    for predicate in holds:
        assert ask(predicate(z)) is True
    for predicate in fails:
        assert ask(predicate(z)) is False
def test_I():
    """The imaginary unit, Gaussian integers, and powers involving I."""
    z = I
    holds = (Q.commutative, Q.algebraic, Q.complex, Q.imaginary, Q.finite,
             Q.antihermitian)
    fails = (Q.integer, Q.rational, Q.real, Q.irrational, Q.positive,
             Q.negative, Q.even, Q.odd, Q.prime, Q.composite, Q.hermitian)
    for predicate in holds:
        assert ask(predicate(z)) is True
    for predicate in fails:
        assert ask(predicate(z)) is False
    # 1 + I and I*(1 + I) are complex but neither real nor purely imaginary
    for z in (1 + I, I*(1 + I)):
        for predicate in (Q.commutative, Q.algebraic, Q.complex, Q.finite):
            assert ask(predicate(z)) is True
        for predicate in (Q.integer, Q.rational, Q.real, Q.irrational,
                          Q.imaginary, Q.positive, Q.negative, Q.even,
                          Q.odd, Q.prime, Q.composite, Q.hermitian,
                          Q.antihermitian):
            assert ask(predicate(z)) is False
    # (expression, expected Q.imaginary, expected Q.real)
    power_cases = [
        (I**(I), False, True),
        ((-I)**(I), False, True),
        ((3*I)**(I), False, False),
        ((1)**(I), False, True),
        ((-1)**(I), False, True),
        ((1 + I)**(I), False, False),
        ((I)**(I + 3), True, False),
        ((I)**(I + 2), False, True),
        ((I)**(2), False, True),
        ((I)**(3), True, False),
        ((3)**(I), False, False),
        ((I)**(0), False, True),
    ]
    for z, is_imaginary, is_real in power_cases:
        assert ask(Q.imaginary(z)) is is_imaginary
        assert ask(Q.real(z)) is is_real
@slow
def test_bounded():
x, y, z = symbols('x,y,z')
assert ask(Q.finite(x)) is None
assert ask(Q.finite(x), Q.finite(x)) is True
assert ask(Q.finite(x), Q.finite(y)) is None
assert ask(Q.finite(x), Q.complex(x)) is None
assert ask(Q.finite(x + 1)) is None
assert ask(Q.finite(x + 1), Q.finite(x)) is True
a = x + y
x, y = a.args
# B + B
assert ask(Q.finite(a), Q.finite(x) & Q.finite(y)) is True
assert ask(
Q.finite(a), Q.finite(x) & Q.finite(y) & Q.positive(x)) is True
assert ask(
Q.finite(a), Q.finite(x) & Q.finite(y) & Q.positive(y)) is True
assert ask(Q.finite(a),
Q.finite(x) & Q.finite(y) & Q.positive(x) & Q.positive(y)) is True
assert ask(Q.finite(a),
Q.finite(x) & Q.finite(y) & Q.positive(x) & ~Q.positive(y)) is True
assert ask(Q.finite(a),
Q.finite(x) & Q.finite(y) & ~Q.positive(x) & Q.positive(y)) is True
assert ask(Q.finite(a),
Q.finite(x) & Q.finite(y) & ~Q.positive(x) & ~Q.positive(y)) is True
# B + U
assert ask(Q.finite(a), Q.finite(x) & ~Q.finite(y)) is False
assert ask(
Q.finite(a), Q.finite(x) & ~Q.finite(y) & Q.positive(x)) is False
assert ask(
Q.finite(a), Q.finite(x) & ~Q.finite(y) & Q.positive(y)) is False
assert ask(Q.finite(a), Q.finite(x) & ~Q.finite(y) & Q.positive(x) &
Q.positive(y)) is False
assert ask(Q.finite(a), Q.finite(x) & ~Q.finite(y) & Q.positive(x) &
~Q.positive(y)) is False
assert ask(Q.finite(a), Q.finite(x) & ~Q.finite(y) & ~Q.positive(x) &
Q.positive(y)) is False
assert ask(Q.finite(a), Q.finite(x) & ~Q.finite(y) & ~Q.positive(x) &
~Q.positive(y)) is False
# B + ?
assert ask(Q.finite(a), Q.finite(x)) is None
assert ask(Q.finite(a), Q.finite(x) & Q.positive(x)) is None
assert ask(Q.finite(a), Q.finite(x) & Q.positive(y)) is None
assert ask(
Q.finite(a), Q.finite(x) & Q.positive(x) & Q.positive(y)) is None
assert ask(
Q.finite(a), Q.finite(x) & Q.positive(x) & ~Q.positive(y)) is None
assert ask(
Q.finite(a), Q.finite(x) & ~Q.positive(x) & Q.positive(y)) is None
assert ask(
Q.finite(a), Q.finite(x) & ~Q.positive(x) & ~Q.positive(y)) is None
# U + U
assert ask(Q.finite(a), ~Q.finite(x) & ~Q.finite(y)) is None
assert ask(
Q.finite(a), ~Q.finite(x) & ~Q.finite(y) & Q.positive(x)) is None
assert ask(
Q.finite(a), ~Q.finite(x) & ~Q.finite(y) & Q.positive(y)) is None
assert ask(Q.finite(a), ~Q.finite(x) & ~Q.finite(y) & Q.positive(x) &
Q.positive(y)) is False
assert ask(Q.finite(a), ~Q.finite(x) & ~Q.finite(y) & Q.positive(x) &
~Q.positive(y)) is None
assert ask(Q.finite(a), ~Q.finite(x) & ~Q.finite(y) & ~Q.positive(x) &
Q.positive(y)) is None
assert ask(Q.finite(a), ~Q.finite(x) & ~Q.finite(y) & ~Q.positive(x) &
~Q.positive(y)) is False
# U + ?
assert ask(Q.finite(a), ~Q.finite(y)) is None
assert ask(Q.finite(a), ~Q.finite(y) & Q.positive(x)) is None
assert ask(Q.finite(a), ~Q.finite(y) & Q.positive(y)) is None
assert ask(
Q.finite(a), ~Q.finite(y) & Q.positive(x) & Q.positive(y)) is False
assert ask(
Q.finite(a), ~Q.finite(y) & Q.positive(x) & ~Q.positive(y)) is None
assert ask(
Q.finite(a), ~Q.finite(y) & ~Q.positive(x) & Q.positive(y)) is None
assert ask(
Q.finite(a), ~Q.finite(y) & ~Q.positive(x) & ~Q.positive(y)) is False
# ? + ?
assert ask(Q.finite(a),) is None
assert ask(Q.finite(a), Q.positive(x)) is None
assert ask(Q.finite(a), Q.positive(y)) is None
assert ask(Q.finite(a), Q.positive(x) & Q.positive(y)) is None
assert ask(Q.finite(a), Q.positive(x) & ~Q.positive(y)) is None
assert ask(Q.finite(a), ~Q.positive(x) & Q.positive(y)) is None
assert ask(Q.finite(a), ~Q.positive(x) & ~Q.positive(y)) is None
a = x + y + z
x, y, z = a.args
assert ask(Q.finite(a), Q.negative(x) & Q.finite(x) & Q.negative(y) &
Q.finite(y) & Q.negative(z) & Q.finite(z)) is True
assert ask(Q.finite(a), Q.negative(x) & Q.finite(x) &
Q.negative(y) & Q.finite(y) & Q.finite(z)) is True
assert ask(Q.finite(a), Q.negative(x) & Q.finite(x) & Q.negative(y) &
Q.finite(y) & Q.positive(z) & Q.finite(z)) is True
assert ask(Q.finite(a), Q.negative(x) & Q.finite(x) & Q.negative(y) &
Q.finite(y) & Q.negative(z) & ~Q.finite(z)) is False
assert ask(Q.finite(a), Q.negative(x) & Q.finite(x) &
Q.negative(y) & Q.finite(y) & ~Q.finite(z)) is False
assert ask(Q.finite(a), Q.negative(x) & Q.finite(x) & Q.negative(y) &
Q.finite(y) & Q.positive(z) & ~Q.finite(z)) is False
assert ask(Q.finite(a), Q.negative(x) & Q.finite(x) &
Q.negative(y) & Q.finite(y) & Q.negative(z)) is None
assert ask(Q.finite(a), Q.negative(x) &
Q.finite(x) & Q.negative(y) & Q.finite(y)) is None
assert ask(Q.finite(a), Q.negative(x) & Q.finite(x) &
Q.negative(y) & Q.finite(y) & Q.positive(z)) is None
assert ask(Q.finite(a), Q.negative(x) &
Q.finite(x) & Q.finite(y) & Q.finite(z)) is True
assert ask(Q.finite(a), Q.negative(x) & Q.finite(x) &
Q.finite(y) & Q.positive(z) & Q.finite(z)) is True
assert ask(Q.finite(a), Q.negative(x) & Q.finite(x) &
Q.finite(y) & Q.negative(z) & ~Q.finite(z)) is False
assert ask(Q.finite(a), Q.negative(x) &
Q.finite(x) & Q.finite(y) & ~Q.finite(z)) is False
assert ask(Q.finite(a), Q.negative(x) & Q.finite(x) &
Q.finite(y) & Q.positive(z) & ~Q.finite(z)) is False
assert ask(Q.finite(a), Q.negative(x) &
Q.finite(x) & Q.finite(y) & Q.negative(z)) is None
assert ask(
Q.finite(a), Q.negative(x) & Q.finite(x) & Q.finite(y)) is None
assert ask(Q.finite(a), Q.negative(x) &
Q.finite(x) & Q.finite(y) & Q.positive(z)) is None
assert ask(Q.finite(a), Q.negative(x) & Q.finite(x) & Q.positive(y) &
Q.finite(y) & Q.positive(z) & Q.finite(z)) is True
assert ask(Q.finite(a), Q.negative(x) & Q.finite(x) & Q.positive(y) &
Q.finite(y) & Q.negative(z) & ~Q.finite(z)) is False
assert ask(Q.finite(a), Q.negative(x) & Q.finite(x) &
Q.positive(y) & Q.finite(y) & ~Q.finite(z)) is False
assert ask(Q.finite(a), Q.negative(x) & Q.finite(x) & Q.positive(y) &
Q.finite(y) & Q.positive(z) & ~Q.finite(z)) is False
assert ask(Q.finite(a), Q.negative(x) & Q.finite(x) &
Q.positive(y) & Q.finite(y) & Q.negative(z)) is None
assert ask(Q.finite(a), Q.negative(x) &
Q.finite(x) & Q.positive(y) & Q.finite(y)) is None
assert ask(Q.finite(a), Q.negative(x) & Q.finite(x) &
Q.positive(y) & Q.finite(y) & Q.positive(z)) is None
assert ask(Q.finite(a), Q.negative(x) & Q.finite(x) & Q.negative(y) &
~Q.finite(y) & Q.negative(z) & ~Q.finite(z)) is False
assert ask(Q.finite(a), Q.negative(x) & Q.finite(x) &
Q.negative(y) & ~Q.finite(y) & ~Q.finite(z)) is None
assert ask(Q.finite(a), Q.negative(x) & Q.finite(x) & Q.negative(y) &
~Q.finite(y) & Q.positive(z) & ~Q.finite(z)) is None
assert ask(Q.finite(a), Q.negative(x) & Q.finite(x) &
Q.negative(y) & ~Q.finite(y) & Q.negative(z)) is False
assert ask(Q.finite(a), Q.negative(x) &
Q.finite(x) & Q.negative(y) & ~Q.finite(y)) is None
assert ask(Q.finite(a), Q.negative(x) & Q.finite(x) &
Q.negative(y) & ~Q.finite(y) & Q.positive(z)) is None
assert ask(Q.finite(a), Q.negative(x) &
Q.finite(x) & ~Q.finite(y) & ~Q.finite(z)) is None
assert ask(Q.finite(a), Q.negative(x) & Q.finite(x) &
~Q.finite(y) & Q.positive(z) & ~Q.finite(z)) is None
assert ask(Q.finite(a), Q.negative(x) &
Q.finite(x) & ~Q.finite(y) & Q.negative(z)) is None
assert ask(
Q.finite(a), Q.negative(x) & Q.finite(x) & ~Q.finite(y)) is None
assert ask(Q.finite(a), Q.negative(x) &
Q.finite(x) & ~Q.finite(y) & Q.positive(z)) is None
assert ask(Q.finite(a), Q.negative(x) & Q.finite(x) & Q.positive(y) &
~Q.finite(y) & Q.positive(z) & ~Q.finite(z)) is False
assert ask(Q.finite(a), Q.negative(x) & Q.finite(x) &
Q.positive(y) & ~Q.finite(y) & Q.negative(z)) is None
assert ask(Q.finite(a), Q.negative(x) &
Q.finite(x) & Q.positive(y) & ~Q.finite(y)) is None
assert ask(Q.finite(a), Q.negative(x) & Q.finite(x) &
Q.positive(y) & ~Q.finite(y) & Q.positive(z)) is False
assert ask(Q.finite(a), Q.negative(x) &
Q.finite(x) & Q.negative(y) & Q.negative(z)) is None
assert ask(
Q.finite(a), Q.negative(x) & Q.finite(x) & Q.negative(y)) is None
assert ask(Q.finite(a), Q.negative(x) &
Q.finite(x) & Q.negative(y) & Q.positive(z)) is None
assert ask(Q.finite(a), Q.negative(x) & Q.finite(x)) is None
assert ask(
Q.finite(a), Q.negative(x) & Q.finite(x) & Q.positive(z)) is None
assert ask(Q.finite(a), Q.negative(x) &
Q.finite(x) & Q.positive(y) & Q.positive(z)) is None
assert ask(
Q.finite(a), Q.finite(x) & Q.finite(y) & Q.finite(z)) is True
assert ask(Q.finite(a),
Q.finite(x) & Q.finite(y) & Q.positive(z) & Q.finite(z)) is True
assert ask(Q.finite(a), Q.finite(x) &
Q.finite(y) & Q.negative(z) & ~Q.finite(z)) is False
assert ask(
Q.finite(a), Q.finite(x) & Q.finite(y) & ~Q.finite(z)) is False
assert ask(Q.finite(a), Q.finite(x) &
Q.finite(y) & Q.positive(z) & ~Q.finite(z)) is False
assert ask(
Q.finite(a), Q.finite(x) & Q.finite(y) & Q.negative(z)) is None
assert ask(Q.finite(a), Q.finite(x) & Q.finite(y)) is None
assert ask(
Q.finite(a), Q.finite(x) & Q.finite(y) & Q.positive(z)) is None
assert ask(Q.finite(a), Q.finite(x) & Q.positive(y) &
Q.finite(y) & Q.positive(z) & Q.finite(z)) is True
assert ask(Q.finite(a), Q.finite(x) & Q.positive(y) &
Q.finite(y) & Q.negative(z) & ~Q.finite(z)) is False
assert ask(Q.finite(a), Q.finite(x) &
Q.positive(y) & Q.finite(y) & ~Q.finite(z)) is False
assert ask(Q.finite(a), Q.finite(x) & Q.positive(y) &
Q.finite(y) & Q.positive(z) & ~Q.finite(z)) is False
assert ask(Q.finite(a), Q.finite(x) &
Q.positive(y) & Q.finite(y) & Q.negative(z)) is None
assert ask(
Q.finite(a), Q.finite(x) & Q.positive(y) & Q.finite(y)) is None
assert ask(Q.finite(a), Q.finite(x) &
Q.positive(y) & Q.finite(y) & Q.positive(z)) is None
assert ask(Q.finite(a), Q.finite(x) & Q.negative(y) &
~Q.finite(y) & Q.negative(z) & ~Q.finite(z)) is False
assert ask(Q.finite(a), Q.finite(x) &
Q.negative(y) & ~Q.finite(y) & ~Q.finite(z)) is None
assert ask(Q.finite(a), Q.finite(x) & Q.negative(y) &
~Q.finite(y) & Q.positive(z) & ~Q.finite(z)) is None
assert ask(Q.finite(a), Q.finite(x) &
Q.negative(y) & ~Q.finite(y) & Q.negative(z)) is False
assert ask(
Q.finite(a), Q.finite(x) & Q.negative(y) & ~Q.finite(y)) is None
assert ask(Q.finite(a), Q.finite(x) &
Q.negative(y) & ~Q.finite(y) & Q.positive(z)) is None
assert ask(
Q.finite(a), Q.finite(x) & ~Q.finite(y) & ~Q.finite(z)) is None
assert ask(Q.finite(a), Q.finite(x) &
~Q.finite(y) & Q.positive(z) & ~Q.finite(z)) is None
assert ask(
Q.finite(a), Q.finite(x) & ~Q.finite(y) & Q.negative(z)) is None
assert ask(Q.finite(a), Q.finite(x) & ~Q.finite(y)) is None
assert ask(
Q.finite(a), Q.finite(x) & ~Q.finite(y) & Q.positive(z)) is None
assert ask(Q.finite(a), Q.finite(x) & Q.positive(y) &
~Q.finite(y) & Q.positive(z) & ~Q.finite(z)) is False
assert ask(Q.finite(a), Q.finite(x) &
Q.positive(y) & ~Q.finite(y) & Q.negative(z)) is None
assert ask(
Q.finite(a), Q.finite(x) & Q.positive(y) & ~Q.finite(y)) is None
assert ask(Q.finite(a), Q.finite(x) &
Q.positive(y) & ~Q.finite(y) & Q.positive(z)) is False
assert ask(
Q.finite(a), Q.finite(x) & Q.negative(y) & Q.negative(z)) is None
assert ask(Q.finite(a), Q.finite(x) & Q.negative(y)) is None
assert ask(
Q.finite(a), Q.finite(x) & Q.negative(y) & Q.positive(z)) is None
assert ask(Q.finite(a), Q.finite(x)) is None
assert ask(Q.finite(a), Q.finite(x) & Q.positive(z)) is None
assert ask(
Q.finite(a), Q.finite(x) & Q.positive(y) & Q.positive(z)) is None
assert ask(Q.finite(a), Q.positive(x) & Q.finite(x) & Q.positive(y) &
Q.finite(y) & Q.positive(z) & Q.finite(z)) is True
assert ask(Q.finite(a), Q.positive(x) & Q.finite(x) & Q.positive(y) &
Q.finite(y) & Q.negative(z) & ~Q.finite(z)) is False
assert ask(Q.finite(a), Q.positive(x) & Q.finite(x) &
Q.positive(y) & Q.finite(y) & ~Q.finite(z)) is False
assert ask(Q.finite(a), Q.positive(x) & Q.finite(x) & Q.positive(y) &
Q.finite(y) & Q.positive(z) & ~Q.finite(z)) is False
assert ask(Q.finite(a), Q.positive(x) & Q.finite(x) &
Q.positive(y) & Q.finite(y) & Q.negative(z)) is None
assert ask(Q.finite(a), Q.positive(x) &
Q.finite(x) & Q.positive(y) & Q.finite(y)) is None
assert ask(Q.finite(a), Q.positive(x) & Q.finite(x) &
Q.positive(y) & Q.finite(y) & Q.positive(z)) is None
assert ask(Q.finite(a), Q.positive(x) & Q.finite(x) & Q.negative(y) &
~Q.finite(y) & Q.negative(z) & ~Q.finite(z)) is False
assert ask(Q.finite(a), Q.positive(x) & Q.finite(x) &
Q.negative(y) & ~Q.finite(y) & ~Q.finite(z)) is None
assert ask(Q.finite(a), Q.positive(x) & Q.finite(x) & Q.negative(y) &
~Q.finite(y) & Q.positive(z) & ~Q.finite(z)) is None
assert ask(Q.finite(a), Q.positive(x) & Q.finite(x) &
Q.negative(y) & ~Q.finite(y) & Q.negative(z)) is False
assert ask(Q.finite(a), Q.positive(x) &
Q.finite(x) & Q.negative(y) & ~Q.finite(y)) is None
assert ask(Q.finite(a), Q.positive(x) & Q.finite(x) &
Q.negative(y) & ~Q.finite(y) & Q.positive(z)) is None
assert ask(Q.finite(a), Q.positive(x) &
Q.finite(x) & ~Q.finite(y) & ~Q.finite(z)) is None
assert ask(Q.finite(a), Q.positive(x) & Q.finite(x) &
~Q.finite(y) & Q.positive(z) & ~Q.finite(z)) is None
assert ask(Q.finite(a), Q.positive(x) &
Q.finite(x) & ~Q.finite(y) & Q.negative(z)) is None
assert ask(
Q.finite(a), Q.positive(x) & Q.finite(x) & ~Q.finite(y)) is None
assert ask(Q.finite(a), Q.positive(x) &
Q.finite(x) & ~Q.finite(y) & Q.positive(z)) is None
assert ask(Q.finite(a), Q.positive(x) & Q.finite(x) & Q.positive(y) &
~Q.finite(y) & Q.positive(z) & ~Q.finite(z)) is False
assert ask(Q.finite(a), Q.positive(x) & Q.finite(x) &
Q.positive(y) & ~Q.finite(y) & Q.negative(z)) is None
assert ask(Q.finite(a), Q.positive(x) &
Q.finite(x) & Q.positive(y) & ~Q.finite(y)) is None
assert ask(Q.finite(a), Q.positive(x) & Q.finite(x) &
Q.positive(y) & ~Q.finite(y) & Q.positive(z)) is False
assert ask(Q.finite(a), Q.positive(x) &
Q.finite(x) & Q.negative(y) & Q.negative(z)) is None
assert ask(
Q.finite(a), Q.positive(x) & Q.finite(x) & Q.negative(y)) is None
assert ask(Q.finite(a), Q.positive(x) &
Q.finite(x) & Q.negative(y) & Q.positive(z)) is None
assert ask(Q.finite(a), Q.positive(x) & Q.finite(x)) is None
assert ask(
Q.finite(a), Q.positive(x) & Q.finite(x) & Q.positive(z)) is None
assert ask(Q.finite(a), Q.positive(x) &
Q.finite(x) & Q.positive(y) & Q.positive(z)) is None
assert ask(Q.finite(a), Q.negative(x) & ~Q.finite(x) & Q.negative(y) &
~Q.finite(y) & Q.negative(z) & ~Q.finite(z)) is False
assert ask(Q.finite(a), Q.negative(x) & ~Q.finite(x) &
Q.negative(y) & ~Q.finite(y) & ~Q.finite(z)) is None
assert ask(Q.finite(a), Q.negative(x) & ~Q.finite(x) & Q.negative(y) &
~Q.finite(y) & Q.positive(z) & ~Q.finite(z)) is None
assert ask(Q.finite(a), Q.negative(x) & ~Q.finite(x) &
Q.negative(y) & ~Q.finite(y) & Q.negative(z)) is False
assert ask(Q.finite(a), Q.negative(x) &
~Q.finite(x) & Q.negative(y) & ~Q.finite(y)) is None
assert ask(Q.finite(a), Q.negative(x) & ~Q.finite(x) &
Q.negative(y) & ~Q.finite(y) & Q.positive(z)) is None
assert ask(Q.finite(a), Q.negative(x) &
~Q.finite(x) & ~Q.finite(y) & ~Q.finite(z)) is None
assert ask(Q.finite(a), Q.negative(x) & ~Q.finite(x) &
~Q.finite(y) & Q.positive(z) & ~Q.finite(z)) is None
assert ask(Q.finite(a), Q.negative(x) &
~Q.finite(x) & ~Q.finite(y) & Q.negative(z)) is None
assert ask(
Q.finite(a), Q.negative(x) & ~Q.finite(x) & ~Q.finite(y)) is None
assert ask(Q.finite(a), Q.negative(x) &
~Q.finite(x) & ~Q.finite(y) & Q.positive(z)) is None
assert ask(Q.finite(a), Q.negative(x) & ~Q.finite(x) & Q.positive(y) &
~Q.finite(y) & Q.positive(z) & ~Q.finite(z)) is None
assert ask(Q.finite(a), Q.negative(x) & ~Q.finite(x) &
Q.positive(y) & ~Q.finite(y) & Q.negative(z)) is None
assert ask(Q.finite(a), Q.negative(x) &
~Q.finite(x) & Q.positive(y) & ~Q.finite(y)) is None
assert ask(Q.finite(a), Q.negative(x) & ~Q.finite(x) &
Q.positive(y) & ~Q.finite(y) & Q.positive(z)) is None
assert ask(Q.finite(a), Q.negative(x) &
~Q.finite(x) & Q.negative(y) & Q.negative(z)) is False
assert ask(
Q.finite(a), Q.negative(x) & ~Q.finite(x) & Q.negative(y)) is None
assert ask(Q.finite(a), Q.negative(x) &
~Q.finite(x) & Q.negative(y) & Q.positive(z)) is None
assert ask(Q.finite(a), Q.negative(x) & ~Q.finite(x)) is None
assert ask(
Q.finite(a), Q.negative(x) & ~Q.finite(x) & Q.positive(z)) is None
assert ask(Q.finite(a), Q.negative(x) &
~Q.finite(x) & Q.positive(y) & Q.positive(z)) is None
assert ask(
Q.finite(a), ~Q.finite(x) & ~Q.finite(y) & ~Q.finite(z)) is None
assert ask(Q.finite(a), ~Q.finite(x) &
~Q.finite(y) & Q.positive(z) & ~Q.finite(z)) is None
assert ask(
Q.finite(a), ~Q.finite(x) & ~Q.finite(y) & Q.negative(z)) is None
assert ask(Q.finite(a), ~Q.finite(x) & ~Q.finite(y)) is None
assert ask(
Q.finite(a), ~Q.finite(x) & ~Q.finite(y) & Q.positive(z)) is None
assert ask(Q.finite(a), ~Q.finite(x) & Q.positive(y) &
~Q.finite(y) & Q.positive(z) & ~Q.finite(z)) is None
assert ask(Q.finite(a), ~Q.finite(x) &
Q.positive(y) & ~Q.finite(y) & Q.negative(z)) is None
assert ask(
Q.finite(a), ~Q.finite(x) & Q.positive(y) & ~Q.finite(y)) is None
assert ask(Q.finite(a), ~Q.finite(x) &
Q.positive(y) & ~Q.finite(y) & Q.positive(z)) is None
assert ask(
Q.finite(a), ~Q.finite(x) & Q.negative(y) & Q.negative(z)) is None
assert ask(Q.finite(a), ~Q.finite(x) & Q.negative(y)) is None
assert ask(
Q.finite(a), ~Q.finite(x) & Q.negative(y) & Q.positive(z)) is None
assert ask(Q.finite(a), ~Q.finite(x)) is None
assert ask(Q.finite(a), ~Q.finite(x) & Q.positive(z)) is None
assert ask(
Q.finite(a), ~Q.finite(x) & Q.positive(y) & Q.positive(z)) is None
assert ask(Q.finite(a), Q.positive(x) & ~Q.finite(x) & Q.positive(y) &
~Q.finite(y) & Q.positive(z) & ~Q.finite(z)) is False
assert ask(Q.finite(a), Q.positive(x) & ~Q.finite(x) &
Q.positive(y) & ~Q.finite(y) & Q.negative(z)) is None
assert ask(Q.finite(a), Q.positive(x) &
~Q.finite(x) & Q.positive(y) & ~Q.finite(y)) is None
assert ask(Q.finite(a), Q.positive(x) & ~Q.finite(x) &
Q.positive(y) & ~Q.finite(y) & Q.positive(z)) is False
assert ask(Q.finite(a), Q.positive(x) &
~Q.finite(x) & Q.negative(y) & Q.negative(z)) is None
assert ask(
Q.finite(a), Q.positive(x) & ~Q.finite(x) & Q.negative(y)) is None
assert ask(Q.finite(a), Q.positive(x) &
~Q.finite(x) & Q.negative(y) & Q.positive(z)) is None
assert ask(Q.finite(a), Q.positive(x) & ~Q.finite(x)) is None
assert ask(
Q.finite(a), Q.positive(x) & ~Q.finite(x) & Q.positive(z)) is None
assert ask(Q.finite(a), Q.positive(x) &
~Q.finite(x) & Q.positive(y) & Q.positive(z)) is False
assert ask(
Q.finite(a), Q.negative(x) & Q.negative(y) & Q.negative(z)) is None
assert ask(Q.finite(a), Q.negative(x) & Q.negative(y)) is None
assert ask(
Q.finite(a), Q.negative(x) & Q.negative(y) & Q.positive(z)) is None
assert ask(Q.finite(a), Q.negative(x)) is None
assert ask(Q.finite(a), Q.negative(x) & Q.positive(z)) is None
assert ask(
Q.finite(a), Q.negative(x) & Q.positive(y) & Q.positive(z)) is None
assert ask(Q.finite(a)) is None
assert ask(Q.finite(a), Q.positive(z)) is None
assert ask(Q.finite(a), Q.positive(y) & Q.positive(z)) is None
assert ask(
Q.finite(a), Q.positive(x) & Q.positive(y) & Q.positive(z)) is None
x, y, z = symbols('x,y,z')
assert ask(Q.finite(2*x)) is None
assert ask(Q.finite(2*x), Q.finite(x)) is True
a = x*y
x, y = a.args
assert ask(Q.finite(a), Q.finite(x) & Q.finite(y)) is True
assert ask(Q.finite(a), Q.finite(x) & ~Q.finite(y)) is False
assert ask(Q.finite(a), Q.finite(x)) is None
assert ask(Q.finite(a), ~Q.finite(x) & Q.finite(y)) is False
assert ask(Q.finite(a), ~Q.finite(x) & ~Q.finite(y)) is False
assert ask(Q.finite(a), ~Q.finite(x)) is None
assert ask(Q.finite(a), Q.finite(y)) is None
assert ask(Q.finite(a), ~Q.finite(y)) is None
assert ask(Q.finite(a)) is None
a = x*y*z
x, y, z = a.args
assert ask(
Q.finite(a), Q.finite(x) & Q.finite(y) & Q.finite(z)) is True
assert ask(
Q.finite(a), Q.finite(x) & Q.finite(y) & ~Q.finite(z)) is False
assert ask(Q.finite(a), Q.finite(x) & Q.finite(y)) is None
assert ask(
Q.finite(a), Q.finite(x) & ~Q.finite(y) & Q.finite(z)) is False
assert ask(
Q.finite(a), Q.finite(x) & ~Q.finite(y) & ~Q.finite(z)) is False
assert ask(Q.finite(a), Q.finite(x) & ~Q.finite(y)) is None
assert ask(Q.finite(a), Q.finite(x) & Q.finite(z)) is None
assert ask(Q.finite(a), Q.finite(x) & ~Q.finite(z)) is None
assert ask(Q.finite(a), Q.finite(x)) is None
assert ask(
Q.finite(a), ~Q.finite(x) & Q.finite(y) & Q.finite(z)) is False
assert ask(
Q.finite(a), ~Q.finite(x) & Q.finite(y) & ~Q.finite(z)) is False
assert ask(Q.finite(a), ~Q.finite(x) & Q.finite(y)) is None
assert ask(
Q.finite(a), ~Q.finite(x) & ~Q.finite(y) & Q.finite(z)) is False
assert ask(
Q.finite(a), ~Q.finite(x) & ~Q.finite(y) & ~Q.finite(z)) is False
assert ask(Q.finite(a), ~Q.finite(x) & ~Q.finite(y)) is None
assert ask(Q.finite(a), ~Q.finite(x) & Q.finite(z)) is None
assert ask(Q.finite(a), ~Q.finite(x) & ~Q.finite(z)) is None
assert ask(Q.finite(a), ~Q.finite(x)) is None
assert ask(Q.finite(a), Q.finite(y) & Q.finite(z)) is None
assert ask(Q.finite(a), Q.finite(y) & ~Q.finite(z)) is None
assert ask(Q.finite(a), Q.finite(y)) is None
assert ask(Q.finite(a), ~Q.finite(y) & Q.finite(z)) is None
assert ask(Q.finite(a), ~Q.finite(y) & ~Q.finite(z)) is None
assert ask(Q.finite(a), ~Q.finite(y)) is None
assert ask(Q.finite(a), Q.finite(z)) is None
assert ask(Q.finite(a), ~Q.finite(z)) is None
assert ask(Q.finite(a), ~Q.finite(z) &
Q.nonzero(x) & Q.nonzero(y) & Q.nonzero(z)) is None
assert ask(Q.finite(a), ~Q.finite(y) & ~Q.finite(z) &
Q.nonzero(x) & Q.nonzero(y) & Q.nonzero(z)) is False
x, y, z = symbols('x,y,z')
assert ask(Q.finite(x**2)) is None
assert ask(Q.finite(2**x)) is None
assert ask(Q.finite(2**x), Q.finite(x)) is True
assert ask(Q.finite(x**x)) is None
assert ask(Q.finite(Rational(1, 2) ** x)) is None
assert ask(Q.finite(Rational(1, 2) ** x), Q.positive(x)) is True
assert ask(Q.finite(Rational(1, 2) ** x), Q.negative(x)) is None
assert ask(Q.finite(S(2) ** x), Q.negative(x)) is True
assert ask(Q.finite(sqrt(x))) is None
assert ask(Q.finite(2**x), ~Q.finite(x)) is False
assert ask(Q.finite(x**2), ~Q.finite(x)) is False
# sign function
assert ask(Q.finite(sign(x))) is True
assert ask(Q.finite(sign(x)), ~Q.finite(x)) is True
# exponential functions
assert ask(Q.finite(log(x))) is None
assert ask(Q.finite(log(x)), Q.finite(x)) is True
assert ask(Q.finite(exp(x))) is None
assert ask(Q.finite(exp(x)), Q.finite(x)) is True
assert ask(Q.finite(exp(2))) is True
# trigonometric functions
assert ask(Q.finite(sin(x))) is True
assert ask(Q.finite(sin(x)), ~Q.finite(x)) is True
assert ask(Q.finite(cos(x))) is True
assert ask(Q.finite(cos(x)), ~Q.finite(x)) is True
assert ask(Q.finite(2*sin(x))) is True
assert ask(Q.finite(sin(x)**2)) is True
assert ask(Q.finite(cos(x)**2)) is True
assert ask(Q.finite(cos(x) + sin(x))) is True
@XFAIL
def test_bounded_xfail():
    """We need to support relations in ask for this to work"""
    # Both sin and cos are bounded, so raising them to a symbolic power
    # should still be finite once relational reasoning is supported.
    for bounded_fn in (sin, cos):
        assert ask(Q.finite(bounded_fn(x)**x)) is True
def test_commutative():
    """By default objects are Q.commutative that is why it returns True
    for both key=True and key=False"""
    # A bare symbol is commutative by default, and stays so under any
    # unrelated or subset-of-complex assumption.
    assert ask(Q.commutative(x)) is True
    for fact in (Q.complex(x), Q.imaginary(x), Q.real(x),
                 Q.positive(x), ~Q.commutative(y)):
        assert ask(Q.commutative(x), fact) is True
    # ...but an explicit non-commutativity assumption on x flips it.
    assert ask(Q.commutative(x), ~Q.commutative(x)) is False
    # Compound expressions inherit (non-)commutativity from their symbols.
    for expr in (2*x, x + 1, x**2):
        assert ask(Q.commutative(expr)) is True
        assert ask(Q.commutative(expr), ~Q.commutative(x)) is False
    assert ask(Q.commutative(log(x))) is True
def test_complex():
    """Assertion table for ``ask(Q.complex(expr), facts)``.

    Any fact implying membership in a subset of the complex numbers
    (real, rational, irrational, positive, imaginary, algebraic) must
    make Q.complex True; unrelated facts leave it undecided (None).
    """
    # Bare symbol: decided only by facts about x itself.
    assert ask(Q.complex(x)) is None
    assert ask(Q.complex(x), Q.complex(x)) is True
    assert ask(Q.complex(x), Q.complex(y)) is None
    assert ask(Q.complex(x), ~Q.complex(x)) is False
    assert ask(Q.complex(x), Q.real(x)) is True
    assert ask(Q.complex(x), ~Q.real(x)) is None
    assert ask(Q.complex(x), Q.rational(x)) is True
    assert ask(Q.complex(x), Q.irrational(x)) is True
    assert ask(Q.complex(x), Q.positive(x)) is True
    assert ask(Q.complex(x), Q.imaginary(x)) is True
    assert ask(Q.complex(x), Q.algebraic(x)) is True
    # a+b — sums of complex members are complex.
    assert ask(Q.complex(x + 1), Q.complex(x)) is True
    assert ask(Q.complex(x + 1), Q.real(x)) is True
    assert ask(Q.complex(x + 1), Q.rational(x)) is True
    assert ask(Q.complex(x + 1), Q.irrational(x)) is True
    assert ask(Q.complex(x + 1), Q.imaginary(x)) is True
    assert ask(Q.complex(x + 1), Q.integer(x)) is True
    assert ask(Q.complex(x + 1), Q.even(x)) is True
    assert ask(Q.complex(x + 1), Q.odd(x)) is True
    assert ask(Q.complex(x + y), Q.complex(x) & Q.complex(y)) is True
    assert ask(Q.complex(x + y), Q.real(x) & Q.imaginary(y)) is True
    # a*x +b — affine images of complex members are complex.
    assert ask(Q.complex(2*x + 1), Q.complex(x)) is True
    assert ask(Q.complex(2*x + 1), Q.real(x)) is True
    assert ask(Q.complex(2*x + 1), Q.positive(x)) is True
    assert ask(Q.complex(2*x + 1), Q.rational(x)) is True
    assert ask(Q.complex(2*x + 1), Q.irrational(x)) is True
    assert ask(Q.complex(2*x + 1), Q.imaginary(x)) is True
    assert ask(Q.complex(2*x + 1), Q.integer(x)) is True
    assert ask(Q.complex(2*x + 1), Q.even(x)) is True
    assert ask(Q.complex(2*x + 1), Q.odd(x)) is True
    # x**2
    assert ask(Q.complex(x**2), Q.complex(x)) is True
    assert ask(Q.complex(x**2), Q.real(x)) is True
    assert ask(Q.complex(x**2), Q.positive(x)) is True
    assert ask(Q.complex(x**2), Q.rational(x)) is True
    assert ask(Q.complex(x**2), Q.irrational(x)) is True
    assert ask(Q.complex(x**2), Q.imaginary(x)) is True
    assert ask(Q.complex(x**2), Q.integer(x)) is True
    assert ask(Q.complex(x**2), Q.even(x)) is True
    assert ask(Q.complex(x**2), Q.odd(x)) is True
    # 2**x
    assert ask(Q.complex(2**x), Q.complex(x)) is True
    assert ask(Q.complex(2**x), Q.real(x)) is True
    assert ask(Q.complex(2**x), Q.positive(x)) is True
    assert ask(Q.complex(2**x), Q.rational(x)) is True
    assert ask(Q.complex(2**x), Q.irrational(x)) is True
    assert ask(Q.complex(2**x), Q.imaginary(x)) is True
    assert ask(Q.complex(2**x), Q.integer(x)) is True
    assert ask(Q.complex(2**x), Q.even(x)) is True
    assert ask(Q.complex(2**x), Q.odd(x)) is True
    assert ask(Q.complex(x**y), Q.complex(x) & Q.complex(y)) is True
    # trigonometric expressions
    assert ask(Q.complex(sin(x))) is True
    assert ask(Q.complex(sin(2*x + 1))) is True
    assert ask(Q.complex(cos(x))) is True
    assert ask(Q.complex(cos(2*x + 1))) is True
    # exponential
    assert ask(Q.complex(exp(x))) is True
    # NOTE(review): exact duplicate of the line above — possibly a
    # different argument (e.g. exp(x + 1)) was intended; verify.
    assert ask(Q.complex(exp(x))) is True
    # Q.complexes
    assert ask(Q.complex(Abs(x))) is True
    assert ask(Q.complex(re(x))) is True
    assert ask(Q.complex(im(x))) is True
def test_even():
    """Assertion table for ``ask(Q.even(expr), facts)``.

    Exercises parity inference over scalings, shifts, sums, powers and
    products.  Uses the module-level symbols n, k, m, p, t in addition
    to x, y, z — presumably plain Symbols; the parity facts are supplied
    explicitly in each query.
    """
    # Bare symbol: evenness is never implied by mere integrality.
    assert ask(Q.even(x)) is None
    assert ask(Q.even(x), Q.integer(x)) is None
    assert ask(Q.even(x), ~Q.integer(x)) is False
    assert ask(Q.even(x), Q.rational(x)) is None
    assert ask(Q.even(x), Q.positive(x)) is None
    # 2*x is even exactly when x is an integer.
    assert ask(Q.even(2*x)) is None
    assert ask(Q.even(2*x), Q.integer(x)) is True
    assert ask(Q.even(2*x), Q.even(x)) is True
    assert ask(Q.even(2*x), Q.irrational(x)) is False
    assert ask(Q.even(2*x), Q.odd(x)) is True
    assert ask(Q.even(2*x), ~Q.integer(x)) is None
    # Odd coefficient: parity of 3*x follows the parity of x.
    assert ask(Q.even(3*x), Q.integer(x)) is None
    assert ask(Q.even(3*x), Q.even(x)) is True
    assert ask(Q.even(3*x), Q.odd(x)) is False
    # Integer shifts flip or preserve parity.
    assert ask(Q.even(x + 1), Q.odd(x)) is True
    assert ask(Q.even(x + 1), Q.even(x)) is False
    assert ask(Q.even(x + 2), Q.odd(x)) is False
    assert ask(Q.even(x + 2), Q.even(x)) is True
    assert ask(Q.even(7 - x), Q.odd(x)) is True
    assert ask(Q.even(7 + x), Q.odd(x)) is True
    # Sums of two terms with known parity.
    assert ask(Q.even(x + y), Q.odd(x) & Q.odd(y)) is True
    assert ask(Q.even(x + y), Q.odd(x) & Q.even(y)) is False
    assert ask(Q.even(x + y), Q.even(x) & Q.even(y)) is True
    assert ask(Q.even(2*x + 1), Q.integer(x)) is False
    # NOTE(review): Q.rational(x) is repeated — Q.rational(x) &
    # Q.rational(y) was probably intended; verify before changing,
    # since the expected result may differ.
    assert ask(Q.even(2*x*y), Q.rational(x) & Q.rational(x)) is None
    # NOTE(review): Q.irrational(x) repeated likewise; verify.
    assert ask(Q.even(2*x*y), Q.irrational(x) & Q.irrational(x)) is None
    assert ask(Q.even(x + y + z), Q.odd(x) & Q.odd(y) & Q.even(z)) is True
    assert ask(Q.even(x + y + z + t),
               Q.odd(x) & Q.odd(y) & Q.even(z) & Q.integer(t)) is None
    # Abs/re/im preserve evenness of (real) even arguments.
    assert ask(Q.even(Abs(x)), Q.even(x)) is True
    assert ask(Q.even(Abs(x)), ~Q.even(x)) is None
    assert ask(Q.even(re(x)), Q.even(x)) is True
    assert ask(Q.even(re(x)), ~Q.even(x)) is None
    assert ask(Q.even(im(x)), Q.even(x)) is True
    assert ask(Q.even(im(x)), Q.real(x)) is True
    # Powers: base parity propagates for positive integer exponents.
    assert ask(Q.even((-1)**n), Q.integer(n)) is False
    assert ask(Q.even(k**2), Q.even(k)) is True
    assert ask(Q.even(n**2), Q.odd(n)) is False
    assert ask(Q.even(2**k), Q.even(k)) is None
    assert ask(Q.even(x**2)) is None
    assert ask(Q.even(k**m), Q.even(k) & Q.integer(m) & ~Q.negative(m)) is None
    assert ask(Q.even(n**m), Q.odd(n) & Q.integer(m) & ~Q.negative(m)) is False
    assert ask(Q.even(k**p), Q.even(k) & Q.integer(p) & Q.positive(p)) is True
    assert ask(Q.even(n**p), Q.odd(n) & Q.integer(p) & Q.positive(p)) is False
    assert ask(Q.even(m**k), Q.even(k) & Q.integer(m) & ~Q.negative(m)) is None
    assert ask(Q.even(p**k), Q.even(k) & Q.integer(p) & Q.positive(p)) is None
    assert ask(Q.even(m**n), Q.odd(n) & Q.integer(m) & ~Q.negative(m)) is None
    assert ask(Q.even(p**n), Q.odd(n) & Q.integer(p) & Q.positive(p)) is None
    assert ask(Q.even(k**x), Q.even(k)) is None
    assert ask(Q.even(n**x), Q.odd(n)) is None
    # Products.
    assert ask(Q.even(x*y), Q.integer(x) & Q.integer(y)) is None
    assert ask(Q.even(x*x), Q.integer(x)) is None
    assert ask(Q.even(x*(x + y)), Q.integer(x) & Q.odd(y)) is True
    assert ask(Q.even(x*(x + y)), Q.integer(x) & Q.even(y)) is None
@XFAIL
def test_evenness_in_ternary_integer_product_with_odd():
    # Tests that oddness inference is independent of term ordering.
    # Term ordering at the point of testing depends on SymPy's symbol order, so
    # we try to force a different order by modifying symbol names.
    premises = Q.integer(x) & Q.integer(y) & Q.odd(z)
    for product in (x*y*(y + z), y*x*(x + z)):
        assert ask(Q.even(product), premises) is True
def test_evenness_in_ternary_integer_product_with_even():
    # An even addend leaves the parity of the ternary product undecided.
    premises = Q.integer(x) & Q.integer(y) & Q.even(z)
    assert ask(Q.even(x*y*(y + z)), premises) is None
def test_extended_real():
    # Finite reals of either sign are extended-real, and so is their
    # negation.
    for expr, premise in ((x, Q.positive(x)),
                          (-x, Q.positive(x)),
                          (-x, Q.negative(x))):
        assert ask(Q.extended_real(expr), premise) is True
    # A real shifted by +oo stays in the extended reals.
    assert ask(Q.extended_real(x + S.Infinity), Q.real(x)) is True
def test_rational():
    """Assertion table for ``ask(Q.rational(expr), facts)``.

    Rationality is closed under scaling and division by nonzero
    rationals, and is excluded for (most) transcendental function values
    at nonzero rational arguments (Lindemann-style facts encoded in the
    handlers).
    """
    # Bare symbol.
    assert ask(Q.rational(x), Q.integer(x)) is True
    assert ask(Q.rational(x), Q.irrational(x)) is False
    assert ask(Q.rational(x), Q.real(x)) is None
    assert ask(Q.rational(x), Q.positive(x)) is None
    assert ask(Q.rational(x), Q.negative(x)) is None
    assert ask(Q.rational(x), Q.nonzero(x)) is None
    assert ask(Q.rational(x), ~Q.algebraic(x)) is False
    # Rational scalings/reciprocals preserve (ir)rationality.
    assert ask(Q.rational(2*x), Q.rational(x)) is True
    assert ask(Q.rational(2*x), Q.integer(x)) is True
    assert ask(Q.rational(2*x), Q.even(x)) is True
    assert ask(Q.rational(2*x), Q.odd(x)) is True
    assert ask(Q.rational(2*x), Q.irrational(x)) is False
    assert ask(Q.rational(x/2), Q.rational(x)) is True
    assert ask(Q.rational(x/2), Q.integer(x)) is True
    assert ask(Q.rational(x/2), Q.even(x)) is True
    assert ask(Q.rational(x/2), Q.odd(x)) is True
    assert ask(Q.rational(x/2), Q.irrational(x)) is False
    assert ask(Q.rational(1/x), Q.rational(x)) is True
    assert ask(Q.rational(1/x), Q.integer(x)) is True
    assert ask(Q.rational(1/x), Q.even(x)) is True
    assert ask(Q.rational(1/x), Q.odd(x)) is True
    assert ask(Q.rational(1/x), Q.irrational(x)) is False
    assert ask(Q.rational(2/x), Q.rational(x)) is True
    assert ask(Q.rational(2/x), Q.integer(x)) is True
    assert ask(Q.rational(2/x), Q.even(x)) is True
    assert ask(Q.rational(2/x), Q.odd(x)) is True
    assert ask(Q.rational(2/x), Q.irrational(x)) is False
    # NOTE(review): duplicate of the ~Q.algebraic(x) assertion near the
    # top of this function; probably redundant — verify intent.
    assert ask(Q.rational(x), ~Q.algebraic(x)) is False
    # with multiple symbols
    assert ask(Q.rational(x*y), Q.irrational(x) & Q.irrational(y)) is None
    assert ask(Q.rational(y/x), Q.rational(x) & Q.rational(y)) is True
    assert ask(Q.rational(y/x), Q.integer(x) & Q.rational(y)) is True
    assert ask(Q.rational(y/x), Q.even(x) & Q.rational(y)) is True
    assert ask(Q.rational(y/x), Q.odd(x) & Q.rational(y)) is True
    assert ask(Q.rational(y/x), Q.irrational(x) & Q.rational(y)) is False
    # f(q) is irrational for rational q != 0 (f transcendental, f(0) rational).
    for f in [exp, sin, tan, asin, atan, cos]:
        assert ask(Q.rational(f(7))) is False
        assert ask(Q.rational(f(7, evaluate=False))) is False
        assert ask(Q.rational(f(0, evaluate=False))) is True
        assert ask(Q.rational(f(x)), Q.rational(x)) is None
        assert ask(Q.rational(f(x)), Q.rational(x) & Q.nonzero(x)) is False
    # g(q) is irrational for rational q != 1 (g(1) rational).
    for g in [log, acos]:
        assert ask(Q.rational(g(7))) is False
        assert ask(Q.rational(g(7, evaluate=False))) is False
        assert ask(Q.rational(g(1, evaluate=False))) is True
        assert ask(Q.rational(g(x)), Q.rational(x)) is None
        assert ask(Q.rational(g(x)), Q.rational(x) & Q.nonzero(x - 1)) is False
    # cot/acot are irrational at every rational argument.
    for h in [cot, acot]:
        assert ask(Q.rational(h(7))) is False
        assert ask(Q.rational(h(7, evaluate=False))) is False
        assert ask(Q.rational(h(x)), Q.rational(x)) is False
def test_hermitian():
    """Assertion table for Q.hermitian and Q.antihermitian.

    In the scalar setting exercised here, hermitian behaves like "real"
    and antihermitian like "purely imaginary (or zero)": real values are
    hermitian, imaginary values are antihermitian, and sums/products
    combine accordingly.
    """
    # Q.hermitian: bare symbol.
    assert ask(Q.hermitian(x)) is None
    assert ask(Q.hermitian(x), Q.antihermitian(x)) is False
    assert ask(Q.hermitian(x), Q.imaginary(x)) is False
    assert ask(Q.hermitian(x), Q.prime(x)) is True
    assert ask(Q.hermitian(x), Q.real(x)) is True
    # Shift by a real (1) preserves hermitian-ness.
    assert ask(Q.hermitian(x + 1), Q.antihermitian(x)) is False
    assert ask(Q.hermitian(x + 1), Q.complex(x)) is None
    assert ask(Q.hermitian(x + 1), Q.hermitian(x)) is True
    assert ask(Q.hermitian(x + 1), Q.imaginary(x)) is False
    assert ask(Q.hermitian(x + 1), Q.real(x)) is True
    # Shift by I breaks hermitian-ness of hermitian/real x.
    assert ask(Q.hermitian(x + I), Q.antihermitian(x)) is None
    assert ask(Q.hermitian(x + I), Q.complex(x)) is None
    assert ask(Q.hermitian(x + I), Q.hermitian(x)) is False
    assert ask(Q.hermitian(x + I), Q.imaginary(x)) is None
    assert ask(Q.hermitian(x + I), Q.real(x)) is False
    # Two-term sums over all fact combinations.
    assert ask(
        Q.hermitian(x + y), Q.antihermitian(x) & Q.antihermitian(y)) is None
    assert ask(Q.hermitian(x + y), Q.antihermitian(x) & Q.complex(y)) is None
    assert ask(
        Q.hermitian(x + y), Q.antihermitian(x) & Q.hermitian(y)) is False
    assert ask(Q.hermitian(x + y), Q.antihermitian(x) & Q.imaginary(y)) is None
    assert ask(Q.hermitian(x + y), Q.antihermitian(x) & Q.real(y)) is False
    assert ask(Q.hermitian(x + y), Q.hermitian(x) & Q.complex(y)) is None
    assert ask(Q.hermitian(x + y), Q.hermitian(x) & Q.hermitian(y)) is True
    assert ask(Q.hermitian(x + y), Q.hermitian(x) & Q.imaginary(y)) is False
    assert ask(Q.hermitian(x + y), Q.hermitian(x) & Q.real(y)) is True
    assert ask(Q.hermitian(x + y), Q.imaginary(x) & Q.complex(y)) is None
    assert ask(Q.hermitian(x + y), Q.imaginary(x) & Q.imaginary(y)) is None
    assert ask(Q.hermitian(x + y), Q.imaginary(x) & Q.real(y)) is False
    assert ask(Q.hermitian(x + y), Q.real(x) & Q.complex(y)) is None
    assert ask(Q.hermitian(x + y), Q.real(x) & Q.real(y)) is True
    # Multiplication by I swaps hermitian <-> antihermitian.
    assert ask(Q.hermitian(I*x), Q.antihermitian(x)) is True
    assert ask(Q.hermitian(I*x), Q.complex(x)) is None
    assert ask(Q.hermitian(I*x), Q.hermitian(x)) is False
    assert ask(Q.hermitian(I*x), Q.imaginary(x)) is True
    assert ask(Q.hermitian(I*x), Q.real(x)) is False
    assert ask(Q.hermitian(x*y), Q.hermitian(x) & Q.real(y)) is True
    # Three-term sums.
    assert ask(
        Q.hermitian(x + y + z), Q.real(x) & Q.real(y) & Q.real(z)) is True
    assert ask(Q.hermitian(x + y + z),
               Q.real(x) & Q.real(y) & Q.imaginary(z)) is False
    assert ask(Q.hermitian(x + y + z),
               Q.real(x) & Q.imaginary(y) & Q.imaginary(z)) is None
    assert ask(Q.hermitian(x + y + z),
               Q.imaginary(x) & Q.imaginary(y) & Q.imaginary(z)) is None
    # Q.antihermitian: bare symbol.
    assert ask(Q.antihermitian(x)) is None
    assert ask(Q.antihermitian(x), Q.real(x)) is False
    assert ask(Q.antihermitian(x), Q.prime(x)) is False
    # Shift by a real (1) destroys antihermitian-ness.
    assert ask(Q.antihermitian(x + 1), Q.antihermitian(x)) is False
    assert ask(Q.antihermitian(x + 1), Q.complex(x)) is None
    assert ask(Q.antihermitian(x + 1), Q.hermitian(x)) is None
    assert ask(Q.antihermitian(x + 1), Q.imaginary(x)) is False
    assert ask(Q.antihermitian(x + 1), Q.real(x)) is False
    # Shift by I preserves antihermitian-ness.
    assert ask(Q.antihermitian(x + I), Q.antihermitian(x)) is True
    assert ask(Q.antihermitian(x + I), Q.complex(x)) is None
    assert ask(Q.antihermitian(x + I), Q.hermitian(x)) is False
    assert ask(Q.antihermitian(x + I), Q.imaginary(x)) is True
    assert ask(Q.antihermitian(x + I), Q.real(x)) is False
    # Two-term sums over all fact combinations.
    assert ask(
        Q.antihermitian(x + y), Q.antihermitian(x) & Q.antihermitian(y)
    ) is True
    assert ask(
        Q.antihermitian(x + y), Q.antihermitian(x) & Q.complex(y)) is None
    assert ask(
        Q.antihermitian(x + y), Q.antihermitian(x) & Q.hermitian(y)) is False
    assert ask(
        Q.antihermitian(x + y), Q.antihermitian(x) & Q.imaginary(y)) is True
    assert ask(Q.antihermitian(x + y), Q.antihermitian(x) & Q.real(y)
               ) is False
    assert ask(Q.antihermitian(x + y), Q.hermitian(x) & Q.complex(y)) is None
    assert ask(Q.antihermitian(x + y), Q.hermitian(x) & Q.hermitian(y)
               ) is None
    assert ask(
        Q.antihermitian(x + y), Q.hermitian(x) & Q.imaginary(y)) is False
    assert ask(Q.antihermitian(x + y), Q.hermitian(x) & Q.real(y)) is None
    assert ask(Q.antihermitian(x + y), Q.imaginary(x) & Q.complex(y)) is None
    assert ask(Q.antihermitian(x + y), Q.imaginary(x) & Q.imaginary(y)) is True
    assert ask(Q.antihermitian(x + y), Q.imaginary(x) & Q.real(y)) is False
    assert ask(Q.antihermitian(x + y), Q.real(x) & Q.complex(y)) is None
    assert ask(Q.antihermitian(x + y), Q.real(x) & Q.real(y)) is False
    # Multiplication by I swaps antihermitian <-> hermitian.
    assert ask(Q.antihermitian(I*x), Q.real(x)) is True
    assert ask(Q.antihermitian(I*x), Q.antihermitian(x)) is False
    assert ask(Q.antihermitian(I*x), Q.complex(x)) is None
    assert ask(Q.antihermitian(x*y), Q.antihermitian(x) & Q.real(y)) is True
    # Three-term sums.
    assert ask(Q.antihermitian(x + y + z),
               Q.real(x) & Q.real(y) & Q.real(z)) is False
    assert ask(Q.antihermitian(x + y + z),
               Q.real(x) & Q.real(y) & Q.imaginary(z)) is None
    assert ask(Q.antihermitian(x + y + z),
               Q.real(x) & Q.imaginary(y) & Q.imaginary(z)) is False
    assert ask(Q.antihermitian(x + y + z),
               Q.imaginary(x) & Q.imaginary(y) & Q.imaginary(z)) is True
def test_imaginary():
    """Assertion table for ``ask(Q.imaginary(expr), facts)``.

    "Imaginary" here means purely imaginary (nonzero real part excluded).
    Covers sums, I-scalings, powers with various base/exponent facts,
    logarithms and exponentials.
    """
    # Trivial powers of zero evaluate to non-imaginary values.
    assert ask(Q.imaginary(0**I)) is False
    assert ask(Q.imaginary(0**(-I))) is False
    assert ask(Q.imaginary(x)) is None
    assert ask(Q.imaginary(x), Q.real(x)) is False
    assert ask(Q.imaginary(x), Q.prime(x)) is False
    # Real shifts of real/imaginary values are never purely imaginary.
    assert ask(Q.imaginary(x + 1), Q.real(x)) is False
    assert ask(Q.imaginary(x + 1), Q.imaginary(x)) is False
    assert ask(Q.imaginary(x + I), Q.real(x)) is False
    assert ask(Q.imaginary(x + I), Q.imaginary(x)) is True
    # Two-term sums.
    assert ask(Q.imaginary(x + y), Q.imaginary(x) & Q.imaginary(y)) is True
    assert ask(Q.imaginary(x + y), Q.real(x) & Q.real(y)) is False
    assert ask(Q.imaginary(x + y), Q.imaginary(x) & Q.real(y)) is False
    assert ask(Q.imaginary(x + y), Q.complex(x) & Q.real(y)) is None
    # Three-term sums.
    assert ask(
        Q.imaginary(x + y + z), Q.real(x) & Q.real(y) & Q.real(z)) is False
    assert ask(Q.imaginary(x + y + z),
               Q.real(x) & Q.real(y) & Q.imaginary(z)) is None
    assert ask(Q.imaginary(x + y + z),
               Q.real(x) & Q.imaginary(y) & Q.imaginary(z)) is False
    # Multiplication by I swaps real <-> imaginary.
    assert ask(Q.imaginary(I*x), Q.real(x)) is True
    assert ask(Q.imaginary(I*x), Q.imaginary(x)) is False
    assert ask(Q.imaginary(I*x), Q.complex(x)) is None
    assert ask(Q.imaginary(x*y), Q.imaginary(x) & Q.real(y)) is True
    assert ask(Q.imaginary(x*y), Q.real(x) & Q.real(y)) is False
    # Powers of I: parity of the exponent decides.
    assert ask(Q.imaginary(I**x), Q.negative(x)) is None
    assert ask(Q.imaginary(I**x), Q.positive(x)) is None
    assert ask(Q.imaginary(I**x), Q.even(x)) is False
    assert ask(Q.imaginary(I**x), Q.odd(x)) is True
    assert ask(Q.imaginary(I**x), Q.imaginary(x)) is False
    assert ask(Q.imaginary((2*I)**x), Q.imaginary(x)) is False
    assert ask(Q.imaginary(x**0), Q.imaginary(x)) is False
    # General powers x**y under combinations of base/exponent facts.
    assert ask(Q.imaginary(x**y), Q.imaginary(x) & Q.imaginary(y)) is None
    assert ask(Q.imaginary(x**y), Q.imaginary(x) & Q.real(y)) is None
    assert ask(Q.imaginary(x**y), Q.real(x) & Q.imaginary(y)) is None
    assert ask(Q.imaginary(x**y), Q.real(x) & Q.real(y)) is None
    assert ask(Q.imaginary(x**y), Q.imaginary(x) & Q.integer(y)) is None
    assert ask(Q.imaginary(x**y), Q.imaginary(y) & Q.integer(x)) is None
    assert ask(Q.imaginary(x**y), Q.imaginary(x) & Q.odd(y)) is True
    assert ask(Q.imaginary(x**y), Q.imaginary(x) & Q.rational(y)) is None
    assert ask(Q.imaginary(x**y), Q.imaginary(x) & Q.even(y)) is False
    assert ask(Q.imaginary(x**y), Q.real(x) & Q.integer(y)) is False
    assert ask(Q.imaginary(x**y), Q.positive(x) & Q.real(y)) is False
    assert ask(Q.imaginary(x**y), Q.negative(x) & Q.real(y)) is None
    assert ask(Q.imaginary(x**y), Q.negative(x) & Q.real(y) & ~Q.rational(y)) is False
    assert ask(Q.imaginary(x**y), Q.integer(x) & Q.imaginary(y)) is None
    # Negative base, rational exponent: imaginary iff 2*y is an odd integer.
    assert ask(Q.imaginary(x**y), Q.negative(x) & Q.rational(y) & Q.integer(2*y)) is True
    assert ask(Q.imaginary(x**y), Q.negative(x) & Q.rational(y) & ~Q.integer(2*y)) is False
    assert ask(Q.imaginary(x**y), Q.negative(x) & Q.rational(y)) is None
    assert ask(Q.imaginary(x**y), Q.real(x) & Q.rational(y) & ~Q.integer(2*y)) is False
    assert ask(Q.imaginary(x**y), Q.real(x) & Q.rational(y) & Q.integer(2*y)) is None
    # logarithm
    assert ask(Q.imaginary(log(I))) is True
    assert ask(Q.imaginary(log(2*I))) is False
    assert ask(Q.imaginary(log(I + 1))) is False
    assert ask(Q.imaginary(log(x)), Q.complex(x)) is None
    assert ask(Q.imaginary(log(x)), Q.imaginary(x)) is None
    assert ask(Q.imaginary(log(x)), Q.positive(x)) is False
    assert ask(Q.imaginary(log(exp(x))), Q.complex(x)) is None
    assert ask(Q.imaginary(log(exp(x))), Q.imaginary(x)) is None  # zoo/I/a+I*b
    assert ask(Q.imaginary(log(exp(I)))) is True
    # exponential
    assert ask(Q.imaginary(exp(x)**x), Q.imaginary(x)) is False
    # evaluate=False keeps the unsimplified Pow form that the handlers
    # must reason about symbolically.
    eq = Pow(exp(pi*I*x, evaluate=False), x, evaluate=False)
    assert ask(Q.imaginary(eq), Q.even(x)) is False
    eq = Pow(exp(pi*I*x/2, evaluate=False), x, evaluate=False)
    assert ask(Q.imaginary(eq), Q.odd(x)) is True
    assert ask(Q.imaginary(exp(3*I*pi*x)**x), Q.integer(x)) is False
    assert ask(Q.imaginary(exp(2*pi*I, evaluate=False))) is False
    assert ask(Q.imaginary(exp(pi*I/2, evaluate=False))) is True
    # issue 7886
    assert ask(Q.imaginary(Pow(x, S.One/4)), Q.real(x) & Q.negative(x)) is False
def test_integer():
assert ask(Q.integer(x)) is None
assert ask(Q.integer(x), Q.integer(x)) is True
assert ask(Q.integer(x), ~Q.integer(x)) is False
assert ask(Q.integer(x), ~Q.real(x)) is False
assert ask(Q.integer(x), ~Q.positive(x)) is None
assert ask(Q.integer(x), Q.even(x) | Q.odd(x)) is True
assert ask(Q.integer(2*x), Q.integer(x)) is True
assert ask(Q.integer(2*x), Q.even(x)) is True
assert ask(Q.integer(2*x), Q.prime(x)) is True
assert ask(Q.integer(2*x), Q.rational(x)) is None
assert ask(Q.integer(2*x), Q.real(x)) is None
assert ask(Q.integer(sqrt(2)*x), Q.integer(x)) is False
assert ask(Q.integer(sqrt(2)*x), Q.irrational(x)) is None
assert ask(Q.integer(x/2), Q.odd(x)) is False
assert ask(Q.integer(x/2), Q.even(x)) is True
assert ask(Q.integer(x/3), Q.odd(x)) is None
assert ask(Q.integer(x/3), Q.even(x)) is None
def test_negative():
assert ask(Q.negative(x), Q.negative(x)) is True
assert ask(Q.negative(x), Q.positive(x)) is False
assert ask(Q.negative(x), ~Q.real(x)) is False
assert ask(Q.negative(x), Q.prime(x)) is False
assert ask(Q.negative(x), ~Q.prime(x)) is None
assert ask(Q.negative(-x), Q.positive(x)) is True
assert ask(Q.negative(-x), ~Q.positive(x)) is None
assert ask(Q.negative(-x), Q.negative(x)) is False
assert ask(Q.negative(-x), Q.positive(x)) is True
assert ask(Q.negative(x - 1), Q.negative(x)) is True
assert ask(Q.negative(x + y)) is None
assert ask(Q.negative(x + y), Q.negative(x)) is None
assert ask(Q.negative(x + y), Q.negative(x) & Q.negative(y)) is True
assert ask(Q.negative(x + y), Q.negative(x) & Q.nonpositive(y)) is True
assert ask(Q.negative(2 + I)) is False
# although this could be False, it is representative of expressions
# that don't evaluate to a zero with precision
assert ask(Q.negative(cos(I)**2 + sin(I)**2 - 1)) is None
assert ask(Q.negative(-I + I*(cos(2)**2 + sin(2)**2))) is None
assert ask(Q.negative(x**2)) is None
assert ask(Q.negative(x**2), Q.real(x)) is False
assert ask(Q.negative(x**1.4), Q.real(x)) is None
assert ask(Q.negative(x**I), Q.positive(x)) is None
assert ask(Q.negative(x*y)) is None
assert ask(Q.negative(x*y), Q.positive(x) & Q.positive(y)) is False
assert ask(Q.negative(x*y), Q.positive(x) & Q.negative(y)) is True
assert ask(Q.negative(x*y), Q.complex(x) & Q.complex(y)) is None
assert ask(Q.negative(x**y)) is None
assert ask(Q.negative(x**y), Q.negative(x) & Q.even(y)) is False
assert ask(Q.negative(x**y), Q.negative(x) & Q.odd(y)) is True
assert ask(Q.negative(x**y), Q.positive(x) & Q.integer(y)) is False
assert ask(Q.negative(Abs(x))) is False
def test_nonzero():
assert ask(Q.nonzero(x)) is None
assert ask(Q.nonzero(x), Q.real(x)) is None
assert ask(Q.nonzero(x), Q.positive(x)) is True
assert ask(Q.nonzero(x), Q.negative(x)) is True
assert ask(Q.nonzero(x), Q.negative(x) | Q.positive(x)) is True
assert ask(Q.nonzero(x + y)) is None
assert ask(Q.nonzero(x + y), Q.positive(x) & Q.positive(y)) is True
assert ask(Q.nonzero(x + y), Q.positive(x) & Q.negative(y)) is None
assert ask(Q.nonzero(x + y), Q.negative(x) & Q.negative(y)) is True
assert ask(Q.nonzero(2*x)) is None
assert ask(Q.nonzero(2*x), Q.positive(x)) is True
assert ask(Q.nonzero(2*x), Q.negative(x)) is True
assert ask(Q.nonzero(x*y), Q.nonzero(x)) is None
assert ask(Q.nonzero(x*y), Q.nonzero(x) & Q.nonzero(y)) is True
assert ask(Q.nonzero(x**y), Q.nonzero(x)) is True
assert ask(Q.nonzero(Abs(x))) is None
assert ask(Q.nonzero(Abs(x)), Q.nonzero(x)) is True
assert ask(Q.nonzero(log(exp(2*I)))) is False
# although this could be False, it is representative of expressions
# that don't evaluate to a zero with precision
assert ask(Q.nonzero(cos(1)**2 + sin(1)**2 - 1)) is None
def test_zero():
assert ask(Q.zero(x)) is None
assert ask(Q.zero(x), Q.real(x)) is None
assert ask(Q.zero(x), Q.positive(x)) is False
assert ask(Q.zero(x), Q.negative(x)) is False
assert ask(Q.zero(x), Q.negative(x) | Q.positive(x)) is False
assert ask(Q.zero(x), Q.nonnegative(x) & Q.nonpositive(x)) is True
assert ask(Q.zero(x + y)) is None
assert ask(Q.zero(x + y), Q.positive(x) & Q.positive(y)) is False
assert ask(Q.zero(x + y), Q.positive(x) & Q.negative(y)) is None
assert ask(Q.zero(x + y), Q.negative(x) & Q.negative(y)) is False
assert ask(Q.zero(2*x)) is None
assert ask(Q.zero(2*x), Q.positive(x)) is False
assert ask(Q.zero(2*x), Q.negative(x)) is False
assert ask(Q.zero(x*y), Q.nonzero(x)) is None
assert ask(Q.zero(Abs(x))) is None
assert ask(Q.zero(Abs(x)), Q.zero(x)) is True
assert ask(Q.integer(x), Q.zero(x)) is True
assert ask(Q.even(x), Q.zero(x)) is True
assert ask(Q.odd(x), Q.zero(x)) is False
assert ask(Q.zero(x), Q.even(x)) is None
assert ask(Q.zero(x), Q.odd(x)) is False
assert ask(Q.zero(x) | Q.zero(y), Q.zero(x*y)) is True
def test_odd():
assert ask(Q.odd(x)) is None
assert ask(Q.odd(x), Q.odd(x)) is True
assert ask(Q.odd(x), Q.integer(x)) is None
assert ask(Q.odd(x), ~Q.integer(x)) is False
assert ask(Q.odd(x), Q.rational(x)) is None
assert ask(Q.odd(x), Q.positive(x)) is None
assert ask(Q.odd(-x), Q.odd(x)) is True
assert ask(Q.odd(2*x)) is None
assert ask(Q.odd(2*x), Q.integer(x)) is False
assert ask(Q.odd(2*x), Q.odd(x)) is False
assert ask(Q.odd(2*x), Q.irrational(x)) is False
assert ask(Q.odd(2*x), ~Q.integer(x)) is None
assert ask(Q.odd(3*x), Q.integer(x)) is None
assert ask(Q.odd(x/3), Q.odd(x)) is None
assert ask(Q.odd(x/3), Q.even(x)) is None
assert ask(Q.odd(x + 1), Q.even(x)) is True
assert ask(Q.odd(x + 2), Q.even(x)) is False
assert ask(Q.odd(x + 2), Q.odd(x)) is True
assert ask(Q.odd(3 - x), Q.odd(x)) is False
assert ask(Q.odd(3 - x), Q.even(x)) is True
assert ask(Q.odd(3 + x), Q.odd(x)) is False
assert ask(Q.odd(3 + x), Q.even(x)) is True
assert ask(Q.odd(x + y), Q.odd(x) & Q.odd(y)) is False
assert ask(Q.odd(x + y), Q.odd(x) & Q.even(y)) is True
assert ask(Q.odd(x - y), Q.even(x) & Q.odd(y)) is True
assert ask(Q.odd(x - y), Q.odd(x) & Q.odd(y)) is False
assert ask(Q.odd(x + y + z), Q.odd(x) & Q.odd(y) & Q.even(z)) is False
assert ask(Q.odd(x + y + z + t),
Q.odd(x) & Q.odd(y) & Q.even(z) & Q.integer(t)) is None
assert ask(Q.odd(2*x + 1), Q.integer(x)) is True
assert ask(Q.odd(2*x + y), Q.integer(x) & Q.odd(y)) is True
assert ask(Q.odd(2*x + y), Q.integer(x) & Q.even(y)) is False
assert ask(Q.odd(2*x + y), Q.integer(x) & Q.integer(y)) is None
assert ask(Q.odd(x*y), Q.odd(x) & Q.even(y)) is False
assert ask(Q.odd(x*y), Q.odd(x) & Q.odd(y)) is True
assert ask(Q.odd(2*x*y), Q.rational(x) & Q.rational(x)) is None
assert ask(Q.odd(2*x*y), Q.irrational(x) & Q.irrational(x)) is None
assert ask(Q.odd(Abs(x)), Q.odd(x)) is True
assert ask(Q.odd((-1)**n), Q.integer(n)) is True
assert ask(Q.odd(k**2), Q.even(k)) is False
assert ask(Q.odd(n**2), Q.odd(n)) is True
assert ask(Q.odd(3**k), Q.even(k)) is None
assert ask(Q.odd(k**m), Q.even(k) & Q.integer(m) & ~Q.negative(m)) is None
assert ask(Q.odd(n**m), Q.odd(n) & Q.integer(m) & ~Q.negative(m)) is True
assert ask(Q.odd(k**p), Q.even(k) & Q.integer(p) & Q.positive(p)) is False
assert ask(Q.odd(n**p), Q.odd(n) & Q.integer(p) & Q.positive(p)) is True
assert ask(Q.odd(m**k), Q.even(k) & Q.integer(m) & ~Q.negative(m)) is None
assert ask(Q.odd(p**k), Q.even(k) & Q.integer(p) & Q.positive(p)) is None
assert ask(Q.odd(m**n), Q.odd(n) & Q.integer(m) & ~Q.negative(m)) is None
assert ask(Q.odd(p**n), Q.odd(n) & Q.integer(p) & Q.positive(p)) is None
assert ask(Q.odd(k**x), Q.even(k)) is None
assert ask(Q.odd(n**x), Q.odd(n)) is None
assert ask(Q.odd(x*y), Q.integer(x) & Q.integer(y)) is None
assert ask(Q.odd(x*x), Q.integer(x)) is None
assert ask(Q.odd(x*(x + y)), Q.integer(x) & Q.odd(y)) is False
assert ask(Q.odd(x*(x + y)), Q.integer(x) & Q.even(y)) is None
@XFAIL
def test_oddness_in_ternary_integer_product_with_odd():
# Tests that oddness inference is independent of term ordering.
# Term ordering at the point of testing depends on SymPy's symbol order, so
# we try to force a different order by modifying symbol names.
assert ask(Q.odd(x*y*(y + z)), Q.integer(x) & Q.integer(y) & Q.odd(z)) is False
assert ask(Q.odd(y*x*(x + z)), Q.integer(x) & Q.integer(y) & Q.odd(z)) is False
def test_oddness_in_ternary_integer_product_with_even():
assert ask(Q.odd(x*y*(y + z)), Q.integer(x) & Q.integer(y) & Q.even(z)) is None
def test_prime():
assert ask(Q.prime(x), Q.prime(x)) is True
assert ask(Q.prime(x), ~Q.prime(x)) is False
assert ask(Q.prime(x), Q.integer(x)) is None
assert ask(Q.prime(x), ~Q.integer(x)) is False
assert ask(Q.prime(2*x), Q.integer(x)) is False
assert ask(Q.prime(x*y)) is None
assert ask(Q.prime(x*y), Q.prime(x)) is None
assert ask(Q.prime(x*y), Q.integer(x) & Q.integer(y)) is False
assert ask(Q.prime(x**2), Q.integer(x)) is False
assert ask(Q.prime(x**2), Q.prime(x)) is False
assert ask(Q.prime(x**y), Q.integer(x) & Q.integer(y)) is False
def test_positive():
assert ask(Q.positive(x), Q.positive(x)) is True
assert ask(Q.positive(x), Q.negative(x)) is False
assert ask(Q.positive(x), Q.nonzero(x)) is None
assert ask(Q.positive(-x), Q.positive(x)) is False
assert ask(Q.positive(-x), Q.negative(x)) is True
assert ask(Q.positive(x + y), Q.positive(x) & Q.positive(y)) is True
assert ask(Q.positive(x + y), Q.positive(x) & Q.nonnegative(y)) is True
assert ask(Q.positive(x + y), Q.positive(x) & Q.negative(y)) is None
assert ask(Q.positive(x + y), Q.positive(x) & Q.imaginary(y)) is False
assert ask(Q.positive(2*x), Q.positive(x)) is True
assumptions = Q.positive(x) & Q.negative(y) & Q.negative(z) & Q.positive(w)
assert ask(Q.positive(x*y*z)) is None
assert ask(Q.positive(x*y*z), assumptions) is True
assert ask(Q.positive(-x*y*z), assumptions) is False
assert ask(Q.positive(x**I), Q.positive(x)) is None
assert ask(Q.positive(x**2), Q.positive(x)) is True
assert ask(Q.positive(x**2), Q.negative(x)) is True
assert ask(Q.positive(x**3), Q.negative(x)) is False
assert ask(Q.positive(1/(1 + x**2)), Q.real(x)) is True
assert ask(Q.positive(2**I)) is False
assert ask(Q.positive(2 + I)) is False
# although this could be False, it is representative of expressions
# that don't evaluate to a zero with precision
assert ask(Q.positive(cos(I)**2 + sin(I)**2 - 1)) is None
assert ask(Q.positive(-I + I*(cos(2)**2 + sin(2)**2))) is None
#exponential
assert ask(Q.positive(exp(x)), Q.real(x)) is True
assert ask(~Q.negative(exp(x)), Q.real(x)) is True
assert ask(Q.positive(x + exp(x)), Q.real(x)) is None
# logarithm
assert ask(Q.positive(log(x)), Q.imaginary(x)) is False
assert ask(Q.positive(log(x)), Q.negative(x)) is False
assert ask(Q.positive(log(x)), Q.positive(x)) is None
assert ask(Q.positive(log(x + 2)), Q.positive(x)) is True
# factorial
assert ask(Q.positive(factorial(x)), Q.integer(x) & Q.positive(x))
assert ask(Q.positive(factorial(x)), Q.integer(x)) is None
#absolute value
assert ask(Q.positive(Abs(x))) is None # Abs(0) = 0
assert ask(Q.positive(Abs(x)), Q.positive(x)) is True
def test_nonpositive():
assert ask(Q.nonpositive(-1))
assert ask(Q.nonpositive(0))
assert ask(Q.nonpositive(1)) is False
assert ask(~Q.positive(x), Q.nonpositive(x))
assert ask(Q.nonpositive(x), Q.positive(x)) is False
assert ask(Q.nonpositive(sqrt(-1))) is False
assert ask(Q.nonpositive(x), Q.imaginary(x)) is False
def test_nonnegative():
assert ask(Q.nonnegative(-1)) is False
assert ask(Q.nonnegative(0))
assert ask(Q.nonnegative(1))
assert ask(~Q.negative(x), Q.nonnegative(x))
assert ask(Q.nonnegative(x), Q.negative(x)) is False
assert ask(Q.nonnegative(sqrt(-1))) is False
assert ask(Q.nonnegative(x), Q.imaginary(x)) is False
def test_real():
assert ask(Q.real(0**I)) is False
assert ask(Q.real(0**(-I))) is False
assert ask(Q.real(x)) is None
assert ask(Q.real(x), Q.real(x)) is True
assert ask(Q.real(x), Q.nonzero(x)) is True
assert ask(Q.real(x), Q.positive(x)) is True
assert ask(Q.real(x), Q.negative(x)) is True
assert ask(Q.real(x), Q.integer(x)) is True
assert ask(Q.real(x), Q.even(x)) is True
assert ask(Q.real(x), Q.prime(x)) is True
assert ask(Q.real(x/sqrt(2)), Q.real(x)) is True
assert ask(Q.real(x/sqrt(-2)), Q.real(x)) is False
assert ask(Q.real(x + 1), Q.real(x)) is True
assert ask(Q.real(x + I), Q.real(x)) is False
assert ask(Q.real(x + I), Q.complex(x)) is None
assert ask(Q.real(2*x), Q.real(x)) is True
assert ask(Q.real(I*x), Q.real(x)) is False
assert ask(Q.real(I*x), Q.imaginary(x)) is True
assert ask(Q.real(I*x), Q.complex(x)) is None
assert ask(Q.real(x**2), Q.real(x)) is True
assert ask(Q.real(sqrt(x)), Q.negative(x)) is False
assert ask(Q.real(x**y), Q.real(x) & Q.integer(y)) is True
assert ask(Q.real(x**y), Q.real(x) & Q.real(y)) is None
assert ask(Q.real(x**y), Q.positive(x) & Q.real(y)) is True
assert ask(Q.real(x**y), Q.imaginary(x) & Q.imaginary(y)) is None # I**I or (2*I)**I
assert ask(Q.real(x**y), Q.imaginary(x) & Q.real(y)) is None # I**1 or I**0
assert ask(Q.real(x**y), Q.real(x) & Q.imaginary(y)) is None # could be exp(2*pi*I) or 2**I
assert ask(Q.real(x**0), Q.imaginary(x)) is True
assert ask(Q.real(x**y), Q.real(x) & Q.integer(y)) is True
assert ask(Q.real(x**y), Q.positive(x) & Q.real(y)) is True
assert ask(Q.real(x**y), Q.real(x) & Q.rational(y)) is None
assert ask(Q.real(x**y), Q.imaginary(x) & Q.integer(y)) is None
assert ask(Q.real(x**y), Q.imaginary(x) & Q.odd(y)) is False
assert ask(Q.real(x**y), Q.imaginary(x) & Q.even(y)) is True
assert ask(Q.real(x**(y/z)), Q.real(x) & Q.real(y/z) & Q.rational(y/z) & Q.even(z) & Q.positive(x)) is True
assert ask(Q.real(x**(y/z)), Q.real(x) & Q.rational(y/z) & Q.even(z) & Q.negative(x)) is False
assert ask(Q.real(x**(y/z)), Q.real(x) & Q.integer(y/z)) is True
assert ask(Q.real(x**(y/z)), Q.real(x) & Q.real(y/z) & Q.positive(x)) is True
assert ask(Q.real(x**(y/z)), Q.real(x) & Q.real(y/z) & Q.negative(x)) is False
assert ask(Q.real((-I)**i), Q.imaginary(i)) is True
assert ask(Q.real(I**i), Q.imaginary(i)) is True
assert ask(Q.real(i**i), Q.imaginary(i)) is None # i might be 2*I
assert ask(Q.real(x**i), Q.imaginary(i)) is None # x could be 0
assert ask(Q.real(x**(I*pi/log(x))), Q.real(x)) is True
# trigonometric functions
assert ask(Q.real(sin(x))) is None
assert ask(Q.real(cos(x))) is None
assert ask(Q.real(sin(x)), Q.real(x)) is True
assert ask(Q.real(cos(x)), Q.real(x)) is True
# exponential function
assert ask(Q.real(exp(x))) is None
assert ask(Q.real(exp(x)), Q.real(x)) is True
assert ask(Q.real(x + exp(x)), Q.real(x)) is True
assert ask(Q.real(exp(2*pi*I, evaluate=False))) is True
assert ask(Q.real(exp(pi*I/2, evaluate=False))) is False
# logarithm
assert ask(Q.real(log(I))) is False
assert ask(Q.real(log(2*I))) is False
assert ask(Q.real(log(I + 1))) is False
assert ask(Q.real(log(x)), Q.complex(x)) is None
assert ask(Q.real(log(x)), Q.imaginary(x)) is False
assert ask(Q.real(log(exp(x))), Q.imaginary(x)) is False # exp(x) will be 0 or a + I*b
assert ask(Q.real(log(exp(x))), Q.complex(x)) is None
eq = Pow(exp(2*pi*I*x, evaluate=False), x, evaluate=False)
assert ask(Q.real(eq), Q.integer(x)) is True
assert ask(Q.real(exp(x)**x), Q.imaginary(x)) is True
assert ask(Q.real(exp(x)**x), Q.complex(x)) is None
# Q.complexes
assert ask(Q.real(re(x))) is True
assert ask(Q.real(im(x))) is True
def test_algebraic():
assert ask(Q.algebraic(x)) is None
assert ask(Q.algebraic(I)) is True
assert ask(Q.algebraic(2*I)) is True
assert ask(Q.algebraic(I/3)) is True
assert ask(Q.algebraic(sqrt(7))) is True
assert ask(Q.algebraic(2*sqrt(7))) is True
assert ask(Q.algebraic(sqrt(7)/3)) is True
assert ask(Q.algebraic(I*sqrt(3))) is True
assert ask(Q.algebraic(sqrt(1 + I*sqrt(3)))) is True
assert ask(Q.algebraic((1 + I*sqrt(3)**(S(17)/31)))) is True
assert ask(Q.algebraic((1 + I*sqrt(3)**(S(17)/pi)))) is False
for f in [exp, sin, tan, asin, atan, cos]:
assert ask(Q.algebraic(f(7))) is False
assert ask(Q.algebraic(f(7, evaluate=False))) is False
assert ask(Q.algebraic(f(0, evaluate=False))) is True
assert ask(Q.algebraic(f(x)), Q.algebraic(x)) is None
assert ask(Q.algebraic(f(x)), Q.algebraic(x) & Q.nonzero(x)) is False
for g in [log, acos]:
assert ask(Q.algebraic(g(7))) is False
assert ask(Q.algebraic(g(7, evaluate=False))) is False
assert ask(Q.algebraic(g(1, evaluate=False))) is True
assert ask(Q.algebraic(g(x)), Q.algebraic(x)) is None
assert ask(Q.algebraic(g(x)), Q.algebraic(x) & Q.nonzero(x - 1)) is False
for h in [cot, acot]:
assert ask(Q.algebraic(h(7))) is False
assert ask(Q.algebraic(h(7, evaluate=False))) is False
assert ask(Q.algebraic(h(x)), Q.algebraic(x)) is False
assert ask(Q.algebraic(sqrt(sin(7)))) is False
assert ask(Q.algebraic(sqrt(y + I*sqrt(7)))) is None
assert ask(Q.algebraic(2.47)) is True
assert ask(Q.algebraic(x), Q.transcendental(x)) is False
assert ask(Q.transcendental(x), Q.algebraic(x)) is False
def test_global():
"""Test ask with global assumptions"""
assert ask(Q.integer(x)) is None
global_assumptions.add(Q.integer(x))
assert ask(Q.integer(x)) is True
global_assumptions.clear()
assert ask(Q.integer(x)) is None
def test_custom_context():
"""Test ask with custom assumptions context"""
assert ask(Q.integer(x)) is None
local_context = AssumptionsContext()
local_context.add(Q.integer(x))
assert ask(Q.integer(x), context=local_context) is True
assert ask(Q.integer(x)) is None
def test_functions_in_assumptions():
assert ask(Q.negative(x), Q.real(x) >> Q.positive(x)) is False
assert ask(Q.negative(x), Equivalent(Q.real(x), Q.positive(x))) is False
assert ask(Q.negative(x), Xor(Q.real(x), Q.negative(x))) is False
def test_composite_ask():
assert ask(Q.negative(x) & Q.integer(x),
assumptions=Q.real(x) >> Q.positive(x)) is False
def test_composite_proposition():
assert ask(True) is True
assert ask(False) is False
assert ask(~Q.negative(x), Q.positive(x)) is True
assert ask(~Q.real(x), Q.commutative(x)) is None
assert ask(Q.negative(x) & Q.integer(x), Q.positive(x)) is False
assert ask(Q.negative(x) & Q.integer(x)) is None
assert ask(Q.real(x) | Q.integer(x), Q.positive(x)) is True
assert ask(Q.real(x) | Q.integer(x)) is None
assert ask(Q.real(x) >> Q.positive(x), Q.negative(x)) is False
assert ask(Implies(
Q.real(x), Q.positive(x), evaluate=False), Q.negative(x)) is False
assert ask(Implies(Q.real(x), Q.positive(x), evaluate=False)) is None
assert ask(Equivalent(Q.integer(x), Q.even(x)), Q.even(x)) is True
assert ask(Equivalent(Q.integer(x), Q.even(x))) is None
assert ask(Equivalent(Q.positive(x), Q.integer(x)), Q.integer(x)) is None
assert ask(Q.real(x) | Q.integer(x), Q.real(x) | Q.integer(x)) is True
def test_tautology():
assert ask(Q.real(x) | ~Q.real(x)) is True
assert ask(Q.real(x) & ~Q.real(x)) is False
def test_composite_assumptions():
assert ask(Q.real(x), Q.real(x) & Q.real(y)) is True
assert ask(Q.positive(x), Q.positive(x) | Q.positive(y)) is None
assert ask(Q.positive(x), Q.real(x) >> Q.positive(y)) is None
assert ask(Q.real(x), ~(Q.real(x) >> Q.real(y))) is True
def test_incompatible_resolutors():
class Prime2AskHandler(AskHandler):
@staticmethod
def Number(expr, assumptions):
return True
register_handler('prime', Prime2AskHandler)
raises(ValueError, lambda: ask(Q.prime(4)))
remove_handler('prime', Prime2AskHandler)
class InconclusiveHandler(AskHandler):
@staticmethod
def Number(expr, assumptions):
return None
register_handler('prime', InconclusiveHandler)
assert ask(Q.prime(3)) is True
def test_key_extensibility():
"""test that you can add keys to the ask system at runtime"""
# make sure the key is not defined
raises(AttributeError, lambda: ask(Q.my_key(x)))
class MyAskHandler(AskHandler):
@staticmethod
def Symbol(expr, assumptions):
return True
register_handler('my_key', MyAskHandler)
assert ask(Q.my_key(x)) is True
assert ask(Q.my_key(x + 1)) is None
remove_handler('my_key', MyAskHandler)
del Q.my_key
raises(AttributeError, lambda: ask(Q.my_key(x)))
def test_type_extensibility():
"""test that new types can be added to the ask system at runtime
We create a custom type MyType, and override ask Q.prime=True with handler
MyAskHandler for this type
TODO: test incompatible resolutors
"""
from sympy.core import Basic
class MyType(Basic):
pass
class MyAskHandler(AskHandler):
@staticmethod
def MyType(expr, assumptions):
return True
a = MyType()
register_handler(Q.prime, MyAskHandler)
assert ask(Q.prime(a)) is True
def test_single_fact_lookup():
known_facts = And(Implies(Q.integer, Q.rational),
Implies(Q.rational, Q.real),
Implies(Q.real, Q.complex))
known_facts_keys = set([Q.integer, Q.rational, Q.real, Q.complex])
known_facts_cnf = to_cnf(known_facts)
mapping = single_fact_lookup(known_facts_keys, known_facts_cnf)
assert mapping[Q.rational] == set([Q.real, Q.rational, Q.complex])
def test_compute_known_facts():
known_facts = And(Implies(Q.integer, Q.rational),
Implies(Q.rational, Q.real),
Implies(Q.real, Q.complex))
known_facts_keys = set([Q.integer, Q.rational, Q.real, Q.complex])
s = compute_known_facts(known_facts, known_facts_keys)
@slow
def test_known_facts_consistent():
""""Test that ask_generated.py is up-to-date"""
from sympy.assumptions.ask import get_known_facts, get_known_facts_keys
from os.path import abspath, dirname, join
filename = join(dirname(dirname(abspath(__file__))), 'ask_generated.py')
with open(filename, 'r') as f:
assert f.read() == \
compute_known_facts(get_known_facts(), get_known_facts_keys())
def test_Add_queries():
assert ask(Q.prime(12345678901234567890 + (cos(1)**2 + sin(1)**2))) is True
assert ask(Q.even(Add(S(2), S(2), evaluate=0))) is True
assert ask(Q.prime(Add(S(2), S(2), evaluate=0))) is False
assert ask(Q.integer(Add(S(2), S(2), evaluate=0))) is True
def test_positive_assuming():
with assuming(Q.positive(x + 1)):
assert not ask(Q.positive(x))
def test_issue_5421():
raises(TypeError, lambda: ask(pi/log(x), Q.real))
def test_issue_3906():
raises(TypeError, lambda: ask(Q.positive))
def test_issue_5833():
assert ask(Q.positive(log(x)**2), Q.positive(x)) is None
assert ask(~Q.negative(log(x)**2), Q.positive(x)) is True
def test_issue_6732():
raises(ValueError, lambda: ask(Q.positive(x), Q.positive(x) & Q.negative(x)))
raises(ValueError, lambda: ask(Q.negative(x), Q.positive(x) & Q.negative(x)))
def test_issue_7246():
assert ask(Q.positive(atan(p)), Q.positive(p)) is True
assert ask(Q.positive(atan(p)), Q.negative(p)) is False
assert ask(Q.positive(atan(p)), Q.zero(p)) is False
assert ask(Q.positive(atan(x))) is None
assert ask(Q.positive(asin(p)), Q.positive(p)) is None
assert ask(Q.positive(asin(p)), Q.zero(p)) is None
assert ask(Q.positive(asin(Rational(1, 7)))) is True
assert ask(Q.positive(asin(x)), Q.positive(x) & Q.nonpositive(x - 1)) is True
assert ask(Q.positive(asin(x)), Q.negative(x) & Q.nonnegative(x + 1)) is False
assert ask(Q.positive(acos(p)), Q.positive(p)) is None
assert ask(Q.positive(acos(Rational(1, 7)))) is True
assert ask(Q.positive(acos(x)), Q.nonnegative(x + 1) & Q.nonpositive(x - 1)) is True
assert ask(Q.positive(acos(x)), Q.nonnegative(x - 1)) is None
assert ask(Q.positive(acot(x)), Q.positive(x)) is True
assert ask(Q.positive(acot(x)), Q.real(x)) is True
assert ask(Q.positive(acot(x)), Q.imaginary(x)) is False
assert ask(Q.positive(acot(x))) is None
@XFAIL
def test_issue_7246_failing():
#Move this test to test_issue_7246 once
#the new assumptions module is improved.
assert ask(Q.positive(acos(x)), Q.zero(x)) is True
def test_deprecated_Q_bounded():
with raises(SymPyDeprecationWarning):
Q.bounded
def test_deprecated_Q_infinity():
with raises(SymPyDeprecationWarning):
Q.infinity
def test_check_old_assumption():
x = symbols('x', real=True)
assert ask(Q.real(x)) is True
assert ask(Q.imaginary(x)) is False
assert ask(Q.complex(x)) is True
x = symbols('x', imaginary=True)
assert ask(Q.real(x)) is False
assert ask(Q.imaginary(x)) is True
assert ask(Q.complex(x)) is True
x = symbols('x', complex=True)
assert ask(Q.real(x)) is None
assert ask(Q.complex(x)) is True
x = symbols('x', positive=True)
assert ask(Q.positive(x)) is True
assert ask(Q.negative(x)) is False
assert ask(Q.real(x)) is True
x = symbols('x', commutative=False)
assert ask(Q.commutative(x)) is False
x = symbols('x', negative=True)
assert ask(Q.positive(x)) is False
assert ask(Q.negative(x)) is True
x = symbols('x', nonnegative=True)
assert ask(Q.negative(x)) is False
assert ask(Q.positive(x)) is None
assert ask(Q.zero(x)) is None
x = symbols('x', finite=True)
assert ask(Q.finite(x)) is True
x = symbols('x', prime=True)
assert ask(Q.prime(x)) is True
assert ask(Q.composite(x)) is False
x = symbols('x', composite=True)
assert ask(Q.prime(x)) is False
assert ask(Q.composite(x)) is True
x = symbols('x', even=True)
assert ask(Q.even(x)) is True
assert ask(Q.odd(x)) is False
x = symbols('x', odd=True)
assert ask(Q.even(x)) is False
assert ask(Q.odd(x)) is True
x = symbols('x', nonzero=True)
assert ask(Q.nonzero(x)) is True
assert ask(Q.zero(x)) is False
x = symbols('x', zero=True)
assert ask(Q.zero(x)) is True
x = symbols('x', integer=True)
assert ask(Q.integer(x)) is True
x = symbols('x', rational=True)
assert ask(Q.rational(x)) is True
assert ask(Q.irrational(x)) is False
x = symbols('x', irrational=True)
assert ask(Q.irrational(x)) is True
assert ask(Q.rational(x)) is False
def test_issue_9636():
assert ask(Q.integer(1.0)) is False
assert ask(Q.prime(3.0)) is False
assert ask(Q.composite(4.0)) is False
assert ask(Q.even(2.0)) is False
assert ask(Q.odd(3.0)) is False
| bsd-3-clause |
Godiyos/python-for-android | python-build/python-libs/gdata/build/lib/gdata/Crypto/Util/number.py | 232 | 5495 | #
# number.py : Number-theoretic functions
#
# Part of the Python Cryptography Toolkit
#
# Distribute and use freely; there are no restrictions on further
# dissemination and usage except those imposed by the laws of your
# country of residence. This software is provided "as is" without
# warranty of fitness for use or suitability for any purpose, express
# or implied. Use at your own risk or not at all.
#
__revision__ = "$Id: number.py,v 1.13 2003/04/04 18:21:07 akuchling Exp $"
bignum = long
try:
from Crypto.PublicKey import _fastmath
except ImportError:
_fastmath = None
# Commented out and replaced with faster versions below
## def long2str(n):
## s=''
## while n>0:
## s=chr(n & 255)+s
## n=n>>8
## return s
## import types
## def str2long(s):
## if type(s)!=types.StringType: return s # Integers will be left alone
## return reduce(lambda x,y : x*256+ord(y), s, 0L)
def size (N):
"""size(N:long) : int
Returns the size of the number N in bits.
"""
bits, power = 0,1L
while N >= power:
bits += 1
power = power << 1
return bits
def getRandomNumber(N, randfunc):
"""getRandomNumber(N:int, randfunc:callable):long
Return an N-bit random number."""
S = randfunc(N/8)
odd_bits = N % 8
if odd_bits != 0:
char = ord(randfunc(1)) >> (8-odd_bits)
S = chr(char) + S
value = bytes_to_long(S)
value |= 2L ** (N-1) # Ensure high bit is set
assert size(value) >= N
return value
def GCD(x,y):
"""GCD(x:long, y:long): long
Return the GCD of x and y.
"""
x = abs(x) ; y = abs(y)
while x > 0:
x, y = y % x, x
return y
def inverse(u, v):
"""inverse(u:long, u:long):long
Return the inverse of u mod v.
"""
u3, v3 = long(u), long(v)
u1, v1 = 1L, 0L
while v3 > 0:
q=u3 / v3
u1, v1 = v1, u1 - v1*q
u3, v3 = v3, u3 - v3*q
while u1<0:
u1 = u1 + v
return u1
# Given a number of bits to generate and a random generation function,
# find a prime number of the appropriate size.
def getPrime(N, randfunc):
"""getPrime(N:int, randfunc:callable):long
Return a random N-bit prime number.
"""
number=getRandomNumber(N, randfunc) | 1
while (not isPrime(number)):
number=number+2
return number
def isPrime(N):
"""isPrime(N:long):bool
Return true if N is prime.
"""
if N == 1:
return 0
if N in sieve:
return 1
for i in sieve:
if (N % i)==0:
return 0
# Use the accelerator if available
if _fastmath is not None:
return _fastmath.isPrime(N)
# Compute the highest bit that's set in N
N1 = N - 1L
n = 1L
while (n<N):
n=n<<1L
n = n >> 1L
# Rabin-Miller test
for c in sieve[:7]:
a=long(c) ; d=1L ; t=n
while (t): # Iterate over the bits in N1
x=(d*d) % N
if x==1L and d!=1L and d!=N1:
return 0 # Square root of 1 found
if N1 & t:
d=(x*a) % N
else:
d=x
t = t >> 1L
if d!=1L:
return 0
return 1
# Small primes used for checking primality; these are all the primes
# less than 256. This should be enough to eliminate most of the odd
# numbers before needing to do a Rabin-Miller test at all.
sieve=[2, 3, 5, 7, 11, 13, 17, 19, 23, 29, 31, 37, 41, 43, 47, 53, 59,
61, 67, 71, 73, 79, 83, 89, 97, 101, 103, 107, 109, 113, 127,
131, 137, 139, 149, 151, 157, 163, 167, 173, 179, 181, 191, 193,
197, 199, 211, 223, 227, 229, 233, 239, 241, 251]
# Improved conversion functions contributed by Barry Warsaw, after
# careful benchmarking
import struct
def long_to_bytes(n, blocksize=0):
"""long_to_bytes(n:long, blocksize:int) : string
Convert a long integer to a byte string.
If optional blocksize is given and greater than zero, pad the front of the
byte string with binary zeros so that the length is a multiple of
blocksize.
"""
# after much testing, this algorithm was deemed to be the fastest
s = ''
n = long(n)
pack = struct.pack
while n > 0:
s = pack('>I', n & 0xffffffffL) + s
n = n >> 32
# strip off leading zeros
for i in range(len(s)):
if s[i] != '\000':
break
else:
# only happens when n == 0
s = '\000'
i = 0
s = s[i:]
# add back some pad bytes. this could be done more efficiently w.r.t. the
# de-padding being done above, but sigh...
if blocksize > 0 and len(s) % blocksize:
s = (blocksize - len(s) % blocksize) * '\000' + s
return s
def bytes_to_long(s):
"""bytes_to_long(string) : long
Convert a byte string to a long integer.
This is (essentially) the inverse of long_to_bytes().
"""
acc = 0L
unpack = struct.unpack
length = len(s)
if length % 4:
extra = (4 - length % 4)
s = '\000' * extra + s
length = length + extra
for i in range(0, length, 4):
acc = (acc << 32) + unpack('>I', s[i:i+4])[0]
return acc
# For backwards compatibility...
import warnings
def long2str(n, blocksize=0):
warnings.warn("long2str() has been replaced by long_to_bytes()")
return long_to_bytes(n, blocksize)
def str2long(s):
warnings.warn("str2long() has been replaced by bytes_to_long()")
return bytes_to_long(s)
| apache-2.0 |
gurneyalex/account-financial-tools | __unported__/account_credit_control/__init__.py | 10 | 1159 | # -*- coding: utf-8 -*-
##############################################################################
#
# Author: Nicolas Bessi, Guewen Baconnier
# Copyright 2012 Camptocamp SA
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from . import mail
from . import run
from . import line
from . import account
from . import partner
from . import policy
from . import company
from . import wizard
from . import report
from . import invoice
| agpl-3.0 |
weidongxu84/info-gatherer | requests/packages/charade/langhungarianmodel.py | 184 | 12761 | ######################## BEGIN LICENSE BLOCK ########################
# The Original Code is Mozilla Communicator client code.
#
# The Initial Developer of the Original Code is
# Netscape Communications Corporation.
# Portions created by the Initial Developer are Copyright (C) 1998
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
# Mark Pilgrim - port to Python
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301 USA
######################### END LICENSE BLOCK #########################
# 255: Control characters that usually does not exist in any text
# 254: Carriage/Return
# 253: symbol (punctuation) that does not belong to word
# 252: 0 - 9
# Character Mapping Table:
# Maps each ISO-8859-2 (Latin-2) byte value (0-255) to a frequency-rank
# "order" value; 255/254/253/252 are the special markers described in the
# legend above.  Consumed as 'charToOrderMap' in Latin2HungarianModel below.
Latin2_HungarianCharToOrderMap = (
255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255,  # 00
255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,  # 10
253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,  # 20
252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253,  # 30
253, 28, 40, 54, 45, 32, 50, 49, 38, 39, 53, 36, 41, 34, 35, 47,
 46, 71, 43, 33, 37, 57, 48, 64, 68, 55, 52,253,253,253,253,253,
253,  2, 18, 26, 17,  1, 27, 12, 20,  9, 22,  7,  6, 13,  4,  8,
 23, 67, 10,  5,  3, 21, 19, 65, 62, 16, 11,253,253,253,253,253,
159,160,161,162,163,164,165,166,167,168,169,170,171,172,173,174,
175,176,177,178,179,180,181,182,183,184,185,186,187,188,189,190,
191,192,193,194,195,196,197, 75,198,199,200,201,202,203,204,205,
 79,206,207,208,209,210,211,212,213,214,215,216,217,218,219,220,
221, 51, 81,222, 78,223,224,225,226, 44,227,228,229, 61,230,231,
232,233,234, 58,235, 66, 59,236,237,238, 60, 69, 63,239,240,241,
 82, 14, 74,242, 70, 80,243, 72,244, 15, 83, 77, 84, 30, 76, 85,
245,246,247, 25, 73, 42, 24,248,249,250, 31, 56, 29,251,252,253,
)
# Same byte-to-order mapping as above, but for the windows-1250 code page.
# Consumed as 'charToOrderMap' in Win1250HungarianModel below.
win1250HungarianCharToOrderMap = (
255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255,  # 00
255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,  # 10
253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,  # 20
252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253,  # 30
253, 28, 40, 54, 45, 32, 50, 49, 38, 39, 53, 36, 41, 34, 35, 47,
 46, 72, 43, 33, 37, 57, 48, 64, 68, 55, 52,253,253,253,253,253,
253,  2, 18, 26, 17,  1, 27, 12, 20,  9, 22,  7,  6, 13,  4,  8,
 23, 67, 10,  5,  3, 21, 19, 65, 62, 16, 11,253,253,253,253,253,
161,162,163,164,165,166,167,168,169,170,171,172,173,174,175,176,
177,178,179,180, 78,181, 69,182,183,184,185,186,187,188,189,190,
191,192,193,194,195,196,197, 76,198,199,200,201,202,203,204,205,
 81,206,207,208,209,210,211,212,213,214,215,216,217,218,219,220,
221, 51, 83,222, 80,223,224,225,226, 44,227,228,229, 61,230,231,
232,233,234, 58,235, 66, 59,236,237,238, 60, 70, 63,239,240,241,
 84, 14, 75,242, 71, 82,243, 73,244, 15, 85, 79, 86, 30, 77, 87,
245,246,247, 25, 74, 42, 24,248,249,250, 31, 56, 29,251,252,253,
)
# Model Table:
# total sequences: 100%
# first 512 sequences: 94.7368%
# first 1024 sequences: 5.2623%
# rest sequences: 0.8894%
# negative sequences: 0.0009%
# Flattened 2-gram precedence matrix consumed as 'precedenceMatrix' by the
# model dicts below.  Entries are small category codes (0-3); presumably
# indexed by pairs of the order values from the maps above — confirm against
# charade's sequence-model consumer before relying on the exact layout.
HungarianLangModel = (
0,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,1,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,
3,3,3,3,3,3,3,3,3,3,2,3,3,3,3,3,3,3,3,2,2,3,3,1,1,2,2,2,2,2,1,2,
3,2,2,3,3,3,3,3,2,3,3,3,3,3,3,1,2,3,3,3,3,2,3,3,1,1,3,3,0,1,1,1,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,
3,2,1,3,3,3,3,3,2,3,3,3,3,3,1,1,2,3,3,3,3,3,3,3,1,1,3,2,0,1,1,1,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,
3,3,3,3,3,3,3,3,3,3,3,1,1,2,3,3,3,1,3,3,3,3,3,1,3,3,2,2,0,3,2,3,
0,0,0,0,0,0,0,0,0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,
3,3,3,3,3,3,2,3,3,3,2,3,3,2,3,3,3,3,3,2,3,3,2,2,3,2,3,2,0,3,2,2,
0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,1,0,
3,3,3,3,3,3,2,3,3,3,3,3,2,3,3,3,1,2,3,2,2,3,1,2,3,3,2,2,0,3,3,3,
0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,
3,3,3,3,3,3,3,3,3,3,2,2,3,3,3,3,3,3,2,3,3,3,3,2,3,3,3,3,0,2,3,2,
0,0,0,1,1,0,0,0,0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,
3,3,3,3,3,3,3,3,3,3,3,1,1,1,3,3,2,1,3,2,2,3,2,1,3,2,2,1,0,3,3,1,
0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,
3,2,2,3,3,3,3,3,1,2,3,3,3,3,1,2,1,3,3,3,3,2,2,3,1,1,3,2,0,1,1,1,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,
3,3,3,3,3,3,3,3,2,2,3,3,3,3,3,2,1,3,3,3,3,3,2,2,1,3,3,3,0,1,1,2,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,1,0,
3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,3,3,3,2,3,3,2,3,3,3,2,0,3,2,3,
0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,1,0,
3,3,3,3,3,3,2,3,3,3,2,3,2,3,3,3,1,3,2,2,2,3,1,1,3,3,1,1,0,3,3,2,
0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,
3,3,3,3,3,3,3,2,3,3,3,2,3,2,3,3,3,2,3,3,3,3,3,1,2,3,2,2,0,2,2,2,
0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,
3,3,3,2,2,2,3,1,3,3,2,2,1,3,3,3,1,1,3,1,2,3,2,3,2,2,2,1,0,2,2,2,
0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,
3,1,1,3,3,3,3,3,1,2,3,3,3,3,1,2,1,3,3,3,2,2,3,2,1,0,3,2,0,1,1,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,1,1,3,3,3,3,3,1,2,3,3,3,3,1,1,0,3,3,3,3,0,2,3,0,0,2,1,0,1,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,3,3,3,3,3,2,2,3,3,2,2,2,2,3,3,0,1,2,3,2,3,2,2,3,2,1,2,0,2,2,2,
0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,
3,3,3,3,3,3,1,2,3,3,3,2,1,2,3,3,2,2,2,3,2,3,3,1,3,3,1,1,0,2,3,2,
0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,
3,3,3,1,2,2,2,2,3,3,3,1,1,1,3,3,1,1,3,1,1,3,2,1,2,3,1,1,0,2,2,2,
0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,
3,3,3,2,1,2,1,1,3,3,1,1,1,1,3,3,1,1,2,2,1,2,1,1,2,2,1,1,0,2,2,1,
0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,
3,3,3,1,1,2,1,1,3,3,1,0,1,1,3,3,2,0,1,1,2,3,1,0,2,2,1,0,0,1,3,2,
0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,
3,2,1,3,3,3,3,3,1,2,3,2,3,3,2,1,1,3,2,3,2,1,2,2,0,1,2,1,0,0,1,1,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,
3,3,3,3,2,2,2,2,3,1,2,2,1,1,3,3,0,3,2,1,2,3,2,1,3,3,1,1,0,2,1,3,
0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,
3,3,3,2,2,2,3,2,3,3,3,2,1,1,3,3,1,1,1,2,2,3,2,3,2,2,2,1,0,2,2,1,
0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,
1,0,0,3,3,3,3,3,0,0,3,3,2,3,0,0,0,2,3,3,1,0,1,2,0,0,1,1,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,1,2,3,3,3,3,3,1,2,3,3,2,2,1,1,0,3,3,2,2,1,2,2,1,0,2,2,0,1,1,1,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,3,2,2,1,3,1,2,3,3,2,2,1,1,2,2,1,1,1,1,3,2,1,1,1,1,2,1,0,1,2,1,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,1,0,0,0,0,0,0,0,0,0,
2,3,3,1,1,1,1,1,3,3,3,0,1,1,3,3,1,1,1,1,1,2,2,0,3,1,1,2,0,2,1,1,
0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,
3,1,0,1,2,1,2,2,0,1,2,3,1,2,0,0,0,2,1,1,1,1,1,2,0,0,1,1,0,0,0,0,
1,2,1,2,2,2,1,2,1,2,0,2,0,2,2,1,1,2,1,1,2,1,1,1,0,1,0,0,0,1,1,0,
1,1,1,2,3,2,3,3,0,1,2,2,3,1,0,1,0,2,1,2,2,0,1,1,0,0,1,1,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
1,0,0,3,3,2,2,1,0,0,3,2,3,2,0,0,0,1,1,3,0,0,1,1,0,0,2,1,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,1,1,2,2,3,3,1,0,1,3,2,3,1,1,1,0,1,1,1,1,1,3,1,0,0,2,2,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,1,1,1,2,2,2,1,0,1,2,3,3,2,0,0,0,2,1,1,1,2,1,1,1,0,1,1,1,0,0,0,
1,2,2,2,2,2,1,1,1,2,0,2,1,1,1,1,1,2,1,1,1,1,1,1,0,1,1,1,0,0,1,1,
3,2,2,1,0,0,1,1,2,2,0,3,0,1,2,1,1,0,0,1,1,1,0,1,1,1,1,0,2,1,1,1,
2,2,1,1,1,2,1,2,1,1,1,1,1,1,1,2,1,1,1,2,3,1,1,1,1,1,1,1,1,1,0,1,
2,3,3,0,1,0,0,0,3,3,1,0,0,1,2,2,1,0,0,0,0,2,0,0,1,1,1,0,2,1,1,1,
2,1,1,1,1,1,1,2,1,1,0,1,1,0,1,1,1,0,1,2,1,1,0,1,1,1,1,1,1,1,0,1,
2,3,3,0,1,0,0,0,2,2,0,0,0,0,1,2,2,0,0,0,0,1,0,0,1,1,0,0,2,0,1,0,
2,1,1,1,1,2,1,1,1,1,1,1,1,2,1,1,1,1,1,1,1,1,1,2,0,1,1,1,1,1,0,1,
3,2,2,0,1,0,1,0,2,3,2,0,0,1,2,2,1,0,0,1,1,1,0,0,2,1,0,1,2,2,1,1,
2,1,1,1,1,1,1,2,1,1,1,1,1,1,0,2,1,0,1,1,0,1,1,1,0,1,1,2,1,1,0,1,
2,2,2,0,0,1,0,0,2,2,1,1,0,0,2,1,1,0,0,0,1,2,0,0,2,1,0,0,2,1,1,1,
2,1,1,1,1,2,1,2,1,1,1,2,2,1,1,2,1,1,1,2,1,1,1,1,1,1,1,1,1,1,0,1,
1,2,3,0,0,0,1,0,3,2,1,0,0,1,2,1,1,0,0,0,0,2,1,0,1,1,0,0,2,1,2,1,
1,1,0,0,0,1,0,1,1,1,1,1,2,0,0,1,0,0,0,2,0,0,1,1,1,1,1,1,1,1,0,1,
3,0,0,2,1,2,2,1,0,0,2,1,2,2,0,0,0,2,1,1,1,0,1,1,0,0,1,1,2,0,0,0,
1,2,1,2,2,1,1,2,1,2,0,1,1,1,1,1,1,1,1,1,2,1,1,0,0,1,1,1,1,0,0,1,
1,3,2,0,0,0,1,0,2,2,2,0,0,0,2,2,1,0,0,0,0,3,1,1,1,1,0,0,2,1,1,1,
2,1,0,1,1,1,0,1,1,1,1,1,1,1,0,2,1,0,0,1,0,1,1,0,1,1,1,1,1,1,0,1,
2,3,2,0,0,0,1,0,2,2,0,0,0,0,2,1,1,0,0,0,0,2,1,0,1,1,0,0,2,1,1,0,
2,1,1,1,1,2,1,2,1,2,0,1,1,1,0,2,1,1,1,2,1,1,1,1,0,1,1,1,1,1,0,1,
3,1,1,2,2,2,3,2,1,1,2,2,1,1,0,1,0,2,2,1,1,1,1,1,0,0,1,1,0,1,1,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
2,2,2,0,0,0,0,0,2,2,0,0,0,0,2,2,1,0,0,0,1,1,0,0,1,2,0,0,2,1,1,1,
2,2,1,1,1,2,1,2,1,1,0,1,1,1,1,2,1,1,1,2,1,1,1,1,0,1,2,1,1,1,0,1,
1,0,0,1,2,3,2,1,0,0,2,0,1,1,0,0,0,1,1,1,1,0,1,1,0,0,1,0,0,0,0,0,
1,2,1,2,1,2,1,1,1,2,0,2,1,1,1,0,1,2,0,0,1,1,1,0,0,0,0,0,0,0,0,0,
2,3,2,0,0,0,0,0,1,1,2,1,0,0,1,1,1,0,0,0,0,2,0,0,1,1,0,0,2,1,1,1,
2,1,1,1,1,1,1,2,1,0,1,1,1,1,0,2,1,1,1,1,1,1,0,1,0,1,1,1,1,1,0,1,
1,2,2,0,1,1,1,0,2,2,2,0,0,0,3,2,1,0,0,0,1,1,0,0,1,1,0,1,1,1,0,0,
1,1,0,1,1,1,1,1,1,1,1,2,1,1,1,1,1,1,1,2,1,1,1,0,0,1,1,1,0,1,0,1,
2,1,0,2,1,1,2,2,1,1,2,1,1,1,0,0,0,1,1,0,1,1,1,1,0,0,1,1,1,0,0,0,
1,2,2,2,2,2,1,1,1,2,0,2,1,1,1,1,1,1,1,1,1,1,1,1,0,1,1,0,0,0,1,0,
1,2,3,0,0,0,1,0,2,2,0,0,0,0,2,2,0,0,0,0,0,1,0,0,1,0,0,0,2,0,1,0,
2,1,1,1,1,1,0,2,0,0,0,1,2,1,1,1,1,0,1,2,0,1,0,1,0,1,1,1,0,1,0,1,
2,2,2,0,0,0,1,0,2,1,2,0,0,0,1,1,2,0,0,0,0,1,0,0,1,1,0,0,2,1,0,1,
2,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,2,0,1,1,1,1,1,0,1,
1,2,2,0,0,0,1,0,2,2,2,0,0,0,1,1,0,0,0,0,0,1,1,0,2,0,0,1,1,1,0,1,
1,0,1,1,1,1,1,1,0,1,1,1,1,0,0,1,0,0,1,1,0,1,0,1,1,1,1,1,0,0,0,1,
1,0,0,1,0,1,2,1,0,0,1,1,1,2,0,0,0,1,1,0,1,0,1,1,0,0,1,0,0,0,0,0,
0,2,1,2,1,1,1,1,1,2,0,2,0,1,1,0,1,2,1,0,1,1,1,0,0,0,0,0,0,1,0,0,
2,1,1,0,1,2,0,0,1,1,1,0,0,0,1,1,0,0,0,0,0,1,0,0,1,0,0,0,2,1,0,1,
2,2,1,1,1,1,1,2,1,1,0,1,1,1,1,2,1,1,1,2,1,1,0,1,0,1,1,1,1,1,0,1,
1,2,2,0,0,0,0,0,1,1,0,0,0,0,2,1,0,0,0,0,0,2,0,0,2,2,0,0,2,0,0,1,
2,1,1,1,1,1,1,1,0,1,1,0,1,1,0,1,0,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,
1,1,2,0,0,3,1,0,2,1,1,1,0,0,1,1,1,0,0,0,1,1,0,0,0,1,0,0,1,0,1,0,
1,2,1,0,1,1,1,2,1,1,0,1,1,1,1,1,0,0,0,1,1,1,1,1,0,1,0,0,0,1,0,0,
2,1,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,1,0,1,0,0,0,1,0,0,0,0,2,0,0,0,
2,1,1,1,1,1,1,1,1,1,0,1,1,1,1,1,1,1,1,1,2,1,1,0,0,1,1,1,1,1,0,1,
2,1,1,1,2,1,1,1,0,1,1,2,1,0,0,0,0,1,1,1,1,0,1,0,0,0,0,1,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
1,1,0,1,1,1,1,1,0,0,1,1,2,1,0,0,0,1,1,0,0,0,1,1,0,0,1,0,1,0,0,0,
1,2,1,1,1,1,1,1,1,1,0,1,0,1,1,1,1,1,1,0,1,1,1,0,0,0,0,0,0,1,0,0,
2,0,0,0,1,1,1,1,0,0,1,1,0,0,0,0,0,1,1,1,2,0,0,1,0,0,1,0,1,0,0,0,
0,1,1,1,1,1,1,1,1,2,0,1,1,1,1,0,1,1,1,0,1,1,1,0,0,0,0,0,0,0,0,0,
1,0,0,1,1,1,1,1,0,0,2,1,0,1,0,0,0,1,0,1,0,0,0,0,0,0,1,0,0,0,0,0,
0,1,1,1,1,1,1,0,1,1,0,1,0,1,1,0,1,1,0,0,1,1,1,0,0,0,0,0,0,0,0,0,
1,0,0,1,1,1,0,0,0,0,1,0,2,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,
0,1,1,1,1,1,0,0,1,1,0,1,0,1,0,0,1,1,1,0,1,1,1,0,0,0,0,0,0,0,0,0,
0,0,0,1,0,0,0,0,0,0,1,1,2,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,1,1,1,0,1,0,0,1,1,0,1,0,1,1,0,1,1,1,0,1,1,1,0,0,0,0,0,0,0,0,0,
2,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,0,0,1,0,0,1,0,1,0,1,1,1,0,0,1,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
1,0,0,1,1,1,1,0,0,0,1,1,1,0,0,0,0,1,1,1,0,0,0,0,0,0,0,0,0,0,0,0,
0,1,1,1,1,1,1,0,1,1,0,1,0,1,0,0,1,1,0,0,1,1,0,0,0,0,0,0,0,0,0,0,
)
# Language-model descriptor for Hungarian text encoded as ISO-8859-2.
# 'mTypicalPositiveRatio' matches the "first 512 sequences" figure quoted
# in the statistics comment above the matrix.
Latin2HungarianModel = {
    'charToOrderMap': Latin2_HungarianCharToOrderMap,
    'precedenceMatrix': HungarianLangModel,
    'mTypicalPositiveRatio': 0.947368,
    'keepEnglishLetter': True,
    'charsetName': "ISO-8859-2"
}

# Same matrix and ratio, paired with the windows-1250 byte-order map.
Win1250HungarianModel = {
    'charToOrderMap': win1250HungarianCharToOrderMap,
    'precedenceMatrix': HungarianLangModel,
    'mTypicalPositiveRatio': 0.947368,
    'keepEnglishLetter': True,
    'charsetName': "windows-1250"
}

# flake8: noqa
| mit |
bellowsj/aiopogo | aiopogo/pogoprotos/data/pokemon_display_pb2.py | 1 | 4063 | # Generated by the protocol buffer compiler. DO NOT EDIT!
# source: pogoprotos/data/pokemon_display.proto
# NOTE(review): machine-generated by protoc — edit the .proto source, not
# this file; any hand edits are lost on regeneration.
import sys
# _b: identity on Python 2, latin-1 encoder on Python 3, used to wrap the
# serialized descriptor literal below as bytes on both versions.
_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
from google.protobuf import descriptor_pb2
# @@protoc_insertion_point(imports)

_sym_db = _symbol_database.Default()


from pogoprotos.enums import costume_pb2 as pogoprotos_dot_enums_dot_costume__pb2
from pogoprotos.enums import form_pb2 as pogoprotos_dot_enums_dot_form__pb2
from pogoprotos.enums import gender_pb2 as pogoprotos_dot_enums_dot_gender__pb2


# File-level descriptor; serialized_pb is the wire-encoded
# FileDescriptorProto for pokemon_display.proto.
DESCRIPTOR = _descriptor.FileDescriptor(
  name='pogoprotos/data/pokemon_display.proto',
  package='pogoprotos.data',
  syntax='proto3',
  serialized_pb=_b('\n%pogoprotos/data/pokemon_display.proto\x12\x0fpogoprotos.data\x1a\x1epogoprotos/enums/costume.proto\x1a\x1bpogoprotos/enums/form.proto\x1a\x1dpogoprotos/enums/gender.proto\"\x9b\x01\n\x0ePokemonDisplay\x12*\n\x07\x63ostume\x18\x01 \x01(\x0e\x32\x19.pogoprotos.enums.Costume\x12(\n\x06gender\x18\x02 \x01(\x0e\x32\x18.pogoprotos.enums.Gender\x12\r\n\x05shiny\x18\x03 \x01(\x08\x12$\n\x04\x66orm\x18\x04 \x01(\x0e\x32\x16.pogoprotos.enums.Formb\x06proto3')
  ,
  dependencies=[pogoprotos_dot_enums_dot_costume__pb2.DESCRIPTOR,pogoprotos_dot_enums_dot_form__pb2.DESCRIPTOR,pogoprotos_dot_enums_dot_gender__pb2.DESCRIPTOR,])
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
# Generated message descriptor for PokemonDisplay
# (fields: costume, gender, shiny, form).  Do not edit by hand.
_POKEMONDISPLAY = _descriptor.Descriptor(
  name='PokemonDisplay',
  full_name='pogoprotos.data.PokemonDisplay',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='costume', full_name='pogoprotos.data.PokemonDisplay.costume', index=0,
      number=1, type=14, cpp_type=8, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='gender', full_name='pogoprotos.data.PokemonDisplay.gender', index=1,
      number=2, type=14, cpp_type=8, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='shiny', full_name='pogoprotos.data.PokemonDisplay.shiny', index=2,
      number=3, type=8, cpp_type=7, label=1,
      has_default_value=False, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='form', full_name='pogoprotos.data.PokemonDisplay.form', index=3,
      number=4, type=14, cpp_type=8, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=151,
  serialized_end=306,
)
# Resolve the enum-typed fields against the descriptors imported above.
_POKEMONDISPLAY.fields_by_name['costume'].enum_type = pogoprotos_dot_enums_dot_costume__pb2._COSTUME
_POKEMONDISPLAY.fields_by_name['gender'].enum_type = pogoprotos_dot_enums_dot_gender__pb2._GENDER
_POKEMONDISPLAY.fields_by_name['form'].enum_type = pogoprotos_dot_enums_dot_form__pb2._FORM
DESCRIPTOR.message_types_by_name['PokemonDisplay'] = _POKEMONDISPLAY

# Build the concrete message class from the descriptor and register it
# with the symbol database so it can be looked up by full name.
PokemonDisplay = _reflection.GeneratedProtocolMessageType('PokemonDisplay', (_message.Message,), dict(
  DESCRIPTOR = _POKEMONDISPLAY,
  __module__ = 'pogoprotos.data.pokemon_display_pb2'
  # @@protoc_insertion_point(class_scope:pogoprotos.data.PokemonDisplay)
  ))
_sym_db.RegisterMessage(PokemonDisplay)


# @@protoc_insertion_point(module_scope)
| mit |
erikr/django | django/contrib/auth/admin.py | 50 | 8989 | from django.conf import settings
from django.conf.urls import url
from django.contrib import admin, messages
from django.contrib.admin.options import IS_POPUP_VAR
from django.contrib.admin.utils import unquote
from django.contrib.auth import update_session_auth_hash
from django.contrib.auth.forms import (
AdminPasswordChangeForm, UserChangeForm, UserCreationForm,
)
from django.contrib.auth.models import Group, User
from django.core.exceptions import PermissionDenied
from django.db import router, transaction
from django.http import Http404, HttpResponseRedirect
from django.template.response import TemplateResponse
from django.urls import reverse
from django.utils.decorators import method_decorator
from django.utils.encoding import force_text
from django.utils.html import escape
from django.utils.translation import ugettext, ugettext_lazy as _
from django.views.decorators.csrf import csrf_protect
from django.views.decorators.debug import sensitive_post_parameters
# Method-decorator forms of the view decorators, so they can be applied to
# the ModelAdmin instance methods defined below.
csrf_protect_m = method_decorator(csrf_protect)
sensitive_post_parameters_m = method_decorator(sensitive_post_parameters())
@admin.register(Group)
class GroupAdmin(admin.ModelAdmin):
    """Admin options for auth Groups."""

    search_fields = ('name',)
    ordering = ('name',)
    filter_horizontal = ('permissions',)

    def formfield_for_manytomany(self, db_field, request=None, **kwargs):
        """Return the form field for an m2m field, pre-fetching content
        types for the 'permissions' queryset."""
        if db_field.name == 'permissions':
            base_qs = kwargs.get('queryset', db_field.remote_field.model.objects)
            # Avoid a major performance hit resolving permission names,
            # which triggers a content_type load per permission:
            kwargs['queryset'] = base_qs.select_related('content_type')
        return super(GroupAdmin, self).formfield_for_manytomany(
            db_field, request=request, **kwargs)
@admin.register(User)
class UserAdmin(admin.ModelAdmin):
    """Admin options for the built-in User model: a two-step add form
    (username/password first) and a dedicated password-change view."""

    add_form_template = 'admin/auth/user/add_form.html'
    change_user_password_template = None
    fieldsets = (
        (None, {'fields': ('username', 'password')}),
        (_('Personal info'), {'fields': ('first_name', 'last_name', 'email')}),
        (_('Permissions'), {'fields': ('is_active', 'is_staff', 'is_superuser',
                                       'groups', 'user_permissions')}),
        (_('Important dates'), {'fields': ('last_login', 'date_joined')}),
    )
    # Minimal fieldset shown while creating a user (step one of two).
    add_fieldsets = (
        (None, {
            'classes': ('wide',),
            'fields': ('username', 'password1', 'password2'),
        }),
    )
    form = UserChangeForm
    add_form = UserCreationForm
    change_password_form = AdminPasswordChangeForm
    list_display = ('username', 'email', 'first_name', 'last_name', 'is_staff')
    list_filter = ('is_staff', 'is_superuser', 'is_active', 'groups')
    search_fields = ('username', 'first_name', 'last_name', 'email')
    ordering = ('username',)
    filter_horizontal = ('groups', 'user_permissions',)

    def get_fieldsets(self, request, obj=None):
        """Use the reduced add_fieldsets when creating a new user."""
        if not obj:
            return self.add_fieldsets
        return super(UserAdmin, self).get_fieldsets(request, obj)

    def get_form(self, request, obj=None, **kwargs):
        """
        Use special form during user creation
        """
        defaults = {}
        if obj is None:
            defaults['form'] = self.add_form
        defaults.update(kwargs)
        return super(UserAdmin, self).get_form(request, obj, **defaults)

    def get_urls(self):
        """Prepend the admin password-change URL to the default URLs."""
        return [
            url(
                r'^(.+)/password/$',
                self.admin_site.admin_view(self.user_change_password),
                name='auth_user_password_change',
            ),
        ] + super(UserAdmin, self).get_urls()

    def lookup_allowed(self, lookup, value):
        # See #20078: we don't want to allow any lookups involving passwords.
        if lookup.startswith('password'):
            return False
        return super(UserAdmin, self).lookup_allowed(lookup, value)

    @sensitive_post_parameters_m
    @csrf_protect_m
    def add_view(self, request, form_url='', extra_context=None):
        # Wrap user creation in a transaction so a failure in the inner
        # view cannot leave a partially-created user.
        with transaction.atomic(using=router.db_for_write(self.model)):
            return self._add_view(request, form_url, extra_context)

    def _add_view(self, request, form_url='', extra_context=None):
        # It's an error for a user to have add permission but NOT change
        # permission for users. If we allowed such users to add users, they
        # could create superusers, which would mean they would essentially have
        # the permission to change users. To avoid the problem entirely, we
        # disallow users from adding users if they don't have change
        # permission.
        if not self.has_change_permission(request):
            if self.has_add_permission(request) and settings.DEBUG:
                # Raise Http404 in debug mode so that the user gets a helpful
                # error message.
                raise Http404(
                    'Your user does not have the "Change user" permission. In '
                    'order to add users, Django requires that your user '
                    'account have both the "Add user" and "Change user" '
                    'permissions set.')
            raise PermissionDenied
        if extra_context is None:
            extra_context = {}
        username_field = self.model._meta.get_field(self.model.USERNAME_FIELD)
        defaults = {
            'auto_populated_fields': (),
            'username_help_text': username_field.help_text,
        }
        extra_context.update(defaults)
        return super(UserAdmin, self).add_view(request, form_url,
                                               extra_context)

    @sensitive_post_parameters_m
    def user_change_password(self, request, id, form_url=''):
        """Render and process the admin form for setting a user's password."""
        if not self.has_change_permission(request):
            raise PermissionDenied
        user = self.get_object(request, unquote(id))
        if user is None:
            raise Http404(_('%(name)s object with primary key %(key)r does not exist.') % {
                'name': force_text(self.model._meta.verbose_name),
                'key': escape(id),
            })
        if request.method == 'POST':
            form = self.change_password_form(user, request.POST)
            if form.is_valid():
                form.save()
                change_message = self.construct_change_message(request, form, None)
                self.log_change(request, user, change_message)
                msg = ugettext('Password changed successfully.')
                messages.success(request, msg)
                # Keep the admin's own session valid if they changed their
                # own password.
                update_session_auth_hash(request, form.user)
                return HttpResponseRedirect(
                    reverse(
                        '%s:%s_%s_change' % (
                            self.admin_site.name,
                            user._meta.app_label,
                            user._meta.model_name,
                        ),
                        args=(user.pk,),
                    )
                )
        else:
            form = self.change_password_form(user)
        fieldsets = [(None, {'fields': list(form.base_fields)})]
        adminForm = admin.helpers.AdminForm(form, fieldsets, {})
        context = {
            'title': _('Change password: %s') % escape(user.get_username()),
            'adminForm': adminForm,
            'form_url': form_url,
            'form': form,
            'is_popup': (IS_POPUP_VAR in request.POST or
                         IS_POPUP_VAR in request.GET),
            'add': True,
            'change': False,
            'has_delete_permission': False,
            'has_change_permission': True,
            'has_absolute_url': False,
            'opts': self.model._meta,
            'original': user,
            'save_as': False,
            'show_save': True,
        }
        context.update(self.admin_site.each_context(request))
        request.current_app = self.admin_site.name
        return TemplateResponse(
            request,
            self.change_user_password_template or
            'admin/auth/user/change_password.html',
            context,
        )

    def response_add(self, request, obj, post_url_continue=None):
        """
        Determines the HttpResponse for the add_view stage. It mostly defers to
        its superclass implementation but is customized because the User model
        has a slightly different workflow.
        """
        # We should allow further modification of the user just added i.e. the
        # 'Save' button should behave like the 'Save and continue editing'
        # button except in two scenarios:
        # * The user has pressed the 'Save and add another' button
        # * We are adding a user in a popup
        if '_addanother' not in request.POST and IS_POPUP_VAR not in request.POST:
            request.POST = request.POST.copy()
            request.POST['_continue'] = 1
        return super(UserAdmin, self).response_add(request, obj,
                                                   post_url_continue)
| bsd-3-clause |
AutorestCI/azure-sdk-for-python | azure-mgmt-loganalytics/azure/mgmt/loganalytics/operations/linked_services_operations.py | 2 | 15585 | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
import uuid
from msrest.pipeline import ClientRawResponse
from msrestazure.azure_exceptions import CloudError
from .. import models
class LinkedServicesOperations(object):
"""LinkedServicesOperations operations.
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
    :param deserializer: An object model deserializer.
:ivar api_version: Client Api Version. Constant value: "2015-11-01-preview".
"""
def __init__(self, client, config, serializer, deserializer):
    # Stash the service-client plumbing; every operation below reuses these
    # to build, send, and deserialize REST requests.
    self._client = client
    self._serialize = serializer
    self._deserialize = deserializer
    # Constant API version sent on every request (see class docstring).
    self.api_version = "2015-11-01-preview"
    self.config = config
# NOTE(review): AutoRest-generated code (see file header); hand edits are
# lost on regeneration — comments here are for reading, not maintenance.
def create_or_update(
        self, resource_group_name, workspace_name, linked_service_name, resource_id, tags=None, custom_headers=None, raw=False, **operation_config):
    """Create or update a linked service.

    :param resource_group_name: The name of the resource group to get. The
     name is case insensitive.
    :type resource_group_name: str
    :param workspace_name: Name of the Log Analytics Workspace that will
     contain the linkedServices resource
    :type workspace_name: str
    :param linked_service_name: Name of the linkedServices resource
    :type linked_service_name: str
    :param resource_id: The resource id of the resource that will be
     linked to the workspace.
    :type resource_id: str
    :param tags: Resource tags
    :type tags: dict
    :param dict custom_headers: headers that will be added to the request
    :param bool raw: returns the direct response alongside the
     deserialized response
    :param operation_config: :ref:`Operation configuration
     overrides<msrest:optionsforoperations>`.
    :return: :class:`LinkedService
     <azure.mgmt.loganalytics.models.LinkedService>` or
     :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>` if
     raw=true
    :rtype: :class:`LinkedService
     <azure.mgmt.loganalytics.models.LinkedService>` or
     :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
    :raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
    """
    # Fold the flat arguments into the request-body model.
    parameters = models.LinkedService(tags=tags, resource_id=resource_id)

    # Construct URL
    url = '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}/linkedServices/{linkedServiceName}'
    path_format_arguments = {
        'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
        'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
        'linkedServiceName': self._serialize.url("linked_service_name", linked_service_name, 'str'),
        'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str')
    }
    url = self._client.format_url(url, **path_format_arguments)

    # Construct parameters
    query_parameters = {}
    query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')

    # Construct headers
    header_parameters = {}
    header_parameters['Content-Type'] = 'application/json; charset=utf-8'
    if self.config.generate_client_request_id:
        header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
    if custom_headers:
        header_parameters.update(custom_headers)
    if self.config.accept_language is not None:
        header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')

    # Construct body
    body_content = self._serialize.body(parameters, 'LinkedService')

    # Construct and send request
    request = self._client.put(url, query_parameters)
    response = self._client.send(
        request, header_parameters, body_content, **operation_config)

    # 200 = updated, 201 = created; anything else is surfaced as CloudError.
    if response.status_code not in [200, 201]:
        exp = CloudError(response)
        exp.request_id = response.headers.get('x-ms-request-id')
        raise exp

    deserialized = None

    if response.status_code == 200:
        deserialized = self._deserialize('LinkedService', response)
    if response.status_code == 201:
        deserialized = self._deserialize('LinkedService', response)

    if raw:
        client_raw_response = ClientRawResponse(deserialized, response)
        return client_raw_response

    return deserialized
# NOTE(review): AutoRest-generated code (see file header); hand edits are
# lost on regeneration — comments here are for reading, not maintenance.
def delete(
        self, resource_group_name, workspace_name, linked_service_name, custom_headers=None, raw=False, **operation_config):
    """Deletes a linked service instance.

    :param resource_group_name: The name of the resource group to get. The
     name is case insensitive.
    :type resource_group_name: str
    :param workspace_name: Name of the Log Analytics Workspace that
     contains the linkedServices resource
    :type workspace_name: str
    :param linked_service_name: Name of the linked service.
    :type linked_service_name: str
    :param dict custom_headers: headers that will be added to the request
    :param bool raw: returns the direct response alongside the
     deserialized response
    :param operation_config: :ref:`Operation configuration
     overrides<msrest:optionsforoperations>`.
    :return: None or
     :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>` if
     raw=true
    :rtype: None or
     :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
    :raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
    """
    # Construct URL
    url = '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}/linkedServices/{linkedServiceName}'
    path_format_arguments = {
        'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
        'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
        'linkedServiceName': self._serialize.url("linked_service_name", linked_service_name, 'str'),
        'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str')
    }
    url = self._client.format_url(url, **path_format_arguments)

    # Construct parameters
    query_parameters = {}
    query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')

    # Construct headers
    header_parameters = {}
    header_parameters['Content-Type'] = 'application/json; charset=utf-8'
    if self.config.generate_client_request_id:
        header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
    if custom_headers:
        header_parameters.update(custom_headers)
    if self.config.accept_language is not None:
        header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')

    # Construct and send request
    request = self._client.delete(url, query_parameters)
    response = self._client.send(request, header_parameters, **operation_config)

    # 200 = deleted, 204 = nothing to delete; anything else raises.
    if response.status_code not in [200, 204]:
        exp = CloudError(response)
        exp.request_id = response.headers.get('x-ms-request-id')
        raise exp

    if raw:
        client_raw_response = ClientRawResponse(None, response)
        return client_raw_response
    def get(
            self, resource_group_name, workspace_name, linked_service_name, custom_headers=None, raw=False, **operation_config):
        """Gets a linked service instance.
        :param resource_group_name: The name of the resource group to get. The
        name is case insensitive.
        :type resource_group_name: str
        :param workspace_name: Name of the Log Analytics Workspace that
        contains the linkedServices resource
        :type workspace_name: str
        :param linked_service_name: Name of the linked service.
        :type linked_service_name: str
        :param dict custom_headers: headers that will be added to the request
        :param bool raw: returns the direct response alongside the
        deserialized response
        :param operation_config: :ref:`Operation configuration
        overrides<msrest:optionsforoperations>`.
        :return: :class:`LinkedService
        <azure.mgmt.loganalytics.models.LinkedService>` or
        :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>` if
        raw=true
        :rtype: :class:`LinkedService
        <azure.mgmt.loganalytics.models.LinkedService>` or
        :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
        :raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
        """
        # Construct URL
        url = '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}/linkedServices/{linkedServiceName}'
        # The serializer URL-escapes each path segment; the resource group
        # name is additionally length- and pattern-validated here.
        path_format_arguments = {
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
            'linkedServiceName': self._serialize.url("linked_service_name", linked_service_name, 'str'),
            'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str')
        }
        url = self._client.format_url(url, **path_format_arguments)
        # Construct parameters
        query_parameters = {}
        query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')
        # Construct headers
        header_parameters = {}
        header_parameters['Content-Type'] = 'application/json; charset=utf-8'
        if self.config.generate_client_request_id:
            # Per-request correlation id sent to the service for tracing.
            header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
        if custom_headers:
            header_parameters.update(custom_headers)
        if self.config.accept_language is not None:
            header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
        # Construct and send request
        request = self._client.get(url, query_parameters)
        response = self._client.send(request, header_parameters, **operation_config)
        # Anything other than 200 is surfaced as a CloudError carrying the
        # server-side request id for support/diagnostics.
        if response.status_code not in [200]:
            exp = CloudError(response)
            exp.request_id = response.headers.get('x-ms-request-id')
            raise exp
        deserialized = None
        if response.status_code == 200:
            deserialized = self._deserialize('LinkedService', response)
        # raw=True hands back the deserialized body together with the raw
        # HTTP response object instead of the body alone.
        if raw:
            client_raw_response = ClientRawResponse(deserialized, response)
            return client_raw_response
        return deserialized
    def list_by_workspace(
            self, resource_group_name, workspace_name, custom_headers=None, raw=False, **operation_config):
        """Gets the linked services instances in a workspace.
        :param resource_group_name: The name of the resource group to get. The
        name is case insensitive.
        :type resource_group_name: str
        :param workspace_name: Name of the Log Analytics Workspace that
        contains the linked services.
        :type workspace_name: str
        :param dict custom_headers: headers that will be added to the request
        :param bool raw: returns the direct response alongside the
        deserialized response
        :param operation_config: :ref:`Operation configuration
        overrides<msrest:optionsforoperations>`.
        :return: An iterator like instance of :class:`LinkedService
        <azure.mgmt.loganalytics.models.LinkedService>`
        :rtype: :class:`LinkedServicePaged
        <azure.mgmt.loganalytics.models.LinkedServicePaged>`
        :raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
        """
        # Page fetcher invoked lazily by LinkedServicePaged: with no
        # next_link it builds and issues the initial list request; on
        # subsequent calls it follows the server-provided continuation URL.
        def internal_paging(next_link=None, raw=False):
            if not next_link:
                # Construct URL
                url = '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/Microsoft.OperationalInsights/workspaces/{workspaceName}/linkedServices'
                path_format_arguments = {
                    'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
                    'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
                    'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str')
                }
                url = self._client.format_url(url, **path_format_arguments)
                # Construct parameters
                query_parameters = {}
                query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')
            else:
                # Continuation URL is used verbatim; presumably it already
                # embeds its own query string (api-version is not re-added).
                url = next_link
                query_parameters = {}
            # Construct headers
            header_parameters = {}
            header_parameters['Content-Type'] = 'application/json; charset=utf-8'
            if self.config.generate_client_request_id:
                header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
            if custom_headers:
                header_parameters.update(custom_headers)
            if self.config.accept_language is not None:
                header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
            # Construct and send request
            request = self._client.get(url, query_parameters)
            response = self._client.send(
                request, header_parameters, **operation_config)
            if response.status_code not in [200]:
                exp = CloudError(response)
                exp.request_id = response.headers.get('x-ms-request-id')
                raise exp
            return response
        # Deserialize response
        deserialized = models.LinkedServicePaged(internal_paging, self._deserialize.dependencies)
        if raw:
            # Raw mode: build a second pager that also collects response
            # headers into header_dict.
            header_dict = {}
            client_raw_response = models.LinkedServicePaged(internal_paging, self._deserialize.dependencies, header_dict)
            return client_raw_response
        return deserialized
| mit |
TRESCLOUD/odoopub | addons/hw_scanner/__init__.py | 1894 | 1075 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import controllers
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
jittat/cafe-grader-web | lib/assets/Lib/browser/local_storage.py | 6 | 2965 | # local storage in browser
import sys
from javascript import JSObject
from browser import window, console
# True when the hosting browser's window object exposes the localStorage API.
has_local_storage = hasattr(window, 'localStorage')

class __UnProvided():
    """Sentinel type marking "no default supplied" for LocalStorage.pop."""
    pass
class LocalStorage():
    """Dictionary-like interface to the browser's window.localStorage.

    The Web Storage API stores only strings, so both keys and values must
    be ``str``; anything else raises TypeError.  Missing keys come back
    from ``getItem`` as null (seen here as None).
    """

    storage_type = "local_storage"

    # Sentinel used by pop() to tell "no default supplied" apart from an
    # explicit default of None.  BUG FIX: the previous sentinel was the
    # module-level class __UnProvided; under CPython's private name
    # mangling, a reference to __UnProvided inside this class body is
    # rewritten to _LocalStorage__UnProvided and raises NameError, so the
    # sentinel now lives on the class itself.
    _MISSING = object()

    def __init__(self):
        if not has_local_storage:
            raise EnvironmentError("LocalStorage not available")
        self.store = JSObject(window.localStorage)

    def __delitem__(self, key):
        """Remove *key*; raise KeyError if it is not stored."""
        if (not isinstance(key, str)):
            raise TypeError("key must be string")
        if key not in self:
            raise KeyError(key)
        self.store.removeItem(key)

    def __getitem__(self, key):
        """Return the value stored under *key*; raise KeyError if absent."""
        if (not isinstance(key, str)):
            raise TypeError("key must be string")
        res = __BRYTHON__.JSObject(self.store.getItem(key))
        if res is not None:
            return res
        raise KeyError(key)

    def __setitem__(self, key, value):
        """Store *value* under *key*; both must be strings."""
        if (not isinstance(key, str)):
            raise TypeError("key must be string")
        if (not isinstance(value, str)):
            raise TypeError("value must be string")
        self.store.setItem(key, value)

    # implement "in" functionality
    def __contains__(self, key):
        if (not isinstance(key, str)):
            raise TypeError("key must be string")
        # getItem returns null (-> None) only for missing keys.
        return __BRYTHON__.JSObject(self.store.getItem(key)) is not None

    def __iter__(self):
        return iter(self.keys())

    def get(self, key, default=None):
        """Return the value for *key*, or *default* when the key is absent.

        BUG FIX: the previous implementation returned
        ``getItem(key) or default``, which wrongly substituted *default*
        for a stored empty string (falsy, but a perfectly valid value).
        """
        if (not isinstance(key, str)):
            raise TypeError("key must be string")
        res = __BRYTHON__.JSObject(self.store.getItem(key))
        return res if res is not None else default

    def pop(self, key, default=_MISSING):
        """Remove *key* and return its value.

        Without *default*, a missing key raises KeyError; with *default*,
        it is returned when the key is absent.
        """
        if (not isinstance(key, str)):
            raise TypeError("key must be string")
        if default is LocalStorage._MISSING:
            ret = self.get(key)
            del self[key]  # will throw key error if doesn't exist
            return ret
        if key in self:
            ret = self.get(key)
            del self[key]
            return ret
        return default

    # while a real dict provides a view, returning a generator would be less
    # helpful than simply returning a list, and creating a custom iterator is
    # overkill and would likely result in slower performance
    def keys(self):
        return [__BRYTHON__.JSObject(self.store.key(i)) for i in range(self.store.length)]

    def values(self):
        return [__BRYTHON__.JSObject(self.__getitem__(k)) for k in self.keys()]

    def items(self):
        return list(zip(self.keys(), self.values()))

    def clear(self):
        """Remove every stored key/value pair."""
        self.store.clear()

    def __len__(self):
        return self.store.length
# Module-level singleton: expose a ready-to-use LocalStorage instance only
# when the browser actually provides window.localStorage (otherwise the
# constructor would raise EnvironmentError at import time).
if has_local_storage:
    storage = LocalStorage()
| mit |
awkspace/ansible | lib/ansible/modules/cloud/google/gcp_dns_resource_record_set_facts.py | 5 | 4885 | #!/usr/bin/python
# -*- coding: utf-8 -*-
#
# Copyright (C) 2017 Google
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
# ----------------------------------------------------------------------------
#
# *** AUTO GENERATED CODE *** AUTO GENERATED CODE ***
#
# ----------------------------------------------------------------------------
#
# This file is automatically generated by Magic Modules and manual
# changes will be clobbered when the file is regenerated.
#
# Please read more about how to change this file at
# https://www.github.com/GoogleCloudPlatform/magic-modules
#
# ----------------------------------------------------------------------------
from __future__ import absolute_import, division, print_function
__metaclass__ = type
################################################################################
# Documentation
################################################################################
ANSIBLE_METADATA = {'metadata_version': '1.1', 'status': ["preview"], 'supported_by': 'community'}
DOCUMENTATION = '''
---
module: gcp_dns_resource_record_set_facts
description:
- Gather facts for GCP ResourceRecordSet
short_description: Gather facts for GCP ResourceRecordSet
version_added: 2.8
author: Google Inc. (@googlecloudplatform)
requirements:
- python >= 2.6
- requests >= 2.18.4
- google-auth >= 1.3.0
options:
managed_zone:
description:
- Identifies the managed zone addressed by this request.
- 'This field represents a link to a ManagedZone resource in GCP. It can be specified
in two ways. First, you can place in the name of the resource here as a string
Alternatively, you can add `register: name-of-resource` to a gcp_dns_managed_zone
task and then set this managed_zone field to "{{ name-of-resource }}"'
required: true
extends_documentation_fragment: gcp
'''
EXAMPLES = '''
- name: a resource record set facts
gcp_dns_resource_record_set_facts:
managed_zone: "{{ managed_zone }}"
project: test_project
auth_kind: serviceaccount
service_account_file: "/tmp/auth.pem"
'''
RETURN = '''
items:
description: List of items
returned: always
type: complex
contains:
name:
description:
- For example, U(www.example.com.)
returned: success
type: str
type:
description:
- One of valid DNS resource types.
returned: success
type: str
ttl:
description:
- Number of seconds that this ResourceRecordSet can be cached by resolvers.
returned: success
type: int
target:
description:
- As defined in RFC 1035 (section 5) and RFC 1034 (section 3.6.1) .
returned: success
type: list
managed_zone:
description:
- Identifies the managed zone addressed by this request.
returned: success
type: str
'''
################################################################################
# Imports
################################################################################
from ansible.module_utils.gcp_utils import navigate_hash, GcpSession, GcpModule, GcpRequest, replace_resource_dict
import json
################################################################################
# Main
################################################################################
def main():
    """Module entry point: list record sets of the managed zone and exit."""
    module = GcpModule(argument_spec=dict(managed_zone=dict(required=True)))

    # Default to the read-write Cloud DNS scope when none was supplied.
    if not module.params['scopes']:
        module.params['scopes'] = ['https://www.googleapis.com/auth/ndev.clouddns.readwrite']

    response = fetch_list(module, collection(module))
    # The API nests the record sets under 'rrsets'; fall back to an
    # empty list when the key is missing or empty.
    items = response.get('rrsets') or []
    module.exit_json(items=items)
def collection(module):
    """Return the ResourceRecordSets list URL for the targeted managed zone.

    BUG FIX: the generated code pointed at the ``changes`` collection, whose
    response contains no ``rrsets`` key, so main() always reported an empty
    list.  The Cloud DNS v1 list endpoint for record sets is
    ``.../managedZones/{managedZone}/rrsets``.
    """
    res = {'project': module.params['project'], 'managed_zone': replace_resource_dict(module.params['managed_zone'], 'name')}
    return "https://www.googleapis.com/dns/v1/projects/{project}/managedZones/{managed_zone}/rrsets".format(**res)
def fetch_list(module, link):
    """GET *link* with an authenticated Cloud DNS session and decode it."""
    session = GcpSession(module, 'dns')
    return return_if_object(module, session.get(link))
def return_if_object(module, response):
    """Decode *response* as JSON, turning HTTP/API errors into module failures.

    Returns None for 404 (not found) and 204 (no content) responses.
    """
    if response.status_code in (404, 204):
        # Absent resource or empty body: nothing to deserialize.
        return None
    try:
        module.raise_for_status(response)
        result = response.json()
    except getattr(json.decoder, 'JSONDecodeError', ValueError) as inst:
        module.fail_json(msg="Invalid JSON response with error: %s" % inst)
    # A well-formed body may still carry an API-level error payload.
    errors = navigate_hash(result, ['error', 'errors'])
    if errors:
        module.fail_json(msg=errors)
    return result
if __name__ == "__main__":
main()
| gpl-3.0 |
arvindks/kle | covariance/covariance.py | 1 | 8799 |
import numpy as np
from exceptions import NotImplementedError, KeyError
class _Residual:
def __init__(self):
self.res = []
def __call__(self, rk):
self.res.append(rk)
def itercount(self):
return len(self.res)
def clear(self):
self.res = []
class CovarianceMatrix:
    """
    Implementation of covariance matrix corresponding to covariance kernel

    Parameters:
    -----------
    method: string, {'Dense','FFT','Hmatrix'}
        decides what representation to build
    pts: n x dim array
        Location of the points
    kernel: Kernel object
        See covariance/kernel.py
    nugget: double, optional. default = 0.
        Adds a diagonal shift: matvec/rmatvec compute (Q + nugget*I)x.
        Also improves the conditioning of iterative solves.

    Attributes:
    -----------
    shape: (N,N)
        N is the size of the matrix
    P: N x N csr_matrix or None
        preconditioner built by build_preconditioner()

    Methods:
    --------
    matvec(), rmatvec(), reset(), itercount(), build_preconditioner(), solve()

    Notes:
    ------
    Three representations are supported:
    1. Dense matrix
    2. FFT based operations, when the kernel is stationary (translation
       invariant) and the points lie on a regular grid
    3. Hierarchical matrix - arbitrary kernels on irregular grids

    Compatible with scipy.sparse.linalg.aslinearoperator, e.g.
    Q = CovarianceMatrix(...); Qop = aslinearoperator(Q)

    References:
    ----------
    .. [1] A.K. Saibaba, Fast solvers for geostatistical inverse problems
       and uncertainty quantification, PhD Thesis 2013, Stanford University
    """
    def __init__(self, method, pts, kernel, nugget = 0.0, **kwargs):
        self.method = method
        self.kernel = kernel
        self.pts = pts
        # Optional verbosity flag (dict.get replaces try/except KeyError).
        self.verbose = kwargs.get('verbose', False)
        if method == 'Dense':
            from dense import GenerateDenseMatrix
            self.mat = GenerateDenseMatrix(pts, kernel)
        elif method == 'FFT':
            from toeplitz import CreateRow
            xmin = kwargs['xmin']
            xmax = kwargs['xmax']
            N = kwargs['N']
            theta = kwargs['theta']
            self.N = N
            # CreateRow also returns the regular grid points; they replace
            # the user-supplied pts so that shape reflects the grid size.
            self.row, self.pts = CreateRow(xmin, xmax, N, kernel, theta)
        elif method == 'Hmatrix':
            from hmatrix import Hmatrix
            n = np.size(pts, 0)
            ind = np.arange(n)
            rkmax = 32 if 'rkmax' not in kwargs else kwargs['rkmax']
            eps = 1.e-9 if 'eps' not in kwargs else kwargs['eps']
            self.H = Hmatrix(pts, kernel, ind, verbose = self.verbose, rkmax = rkmax, eps = eps)
        else:
            raise NotImplementedError
        self.P = None
        n = self.pts.shape[0]
        self.shape = (n, n)
        self.nugget = nugget
        self.dtype = 'd'
        self.count = 0
        self.solvmatvecs = 0

    def matvec(self, x):
        """
        Computes the matrix-vector product y = (Q + nugget*I) x

        Parameters:
        -----------
        x: (n,) ndarray

        Returns:
        --------
        y: (n,) ndarray

        Notes:
        ------
        The result depends on the chosen representation; all methods
        except 'Hmatrix' are exact.
        """
        method = self.method
        if method == 'Dense':
            y = np.dot(self.mat, x)
        elif method == 'FFT':
            # BUG FIX: ToeplitzProduct was previously imported only inside
            # __init__, so referencing it here raised NameError.
            from toeplitz import ToeplitzProduct
            y = ToeplitzProduct(x, self.row, self.N)
        elif method == 'Hmatrix':
            y = np.zeros_like(x, dtype = 'd')
            self.H.mult(x, y, self.verbose)
        # BUG FIX: the nugget contributes a diagonal term nugget*x.
        # The old code added nugget*y, i.e. it scaled the product by
        # (1 + nugget) instead of shifting the operator.
        y += self.nugget * x
        self.count += 1
        return y

    def rmatvec(self, x):
        """
        Computes the matrix transpose-vector product y = (Q + nugget*I)^T x

        Parameters:
        -----------
        x: (n,) ndarray

        Returns:
        --------
        y: (n,) ndarray

        Notes:
        ------
        By symmetry this is essentially matvec, except for 'Hmatrix'
        which is numerically different.
        """
        method = self.method
        if method == 'Dense':
            y = np.dot(self.mat.T, x)
        elif method == 'FFT':
            # See matvec: local import avoids the NameError of the old code.
            from toeplitz import ToeplitzProduct
            y = ToeplitzProduct(x, self.row, self.N)
        elif method == 'Hmatrix':
            y = np.zeros_like(x, dtype = 'd')
            self.H.transpmult(x, y, self.verbose)
        # BUG FIX: add nugget*x, not nugget*y (see matvec).
        y += self.nugget * x
        self.count += 1
        return y

    def reset(self):
        """Reset the counters of matvecs and solver matvecs."""
        self.count = 0
        self.solvmatvecs = 0
        return

    def itercount(self):
        """Return the number of matvecs performed so far."""
        return self.count

    def build_preconditioner(self, k = 100):
        """
        Build a sparse preconditioner based on local centers.

        Parameters:
        -----------
        k: int, optional. default = 100
            Number of local centers. Controls both the sparsity and the
            effectiveness: larger k is more expensive to build but
            typically yields fewer solver iterations.

        Notes:
        ------
        For large ill-conditioned systems a nonzero nugget makes the
        shifted operator Q + nugget*I better conditioned.

        References:
        -----------
        """
        from time import time
        from scipy.spatial import cKDTree
        from scipy.spatial.distance import cdist
        from scipy.sparse import csr_matrix
        # Nothing to do if the preconditioner already exists.
        # BUG FIX: 'is not None' instead of '!= None' -- once P is a
        # csr_matrix, != performs an elementwise sparse comparison.
        if self.P is not None:
            return
        pts = self.pts
        kernel = self.kernel
        N = pts.shape[0]
        # Build a kd-tree for fast nearest-neighbor queries.
        start = time()
        tree = cKDTree(pts, leafsize = 32)
        end = time()
        if self.verbose:
            print("Tree building time = %g" % (end - start))
        # Find the k nearest neighbors of every point.
        start = time()
        dist, ind = tree.query(pts, k = k)
        end = time()
        if self.verbose:
            print("Nearest neighbor computation time = %g" % (end - start))
        # For each point solve a local k x k kernel system; its solution
        # supplies one sparse row of the change-of-basis preconditioner.
        y = np.zeros((k, 1), dtype = 'd')
        y[0] = 1.
        row = np.tile(np.arange(N), (k, 1)).transpose()
        col = np.copy(ind)
        nu = np.zeros((N, k), dtype = 'd')
        start = time()
        for i in np.arange(N):
            Q = kernel(cdist(pts[ind[i, :], :], pts[ind[i, :], :]))
            nui = np.linalg.solve(Q, y)
            nu[i, :] = np.copy(nui.transpose())
        end = time()
        if self.verbose:
            print("Building preconditioner took = %g" % (end - start))
        # Assemble the sparse matrix from (row, col, data) triplets.
        ij = np.zeros((N * k, 2), dtype = 'i')
        ij[:, 0] = np.copy(np.reshape(row, N * k, order = 'F').transpose())
        ij[:, 1] = np.copy(np.reshape(col, N * k, order = 'F').transpose())
        data = np.copy(np.reshape(nu, N * k, order = 'F').transpose())
        self.P = csr_matrix((data, ij.transpose()), shape = (N, N), dtype = 'd')
        return

    def solve(self, b, maxiter = 1000, tol = 1.e-10):
        """
        Compute Q^{-1} b.

        Parameters:
        -----------
        b: (n,) ndarray
            given right hand side
        maxiter: int, optional. default = 1000
            Maximum number of iterations for the iterative solver
        tol: float, optional. default = 1.e-10
            Residual stopping tolerance for the iterative solver

        Notes:
        ------
        'Dense' uses a direct factorization; otherwise an iterative
        solver is used -- MINRES without a preconditioner, or GMRES with
        one (the preconditioner is not guaranteed positive definite).
        """
        if self.method == 'Dense':
            from scipy.linalg import solve as dense_solve
            x = dense_solve(self.mat, b)
        else:
            from scipy.sparse.linalg import gmres, aslinearoperator, minres
            P = self.P
            Aop = aslinearoperator(self)
            residual = _Residual()
            # BUG FIX: 'is not None' (see build_preconditioner).
            if P is not None:
                # BUG FIX: honor the maxiter argument instead of the
                # hard-coded 1000 of the old code.
                x, info = gmres(Aop, b, tol = tol, restart = 30, maxiter = maxiter, callback = residual, M = P)
            else:
                x, info = minres(Aop, b, tol = tol, maxiter = maxiter, callback = residual)
            self.solvmatvecs += residual.itercount()
            if self.verbose:
                print("Number of iterations is %g and status is %g" % (residual.itercount(), info))
        return x
if __name__ == '__main__':
    # Smoke test: build an H-matrix covariance, apply it, then invert.
    n = 5000
    pts = np.random.rand(n, 2)

    def kernel(R):
        return np.exp(-R)

    Q = CovarianceMatrix('Hmatrix', pts, kernel, verbose = True, nugget = 1.e-4)
    x = np.ones((n,), dtype = 'd')
    y = Q.matvec(x)
    # BUG FIX: the method is build_preconditioner (not BuildPreconditioner)
    # and it accepts no 'view' keyword argument.
    Q.build_preconditioner(k = 30)
    Q.verbose = False
    xd = Q.solve(y)
    # Relative error of the reconstructed vector.
    print(np.linalg.norm(x - xd) / np.linalg.norm(x))
| gpl-3.0 |
zaccoz/odoo | addons/portal_project_issue/tests/test_access_rights.py | 338 | 10547 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Business Applications
# Copyright (c) 2013-TODAY OpenERP S.A. <http://openerp.com>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp.addons.portal_project.tests.test_access_rights import TestPortalProjectBase
from openerp.exceptions import AccessError
from openerp.osv.orm import except_orm
from openerp.tools import mute_logger
class TestPortalProjectBase(TestPortalProjectBase):

    def setUp(self):
        super(TestPortalProjectBase, self).setUp()
        cr, uid = self.cr, self.uid
        # Useful models
        self.project_issue = self.registry('project.issue')
        # Various test issues: (name, assigned user), exposed afterwards as
        # self.issue_1_id .. self.issue_6_id in creation order.
        issue_data = [
            ('Test1', False),
            ('Test2', False),
            ('Test3', False),
            ('Test4', self.user_projectuser_id),
            ('Test5', self.user_portal_id),
            ('Test6', self.user_public_id),
        ]
        for index, (name, user_id) in enumerate(issue_data, start=1):
            issue_id = self.project_issue.create(
                cr, uid,
                {'name': name, 'user_id': user_id, 'project_id': self.project_pigs_id},
                {'mail_create_nolog': True})
            setattr(self, 'issue_%d_id' % index, issue_id)
class TestPortalIssue(TestPortalProjectBase):
    """Access-rights checks on project.issue: for each project
    privacy_visibility setting (public / portal / employees / followers),
    verify what each user kind (employee, no-group, portal, public) can
    search, read and write."""

    @mute_logger('openerp.addons.base.ir.ir_model', 'openerp.models')
    def test_00_project_access_rights(self):
        """ Test basic project access rights, for project and portal_project """
        cr, uid, pigs_id = self.cr, self.uid, self.project_pigs_id
        # ----------------------------------------
        # CASE1: public project
        # ----------------------------------------
        # Do: Alfred reads project -> ok (employee ok public)
        # Test: all project issues visible
        issue_ids = self.project_issue.search(cr, self.user_projectuser_id, [('project_id', '=', pigs_id)])
        test_issue_ids = set([self.issue_1_id, self.issue_2_id, self.issue_3_id, self.issue_4_id, self.issue_5_id, self.issue_6_id])
        self.assertEqual(set(issue_ids), test_issue_ids,
                         'access rights: project user cannot see all issues of a public project')
        # Test: all project issues readable
        self.project_issue.read(cr, self.user_projectuser_id, issue_ids, ['name'])
        # Test: all project issues writable
        self.project_issue.write(cr, self.user_projectuser_id, issue_ids, {'description': 'TestDescription'})
        # Do: Bert reads project -> crash, no group
        # Test: no project issue visible
        self.assertRaises(AccessError, self.project_issue.search, cr, self.user_none_id, [('project_id', '=', pigs_id)])
        # Test: no project issue readable
        self.assertRaises(AccessError, self.project_issue.read, cr, self.user_none_id, issue_ids, ['name'])
        # Test: no project issue writable
        self.assertRaises(AccessError, self.project_issue.write, cr, self.user_none_id, issue_ids, {'description': 'TestDescription'})
        # Do: Chell reads project -> ok (portal ok public)
        # Test: all project issues visible
        issue_ids = self.project_issue.search(cr, self.user_portal_id, [('project_id', '=', pigs_id)])
        self.assertEqual(set(issue_ids), test_issue_ids,
                         'access rights: project user cannot see all issues of a public project')
        # Test: all project issues readable
        self.project_issue.read(cr, self.user_portal_id, issue_ids, ['name'])
        # Test: no project issue writable
        self.assertRaises(AccessError, self.project_issue.write, cr, self.user_portal_id, issue_ids, {'description': 'TestDescription'})
        # Do: Donovan reads project -> ok (public ok public)
        # Test: all project issues visible
        issue_ids = self.project_issue.search(cr, self.user_public_id, [('project_id', '=', pigs_id)])
        self.assertEqual(set(issue_ids), test_issue_ids,
                         'access rights: project user cannot see all issues of a public project')
        # ----------------------------------------
        # CASE2: portal project
        # ----------------------------------------
        self.project_project.write(cr, uid, [pigs_id], {'privacy_visibility': 'portal'})
        # Do: Alfred reads project -> ok (employee ok public)
        # Test: all project issues visible
        issue_ids = self.project_issue.search(cr, self.user_projectuser_id, [('project_id', '=', pigs_id)])
        self.assertEqual(set(issue_ids), test_issue_ids,
                         'access rights: project user cannot see all issues of a portal project')
        # Do: Bert reads project -> crash, no group
        # Test: no project issue searchable
        self.assertRaises(AccessError, self.project_issue.search, cr, self.user_none_id, [('project_id', '=', pigs_id)])
        # Data: issue follower
        self.project_issue.message_subscribe_users(cr, self.user_projectuser_id, [self.issue_1_id, self.issue_3_id], [self.user_portal_id])
        # Do: Chell reads project -> ok (portal ok public)
        # Test: only followed project issues visible + assigned
        issue_ids = self.project_issue.search(cr, self.user_portal_id, [('project_id', '=', pigs_id)])
        test_issue_ids = set([self.issue_1_id, self.issue_3_id, self.issue_5_id])
        self.assertEqual(set(issue_ids), test_issue_ids,
                         'access rights: portal user should see the followed issues of a portal project')
        # Data: issue follower cleaning
        self.project_issue.message_unsubscribe_users(cr, self.user_projectuser_id, [self.issue_1_id, self.issue_3_id], [self.user_portal_id])
        # ----------------------------------------
        # CASE3: employee project
        # ----------------------------------------
        self.project_project.write(cr, uid, [pigs_id], {'privacy_visibility': 'employees'})
        # Do: Alfred reads project -> ok (employee ok employee)
        # Test: all project issues visible
        issue_ids = self.project_issue.search(cr, self.user_projectuser_id, [('project_id', '=', pigs_id)])
        test_issue_ids = set([self.issue_1_id, self.issue_2_id, self.issue_3_id, self.issue_4_id, self.issue_5_id, self.issue_6_id])
        self.assertEqual(set(issue_ids), test_issue_ids,
                         'access rights: project user cannot see all issues of an employees project')
        # Do: Chell reads project -> ko (portal ko employee)
        # Test: no project issue visible + assigned
        issue_ids = self.project_issue.search(cr, self.user_portal_id, [('project_id', '=', pigs_id)])
        self.assertFalse(issue_ids, 'access rights: portal user should not see issues of an employees project, even if assigned')
        # ----------------------------------------
        # CASE4: followers project
        # ----------------------------------------
        self.project_project.write(cr, uid, [pigs_id], {'privacy_visibility': 'followers'})
        # Do: Alfred reads project -> ko (employee ko followers)
        # Test: no project issue visible
        issue_ids = self.project_issue.search(cr, self.user_projectuser_id, [('project_id', '=', pigs_id)])
        test_issue_ids = set([self.issue_4_id])
        self.assertEqual(set(issue_ids), test_issue_ids,
                         'access rights: employee user should not see issues of a not-followed followers project, only assigned')
        # Do: Chell reads project -> ko (portal ko employee)
        # Test: no project issue visible
        issue_ids = self.project_issue.search(cr, self.user_portal_id, [('project_id', '=', pigs_id)])
        test_issue_ids = set([self.issue_5_id])
        self.assertEqual(set(issue_ids), test_issue_ids,
                         'access rights: portal user should not see issues of a not-followed followers project, only assigned')
        # Data: subscribe Alfred, Chell and Donovan as follower
        self.project_project.message_subscribe_users(cr, uid, [pigs_id], [self.user_projectuser_id, self.user_portal_id, self.user_public_id])
        self.project_issue.message_subscribe_users(cr, self.user_manager_id, [self.issue_1_id, self.issue_3_id], [self.user_portal_id, self.user_projectuser_id])
        # Do: Alfred reads project -> ok (follower ok followers)
        # Test: followed + assigned issues visible
        issue_ids = self.project_issue.search(cr, self.user_projectuser_id, [('project_id', '=', pigs_id)])
        test_issue_ids = set([self.issue_1_id, self.issue_3_id, self.issue_4_id])
        self.assertEqual(set(issue_ids), test_issue_ids,
                         'access rights: employee user should not see followed + assigned issues of a follower project')
        # Do: Chell reads project -> ok (follower ok follower)
        # Test: followed + assigned issues visible
        issue_ids = self.project_issue.search(cr, self.user_portal_id, [('project_id', '=', pigs_id)])
        test_issue_ids = set([self.issue_1_id, self.issue_3_id, self.issue_5_id])
        self.assertEqual(set(issue_ids), test_issue_ids,
                         'access rights: employee user should not see followed + assigned issues of a follower project')
| agpl-3.0 |
QinerTech/QinerApps | openerp/addons/account/wizard/account_reconcile.py | 12 | 4599 | from openerp import models, fields, api, _
from openerp.tools.float_utils import float_round
class AccountMoveLineReconcile(models.TransientModel):
    """
    Account move line reconcile wizard: checks for the write-off of the
    reconcile entry, or reconciles directly.
    """
    _name = 'account.move.line.reconcile'
    _description = 'Account move line reconcile'

    trans_nbr = fields.Integer(string='# of Transaction', readonly=True)
    credit = fields.Float(string='Credit amount', readonly=True, digits=0)
    debit = fields.Float(string='Debit amount', readonly=True, digits=0)
    writeoff = fields.Float(string='Write-Off amount', readonly=True, digits=0)
    company_id = fields.Many2one('res.company', string='Company', required=True, default=lambda self: self.env.user.company_id)

    @api.model
    def default_get(self, fields):
        # Seed the wizard with the aggregate amounts of the selected lines.
        res = super(AccountMoveLineReconcile, self).default_get(fields)
        data = self.trans_rec_get()
        for field_name in ('trans_nbr', 'credit', 'debit', 'writeoff'):
            if field_name in fields:
                res[field_name] = data[field_name]
        return res

    @api.multi
    def trans_rec_get(self):
        """Aggregate the selected move lines: count, total debit/credit and
        the resulting write-off amount, rounded to the company currency."""
        context = self._context or {}
        lines = self.env['account.move.line'].browse(context.get('active_ids', []))
        credit = debit = 0
        # Only lines not yet reconciled contribute to the totals.
        for line in lines:
            if not line.reconciled:
                credit += line.credit
                debit += line.debit
        precision = self.env.user.company_id.currency_id.decimal_places
        return {
            'trans_nbr': len(lines),
            'credit': float_round(credit, precision_digits=precision),
            'debit': float_round(debit, precision_digits=precision),
            'writeoff': float_round(debit - credit, precision_digits=precision),
        }

    @api.multi
    def trans_rec_addendum_writeoff(self):
        """Open the write-off wizard (addendum flavor)."""
        return self.env['account.move.line.reconcile.writeoff'].trans_rec_addendum()

    @api.multi
    def trans_rec_reconcile_partial_reconcile(self):
        """Open the write-off wizard (partial reconcile flavor)."""
        return self.env['account.move.line.reconcile.writeoff'].trans_rec_reconcile_partial()

    @api.multi
    def trans_rec_reconcile_full(self):
        """Fully reconcile the selected move lines and close the wizard."""
        lines = self.env['account.move.line'].browse(self._context.get('active_ids', []))
        lines.reconcile()
        return {'type': 'ir.actions.act_window_close'}
class AccountMoveLineReconcileWriteoff(models.TransientModel):
    """
    It opens the write off wizard form, in that user can define the journal, account, analytic account for reconcile
    """
    _name = 'account.move.line.reconcile.writeoff'
    _description = 'Account move line reconcile (writeoff)'

    journal_id = fields.Many2one('account.journal', string='Write-Off Journal', required=True)
    writeoff_acc_id = fields.Many2one('account.account', string='Write-Off account', required=True, domain=[('deprecated', '=', False)])
    date_p = fields.Date(string='Date', default=fields.Date.context_today)
    comment = fields.Char(required=True, default='Write-off')
    analytic_id = fields.Many2one('account.analytic.account', string='Analytic Account', domain=[('account_type', '=', 'normal')])

    @api.multi
    def trans_rec_addendum(self):
        """Open this wizard as a modal dialog to collect write-off details."""
        view = self.env.ref('account.account_move_line_reconcile_writeoff')
        return {
            'name': _('Reconcile Writeoff'),
            'context': self._context,
            'view_type': 'form',
            'view_mode': 'form',
            'res_model': 'account.move.line.reconcile.writeoff',
            'views': [(view.id, 'form')],
            'type': 'ir.actions.act_window',
            'target': 'new',
        }

    @api.multi
    def trans_rec_reconcile_partial(self):
        # Reconcile the selected lines as far as possible; no write-off move.
        context = self._context or {}
        self.env['account.move.line'].browse(context.get('active_ids', [])).reconcile()
        return {'type': 'ir.actions.act_window_close'}

    @api.multi
    def trans_rec_reconcile(self):
        """Reconcile the selected lines, posting the residual amount as a
        write-off on the chosen account and journal."""
        context = dict(self._context or {})
        # Pass the wizard inputs down to reconcile() through the context.
        context['date_p'] = self.date_p
        context['comment'] = self.comment
        if self.analytic_id:
            context['analytic_id'] = self.analytic_id.id
        move_lines = self.env['account.move.line'].browse(self._context.get('active_ids', []))
        move_lines.with_context(context).reconcile(self.writeoff_acc_id, self.journal_id)
        return {'type': 'ir.actions.act_window_close'}
| gpl-3.0 |
keishi/chromium | chrome/test/functional/prefetch.py | 79 | 4170 | #!/usr/bin/env python
# Copyright (c) 2011 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""This functional test spawns a web server, and runs chrome to point
at that web server.
The content served contains prefetch requests, and the tests assert that the
webserver logs reflect that.
Run like any functional test:
$ python chrome/test/functional/prefetch.py
in a repo with a built pyautolib
The import of multiprocessing implies python 2.6 is required
"""
import os
import time
import multiprocessing
import Queue
import string
from BaseHTTPServer import BaseHTTPRequestHandler, HTTPServer
import pyauto_functional # Must be imported before pyauto
import pyauto
# this class handles IPC retrieving server "logs" from our integral
# server. Each test should clear() the log, and then run asserts on
# the retrieval list.
# at startup, the server puts an int in the queue which is its port,
# we store that for subsequent tests
class ServerLog:
    """Query interface over the web server's request log.

    The server process pushes its listening port (an int) and every
    requested path (a string) onto a shared queue; this class drains
    that queue on demand and answers questions about what was fetched.
    """

    def __init__(self, queue):
        self.clear()
        self.port = None
        self.queue = queue

    def clear(self):
        """Forget all recorded retrievals (the known port is kept)."""
        self.log = {}

    def _readQueue(self):
        """Drain pending queue items: ints set the port, strings are paths."""
        while True:
            try:
                item = self.queue.get(False)
            except Queue.Empty:
                break
            if isinstance(item, int):
                self.port = item
            else:
                self.log[item] = True

    def getPort(self):
        """Return the server's port, draining the queue if not yet known."""
        if not self.port:
            self._readQueue()
        return self.port

    def isRetrieved(self, path):
        """Return True if *path* has been requested, else None."""
        self._readQueue()
        return self.log.get(path)
#
# The next few classes run a simple web server that returns log information
# via a multiprocessing.Queue.
#
class AbstractPrefetchServerHandler(BaseHTTPRequestHandler):
    # Canned responses keyed by path (without the leading '/'), as
    # (status code, body) pairs. prefetch-origin.html carries one static
    # <link rel="prefetch"> in its <head> and injects a second one
    # dynamically from its onload handler; both targets are empty pages.
    # Subclasses must provide a ``queue`` class attribute (see
    # run_web_server) where each requested path is recorded.
    content = {
        "prefetch-origin.html":
        (200, """<html><head>
<link rel="prefetch" href="static-prefetch-target.html">
<script type="text/javascript">
function changeParagraph()
{
var newPara = document.createElement("p");
newPara.innerHTML =
"<link rel=\\"prefetch\\" href=\\"dynamic-prefetch-target.html\\">" +
"<p>This paragraph contains a dynamic link prefetch. " +
"The target of this prefetch is " +
"<a href=\\"dynamic-prefetch-target.html\\">this document.</a>";
var para = document.getElementById("p1");
document.body.insertBefore(newPara,para);
}
</script>
</head>
<body onload="changeParagraph()">
<p id="p1">This is a document that contains a link prefetch. The target of
that prefetch is <a href="static-prefetch-target.html">this document.</a>
</body>"""),
        "static-prefetch-target.html":
        (200, "<html><head></head><body>empty</body>"),
        "dynamic-prefetch-target.html":
        (200, "<html><head></head><body>empty</body>")}

    def do_GET(self):
        # Record every request (prefetches included) so the test can
        # assert on what was actually fetched.
        self.queue.put(self.path[1:])
        try:
            response_code, response = self.content[self.path[1:]]
            self.send_response(response_code)
            self.end_headers()
            self.wfile.write(response)
        except KeyError:
            # Unknown path: still logged above, answered with 404.
            self.send_response(404)
            self.end_headers()
def run_web_server(queue_arg):
    """Run the prefetch test HTTP server forever, logging to *queue_arg*.

    Intended as the target of a multiprocessing.Process. The OS-chosen
    port is pushed onto the queue first so the parent can find the server.
    """
    # Bind the handler to this run's queue via a subclass attribute.
    class PrefetchServerHandler(AbstractPrefetchServerHandler):
        queue = queue_arg
    # Port 0 lets the OS pick any free port.
    server = HTTPServer(('', 0), PrefetchServerHandler)
    # Bug fix: report through the argument, not the module-level ``queue``.
    # Under the multiprocessing 'spawn' start method the child re-imports
    # this module, so the global would be a fresh, unrelated queue object
    # and the parent would never learn the port.
    queue_arg.put(server.server_port)
    server.serve_forever()
#
# Here's the test itself
#
# Shared queue: the server child process logs into it, the test reads it.
queue = multiprocessing.Queue()
server_log = ServerLog(queue)


class PrefetchTest(pyauto.PyUITest):
    """Testcase for Prefetching"""

    def testBasic(self):
        server_log.clear()
        url = "http://localhost:%d/prefetch-origin.html" % server_log.getPort()
        self.NavigateToURL(url)
        # The origin page itself must have been fetched...
        self.assertEqual(True, server_log.isRetrieved("prefetch-origin.html"))
        time.sleep(0.1)  # required since prefetches occur after onload
        # ...as well as both prefetch targets (static and dynamically injected).
        self.assertEqual(True, server_log.isRetrieved(
            "static-prefetch-target.html"))
        self.assertEqual(True, server_log.isRetrieved(
            "dynamic-prefetch-target.html"))


if __name__ == '__main__':
    # Start the web server in a daemon child so it dies with the test.
    web_server = multiprocessing.Process(target=run_web_server, args=(queue,))
    web_server.daemon = True
    web_server.start()
    pyauto_functional.Main()
| bsd-3-clause |
KontorConsulting/odoo | addons/mrp/stock.py | 166 | 19075 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import time
from openerp.osv import fields
from openerp.osv import osv
from openerp.tools.translate import _
from openerp import SUPERUSER_ID
from openerp.tools import DEFAULT_SERVER_DATETIME_FORMAT, float_compare
class StockMove(osv.osv):
    """MRP extensions to stock moves: phantom-BoM explosion, consumption
    and scrapping against production orders, and workflow signalling."""
    _inherit = 'stock.move'

    _columns = {
        # Production order that *produces* this move's product.
        'production_id': fields.many2one('mrp.production', 'Production Order for Produced Products', select=True, copy=False),
        # Production order that *consumes* this move as raw material.
        'raw_material_production_id': fields.many2one('mrp.production', 'Production Order for Raw Materials', select=True),
        'consumed_for': fields.many2one('stock.move', 'Consumed for', help='Technical field used to make the traceability of produced products'),
    }

    def check_tracking(self, cr, uid, move, lot_id, context=None):
        # On top of the generic stock checks, require a serial number on
        # moves entering or leaving a production location when the product
        # is tracked in production.
        super(StockMove, self).check_tracking(cr, uid, move, lot_id, context=context)
        if move.product_id.track_production and (move.location_id.usage == 'production' or move.location_dest_id.usage == 'production') and not lot_id:
            raise osv.except_osv(_('Warning!'), _('You must assign a serial number for the product %s') % (move.product_id.name))
        if move.raw_material_production_id and move.location_dest_id.usage == 'production' and move.raw_material_production_id.product_id.track_production and not move.consumed_for:
            raise osv.except_osv(_('Warning!'), _("Because the product %s requires it, you must assign a serial number to your raw material %s to proceed further in your production. Please use the 'Produce' button to do so.") % (move.raw_material_production_id.product_id.name, move.product_id.name))

    # TODO master: remove me, no longer used
    def _check_phantom_bom(self, cr, uid, move, context=None):
        """check if product associated to move has a phantom bom
            return list of ids of mrp.bom for that product """
        user_company = self.pool.get('res.users').browse(cr, uid, uid, context=context).company_id.id
        #doing the search as SUPERUSER because a user with the permission to write on a stock move should be able to explode it
        #without giving him the right to read the boms.
        domain = [
            '|', ('product_id', '=', move.product_id.id),
            '&', ('product_id', '=', False), ('product_tmpl_id.product_variant_ids', '=', move.product_id.id),
            ('type', '=', 'phantom'),
            '|', ('date_start', '=', False), ('date_start', '<=', time.strftime(DEFAULT_SERVER_DATETIME_FORMAT)),
            '|', ('date_stop', '=', False), ('date_stop', '>=', time.strftime(DEFAULT_SERVER_DATETIME_FORMAT)),
            ('company_id', '=', user_company)]
        return self.pool.get('mrp.bom').search(cr, SUPERUSER_ID, domain, context=context)

    def _action_explode(self, cr, uid, move, context=None):
        """ Explodes pickings.
        @param move: Stock moves
        @return: list of ids of the moves that replace *move* (or
            ``[move.id]`` when the product has no phantom BoM)

        If the product has a phantom (kit) BoM, the move is replaced by one
        draft move per stockable component (service components become
        procurements instead), the replacements are exploded recursively,
        and the original move is deleted.
        """
        if context is None:
            context = {}
        bom_obj = self.pool.get('mrp.bom')
        move_obj = self.pool.get('stock.move')
        prod_obj = self.pool.get("product.product")
        proc_obj = self.pool.get("procurement.order")
        uom_obj = self.pool.get("product.uom")
        to_explode_again_ids = []
        property_ids = context.get('property_ids') or []
        # BoM lookup as SUPERUSER: exploding a move must not require BoM
        # read rights.
        bis = bom_obj._bom_find(cr, SUPERUSER_ID, product_id=move.product_id.id, properties=property_ids)
        bom_point = bom_obj.browse(cr, SUPERUSER_ID, bis, context=context)
        if bis and bom_point.type == 'phantom':
            processed_ids = []
            # Scale the BoM quantities to this move's quantity (in BoM UoM).
            factor = uom_obj._compute_qty(cr, SUPERUSER_ID, move.product_uom.id, move.product_uom_qty, bom_point.product_uom.id) / bom_point.product_qty
            res = bom_obj._bom_explode(cr, SUPERUSER_ID, bom_point, move.product_id, factor, property_ids, context=context)
            for line in res[0]:
                product = prod_obj.browse(cr, uid, line['product_id'], context=context)
                if product.type != 'service':
                    valdef = {
                        'picking_id': move.picking_id.id if move.picking_id else False,
                        'product_id': line['product_id'],
                        'product_uom': line['product_uom'],
                        'product_uom_qty': line['product_qty'],
                        'product_uos': line['product_uos'],
                        'product_uos_qty': line['product_uos_qty'],
                        'state': 'draft',  #will be confirmed below
                        'name': line['name'],
                        'procurement_id': move.procurement_id.id,
                        'split_from': move.id,  #Needed in order to keep sale connection, but will be removed by unlink
                    }
                    mid = move_obj.copy(cr, uid, move.id, default=valdef, context=context)
                    to_explode_again_ids.append(mid)
                else:
                    # Services cannot be moved; raise a procurement instead.
                    if prod_obj.need_procurement(cr, uid, [product.id], context=context):
                        valdef = {
                            'name': move.rule_id and move.rule_id.name or "/",
                            'origin': move.origin,
                            'company_id': move.company_id and move.company_id.id or False,
                            'date_planned': move.date,
                            'product_id': line['product_id'],
                            'product_qty': line['product_qty'],
                            'product_uom': line['product_uom'],
                            'product_uos_qty': line['product_uos_qty'],
                            'product_uos': line['product_uos'],
                            'group_id': move.group_id.id,
                            'priority': move.priority,
                            'partner_dest_id': move.partner_id.id,
                        }
                        if move.procurement_id:
                            proc = proc_obj.copy(cr, uid, move.procurement_id.id, default=valdef, context=context)
                        else:
                            proc = proc_obj.create(cr, uid, valdef, context=context)
                        proc_obj.run(cr, uid, [proc], context=context)  #could be omitted
            #check if new moves needs to be exploded
            if to_explode_again_ids:
                for new_move in self.browse(cr, uid, to_explode_again_ids, context=context):
                    processed_ids.extend(self._action_explode(cr, uid, new_move, context=context))
            if not move.split_from and move.procurement_id:
                # Check if procurements have been made to wait for
                moves = move.procurement_id.move_ids
                if len(moves) == 1:
                    proc_obj.write(cr, uid, [move.procurement_id.id], {'state': 'done'}, context=context)
            if processed_ids and move.state == 'assigned':
                # Set the state of resulting moves according to 'assigned' as the original move is assigned
                move_obj.write(cr, uid, list(set(processed_ids) - set([move.id])), {'state': 'assigned'}, context=context)
            #delete the move with original product which is not relevant anymore
            move_obj.unlink(cr, SUPERUSER_ID, [move.id], context=context)
            #return list of newly created move
            return processed_ids
        return [move.id]

    def action_confirm(self, cr, uid, ids, context=None):
        # Explode phantom-BoM moves before confirming; confirmation then
        # runs on the (possibly replaced) set of move ids.
        move_ids = []
        for move in self.browse(cr, uid, ids, context=context):
            #in order to explode a move, we must have a picking_type_id on that move because otherwise the move
            #won't be assigned to a picking and it would be weird to explode a move into several if they aren't
            #all grouped in the same picking.
            if move.picking_type_id:
                move_ids.extend(self._action_explode(cr, uid, move, context=context))
            else:
                move_ids.append(move.id)
        #we go further with the list of ids potentially changed by action_explode
        return super(StockMove, self).action_confirm(cr, uid, move_ids, context=context)

    def action_consume(self, cr, uid, ids, product_qty, location_id=False, restrict_lot_id=False, restrict_partner_id=False,
                       consumed_for=False, context=None):
        """ Consumed product with specific quantity from specific source location.
        @param product_qty: Consumed/produced product quantity (= in quantity of UoM of product)
        @param location_id: Source location
        @param restrict_lot_id: optionnal parameter that allows to restrict the choice of quants on this specific lot
        @param restrict_partner_id: optionnal parameter that allows to restrict the choice of quants to this specific partner
        @param consumed_for: optionnal parameter given to this function to make the link between raw material consumed and produced product, for a better traceability
        @return: New lines created if not everything was consumed for this line
        """
        if context is None:
            context = {}
        res = []
        production_obj = self.pool.get('mrp.production')
        if product_qty <= 0:
            raise osv.except_osv(_('Warning!'), _('Please provide proper quantity.'))
        #because of the action_confirm that can create extra moves in case of phantom bom, we need to make 2 loops
        ids2 = []
        for move in self.browse(cr, uid, ids, context=context):
            if move.state == 'draft':
                ids2.extend(self.action_confirm(cr, uid, [move.id], context=context))
            else:
                ids2.append(move.id)
        prod_orders = set()
        for move in self.browse(cr, uid, ids2, context=context):
            prod_orders.add(move.raw_material_production_id.id or move.production_id.id)
            move_qty = move.product_qty
            if move_qty <= 0:
                raise osv.except_osv(_('Error!'), _('Cannot consume a move with negative or zero quantity.'))
            # Split off the unconsumed remainder into a new move.
            quantity_rest = move_qty - product_qty
            # Compare with numbers of move uom as we want to avoid a split with 0 qty
            quantity_rest_uom = move.product_uom_qty - self.pool.get("product.uom")._compute_qty_obj(cr, uid, move.product_id.uom_id, product_qty, move.product_uom)
            if float_compare(quantity_rest_uom, 0, precision_rounding=move.product_uom.rounding) != 0:
                new_mov = self.split(cr, uid, move, quantity_rest, context=context)
                if move.production_id:
                    self.write(cr, uid, [new_mov], {'production_id': move.production_id.id}, context=context)
                res.append(new_mov)
            vals = {'restrict_lot_id': restrict_lot_id,
                    'restrict_partner_id': restrict_partner_id,
                    'consumed_for': consumed_for}
            if location_id:
                vals.update({'location_id': location_id})
            self.write(cr, uid, [move.id], vals, context=context)
        # Original moves will be the quantities consumed, so they need to be done
        self.action_done(cr, uid, ids2, context=context)
        if res:
            self.action_assign(cr, uid, res, context=context)
        if prod_orders:
            # Nudge the affected production orders through their workflow.
            production_obj.signal_workflow(cr, uid, list(prod_orders), 'button_produce')
        return res

    def action_scrap(self, cr, uid, ids, product_qty, location_id, restrict_lot_id=False, restrict_partner_id=False, context=None):
        """ Move the scrap/damaged product into scrap location
        @param product_qty: Scraped product quantity
        @param location_id: Scrap location
        @return: Scraped lines
        """
        res = []
        production_obj = self.pool.get('mrp.production')
        for move in self.browse(cr, uid, ids, context=context):
            new_moves = super(StockMove, self).action_scrap(cr, uid, [move.id], product_qty, location_id,
                                                            restrict_lot_id=restrict_lot_id,
                                                            restrict_partner_id=restrict_partner_id, context=context)
            #If we are not scrapping our whole move, tracking and lot references must not be removed
            production_ids = production_obj.search(cr, uid, [('move_lines', 'in', [move.id])])
            for prod_id in production_ids:
                production_obj.signal_workflow(cr, uid, [prod_id], 'button_produce')
            if move.production_id.id:
                self.write(cr, uid, new_moves, {'production_id': move.production_id.id}, context=context)
            res.append(new_moves)
        return res

    def write(self, cr, uid, ids, vals, context=None):
        # When raw-material moves become 'assigned', check whether their
        # confirmed production orders are now ready and, if so, fire the
        # 'moves_ready' workflow transition.
        if isinstance(ids, (int, long)):
            ids = [ids]
        res = super(StockMove, self).write(cr, uid, ids, vals, context=context)
        from openerp import workflow
        if vals.get('state') == 'assigned':
            moves = self.browse(cr, uid, ids, context=context)
            orders = list(set([x.raw_material_production_id.id for x in moves if x.raw_material_production_id and x.raw_material_production_id.state == 'confirmed']))
            for order_id in orders:
                if self.pool.get('mrp.production').test_ready(cr, uid, [order_id]):
                    workflow.trg_validate(uid, 'mrp.production', order_id, 'moves_ready', cr)
        return res
class stock_warehouse(osv.osv):
    """Extend warehouses with an optional 'manufacture here' resupply route
    backed by a 'manufacture' procurement pull rule."""
    _inherit = 'stock.warehouse'

    _columns = {
        'manufacture_to_resupply': fields.boolean('Manufacture in this Warehouse',
                                                  help="When products are manufactured, they can be manufactured in this warehouse."),
        'manufacture_pull_id': fields.many2one('procurement.rule', 'Manufacture Rule'),
    }

    _defaults = {
        'manufacture_to_resupply': True,
    }

    def _get_manufacture_pull_rule(self, cr, uid, warehouse, context=None):
        """Return the create() values for this warehouse's manufacture pull rule."""
        route_obj = self.pool.get('stock.location.route')
        data_obj = self.pool.get('ir.model.data')
        try:
            manufacture_route_id = data_obj.get_object_reference(cr, uid, 'mrp', 'route_warehouse0_manufacture')[1]
        except Exception:
            # XML id missing (e.g. deleted by the user): fall back to a
            # name-based lookup before giving up.
            # (Narrowed from a bare ``except:`` which also swallowed
            # KeyboardInterrupt/SystemExit.)
            manufacture_route_id = route_obj.search(cr, uid, [('name', 'like', _('Manufacture'))], context=context)
            manufacture_route_id = manufacture_route_id and manufacture_route_id[0] or False
        if not manufacture_route_id:
            raise osv.except_osv(_('Error!'), _('Can\'t find any generic Manufacture route.'))

        return {
            'name': self._format_routename(cr, uid, warehouse, _(' Manufacture'), context=context),
            'location_id': warehouse.lot_stock_id.id,
            'route_id': manufacture_route_id,
            'action': 'manufacture',
            'picking_type_id': warehouse.int_type_id.id,
            'propagate': False,
            'warehouse_id': warehouse.id,
        }

    def create_routes(self, cr, uid, ids, warehouse, context=None):
        # Also create the manufacture pull rule when the flag is set.
        pull_obj = self.pool.get('procurement.rule')
        res = super(stock_warehouse, self).create_routes(cr, uid, ids, warehouse, context=context)
        if warehouse.manufacture_to_resupply:
            manufacture_pull_vals = self._get_manufacture_pull_rule(cr, uid, warehouse, context=context)
            manufacture_pull_id = pull_obj.create(cr, uid, manufacture_pull_vals, context=context)
            res['manufacture_pull_id'] = manufacture_pull_id
        return res

    def write(self, cr, uid, ids, vals, context=None):
        """Create/remove the manufacture pull rule when the flag is toggled."""
        pull_obj = self.pool.get('procurement.rule')
        if isinstance(ids, (int, long)):
            ids = [ids]
        if 'manufacture_to_resupply' in vals:
            if vals.get("manufacture_to_resupply"):
                for warehouse in self.browse(cr, uid, ids, context=context):
                    if not warehouse.manufacture_pull_id:
                        manufacture_pull_vals = self._get_manufacture_pull_rule(cr, uid, warehouse, context=context)
                        manufacture_pull_id = pull_obj.create(cr, uid, manufacture_pull_vals, context=context)
                        vals['manufacture_pull_id'] = manufacture_pull_id
            else:
                for warehouse in self.browse(cr, uid, ids, context=context):
                    if warehouse.manufacture_pull_id:
                        pull_obj.unlink(cr, uid, warehouse.manufacture_pull_id.id, context=context)
        # Bug fix: the original passed ``context=None`` to super().write(),
        # silently dropping the caller's context (language, company, ...).
        return super(stock_warehouse, self).write(cr, uid, ids, vals, context=context)

    def get_all_routes_for_wh(self, cr, uid, warehouse, context=None):
        # Add the manufacture route to the warehouse's route list.
        all_routes = super(stock_warehouse, self).get_all_routes_for_wh(cr, uid, warehouse, context=context)
        if warehouse.manufacture_to_resupply and warehouse.manufacture_pull_id and warehouse.manufacture_pull_id.route_id:
            all_routes += [warehouse.manufacture_pull_id.route_id.id]
        return all_routes

    def _handle_renaming(self, cr, uid, warehouse, name, code, context=None):
        res = super(stock_warehouse, self)._handle_renaming(cr, uid, warehouse, name, code, context=context)
        pull_obj = self.pool.get('procurement.rule')
        #change the manufacture pull rule name
        if warehouse.manufacture_pull_id:
            pull_obj.write(cr, uid, warehouse.manufacture_pull_id.id, {'name': warehouse.manufacture_pull_id.name.replace(warehouse.name, name, 1)}, context=context)
        return res

    def _get_all_products_to_resupply(self, cr, uid, warehouse, context=None):
        # Drop products that are already resupplied by this warehouse's own
        # manufacture route.
        res = super(stock_warehouse, self)._get_all_products_to_resupply(cr, uid, warehouse, context=context)
        if warehouse.manufacture_pull_id and warehouse.manufacture_pull_id.route_id:
            manufacture_route_id = warehouse.manufacture_pull_id.route_id.id
            # Bug fix: iterate over a copy — the original removed items from
            # ``res`` while iterating it, which skips the element following
            # each removal.
            for product_id in list(res):
                for route in self.pool.get('product.product').browse(cr, uid, product_id, context=context).route_ids:
                    if route.id == manufacture_route_id:
                        res.remove(product_id)
                        break
        return res
| agpl-3.0 |
ninuxorg/nodeshot | nodeshot/interop/oldimporter/db.py | 5 | 2135 | """
Django database routers are described here:
https://docs.djangoproject.com/en/dev/topics/db/multi-db/#using-routers
"""
class DefaultRouter(object):
    """Route every model except the legacy ``oldimporter`` app to the
    default (nodeshot2) database."""

    def _is_legacy(self, obj):
        # True when the model/object belongs to the old-nodeshot importer app.
        return obj._meta.app_label == 'oldimporter'

    def db_for_read(self, model, **hints):
        """Read non-legacy models from the default database."""
        return None if self._is_legacy(model) else 'default'

    def db_for_write(self, model, **hints):
        """Write non-legacy models to the default database."""
        return None if self._is_legacy(model) else 'default'

    def allow_relation(self, obj1, obj2, **hints):
        """Allow relations only between two non-legacy objects."""
        if self._is_legacy(obj1) or self._is_legacy(obj2):
            return None
        return True

    def allow_migrate(self, db, model):
        """Keep the oldimporter app out of every database except
        'old_nodeshot'; defer (None) in all legacy-related cases."""
        if db == 'old_nodeshot' or self._is_legacy(model):
            return None
        return True
class OldNodeshotRouter(object):
    """Route models of the legacy ``oldimporter`` app to the
    'old_nodeshot' database, and nothing else to it."""

    LEGACY_APP = 'oldimporter'
    LEGACY_DB = 'old_nodeshot'

    def _is_legacy(self, obj):
        # True when the model/object belongs to the old-nodeshot importer app.
        return obj._meta.app_label == self.LEGACY_APP

    def db_for_read(self, model, **hints):
        """Read legacy models from the old_nodeshot database."""
        if self._is_legacy(model):
            return self.LEGACY_DB
        return None

    def db_for_write(self, model, **hints):
        """Route legacy writes to the old_nodeshot database as well."""
        if self._is_legacy(model):
            return self.LEGACY_DB
        return None

    def allow_relation(self, obj1, obj2, **hints):
        """Allow relations only between two legacy objects."""
        if self._is_legacy(obj1) and self._is_legacy(obj2):
            return True
        return None

    def allow_migrate(self, db, model):
        """Only the oldimporter app may appear in the old_nodeshot
        database (a hard False everywhere else, unlike the other router)."""
        return db == self.LEGACY_DB and self._is_legacy(model)
| gpl-3.0 |
mkrautz/gyp-libmumble | test/generator-output/gyptest-relocate.py | 216 | 1670 | #!/usr/bin/env python
# Copyright (c) 2012 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""
Verifies that a project hierarchy created with the --generator-output=
option can be built even when it's relocated to a different path.
"""
import TestGyp
# Android doesn't support --generator-output.
test = TestGyp.TestGyp(formats=['!android'])

# src/ is made read-only to prove that generation with --generator-output
# writes nothing into the source tree.
test.writable(test.workpath('src'), False)

test.run_gyp('prog1.gyp',
             '-Dset_symroot=1',
             '--generator-output=' + test.workpath('gypfiles'),
             chdir='src')

test.writable(test.workpath('src'), True)

# Move both trees to a different path: the generated files must still build.
test.relocate('src', 'relocate/src')
test.relocate('gypfiles', 'relocate/gypfiles')

# Only the build output directories may be written during the build.
test.writable(test.workpath('relocate/src'), False)

test.writable(test.workpath('relocate/src/build'), True)
test.writable(test.workpath('relocate/src/subdir2/build'), True)
test.writable(test.workpath('relocate/src/subdir3/build'), True)

test.build('prog1.gyp', test.ALL, chdir='relocate/gypfiles')

chdir = 'relocate/gypfiles'

expect = """\
Hello from %s
Hello from inc.h
Hello from inc1/include1.h
Hello from inc2/include2.h
Hello from inc3/include3.h
Hello from subdir2/deeper/deeper.h
"""

# Xcode places built executables relative to the source directories instead.
if test.format == 'xcode':
  chdir = 'relocate/src'
test.run_built_executable('prog1', chdir=chdir, stdout=expect % 'prog1.c')

if test.format == 'xcode':
  chdir = 'relocate/src/subdir2'
test.run_built_executable('prog2', chdir=chdir, stdout=expect % 'prog2.c')

if test.format == 'xcode':
  chdir = 'relocate/src/subdir3'
test.run_built_executable('prog3', chdir=chdir, stdout=expect % 'prog3.c')

test.pass_test()
| bsd-3-clause |
Daphron/rsstory | rsstory/models.py | 2 | 1589 | from sqlalchemy import (
# BigInteger,
Column,
ForeignKey,
Integer,
Text,
UniqueConstraint,
)
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import (
scoped_session,
sessionmaker,
# relationship,
)
from zope.sqlalchemy import ZopeTransactionExtension
# Session factory and declarative base wired to the Zope transaction
# manager (Pyramid's pyramid_tm commits/aborts per request).
DBSession = scoped_session(sessionmaker(extension=ZopeTransactionExtension(keep_session=True)))
Base = declarative_base()


class Feed(Base):
    """An RSS feed generated from a story archive, one row per user+story."""
    __tablename__ = 'feeds'

    id = Column(Text, primary_key=True)
    name = Column(Text)
    archive_url = Column(Text, nullable=False)
    # Spacing between generated feed entries.
    # NOTE(review): assumed to be seconds — confirm against the feed generator.
    time_between_posts = Column(Integer, nullable=False)
    time_created = Column(Integer, nullable=False)
    user = Column(ForeignKey('users.id'))
    most_recent_page = Column(ForeignKey('pages.id'))
""" A Page represents one post on the website. There will be many of these
for each feed"""
class Page(Base):
__tablename__ = 'pages'
id = Column(Integer, primary_key=True, autoincrement=True)
archive_url = Column(Text)
name = Column(Text)
page_url = Column(Text, nullable=False)
description = Column(Text)
time_created = Column(Text, nullable=False)
user = Column(ForeignKey('users.id'))
__table_args__ = (UniqueConstraint('user', 'page_url'),)
class User(Base):
    """An account, identified by its Google OAuth id."""
    __tablename__ = 'users'

    id = Column(Integer, primary_key=True)
    name = Column(Text, nullable=False)
    google_id = Column(Text, unique=True)
    email = Column(Text)

##############
# Example usage kept from the original author:
# feed = Feed(name=title, data="TESTY1")
# DBSession.add(feed)
| agpl-3.0 |
scotthartbti/android_external_chromium_org | native_client_sdk/src/build_tools/buildbot_run.py | 26 | 2661 | #!/usr/bin/env python
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Main entry point for the NaCl SDK buildbot.
The entry point used to be build_sdk.py itself, but we want
to be able to simplify build_sdk (for example separating out
the test code into test_sdk) and change its default behaviour
while being able to separately control excactly what the bots
run.
"""
import buildbot_common
import os
import subprocess
import sys
from buildbot_common import Run
from build_paths import SRC_DIR, SDK_SRC_DIR, SCRIPT_DIR
import getos
def StepRunUnittests():
    """Run the SDK unit tests as a buildbot step."""
    buildbot_common.BuildStep('Run unittests')

    # Our tests shouldn't be using the proxy; they should all be connecting
    # to localhost. Some slaves can't route HTTP traffic through the proxy
    # to localhost (we get 504 gateway errors), so we clear it here.
    env = os.environ.copy()
    env.pop('http_proxy', None)
    Run([sys.executable, 'test_all.py'], env=env, cwd=SDK_SRC_DIR)
def StepBuildSDK():
    """Build the SDK via build_sdk.py, working around Windows path limits."""
    is_win = getos.GetPlatform() == 'win'

    # Windows has a path length limit of 255 characters, after joining cwd with a
    # relative path. Use subst before building to keep the path lengths short.
    if is_win:
        subst_drive = 'S:'
        root_dir = os.path.dirname(SRC_DIR)
        new_root_dir = subst_drive + '\\'
        subprocess.check_call(['subst', subst_drive, root_dir])
        new_script_dir = os.path.join(new_root_dir,
                                      os.path.relpath(SCRIPT_DIR, root_dir))
    else:
        new_script_dir = SCRIPT_DIR
    try:
        Run([sys.executable, 'build_sdk.py'], cwd=new_script_dir)
    finally:
        # Always drop the drive mapping, even if the build failed.
        if is_win:
            subprocess.check_call(['subst', '/D', subst_drive])
def StepTestSDK():
    """Run test_sdk.py, wrapped in xvfb-run on Linux."""
    prefix = []
    if getos.GetPlatform() == 'linux':
        # xvfb-run's startup time leaves something to be desired, so start it
        # only once around the whole suite. The X server also needs at least
        # 24 bits per pixel:
        # https://code.google.com/p/chromium/issues/detail?id=316687
        prefix = [
            'xvfb-run',
            '--auto-servernum',
            '--server-args', '-screen 0 1024x768x24',
        ]
    Run(prefix + [sys.executable, 'test_sdk.py'], cwd=SCRIPT_DIR)
def main():
    """Buildbot entry point: unit tests, SDK build, then (maybe) SDK tests."""
    StepRunUnittests()
    StepBuildSDK()
    # Skip the testing phase if we are running on a build-only bots.
    if not buildbot_common.IsBuildOnlyBot():
        StepTestSDK()

    return 0


if __name__ == '__main__':
    try:
        sys.exit(main())
    except KeyboardInterrupt:
        buildbot_common.ErrorExit('buildbot_run: interrupted')
| bsd-3-clause |
harterj/moose | python/peacock/tests/utils/test_FileCache.py | 15 | 1899 | #!/usr/bin/env python3
#* This file is part of the MOOSE framework
#* https://www.mooseframework.org
#*
#* All rights reserved, see COPYRIGHT for full restrictions
#* https://github.com/idaholab/moose/blob/master/COPYRIGHT
#*
#* Licensed under LGPL 2.1, please see LICENSE for details
#* https://www.gnu.org/licenses/lgpl-2.1.html
import unittest
from peacock.utils.FileCache import FileCache
from peacock.utils import Testing
from PyQt5 import QtWidgets
class Tests(Testing.PeacockTester):
    """Exercise FileCache add/read/version-invalidation behaviour."""
    qapp = QtWidgets.QApplication([])

    def checkFileCache(self, fc, dirty=True, val=None, path_data=None):
        """Assert the cache's state; None defaults mean "expect an empty dict".

        The original signature used mutable default arguments (val={},
        path_data={}), a classic Python pitfall; behaviour is unchanged
        because the defaults were only ever compared, never mutated.
        """
        self.assertEqual(fc.dirty, dirty)
        self.assertEqual(fc.val, {} if val is None else val)
        self.assertEqual(fc.path_data, {} if path_data is None else path_data)

    @unittest.skip("Needs update for Python 3")
    def testBasic(self):
        key = "test_FileCache"
        obj = {"foo": "bar"}
        FileCache.clearAll(key)

        # A cache keyed on a non-existent file stays dirty and rejects writes.
        fc = FileCache(key, "/no_exist", 1)
        self.checkFileCache(fc)
        self.assertEqual(fc.no_exist, True)
        ret = fc.add(obj)
        self.assertEqual(ret, False)
        self.assertEqual(fc.dirty, True)

        # A cache keyed on a real executable accepts exactly one write...
        exe_path = Testing.find_moose_test_exe()
        fc = FileCache(key, exe_path, 1)
        self.checkFileCache(fc)
        val = fc.read()
        self.assertEqual(val, None)
        ret = fc.add(obj)
        self.assertEqual(ret, True)
        self.assertEqual(fc.dirty, False)
        # ...and adding the same object again is a clean no-op.
        ret = fc.add(obj)
        self.assertEqual(ret, False)
        self.assertEqual(fc.dirty, False)
        val = fc.read()
        self.assertEqual(val, obj)
        self.assertEqual(fc.dirty, False)

        # A fresh cache with the same data version starts clean.
        fc = FileCache(key, exe_path, 1)
        self.assertEqual(fc.dirty, False)
        # different data version
        fc = FileCache(key, exe_path, 2)
        self.assertEqual(fc.dirty, True)
        FileCache.clearAll(key)
if __name__ == '__main__':
    # Allow running this test module directly, outside the peacock test runner.
    Testing.run_tests()
| lgpl-2.1 |
MyRobotLab/pyrobotlab | service/OledSsd1306.py | 2 | 14828 | # config
port = "COM3"
# Code to be able to use this script with virtalArduino
if ('virtual' in globals() and virtual):
virtualArduino = Runtime.start("virtualArduino", "VirtualArduino")
virtualArduino.connect(port)
#
# Initiate the Arduino
arduino = Runtime.start("Arduino","Arduino")
arduino.connect(port)
# Alternativley you can use the RasPi service to connect the OLED to the GPIO pins
# raspi = Runtime.createAndStart("RasPi","RasPi")
# Select the Arduino as controller for the OLED on bus 1 and i2c address 0x3C
oled = Runtime.start("OLED","OledSsd1306")
# From version 1.0.2316 use attach instead of setController
# oled.setController(arduino,"1","0x3C")
oled.attach(arduino,"1","0x3C")
# Alternative if you use the RasPi
# oled.attach(raspi,"1","0x3C")
# Demo to show how to write images
# Images arrays are created using the LCDAssistance as described here:
# https://learn.adafruit.com/monochrome-oled-breakouts/arduino-library-and-examples
# and downloaded from here:
# http://en.radzio.dxp.pl/bitmap_converter/
#
oled.begin(oled.SSD1306_SWITCHCAPVCC)
oled.display()
oled.clearDisplay()
sleep(5)
oled.startscrollright(0,31)
sleep(10)
oled.stopscroll()
mrlimage = [0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x80, 0x80, 0xC0, 0xC0, 0xC0, 0xE0, 0xE0, 0xE0, 0xF0, 0xF0,
0xF0, 0xF8, 0xF8, 0xF8, 0x18, 0x18, 0xEC, 0xEC, 0xEC, 0xEC, 0xEC, 0xEC, 0xEC, 0xEC, 0xEC, 0xEC,
0xEC, 0xEC, 0xF8, 0x18, 0xF8, 0xF8, 0xF8, 0xF0, 0xF0, 0xF0, 0xE0, 0xE0, 0xE0, 0xC0, 0xC0, 0x80,
0x80, 0x80, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x80, 0xC0, 0xE0, 0xE0, 0xF0, 0xF8,
0xF8, 0xFC, 0xFC, 0xFE, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
0xFF, 0xFF, 0xFF, 0xFF, 0x00, 0x80, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
0xFF, 0xFF, 0xFF, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
0xFF, 0xFF, 0xFF, 0xFE, 0xFE, 0xFC, 0xFC, 0xF8, 0xF0, 0xF0, 0xE0, 0xC0, 0x80, 0x80, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x80, 0xC0, 0xE0, 0xE0, 0xF0, 0xF8, 0xFC, 0xFE, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
0x3F, 0x0F, 0x63, 0x79, 0x7E, 0x7F, 0x7F, 0x7F, 0x7F, 0x7F, 0x7F, 0x7F, 0x7F, 0x7F, 0x7F, 0x7F,
0x7F, 0x7F, 0x7F, 0x7F, 0x7C, 0x71, 0x67, 0x1F, 0x7F, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFE,
0xFC, 0xF8, 0xF0, 0xE0, 0xC0, 0x80, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x80, 0xC0, 0xE0, 0xF8, 0xFC, 0xFE, 0xFF,
0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0x3F, 0x0F, 0x03, 0x00,
0x00, 0x00, 0x80, 0x00, 0x00, 0x00, 0x00, 0xFC, 0xFC, 0xFE, 0xFE, 0xFE, 0xFE, 0xFF, 0xFE, 0xFE,
0x7E, 0x7E, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x03, 0x0F, 0x3F, 0xFF, 0xFF, 0xFF,
0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFE, 0xFC, 0xF8, 0xF0, 0xE0, 0xC0, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0xC0, 0xE0, 0xF8, 0xFC, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0x3F, 0x0F, 0x03, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x77, 0x7F, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
0xFF, 0xFF, 0xFF, 0xFF, 0x7F, 0xFF, 0xFE, 0xF0, 0xE0, 0xE0, 0x00, 0x00, 0x00, 0x00, 0x03, 0x07,
0x1F, 0x7F, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFE, 0xF8,
0xF0, 0xC0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0xFE, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
0xFF, 0xFF, 0xFF, 0xFF, 0x1F, 0x07, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x80, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xC0, 0xFF, 0xFF, 0xFF,
0xFF, 0xFF, 0xFF, 0x80, 0x80, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x01, 0x07, 0x1F, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
0xFF, 0xFF, 0xFF, 0xE0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x03, 0x0F, 0x1F, 0x3F, 0x7F, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
0xFF, 0xFF, 0x7F, 0x7F, 0x7F, 0xFE, 0x7C, 0x78, 0x78, 0x70, 0x70, 0x70, 0xF0, 0xF0, 0x70, 0xF0,
0xF0, 0x70, 0x70, 0x70, 0xF1, 0xF3, 0xF3, 0xF3, 0x73, 0x63, 0xF3, 0xF3, 0x73, 0x73, 0x73, 0xF3,
0xF3, 0xF3, 0x73, 0x73, 0xF3, 0x70, 0x70, 0x70, 0x70, 0x70, 0xF0, 0xF0, 0x70, 0x70, 0xF0, 0xF0,
0xF8, 0xF8, 0x7C, 0x7C, 0xFE, 0xFF, 0x7F, 0x7F, 0x7F, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0x7F,
0x3F, 0x1F, 0x07, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x01, 0x03, 0x03, 0x07, 0x07,
0x07, 0x0F, 0x0F, 0x0F, 0x1F, 0x1F, 0x1F, 0x1F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F,
0x3F, 0x3F, 0x30, 0x30, 0x38, 0x30, 0x30, 0x30, 0x30, 0x30, 0x3E, 0x30, 0x30, 0x30, 0x3C, 0x3F,
0x30, 0x30, 0x30, 0x30, 0x30, 0x32, 0x30, 0x30, 0x30, 0x30, 0x30, 0x30, 0x30, 0x30, 0x30, 0x32,
0x30, 0x30, 0x30, 0x30, 0x30, 0x3E, 0x3E, 0x30, 0x30, 0x3E, 0x3F, 0x3F, 0x30, 0x30, 0x37, 0x37,
0x37, 0x30, 0x30, 0x30, 0x30, 0x37, 0x30, 0x30, 0x30, 0x30, 0x36, 0x3F, 0x3F, 0x3F, 0x3F, 0x3F,
0x1F, 0x1F, 0x1F, 0x1F, 0x0F, 0x0F, 0x0F, 0x07, 0x07, 0x07, 0x03, 0x03, 0x01, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00]
eyeimage = [0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x80, 0x80, 0x80, 0x80, 0x80, 0x80, 0x80, 0x80, 0x80, 0x80, 0x80, 0xC0, 0xC0, 0xC0, 0xC0, 0xC0,
0xC0, 0xC0, 0xC0, 0xC0, 0xC0, 0x80, 0x80, 0x80, 0x80, 0x80, 0x80, 0x80, 0x80, 0x80, 0x80, 0x80,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x80, 0x80, 0x80, 0xC0, 0xC0, 0xE0, 0xE0, 0xE0, 0xE0, 0xF0, 0xF0, 0xF0, 0xF8, 0xF8, 0xF8,
0xF8, 0xFC, 0xFC, 0x7C, 0x7C, 0x7E, 0x7E, 0x3E, 0x3E, 0x3E, 0xBF, 0x9F, 0xDF, 0x5F, 0x7F, 0x7F,
0x7F, 0x3F, 0x3F, 0x3F, 0x3F, 0x7F, 0x7F, 0x7F, 0x7F, 0x7F, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
0xFF, 0xFF, 0xDF, 0xDF, 0x9F, 0xBF, 0x3E, 0x3E, 0x3E, 0x7E, 0x7E, 0x7C, 0x7C, 0xFC, 0xFC, 0xF8,
0xF8, 0xF8, 0xF8, 0xF0, 0xF0, 0xF0, 0xE0, 0xE0, 0xE0, 0xE0, 0xC0, 0xC0, 0x80, 0x80, 0x80, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x80, 0xC0, 0xC0, 0xE0, 0xE0, 0xF0, 0xF0, 0xF8, 0xF8, 0xF8, 0xFC, 0xFC, 0xFE, 0x7E, 0x7F, 0x7F,
0x3F, 0x3F, 0x1F, 0x1F, 0x1F, 0x0F, 0x0F, 0x07, 0x07, 0x07, 0x03, 0x03, 0x03, 0x01, 0x01, 0x01,
0x00, 0x00, 0x00, 0xE0, 0xF0, 0xFC, 0xFC, 0xFE, 0xFF, 0xE3, 0xC1, 0x80, 0x80, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x80, 0x81, 0xC1, 0xF7, 0xFF, 0xFF,
0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFE, 0xFC, 0xFC, 0xF0, 0xE0, 0x00, 0x00, 0x00,
0x01, 0x01, 0x01, 0x03, 0x03, 0x03, 0x07, 0x07, 0x07, 0x0F, 0x0F, 0x1F, 0x1F, 0x1F, 0x3F, 0x3F,
0x7F, 0x7F, 0x7E, 0xFE, 0xFC, 0xFC, 0xF8, 0xF8, 0xF8, 0xF0, 0xF0, 0xE0, 0xE0, 0xC0, 0xC0, 0x80,
0x01, 0x03, 0x03, 0x07, 0x07, 0x0F, 0x0F, 0x1F, 0x1F, 0x1F, 0x3F, 0x3F, 0x7F, 0x7E, 0xFE, 0xFE,
0xFC, 0xFC, 0xF8, 0xF8, 0xF8, 0xF0, 0xF0, 0xE0, 0xE0, 0xE0, 0xC0, 0xC0, 0xC0, 0x80, 0x80, 0x80,
0x00, 0x00, 0x00, 0x07, 0x0F, 0x3F, 0x3F, 0x7F, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
0xFF, 0xFE, 0xFE, 0xFE, 0xFE, 0xFE, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0x7F, 0x3F, 0x3F, 0x0F, 0x07, 0x00, 0x00, 0x00,
0x80, 0x80, 0x80, 0xC0, 0xC0, 0xC0, 0xE0, 0xE0, 0xE0, 0xF0, 0xF0, 0xF0, 0xF8, 0xF8, 0xFC, 0xFC,
0xFE, 0xFE, 0x7E, 0x7F, 0x3F, 0x3F, 0x3F, 0x1F, 0x1F, 0x0F, 0x0F, 0x07, 0x07, 0x03, 0x03, 0x01,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x01, 0x01, 0x01, 0x03, 0x03, 0x07, 0x07, 0x07, 0x07, 0x0F, 0x0F, 0x0F, 0x1F, 0x1F, 0x1F,
0x1F, 0x3F, 0x3F, 0x3E, 0x3E, 0x7E, 0x7E, 0x7C, 0x7C, 0x7C, 0xFD, 0xF9, 0xFB, 0xFB, 0xFF, 0xFF,
0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
0xFF, 0xFF, 0xFB, 0xFB, 0xF9, 0xFD, 0x7C, 0x7C, 0x7C, 0x7E, 0x7E, 0x3E, 0x3E, 0x3F, 0x3F, 0x1F,
0x1F, 0x1F, 0x1F, 0x0F, 0x0F, 0x0F, 0x07, 0x07, 0x07, 0x07, 0x03, 0x03, 0x01, 0x01, 0x01, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x03, 0x03, 0x03, 0x03, 0x03,
0x03, 0x03, 0x03, 0x03, 0x03, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00]
# 16x16 1-bpp test tile: every byte is 0xAA (alternating bits), which renders
# as a striped/checkered square.
logo16_solid_glcd_bmp = [ 0xAA, 0xAA, 0xAA, 0xAA, 0xAA, 0xAA, 0xAA, 0xAA, 0xAA, 0xAA, 0xAA, 0xAA, 0xAA, 0xAA, 0xAA, 0xAA,
0xAA, 0xAA, 0xAA, 0xAA, 0xAA, 0xAA, 0xAA, 0xAA, 0xAA, 0xAA, 0xAA, 0xAA, 0xAA, 0xAA, 0xAA, 0xAA ]
# Show the two full-screen bitmaps defined above, one after the other.
oled.clearDisplay()
oled.display(mrlimage)
sleep(5)
oled.clearDisplay()
oled.display(eyeimage)
sleep(2)
# Text demo: drawString(text, x, y, color); y advances in 8-pixel rows.
oled.clearDisplay()
oled.drawString("Now upgraded so that",0,0,1)
oled.drawString("you also can display",0,8,1)
oled.drawString("text very easy",0,16,1)
oled.drawString("MyRobotLab",28,46,1)
oled.display()
sleep(1)
# Inverted banner: fill the screen, then draw centered text on top.
oled.fillDisplay()
oled.drawStringCentered("MyRobotLab",16,0)
oled.display()
sleep(2)
def drawSadEyes():
    """Paint a pattern of 16x16 striped tiles and push it to the display."""
    # (x, y) of each tile: one on the top row, three across the middle,
    # three across the bottom.
    tile_positions = (
        (42, 0),
        (42, 20), (62, 20), (82, 20),
        (42, 40), (62, 40), (82, 40),
    )
    oled.clearDisplay()
    for x, y in tile_positions:
        oled.drawBitmap(x, y, logo16_solid_glcd_bmp, 16, 16, 1)
    oled.display()

drawSadEyes()
| apache-2.0 |
jhurt/ReportLab | tests/test_pdfgen_overprint.py | 5 | 3194 | #Copyright ReportLab Europe Ltd. 2000-2008
#see license.txt for license details
# full screen test
"""Tests for overprint/knockout.
This has been placed in a separate file so output can be passed to printers
"""
__version__='''$Id: test_pdfgen_overprint.py 3757 2010-08-19 12:08:11Z damian $'''
from reportlab.lib.testutils import setOutDir,makeSuiteForClasses, outputfile, printLocation
setOutDir(__name__)
import unittest
class OverprintTestCase(unittest.TestCase):
    "Testing overprint/knockout."

    def test0(self):
        "Draw an overprinting and a knockout rectangle pair and save the PDF."
        import os
        from reportlab.pdfgen.canvas import Canvas
        from reportlab.lib.colors import PCMYKColor, PCMYKColorSep

        filename = 'test_pdfgen_overprint.pdf'
        desc = "Overprint/knockout tests for ReportLab library"
        black = PCMYKColor(0,0,0,100)
        # Separated spot colours: each renders on its own printing plate.
        cyan = PCMYKColorSep(100,0,0,0,spotName='myCyan')
        magenta = PCMYKColorSep(0,100,0,0,spotName='myMagenta')

        c = Canvas(filename)
        c.setFillColor(black)
        c.setFont('Helvetica', 20)
        c.drawString(100, 700, desc)
        c.setFont('Helvetica', 10)
        # Explanatory text rendered on the page itself.
        c.drawString(100, 670, "To view is page properly you probably need to enable 'overprint preview' in Acrobat Reader")
        c.drawString(100, 658, "or use a tool like Illustrator, Quark or Acrobat to view separated plates. Starting in")
        c.drawString(100, 646, "Acrobat Reader 9 there is a setting that lets you turn on the overprint preview, although")
        c.drawString(100, 634, "it's not on by default (Preferences > Page Display > Use Overprint Preview: Always).")
        c.drawString(100, 616, "In the top example, the magenta rectangle overprints the cyan one. In the lower one, it")
        c.drawString(100, 604, "'knocks out' the cyan underneath which is the default in PDF. This means that the overlap")
        c.drawString(100, 592, "area of the first example should show blue, because the two colours merge. However, in many")
        c.drawString(100, 580, "PDF viewers and cheap printers, both examples will probably look the same - magenta")
        c.drawString(100, 568, "rectangle knocks out part of the cyan one.")
        c.drawString(100, 550, "If you can view the separated CMYK plates in a tool like Illustrator, on the cyan plate")
        c.drawString(100, 538, "you should see the top rectangle as complete and the bottom one has a chunk knocked out of")
        c.drawString(100, 526, "the top right corner.")
        # Top example: magenta OVERPRINTS cyan — overlap should merge to blue.
        c.setFillOverprint(True)
        c.setFillColor(cyan)
        c.rect(100, 300, 200, 100, fill=True, stroke=False)
        c.setFillColor(magenta)
        c.rect(200, 350, 200, 100, fill=True, stroke=False)
        # Bottom example: default PDF knockout — magenta removes the cyan beneath.
        c.setFillOverprint(False)
        c.setFillColor(cyan)
        c.rect(100, 100, 200, 100, fill=True, stroke=False)
        c.setFillColor(magenta)
        c.rect(200, 150, 200, 100, fill=True, stroke=False)
        c.save()
        assert os.path.exists(filename)
def makeSuite():
    """Build the unittest suite for this module (used by the ReportLab test runner)."""
    return makeSuiteForClasses(OverprintTestCase)
#noruntests
if __name__ == "__main__":
    # Run the suite directly and report where the output PDF was written.
    unittest.TextTestRunner().run(makeSuite())
    printLocation()
| bsd-3-clause |
narusemotoki/brokkoly | brokkoly/database.py | 1 | 6103 | import contextlib
import datetime
import logging
import os
import sqlite3
import threading
from typing import ( # NOQA
Dict,
Iterator,
Optional,
)
import brokkoly.resource
logger = logging.getLogger(__name__)
class ThreadLocalDBConnectionManager:
    """Hands out one sqlite3 connection per thread, keyed by thread id.

    ``dbname`` must be assigned (on the class or instance) before the first
    call to :meth:`reconnect`.
    """
    _connections = {}  # type: Dict[int, sqlite3.Connection]
    dbname = None  # type: Optional[str]

    def get(self) -> Optional[sqlite3.Connection]:
        """Return the current thread's connection, or None if it never connected."""
        thread_id = threading.get_ident()
        return self._connections.get(thread_id)

    def reconnect(self) -> None:
        """Open a fresh connection for the current thread, replacing any old one."""
        thread_id = threading.get_ident()
        conn = sqlite3.connect(self.dbname)
        conn.row_factory = sqlite3.Row  # rows support name-based access
        logger.debug("Connect sqlite3 (%s) for %s", conn, thread_id)
        self._connections[thread_id] = conn
# Module-wide connection manager; callers set db.dbname before use.
db = ThreadLocalDBConnectionManager()
class Migrator:
    """Applies the versioned SQL migration files shipped with Brokkoly.

    Migration files live in ``brokkoly/resource/migrations`` and are named so
    that lexicographic filename order matches apply order.
    """

    def __init__(self, brokkoly_version: str) -> None:
        self.brokkoly_version = brokkoly_version

    def _has_database(self):
        """Return truthy when the ``migrations`` bookkeeping table exists."""
        with contextlib.closing(db.get().cursor()) as cursor:
            cursor.execute("""
            SELECT EXISTS(
                SELECT * FROM sqlite_master
                WHERE
                    sqlite_master.type = 'table' AND
                    sqlite_master.name = 'migrations'
            );
            """)
            return cursor.fetchone()[0]

    def _get_migration_version(self):
        """Return the newest schema version recorded in ``migrations``."""
        with contextlib.closing(db.get().cursor()) as cursor:
            cursor.execute("""
            SELECT version
            FROM migrations
            ORDER BY version DESC
            LIMIT 1
            """)
            return cursor.fetchone()[0]

    def _iter_diff(self, version: str) -> Iterator[str]:
        """Yield paths of migration files newer than *version*, in apply order.

        Returning data must be sorted.
        """
        logger.info("resource_dir: %s", brokkoly.resource.resource_dir)
        migration_dir = os.path.join(brokkoly.resource.resource_dir, 'migrations')
        return (
            os.path.join(migration_dir, filename)
            for filename in sorted(os.listdir(migration_dir))
            if os.path.splitext(filename)[0] > version
        )

    def _raise_for_invalid_version(self, schema_version: str) -> None:
        """Refuse to run against a database created by a newer Brokkoly.

        Raises:
            brokkoly.BrokkolyError: when the stored schema version is newer
                than this Brokkoly release.
        """
        # NOTE(review): versions are compared lexicographically, so e.g.
        # "10.0" sorts before "9.0"; fine while components stay single-digit,
        # but worth confirming against the project's versioning scheme.
        if schema_version > self.brokkoly_version:
            # Fixed typo: message previously read "Blokkory version".
            raise brokkoly.BrokkolyError(
                "Brokkoly version: {}, Database schema version: {}. The database is setup with "
                "newer version of Brokkoly. It doesn't provide downgrade database. Please upgrade "
                "Brokkoly.".format(self.brokkoly_version, schema_version)
            )

    def _run_migration_sql_file(self, filename: str) -> None:
        """Execute one migration script, wrapping sqlite errors in BrokkolyError."""
        with open(filename, 'r') as f:
            sql = f.read()
        with contextlib.closing(db.get().cursor()) as cursor:
            try:
                cursor.executescript(sql)
            except sqlite3.Error as e:
                logger.exception("Failed to run migration: %s", filename)
                raise brokkoly.BrokkolyError("Failed to run migration") from e

    def _migrate(self) -> None:
        """Validate the stored version and apply every pending migration file."""
        schema_version = self._get_migration_version() if self._has_database() else '0'
        logger.info("schema_version: %s", schema_version)
        self._raise_for_invalid_version(schema_version)
        for sql_file in self._iter_diff(schema_version):
            logger.info("Run migration: %s", sql_file)
            self._run_migration_sql_file(sql_file)

    def migrate(self) -> None:
        """Connect, apply pending migrations, and always close the connection."""
        db.reconnect()
        try:
            self._migrate()
        finally:
            db.get().close()
class MessageLog:
    """One row of the ``message_logs`` table: a message sent to a task queue."""

    def __init__(
            self, *, id: int=None, queue_name: str, task_name: str, message: str,
            created_at: datetime.datetime
    ) -> None:
        self.id = id
        self.queue_name = queue_name
        self.task_name = task_name
        self.message = message
        self.created_at = created_at

    @classmethod
    def get_by_id(cls, id: int) -> Optional['MessageLog']:
        """Fetch a single row by primary key; None when it does not exist."""
        with contextlib.closing(db.get().cursor()) as cursor:
            cursor.execute("SELECT * FROM message_logs WHERE message_logs.id = ?", (id, ))
            row = cursor.fetchone()
        return cls.from_sqlite3_row(row)

    @classmethod
    def create(cls, queue_name: str, task_name: str, message: str) -> 'MessageLog':
        """Insert a new row and return it (re-read by its generated id)."""
        with contextlib.closing(db.get().cursor()) as cursor:
            cursor.execute("""
            INSERT INTO message_logs (queue_name, task_name, message)
            VALUES (?, ?, ?)
            ;""", (queue_name, task_name, message, ))
            # If SQLite3 supports "returning", I can use it here.
            new_id = cursor.lastrowid
        return cls.get_by_id(new_id)

    @classmethod
    def list_by_queue_name_and_task_name(
            cls, queue_name: str, task_name: str) -> Iterator['MessageLog']:
        """Iterate rows for one (queue, task) pair, newest first."""
        with contextlib.closing(db.get().cursor()) as cursor:
            cursor.execute("""
            SELECT *
            FROM message_logs
            WHERE
                message_logs.queue_name = ? AND
                message_logs.task_name = ?
            ORDER BY message_logs.created_at DESC
            ;""", (queue_name, task_name, ))
            rows = cursor.fetchall()
        return map(cls.from_sqlite3_row, rows)

    @classmethod
    def from_sqlite3_row(cls, row: Optional[sqlite3.Row]) -> Optional['MessageLog']:
        """Hydrate a MessageLog from a row whose column names match __init__ kwargs."""
        if not row:
            return None
        return cls(**row)  # type: ignore

    @classmethod
    def eliminate(cls, queue_name: str, task_name: str) -> None:
        """Trim one (queue, task)'s history down to its newest 1000 rows."""
        with contextlib.closing(db.get().cursor()) as cursor:
            cursor.execute("""
            DELETE FROM message_logs
            WHERE
                queue_name = :queue_name AND
                task_name = :task_name AND
                id NOT IN (
                    SELECT id
                    FROM message_logs
                    WHERE
                        queue_name = :queue_name AND
                        task_name = :task_name
                    ORDER BY created_at DESC
                    LIMIT 1000
                )
            ;
            """, {
                'queue_name': queue_name,
                'task_name': task_name,
            })
| mit |
kakunbsc/enigma2 | lib/python/Plugins/SystemPlugins/OldSoftwareUpdate/plugin.py | 4 | 4906 | from enigma import RT_HALIGN_LEFT, RT_VALIGN_CENTER, eListboxPythonMultiContent, eListbox, eTimer, gFont
from Screens.Screen import Screen
from Screens.MessageBox import MessageBox
from Components.ActionMap import ActionMap
from Components.ScrollLabel import ScrollLabel
from Components.GUIComponent import GUIComponent
from Plugins.Plugin import PluginDescriptor
from os import popen
class Upgrade(Screen):
    """Screen that runs 'ipkg update && ipkg upgrade' and shows the output."""
    skin = """
<screen position="100,100" size="550,400" title="IPKG upgrade..." >
<widget name="text" position="0,0" size="550,400" font="Regular;15" />
</screen>"""

    def __init__(self, session, args = None):
        self.skin = Upgrade.skin
        Screen.__init__(self, session)
        self["text"] = ScrollLabel(_("Please press OK!"))
        self["actions"] = ActionMap(["WizardActions"],
        {
            "ok": self.go,
            "back": self.close,
            "up": self["text"].pageUp,
            "down": self["text"].pageDown
        }, -1)
        # True until an upgrade has been performed; afterwards OK just closes.
        self.update = True
        # One-shot timer so the UI can repaint before the blocking ipkg call.
        self.delayTimer = eTimer()
        self.delayTimer.callback.append(self.doUpdateDelay)

    def go(self):
        # OK pressed: ask for confirmation, or close if already upgraded.
        if self.update:
            self.session.openWithCallback(self.doUpdate, MessageBox, _("Do you want to update your Dreambox?\nAfter pressing OK, please wait!"))
        else:
            self.close()

    def doUpdateDelay(self):
        # Runs the (blocking) upgrade and shows its combined stdout.
        lines = popen("ipkg update && ipkg upgrade -force-defaults -force-overwrite", "r").readlines()
        string = ""
        for x in lines:
            string += x
        self["text"].setText(_("Updating finished. Here is the result:") + "\n\n" + string)
        self.update = False

    def doUpdate(self, val = False):
        # MessageBox callback: val is True when the user confirmed.
        if val == True:
            self["text"].setText(_("Updating... Please wait... This can take some minutes..."))
            self.delayTimer.start(0, 1)
        else:
            self.close()
def PacketEntryComponent(packet):
    """Build a multi-content list row for a packet: [name, version, status]."""
    flags = RT_HALIGN_LEFT | RT_VALIGN_CENTER
    # (x, width, font index) for the name, version and status columns.
    columns = ((0, 250, 0), (250, 200, 1), (450, 100, 1))
    entry = [packet]
    for idx, (x, width, font) in enumerate(columns):
        entry.append((eListboxPythonMultiContent.TYPE_TEXT, x, 0, width, 30, font, flags, packet[idx]))
    return entry
class PacketList(GUIComponent):
    """List widget that renders the (name, version, status) packet rows."""
    def __init__(self, list):
        GUIComponent.__init__(self)
        self.l = eListboxPythonMultiContent()
        self.l.setList(list)
        # Font 0: larger, for the package name; font 1: version/status columns.
        self.l.setFont(0, gFont("Regular", 20))
        self.l.setFont(1, gFont("Regular", 18))

    def getCurrent(self):
        # Return the currently highlighted raw list entry.
        return self.l.getCurrentSelection()

    def GUIcreate(self, parent):
        self.instance = eListbox(parent)
        self.instance.setContent(self.l)
        self.instance.setItemHeight(30)

    def GUIdelete(self):
        self.instance.setContent(None)
        self.instance = None

    def invalidate(self):
        # Force a redraw of all rows.
        self.l.invalidate()
class Ipkg(Screen):
    """Read-only listing of every ipkg package with installed/upgradable status."""
    skin = """
<screen position="100,100" size="550,400" title="IPKG upgrade..." >
<widget name="list" position="0,0" size="550,400" scrollbarMode="showOnDemand" />
</screen>"""

    def __init__(self, session, args = None):
        self.skin = Ipkg.skin
        Screen.__init__(self, session)
        list = []
        self.list = list
        self.fillPacketList()
        self["list"] = PacketList(self.list)
        # Both OK and EXIT close the screen; the list is display-only.
        self["actions"] = ActionMap(["WizardActions"],
        {
            "ok": self.close,
            "back": self.close
        }, -1)

    def fillPacketList(self):
        # Combine 'ipkg list' (all packages) with 'ipkg list_installed' to
        # derive each package's status. Output lines look like "name - version".
        lines = popen("ipkg list", "r").readlines()
        packetlist = []
        for x in lines:
            split = x.split(' - ')
            packetlist.append([split[0].strip(), split[1].strip()])
        lines = popen("ipkg list_installed", "r").readlines()
        installedlist = {}
        for x in lines:
            split = x.split(' - ')
            installedlist[split[0].strip()] = split[1].strip()
        for x in packetlist:
            status = ""
            if installedlist.has_key(x[0]):
                if installedlist[x[0]] == x[1]:
                    status = "installed"
                else:
                    status = "upgradable"
            self.list.append(PacketEntryComponent([x[0], x[1], status]))

    # NOTE(review): the three methods below look copy-pasted from the Upgrade
    # screen. They reference self.update, self.delayTimer and self["text"],
    # none of which Ipkg initialises, so calling them would fail; they are
    # unreachable through this screen's ActionMap (dead code).
    def go(self):
        if self.update:
            self.session.openWithCallback(self.doUpdate, MessageBox, _("Do you want to update your Dreambox?\nAfter pressing OK, please wait!"))
        else:
            self.close()

    def doUpdateDelay(self):
        lines = popen("ipkg update && ipkg upgrade", "r").readlines()
        string = ""
        for x in lines:
            string += x
        self["text"].setText(_("Updating finished. Here is the result:") + "\n\n" + string)
        self.update = False

    def doUpdate(self, val = False):
        if val == True:
            self["text"].setText(_("Updating... Please wait... This can take some minutes..."))
            self.delayTimer.start(0, 1)
        else:
            self.close()
def UpgradeMain(session, **kwargs):
    # Plugin entry point: open the upgrade screen.
    session.open(Upgrade)
def IpkgMain(session, **kwargs):
    # Plugin entry point: open the package-listing screen.
    session.open(Ipkg)
def Plugins(**kwargs):
    # Register both screens in the enigma2 plugin menu.
    return [PluginDescriptor(name="Old Softwareupdate", description="Updates your receiver's software", icon="update.png", where = PluginDescriptor.WHERE_PLUGINMENU, fnc=UpgradeMain),
        PluginDescriptor(name="IPKG", description="IPKG frontend", icon="update.png", where = PluginDescriptor.WHERE_PLUGINMENU, fnc=IpkgMain)]
| gpl-2.0 |
adambrenecki/django | tests/proxy_models/tests.py | 2 | 15485 | from __future__ import unicode_literals
import copy
from django.conf import settings
from django.contrib import admin
from django.contrib.contenttypes.models import ContentType
from django.core import management
from django.core.exceptions import FieldError
from django.db import models, DEFAULT_DB_ALIAS
from django.db.models import signals
from django.db.models.loading import cache
from django.test import TestCase
from django.test.utils import override_settings
from .models import (MyPerson, Person, StatusPerson, LowerStatusPerson,
MyPersonProxy, Abstract, OtherPerson, User, UserProxy, UserProxyProxy,
Country, State, StateProxy, TrackerUser, BaseUser, Bug, ProxyTrackerUser,
Improvement, ProxyProxyBug, ProxyBug, ProxyImprovement, Issue)
from .admin import admin as force_admin_model_registration
class ProxyModelTests(TestCase):
def test_same_manager_queries(self):
"""
The MyPerson model should be generating the same database queries as
the Person model (when the same manager is used in each case).
"""
my_person_sql = MyPerson.other.all().query.get_compiler(
DEFAULT_DB_ALIAS).as_sql()
person_sql = Person.objects.order_by("name").query.get_compiler(
DEFAULT_DB_ALIAS).as_sql()
self.assertEqual(my_person_sql, person_sql)
def test_inheretance_new_table(self):
"""
The StatusPerson models should have its own table (it's using ORM-level
inheritance).
"""
sp_sql = StatusPerson.objects.all().query.get_compiler(
DEFAULT_DB_ALIAS).as_sql()
p_sql = Person.objects.all().query.get_compiler(
DEFAULT_DB_ALIAS).as_sql()
self.assertNotEqual(sp_sql, p_sql)
def test_basic_proxy(self):
"""
Creating a Person makes them accessible through the MyPerson proxy.
"""
person = Person.objects.create(name="Foo McBar")
self.assertEqual(len(Person.objects.all()), 1)
self.assertEqual(len(MyPerson.objects.all()), 1)
self.assertEqual(MyPerson.objects.get(name="Foo McBar").id, person.id)
self.assertFalse(MyPerson.objects.get(id=person.id).has_special_name())
    def test_no_proxy(self):
        """
        Person is not proxied by StatusPerson subclass.
        """
        Person.objects.create(name="Foo McBar")
        # StatusPerson uses multi-table inheritance, so it has no rows yet.
        self.assertEqual(list(StatusPerson.objects.all()), [])
def test_basic_proxy_reverse(self):
"""
A new MyPerson also shows up as a standard Person.
"""
MyPerson.objects.create(name="Bazza del Frob")
self.assertEqual(len(MyPerson.objects.all()), 1)
self.assertEqual(len(Person.objects.all()), 1)
LowerStatusPerson.objects.create(status="low", name="homer")
lsps = [lsp.name for lsp in LowerStatusPerson.objects.all()]
self.assertEqual(lsps, ["homer"])
def test_correct_type_proxy_of_proxy(self):
"""
Correct type when querying a proxy of proxy
"""
Person.objects.create(name="Foo McBar")
MyPerson.objects.create(name="Bazza del Frob")
LowerStatusPerson.objects.create(status="low", name="homer")
pp = sorted(mpp.name for mpp in MyPersonProxy.objects.all())
self.assertEqual(pp, ['Bazza del Frob', 'Foo McBar', 'homer'])
    def test_proxy_included_in_ancestors(self):
        """
        Proxy models are included in the ancestors for a model's DoesNotExist
        and MultipleObjectsReturned
        """
        Person.objects.create(name="Foo McBar")
        MyPerson.objects.create(name="Bazza del Frob")
        LowerStatusPerson.objects.create(status="low", name="homer")
        max_id = Person.objects.aggregate(max_id=models.Max('id'))['max_id']

        # The proxies raise the concrete Person model's exception classes.
        self.assertRaises(Person.DoesNotExist,
            MyPersonProxy.objects.get,
            name='Zathras'
        )
        self.assertRaises(Person.MultipleObjectsReturned,
            MyPersonProxy.objects.get,
            id__lt=max_id + 1
        )
        self.assertRaises(Person.DoesNotExist,
            StatusPerson.objects.get,
            name='Zathras'
        )
        # Two StatusPersons so the id__lt lookup below matches multiple rows.
        sp1 = StatusPerson.objects.create(name='Bazza Jr.')
        sp2 = StatusPerson.objects.create(name='Foo Jr.')
        max_id = Person.objects.aggregate(max_id=models.Max('id'))['max_id']

        self.assertRaises(Person.MultipleObjectsReturned,
            StatusPerson.objects.get,
            id__lt=max_id + 1
        )
def test_abc(self):
"""
All base classes must be non-abstract
"""
def build_abc():
class NoAbstract(Abstract):
class Meta:
proxy = True
self.assertRaises(TypeError, build_abc)
def test_no_cbc(self):
    """A proxy must have exactly one concrete base class."""
    with self.assertRaises(TypeError):
        class TooManyBases(Person, Abstract):
            class Meta:
                proxy = True
def test_no_base_classes(self):
    """A proxy without any model base class is rejected."""
    with self.assertRaises(TypeError):
        class NoBaseClasses(models.Model):
            class Meta:
                proxy = True
def test_new_fields(self):
    """A proxy model may not declare new fields."""
    with self.assertRaises(FieldError):
        class NoNewFields(Person):
            newfield = models.BooleanField()

            class Meta:
                proxy = True
def test_swappable(self):
    """A model that has been swapped out cannot be proxied."""
    try:
        # This test adds dummy applications to the app cache. These
        # need to be removed in order to prevent bad interactions
        # with the flush operation in other tests.
        old_app_models = copy.deepcopy(cache.app_models)
        old_app_store = copy.deepcopy(cache.app_store)

        settings.TEST_SWAPPABLE_MODEL = 'proxy_models.AlternateModel'

        class SwappableModel(models.Model):

            class Meta:
                swappable = 'TEST_SWAPPABLE_MODEL'

        class AlternateModel(models.Model):
            pass

        # You can't proxy a swapped model
        with self.assertRaises(TypeError):
            class ProxyModel(SwappableModel):

                class Meta:
                    proxy = True
    finally:
        # Restore the global app-cache state even if the test body fails.
        del settings.TEST_SWAPPABLE_MODEL
        cache.app_models = old_app_models
        cache.app_store = old_app_store
def test_myperson_manager(self):
    """MyPerson's default manager filters the concrete Person rows."""
    for name in ("fred", "wilma", "barney"):
        Person.objects.create(name=name)
    expected = ['barney', 'fred']
    self.assertEqual([p.name for p in MyPerson.objects.all()], expected)
    self.assertEqual([p.name for p in MyPerson._default_manager.all()], expected)
def test_otherperson_manager(self):
    """Each OtherPerson manager applies its own filtering."""
    for name in ("fred", "wilma", "barney"):
        Person.objects.create(name=name)
    self.assertEqual([p.name for p in OtherPerson.objects.all()],
                     ['barney', 'wilma'])
    self.assertEqual([p.name for p in OtherPerson.excluder.all()],
                     ['barney', 'fred'])
    # The first manager declared on the proxy is its default manager.
    self.assertEqual([p.name for p in OtherPerson._default_manager.all()],
                     ['barney', 'wilma'])
def test_permissions_created(self):
    """Custom permissions declared on a proxy model are created."""
    from django.contrib.auth.models import Permission
    try:
        Permission.objects.get(name="May display users information")
    except Permission.DoesNotExist:
        self.fail("The permission 'May display users information' has not been created")
def test_proxy_model_signals(self):
    """
    Test save signals for proxy models
    """
    output = []

    def make_handler(model, event):
        # Each handler records "<model> <event> save" when its signal fires.
        def _handler(*args, **kwargs):
            output.append('%s %s save' % (model, event))
        return _handler

    h1 = make_handler('MyPerson', 'pre')
    h2 = make_handler('MyPerson', 'post')
    h3 = make_handler('Person', 'pre')
    h4 = make_handler('Person', 'post')

    signals.pre_save.connect(h1, sender=MyPerson)
    signals.post_save.connect(h2, sender=MyPerson)
    signals.pre_save.connect(h3, sender=Person)
    signals.post_save.connect(h4, sender=Person)
    try:
        # Saving through the proxy dispatches with the proxy as sender,
        # not the concrete Person. The created object itself is unused.
        MyPerson.objects.create(name="dino")
        self.assertEqual(output, [
            'MyPerson pre save',
            'MyPerson post save'
        ])

        output = []

        h5 = make_handler('MyPersonProxy', 'pre')
        h6 = make_handler('MyPersonProxy', 'post')
        signals.pre_save.connect(h5, sender=MyPersonProxy)
        signals.post_save.connect(h6, sender=MyPersonProxy)
        try:
            MyPersonProxy.objects.create(name="pebbles")
            self.assertEqual(output, [
                'MyPersonProxy pre save',
                'MyPersonProxy post save'
            ])
        finally:
            signals.pre_save.disconnect(h5, sender=MyPersonProxy)
            signals.post_save.disconnect(h6, sender=MyPersonProxy)
    finally:
        # Always disconnect; otherwise a failed assertion above would leak
        # these handlers into every other test in the process.
        signals.pre_save.disconnect(h1, sender=MyPerson)
        signals.post_save.disconnect(h2, sender=MyPerson)
        signals.pre_save.disconnect(h3, sender=Person)
        signals.post_save.disconnect(h4, sender=Person)
def test_content_type(self):
    """A proxy shares the ContentType of its concrete model."""
    ctype = ContentType.objects.get_for_model
    # assertIs reports both operands on failure, unlike assertTrue(x is y).
    self.assertIs(ctype(Person), ctype(OtherPerson))
def test_user_userproxy_userproxyproxy(self):
    """The same row is visible through User and both proxy levels."""
    User.objects.create(name='Bruce')
    for model in (User, UserProxy, UserProxyProxy):
        self.assertEqual([u.name for u in model.objects.all()], ['Bruce'])
def test_proxy_for_model(self):
    # proxy_for_model is the immediate proxied class, i.e. UserProxy
    # for UserProxyProxy (not the concrete User).
    self.assertEqual(UserProxy, UserProxyProxy._meta.proxy_for_model)
def test_concrete_model(self):
    # concrete_model resolves through the whole proxy chain to User.
    self.assertEqual(User, UserProxyProxy._meta.concrete_model)
def test_proxy_delete(self):
    """Deleting through a proxy removes the underlying row."""
    User.objects.create(name='Bruce')
    george = UserProxy.objects.create(name='George')
    self.assertEqual([u.name for u in UserProxy.objects.all()],
                     ['Bruce', 'George'])
    george.delete()
    self.assertEqual([u.name for u in UserProxy.objects.all()], ['Bruce'])
def test_select_related(self):
    """
    We can still use `select_related()` to include related models in our
    querysets.
    """
    country = Country.objects.create(name='Australia')
    # The created State is only queried back below, so don't bind it.
    State.objects.create(name='New South Wales', country=country)

    resp = [s.name for s in State.objects.select_related()]
    self.assertEqual(resp, ['New South Wales'])
    resp = [s.name for s in StateProxy.objects.select_related()]
    self.assertEqual(resp, ['New South Wales'])
    self.assertEqual(StateProxy.objects.get(name='New South Wales').name,
        'New South Wales')
    resp = StateProxy.objects.select_related().get(name='New South Wales')
    self.assertEqual(resp.name, 'New South Wales')
def test_proxy_bug(self):
    """Filtering and select_related work across proxy relations."""
    contributor = TrackerUser.objects.create(name='Contributor',
        status='contrib')
    someone = BaseUser.objects.create(name='Someone')
    Bug.objects.create(summary='fix this', version='1.1beta',
        assignee=contributor, reporter=someone)
    pcontributor = ProxyTrackerUser.objects.create(name='OtherContributor',
        status='proxy')
    Improvement.objects.create(summary='improve that', version='1.1beta',
        assignee=contributor, reporter=pcontributor,
        associated_bug=ProxyProxyBug.objects.all()[0])
    # Related field filter on proxy
    resp = ProxyBug.objects.get(version__icontains='beta')
    self.assertEqual(repr(resp), '<ProxyBug: ProxyBug:fix this>')
    # Select related + filter on proxy
    resp = ProxyBug.objects.select_related().get(version__icontains='beta')
    self.assertEqual(repr(resp), '<ProxyBug: ProxyBug:fix this>')
    # Proxy of proxy, select_related + filter
    resp = ProxyProxyBug.objects.select_related().get(
        version__icontains='beta'
    )
    self.assertEqual(repr(resp), '<ProxyProxyBug: ProxyProxyBug:fix this>')
    # Select related + filter on a related proxy field
    resp = ProxyImprovement.objects.select_related().get(
        reporter__name__icontains='butor'
    )
    self.assertEqual(repr(resp),
        '<ProxyImprovement: ProxyImprovement:improve that>'
    )
    # Select related + filter on a related proxy of proxy field
    resp = ProxyImprovement.objects.select_related().get(
        associated_bug__summary__icontains='fix'
    )
    self.assertEqual(repr(resp),
        '<ProxyImprovement: ProxyImprovement:improve that>'
    )
def test_proxy_load_from_fixture(self):
    """A fixture can deserialize directly into a proxy model."""
    management.call_command('loaddata', 'mypeople.json', verbosity=0)
    loaded = MyPerson.objects.get(pk=100)
    self.assertEqual(loaded.name, 'Elvis Presley')
def test_eq(self):
    # Proxy and concrete instances with the same pk compare equal.
    self.assertEqual(MyPerson(id=100), Person(id=100))
@override_settings(PASSWORD_HASHERS=('django.contrib.auth.hashers.SHA1PasswordHasher',))
class ProxyModelAdminTests(TestCase):
    """Admin behavior (delete collector, delete page) for proxy models."""
    fixtures = ['myhorses']
    urls = 'proxy_models.urls'

    def test_cascade_delete_proxy_model_admin_warning(self):
        """
        Test if admin gives warning about cascade deleting models referenced
        to concrete model by deleting proxy object.
        """
        tracker_user = TrackerUser.objects.all()[0]
        base_user = BaseUser.objects.all()[0]
        issue = Issue.objects.all()[0]
        # NOTE(review): the exact query count (7) depends on the collector
        # implementation and may need updating if it changes.
        with self.assertNumQueries(7):
            collector = admin.util.NestedObjects('default')
            collector.collect(ProxyTrackerUser.objects.all())
        # Roots of the nested-object tree are stored under the None key.
        self.assertTrue(tracker_user in collector.edges.get(None, ()))
        self.assertTrue(base_user in collector.edges.get(None, ()))
        self.assertTrue(issue in collector.edges.get(tracker_user, ()))

    def test_delete_str_in_model_admin(self):
        """
        Test if the admin delete page shows the correct string representation
        for a proxy model.
        """
        user = TrackerUser.objects.get(name='Django Pony')
        proxy = ProxyTrackerUser.objects.get(name='Django Pony')

        # Expected "<verbose name>: <link>" entries for the delete page.
        user_str = (
            'Tracker user: <a href="/admin/proxy_models/trackeruser/%s/">%s</a>' % (user.pk, user))
        proxy_str = (
            'Proxy tracker user: <a href="/admin/proxy_models/proxytrackeruser/%s/">%s</a>' %
            (proxy.pk, proxy))

        self.client.login(username='super', password='secret')
        response = self.client.get('/admin/proxy_models/trackeruser/%s/delete/' % (user.pk,))
        delete_str = response.context['deleted_objects'][0]
        self.assertEqual(delete_str, user_str)
        response = self.client.get('/admin/proxy_models/proxytrackeruser/%s/delete/' % (proxy.pk,))
        delete_str = response.context['deleted_objects'][0]
        self.assertEqual(delete_str, proxy_str)
        self.client.logout()
| bsd-3-clause |
magvugr/AT | EntVirtual/lib/python2.7/site-packages/django/contrib/sessions/backends/cached_db.py | 118 | 2813 | """
Cached, database-backed sessions.
"""
import logging
from django.conf import settings
from django.contrib.sessions.backends.db import SessionStore as DBStore
from django.core.cache import caches
from django.core.exceptions import SuspiciousOperation
from django.utils import timezone
from django.utils.encoding import force_text
KEY_PREFIX = "django.contrib.sessions.cached_db"
class SessionStore(DBStore):
    """
    Implements cached, database backed sessions.

    The database is the source of truth; the cache is a write-through
    front that is consulted first on reads.
    """
    # Namespaces cache keys so they cannot collide with other cache users.
    cache_key_prefix = KEY_PREFIX

    def __init__(self, session_key=None):
        self._cache = caches[settings.SESSION_CACHE_ALIAS]
        super(SessionStore, self).__init__(session_key)

    @property
    def cache_key(self):
        # Lazily creates a session key on first access if none exists yet.
        return self.cache_key_prefix + self._get_or_create_session_key()

    def load(self):
        """Return the session dict, preferring the cache over the database."""
        try:
            data = self._cache.get(self.cache_key)
        except Exception:
            # Some backends (e.g. memcache) raise an exception on invalid
            # cache keys. If this happens, reset the session. See #17810.
            data = None
        if data is None:
            # Duplicate DBStore.load, because we need to keep track
            # of the expiry date to set it properly in the cache.
            try:
                s = self.model.objects.get(
                    session_key=self.session_key,
                    expire_date__gt=timezone.now()
                )
                data = self.decode(s.session_data)
                self._cache.set(self.cache_key, data, self.get_expiry_age(expiry=s.expire_date))
            except (self.model.DoesNotExist, SuspiciousOperation) as e:
                if isinstance(e, SuspiciousOperation):
                    # Log tampered/corrupt session data to the security logger.
                    logger = logging.getLogger('django.security.%s' % e.__class__.__name__)
                    logger.warning(force_text(e))
                self._session_key = None
                data = {}
        return data

    def exists(self, session_key):
        # A cache hit is authoritative; fall back to the DB query on a miss.
        if session_key and (self.cache_key_prefix + session_key) in self._cache:
            return True
        return super(SessionStore, self).exists(session_key)

    def save(self, must_create=False):
        # Write-through: persist to the database first, then refresh the cache.
        super(SessionStore, self).save(must_create)
        self._cache.set(self.cache_key, self._session, self.get_expiry_age())

    def delete(self, session_key=None):
        super(SessionStore, self).delete(session_key)
        if session_key is None:
            # Deleting the current session; nothing to evict without a key.
            if self.session_key is None:
                return
            session_key = self.session_key
        self._cache.delete(self.cache_key_prefix + session_key)

    def flush(self):
        """
        Removes the current session data from the database and regenerates the
        key.
        """
        self.clear()
        self.delete(self.session_key)
        self._session_key = None
| gpl-3.0 |
Dapid/numpy | numpy/polynomial/legendre.py | 1 | 57375 | """
Legendre Series (:mod: `numpy.polynomial.legendre`)
===================================================
.. currentmodule:: numpy.polynomial.polynomial
This module provides a number of objects (mostly functions) useful for
dealing with Legendre series, including a `Legendre` class that
encapsulates the usual arithmetic operations. (General information
on how this module represents and works with such polynomials is in the
docstring for its "parent" sub-package, `numpy.polynomial`).
Constants
---------
.. autosummary::
:toctree: generated/
legdomain Legendre series default domain, [-1,1].
legzero Legendre series that evaluates identically to 0.
legone Legendre series that evaluates identically to 1.
legx Legendre series for the identity map, ``f(x) = x``.
Arithmetic
----------
.. autosummary::
:toctree: generated/
legmulx multiply a Legendre series in P_i(x) by x.
legadd add two Legendre series.
legsub subtract one Legendre series from another.
legmul multiply two Legendre series.
legdiv divide one Legendre series by another.
legpow raise a Legendre series to an positive integer power
legval evaluate a Legendre series at given points.
legval2d evaluate a 2D Legendre series at given points.
legval3d evaluate a 3D Legendre series at given points.
leggrid2d evaluate a 2D Legendre series on a Cartesian product.
leggrid3d evaluate a 3D Legendre series on a Cartesian product.
Calculus
--------
.. autosummary::
:toctree: generated/
legder differentiate a Legendre series.
legint integrate a Legendre series.
Misc Functions
--------------
.. autosummary::
:toctree: generated/
legfromroots create a Legendre series with specified roots.
legroots find the roots of a Legendre series.
legvander Vandermonde-like matrix for Legendre polynomials.
legvander2d Vandermonde-like matrix for 2D power series.
legvander3d Vandermonde-like matrix for 3D power series.
leggauss Gauss-Legendre quadrature, points and weights.
legweight Legendre weight function.
legcompanion symmetrized companion matrix in Legendre form.
legfit least-squares fit returning a Legendre series.
legtrim trim leading coefficients from a Legendre series.
legline Legendre series representing given straight line.
leg2poly convert a Legendre series to a polynomial.
poly2leg convert a polynomial to a Legendre series.
Classes
-------
Legendre A Legendre series class.
See also
--------
numpy.polynomial.polynomial
numpy.polynomial.chebyshev
numpy.polynomial.laguerre
numpy.polynomial.hermite
numpy.polynomial.hermite_e
"""
from __future__ import division, absolute_import, print_function
import warnings
import numpy as np
import numpy.linalg as la
from . import polyutils as pu
from ._polybase import ABCPolyBase
__all__ = [
'legzero', 'legone', 'legx', 'legdomain', 'legline', 'legadd',
'legsub', 'legmulx', 'legmul', 'legdiv', 'legpow', 'legval', 'legder',
'legint', 'leg2poly', 'poly2leg', 'legfromroots', 'legvander',
'legfit', 'legtrim', 'legroots', 'Legendre', 'legval2d', 'legval3d',
'leggrid2d', 'leggrid3d', 'legvander2d', 'legvander3d', 'legcompanion',
'leggauss', 'legweight']
# Trimming small trailing coefficients is basis-independent, so the generic
# polyutils implementation is reused directly.
legtrim = pu.trimcoef
def poly2leg(pol):
"""
Convert a polynomial to a Legendre series.
Convert an array representing the coefficients of a polynomial (relative
to the "standard" basis) ordered from lowest degree to highest, to an
array of the coefficients of the equivalent Legendre series, ordered
from lowest to highest degree.
Parameters
----------
pol : array_like
1-D array containing the polynomial coefficients
Returns
-------
c : ndarray
1-D array containing the coefficients of the equivalent Legendre
series.
See Also
--------
leg2poly
Notes
-----
The easy way to do conversions between polynomial basis sets
is to use the convert method of a class instance.
Examples
--------
>>> from numpy import polynomial as P
>>> p = P.Polynomial(np.arange(4))
>>> p
Polynomial([ 0., 1., 2., 3.], [-1., 1.])
>>> c = P.Legendre(P.poly2leg(p.coef))
>>> c
Legendre([ 1. , 3.25, 1. , 0.75], [-1., 1.])
"""
[pol] = pu.as_series([pol])
deg = len(pol) - 1
res = 0
for i in range(deg, -1, -1):
res = legadd(legmulx(res), pol[i])
return res
def leg2poly(c):
    """
    Convert a Legendre series to a polynomial.

    Convert an array representing the coefficients of a Legendre series,
    ordered from lowest degree to highest, to an array of the coefficients
    of the equivalent polynomial (relative to the "standard" basis) ordered
    from lowest to highest degree.

    Parameters
    ----------
    c : array_like
        1-D array containing the Legendre series coefficients, ordered
        from lowest order term to highest.

    Returns
    -------
    pol : ndarray
        1-D array containing the coefficients of the equivalent polynomial
        (relative to the "standard" basis) ordered from lowest order term
        to highest.

    See Also
    --------
    poly2leg

    Notes
    -----
    The easy way to do conversions between polynomial basis sets
    is to use the convert method of a class instance.

    Examples
    --------
    >>> from numpy import polynomial as P
    >>> c = P.Legendre(range(4))
    >>> c
    Legendre([ 0.,  1.,  2.,  3.], [-1.,  1.])
    >>> p = c.convert(kind=P.Polynomial)
    >>> p
    Polynomial([-1. , -3.5,  3. ,  7.5], [-1.,  1.])
    >>> P.leg2poly(range(4))
    array([-1. , -3.5,  3. ,  7.5])


    """
    # Local import avoids a circular dependency between the polynomial
    # and legendre modules.
    from .polynomial import polyadd, polysub, polymulx

    [c] = pu.as_series([c])
    n = len(c)
    if n < 3:
        # Degree < 2: L_0 = 1 and L_1 = x, so coefficients are unchanged.
        return c
    else:
        # Clenshaw-style downward recurrence based on the Legendre
        # three-term recurrence; c0/c1 are polynomial-basis accumulators.
        c0 = c[-2]
        c1 = c[-1]
        # i is the current degree of c1
        for i in range(n - 1, 1, -1):
            tmp = c0
            c0 = polysub(c[i - 2], (c1*(i - 1))/i)
            c1 = polyadd(tmp, (polymulx(c1)*(2*i - 1))/i)
        return polyadd(c0, polymulx(c1))
#
# These constant arrays are of integer type so as to be compatible
# with the widest range of other types, such as Decimal.
#

# Legendre series default domain.
legdomain = np.array([-1, 1])

# Legendre coefficients representing zero.
legzero = np.array([0])

# Legendre coefficients representing one.
legone = np.array([1])

# Legendre coefficients representing the identity x.
legx = np.array([0, 1])
def legline(off, scl):
    """
    Legendre series whose graph is a straight line.

    Parameters
    ----------
    off, scl : scalars
        The specified line is given by ``off + scl*x``.

    Returns
    -------
    y : ndarray
        This module's representation of the Legendre series for
        ``off + scl*x``.

    See Also
    --------
    polyline, chebline

    Examples
    --------
    >>> import numpy.polynomial.legendre as L
    >>> L.legline(3,2)
    array([3, 2])
    >>> L.legval(-3, L.legline(3,2)) # should be -3
    -3.0

    """
    # L_0 = 1 and L_1 = x, so the line maps directly onto the first two
    # basis coefficients; omit the degree-1 term when the slope is zero.
    if scl == 0:
        return np.array([off])
    return np.array([off, scl])
def legfromroots(roots):
    """
    Generate a Legendre series with given roots.

    The function returns the coefficients of the polynomial

    .. math:: p(x) = (x - r_0) * (x - r_1) * ... * (x - r_n),

    in Legendre form, where the `r_n` are the roots specified in `roots`.
    A root of multiplicity n must appear in `roots` n times, in any order.
    If the returned coefficients are `c`, then

    .. math:: p(x) = c_0 + c_1 * L_1(x) + ... + c_n * L_n(x)

    The coefficient of the last term is not generally 1 for monic
    polynomials in Legendre form.

    Parameters
    ----------
    roots : array_like
        Sequence containing the roots.

    Returns
    -------
    out : ndarray
        1-D array of coefficients.  If all roots are real then `out` is a
        real array, if some of the roots are complex, then `out` is complex
        even if all the coefficients in the result are real (see Examples
        below).

    See Also
    --------
    polyfromroots, chebfromroots, lagfromroots, hermfromroots,
    hermefromroots.

    Examples
    --------
    >>> import numpy.polynomial.legendre as L
    >>> L.legfromroots((-1,0,1)) # x^3 - x relative to the standard basis
    array([ 0. , -0.4,  0. ,  0.4])
    >>> j = complex(0,1)
    >>> L.legfromroots((-j,j)) # x^2 + 1 relative to the standard basis
    array([ 1.33333333+0.j,  0.00000000+0.j,  0.66666667+0.j])

    """
    if len(roots) == 0:
        # Empty product: the constant series 1.
        return np.ones(1)
    [roots] = pu.as_series([roots], trim=False)
    roots.sort()
    # One linear factor (x - r) per root; multiply them pairwise so the
    # partial products stay balanced in degree.
    factors = [legline(-root, 1) for root in roots]
    while len(factors) > 1:
        half, odd = divmod(len(factors), 2)
        merged = [legmul(factors[j], factors[j + half]) for j in range(half)]
        if odd:
            merged[0] = legmul(merged[0], factors[-1])
        factors = merged
    return factors[0]
def legadd(c1, c2):
    """
    Add one Legendre series to another.

    Returns the sum of two Legendre series `c1` + `c2`.  Each argument is
    a sequence of coefficients ordered from lowest order term to highest,
    i.e., [1,2,3] represents the series ``P_0 + 2*P_1 + 3*P_2``.

    Parameters
    ----------
    c1, c2 : array_like
        1-D arrays of Legendre series coefficients ordered from low to
        high.

    Returns
    -------
    out : ndarray
        Array representing the Legendre series of their sum.

    See Also
    --------
    legsub, legmul, legdiv, legpow

    Notes
    -----
    Addition in the Legendre basis is component-wise, exactly as for
    "standard" polynomials, so no reprojection onto the basis is needed.

    Examples
    --------
    >>> from numpy.polynomial import legendre as L
    >>> c1 = (1,2,3)
    >>> c2 = (3,2,1)
    >>> L.legadd(c1,c2)
    array([ 4.,  4.,  4.])

    """
    # c1, c2 are trimmed copies
    [c1, c2] = pu.as_series([c1, c2])
    # Accumulate into the longer array; the shorter series contributes
    # zeros beyond its own length.
    if len(c2) > len(c1):
        c1, c2 = c2, c1
    c1[:c2.size] += c2
    return pu.trimseq(c1)
def legsub(c1, c2):
    """
    Subtract one Legendre series from another.

    Returns the difference of two Legendre series `c1` - `c2`.  Each
    argument is a sequence of coefficients from lowest order term to
    highest, i.e., [1,2,3] represents the series ``P_0 + 2*P_1 + 3*P_2``.

    Parameters
    ----------
    c1, c2 : array_like
        1-D arrays of Legendre series coefficients ordered from low to
        high.

    Returns
    -------
    out : ndarray
        Of Legendre series coefficients representing their difference.

    See Also
    --------
    legadd, legmul, legdiv, legpow

    Notes
    -----
    Subtraction in the Legendre basis is component-wise, exactly as for
    "standard" polynomials, so no reprojection onto the basis is needed.

    Examples
    --------
    >>> from numpy.polynomial import legendre as L
    >>> c1 = (1,2,3)
    >>> c2 = (3,2,1)
    >>> L.legsub(c1,c2)
    array([-2.,  0.,  2.])
    >>> L.legsub(c2,c1) # -C.legsub(c1,c2)
    array([ 2.,  0., -2.])

    """
    # c1, c2 are trimmed copies
    [c1, c2] = pu.as_series([c1, c2])
    # Work in the longer array so the tail coefficients survive; negate
    # first when the subtrahend is the longer of the two.
    if len(c1) >= len(c2):
        diff = c1
        diff[:c2.size] -= c2
    else:
        diff = -c2
        diff[:c1.size] += c1
    return pu.trimseq(diff)
def legmulx(c):
    """Multiply a Legendre series by x.

    Multiply the Legendre series `c` by x, where x is the independent
    variable.

    Parameters
    ----------
    c : array_like
        1-D array of Legendre series coefficients ordered from low to
        high.

    Returns
    -------
    out : ndarray
        Array representing the result of the multiplication.

    Notes
    -----
    The multiplication uses the recursion relationship for Legendre
    polynomials in the form

    .. math::

      xP_i(x) = ((i + 1)*P_{i + 1}(x) + i*P_{i - 1}(x))/(2i + 1)

    """
    # c is a trimmed copy
    [c] = pu.as_series([c])
    # The zero series maps to itself.
    if len(c) == 1 and c[0] == 0:
        return c

    prd = np.empty(len(c) + 1, dtype=c.dtype)
    prd[0] = c[0]*0
    prd[1] = c[0]
    for i in range(1, len(c)):
        # x*P_i = ((i + 1)*P_{i+1} + i*P_{i-1})/(2*i + 1)
        denom = 2*i + 1
        prd[i + 1] = (c[i]*(i + 1))/denom
        prd[i - 1] += (c[i]*i)/denom
    return prd
def legmul(c1, c2):
    """
    Multiply one Legendre series by another.

    Returns the product of two Legendre series `c1` * `c2`.  The arguments
    are sequences of coefficients, from lowest order "term" to highest,
    e.g., [1,2,3] represents the series ``P_0 + 2*P_1 + 3*P_2``.

    Parameters
    ----------
    c1, c2 : array_like
        1-D arrays of Legendre series coefficients ordered from low to
        high.

    Returns
    -------
    out : ndarray
        Of Legendre series coefficients representing their product.

    See Also
    --------
    legadd, legsub, legdiv, legpow

    Notes
    -----
    In general, the (polynomial) product of two C-series results in terms
    that are not in the Legendre polynomial basis set.  Thus, to express
    the product as a Legendre series, it is necessary to "reproject" the
    product onto said basis set, which may produce "unintuitive" (but
    correct) results; see Examples section below.

    Examples
    --------
    >>> from numpy.polynomial import legendre as L
    >>> c1 = (1,2,3)
    >>> c2 = (3,2)
    >>> L.legmul(c1,c2) # multiplication requires "reprojection"
    array([  4.33333333,  10.4       ,  11.66666667,   3.6       ])

    """
    # s1, s2 are trimmed copies
    [c1, c2] = pu.as_series([c1, c2])

    # Clenshaw-like downward recurrence: the shorter series is "evaluated"
    # with the longer series playing the role of the argument; legmulx
    # supplies multiplication by x in the Legendre basis.
    if len(c1) > len(c2):
        c = c2
        xs = c1
    else:
        c = c1
        xs = c2

    if len(c) == 1:
        c0 = c[0]*xs
        c1 = 0
    elif len(c) == 2:
        c0 = c[0]*xs
        c1 = c[1]*xs
    else:
        nd = len(c)
        c0 = c[-2]*xs
        c1 = c[-1]*xs
        for i in range(3, len(c) + 1):
            tmp = c0
            nd = nd - 1
            # Three-term Legendre recurrence folds high-degree terms back
            # into the two running accumulators c0 and c1.
            c0 = legsub(c[-i]*xs, (c1*(nd - 1))/nd)
            c1 = legadd(tmp, (legmulx(c1)*(2*nd - 1))/nd)
    return legadd(c0, legmulx(c1))
def legdiv(c1, c2):
    """
    Divide one Legendre series by another.

    Returns the quotient-with-remainder of two Legendre series
    `c1` / `c2`.  The arguments are sequences of coefficients from lowest
    order "term" to highest, e.g., [1,2,3] represents the series
    ``P_0 + 2*P_1 + 3*P_2``.

    Parameters
    ----------
    c1, c2 : array_like
        1-D arrays of Legendre series coefficients ordered from low to
        high.

    Returns
    -------
    quo, rem : ndarrays
        Of Legendre series coefficients representing the quotient and
        remainder.

    See Also
    --------
    legadd, legsub, legmul, legpow

    Notes
    -----
    In general, the (polynomial) division of one Legendre series by another
    results in quotient and remainder terms that are not in the Legendre
    polynomial basis set.  Thus, to express these results as a Legendre
    series, it is necessary to "reproject" the results onto the Legendre
    basis set, which may produce "unintuitive" (but correct) results; see
    Examples section below.

    Examples
    --------
    >>> from numpy.polynomial import legendre as L
    >>> c1 = (1,2,3)
    >>> c2 = (3,2,1)
    >>> L.legdiv(c1,c2) # quotient "intuitive," remainder not
    (array([ 3.]), array([-8., -4.]))
    >>> c2 = (0,1,2,3)
    >>> L.legdiv(c2,c1) # neither "intuitive"
    (array([-0.07407407,  1.66666667]), array([-1.03703704, -2.51851852]))

    """
    # c1, c2 are trimmed copies
    [c1, c2] = pu.as_series([c1, c2])
    if c2[-1] == 0:
        raise ZeroDivisionError()

    # note: this is more efficient than `divmod` on the coefficient arrays
    lc1 = len(c1)
    lc2 = len(c2)
    if lc1 < lc2:
        # Dividend degree below divisor degree: quotient 0, remainder c1.
        return c1[:1]*0, c1
    elif lc2 == 1:
        # Division by a constant (times P_0) is a simple scaling.
        return c1/c2[-1], c1[:1]*0
    else:
        quo = np.empty(lc1 - lc2 + 1, dtype=c1.dtype)
        rem = c1
        # Long division from the top degree down: at each step, the product
        # of c2 with the shifted basis series [0]*i + [1] has the same top
        # degree as rem, so subtracting q*p cancels rem's leading term.
        for i in range(lc1 - lc2, - 1, -1):
            p = legmul([0]*i + [1], c2)
            q = rem[-1]/p[-1]
            rem = rem[:-1] - q*p[:-1]
            quo[i] = q
        return quo, pu.trimseq(rem)
def legpow(c, pow, maxpower=16):
    """Raise a Legendre series to a power.

    Returns the Legendre series `c` raised to the power `pow`. The
    argument `c` is a sequence of coefficients ordered from low to high.
    i.e., [1,2,3] is the series  ``P_0 + 2*P_1 + 3*P_2.``

    Parameters
    ----------
    c : array_like
        1-D array of Legendre series coefficients ordered from low to
        high.
    pow : integer
        Power to which the series will be raised
    maxpower : integer, optional
        Maximum power allowed. This is mainly to limit growth of the series
        to unmanageable size. Default is 16

    Returns
    -------
    coef : ndarray
        Legendre series of power.

    Raises
    ------
    ValueError
        If `pow` is not a non-negative integer, or exceeds `maxpower`.

    See Also
    --------
    legadd, legsub, legmul, legdiv

    """
    # c is a trimmed copy
    [c] = pu.as_series([c])
    power = int(pow)
    if power != pow or power < 0:
        raise ValueError("Power must be a non-negative integer.")
    elif maxpower is not None and power > maxpower:
        raise ValueError("Power is too large")
    elif power == 0:
        return np.array([1], dtype=c.dtype)
    elif power == 1:
        return c
    else:
        # Exponentiation by squaring: O(log(power)) series products
        # instead of the naive power-1 sequential multiplications.
        prd = None
        base = c
        n = power
        while n > 0:
            if n % 2 == 1:
                prd = base if prd is None else legmul(prd, base)
            n //= 2
            if n > 0:
                base = legmul(base, base)
        return prd
def legder(c, m=1, scl=1, axis=0):
    """
    Differentiate a Legendre series.

    Returns the Legendre series coefficients `c` differentiated `m` times
    along `axis`.  At each iteration the result is multiplied by `scl` (the
    scaling factor is for use in a linear change of variable). The argument
    `c` is an array of coefficients from low to high degree along each
    axis, e.g., [1,2,3] represents the series ``1*L_0 + 2*L_1 + 3*L_2``
    while [[1,2],[1,2]] represents ``1*L_0(x)*L_0(y) + 1*L_1(x)*L_0(y) +
    2*L_0(x)*L_1(y) + 2*L_1(x)*L_1(y)`` if axis=0 is ``x`` and axis=1 is
    ``y``.

    Parameters
    ----------
    c : array_like
        Array of Legendre series coefficients. If c is multidimensional the
        different axis correspond to different variables with the degree in
        each axis given by the corresponding index.
    m : int, optional
        Number of derivatives taken, must be non-negative. (Default: 1)
    scl : scalar, optional
        Each differentiation is multiplied by `scl`.  The end result is
        multiplication by ``scl**m``.  This is for use in a linear change of
        variable. (Default: 1)
    axis : int, optional
        Axis over which the derivative is taken. (Default: 0).

        .. versionadded:: 1.7.0

    Returns
    -------
    der : ndarray
        Legendre series of the derivative.

    See Also
    --------
    legint

    Notes
    -----
    In general, the result of differentiating a Legendre series does not
    resemble the same operation on a power series. Thus the result of this
    function may be "unintuitive," albeit correct; see Examples section
    below.

    Examples
    --------
    >>> from numpy.polynomial import legendre as L
    >>> c = (1,2,3,4)
    >>> L.legder(c)
    array([  6.,   9.,  20.])
    >>> L.legder(c, 3)
    array([ 60.])
    >>> L.legder(c, scl=-1)
    array([ -6.,  -9., -20.])
    >>> L.legder(c, 2,-1)
    array([  9.,  60.])

    """
    c = np.array(c, ndmin=1, copy=1)
    # Boolean/integer coefficients are promoted so the recurrence's
    # divisions and in-place updates work in floating point.
    if c.dtype.char in '?bBhHiIlLqQpP':
        c = c.astype(np.double)
    cnt, iaxis = [int(t) for t in [m, axis]]

    if cnt != m:
        raise ValueError("The order of derivation must be integer")
    if cnt < 0:
        raise ValueError("The order of derivation must be non-negative")
    if iaxis != axis:
        raise ValueError("The axis must be integer")
    if not -c.ndim <= iaxis < c.ndim:
        raise ValueError("The axis is out of range")
    if iaxis < 0:
        iaxis += c.ndim

    if cnt == 0:
        return c

    # Bring the differentiation axis to the front so the recurrence can
    # index degrees directly; undone before returning.
    c = np.rollaxis(c, iaxis)
    n = len(c)
    if cnt >= n:
        # Differentiating at least `degree + 1` times gives zero.
        c = c[:1]*0
    else:
        for i in range(cnt):
            n = n - 1
            c *= scl
            der = np.empty((n,) + c.shape[1:], dtype=c.dtype)
            # Downward recurrence: L_j' contributes (2j - 1)*L_{j-1} and the
            # remainder of d/dx L_j is folded into c[j - 2] for later steps.
            for j in range(n, 2, -1):
                der[j - 1] = (2*j - 1)*c[j]
                c[j - 2] += c[j]
            if n > 1:
                der[1] = 3*c[2]
            der[0] = c[1]
            c = der
    c = np.rollaxis(c, 0, iaxis + 1)
    return c
def legint(c, m=1, k=[], lbnd=0, scl=1, axis=0):
    """
    Integrate a Legendre series.

    Returns the Legendre series coefficients `c` integrated `m` times from
    `lbnd` along `axis`. At each iteration the resulting series is
    **multiplied** by `scl` and an integration constant, `k`, is added.
    The scaling factor is for use in a linear change of variable.  ("Buyer
    beware": note that, depending on what one is doing, one may want `scl`
    to be the reciprocal of what one might expect; for more information,
    see the Notes section below.)  The argument `c` is an array of
    coefficients from low to high degree along each axis, e.g., [1,2,3]
    represents the series ``L_0 + 2*L_1 + 3*L_2`` while [[1,2],[1,2]]
    represents ``1*L_0(x)*L_0(y) + 1*L_1(x)*L_0(y) + 2*L_0(x)*L_1(y) +
    2*L_1(x)*L_1(y)`` if axis=0 is ``x`` and axis=1 is ``y``.

    Parameters
    ----------
    c : array_like
        Array of Legendre series coefficients. If c is multidimensional the
        different axis correspond to different variables with the degree in
        each axis given by the corresponding index.
    m : int, optional
        Order of integration, must be positive. (Default: 1)
    k : {[], list, scalar}, optional
        Integration constant(s).  The value of the first integral at
        ``lbnd`` is the first value in the list, the value of the second
        integral at ``lbnd`` is the second value, etc.  If ``k == []`` (the
        default), all constants are set to zero.  If ``m == 1``, a single
        scalar can be given instead of a list.
    lbnd : scalar, optional
        The lower bound of the integral. (Default: 0)
    scl : scalar, optional
        Following each integration the result is *multiplied* by `scl`
        before the integration constant is added. (Default: 1)
    axis : int, optional
        Axis over which the integral is taken. (Default: 0).

        .. versionadded:: 1.7.0

    Returns
    -------
    S : ndarray
        Legendre series coefficient array of the integral.

    Raises
    ------
    ValueError
        If ``m < 0``, ``len(k) > m``, ``np.isscalar(lbnd) == False``, or
        ``np.isscalar(scl) == False``.

    See Also
    --------
    legder

    Notes
    -----
    Note that the result of each integration is *multiplied* by `scl`.
    Why is this important to note?  Say one is making a linear change of
    variable :math:`u = ax + b` in an integral relative to `x`.  Then
    .. math::`dx = du/a`, so one will need to set `scl` equal to
    :math:`1/a` - perhaps not what one would have first thought.

    Also note that, in general, the result of integrating a C-series needs
    to be "reprojected" onto the C-series basis set.  Thus, typically,
    the result of this function is "unintuitive," albeit correct; see
    Examples section below.

    Examples
    --------
    >>> from numpy.polynomial import legendre as L
    >>> c = (1,2,3)
    >>> L.legint(c)
    array([ 0.33333333,  0.4       ,  0.66666667,  0.6       ])
    >>> L.legint(c, 3)
    array([  1.66666667e-02,  -1.78571429e-02,   4.76190476e-02,
            -1.73472348e-18,   1.90476190e-02,   9.52380952e-03])
    >>> L.legint(c, k=3)
    array([ 3.33333333,  0.4       ,  0.66666667,  0.6       ])
    >>> L.legint(c, lbnd=-2)
    array([ 7.33333333,  0.4       ,  0.66666667,  0.6       ])
    >>> L.legint(c, scl=2)
    array([ 0.66666667,  0.8       ,  1.33333333,  1.2       ])

    """
    c = np.array(c, ndmin=1, copy=1)
    # Promote boolean/integer coefficients so the divisions below are exact.
    if c.dtype.char in '?bBhHiIlLqQpP':
        c = c.astype(np.double)
    # NOTE: the mutable default k=[] is safe here because k is only ever
    # rebound (k = [k], k = list(k) + ...), never mutated in place.
    if not np.iterable(k):
        k = [k]
    cnt, iaxis = [int(t) for t in [m, axis]]

    if cnt != m:
        raise ValueError("The order of integration must be integer")
    if cnt < 0:
        raise ValueError("The order of integration must be non-negative")
    if len(k) > cnt:
        raise ValueError("Too many integration constants")
    if iaxis != axis:
        raise ValueError("The axis must be integer")
    if not -c.ndim <= iaxis < c.ndim:
        raise ValueError("The axis is out of range")
    if iaxis < 0:
        iaxis += c.ndim

    if cnt == 0:
        return c

    # Bring the integration axis to the front for direct degree indexing.
    c = np.rollaxis(c, iaxis)
    # Pad the integration constants with zeros, one per integration pass.
    k = list(k) + [0]*(cnt - len(k))
    for i in range(cnt):
        n = len(c)
        c *= scl
        if n == 1 and np.all(c[0] == 0):
            # Integrating the zero series only adds the constant.
            c[0] += k[i]
        else:
            tmp = np.empty((n + 1,) + c.shape[1:], dtype=c.dtype)
            tmp[0] = c[0]*0
            tmp[1] = c[0]
            if n > 1:
                tmp[2] = c[1]/3
            # Antiderivative recurrence: the integral of L_j spreads into
            # the L_{j+1} and L_{j-1} coefficients.
            for j in range(2, n):
                t = c[j]/(2*j + 1)
                tmp[j + 1] = t
                tmp[j - 1] -= t
            # Choose the constant term so the integral equals k[i] at lbnd.
            tmp[0] += k[i] - legval(lbnd, tmp)
            c = tmp
    c = np.rollaxis(c, 0, iaxis + 1)
    return c
def legval(x, c, tensor=True):
    """
    Evaluate a Legendre series at points x.

    Computes ``p(x) = c[0]*L_0(x) + c[1]*L_1(x) + ... + c[n]*L_n(x)`` by
    Clenshaw recursion (backward recurrence), which is numerically stabler
    than summing the basis polynomials directly.

    Parameters
    ----------
    x : array_like, compatible object
        Points of evaluation.  Lists and tuples are converted to ndarrays;
        anything else is used as-is and must support addition and
        multiplication with itself and with the elements of `c`.
    c : array_like
        Coefficients ordered from degree 0 upward.  If `c` is
        multidimensional, the trailing indices enumerate multiple series
        (e.g. the columns of a 2-D `c`).  Trailing zeros are still used in
        the evaluation, so trim them if efficiency matters.
    tensor : boolean, optional
        If True (default) and `x` is an ndarray, `c` is broadcast against
        every element of `x` (result shape ``c.shape[1:] + x.shape``); if
        False, `x` is broadcast over the trailing dimensions of `c`
        (result shape ``c.shape[1:]``).

        .. versionadded:: 1.7.0

    Returns
    -------
    values : ndarray, algebra_like
        The evaluated series.

    See Also
    --------
    legval2d, leggrid2d, legval3d, leggrid3d

    Notes
    -----
    The evaluation uses Clenshaw recursion, aka synthetic division.
    """
    c = np.array(c, ndmin=1, copy=0)
    if c.dtype.char in '?bBhHiIlLqQpP':
        # Boolean/integer coefficients would make the divisions below
        # misbehave; promote to double first.
        c = c.astype(np.double)
    if isinstance(x, (tuple, list)):
        x = np.asarray(x)
    if isinstance(x, np.ndarray) and tensor:
        # Append singleton axes so c broadcasts against every element of x.
        c = c.reshape(c.shape + (1,)*x.ndim)

    nterms = len(c)
    if nterms == 1:
        b0, b1 = c[0], 0
    elif nterms == 2:
        b0, b1 = c[0], c[1]
    else:
        # Clenshaw: fold coefficients in from the high-degree end, using the
        # Legendre recurrence (k+1)*L_{k+1} = (2k+1)*x*L_k - k*L_{k-1}.
        deg = nterms
        b0 = c[-2]
        b1 = c[-1]
        for k in range(3, nterms + 1):
            prev = b0
            deg = deg - 1
            b0 = c[-k] - (b1*(deg - 1))/deg
            b1 = prev + (b1*x*(2*deg - 1))/deg
    return b0 + b1*x
def legval2d(x, y, c):
    """
    Evaluate a 2-D Legendre series at points (x, y).

    This function returns the values:

    .. math:: p(x,y) = \\sum_{i,j} c_{i,j} * L_i(x) * L_j(y)

    The parameters `x` and `y` are converted to arrays only if they are
    tuples or a lists, otherwise they are treated as a scalars and they
    must have the same shape after conversion.

    Parameters
    ----------
    x, y : array_like, compatible objects
        The two dimensional series is evaluated at the points `(x, y)`,
        where `x` and `y` must have the same shape.
    c : array_like
        Array of coefficients ordered so that the coefficient of the term
        of multi-degree i,j is contained in ``c[i,j]``. If `c` has
        dimension greater than two the remaining indices enumerate multiple
        sets of coefficients.

    Returns
    -------
    values : ndarray, compatible object
        The values of the two dimensional Legendre series at points formed
        from pairs of corresponding values from `x` and `y`.

    Raises
    ------
    ValueError
        If `x` and `y` cannot be combined into a single array.

    See Also
    --------
    legval, leggrid2d, legval3d, leggrid3d

    Notes
    -----
    .. versionadded::1.7.0
    """
    try:
        x, y = np.array((x, y), copy=0)
    except Exception:
        # Previously a bare `except:`, which also swallowed SystemExit and
        # KeyboardInterrupt; only genuine conversion failures should be
        # translated into ValueError.
        raise ValueError('x, y are incompatible')

    # Contract over x first, then over y without re-broadcasting.
    c = legval(x, c)
    c = legval(y, c, tensor=False)
    return c
def leggrid2d(x, y, c):
    """
    Evaluate a 2-D Legendre series on the Cartesian product of x and y.

    Returns

    .. math:: p(a,b) = \\sum_{i,j} c_{i,j} * L_i(a) * L_j(b)

    for every pair ``(a, b)`` with ``a`` taken from `x` and ``b`` from `y`.
    The resulting grid has `x`'s axes in the leading positions and `y`'s
    axes after them.

    Parameters
    ----------
    x, y : array_like, compatible objects
        Grid coordinates.  Lists and tuples are converted to ndarrays;
        anything else is left unchanged and treated as a scalar.
    c : array_like
        Coefficient array; ``c[i,j]`` multiplies ``L_i(a)*L_j(b)``.  Extra
        dimensions enumerate multiple coefficient sets.

    Returns
    -------
    values : ndarray, compatible object
        Values of the series on the Cartesian product of `x` and `y`.

    See Also
    --------
    legval, legval2d, legval3d, leggrid3d

    Notes
    -----
    .. versionadded::1.7.0
    """
    # Evaluating along each coordinate in turn builds the full outer grid.
    return legval(y, legval(x, c))
def legval3d(x, y, z, c):
    """
    Evaluate a 3-D Legendre series at points (x, y, z).

    This function returns the values:

    .. math:: p(x,y,z) = \\sum_{i,j,k} c_{i,j,k} * L_i(x) * L_j(y) * L_k(z)

    The parameters `x`, `y`, and `z` are converted to arrays only if
    they are tuples or a lists, otherwise they are treated as a scalars and
    they must have the same shape after conversion.

    Parameters
    ----------
    x, y, z : array_like, compatible object
        The three dimensional series is evaluated at the points
        `(x, y, z)`, where `x`, `y`, and `z` must have the same shape.
    c : array_like
        Array of coefficients ordered so that the coefficient of the term of
        multi-degree i,j,k is contained in ``c[i,j,k]``. If `c` has dimension
        greater than 3 the remaining indices enumerate multiple sets of
        coefficients.

    Returns
    -------
    values : ndarray, compatible object
        The values of the multidimensional polynomial on points formed with
        triples of corresponding values from `x`, `y`, and `z`.

    Raises
    ------
    ValueError
        If `x`, `y`, and `z` cannot be combined into a single array.

    See Also
    --------
    legval, legval2d, leggrid2d, leggrid3d

    Notes
    -----
    .. versionadded::1.7.0
    """
    try:
        x, y, z = np.array((x, y, z), copy=0)
    except Exception:
        # Previously a bare `except:`; narrow the clause so SystemExit,
        # KeyboardInterrupt, etc. propagate instead of becoming ValueError.
        raise ValueError('x, y, z are incompatible')

    # Contract one coordinate at a time; after the first call the leading
    # degree axis is gone, so the later calls must not re-broadcast.
    c = legval(x, c)
    c = legval(y, c, tensor=False)
    c = legval(z, c, tensor=False)
    return c
def leggrid3d(x, y, z, c):
    """
    Evaluate a 3-D Legendre series on the Cartesian product of x, y, and z.

    Returns

    .. math:: p(a,b,c) = \\sum_{i,j,k} c_{i,j,k} * L_i(a) * L_j(b) * L_k(c)

    for every triple ``(a, b, c)`` with ``a`` from `x`, ``b`` from `y` and
    ``c`` from `z`.  The resulting grid has `x`'s axes first, then `y`'s,
    then `z`'s.

    Parameters
    ----------
    x, y, z : array_like, compatible objects
        Grid coordinates.  Lists and tuples are converted to ndarrays;
        anything else is left unchanged and treated as a scalar.
    c : array_like
        Coefficient array; ``c[i,j,k]`` multiplies ``L_i(a)*L_j(b)*L_k(c)``.
        Extra dimensions enumerate multiple coefficient sets.

    Returns
    -------
    values : ndarray, compatible object
        Values of the series on the Cartesian product of `x`, `y` and `z`.

    See Also
    --------
    legval, legval2d, leggrid2d, legval3d

    Notes
    -----
    .. versionadded::1.7.0
    """
    # Successive one-axis evaluations accumulate the full outer grid.
    return legval(z, legval(y, legval(x, c)))
def legvander(x, deg):
    """Pseudo-Vandermonde matrix of given degree.

    Builds the matrix ``V`` with ``V[..., i] = L_i(x)`` for
    ``0 <= i <= deg``: leading axes index the elements of `x`, the last
    axis is the Legendre degree.  For a coefficient vector ``c`` of length
    ``deg + 1``, ``np.dot(V, c)`` matches ``legval(x, c)`` up to roundoff,
    which makes this matrix useful for least-squares fitting and for bulk
    evaluation of many series at the same sample points.

    Parameters
    ----------
    x : array_like
        Sample points.  Converted to float64 or complex128 depending on
        whether any element is complex; a scalar becomes a 1-D array.
    deg : int
        Degree of the resulting matrix.

    Returns
    -------
    vander : ndarray
        Matrix of shape ``x.shape + (deg + 1,)`` with the same dtype as
        the converted `x`.

    Raises
    ------
    ValueError
        If `deg` is not a non-negative integer.
    """
    ideg = int(deg)
    if ideg != deg:
        raise ValueError("deg must be integer")
    if ideg < 0:
        raise ValueError("deg must be non-negative")

    # `+ 0.0` forces promotion to a floating (or complex) dtype.
    x = np.array(x, copy=0, ndmin=1) + 0.0
    mat = np.empty((ideg + 1,) + x.shape, dtype=x.dtype)

    # Forward Bonnet recursion: k*L_k = (2k-1)*x*L_{k-1} - (k-1)*L_{k-2}.
    # Slightly less accurate than backward recursion here, but cheaper.
    mat[0] = x*0 + 1
    if ideg > 0:
        mat[1] = x
        for k in range(2, ideg + 1):
            mat[k] = (mat[k-1]*x*(2*k - 1) - mat[k-2]*(k - 1))/k
    # Move the degree axis to the end.
    return np.rollaxis(mat, 0, mat.ndim)
def legvander2d(x, y, deg):
    """Pseudo-Vandermonde matrix of given degrees.

    Returns the pseudo-Vandermonde matrix of degrees `deg` and sample
    points `(x, y)`. The pseudo-Vandermonde matrix is defined by

    .. math:: V[..., deg[1]*i + j] = L_i(x) * L_j(y),

    where `0 <= i <= deg[0]` and `0 <= j <= deg[1]`. The leading indices of
    `V` index the points `(x, y)` and the last index encodes the degrees of
    the Legendre polynomials.

    If ``V = legvander2d(x, y, [xdeg, ydeg])``, then the columns of `V`
    correspond to the elements of a 2-D coefficient array `c` of shape
    (xdeg + 1, ydeg + 1) in the order

    .. math:: c_{00}, c_{01}, c_{02} ... , c_{10}, c_{11}, c_{12} ...

    and ``np.dot(V, c.flat)`` and ``legval2d(x, y, c)`` will be the same
    up to roundoff.

    Parameters
    ----------
    x, y : array_like
        Arrays of point coordinates, all of the same shape. The dtypes
        will be converted to either float64 or complex128 depending on
        whether any of the elements are complex. Scalars are converted to
        1-D arrays.
    deg : list of ints
        List of maximum degrees of the form [x_deg, y_deg].

    Returns
    -------
    vander2d : ndarray
        The shape of the returned matrix is ``x.shape + (order,)``, where
        :math:`order = (deg[0]+1)*(deg[1]+1)`. The dtype will be the same
        as the converted `x` and `y`.

    Raises
    ------
    ValueError
        If `deg` is not a pair of non-negative integral values.

    See Also
    --------
    legvander, legvander3d. legval2d, legval3d

    Notes
    -----
    .. versionadded::1.7.0
    """
    ideg = [int(d) for d in deg]
    # Require exactly two non-negative integral degrees; clearer than the
    # old comparison of a list of bools against [1, 1].
    if len(ideg) != 2 or any(i != d or i < 0 for i, d in zip(ideg, deg)):
        raise ValueError("degrees must be non-negative integers")
    degx, degy = ideg
    x, y = np.array((x, y), copy=0) + 0.0

    vx = legvander(x, degx)
    vy = legvander(y, degy)
    # Outer product over the two degree axes, then flatten them into one
    # trailing axis in row-major order.
    v = vx[..., None]*vy[..., None,:]
    return v.reshape(v.shape[:-2] + (-1,))
def legvander3d(x, y, z, deg):
    """Pseudo-Vandermonde matrix of given degrees.

    Returns the pseudo-Vandermonde matrix of degrees `deg` and sample
    points `(x, y, z)`. If `l, m, n` are the given degrees in `x, y, z`,
    then the pseudo-Vandermonde matrix is defined by

    .. math:: V[..., (m+1)(n+1)i + (n+1)j + k] = L_i(x)*L_j(y)*L_k(z),

    where `0 <= i <= l`, `0 <= j <= m`, and `0 <= k <= n`. The leading
    indices of `V` index the points `(x, y, z)` and the last index encodes
    the degrees of the Legendre polynomials.

    If ``V = legvander3d(x, y, z, [xdeg, ydeg, zdeg])``, then the columns
    of `V` correspond to the elements of a 3-D coefficient array `c` of
    shape (xdeg + 1, ydeg + 1, zdeg + 1) in the order

    .. math:: c_{000}, c_{001}, c_{002},... , c_{010}, c_{011}, c_{012},...

    and ``np.dot(V, c.flat)`` and ``legval3d(x, y, z, c)`` will be the
    same up to roundoff.

    Parameters
    ----------
    x, y, z : array_like
        Arrays of point coordinates, all of the same shape. The dtypes will
        be converted to either float64 or complex128 depending on whether
        any of the elements are complex. Scalars are converted to 1-D
        arrays.
    deg : list of ints
        List of maximum degrees of the form [x_deg, y_deg, z_deg].

    Returns
    -------
    vander3d : ndarray
        The shape of the returned matrix is ``x.shape + (order,)``, where
        :math:`order = (deg[0]+1)*(deg[1]+1)*(deg[2]+1)`. The dtype will
        be the same as the converted `x`, `y`, and `z`.

    Raises
    ------
    ValueError
        If `deg` is not a triple of non-negative integral values.

    See Also
    --------
    legvander, legvander3d. legval2d, legval3d

    Notes
    -----
    .. versionadded::1.7.0
    """
    ideg = [int(d) for d in deg]
    # Require exactly three non-negative integral degrees; clearer than the
    # old comparison of a list of bools against [1, 1, 1].
    if len(ideg) != 3 or any(i != d or i < 0 for i, d in zip(ideg, deg)):
        raise ValueError("degrees must be non-negative integers")
    degx, degy, degz = ideg
    x, y, z = np.array((x, y, z), copy=0) + 0.0

    vx = legvander(x, degx)
    vy = legvander(y, degy)
    vz = legvander(z, degz)
    # Triple outer product over the degree axes, flattened into one
    # trailing axis in row-major order.
    v = vx[..., None, None]*vy[..., None,:, None]*vz[..., None, None,:]
    return v.reshape(v.shape[:-3] + (-1,))
def legfit(x, y, deg, rcond=None, full=False, w=None):
    """
    Least squares fit of Legendre series to data.

    Return the coefficients of a Legendre series of degree `deg` that is the
    least squares fit to the data values `y` given at points `x`. If `y` is
    1-D the returned coefficients will also be 1-D. If `y` is 2-D multiple
    fits are done, one for each column of `y`, and the resulting
    coefficients are stored in the corresponding columns of a 2-D return.
    The fitted polynomial(s) are in the form

    .. math::  p(x) = c_0 + c_1 * L_1(x) + ... + c_n * L_n(x),

    where `n` is `deg`.

    Parameters
    ----------
    x : array_like, shape (M,)
        x-coordinates of the M sample points ``(x[i], y[i])``.
    y : array_like, shape (M,) or (M, K)
        y-coordinates of the sample points. Several data sets of sample
        points sharing the same x-coordinates can be fitted at once by
        passing in a 2D-array that contains one dataset per column.
    deg : int or array_like
        Degree of the fitting polynomial. If `deg` is a single integer
        all terms up to and including the `deg`'th term are included.
        `deg` may alternatively be a list or array specifying which
        terms in the Legendre expansion to include in the fit.

        .. versionchanged:: 1.11.0
            `deg` may be a list specifying which terms to fit
    rcond : float, optional
        Relative condition number of the fit. Singular values smaller than
        this relative to the largest singular value will be ignored. The
        default value is len(x)*eps, where eps is the relative precision of
        the float type, about 2e-16 in most cases.
    full : bool, optional
        Switch determining nature of return value. When it is False (the
        default) just the coefficients are returned, when True diagnostic
        information from the singular value decomposition is also returned.
    w : array_like, shape (`M`,), optional
        Weights. If not None, the contribution of each point
        ``(x[i],y[i])`` to the fit is weighted by `w[i]`. Ideally the
        weights are chosen so that the errors of the products ``w[i]*y[i]``
        all have the same variance. The default value is None.

        .. versionadded:: 1.5.0

    Returns
    -------
    coef : ndarray, shape (deg + 1,) or (deg + 1, K)
        Legendre coefficients ordered from low to high. If `y` was
        2-D, the coefficients for the data in column k of `y` are in
        column `k`. If `deg` is specified as a list, coefficients for
        terms not included in the fit are set equal to zero in the
        returned `coef`.

    [residuals, rank, singular_values, rcond] : list
        These values are only returned if `full` = True

        resid -- sum of squared residuals of the least squares fit
        rank -- the numerical rank of the scaled Vandermonde matrix
        sv -- singular values of the scaled Vandermonde matrix
        rcond -- value of `rcond`.

        For more details, see `linalg.lstsq`.

    Warns
    -----
    RankWarning
        The rank of the coefficient matrix in the least-squares fit is
        deficient. The warning is only raised if `full` = False. The
        warnings can be turned off by

        >>> import warnings
        >>> warnings.simplefilter('ignore', RankWarning)

    See Also
    --------
    chebfit, polyfit, lagfit, hermfit, hermefit
    legval : Evaluates a Legendre series.
    legvander : Vandermonde matrix of Legendre series.
    legweight : Legendre weight function (= 1).
    linalg.lstsq : Computes a least-squares fit from the matrix.
    scipy.interpolate.UnivariateSpline : Computes spline fits.

    Notes
    -----
    The solution is the coefficients of the Legendre series `p` that
    minimizes the sum of the weighted squared errors

    .. math:: E = \\sum_j w_j^2 * |y_j - p(x_j)|^2,

    where :math:`w_j` are the weights. This problem is solved by setting up
    as the (typically) overdetermined matrix equation

    .. math:: V(x) * c = w * y,

    where `V` is the weighted pseudo Vandermonde matrix of `x`, `c` are the
    coefficients to be solved for, `w` are the weights, and `y` are the
    observed values. This equation is then solved using the singular value
    decomposition of `V`.

    If some of the singular values of `V` are so small that they are
    neglected, then a `RankWarning` will be issued. This means that the
    coefficient values may be poorly determined. Using a lower order fit
    will usually get rid of the warning. The `rcond` parameter can also be
    set to a value smaller than its default, but the resulting fit may be
    spurious and have large contributions from roundoff error.

    Fits using Legendre series are usually better conditioned than fits
    using power series, but much can depend on the distribution of the
    sample points and the smoothness of the data. If the quality of the fit
    is inadequate splines may be a good alternative.

    References
    ----------
    .. [1] Wikipedia, "Curve fitting",
           http://en.wikipedia.org/wiki/Curve_fitting
    """
    # `+ 0.0` promotes integer input to a floating dtype up front.
    x = np.asarray(x) + 0.0
    y = np.asarray(y) + 0.0
    # Normalize `deg` to a 1-D int array: a single entry means "fit all
    # terms up to that degree", several entries mean "fit only these terms".
    deg = np.asarray([deg,], dtype=int).flatten()

    # check arguments.
    if deg.size < 1:
        raise TypeError("expected deg to be one or more integers")
    if deg.min() < 0:
        raise ValueError("expected deg >= 0")
    if x.ndim != 1:
        raise TypeError("expected 1D vector for x")
    if x.size == 0:
        raise TypeError("expected non-empty vector for x")
    if y.ndim < 1 or y.ndim > 2:
        raise TypeError("expected 1D or 2D array for y")
    if len(x) != len(y):
        raise TypeError("expected x and y to have same length")

    if deg.size == 1:
        # Classic fit: all terms 0..deg are free parameters.
        restricted_fit = False
        lmax = deg[0]
        order = lmax + 1
    else:
        # Restricted fit: only the listed terms are free parameters.
        restricted_fit = True
        lmax = deg.max()
        order = deg.size

    # set up the least squares matrices in transposed form
    van = legvander(x, lmax)
    if restricted_fit:
        # Keep only the Vandermonde columns for the requested terms.
        van = van[:, deg]
    lhs = van.T
    rhs = y.T
    if w is not None:
        w = np.asarray(w) + 0.0
        if w.ndim != 1:
            raise TypeError("expected 1D vector for w")
        if len(x) != len(w):
            raise TypeError("expected x and w to have same length")
        # apply weights. Don't use inplace operations as they
        # can cause problems with NA.
        lhs = lhs * w
        rhs = rhs * w

    # set rcond
    if rcond is None:
        rcond = len(x)*np.finfo(x.dtype).eps

    # Determine the norms of the design matrix columns.
    if issubclass(lhs.dtype.type, np.complexfloating):
        scl = np.sqrt((np.square(lhs.real) + np.square(lhs.imag)).sum(1))
    else:
        scl = np.sqrt(np.square(lhs).sum(1))
    # Avoid dividing by zero for all-zero columns.
    scl[scl == 0] = 1

    # Solve the least squares problem, normalizing columns to unit norm to
    # improve the conditioning of the system; undo the scaling afterwards.
    c, resids, rank, s = la.lstsq(lhs.T/scl, rhs.T, rcond)
    c = (c.T/scl).T

    # Expand c to include non-fitted coefficients which are set to zero
    if restricted_fit:
        if c.ndim == 2:
            cc = np.zeros((lmax+1, c.shape[1]), dtype=c.dtype)
        else:
            cc = np.zeros(lmax+1, dtype=c.dtype)
        cc[deg] = c
        c = cc

    # warn on rank reduction
    if rank != order and not full:
        msg = "The fit may be poorly conditioned"
        warnings.warn(msg, pu.RankWarning)

    if full:
        return c, [resids, rank, s, rcond]
    else:
        return c
def legcompanion(c):
    """Return the scaled companion matrix of c.

    The basis polynomials are scaled so that the companion matrix is
    symmetric when `c` is a Legendre basis polynomial. This provides
    better eigenvalue estimates than the unscaled case and for basis
    polynomials the eigenvalues are guaranteed to be real if
    `numpy.linalg.eigvalsh` is used to obtain them.

    Parameters
    ----------
    c : array_like
        1-D array of Legendre series coefficients ordered from low to high
        degree.

    Returns
    -------
    mat : ndarray
        Scaled companion matrix of dimensions (deg, deg).

    Raises
    ------
    ValueError
        If the trimmed series has degree less than 1.

    Notes
    -----
    .. versionadded::1.7.0
    """
    # c is a trimmed copy
    [c] = pu.as_series([c])
    if len(c) < 2:
        raise ValueError('Series must have maximum degree of at least 1.')
    if len(c) == 2:
        # Degree-1 series: the single root is available in closed form.
        return np.array([[-c[0]/c[1]]])

    n = len(c) - 1
    mat = np.zeros((n, n), dtype=c.dtype)
    # Scale factors 1/sqrt(2k+1) symmetrize the companion matrix built from
    # the Legendre three-term recurrence.
    scl = 1./np.sqrt(2*np.arange(n) + 1)
    # Strided views onto the first super- and sub-diagonal of `mat`.
    top = mat.reshape(-1)[1::n+1]
    bot = mat.reshape(-1)[n::n+1]
    top[...] = np.arange(1, n)*scl[:n-1]*scl[1:n]
    bot[...] = top
    # Fold the series coefficients into the last column.
    # NOTE(review): `n/(2*n - 1)` assumes true division (Python 3 or a
    # `from __future__ import division` at the top of this module — confirm);
    # under classic integer division this factor would be 0 for n > 1.
    mat[:, -1] -= (c[:-1]/c[-1])*(scl/scl[-1])*(n/(2*n - 1))
    return mat
def legroots(c):
    """
    Compute the roots of a Legendre series.

    Returns the zeros of ``p(x) = \\sum_i c[i] * L_i(x)``, obtained as the
    eigenvalues of the scaled companion matrix of the series.

    Parameters
    ----------
    c : 1-D array_like
        1-D array of coefficients, lowest degree first.

    Returns
    -------
    out : ndarray
        Sorted array of roots.  Real if all roots are real, otherwise
        complex.

    See Also
    --------
    polyroots, chebroots, lagroots, hermroots, hermeroots

    Notes
    -----
    Eigenvalue-based root estimates can carry large errors far from the
    origin (the series is numerically unstable there) and at roots of
    multiplicity greater than 1, where the series value is insensitive to
    root perturbations.  Isolated roots near the origin can be polished
    with a few Newton iterations.  Because the Legendre basis polynomials
    are not powers of ``x``, the results may look unintuitive.

    Examples
    --------
    >>> import numpy.polynomial.legendre as leg
    >>> leg.legroots((1, 2, 3, 4)) # 4L_3 + 3L_2 + 2L_1 + 1L_0, all real roots
    array([-0.85099543, -0.11407192,  0.51506735])
    """
    # c is a trimmed copy
    [c] = pu.as_series([c])
    if len(c) <= 1:
        # A constant series has no roots.
        return np.array([], dtype=c.dtype)
    if len(c) == 2:
        # Linear series: closed-form root.
        return np.array([-c[0]/c[1]])

    roots = la.eigvals(legcompanion(c))
    roots.sort()
    return roots
def leggauss(deg):
    """
    Gauss-Legendre quadrature.

    Computes the sample points and weights for Gauss-Legendre quadrature.
    These sample points and weights will correctly integrate polynomials of
    degree :math:`2*deg - 1` or less over the interval :math:`[-1, 1]` with
    the weight function :math:`f(x) = 1`.

    Parameters
    ----------
    deg : int
        Number of sample points and weights. It must be >= 1.

    Returns
    -------
    x : ndarray
        1-D ndarray containing the sample points.
    y : ndarray
        1-D ndarray containing the weights.

    Raises
    ------
    ValueError
        If `deg` is not a positive integer.

    Notes
    -----
    .. versionadded::1.7.0

    The results have only been tested up to degree 100, higher degrees may
    be problematic. The weights are determined by using the fact that

    .. math:: w_k = c / (L'_n(x_k) * L_{n-1}(x_k))

    where :math:`c` is a constant independent of :math:`k` and :math:`x_k`
    is the k'th root of :math:`L_n`, and then scaling the results to get
    the right value when integrating 1.
    """
    ideg = int(deg)
    if ideg != deg or ideg < 1:
        # The message now matches the check (`deg >= 1` is required); the
        # old text said "non-negative", wrongly suggesting deg == 0 is OK.
        raise ValueError("deg must be a positive integer")

    # first approximation of roots. We use the fact that the companion
    # matrix is symmetric in this case in order to obtain better zeros.
    c = np.array([0]*deg + [1])
    m = legcompanion(c)
    x = la.eigvalsh(m)

    # improve roots by one application of Newton
    dy = legval(x, c)
    df = legval(x, legder(c))
    x -= dy/df

    # compute the weights. We scale the factor to avoid possible numerical
    # overflow.
    fm = legval(x, c[1:])
    fm /= np.abs(fm).max()
    df /= np.abs(df).max()
    w = 1/(fm * df)

    # for Legendre we can also symmetrize
    w = (w + w[::-1])/2
    x = (x - x[::-1])/2

    # scale w to get the right value
    w *= 2. / w.sum()

    return x, w
def legweight(x):
    """
    Weight function of the Legendre polynomials.

    The Legendre polynomials are orthogonal (but not normalized) on
    :math:`[-1, 1]` with respect to the constant weight :math:`w(x) = 1`;
    this returns that constant with the same shape behaviour as `x`.

    Parameters
    ----------
    x : array_like
        Values at which the weight function will be computed.

    Returns
    -------
    w : ndarray
        The weight function at `x` — ones shaped like `x` (a float for
        scalar input).

    Notes
    -----
    .. versionadded::1.7.0
    """
    # `0.0*x + 1.0` preserves the input's shape and promotes to float for
    # both scalars and ndarrays.
    return 0.0*x + 1.0
#
# Legendre series class
#
class Legendre(ABCPolyBase):
    """A Legendre series class.

    The Legendre class provides the standard Python numerical methods
    '+', '-', '*', '//', '%', 'divmod', '**', and '()' as well as the
    attributes and methods listed in the `ABCPolyBase` documentation.

    Parameters
    ----------
    coef : array_like
        Legendre coefficients in order of increasing degree, i.e.,
        ``(1, 2, 3)`` gives ``1*P_0(x) + 2*P_1(x) + 3*P_2(x)``.
    domain : (2,) array_like, optional
        Domain to use. The interval ``[domain[0], domain[1]]`` is mapped
        to the interval ``[window[0], window[1]]`` by shifting and scaling.
        The default value is [-1, 1].
    window : (2,) array_like, optional
        Window, see `domain` for its use. The default value is [-1, 1].

        .. versionadded:: 1.6.0
    """
    # Virtual Functions: wire the module-level leg* routines into the
    # generic ABCPolyBase arithmetic/fitting machinery.
    _add = staticmethod(legadd)
    _sub = staticmethod(legsub)
    _mul = staticmethod(legmul)
    _div = staticmethod(legdiv)
    _pow = staticmethod(legpow)
    _val = staticmethod(legval)
    _int = staticmethod(legint)
    _der = staticmethod(legder)
    _fit = staticmethod(legfit)
    _line = staticmethod(legline)
    _roots = staticmethod(legroots)
    _fromroots = staticmethod(legfromroots)

    # Virtual properties
    # Short prefix used when printing instances.
    nickname = 'leg'
    # Default domain and window are both the natural Legendre interval.
    domain = np.array(legdomain)
    window = np.array(legdomain)
| bsd-3-clause |
itucsdb1621/itucsdb1621 | events.py | 1 | 2016 | import psycopg2
from flask import Blueprint, current_app, render_template, request, session, redirect, url_for
events_app = Blueprint('events_app', __name__)
@events_app.route('/show_events')
def show_events():
    """Render the list of all events; anonymous users go to the login page."""
    # PEP 8: compare to None with `is`, not `==`.
    if session.get('logged_in') is None:
        return redirect(url_for("loginpage"))
    with psycopg2.connect(current_app.config['dsn']) as conn:
        crs = conn.cursor()
        crs.execute("select event_name, event_exp, event_time, event_id from events")
        # A plain SELECT needs no commit; the old explicit conn.commit()
        # before fetchall() was a no-op and has been dropped.
        data = crs.fetchall()
    return render_template('allevents.html', data=data)
@events_app.route('/create_event', methods=['POST'])
def create_event():
    """Insert a new event from the submitted form, then show the event list."""
    name = request.form["event-name"]
    description = request.form["event-exp"]
    when = request.form["event-time"]
    with psycopg2.connect(current_app.config['dsn']) as conn:
        cursor = conn.cursor()
        # Parameterized INSERT — values are passed separately, never
        # interpolated into the SQL string.
        cursor.execute(
            "insert into events (event_name, event_exp, event_time) values (%s, %s, %s)",
            (name, description, when))
        conn.commit()
    return redirect(url_for('events_app.show_events'))
@events_app.route('/delete_event/<id>')
def delete_event(id):
    """Remove the event with the given id, then return to the event list."""
    with psycopg2.connect(current_app.config['dsn']) as conn:
        cursor = conn.cursor()
        # Parameterized DELETE keyed on the primary key from the URL.
        cursor.execute("delete from events where event_id = %s", (id, ))
        conn.commit()
    return redirect(url_for('events_app.show_events'))
@events_app.route('/updateEvent')
def updateEvent():
    # Render the form page used to edit an existing event.
    # NOTE(review): camelCase route/function name is inconsistent with the
    # snake_case views elsewhere in this module.
    return render_template('update_event.html')
@events_app.route('/update_event',methods=["POST"])
def update_event():
    # Modify an existing event, identified by its current (old) name.
    old_name = request.form["old-name"]        # lookup key for the row to change
    new_name = request.form["event-name"]
    explan = request.form["event-exp"]
    time_event = request.form["event-time"]
    with psycopg2.connect(current_app.config['dsn']) as conn:
        crs = conn.cursor()
        # Parameterized UPDATE keyed on the old name.
        # NOTE(review): unlike the other views there is no explicit
        # conn.commit() here; presumably the psycopg2 connection context
        # manager commits on successful exit — confirm.
        crs.execute("update events set event_name=%s, event_exp=%s, event_time=%s where event_name = %s", (new_name, explan, time_event, old_name, ))
    return redirect(url_for('events_app.show_events')) | gpl-3.0 |
unistra/django-sympa | docs/conf.py | 1 | 8528 | # -*- coding: utf-8 -*-
#
# sympa documentation build configuration file, created by
# sphinx-quickstart on Mon Aug 25 18:11:49 2014.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
from datetime import date
import sys
import os
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#sys.path.insert(0, os.path.abspath('.'))
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
'sphinx.ext.autodoc',
'sphinx.ext.doctest',
]
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'sympa'
copyright = u'%s, Direction Informatique' % date.today().strftime("%Y")
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = '0.1'
# The full version, including alpha/beta/rc tags.
release = '0.1'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['_build']
# The reST default role (used for this markup: `text`) to use for all
# documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# If true, keep warnings as "system message" paragraphs in the built documents.
#keep_warnings = False
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'default'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# Add any extra paths that contain custom files (such as robots.txt or
# .htaccess) here, relative to this directory. These files are copied
# directly to the root of the documentation.
#html_extra_path = []
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Output file base name for HTML help builder.
htmlhelp_basename = 'sympadoc'
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#'preamble': '',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
('index', 'sympa.tex', u'sympa Documentation',
u'Direction Informatique', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
('index', 'sympa', u'sympa Documentation',
[u'Direction Informatique'], 1)
]
# If true, show URL addresses after external links.
#man_show_urls = False
# -- Options for Texinfo output -------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
('index', 'sympa', u'sympa Documentation',
u'Direction Informatique', 'sympa', 'One line description of project.',
'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
#texinfo_appendices = []
# If false, no module index is generated.
#texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls = 'footnote'
# If true, do not generate a @detailmenu in the "Top" node's menu.
#texinfo_no_detailmenu = False
| gpl-2.0 |
TomAugspurger/pandas | pandas/core/ops/docstrings.py | 1 | 18025 | """
Templating for ops docstrings
"""
from typing import Dict, Optional
def _make_flex_doc(op_name: str, typ: str) -> str:
    """
    Make the appropriate substitutions for the given operation and class-typ
    into either _flex_doc_SERIES or _flex_doc_FRAME to return the docstring
    to attach to a generated method.
    Parameters
    ----------
    op_name : str {'__add__', '__sub__', ... '__eq__', '__ne__', ...}
        Dunder name of the flex arithmetic/comparison op being wrapped.
    typ : str {'series', 'dataframe'}
        Which template (_flex_doc_SERIES or _flex_doc_FRAME) to fill in.
    Returns
    -------
    doc : str
    """
    # Strip the dunder underscores: "__add__" -> "add".
    op_name = op_name.replace("__", "")
    op_desc = _op_descriptions[op_name]
    # Reversed ops ("radd", "rsub", ...) flip the operand order shown in the
    # "Equivalent to ``...``" line of the generated docstring.
    if op_name.startswith("r"):
        equiv = "other " + op_desc["op"] + " " + typ
    else:
        equiv = typ + " " + op_desc["op"] + " other"
    if typ == "series":
        base_doc = _flex_doc_SERIES
        # Comparison ops have no reflected variant, so no See Also block.
        if op_desc["reverse"]:
            base_doc += _see_also_reverse_SERIES.format(
                reverse=op_desc["reverse"], see_also_desc=op_desc["see_also_desc"],
            )
        doc_no_examples = base_doc.format(
            desc=op_desc["desc"],
            op_name=op_name,
            equiv=equiv,
            series_returns=op_desc["series_returns"],
        )
        # Some ops (e.g. divmod) define no worked example; append only when
        # one exists.
        if op_desc["series_examples"]:
            doc = doc_no_examples + op_desc["series_examples"]
        else:
            doc = doc_no_examples
    elif typ == "dataframe":
        base_doc = _flex_doc_FRAME
        doc = base_doc.format(
            desc=op_desc["desc"],
            op_name=op_name,
            equiv=equiv,
            reverse=op_desc["reverse"],
        )
    else:
        raise AssertionError("Invalid typ argument.")
    return doc
# Shared doctest fixture (two partially-overlapping float Series) that every
# arithmetic example section below is concatenated onto.
_common_examples_algebra_SERIES = """
Examples
--------
>>> a = pd.Series([1, 1, 1, np.nan], index=['a', 'b', 'c', 'd'])
>>> a
a 1.0
b 1.0
c 1.0
d NaN
dtype: float64
>>> b = pd.Series([1, np.nan, 1, np.nan], index=['a', 'b', 'd', 'e'])
>>> b
a 1.0
b NaN
d 1.0
e NaN
dtype: float64"""
# Shared doctest fixture for the comparison-op examples (uses a fifth label
# so both aligned and unaligned entries are exercised).
_common_examples_comparison_SERIES = """
Examples
--------
>>> a = pd.Series([1, 1, 1, np.nan, 1], index=['a', 'b', 'c', 'd', 'e'])
>>> a
a 1.0
b 1.0
c 1.0
d NaN
e 1.0
dtype: float64
>>> b = pd.Series([0, 1, 2, np.nan, 1], index=['a', 'b', 'c', 'd', 'f'])
>>> b
a 0.0
b 1.0
c 2.0
d NaN
f 1.0
dtype: float64"""
# Per-operation "Examples" sections: the common fixture plus one worked call.
_add_example_SERIES = (
    _common_examples_algebra_SERIES
    + """
>>> a.add(b, fill_value=0)
a 2.0
b 1.0
c 1.0
d 1.0
e NaN
dtype: float64
"""
)
_sub_example_SERIES = (
    _common_examples_algebra_SERIES
    + """
>>> a.subtract(b, fill_value=0)
a 0.0
b 1.0
c 1.0
d -1.0
e NaN
dtype: float64
"""
)
_mul_example_SERIES = (
    _common_examples_algebra_SERIES
    + """
>>> a.multiply(b, fill_value=0)
a 1.0
b 0.0
c 0.0
d 0.0
e NaN
dtype: float64
"""
)
_div_example_SERIES = (
    _common_examples_algebra_SERIES
    + """
>>> a.divide(b, fill_value=0)
a 1.0
b inf
c inf
d 0.0
e NaN
dtype: float64
"""
)
_floordiv_example_SERIES = (
    _common_examples_algebra_SERIES
    + """
>>> a.floordiv(b, fill_value=0)
a 1.0
b NaN
c NaN
d 0.0
e NaN
dtype: float64
"""
)
_mod_example_SERIES = (
    _common_examples_algebra_SERIES
    + """
>>> a.mod(b, fill_value=0)
a 0.0
b NaN
c NaN
d 0.0
e NaN
dtype: float64
"""
)
_pow_example_SERIES = (
    _common_examples_algebra_SERIES
    + """
>>> a.pow(b, fill_value=0)
a 1.0
b 1.0
c 1.0
d 0.0
e NaN
dtype: float64
"""
)
_ne_example_SERIES = (
    _common_examples_algebra_SERIES
    + """
>>> a.ne(b, fill_value=0)
a False
b True
c True
d True
e True
dtype: bool
"""
)
_eq_example_SERIES = (
    _common_examples_algebra_SERIES
    + """
>>> a.eq(b, fill_value=0)
a True
b False
c False
d False
e False
dtype: bool
"""
)
_lt_example_SERIES = (
    _common_examples_comparison_SERIES
    + """
>>> a.lt(b, fill_value=0)
a False
b False
c True
d False
e False
f True
dtype: bool
"""
)
_le_example_SERIES = (
    _common_examples_comparison_SERIES
    + """
>>> a.le(b, fill_value=0)
a False
b True
c True
d False
e False
f True
dtype: bool
"""
)
_gt_example_SERIES = (
    _common_examples_comparison_SERIES
    + """
>>> a.gt(b, fill_value=0)
a True
b False
c False
d False
e True
f False
dtype: bool
"""
)
_ge_example_SERIES = (
    _common_examples_comparison_SERIES
    + """
>>> a.ge(b, fill_value=0)
a True
b True
c False
d False
e True
f False
dtype: bool
"""
)
# "Returns" sections substituted into _flex_doc_SERIES by _make_flex_doc.
_returns_series = """Series\n The result of the operation."""
_returns_tuple = """2-Tuple of Series\n The result of the operation."""
# Metadata describing every flex op: the symbolic operator, a human-readable
# description, the name of the reversed variant (None for comparisons, which
# have no reflected method), the doctest "Examples" section, and the
# "Returns" blurb. NOTE(review): only some entries carry a "df_examples"
# key and nothing in this module reads it — looks vestigial; confirm with
# callers before relying on it.
_op_descriptions: Dict[str, Dict[str, Optional[str]]] = {
    # Arithmetic Operators
    "add": {
        "op": "+",
        "desc": "Addition",
        "reverse": "radd",
        "series_examples": _add_example_SERIES,
        "series_returns": _returns_series,
    },
    "sub": {
        "op": "-",
        "desc": "Subtraction",
        "reverse": "rsub",
        "series_examples": _sub_example_SERIES,
        "series_returns": _returns_series,
    },
    "mul": {
        "op": "*",
        "desc": "Multiplication",
        "reverse": "rmul",
        "series_examples": _mul_example_SERIES,
        "series_returns": _returns_series,
        "df_examples": None,
    },
    "mod": {
        "op": "%",
        "desc": "Modulo",
        "reverse": "rmod",
        "series_examples": _mod_example_SERIES,
        "series_returns": _returns_series,
    },
    "pow": {
        "op": "**",
        "desc": "Exponential power",
        "reverse": "rpow",
        "series_examples": _pow_example_SERIES,
        "series_returns": _returns_series,
        "df_examples": None,
    },
    "truediv": {
        "op": "/",
        "desc": "Floating division",
        "reverse": "rtruediv",
        "series_examples": _div_example_SERIES,
        "series_returns": _returns_series,
        "df_examples": None,
    },
    "floordiv": {
        "op": "//",
        "desc": "Integer division",
        "reverse": "rfloordiv",
        "series_examples": _floordiv_example_SERIES,
        "series_returns": _returns_series,
        "df_examples": None,
    },
    "divmod": {
        "op": "divmod",
        "desc": "Integer division and modulo",
        "reverse": "rdivmod",
        "series_examples": None,
        "series_returns": _returns_tuple,
        "df_examples": None,
    },
    # Comparison Operators (no reflected variants: "reverse" is None)
    "eq": {
        "op": "==",
        "desc": "Equal to",
        "reverse": None,
        "series_examples": _eq_example_SERIES,
        "series_returns": _returns_series,
    },
    "ne": {
        "op": "!=",
        "desc": "Not equal to",
        "reverse": None,
        "series_examples": _ne_example_SERIES,
        "series_returns": _returns_series,
    },
    "lt": {
        "op": "<",
        "desc": "Less than",
        "reverse": None,
        "series_examples": _lt_example_SERIES,
        "series_returns": _returns_series,
    },
    "le": {
        "op": "<=",
        "desc": "Less than or equal to",
        "reverse": None,
        "series_examples": _le_example_SERIES,
        "series_returns": _returns_series,
    },
    "gt": {
        "op": ">",
        "desc": "Greater than",
        "reverse": None,
        "series_examples": _gt_example_SERIES,
        "series_returns": _returns_series,
    },
    "ge": {
        "op": ">=",
        "desc": "Greater than or equal to",
        "reverse": None,
        "series_examples": _ge_example_SERIES,
        "series_returns": _returns_series,
    },
}
# Boilerplate pointer to the Python data-model docs, appended to the
# generated "See Also" descriptions below.
_py_num_ref = """see
`Python documentation
<https://docs.python.org/3/reference/datamodel.html#emulating-numeric-types>`_
for more details"""
# Snapshot the keys before iterating: the loop inserts the reversed
# variants ("radd", "rsub", ...) into _op_descriptions as it goes.
_op_names = list(_op_descriptions.keys())
for key in _op_names:
    reverse_op = _op_descriptions[key]["reverse"]
    if reverse_op is not None:
        # Derive the reversed op's entry from the forward op, then
        # cross-link the pair via "reverse" and "see_also_desc".
        _op_descriptions[reverse_op] = _op_descriptions[key].copy()
        _op_descriptions[reverse_op]["reverse"] = key
        _op_descriptions[key][
            "see_also_desc"
        ] = f"Reverse of the {_op_descriptions[key]['desc']} operator, {_py_num_ref}"
        _op_descriptions[reverse_op][
            "see_also_desc"
        ] = f"Element-wise {_op_descriptions[key]['desc']}, {_py_num_ref}"
_flex_doc_SERIES = """
Return {desc} of series and other, element-wise (binary operator `{op_name}`).
Equivalent to ``{equiv}``, but with support to substitute a fill_value for
missing data in either one of the inputs.
Parameters
----------
other : Series or scalar value
fill_value : None or float value, default None (NaN)
Fill existing missing (NaN) values, and any new element needed for
successful Series alignment, with this value before computation.
If data in both corresponding Series locations is missing
the result of filling (at that location) will be missing.
level : int or name
Broadcast across a level, matching Index values on the
passed MultiIndex level.
Returns
-------
{series_returns}
"""
_see_also_reverse_SERIES = """
See Also
--------
Series.{reverse} : {see_also_desc}.
"""
_arith_doc_FRAME = """
Binary operator %s with support to substitute a fill_value for missing data in
one of the inputs
Parameters
----------
other : Series, DataFrame, or constant
axis : {0, 1, 'index', 'columns'}
For Series input, axis to match Series index on
fill_value : None or float value, default None
Fill existing missing (NaN) values, and any new element needed for
successful DataFrame alignment, with this value before computation.
If data in both corresponding DataFrame locations is missing
the result will be missing
level : int or name
Broadcast across a level, matching Index values on the
passed MultiIndex level
Returns
-------
result : DataFrame
Notes
-----
Mismatched indices will be unioned together
"""
_flex_doc_FRAME = """
Get {desc} of dataframe and other, element-wise (binary operator `{op_name}`).
Equivalent to ``{equiv}``, but with support to substitute a fill_value
for missing data in one of the inputs. With reverse version, `{reverse}`.
Among flexible wrappers (`add`, `sub`, `mul`, `div`, `mod`, `pow`) to
arithmetic operators: `+`, `-`, `*`, `/`, `//`, `%`, `**`.
Parameters
----------
other : scalar, sequence, Series, or DataFrame
Any single or multiple element data structure, or list-like object.
axis : {{0 or 'index', 1 or 'columns'}}
Whether to compare by the index (0 or 'index') or columns
(1 or 'columns'). For Series input, axis to match Series index on.
level : int or label
Broadcast across a level, matching Index values on the
passed MultiIndex level.
fill_value : float or None, default None
Fill existing missing (NaN) values, and any new element needed for
successful DataFrame alignment, with this value before computation.
If data in both corresponding DataFrame locations is missing
the result will be missing.
Returns
-------
DataFrame
Result of the arithmetic operation.
See Also
--------
DataFrame.add : Add DataFrames.
DataFrame.sub : Subtract DataFrames.
DataFrame.mul : Multiply DataFrames.
DataFrame.div : Divide DataFrames (float division).
DataFrame.truediv : Divide DataFrames (float division).
DataFrame.floordiv : Divide DataFrames (integer division).
DataFrame.mod : Calculate modulo (remainder after division).
DataFrame.pow : Calculate exponential power.
Notes
-----
Mismatched indices will be unioned together.
Examples
--------
>>> df = pd.DataFrame({{'angles': [0, 3, 4],
... 'degrees': [360, 180, 360]}},
... index=['circle', 'triangle', 'rectangle'])
>>> df
angles degrees
circle 0 360
triangle 3 180
rectangle 4 360
Add a scalar with operator version which return the same
results.
>>> df + 1
angles degrees
circle 1 361
triangle 4 181
rectangle 5 361
>>> df.add(1)
angles degrees
circle 1 361
triangle 4 181
rectangle 5 361
Divide by constant with reverse version.
>>> df.div(10)
angles degrees
circle 0.0 36.0
triangle 0.3 18.0
rectangle 0.4 36.0
>>> df.rdiv(10)
angles degrees
circle inf 0.027778
triangle 3.333333 0.055556
rectangle 2.500000 0.027778
Subtract a list and Series by axis with operator version.
>>> df - [1, 2]
angles degrees
circle -1 358
triangle 2 178
rectangle 3 358
>>> df.sub([1, 2], axis='columns')
angles degrees
circle -1 358
triangle 2 178
rectangle 3 358
>>> df.sub(pd.Series([1, 1, 1], index=['circle', 'triangle', 'rectangle']),
... axis='index')
angles degrees
circle -1 359
triangle 2 179
rectangle 3 359
Multiply a DataFrame of different shape with operator version.
>>> other = pd.DataFrame({{'angles': [0, 3, 4]}},
... index=['circle', 'triangle', 'rectangle'])
>>> other
angles
circle 0
triangle 3
rectangle 4
>>> df * other
angles degrees
circle 0 NaN
triangle 9 NaN
rectangle 16 NaN
>>> df.mul(other, fill_value=0)
angles degrees
circle 0 0.0
triangle 9 0.0
rectangle 16 0.0
Divide by a MultiIndex by level.
>>> df_multindex = pd.DataFrame({{'angles': [0, 3, 4, 4, 5, 6],
... 'degrees': [360, 180, 360, 360, 540, 720]}},
... index=[['A', 'A', 'A', 'B', 'B', 'B'],
... ['circle', 'triangle', 'rectangle',
... 'square', 'pentagon', 'hexagon']])
>>> df_multindex
angles degrees
A circle 0 360
triangle 3 180
rectangle 4 360
B square 4 360
pentagon 5 540
hexagon 6 720
>>> df.div(df_multindex, level=1, fill_value=0)
angles degrees
A circle NaN 1.0
triangle 1.0 1.0
rectangle 1.0 1.0
B square 0.0 0.0
pentagon 0.0 0.0
hexagon 0.0 0.0
"""
_flex_comp_doc_FRAME = """
Get {desc} of dataframe and other, element-wise (binary operator `{op_name}`).
Among flexible wrappers (`eq`, `ne`, `le`, `lt`, `ge`, `gt`) to comparison
operators.
Equivalent to `==`, `=!`, `<=`, `<`, `>=`, `>` with support to choose axis
(rows or columns) and level for comparison.
Parameters
----------
other : scalar, sequence, Series, or DataFrame
Any single or multiple element data structure, or list-like object.
axis : {{0 or 'index', 1 or 'columns'}}, default 'columns'
Whether to compare by the index (0 or 'index') or columns
(1 or 'columns').
level : int or label
Broadcast across a level, matching Index values on the passed
MultiIndex level.
Returns
-------
DataFrame of bool
Result of the comparison.
See Also
--------
DataFrame.eq : Compare DataFrames for equality elementwise.
DataFrame.ne : Compare DataFrames for inequality elementwise.
DataFrame.le : Compare DataFrames for less than inequality
or equality elementwise.
DataFrame.lt : Compare DataFrames for strictly less than
inequality elementwise.
DataFrame.ge : Compare DataFrames for greater than inequality
or equality elementwise.
DataFrame.gt : Compare DataFrames for strictly greater than
inequality elementwise.
Notes
-----
Mismatched indices will be unioned together.
`NaN` values are considered different (i.e. `NaN` != `NaN`).
Examples
--------
>>> df = pd.DataFrame({{'cost': [250, 150, 100],
... 'revenue': [100, 250, 300]}},
... index=['A', 'B', 'C'])
>>> df
cost revenue
A 250 100
B 150 250
C 100 300
Comparison with a scalar, using either the operator or method:
>>> df == 100
cost revenue
A False True
B False False
C True False
>>> df.eq(100)
cost revenue
A False True
B False False
C True False
When `other` is a :class:`Series`, the columns of a DataFrame are aligned
with the index of `other` and broadcast:
>>> df != pd.Series([100, 250], index=["cost", "revenue"])
cost revenue
A True True
B True False
C False True
Use the method to control the broadcast axis:
>>> df.ne(pd.Series([100, 300], index=["A", "D"]), axis='index')
cost revenue
A True False
B True True
C True True
D True True
When comparing to an arbitrary sequence, the number of columns must
match the number elements in `other`:
>>> df == [250, 100]
cost revenue
A True True
B False False
C False False
Use the method to control the axis:
>>> df.eq([250, 250, 100], axis='index')
cost revenue
A True False
B False True
C True False
Compare to a DataFrame of different shape.
>>> other = pd.DataFrame({{'revenue': [300, 250, 100, 150]}},
... index=['A', 'B', 'C', 'D'])
>>> other
revenue
A 300
B 250
C 100
D 150
>>> df.gt(other)
cost revenue
A False False
B False False
C False True
D False False
Compare to a MultiIndex by level.
>>> df_multindex = pd.DataFrame({{'cost': [250, 150, 100, 150, 300, 220],
... 'revenue': [100, 250, 300, 200, 175, 225]}},
... index=[['Q1', 'Q1', 'Q1', 'Q2', 'Q2', 'Q2'],
... ['A', 'B', 'C', 'A', 'B', 'C']])
>>> df_multindex
cost revenue
Q1 A 250 100
B 150 250
C 100 300
Q2 A 150 200
B 300 175
C 220 225
>>> df.le(df_multindex, level=1)
cost revenue
Q1 A True True
B True True
C True True
Q2 A False True
B True False
C True False
"""
| bsd-3-clause |
glenn-edgar/local_scda | flask_web/werkzeug-master/werkzeug/testsuite/urls.py | 74 | 6868 | # -*- coding: utf-8 -*-
"""
werkzeug.testsuite.urls
~~~~~~~~~~~~~~~~~~~~~~~
URL helper tests.
:copyright: (c) 2011 by Armin Ronacher.
:license: BSD, see LICENSE for more details.
"""
import unittest
from StringIO import StringIO
from werkzeug.testsuite import WerkzeugTestCase
from werkzeug.datastructures import OrderedMultiDict
from werkzeug import urls
class URLsTestCase(WerkzeugTestCase):
    """Unit tests for the werkzeug.urls helpers.

    NOTE(review): this suite is Python 2 only — it relies on `StringIO`,
    the `unicode` builtin and generator `.next()`; confirm interpreter
    version before running.
    """
    def test_quoting(self):
        assert urls.url_quote(u'\xf6\xe4\xfc') == '%C3%B6%C3%A4%C3%BC'
        assert urls.url_unquote(urls.url_quote(u'#%="\xf6')) == u'#%="\xf6'
        assert urls.url_quote_plus('foo bar') == 'foo+bar'
        assert urls.url_unquote_plus('foo+bar') == 'foo bar'
        assert urls.url_encode({'a': None, 'b': 'foo bar'}) == 'b=foo+bar'
        assert urls.url_fix(u'http://de.wikipedia.org/wiki/Elf (Begriffsklärung)') == \
            'http://de.wikipedia.org/wiki/Elf%20%28Begriffskl%C3%A4rung%29'
    def test_url_decoding(self):
        x = urls.url_decode('foo=42&bar=23&uni=H%C3%A4nsel')
        assert x['foo'] == '42'
        assert x['bar'] == '23'
        assert x['uni'] == u'Hänsel'
        x = urls.url_decode('foo=42;bar=23;uni=H%C3%A4nsel', separator=';')
        assert x['foo'] == '42'
        assert x['bar'] == '23'
        assert x['uni'] == u'Hänsel'
        x = urls.url_decode('%C3%9Ch=H%C3%A4nsel', decode_keys=True)
        assert x[u'Üh'] == u'Hänsel'
    def test_streamed_url_decoding(self):
        # Large first value exercises the streaming/limit code path.
        item1 = 'a' * 100000
        item2 = 'b' * 400
        string = 'a=%s&b=%s&c=%s' % (item1, item2, item2)
        gen = urls.url_decode_stream(StringIO(string), limit=len(string),
                                     return_iterator=True)
        self.assert_equal(gen.next(), ('a', item1))
        self.assert_equal(gen.next(), ('b', item2))
        self.assert_equal(gen.next(), ('c', item2))
        self.assert_raises(StopIteration, gen.next)
    def test_url_encoding(self):
        assert urls.url_encode({'foo': 'bar 45'}) == 'foo=bar+45'
        d = {'foo': 1, 'bar': 23, 'blah': u'Hänsel'}
        assert urls.url_encode(d, sort=True) == 'bar=23&blah=H%C3%A4nsel&foo=1'
        assert urls.url_encode(d, sort=True, separator=';') == 'bar=23;blah=H%C3%A4nsel;foo=1'
    def test_sorted_url_encode(self):
        assert urls.url_encode({"a": 42, "b": 23, 1: 1, 2: 2}, sort=True) == '1=1&2=2&a=42&b=23'
        assert urls.url_encode({'A': 1, 'a': 2, 'B': 3, 'b': 4}, sort=True,
                               key=lambda x: x[0].lower()) == 'A=1&a=2&B=3&b=4'
    def test_streamed_url_encoding(self):
        out = StringIO()
        urls.url_encode_stream({'foo': 'bar 45'}, out)
        self.assert_equal(out.getvalue(), 'foo=bar+45')
        d = {'foo': 1, 'bar': 23, 'blah': u'Hänsel'}
        out = StringIO()
        urls.url_encode_stream(d, out, sort=True)
        self.assert_equal(out.getvalue(), 'bar=23&blah=H%C3%A4nsel&foo=1')
        out = StringIO()
        urls.url_encode_stream(d, out, sort=True, separator=';')
        self.assert_equal(out.getvalue(), 'bar=23;blah=H%C3%A4nsel;foo=1')
        # Without a stream the function returns a generator instead.
        gen = urls.url_encode_stream(d, sort=True)
        self.assert_equal(gen.next(), 'bar=23')
        self.assert_equal(gen.next(), 'blah=H%C3%A4nsel')
        self.assert_equal(gen.next(), 'foo=1')
        self.assert_raises(StopIteration, gen.next)
    def test_url_fixing(self):
        x = urls.url_fix(u'http://de.wikipedia.org/wiki/Elf (Begriffskl\xe4rung)')
        assert x == 'http://de.wikipedia.org/wiki/Elf%20%28Begriffskl%C3%A4rung%29'
        # Already percent-encoded query values must not be double-encoded.
        x = urls.url_fix('http://example.com/?foo=%2f%2f')
        assert x == 'http://example.com/?foo=%2f%2f'
    def test_iri_support(self):
        self.assert_raises(UnicodeError, urls.uri_to_iri, u'http://föö.com/')
        self.assert_raises(UnicodeError, urls.iri_to_uri, 'http://föö.com/')
        assert urls.uri_to_iri('http://xn--n3h.net/') == u'http://\u2603.net/'
        assert urls.uri_to_iri('http://%C3%BCser:p%C3%A4ssword@xn--n3h.net/p%C3%A5th') == \
            u'http://\xfcser:p\xe4ssword@\u2603.net/p\xe5th'
        assert urls.iri_to_uri(u'http://☃.net/') == 'http://xn--n3h.net/'
        assert urls.iri_to_uri(u'http://üser:pässword@☃.net/påth') == \
            'http://%C3%BCser:p%C3%A4ssword@xn--n3h.net/p%C3%A5th'
        assert urls.uri_to_iri('http://test.com/%3Fmeh?foo=%26%2F') == \
            u'http://test.com/%3Fmeh?foo=%26%2F'
        # this should work as well, might break on 2.4 because of a broken
        # idna codec
        assert urls.uri_to_iri('/foo') == u'/foo'
        assert urls.iri_to_uri(u'/foo') == '/foo'
    def test_ordered_multidict_encoding(self):
        # Encoding must preserve insertion order across repeated keys.
        d = OrderedMultiDict()
        d.add('foo', 1)
        d.add('foo', 2)
        d.add('foo', 3)
        d.add('bar', 0)
        d.add('foo', 4)
        assert urls.url_encode(d) == 'foo=1&foo=2&foo=3&bar=0&foo=4'
    def test_href(self):
        x = urls.Href('http://www.example.com/')
        assert x('foo') == 'http://www.example.com/foo'
        assert x.foo('bar') == 'http://www.example.com/foo/bar'
        assert x.foo('bar', x=42) == 'http://www.example.com/foo/bar?x=42'
        assert x.foo('bar', class_=42) == 'http://www.example.com/foo/bar?class=42'
        assert x.foo('bar', {'class': 42}) == 'http://www.example.com/foo/bar?class=42'
        self.assert_raises(AttributeError, lambda: x.__blah__)
        x = urls.Href('blah')
        assert x.foo('bar') == 'blah/foo/bar'
        self.assert_raises(TypeError, x.foo, {"foo": 23}, x=42)
        x = urls.Href('')
        assert x('foo') == 'foo'
    def test_href_url_join(self):
        x = urls.Href('test')
        assert x('foo:bar') == 'test/foo:bar'
        assert x('http://example.com/') == 'test/http://example.com/'
    # NOTE(review): disabled pending a stdlib urljoin quirk — confirm
    # before re-enabling.
    if 0:
        # stdlib bug? :(
        def test_href_past_root(self):
            base_href = urls.Href('http://www.blagga.com/1/2/3')
            assert base_href('../foo') == 'http://www.blagga.com/1/2/foo'
            assert base_href('../../foo') == 'http://www.blagga.com/1/foo'
            assert base_href('../../../foo') == 'http://www.blagga.com/foo'
            assert base_href('../../../../foo') == 'http://www.blagga.com/foo'
            assert base_href('../../../../../foo') == 'http://www.blagga.com/foo'
            assert base_href('../../../../../../foo') == 'http://www.blagga.com/foo'
    def test_url_unquote_plus_unicode(self):
        # was broken in 0.6
        assert urls.url_unquote_plus(u'\x6d') == u'\x6d'
        assert type(urls.url_unquote_plus(u'\x6d')) is unicode
    def test_quoting_of_local_urls(self):
        rv = urls.iri_to_uri(u'/foo\x8f')
        assert rv == '/foo%C2%8F'
        assert type(rv) is str
def suite():
    """Build and return the unittest suite for the URL helper tests."""
    url_suite = unittest.TestSuite()
    url_suite.addTest(unittest.makeSuite(URLsTestCase))
    return url_suite
| mit |
jmarsik/mopidy | mopidy/internal/path.py | 9 | 7193 | from __future__ import absolute_import, unicode_literals
import logging
import os
import stat
import string
import threading
import urllib
import urlparse
from mopidy import compat, exceptions
from mopidy.compat import queue
from mopidy.internal import encoding, xdg
logger = logging.getLogger(__name__)
XDG_DIRS = xdg.get_dirs()
def get_or_create_dir(dir_path):
    """Expand ``dir_path`` and ensure it exists as a directory.

    :param bytes dir_path: directory path as a bytestring
    :raises ValueError: if ``dir_path`` is not a bytestring
    :raises OSError: if a regular file already occupies the path
    :returns: the expanded directory path
    """
    if not isinstance(dir_path, bytes):
        raise ValueError('Path is not a bytestring.')
    dir_path = expand_path(dir_path)
    if os.path.isfile(dir_path):
        raise OSError(
            'A file with the same name as the desired dir, '
            '"%s", already exists.' % dir_path)
    if not os.path.isdir(dir_path):
        logger.info('Creating dir %s', dir_path)
        os.makedirs(dir_path, 0o755)
    return dir_path
def get_or_create_file(file_path, mkdir=True, content=None):
    """Expand ``file_path`` and create the file if it does not exist.

    :param bytes file_path: file path as a bytestring
    :param bool mkdir: whether to create missing parent directories first
    :param content: optional initial content, written only on creation;
        text is encoded as UTF-8
    :raises ValueError: if ``file_path`` is not a bytestring
    :returns: the expanded file path
    """
    if not isinstance(file_path, bytes):
        raise ValueError('Path is not a bytestring.')
    file_path = expand_path(file_path)
    if mkdir:
        get_or_create_dir(os.path.dirname(file_path))
    if isinstance(content, compat.text_type):
        content = content.encode('utf-8')
    if not os.path.isfile(file_path):
        logger.info('Creating file %s', file_path)
        with open(file_path, 'wb') as handle:
            if content is not None:
                handle.write(content)
    return file_path
def path_to_uri(path):
    """
    Convert OS specific path to file:// URI.

    Accepts either unicode strings or bytestrings. The encoding of any
    bytestring is preserved, so :func:`uri_to_path` can round-trip the
    exact same bytes.

    :returns: a file:// URI as an unicode string
    """
    if isinstance(path, compat.text_type):
        path = path.encode('utf-8')
    quoted = urllib.quote(path)
    return urlparse.urlunsplit((b'file', b'', quoted, b'', b''))
def uri_to_path(uri):
    """
    Convert an URI to a OS specific path.

    Returns a bytestring: file paths may use an encoding other than
    UTF-8, and decoding them would lose the exact bytes needed to find
    the matching dir or file on the file system again.
    """
    if isinstance(uri, compat.text_type):
        uri = uri.encode('utf-8')
    raw_path = urlparse.urlsplit(uri).path
    return urllib.unquote(raw_path)
def split_path(path):
    """Split a bytestring path into its individual components."""
    segments = []
    head = path
    # Peel one trailing component per iteration until the root or an
    # empty head remains.
    while head and head != b'/':
        head, tail = os.path.split(head)
        if tail:
            segments.append(tail)
    segments.reverse()
    return segments
def expand_path(path):
    """Expand ``$XDG_*`` variables and ``~`` in a bytestring path.

    Returns an absolute path, or ``None`` if the path references an
    unknown XDG directory.
    """
    # TODO: document as we want people to use this.
    if not isinstance(path, bytes):
        raise ValueError('Path is not a bytestring.')
    try:
        substituted = string.Template(path).substitute(XDG_DIRS)
    except KeyError:
        # Unknown $XDG_... placeholder.
        return None
    return os.path.abspath(os.path.expanduser(substituted))
def _find_worker(relative, follow, done, work, results, errors):
    """Worker thread for collecting stat() results.
    :param str relative: directory to make results relative to
    :param bool follow: if symlinks should be followed
    :param threading.Event done: event indicating that all work has been done
    :param queue.Queue work: queue of paths to process
    :param dict results: shared dictionary for storing all the stat() results
    :param dict errors: shared dictionary for storing any per path errors
    """
    while not done.is_set():
        try:
            entry, parents = work.get(block=False)
        except queue.Empty:
            # The queue may be momentarily empty while sibling workers are
            # still expanding directories; spin until `done` is set.
            continue
        if relative:
            path = os.path.relpath(entry, relative)
        else:
            path = entry
        try:
            if follow:
                st = os.stat(entry)
            else:
                st = os.lstat(entry)
            # Seeing a (device, inode) pair that is already an ancestor
            # means a sym/hardlink loop back up the tree.
            if (st.st_dev, st.st_ino) in parents:
                errors[path] = exceptions.FindError('Sym/hardlink loop found.')
                continue
            # Build a new list (not append) so sibling entries don't share
            # the mutated ancestry.
            parents = parents + [(st.st_dev, st.st_ino)]
            if stat.S_ISDIR(st.st_mode):
                for e in os.listdir(entry):
                    work.put((os.path.join(entry, e), parents))
            elif stat.S_ISREG(st.st_mode):
                results[path] = st
            elif stat.S_ISLNK(st.st_mode):
                errors[path] = exceptions.FindError('Not following symlinks.')
            else:
                errors[path] = exceptions.FindError('Not a file or directory.')
        except OSError as e:
            errors[path] = exceptions.FindError(
                encoding.locale_decode(e.strerror), e.errno)
        finally:
            # Always account for the dequeued entry so work.join() in
            # _find() can complete.
            work.task_done()
def _find(root, thread_count=10, relative=False, follow=False):
    """Threaded find implementation that provides stat results for files.
    Tries to protect against sym/hardlink loops by keeping an eye on parent
    (st_dev, st_ino) pairs.
    :param str root: root directory to search from, may not be a file
    :param int thread_count: number of workers to use, mainly useful to
        mitigate network lag when scanning on NFS etc.
    :param bool relative: if results should be relative to root or absolute
    :param bool follow: if symlinks should be followed
    """
    threads = []
    results = {}
    errors = {}
    done = threading.Event()
    work = queue.Queue()
    # Seed the queue with the root and an empty ancestry list.
    work.put((os.path.abspath(root), []))
    if not relative:
        root = None
    args = (root, follow, done, work, results, errors)
    for i in range(thread_count):
        t = threading.Thread(target=_find_worker, args=args)
        t.daemon = True
        t.start()
        threads.append(t)
    # Wait until every queued entry has been task_done()'d, then signal the
    # workers to exit and reap them.
    work.join()
    done.set()
    for t in threads:
        t.join()
    return results, errors
def find_mtimes(root, follow=False):
    """Return ``({path: mtime_ms}, {path: error})`` for files under ``root``."""
    results, errors = _find(root, relative=False, follow=follow)
    # Convert fractional-second mtimes to integer milliseconds.
    mtimes = dict((path, int(st.st_mtime * 1000))
                  for path, st in results.items())
    return mtimes, errors
def is_path_inside_base_dir(path, base_path):
    """Check if ``path`` is ``base_path`` itself or located inside it.

    Symlinks in both arguments are resolved before comparison. If ``path``
    points at a file, the file's directory is compared instead, so a file
    directly inside ``base_path`` counts as inside.

    :param path: candidate path; must not end with a path separator
    :param base_path: directory that should contain ``path``
    :raises ValueError: if ``path`` ends with a path separator
    :returns: bool
    """
    if path.endswith(os.sep):
        raise ValueError('Path %s cannot end with a path separator'
                         % path)
    # Expand symlinks
    real_base_path = os.path.realpath(base_path)
    real_path = os.path.realpath(path)
    if os.path.isfile(path):
        # Use dir of file for prefix comparison, so we don't accept
        # /tmp/foo.m3u as being inside /tmp/foo, simply because they have a
        # common prefix, /tmp/foo, which matches the base path, /tmp/foo.
        real_path = os.path.dirname(real_path)
    if real_path == real_base_path:
        return True
    # Bug fix: os.path.commonprefix() compares character-wise, so it would
    # wrongly report /tmp/foobar as being inside /tmp/foo. Require a full
    # path-component boundary by appending a separator to the base instead.
    sep = os.sep if isinstance(real_base_path, str) else os.sep.encode()
    return real_path.startswith(real_base_path + sep)
# FIXME replace with mock usage in tests.
class Mtime(object):
    """Callable returning a path's mtime, with an override hook for tests."""

    def __init__(self):
        self.fake = None

    def __call__(self, path):
        # While a fake time is installed, the filesystem is never consulted.
        if self.fake is None:
            return int(os.stat(path).st_mtime)
        return self.fake

    def set_fake_time(self, time):
        self.fake = time

    def undo_fake(self):
        self.fake = None
# Shared module-level Mtime instance.
mtime = Mtime()
| apache-2.0 |
oudalab/fajita | pythonAPI/flask/lib/python3.5/site-packages/urllib3/fields.py | 288 | 5943 | from __future__ import absolute_import
import email.utils
import mimetypes
from .packages import six
def guess_content_type(filename, default='application/octet-stream'):
    """Guess the "Content-Type" of a file from its name.

    :param filename:
        Filename used for the :mod:`mimetypes` lookup; may be falsy.
    :param default:
        Returned when no filename is given or no type can be guessed.
    """
    if not filename:
        return default
    guessed, _ = mimetypes.guess_type(filename)
    return guessed or default
def format_header_param(name, value):
    """Format and quote a single header parameter.

    Plain ASCII values are emitted as ``name="value"``; values containing
    quotes, backslashes or CR/LF, or non-ASCII characters, are encoded per
    RFC 2231 (as suggested by RFC 2388 Section 4.4) into ``name*=value``.

    :param name:
        The name of the parameter, a string expected to be ASCII only.
    :param value:
        The value of the parameter, provided as a unicode string.
    """
    needs_rfc2231 = any(ch in value for ch in '"\\\r\n')
    if not needs_rfc2231:
        candidate = '%s="%s"' % (name, value)
        try:
            candidate.encode('ascii')
        except (UnicodeEncodeError, UnicodeDecodeError):
            # Non-ASCII value; fall through to the RFC 2231 path below.
            pass
        else:
            return candidate
    if not six.PY3 and isinstance(value, six.text_type):  # Python 2:
        value = value.encode('utf-8')
    value = email.utils.encode_rfc2231(value, 'utf-8')
    return '%s*=%s' % (name, value)
class RequestField(object):
    """A data container for request body parameters.

    :param name:
        The name of this request field.
    :param data:
        The data/value body.
    :param filename:
        An optional filename of the request field.
    :param headers:
        An optional dict-like object of headers to initially use for the field.
    """

    def __init__(self, name, data, filename=None, headers=None):
        self._name = name
        self._filename = filename
        self.data = data
        # Copy so later mutation of the caller's dict has no effect here.
        self.headers = dict(headers) if headers else {}

    @classmethod
    def from_tuples(cls, fieldname, value):
        """
        A :class:`~urllib3.fields.RequestField` factory from old-style tuple parameters.

        Supports constructing :class:`~urllib3.fields.RequestField` from
        parameter of key/value strings AND key/filetuple. A filetuple is a
        (filename, data, MIME type) tuple where the MIME type is optional.
        For example::

            'foo': 'bar',
            'fakefile': ('foofile.txt', 'contents of foofile'),
            'realfile': ('barfile.txt', open('realfile').read()),
            'typedfile': ('bazfile.bin', open('bazfile').read(), 'image/jpeg'),
            'nonamefile': 'contents of nonamefile field',

        Field names and filenames must be unicode.
        """
        if isinstance(value, tuple):
            if len(value) == 3:
                filename, data, content_type = value
            else:
                filename, data = value
                # MIME type omitted; derive it from the filename.
                content_type = guess_content_type(filename)
        else:
            filename = None
            content_type = None
            data = value

        field = cls(fieldname, data, filename=filename)
        field.make_multipart(content_type=content_type)
        return field

    def _render_part(self, name, value):
        """
        Overridable helper to format a single header parameter.

        :param name:
            The name of the parameter, a string expected to be ASCII only.
        :param value:
            The value of the parameter, provided as a unicode string.
        """
        return format_header_param(name, value)

    def _render_parts(self, header_parts):
        """
        Format and quote a single header composed of multiple items
        (e.g. 'Content-Disposition' fields), skipping ``None`` values.

        :param header_parts:
            A sequence of (k, v) tuples or a :class:`dict` of (k, v) to
            format as ``k1="v1"; k2="v2"; ...``.
        """
        if isinstance(header_parts, dict):
            items = header_parts.items()
        else:
            items = header_parts
        rendered = [self._render_part(key, value)
                    for key, value in items
                    if value is not None]
        return '; '.join(rendered)

    def render_headers(self):
        """
        Render this field's headers, well-known headers first, each line
        terminated by CRLF and followed by a blank line.
        """
        lines = []
        sort_keys = ('Content-Disposition', 'Content-Type', 'Content-Location')

        # Emit the well-known headers in a fixed order first...
        for key in sort_keys:
            value = self.headers.get(key, False)
            if value:
                lines.append('%s: %s' % (key, value))

        # ...then everything else, skipping empty values.
        for key, value in self.headers.items():
            if key not in sort_keys and value:
                lines.append('%s: %s' % (key, value))

        lines.append('\r\n')
        return '\r\n'.join(lines)

    def make_multipart(self, content_disposition=None, content_type=None,
                       content_location=None):
        """
        Makes this request field into a multipart request field.

        This method overrides "Content-Disposition", "Content-Type" and
        "Content-Location" headers to the request parameter.

        :param content_type:
            The 'Content-Type' of the request body.
        :param content_location:
            The 'Content-Location' of the request body.
        """
        disposition = content_disposition or 'form-data'
        disposition += '; '.join([
            '', self._render_parts(
                (('name', self._name), ('filename', self._filename))
            )
        ])
        self.headers['Content-Disposition'] = disposition
        self.headers['Content-Type'] = content_type
        self.headers['Content-Location'] = content_location
| mit |
jab1982/opennsa | opennsa/discovery/service.py | 1 | 2750 | """
discovery service module
mostly just a place to keep all the information regarding discovery, and export
it to xml
Author: Henrik Thostrup Jensen <htj _at_ nordu.net>
Copyright: NORDUnet (2014)
"""
from xml.etree import ElementTree as ET
from opennsa import constants as cnt
from opennsa.shared import xmlhelper, modifiableresource
from opennsa.discovery.bindings import discovery
ET.register_namespace('nsi', discovery.NSI_DISCOVERY_NS)
ET.register_namespace('gns', discovery.GNS_NS)
class DiscoveryService:
def __init__(self, nsa_id, version=None, name=None, software_version=None, start_time=None,
network_ids=None, interfaces=None, features=None, provider_registry=None,
link_vector=None):
self.nsa_id = nsa_id # string
self.version = version # datetime
self.name = name # string
self.software_version = software_version # string
self.start_time = start_time # datetime
self.network_ids = network_ids # [ string ]
self.interfaces = interfaces # [ (type, url, described_by) ]
self.features = features # [ (type, value) ]
self.provider_registry = provider_registry # provreg.ProviderRegistry
self.link_vector = link_vector # linkvector.LinkVector
def xml(self):
# location not really supported yet
interface_types = [ discovery.InterfaceType(i[0], i[1], i[2]) for i in self.interfaces ]
feature_types = [ discovery.FeatureType(f[0], f[1]) for f in self.features ]
peers_with = self.provider_registry.providers.keys()
peers_with.remove(self.nsa_id)
topology_vectors = [ (cnt.URN_OGF_PREFIX + tv, cost) for tv, cost in self.link_vector.listVectors().items() ]
other = discovery.HolderType( [ discovery.Topology(t,c) for (t,c) in topology_vectors ] )
nsa_element = discovery.NsaType(
self.nsa_id,
xmlhelper.createXMLTime(self.version),
None,
self.name,
self.software_version,
xmlhelper.createXMLTime(self.start_time),
self.network_ids,
interface_types,
feature_types,
peers_with,
other,
)
e = nsa_element.xml(discovery.nsa)
payload = ET.tostring(e, 'utf-8')
return payload
def resource(self):
r = modifiableresource.ModifiableResource('DiscoveryService', 'application/xml')
r.updateResource(self.xml())
return r
| bsd-3-clause |
NaturalGIS/naturalgis_qgis | tests/src/python/test_qgsdistancearea.py | 10 | 79402 | # -*- coding: utf-8 -*-
"""QGIS Unit tests for QgsDistanceArea.
.. note:: This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
"""
__author__ = 'Jürgen E. Fischer'
__date__ = '19/01/2014'
__copyright__ = 'Copyright 2014, The QGIS Project'
import qgis # NOQA
import math
from qgis.core import (QgsGeometry,
QgsPointXY,
QgsDistanceArea,
QgsCoordinateReferenceSystem,
QgsUnitTypes,
QgsProject)
from qgis.testing import start_app, unittest
from qgis.PyQt.QtCore import QLocale
from pprint import pprint
# Convenience instances in case you may need them
# not used in this test
# Initialise the QGIS application environment once for the whole test module.
start_app()
class TestQgsDistanceArea(unittest.TestCase):
def testCrs(self):
    """Source and ellipsoid CRS setters/getters round-trip as expected."""
    area_calc = QgsDistanceArea()

    # Assign the source CRS from a CRS object and read it back.
    vic_crs = QgsCoordinateReferenceSystem(3111, QgsCoordinateReferenceSystem.EpsgCrsId)
    area_calc.setSourceCrs(vic_crs, QgsProject.instance().transformContext())
    self.assertEqual(area_calc.sourceCrs().srsid(), vic_crs.srsid())

    # No ellipsoid configured yet.
    self.assertFalse(area_calc.ellipsoidCrs().isValid())

    # The exact proj string depends on the installed proj version.
    area_calc.setEllipsoid("GRS80")
    self.assertIn(area_calc.ellipsoidCrs().toProj(), (
        '+proj=longlat +ellps=GRS80 +no_defs', '+proj=longlat +a=6378137 +rf=298.25722210100002 +no_defs'))

    area_calc.setEllipsoid("WGS84")
    self.assertIn(area_calc.ellipsoidCrs().toProj(), (
        '+proj=longlat +ellps=WGS84 +no_defs', '+proj=longlat +a=6378137 +rf=298.25722356300003 +no_defs'))
def testMeasureLine(self):
    """Planar length of a simple polyline is measured exactly.

    The line traces:
        +-+
        | |
        +-+ +
    i.e. four unit-length segments, so the expected length is 4.
    """
    linestring = QgsGeometry.fromPolylineXY(
        [QgsPointXY(0, 0), QgsPointXY(1, 0), QgsPointXY(1, 1), QgsPointXY(2, 1), QgsPointXY(2, 0), ]
    )
    da = QgsDistanceArea()
    length = da.measureLength(linestring)
    # Use a unittest assertion rather than a bare `assert`, which would be
    # silently stripped when Python runs with -O; assertEqual also builds
    # the failure message automatically.
    self.assertEqual(length, 4)
def testMeasureLineProjected(self):
    """Check measureLineProjected() against SpatiaLite reference values.

    Projects a point by distance/azimuth in both a projected CRS
    (EPSG:3068, Soldner Berlin) and a geographic CRS (EPSG:4326, WGS84),
    comparing the returned map-unit lengths and projected points with
    results computed by SpatiaLite's ST_Project/ST_Distance/ST_Azimuth.
    """
    # +-+
    # | |
    # +-+ +
    # test setting/getting the source CRS
    da_3068 = QgsDistanceArea()
    da_wsg84 = QgsDistanceArea()
    da_3068.setSourceCrs(QgsCoordinateReferenceSystem.fromOgcWmsCrs('EPSG:3068'),
                         QgsProject.instance().transformContext())
    if (da_3068.sourceCrs().isGeographic()):
        # enable ellipsoid-based measurement for geographic CRSs
        da_3068.setEllipsoid(da_3068.sourceCrs().ellipsoidAcronym())
    print(("setting [{}] srid [{}] description [{}]".format(u'Soldner Berlin', da_3068.sourceCrs().authid(),
                                                            da_3068.sourceCrs().description())))
    self.assertEqual(da_3068.sourceCrs().authid(), 'EPSG:3068')
    da_wsg84.setSourceCrs(QgsCoordinateReferenceSystem.fromOgcWmsCrs('EPSG:4326'),
                          QgsProject.instance().transformContext())
    if (da_wsg84.sourceCrs().isGeographic()):
        da_wsg84.setEllipsoid(da_wsg84.sourceCrs().ellipsoidAcronym())
    self.assertEqual(da_wsg84.sourceCrs().authid(), 'EPSG:4326')
    print(("setting [{}] srid [{}] description [{}] isGeographic[{}]".format(u'Wsg84',
                                                                             da_wsg84.sourceCrs().authid(),
                                                                             da_wsg84.sourceCrs().description(),
                                                                             da_wsg84.sourceCrs().isGeographic())))
    # print(("-- projectionAcronym[{}] ellipsoidAcronym[{}] toWkt[{}] mapUnits[{}] toProj4[{}]".format(da_wsg84.sourceCrs().projectionAcronym(),da_wsg84.sourceCrs().ellipsoidAcronym(), da_wsg84.sourceCrs().toWkt(),da_wsg84.sourceCrs().mapUnits(),da_wsg84.sourceCrs().toProj())))
    print(("Testing Position change for[{}] years[{}]".format(u'Ampelanlage - Potsdamer Platz, Verkehrsinsel',
                                                              u'1924 and 1998')))
    # Reference positions of the same object surveyed in 1924 and 1998:
    # 1924-10-24 SRID=3068;POINT(23099.49 20296.69)
    # 1924-10-24 SRID=4326;POINT(13.37650707988041 52.50952361017194)
    # 1998-10-02 SRID=3068;POINT(23082.30 20267.80)
    # 1998-10-02 SRID=4326;POINT(13.37625537334001 52.50926345498337)
    # values returned by SpatiaLite
    point_soldner_1924 = QgsPointXY(23099.49, 20296.69)
    point_soldner_1998 = QgsPointXY(23082.30, 20267.80)
    distance_soldner_meters = 33.617379
    azimuth_soldner_1924 = 3.678339
    # ST_Transform(point_soldner_1924,point_soldner_1998,4326)
    point_wsg84_1924 = QgsPointXY(13.37650707988041, 52.50952361017194)
    point_wsg84_1998 = QgsPointXY(13.37625537334001, 52.50926345498337)
    # ST_Distance(point_wsg84_1924,point_wsg84_1998,1)
    distance_wsg84_meters = 33.617302
    # ST_Distance(point_wsg84_1924,point_wsg84_1998)
    # distance_wsg84_mapunits=0.000362
    distance_wsg84_mapunits_format = QgsDistanceArea.formatDistance(0.000362, 7, QgsUnitTypes.DistanceDegrees, True)
    # ST_Azimuth(point_wsg84_1924,point_wsg84_1998)
    azimuth_wsg84_1924 = 3.674878
    # ST_Azimuth(point_wsg84_1998,point_wsg84_1998)
    azimuth_wsg84_1998 = 0.533282
    # ST_Project(point_wsg84_1924,33.617302,3.674878)
    # SRID=4326;POINT(13.37625537318728 52.50926345503591)
    point_soldner_1998_project = QgsPointXY(13.37625537318728, 52.50926345503591)
    # ST_Project(point_wsg84_1998,33.617302,0.533282)
    # SRID=4326;POINT(13.37650708009255 52.50952361009799)
    point_soldner_1924_project = QgsPointXY(13.37650708009255, 52.50952361009799)
    distance_qpoint = point_soldner_1924.distance(point_soldner_1998)
    azimuth_qpoint = point_soldner_1924.azimuth(point_soldner_1998)
    point_soldner_1998_result = point_soldner_1924.project(distance_qpoint, azimuth_qpoint)
    point_soldner_1924_result = QgsPointXY(0, 0)
    point_soldner_1998_result = QgsPointXY(0, 0)
    # Test meter based projected point from point_1924 to point_1998
    length_1998_mapunits, point_soldner_1998_result = da_3068.measureLineProjected(point_soldner_1924,
                                                                                   distance_soldner_meters,
                                                                                   azimuth_qpoint)
    self.assertEqual(point_soldner_1998_result.toString(6), point_soldner_1998.toString(6))
    # Test degree based projected point from point_1924 1 meter due East
    point_wsg84_meter_result = QgsPointXY(0, 0)
    point_wsg84_1927_meter = QgsPointXY(13.37652180838435, 52.50952361017102)
    length_meter_mapunits, point_wsg84_meter_result = da_wsg84.measureLineProjected(point_wsg84_1924, 1.0,
                                                                                   (math.pi / 2))
    self.assertEqual(QgsDistanceArea.formatDistance(length_meter_mapunits, 7, QgsUnitTypes.DistanceDegrees, True),
                     '0.0000147 deg')
    self.assertEqual(point_wsg84_meter_result.toString(7), point_wsg84_1927_meter.toString(7))
    point_wsg84_1998_result = QgsPointXY(0, 0)
    length_1928_mapunits, point_wsg84_1998_result = da_wsg84.measureLineProjected(point_wsg84_1924,
                                                                                  distance_wsg84_meters,
                                                                                  azimuth_wsg84_1924)
    self.assertEqual(QgsDistanceArea.formatDistance(length_1928_mapunits, 7, QgsUnitTypes.DistanceDegrees, True),
                     distance_wsg84_mapunits_format)
    self.assertEqual(point_wsg84_1998_result.toString(7), point_wsg84_1998.toString(7))
def testMeasureLineProjectedWorldPoints(self):
# +-+
# | |
# +-+ +
# checking returned length_mapunits/projected_points of different world points with results from SpatiaLite ST_Project
da_3068 = QgsDistanceArea()
da_3068.setSourceCrs(QgsCoordinateReferenceSystem.fromOgcWmsCrs('EPSG:3068'),
QgsProject.instance().transformContext())
if (da_3068.sourceCrs().isGeographic()):
da_3068.setEllipsoid(da_3068.sourceCrs().ellipsoidAcronym())
self.assertEqual(da_3068.sourceCrs().authid(), 'EPSG:3068')
print((
"setting [{}] srid [{}] description [{}] isGeographic[{}] lengthUnits[{}] projectionAcronym[{}] ellipsoidAcronym[{}]".format(
u'EPSG:3068', da_3068.sourceCrs().authid(), da_3068.sourceCrs().description(),
da_3068.sourceCrs().isGeographic(), QgsUnitTypes.toString(da_3068.lengthUnits()),
da_3068.sourceCrs().projectionAcronym(), da_3068.sourceCrs().ellipsoidAcronym())))
da_wsg84 = QgsDistanceArea()
da_wsg84.setSourceCrs(QgsCoordinateReferenceSystem.fromOgcWmsCrs('EPSG:4326'),
QgsProject.instance().transformContext())
if (da_wsg84.sourceCrs().isGeographic()):
da_wsg84.setEllipsoid(da_wsg84.sourceCrs().ellipsoidAcronym())
self.assertEqual(da_wsg84.sourceCrs().authid(), 'EPSG:4326')
print((
"setting [{}] srid [{}] description [{}] isGeographic[{}] lengthUnits[{}] projectionAcronym[{}] ellipsoidAcronym[{}] ellipsoid[{}]".format(
u'EPSG:4326', da_wsg84.sourceCrs().authid(), da_wsg84.sourceCrs().description(),
da_wsg84.sourceCrs().isGeographic(), QgsUnitTypes.toString(da_wsg84.lengthUnits()),
da_wsg84.sourceCrs().projectionAcronym(), da_wsg84.sourceCrs().ellipsoidAcronym(),
da_wsg84.ellipsoid())))
da_4314 = QgsDistanceArea()
da_4314.setSourceCrs(QgsCoordinateReferenceSystem.fromOgcWmsCrs('EPSG:4314'),
QgsProject.instance().transformContext())
if (da_4314.sourceCrs().isGeographic()):
da_4314.setEllipsoid(da_4314.sourceCrs().ellipsoidAcronym())
self.assertEqual(da_4314.sourceCrs().authid(), 'EPSG:4314')
print((
"setting [{}] srid [{}] description [{}] isGeographic[{}] lengthUnits[{}] projectionAcronym[{}] ellipsoidAcronym[{}]".format(
u'EPSG:4314', da_4314.sourceCrs().authid(), da_4314.sourceCrs().description(),
da_4314.sourceCrs().isGeographic(), QgsUnitTypes.toString(da_4314.lengthUnits()),
da_4314.sourceCrs().projectionAcronym(), da_4314.sourceCrs().ellipsoidAcronym())))
da_4805 = QgsDistanceArea()
da_4805.setSourceCrs(QgsCoordinateReferenceSystem.fromOgcWmsCrs('EPSG:4805'),
QgsProject.instance().transformContext())
if (da_4805.sourceCrs().isGeographic()):
da_4805.setEllipsoid(da_4805.sourceCrs().ellipsoidAcronym())
self.assertEqual(da_4805.sourceCrs().authid(), 'EPSG:4805')
print((
"setting [{}] srid [{}] description [{}] isGeographic[{}] lengthUnits[{}] projectionAcronym[{}] ellipsoidAcronym[{}]".format(
u'EPSG:4805', da_4805.sourceCrs().authid(), da_4805.sourceCrs().description(),
da_4805.sourceCrs().isGeographic(), QgsUnitTypes.toString(da_4805.lengthUnits()),
da_4805.sourceCrs().projectionAcronym(), da_4805.sourceCrs().ellipsoidAcronym())))
# EPSG:5665 unknown, why?
da_5665 = QgsDistanceArea()
da_5665.setSourceCrs(QgsCoordinateReferenceSystem.fromOgcWmsCrs('EPSG:5665'),
QgsProject.instance().transformContext())
if (da_5665.sourceCrs().isGeographic()):
da_5665.setEllipsoid(da_5665.sourceCrs().ellipsoidAcronym())
print((
"setting [{}] srid [{}] description [{}] isGeographic[{}] lengthUnits[{}] projectionAcronym[{}] ellipsoidAcronym[{}]".format(
u'EPSG:5665', da_5665.sourceCrs().authid(), da_5665.sourceCrs().description(),
da_5665.sourceCrs().isGeographic(), QgsUnitTypes.toString(da_5665.lengthUnits()),
da_5665.sourceCrs().projectionAcronym(), da_5665.sourceCrs().ellipsoidAcronym())))
# self.assertEqual(da_5665.sourceCrs().authid(), 'EPSG:5665')
da_25833 = QgsDistanceArea()
da_25833.setSourceCrs(QgsCoordinateReferenceSystem.fromOgcWmsCrs('EPSG:25833'),
QgsProject.instance().transformContext())
if (da_25833.sourceCrs().isGeographic()):
da_25833.setEllipsoid(da_25833.sourceCrs().ellipsoidAcronym())
print((
"setting [{}] srid [{}] description [{}] isGeographic[{}] lengthUnits[{}] projectionAcronym[{}] ellipsoidAcronym[{}]".format(
u'EPSG:25833', da_25833.sourceCrs().authid(), da_25833.sourceCrs().description(),
da_25833.sourceCrs().isGeographic(), QgsUnitTypes.toString(da_25833.lengthUnits()),
da_25833.sourceCrs().projectionAcronym(), da_25833.sourceCrs().ellipsoidAcronym())))
self.assertEqual(da_25833.sourceCrs().authid(), 'EPSG:25833')
# Berlin - Brandenburg Gate - Quadriga
point_berlin_3068 = QgsPointXY(23183.38449999984, 21047.3225000017)
point_berlin_3068_project = point_berlin_3068.project(1, (math.pi / 2))
point_meter_result = QgsPointXY(0, 0)
length_meter_mapunits, point_meter_result = da_3068.measureLineProjected(point_berlin_3068, 1.0, (math.pi / 2))
pprint(point_meter_result)
print('-I-> Berlin 3068 length_meter_mapunits[{}] point_meter_result[{}]'.format(
QgsDistanceArea.formatDistance(length_meter_mapunits, 7, da_3068.lengthUnits(), True),
point_meter_result.asWkt()))
self.assertEqual(QgsDistanceArea.formatDistance(length_meter_mapunits, 1, da_3068.lengthUnits(), True), '1.0 m')
self.assertEqual(point_meter_result.toString(7), point_berlin_3068_project.toString(7))
point_berlin_wsg84 = QgsPointXY(13.37770458660236, 52.51627178856762)
point_berlin_wsg84_project = QgsPointXY(13.37771931736259, 52.51627178856669)
length_meter_mapunits, point_meter_result = da_wsg84.measureLineProjected(point_berlin_wsg84, 1.0,
(math.pi / 2))
print('-I-> Berlin Wsg84 length_meter_mapunits[{}] point_meter_result[{}] ellipsoid[{}]'.format(
QgsDistanceArea.formatDistance(length_meter_mapunits, 20, da_wsg84.lengthUnits(), True),
point_meter_result.asWkt(), da_wsg84.ellipsoid()))
# for unknown reasons, this is returning '0.00001473026 m' instead of '0.00001473026 deg' when using da_wsg84.lengthUnits()
# self.assertEqual(QgsDistanceArea.formatDistance(length_meter_mapunits,11,da_wsg84.lengthUnits(),True), '0.00001473026 deg')
self.assertEqual(QgsDistanceArea.formatDistance(length_meter_mapunits, 11, QgsUnitTypes.DistanceDegrees, True),
'0.00001473076 deg')
self.assertEqual(point_meter_result.toString(7), point_berlin_wsg84_project.toString(7))
point_berlin_4314 = QgsPointXY(13.37944343021465, 52.51767872437083)
point_berlin_4314_project = QgsPointXY(13.37945816324759, 52.5176787243699)
length_meter_mapunits, point_meter_result = da_4314.measureLineProjected(point_berlin_4314, 1.0, (math.pi / 2))
print('-I-> Berlin 4314 length_meter_mapunits[{}] point_meter_result[{}]'.format(
QgsDistanceArea.formatDistance(length_meter_mapunits, 7, da_4314.lengthUnits(), True),
point_meter_result.asWkt()))
self.assertEqual(QgsDistanceArea.formatDistance(length_meter_mapunits, 9, QgsUnitTypes.DistanceDegrees, True),
'0.000014733 deg')
self.assertEqual(point_meter_result.toString(7), point_berlin_4314_project.toString(7))
point_berlin_4805 = QgsPointXY(31.04960570069176, 52.5174657497405)
point_berlin_4805_project = QgsPointXY(31.04962043365347, 52.51746574973957)
length_meter_mapunits, point_meter_result = da_4805.measureLineProjected(point_berlin_4805, 1.0, (math.pi / 2))
print('-I-> Berlin 4805 length_meter_mapunits[{}] point_meter_result[{}]'.format(
QgsDistanceArea.formatDistance(length_meter_mapunits, 7, da_4805.lengthUnits(), True),
point_meter_result.asWkt()))
self.assertEqual(QgsDistanceArea.formatDistance(length_meter_mapunits, 9, QgsUnitTypes.DistanceDegrees, True),
'0.000014733 deg')
self.assertEqual(point_meter_result.toString(7), point_berlin_4805_project.toString(7))
point_berlin_25833 = QgsPointXY(389918.0748318382, 5819698.772194743)
point_berlin_25833_project = point_berlin_25833.project(1, (math.pi / 2))
length_meter_mapunits, point_meter_result = da_25833.measureLineProjected(point_berlin_25833, 1.0,
(math.pi / 2))
print('-I-> Berlin 25833 length_meter_mapunits[{}] point_meter_result[{}]'.format(
QgsDistanceArea.formatDistance(length_meter_mapunits, 7, da_25833.lengthUnits(), True),
point_meter_result.asWkt()))
self.assertEqual(QgsDistanceArea.formatDistance(length_meter_mapunits, 7, da_25833.lengthUnits(), True),
'1.0000000 m')
self.assertEqual(point_meter_result.toString(7), point_berlin_25833_project.toString(7))
if da_5665.sourceCrs().authid() != "":
point_berlin_5665 = QgsPointXY(3389996.871728864, 5822169.719727578)
point_berlin_5665_project = point_berlin_5665.project(1, (math.pi / 2))
length_meter_mapunits, point_meter_result = da_5665.measureLineProjected(point_berlin_5665, 1.0,
(math.pi / 2))
print('-I-> Berlin 5665 length_meter_mapunits[{}] point_meter_result[{}]'.format(
QgsDistanceArea.formatDistance(length_meter_mapunits, 7, da_5665.lengthUnits(), True),
point_meter_result.asWkt()))
self.assertEqual(QgsDistanceArea.formatDistance(length_meter_mapunits, 1.0, da_5665.lengthUnits(), True),
'1.0 m')
self.assertEqual(point_meter_result.toString(7), point_berlin_5665_project.toString(7))
print('\n12 points ''above over'' and on the Equator')
point_wsg84 = QgsPointXY(25.7844, 71.1725)
point_wsg84_project = QgsPointXY(25.78442775215388, 71.17249999999795)
length_meter_mapunits, point_meter_result = da_wsg84.measureLineProjected(point_wsg84, 1.0, (math.pi / 2))
print('-I-> Nordkap, Norway - Wsg84 - length_meter_mapunits[{}] point_meter_result[{}] '.format(
QgsDistanceArea.formatDistance(length_meter_mapunits, 7, da_wsg84.lengthUnits(), True),
point_meter_result.asWkt()))
self.assertEqual(QgsDistanceArea.formatDistance(length_meter_mapunits, 7, QgsUnitTypes.DistanceDegrees, True),
'0.0000278 deg')
self.assertEqual(point_meter_result.toString(7), point_wsg84_project.toString(7))
point_wsg84 = QgsPointXY(24.95995, 60.16841)
point_wsg84_project = QgsPointXY(24.95996801277454, 60.16840999999877)
length_meter_mapunits, point_meter_result = da_wsg84.measureLineProjected(point_wsg84, 1.0, (math.pi / 2))
print('-I-> Helsinki, Finnland - Wsg84 - length_meter_mapunits[{}] point_meter_result[{}] '.format(
QgsDistanceArea.formatDistance(length_meter_mapunits, 7, da_wsg84.lengthUnits(), True),
point_meter_result.asWkt()))
self.assertEqual(QgsDistanceArea.formatDistance(length_meter_mapunits, 8, QgsUnitTypes.DistanceDegrees, True),
'0.00001801 deg')
self.assertEqual(point_meter_result.toString(7), point_wsg84_project.toString(7))
point_wsg84 = QgsPointXY(12.599278, 55.692861)
point_wsg84_project = QgsPointXY(12.59929390161872, 55.69286099999897)
length_meter_mapunits, point_meter_result = da_wsg84.measureLineProjected(point_wsg84, 1.0, (math.pi / 2))
print('-I-> Copenhagen, Denmark - Wsg84 - length_meter_mapunits[{}] point_meter_result[{}] '.format(
QgsDistanceArea.formatDistance(length_meter_mapunits, 7, da_wsg84.lengthUnits(), True),
point_meter_result.asWkt()))
self.assertEqual(QgsDistanceArea.formatDistance(length_meter_mapunits, 8, QgsUnitTypes.DistanceDegrees, True),
'0.00001590 deg')
self.assertEqual(point_meter_result.toString(7), point_wsg84_project.toString(7))
point_wsg84 = QgsPointXY(-0.001389, 51.477778)
point_wsg84_project = QgsPointXY(-0.001374606184398, 51.4777779999991)
length_meter_mapunits, point_meter_result = da_wsg84.measureLineProjected(point_wsg84, 1.0, (math.pi / 2))
print(
'-I-> Royal Greenwich Observatory, United Kingdom - Wsg84 - length_meter_mapunits[{}] point_meter_result[{}] '.format(
QgsDistanceArea.formatDistance(length_meter_mapunits, 7, da_wsg84.lengthUnits(), True),
point_meter_result.asWkt()))
self.assertEqual(QgsDistanceArea.formatDistance(length_meter_mapunits, 8, QgsUnitTypes.DistanceDegrees, True),
'0.00001439 deg')
self.assertEqual(point_meter_result.toString(7), point_wsg84_project.toString(7))
point_wsg84 = QgsPointXY(7.58769, 47.55814)
point_wsg84_project = QgsPointXY(7.587703287209086, 47.55813999999922)
length_meter_mapunits, point_meter_result = da_wsg84.measureLineProjected(point_wsg84, 1.0, (math.pi / 2))
print('-I-> Basel, Switzerland - Wsg84 - length_meter_mapunits[{}] point_meter_result[{}] '.format(
QgsDistanceArea.formatDistance(length_meter_mapunits, 7, da_wsg84.lengthUnits(), True),
point_meter_result.asWkt()))
self.assertEqual(QgsDistanceArea.formatDistance(length_meter_mapunits, 8, QgsUnitTypes.DistanceDegrees, True),
'0.00001329 deg')
self.assertEqual(point_meter_result.toString(7), point_wsg84_project.toString(7))
point_wsg84 = QgsPointXY(11.255278, 43.775278)
point_wsg84_project = QgsPointXY(11.25529042107924, 43.77527799999933)
length_meter_mapunits, point_meter_result = da_wsg84.measureLineProjected(point_wsg84, 1.0, (math.pi / 2))
print('-I-> Florenz, Italy - Wsg84 - length_meter_mapunits[{}] point_meter_result[{}] '.format(
QgsDistanceArea.formatDistance(length_meter_mapunits, 7, da_wsg84.lengthUnits(), True),
point_meter_result.asWkt()))
self.assertEqual(QgsDistanceArea.formatDistance(length_meter_mapunits, 8, QgsUnitTypes.DistanceDegrees, True),
'0.00001242 deg')
self.assertEqual(point_meter_result.toString(7), point_wsg84_project.toString(7))
point_wsg84 = QgsPointXY(14.514722, 35.899722)
point_wsg84_project = QgsPointXY(14.51473307693308, 35.89972199999949)
length_meter_mapunits, point_meter_result = da_wsg84.measureLineProjected(point_wsg84, 1.0, (math.pi / 2))
print('-I-> Valletta, Malta - Wsg84 - length_meter_mapunits[{}] point_meter_result[{}] '.format(
QgsDistanceArea.formatDistance(length_meter_mapunits, 7, da_wsg84.lengthUnits(), True),
point_meter_result.asWkt()))
self.assertEqual(QgsDistanceArea.formatDistance(length_meter_mapunits, 8, QgsUnitTypes.DistanceDegrees, True),
'0.00001108 deg')
self.assertEqual(point_meter_result.toString(7), point_wsg84_project.toString(7))
point_wsg84 = QgsPointXY(-79.933333, 32.783333)
point_wsg84_project = QgsPointXY(-79.93332232547254, 32.78333299999955)
length_meter_mapunits, point_meter_result = da_wsg84.measureLineProjected(point_wsg84, 1.0, (math.pi / 2))
print('-I-> Charlston, South Carolina - Wsg84 - length_meter_mapunits[{}] point_meter_result[{}] '.format(
QgsDistanceArea.formatDistance(length_meter_mapunits, 7, da_wsg84.lengthUnits(), True),
point_meter_result.asWkt()))
self.assertEqual(QgsDistanceArea.formatDistance(length_meter_mapunits, 8, QgsUnitTypes.DistanceDegrees, True),
'0.00001067 deg')
self.assertEqual(point_meter_result.toString(7), point_wsg84_project.toString(7))
point_wsg84 = QgsPointXY(-17.6666666, 27.733333)
point_wsg84_project = QgsPointXY(-17.66665645831515, 27.73333299999962)
length_meter_mapunits, point_meter_result = da_wsg84.measureLineProjected(point_wsg84, 1.0, (math.pi / 2))
print('-I-> Ferro, Spain - Wsg84 - length_meter_mapunits[{}] point_meter_result[{}] '.format(
QgsDistanceArea.formatDistance(length_meter_mapunits, 7, da_wsg84.lengthUnits(), True),
point_meter_result.asWkt()))
self.assertEqual(QgsDistanceArea.formatDistance(length_meter_mapunits, 8, QgsUnitTypes.DistanceDegrees, True),
'0.00001014 deg')
self.assertEqual(point_meter_result.toString(7), point_wsg84_project.toString(7))
point_wsg84 = QgsPointXY(-99.133333, 19.433333)
point_wsg84_project = QgsPointXY(-99.1333234776827, 19.43333299999975)
length_meter_mapunits, point_meter_result = da_wsg84.measureLineProjected(point_wsg84, 1.0, (math.pi / 2))
print('-I-> Mexico City, Mexico - Wsg84 - length_meter_mapunits[{}] point_meter_result[{}] '.format(
QgsDistanceArea.formatDistance(length_meter_mapunits, 7, da_wsg84.lengthUnits(), True),
point_meter_result.asWkt()))
self.assertEqual(QgsDistanceArea.formatDistance(length_meter_mapunits, 8, QgsUnitTypes.DistanceDegrees, True),
'0.00000952 deg')
self.assertEqual(point_meter_result.toString(7), point_wsg84_project.toString(7))
point_wsg84 = QgsPointXY(-79.894444, 9.341667)
point_wsg84_project = QgsPointXY(-79.89443489691369, 9.341666999999882)
length_meter_mapunits, point_meter_result = da_wsg84.measureLineProjected(point_wsg84, 1.0, (math.pi / 2))
print('-I-> Colón, Panama - Wsg84 - length_meter_mapunits[{}] point_meter_result[{}] '.format(
QgsDistanceArea.formatDistance(length_meter_mapunits, 7, da_wsg84.lengthUnits(), True),
point_meter_result.asWkt()))
self.assertEqual(QgsDistanceArea.formatDistance(length_meter_mapunits, 8, QgsUnitTypes.DistanceDegrees, True),
'0.00000910 deg')
self.assertEqual(point_meter_result.toString(7), point_wsg84_project.toString(7))
point_wsg84 = QgsPointXY(-74.075833, 4.598056)
point_wsg84_project = QgsPointXY(-74.07582398803629, 4.598055999999943)
length_meter_mapunits, point_meter_result = da_wsg84.measureLineProjected(point_wsg84, 1.0, (math.pi / 2))
print('-I-> Bogotá, Colombia - Wsg84 - length_meter_mapunits[{}] point_meter_result[{}] '.format(
QgsDistanceArea.formatDistance(length_meter_mapunits, 7, da_wsg84.lengthUnits(), True),
point_meter_result.asWkt()))
self.assertEqual(QgsDistanceArea.formatDistance(length_meter_mapunits, 8, QgsUnitTypes.DistanceDegrees, True),
'0.00000901 deg')
self.assertEqual(point_meter_result.toString(7), point_wsg84_project.toString(7))
point_wsg84 = QgsPointXY(0, 0)
point_wsg84_project = QgsPointXY(0.000008983152841, 0)
length_meter_mapunits, point_meter_result = da_wsg84.measureLineProjected(point_wsg84, 1.0, (math.pi / 2))
print('-I-> Equator, Atlantic Ocean - Wsg84 - length_meter_mapunits[{}] point_meter_result[{}] '.format(
QgsDistanceArea.formatDistance(length_meter_mapunits, 7, da_wsg84.lengthUnits(), True),
point_meter_result.asWkt()))
self.assertEqual(QgsDistanceArea.formatDistance(length_meter_mapunits, 8, QgsUnitTypes.DistanceDegrees, True),
'0.00000898 deg')
self.assertEqual(point_meter_result.toString(7), point_wsg84_project.toString(7))
print('\n12 points ''down under'' and 1 point that should be considered invalid')
point_wsg84 = QgsPointXY(-78.509722, -0.218611)
point_wsg84_project = QgsPointXY(-78.50971301678221, -0.218610999999997)
length_meter_mapunits, point_meter_result = da_wsg84.measureLineProjected(point_wsg84, 1.0, (math.pi / 2))
print('-I-> Quito, Ecuador - Wsg84 - length_meter_mapunits[{}] point_meter_result[{}] '.format(
QgsDistanceArea.formatDistance(length_meter_mapunits, 7, da_wsg84.lengthUnits(), True),
point_meter_result.asWkt()))
self.assertEqual(QgsDistanceArea.formatDistance(length_meter_mapunits, 8, QgsUnitTypes.DistanceDegrees, True),
'0.00000898 deg')
self.assertEqual(point_meter_result.toString(7), point_wsg84_project.toString(7))
point_wsg84 = QgsPointXY(106.816667, -6.2)
point_wsg84_project = QgsPointXY(106.8166760356519, -6.199999999999922)
length_meter_mapunits, point_meter_result = da_wsg84.measureLineProjected(point_wsg84, 1.0, (math.pi / 2))
print('-I-> Jakarta, Indonesia - Wsg84 - length_meter_mapunits[{}] point_meter_result[{}] '.format(
QgsDistanceArea.formatDistance(length_meter_mapunits, 7, da_wsg84.lengthUnits(), True),
point_meter_result.asWkt()))
self.assertEqual(QgsDistanceArea.formatDistance(length_meter_mapunits, 8, QgsUnitTypes.DistanceDegrees, True),
'0.00000904 deg')
self.assertEqual(point_meter_result.toString(7), point_wsg84_project.toString(7))
point_wsg84 = QgsPointXY(-77.018611, -12.035)
point_wsg84_project = QgsPointXY(-77.01860181630058, -12.03499999999985)
length_meter_mapunits, point_meter_result = da_wsg84.measureLineProjected(point_wsg84, 1.0, (math.pi / 2))
print('-I-> Lima, Peru - Wsg84 - length_meter_mapunits[{}] point_meter_result[{}] '.format(
QgsDistanceArea.formatDistance(length_meter_mapunits, 7, da_wsg84.lengthUnits(), True),
point_meter_result.asWkt()))
self.assertEqual(QgsDistanceArea.formatDistance(length_meter_mapunits, 8, QgsUnitTypes.DistanceDegrees, True),
'0.00000918 deg')
self.assertEqual(point_meter_result.toString(7), point_wsg84_project.toString(7))
point_wsg84 = QgsPointXY(25.466667, -10.716667)
point_wsg84_project = QgsPointXY(25.46667614155322, -10.71666699999986)
length_meter_mapunits, point_meter_result = da_wsg84.measureLineProjected(point_wsg84, 1.0, (math.pi / 2))
print('-I-> Kolwezi, Congo - Wsg84 - length_meter_mapunits[{}] point_meter_result[{}] '.format(
QgsDistanceArea.formatDistance(length_meter_mapunits, 7, da_wsg84.lengthUnits(), True),
point_meter_result.asWkt()))
self.assertEqual(QgsDistanceArea.formatDistance(length_meter_mapunits, 8, QgsUnitTypes.DistanceDegrees, True),
'0.00000914 deg')
self.assertEqual(point_meter_result.toString(7), point_wsg84_project.toString(7))
point_wsg84 = QgsPointXY(-70.333333, -18.483333)
point_wsg84_project = QgsPointXY(-70.3333235314429, -18.48333299999976)
length_meter_mapunits, point_meter_result = da_wsg84.measureLineProjected(point_wsg84, 1.0, (math.pi / 2))
print('-I-> Arica, Chile - Wsg84 - length_meter_mapunits[{}] point_meter_result[{}] '.format(
QgsDistanceArea.formatDistance(length_meter_mapunits, 7, da_wsg84.lengthUnits(), True),
point_meter_result.asWkt()))
self.assertEqual(QgsDistanceArea.formatDistance(length_meter_mapunits, 8, QgsUnitTypes.DistanceDegrees, True),
'0.00000947 deg')
self.assertEqual(point_meter_result.toString(7), point_wsg84_project.toString(7))
point_wsg84 = QgsPointXY(-70.666667, -33.45)
point_wsg84_project = QgsPointXY(-70.66665624452817, -33.44999999999953)
length_meter_mapunits, point_meter_result = da_wsg84.measureLineProjected(point_wsg84, 1.0, (math.pi / 2))
print('-I-> Santiago, Chile - Wsg84 - length_meter_mapunits[{}] point_meter_result[{}] '.format(
QgsDistanceArea.formatDistance(length_meter_mapunits, 7, da_wsg84.lengthUnits(), True),
point_meter_result.asWkt()))
self.assertEqual(QgsDistanceArea.formatDistance(length_meter_mapunits, 8, QgsUnitTypes.DistanceDegrees, True),
'0.00001076 deg')
self.assertEqual(point_meter_result.toString(7), point_wsg84_project.toString(7))
point_wsg84 = QgsPointXY(144.9604, -37.8191)
point_wsg84_project = QgsPointXY(144.96041135746983741, -37.81909999999945171)
length_meter_mapunits, point_meter_result = da_wsg84.measureLineProjected(point_wsg84, 1.0, (math.pi / 2))
print('-I-> Melbourne, Australia - Wsg84 - length_meter_mapunits[{}] point_meter_result[{}] '.format(
QgsDistanceArea.formatDistance(length_meter_mapunits, 8, da_wsg84.lengthUnits(), True),
point_meter_result.asWkt()))
self.assertEqual(QgsDistanceArea.formatDistance(length_meter_mapunits, 8, QgsUnitTypes.DistanceDegrees, True),
'0.00001136 deg')
self.assertEqual(point_meter_result.toString(7), point_wsg84_project.toString(7))
point_wsg84 = QgsPointXY(147.29, -42.88)
point_wsg84_project = QgsPointXY(147.2900122399815, -42.87999999999934)
length_meter_mapunits, point_meter_result = da_wsg84.measureLineProjected(point_wsg84, 1.0, (math.pi / 2))
print('-I-> Hobart City,Tasmania, Australia - Wsg84 - length_meter_mapunits[{}] point_meter_result[{}] '.format(
QgsDistanceArea.formatDistance(length_meter_mapunits, 7, da_wsg84.lengthUnits(), True),
point_meter_result.asWkt()))
self.assertEqual(QgsDistanceArea.formatDistance(length_meter_mapunits, 8, QgsUnitTypes.DistanceDegrees, True),
'0.00001224 deg')
self.assertEqual(point_meter_result.toString(7), point_wsg84_project.toString(7))
point_wsg84 = QgsPointXY(168.101667, -46.899722)
point_wsg84_project = QgsPointXY(168.101680123673, -46.89972199999923)
length_meter_mapunits, point_meter_result = da_wsg84.measureLineProjected(point_wsg84, 1.0, (math.pi / 2))
print(
'-I-> Ryan''s Creek Aerodrome, New Zealand - Wsg84 - length_meter_mapunits[{}] point_meter_result[{}] '.format(
QgsDistanceArea.formatDistance(length_meter_mapunits, 7, da_wsg84.lengthUnits(), True),
point_meter_result.asWkt()))
self.assertEqual(QgsDistanceArea.formatDistance(length_meter_mapunits, 8, QgsUnitTypes.DistanceDegrees, True),
'0.00001312 deg')
self.assertEqual(point_meter_result.toString(7), point_wsg84_project.toString(7))
point_wsg84 = QgsPointXY(-69.216667, -51.633333)
point_wsg84_project = QgsPointXY(-69.21665255700216, -51.6333329999991)
length_meter_mapunits, point_meter_result = da_wsg84.measureLineProjected(point_wsg84, 1.0, (math.pi / 2))
print('-I-> Río Gallegos, Argentina - Wsg84 - length_meter_mapunits[{}] point_meter_result[{}] '.format(
QgsDistanceArea.formatDistance(length_meter_mapunits, 7, da_wsg84.lengthUnits(), True),
point_meter_result.asWkt()))
self.assertEqual(QgsDistanceArea.formatDistance(length_meter_mapunits, 8, QgsUnitTypes.DistanceDegrees, True),
'0.00001444 deg')
self.assertEqual(point_meter_result.toString(7), point_wsg84_project.toString(7))
point_wsg84 = QgsPointXY(-68.3, -54.8)
point_wsg84_project = QgsPointXY(-68.29998445081456, -54.79999999999899)
length_meter_mapunits, point_meter_result = da_wsg84.measureLineProjected(point_wsg84, 1.0, (math.pi / 2))
print(
'-I-> Ushuaia, Tierra del Fuego, Argentina - Wsg84 - length_meter_mapunits[{}] point_meter_result[{}] '.format(
QgsDistanceArea.formatDistance(length_meter_mapunits, 7, da_wsg84.lengthUnits(), True),
point_meter_result.asWkt()))
self.assertEqual(QgsDistanceArea.formatDistance(length_meter_mapunits, 8, QgsUnitTypes.DistanceDegrees, True),
'0.00001555 deg')
self.assertEqual(point_meter_result.toString(7), point_wsg84_project.toString(7))
point_wsg84 = QgsPointXY(-63.494444, -64.825278)
point_wsg84_project = QgsPointXY(-63.49442294002932, -64.82527799999851)
length_meter_mapunits, point_meter_result = da_wsg84.measureLineProjected(point_wsg84, 1.0, (math.pi / 2))
print('-I-> Port Lockroy, Antarctica - Wsg84 - length_meter_mapunits[{}] point_meter_result[{}] '.format(
QgsDistanceArea.formatDistance(length_meter_mapunits, 7, da_wsg84.lengthUnits(), True),
point_meter_result.asWkt()))
self.assertEqual(QgsDistanceArea.formatDistance(length_meter_mapunits, 8, QgsUnitTypes.DistanceDegrees, True),
'0.00002106 deg')
self.assertEqual(point_meter_result.toString(7), point_wsg84_project.toString(7))
point_wsg84 = QgsPointXY(-180, -84.863272250)
point_wsg84_project = QgsPointXY(-179.9999000000025, -84.8632722499922)
length_meter_mapunits, point_meter_result = da_wsg84.measureLineProjected(point_wsg84, 1.0, (math.pi / 2))
print('-I-> Someware, Antarctica - Wsg84 - length_meter_mapunits[{}] point_meter_result[{}] '.format(
QgsDistanceArea.formatDistance(length_meter_mapunits, 7, da_wsg84.lengthUnits(), True),
point_meter_result.asWkt()))
self.assertEqual(QgsDistanceArea.formatDistance(length_meter_mapunits, 8, QgsUnitTypes.DistanceDegrees, True),
'0.00010000 deg')
self.assertEqual(point_meter_result.toString(7), point_wsg84_project.toString(7))
point_wsg84 = QgsPointXY(-180, -85.0511300)
point_wsg84_project = QgsPointXY(-179.9998962142197, -85.05112999999191)
length_meter_mapunits, point_meter_result = da_wsg84.measureLineProjected(point_wsg84, 1.0, (math.pi / 2))
print(
'-W-> Mercator''s Last Stop, Antarctica - Wsg84 - length_meter_mapunits[{}] point_meter_result[{}] '.format(
QgsDistanceArea.formatDistance(length_meter_mapunits, 7, da_wsg84.lengthUnits(), True),
point_meter_result.asWkt()))
self.assertEqual(QgsDistanceArea.formatDistance(length_meter_mapunits, 8, QgsUnitTypes.DistanceDegrees, True),
'0.00010379 deg')
self.assertEqual(point_meter_result.toString(7), point_wsg84_project.toString(7))
def testMeasureMultiLine(self):
    """Planar length of a multi-part linestring.

    Two open shapes drawn on a unit grid:
        +-+ +-+-+
        | | |   |
        +-+ +   +-+
    Part one contributes 4 unit segments and part two contributes 5,
    so the total planar length must be exactly 9 map units.
    """
    linestring = QgsGeometry.fromMultiPolylineXY(
        [
            [QgsPointXY(0, 0), QgsPointXY(1, 0), QgsPointXY(1, 1), QgsPointXY(2, 1), QgsPointXY(2, 0), ],
            [QgsPointXY(3, 0), QgsPointXY(3, 1), QgsPointXY(5, 1), QgsPointXY(5, 0), QgsPointXY(6, 0), ]
        ]
    )
    # No CRS/ellipsoid configured, so the measurement is plain Cartesian.
    da = QgsDistanceArea()
    length = da.measureLength(linestring)
    # assertEqual is not stripped under ``python -O`` (a bare assert is)
    # and reports both values on failure.
    self.assertEqual(length, 9, 'Expected:\n%f\nGot:\n%f\n' % (9, length))
def testMeasurePolygon(self):
    """Planar area and perimeter of a simple L-shaped polygon.

    Shape on the unit grid (3 unit squares, boundary length 8):
        +-+-+
        |   |
        + +-+
        | |
        +-+
    """
    polygon = QgsGeometry.fromPolygonXY(
        [[
            QgsPointXY(0, 0), QgsPointXY(1, 0), QgsPointXY(1, 1), QgsPointXY(2, 1), QgsPointXY(2, 2),
            QgsPointXY(0, 2), QgsPointXY(0, 0),
        ]]
    )
    # No CRS/ellipsoid configured, so measurements are plain Cartesian.
    da = QgsDistanceArea()
    area = da.measureArea(polygon)
    # assertEqual is not stripped under ``python -O`` (a bare assert is)
    # and reports both values on failure.
    self.assertEqual(area, 3, 'Expected:\n%f\nGot:\n%f\n' % (3, area))
    perimeter = da.measurePerimeter(polygon)
    self.assertEqual(perimeter, 8, 'Expected:\n%f\nGot:\n%f\n' % (8, perimeter))
def testMeasurePolygonWithHole(self):
    """Planar area and perimeter of a square polygon with a square hole.

        +-+-+-+
        |     |
        + +-+ +
        | | | |
        + +-+ +
        |     |
        +-+-+-+

    Area is the 3x3 outer square minus the 1x1 hole = 8. The perimeter
    includes the inner ring: 12 (outer) + 4 (inner) = 16.
    """
    polygon = QgsGeometry.fromPolygonXY(
        [
            [QgsPointXY(0, 0), QgsPointXY(3, 0), QgsPointXY(3, 3), QgsPointXY(0, 3), QgsPointXY(0, 0)],
            [QgsPointXY(1, 1), QgsPointXY(2, 1), QgsPointXY(2, 2), QgsPointXY(1, 2), QgsPointXY(1, 1)],
        ]
    )
    # No CRS/ellipsoid configured, so measurements are plain Cartesian.
    da = QgsDistanceArea()
    area = da.measureArea(polygon)
    # assertEqual is not stripped under ``python -O`` (a bare assert is)
    # and reports both values on failure.
    self.assertEqual(area, 8, "Expected:\n%f\nGot:\n%f\n" % (8, area))
    # MH150729: Changed behavior to consider inner rings for perimeter calculation. Therefore, expected result is 16.
    perimeter = da.measurePerimeter(polygon)
    self.assertEqual(perimeter, 16, "Expected:\n%f\nGot:\n%f\n" % (16, perimeter))
def testMeasureMultiPolygon(self):
    """Planar area and perimeter of a multi-part polygon.

    Two mirrored L-shapes (3 unit squares each, boundary 8 each):
        +-+-+   +-+-+
        |   |   |   |
        + +-+   +-+ +
        | |       | |
        +-+       +-+
    Totals: area 6, perimeter 16.
    """
    polygon = QgsGeometry.fromMultiPolygonXY(
        [
            [[QgsPointXY(0, 0), QgsPointXY(1, 0), QgsPointXY(1, 1), QgsPointXY(2, 1), QgsPointXY(2, 2),
              QgsPointXY(0, 2), QgsPointXY(0, 0), ]],
            [[QgsPointXY(4, 0), QgsPointXY(5, 0), QgsPointXY(5, 2), QgsPointXY(3, 2), QgsPointXY(3, 1),
              QgsPointXY(4, 1), QgsPointXY(4, 0), ]]
        ]
    )
    # No CRS/ellipsoid configured, so measurements are plain Cartesian.
    da = QgsDistanceArea()
    area = da.measureArea(polygon)
    # assertEqual is not stripped under ``python -O`` (a bare assert is)
    # and reports both values on failure.
    self.assertEqual(area, 6, 'Expected:\n%f\nGot:\n%f\n' % (6, area))
    perimeter = da.measurePerimeter(polygon)
    self.assertEqual(perimeter, 16, "Expected:\n%f\nGot:\n%f\n" % (16, perimeter))
def testWillUseEllipsoid(self):
    """QgsDistanceArea.willUseEllipsoid() reflects the configured ellipsoid."""
    da = QgsDistanceArea()
    # "NONE" disables ellipsoidal calculation; a real ellipsoid enables it.
    for ellipsoid, expected in (("NONE", False), ("WGS84", True)):
        da.setEllipsoid(ellipsoid)
        self.assertEqual(da.willUseEllipsoid(), expected)
def testLengthMeasureAndUnits(self):
    """Test a variety of length measurements in different CRS and ellipsoid modes, to check that the
    calculated lengths and units are always consistent
    """
    da = QgsDistanceArea()
    # srsid 3452: a geographic (degree-based) CRS
    da.setSourceCrs(QgsCoordinateReferenceSystem.fromSrsId(3452), QgsProject.instance().transformContext())
    da.setEllipsoid("NONE")
    # We check both the measured length AND the units, in case the logic regarding
    # ellipsoids and units changes in future
    distance = da.measureLine(QgsPointXY(1, 1), QgsPointXY(2, 3))
    units = da.lengthUnits()
    print(("measured {} in {}".format(distance, QgsUnitTypes.toString(units))))
    # Either a planar result in degrees or a converted result in meters is
    # acceptable here. assertTrue (unlike a bare assert) survives
    # ``python -O`` and carries a diagnostic message on failure.
    self.assertTrue(
        (abs(distance - 2.23606797) < 0.00000001 and units == QgsUnitTypes.DistanceDegrees)
        or (abs(distance - 248.52) < 0.01 and units == QgsUnitTypes.DistanceMeters),
        "measured {} in {}".format(distance, QgsUnitTypes.toString(units)))
    da.setEllipsoid("WGS84")
    distance = da.measureLine(QgsPointXY(1, 1), QgsPointXY(2, 3))
    units = da.lengthUnits()
    print(("measured {} in {}".format(distance, QgsUnitTypes.toString(units))))
    # should always be in Meters
    self.assertAlmostEqual(distance, 247555.57, delta=0.01)
    self.assertEqual(units, QgsUnitTypes.DistanceMeters)
    # test converting the resultant length
    distance = da.convertLengthMeasurement(distance, QgsUnitTypes.DistanceNauticalMiles)
    self.assertAlmostEqual(distance, 133.669, delta=0.01)
    # now try with a source CRS which is in feet
    da.setSourceCrs(QgsCoordinateReferenceSystem.fromSrsId(27469), QgsProject.instance().transformContext())
    da.setEllipsoid("NONE")
    # measurement should be in feet
    distance = da.measureLine(QgsPointXY(1, 1), QgsPointXY(2, 3))
    units = da.lengthUnits()
    print(("measured {} in {}".format(distance, QgsUnitTypes.toString(units))))
    self.assertAlmostEqual(distance, 2.23606797, delta=0.000001)
    self.assertEqual(units, QgsUnitTypes.DistanceFeet)
    # test converting the resultant length
    distance = da.convertLengthMeasurement(distance, QgsUnitTypes.DistanceMeters)
    self.assertAlmostEqual(distance, 0.6815, delta=0.001)
    da.setEllipsoid("WGS84")
    # now should be in Meters again
    distance = da.measureLine(QgsPointXY(1, 1), QgsPointXY(2, 3))
    units = da.lengthUnits()
    print(("measured {} in {}".format(distance, QgsUnitTypes.toString(units))))
    self.assertAlmostEqual(distance, 0.67953772, delta=0.000001)
    self.assertEqual(units, QgsUnitTypes.DistanceMeters)
    # test converting the resultant length
    distance = da.convertLengthMeasurement(distance, QgsUnitTypes.DistanceFeet)
    self.assertAlmostEqual(distance, 2.2294, delta=0.001)
def testAreaMeasureAndUnits(self):
    """Test a variety of area measurements in different CRS and ellipsoid modes, to check that the
    calculated areas and units are always consistent
    """
    da = QgsDistanceArea()
    # srsid 3452: a geographic (degree-based) CRS
    da.setSourceCrs(QgsCoordinateReferenceSystem.fromSrsId(3452), QgsProject.instance().transformContext())
    da.setEllipsoid("NONE")
    polygon = QgsGeometry.fromPolygonXY(
        [[
            QgsPointXY(0, 0), QgsPointXY(1, 0), QgsPointXY(1, 1), QgsPointXY(2, 1), QgsPointXY(2, 2),
            QgsPointXY(0, 2), QgsPointXY(0, 0),
        ]]
    )
    # We check both the measured area AND the units, in case the logic regarding
    # ellipsoids and units changes in future
    area = da.measureArea(polygon)
    units = da.areaUnits()
    print(("measured {} in {}".format(area, QgsUnitTypes.toString(units))))
    # Either a planar result in square degrees or a converted result in
    # square meters is acceptable here. assertTrue (unlike a bare assert)
    # survives ``python -O`` and carries a diagnostic message on failure.
    self.assertTrue(
        (abs(area - 3.0) < 0.00000001 and units == QgsUnitTypes.AreaSquareDegrees)
        or (abs(area - 37176087091.5) < 0.1 and units == QgsUnitTypes.AreaSquareMeters),
        "measured {} in {}".format(area, QgsUnitTypes.toString(units)))
    da.setEllipsoid("WGS84")
    area = da.measureArea(polygon)
    units = da.areaUnits()
    print(("measured {} in {}".format(area, QgsUnitTypes.toString(units))))
    # should always be in Meters Squared
    self.assertAlmostEqual(area, 36918093794.121284, delta=0.1)
    self.assertEqual(units, QgsUnitTypes.AreaSquareMeters)
    # test converting the resultant area
    area = da.convertAreaMeasurement(area, QgsUnitTypes.AreaSquareMiles)
    self.assertAlmostEqual(area, 14254.155703182701, delta=0.001)
    # now try with a source CRS which is in feet
    polygon = QgsGeometry.fromPolygonXY(
        [[
            QgsPointXY(1850000, 4423000), QgsPointXY(1851000, 4423000), QgsPointXY(1851000, 4424000),
            QgsPointXY(1852000, 4424000), QgsPointXY(1852000, 4425000), QgsPointXY(1851000, 4425000),
            QgsPointXY(1850000, 4423000)
        ]]
    )
    da.setSourceCrs(QgsCoordinateReferenceSystem.fromSrsId(27469), QgsProject.instance().transformContext())
    da.setEllipsoid("NONE")
    # measurement should be in square feet
    area = da.measureArea(polygon)
    units = da.areaUnits()
    print(("measured {} in {}".format(area, QgsUnitTypes.toString(units))))
    self.assertAlmostEqual(area, 2000000, delta=0.001)
    self.assertEqual(units, QgsUnitTypes.AreaSquareFeet)
    # test converting the resultant area
    area = da.convertAreaMeasurement(area, QgsUnitTypes.AreaSquareYards)
    self.assertAlmostEqual(area, 222222.2222, delta=0.001)
    da.setEllipsoid("WGS84")
    # now should be in Square Meters again
    area = da.measureArea(polygon)
    units = da.areaUnits()
    print(("measured {} in {}".format(area, QgsUnitTypes.toString(units))))
    self.assertAlmostEqual(area, 185818.59096575077, delta=1.0)
    self.assertEqual(units, QgsUnitTypes.AreaSquareMeters)
    # test converting the resultant area
    area = da.convertAreaMeasurement(area, QgsUnitTypes.AreaSquareYards)
    self.assertAlmostEqual(area, 222237.18521272976, delta=1.0)
def testFormatDistance(self):
    """Test formatting distances"""
    # Pin the "C" locale so decimal separators are predictable, and restore
    # the system locale in a finally block: the original code restored it
    # only on success, so a failing assertion leaked the "C" locale into
    # every later test in the run.
    QLocale.setDefault(QLocale.c())
    try:
        self.assertEqual(QgsDistanceArea.formatDistance(45, 3, QgsUnitTypes.DistanceMeters), '45.000 m')
        self.assertEqual(QgsDistanceArea.formatDistance(1300, 1, QgsUnitTypes.DistanceMeters, False), '1.3 km')
        self.assertEqual(QgsDistanceArea.formatDistance(.005, 1, QgsUnitTypes.DistanceMeters, False), '5.0 mm')
        self.assertEqual(QgsDistanceArea.formatDistance(.05, 1, QgsUnitTypes.DistanceMeters, False), '5.0 cm')
        self.assertEqual(QgsDistanceArea.formatDistance(1.5, 3, QgsUnitTypes.DistanceKilometers, True), '1.500 km')
        self.assertEqual(QgsDistanceArea.formatDistance(1.5, 3, QgsUnitTypes.DistanceKilometers, False), '1.500 km')
        self.assertEqual(QgsDistanceArea.formatDistance(0.5, 3, QgsUnitTypes.DistanceKilometers, True), '0.500 km')
        self.assertEqual(QgsDistanceArea.formatDistance(0.5, 3, QgsUnitTypes.DistanceKilometers, False), '500.000 m')
        self.assertEqual(QgsDistanceArea.formatDistance(6000, 0, QgsUnitTypes.DistanceFeet, True), '6000 ft')
        self.assertEqual(QgsDistanceArea.formatDistance(6000, 3, QgsUnitTypes.DistanceFeet, False), '1.136 mi')
        self.assertEqual(QgsDistanceArea.formatDistance(300, 0, QgsUnitTypes.DistanceFeet, True), '300 ft')
        self.assertEqual(QgsDistanceArea.formatDistance(300, 0, QgsUnitTypes.DistanceFeet, False), '300 ft')
        self.assertEqual(QgsDistanceArea.formatDistance(3000, 0, QgsUnitTypes.DistanceYards, True), '3000 yd')
        self.assertEqual(QgsDistanceArea.formatDistance(3000, 3, QgsUnitTypes.DistanceYards, False), '1.705 mi')
        self.assertEqual(QgsDistanceArea.formatDistance(300, 0, QgsUnitTypes.DistanceYards, True), '300 yd')
        self.assertEqual(QgsDistanceArea.formatDistance(300, 0, QgsUnitTypes.DistanceYards, False), '300 yd')
        self.assertEqual(QgsDistanceArea.formatDistance(1.5, 3, QgsUnitTypes.DistanceMiles, True), '1.500 mi')
        self.assertEqual(QgsDistanceArea.formatDistance(1.5, 3, QgsUnitTypes.DistanceMiles, False), '1.500 mi')
        self.assertEqual(QgsDistanceArea.formatDistance(0.5, 3, QgsUnitTypes.DistanceMiles, True), '0.500 mi')
        self.assertEqual(QgsDistanceArea.formatDistance(0.5, 0, QgsUnitTypes.DistanceMiles, False), '2640 ft')
        self.assertEqual(QgsDistanceArea.formatDistance(0.5, 1, QgsUnitTypes.DistanceNauticalMiles, True), '0.5 NM')
        self.assertEqual(QgsDistanceArea.formatDistance(0.5, 1, QgsUnitTypes.DistanceNauticalMiles, False), '0.5 NM')
        self.assertEqual(QgsDistanceArea.formatDistance(1.5, 1, QgsUnitTypes.DistanceNauticalMiles, True), '1.5 NM')
        self.assertEqual(QgsDistanceArea.formatDistance(1.5, 1, QgsUnitTypes.DistanceNauticalMiles, False), '1.5 NM')
        self.assertEqual(QgsDistanceArea.formatDistance(1.5, 1, QgsUnitTypes.DistanceDegrees, True), '1.5 deg')
        self.assertEqual(QgsDistanceArea.formatDistance(1.0, 1, QgsUnitTypes.DistanceDegrees, False), '1.0 deg')
        self.assertEqual(QgsDistanceArea.formatDistance(1.0, 1, QgsUnitTypes.DistanceUnknownUnit, False), '1.0')
    finally:
        QLocale.setDefault(QLocale.system())
def testGeodesicIntersectionAtAntimeridian(self):
    """Check latitudeGeodesicCrossesAntimeridian() results.

    For a geodesic between two points, the method returns the latitude at
    which the line crosses the +/-180 degree meridian, plus the fraction
    along the geodesic at which the crossing occurs. Cases below cover
    WGS84, a custom sphere, and no ellipsoid, in both point orders and
    with longitudes given inside and outside the [-180, 180] range.
    """
    da = QgsDistanceArea()
    crs = QgsCoordinateReferenceSystem(4326, QgsCoordinateReferenceSystem.EpsgCrsId)
    da.setSourceCrs(crs, QgsProject.instance().transformContext())
    da.setEllipsoid("WGS84")
    # Equatorial geodesics: crossing latitude stays 0.
    lat, fract = da.latitudeGeodesicCrossesAntimeridian(QgsPointXY(0, 0), QgsPointXY(-170, 0))
    self.assertAlmostEqual(lat, 0, 5)
    self.assertAlmostEqual(fract, 0, 5)
    lat, fract = da.latitudeGeodesicCrossesAntimeridian(QgsPointXY(-170, 0), QgsPointXY(170, 0))
    self.assertAlmostEqual(lat, 0, 5)
    self.assertAlmostEqual(fract, 0.5, 5)
    # Longitudes outside [-180, 180] (181) must be handled too.
    lat, fract = da.latitudeGeodesicCrossesAntimeridian(QgsPointXY(179, 0), QgsPointXY(181, 0))
    self.assertAlmostEqual(lat, 0, 5)
    self.assertAlmostEqual(fract, 0.5, 5)
    # NOTE(review): this trio duplicates the (-170, 0) -> (170, 0) case
    # asserted above.
    lat, fract = da.latitudeGeodesicCrossesAntimeridian(QgsPointXY(-170, 0), QgsPointXY(170, 0))
    self.assertAlmostEqual(lat, 0, 5)
    self.assertAlmostEqual(fract, 0.5, 5)
    # Degenerate geodesics: both endpoints sit on the antimeridian.
    # NOTE(review): ``fract`` is unpacked but not asserted for these two
    # cases -- presumably intentional (fraction is ill-defined for a
    # zero-length geodesic), but worth confirming.
    lat, fract = da.latitudeGeodesicCrossesAntimeridian(QgsPointXY(180, 0), QgsPointXY(180, 0))
    self.assertAlmostEqual(lat, 0, 5)
    lat, fract = da.latitudeGeodesicCrossesAntimeridian(QgsPointXY(180, -10), QgsPointXY(180, -10))
    self.assertAlmostEqual(lat, -10, 5)
    # Asymmetric spans: the crossing fraction reflects where 180 deg lies
    # between the two longitudes, and flips when the points are swapped.
    lat, fract = da.latitudeGeodesicCrossesAntimeridian(QgsPointXY(171, 0), QgsPointXY(181, 0))
    self.assertAlmostEqual(lat, 0, 5)
    self.assertAlmostEqual(fract, 0.9, 5)
    lat, fract = da.latitudeGeodesicCrossesAntimeridian(QgsPointXY(181, 0), QgsPointXY(171, 0))
    self.assertAlmostEqual(lat, 0, 5)
    self.assertAlmostEqual(fract, 0.1, 5)
    # Long oblique geodesic into the southern hemisphere.
    lat, fract = da.latitudeGeodesicCrossesAntimeridian(QgsPointXY(138.26237, -20.314687),
                                                        QgsPointXY(-151.6, -77.8))
    self.assertAlmostEqual(lat, -73.89148222666744914, 5)
    self.assertAlmostEqual(fract, 0.007113545719515548, 5)
    # Same geodesic with the second longitude shifted by +360: result
    # must be identical.
    lat, fract = da.latitudeGeodesicCrossesAntimeridian(QgsPointXY(138.26237, -20.314687),
                                                        QgsPointXY(-151.6 + 360, -77.8))
    self.assertAlmostEqual(lat, -73.89148222666744914, 5)
    self.assertAlmostEqual(fract, 0.007113545719515548, 5)
    # Reversed direction: same crossing latitude, complementary fraction.
    lat, fract = da.latitudeGeodesicCrossesAntimeridian(QgsPointXY(-151.6, -77.8),
                                                        QgsPointXY(138.26237, -20.314687))
    self.assertAlmostEqual(lat, -73.89148222666744914, 5)
    self.assertAlmostEqual(fract, 0.9928864542804845, 5)
    # Sub-segments of the same geodesic: crossing latitude is unchanged,
    # only the fraction along the (shorter) segment differs.
    lat, fract = da.latitudeGeodesicCrossesAntimeridian(QgsPointXY(170.60188754234980024, -70.81368329001529105),
                                                        QgsPointXY(-164.61259948055175073, -76.66761193248410677))
    self.assertAlmostEqual(lat, -73.89148222666744914, 5)
    self.assertAlmostEqual(fract, 0.0879577697523441, 5)
    lat, fract = da.latitudeGeodesicCrossesAntimeridian(QgsPointXY(-164.61259948055175073, -76.66761193248410677),
                                                        QgsPointXY(170.60188754234980024,
                                                                   -70.81368329001529105))
    self.assertAlmostEqual(lat, -73.89148222666744914, 5)
    self.assertAlmostEqual(fract, 0.9120422302476558, 5)
    lat, fract = da.latitudeGeodesicCrossesAntimeridian(QgsPointXY(178.44469761238570982, -73.47820480021761114),
                                                        QgsPointXY(-179.21026002627399976, -74.08952948682963324))
    self.assertAlmostEqual(lat, -73.89148222666744914, 5)
    self.assertAlmostEqual(fract, 0.6713541474159178, 5)
    lat, fract = da.latitudeGeodesicCrossesAntimeridian(QgsPointXY(-179.21026002627399976, -74.08952948682963324),
                                                        QgsPointXY(178.44469761238570982,
                                                                   -73.47820480021761114))
    self.assertAlmostEqual(lat, -73.89148222666744914, 5)
    self.assertAlmostEqual(fract, 0.3286458525840822, 5)
    lat, fract = da.latitudeGeodesicCrossesAntimeridian(QgsPointXY(179.83103440731269984, -73.8481044794813215),
                                                        QgsPointXY(-179.93191793815378787, -73.90885909527753483))
    self.assertAlmostEqual(lat, -73.89148222666744914, 5)
    self.assertAlmostEqual(fract, 0.7135414998986486, 5)
    lat, fract = da.latitudeGeodesicCrossesAntimeridian(QgsPointXY(-179.93191793815378787, -73.90885909527753483),
                                                        QgsPointXY(179.83103440731269984, -73.8481044794813215))
    self.assertAlmostEqual(lat, -73.89148222666744914, 5)
    self.assertAlmostEqual(fract, 0.28645850010135143, 5)
    # Northern-hemisphere geodesic, both orders and a +360 variant.
    lat, fract = da.latitudeGeodesicCrossesAntimeridian(QgsPointXY(179.92498611649580198, 7.24703528617311754),
                                                        QgsPointXY(-178.20070563806575592, 16.09649962419504732))
    self.assertAlmostEqual(lat, 7.6112109902580265, 5)
    self.assertAlmostEqual(fract, 0.04111771567489498, 5)
    lat, fract = da.latitudeGeodesicCrossesAntimeridian(QgsPointXY(-178.20070563806575592, 16.09649962419504732),
                                                        QgsPointXY(179.92498611649580198, 7.24703528617311754))
    self.assertAlmostEqual(lat, 7.6112109902580265, 5)
    self.assertAlmostEqual(fract, 0.958882284325105, 5)
    lat, fract = da.latitudeGeodesicCrossesAntimeridian(
        QgsPointXY(360 - 178.20070563806575592, 16.09649962419504732),
        QgsPointXY(179.92498611649580198, 7.24703528617311754))
    self.assertAlmostEqual(lat, 7.6112109902580265, 5)
    self.assertAlmostEqual(fract, 0.95888228432510, 5)
    lat, fract = da.latitudeGeodesicCrossesAntimeridian(QgsPointXY(175.76717768974583578, 8.93749416467257873),
                                                        QgsPointXY(-175.15030911497356669, 8.59851183021221033))
    self.assertAlmostEqual(lat, 8.80683758146703966, 5)
    self.assertAlmostEqual(fract, 0.46581637044475815, 5)
    lat, fract = da.latitudeGeodesicCrossesAntimeridian(QgsPointXY(-175.15030911497356669, 8.59851183021221033),
                                                        QgsPointXY(175.76717768974583578,
                                                                   8.93749416467257873))
    self.assertAlmostEqual(lat, 8.80683758146703966, 5)
    self.assertAlmostEqual(fract, 0.5341836295552418, 5)
    # calculation should be ellipsoid dependent!
    # Custom sphere (equal semi-axes): slightly different results than WGS84.
    da.setEllipsoid('PARAMETER:6370997:6370997')
    lat, fract = da.latitudeGeodesicCrossesAntimeridian(QgsPointXY(-175.15030911497356669, 8.59851183021221033),
                                                        QgsPointXY(175.76717768974583578,
                                                                   8.93749416467257873))
    self.assertAlmostEqual(lat, 8.806658717133244, 5)
    self.assertAlmostEqual(fract, 0.5341851152000393, 5)
    # no ellipsoid
    # Planar fallback: crossing is a simple linear interpolation.
    da.setEllipsoid("NONE")
    lat, fract = da.latitudeGeodesicCrossesAntimeridian(QgsPointXY(-175, 8),
                                                        QgsPointXY(175,
                                                                   9))
    self.assertAlmostEqual(lat, 8.5, 5)
    self.assertAlmostEqual(fract, 0.5, 5)
    lat, fract = da.latitudeGeodesicCrossesAntimeridian(QgsPointXY(165, 8),
                                                        QgsPointXY(-175,
                                                                   9))
    self.assertAlmostEqual(lat, 8.75, 5)
    self.assertAlmostEqual(fract, 0.75, 5)
    lat, fract = da.latitudeGeodesicCrossesAntimeridian(QgsPointXY(-175, 8),
                                                        QgsPointXY(165,
                                                                   9))
    self.assertAlmostEqual(lat, 8.25, 5)
    self.assertAlmostEqual(fract, 0.25, 5)
def testGeodesicLine(self):
    """Exercise QgsDistanceArea.geodesicLine().

    The geodesic between two points is densified at the requested distance
    interval and returned as a multi-polyline; when the last argument is
    True the result is additionally broken into separate parts where it
    crosses the antimeridian.  Expected coordinates depend on the active
    ellipsoid and on the source CRS (reprojection is applied if needed).
    """
    da = QgsDistanceArea()
    crs = QgsCoordinateReferenceSystem(4326, QgsCoordinateReferenceSystem.EpsgCrsId)
    da.setSourceCrs(crs, QgsProject.instance().transformContext())
    da.setEllipsoid("WGS84")

    # long north-south geodesic crossing the antimeridian, 1000 km interval,
    # first with and then without breaking at the antimeridian
    g = QgsGeometry.fromMultiPolylineXY(da.geodesicLine(QgsPointXY(105.4, 66.4), QgsPointXY(208.4, -77.8),
                                                        1000000, True))
    self.assertEqual(g.asWkt(5),
                     'MultiLineString ((105.4 66.4, 114.11119 58.36882, 119.52376 49.95732, 123.30625 41.35664, 126.19835 32.6479, 128.57411 23.87234, 130.647 15.05482, 132.55465 6.21309, 134.39916 -2.63822, 136.27014 -11.48549, 138.26237 -20.31469, 140.4956 -29.10966, 143.14591 -37.84912, 146.5073 -46.50015, 151.13295 -55.00229, 158.2045 -63.2234, 170.60189 -70.81368, 180 -73.89148),(-180 -73.89148, -164.6126 -76.66761, -151.6 -77.8))')
    g = QgsGeometry.fromMultiPolylineXY(da.geodesicLine(QgsPointXY(105.4, 66.4), QgsPointXY(208.4, -77.8),
                                                        1000000, False))
    self.assertEqual(g.asWkt(5),
                     'MultiLineString ((105.4 66.4, 114.11119 58.36882, 119.52376 49.95732, 123.30625 41.35664, 126.19835 32.6479, 128.57411 23.87234, 130.647 15.05482, 132.55465 6.21309, 134.39916 -2.63822, 136.27014 -11.48549, 138.26237 -20.31469, 140.4956 -29.10966, 143.14591 -37.84912, 146.5073 -46.50015, 151.13295 -55.00229, 158.2045 -63.2234, 170.60189 -70.81368, -164.6126 -76.66761, -151.6 -77.8))')

    # same geodesic sampled at a much denser 100 km interval
    g = QgsGeometry.fromMultiPolylineXY(da.geodesicLine(QgsPointXY(105.4, 66.4), QgsPointXY(208.4, -77.8),
                                                        100000, True))
    self.assertEqual(g.asWkt(5),
                     'MultiLineString ((105.4 66.4, 106.50684 65.62452, 107.54925 64.84137, 108.53251 64.05125, 109.46143 63.25477, 110.34036 62.45245, 111.17326 61.6448, 111.96369 60.83224, 112.7149 60.01516, 113.42984 59.19392, 114.11119 58.36882, 114.76141 57.54016, 115.38271 56.70818, 115.97713 55.87314, 116.54653 55.03522, 117.09261 54.19464, 117.61695 53.35156, 118.12096 52.50615, 118.60597 51.65855, 119.0732 50.8089, 119.52376 49.95732, 119.95868 49.10392, 120.37892 48.24881, 120.78537 47.39209, 121.17884 46.53385, 121.5601 45.67416, 121.92984 44.81311, 122.28874 43.95077, 122.6374 43.08721, 122.97639 42.22247, 123.30625 41.35664, 123.62747 40.48975, 123.94053 39.62186, 124.24585 38.75302, 124.54386 37.88326, 124.83494 37.01265, 125.11945 36.14121, 125.39773 35.26898, 125.67011 34.39599, 125.93688 33.52229, 126.19835 32.6479, 126.45477 31.77286, 126.7064 30.89719, 126.9535 30.02092, 127.19628 29.14407, 127.43498 28.26668, 127.66979 27.38877, 127.90093 26.51035, 128.12857 25.63146, 128.35291 24.75212, 128.57411 23.87234, 128.79234 22.99214, 129.00775 22.11156, 129.22051 21.23059, 129.43076 20.34927, 129.63863 19.46761, 129.84427 18.58563, 130.0478 17.70334, 130.24935 16.82077, 130.44904 15.93792, 130.647 15.05482, 130.84333 14.17148, 131.03815 13.28792, 131.23156 12.40414, 131.42367 11.52017, 131.61458 10.63603, 131.8044 9.75171, 131.99322 8.86725, 132.18114 7.98265, 132.36825 7.09792, 132.55465 6.21309, 132.74043 5.32816, 132.92567 4.44315, 133.11048 3.55808, 133.29493 2.67295, 133.47912 1.78778, 133.66313 0.90258, 133.84706 0.01736, 134.03098 -0.86785, 134.21498 -1.75305, 134.39916 -2.63822, 134.5836 -3.52335, 134.76839 -4.40843, 134.95362 -5.29344, 135.13937 -6.17837, 135.32575 -7.06321, 135.51283 -7.94794, 135.70071 -8.83255, 135.8895 -9.71702, 136.07928 -10.60134, 136.27014 -11.48549, 136.46221 -12.36947, 136.65557 -13.25325, 136.85033 -14.13682, 137.04659 -15.02017, 137.24448 -15.90328, 137.44411 -16.78614, 137.64558 -17.66872, 137.84904 -18.55102, 138.05459 -19.43301, 138.26237 -20.31469, 138.47252 -21.19602, 138.68518 -22.077, 138.90049 -22.9576, 139.1186 -23.83781, 139.33968 -24.71761, 139.56389 -25.59697, 139.7914 -26.47588, 140.0224 -27.35431, 140.25706 -28.23225, 140.4956 -29.10966, 140.73822 -29.98653, 140.98515 -30.86282, 141.2366 -31.73851, 141.49283 -32.61358, 141.75409 -33.488, 142.02065 -34.36173, 142.2928 -35.23474, 142.57084 -36.107, 142.8551 -36.97847, 143.14591 -37.84912, 143.44364 -38.71891, 143.74868 -39.58779, 144.06142 -40.45572, 144.38232 -41.32265, 144.71183 -42.18852, 145.05045 -43.0533, 145.39872 -43.91692, 145.7572 -44.77931, 146.12651 -45.64041, 146.5073 -46.50015, 146.90028 -47.35845, 147.3062 -48.21523, 147.72589 -49.0704, 148.16022 -49.92387, 148.61014 -50.77552, 149.0767 -51.62525, 149.56099 -52.47294, 150.06423 -53.31844, 150.58774 -54.16161, 151.13295 -55.00229, 151.7014 -55.84031, 152.29481 -56.67548, 152.91501 -57.50758, 153.56405 -58.33637, 154.24414 -59.16162, 154.95771 -59.98302, 155.70745 -60.80027, 156.49628 -61.61301, 157.32744 -62.42086, 158.2045 -63.2234, 159.13138 -64.02012, 160.11242 -64.8105, 161.15241 -65.59393, 162.25662 -66.36973, 163.43088 -67.13713, 164.68163 -67.89527, 166.01595 -68.64317, 167.44162 -69.37973, 168.96717 -70.10371, 170.60189 -70.81368, 172.35586 -71.50806, 174.2399 -72.18501, 176.26551 -72.8425, 178.4447 -73.4782, 180 -73.89148),(-180 -73.89148, -179.21026 -74.08953, -176.68721 -74.67356, -173.97467 -75.22708, -171.06257 -75.74654, -167.94325 -76.22808, -164.6126 -76.66761, -161.07142 -77.06086, -157.32679 -77.40349, -153.39333 -77.69129, -151.6 -77.8))')
    g = QgsGeometry.fromMultiPolylineXY(da.geodesicLine(QgsPointXY(105.4, 66.4), QgsPointXY(208.4, -77.8),
                                                        100000, False))
    self.assertEqual(g.asWkt(5),
                     'MultiLineString ((105.4 66.4, 106.50684 65.62452, 107.54925 64.84137, 108.53251 64.05125, 109.46143 63.25477, 110.34036 62.45245, 111.17326 61.6448, 111.96369 60.83224, 112.7149 60.01516, 113.42984 59.19392, 114.11119 58.36882, 114.76141 57.54016, 115.38271 56.70818, 115.97713 55.87314, 116.54653 55.03522, 117.09261 54.19464, 117.61695 53.35156, 118.12096 52.50615, 118.60597 51.65855, 119.0732 50.8089, 119.52376 49.95732, 119.95868 49.10392, 120.37892 48.24881, 120.78537 47.39209, 121.17884 46.53385, 121.5601 45.67416, 121.92984 44.81311, 122.28874 43.95077, 122.6374 43.08721, 122.97639 42.22247, 123.30625 41.35664, 123.62747 40.48975, 123.94053 39.62186, 124.24585 38.75302, 124.54386 37.88326, 124.83494 37.01265, 125.11945 36.14121, 125.39773 35.26898, 125.67011 34.39599, 125.93688 33.52229, 126.19835 32.6479, 126.45477 31.77286, 126.7064 30.89719, 126.9535 30.02092, 127.19628 29.14407, 127.43498 28.26668, 127.66979 27.38877, 127.90093 26.51035, 128.12857 25.63146, 128.35291 24.75212, 128.57411 23.87234, 128.79234 22.99214, 129.00775 22.11156, 129.22051 21.23059, 129.43076 20.34927, 129.63863 19.46761, 129.84427 18.58563, 130.0478 17.70334, 130.24935 16.82077, 130.44904 15.93792, 130.647 15.05482, 130.84333 14.17148, 131.03815 13.28792, 131.23156 12.40414, 131.42367 11.52017, 131.61458 10.63603, 131.8044 9.75171, 131.99322 8.86725, 132.18114 7.98265, 132.36825 7.09792, 132.55465 6.21309, 132.74043 5.32816, 132.92567 4.44315, 133.11048 3.55808, 133.29493 2.67295, 133.47912 1.78778, 133.66313 0.90258, 133.84706 0.01736, 134.03098 -0.86785, 134.21498 -1.75305, 134.39916 -2.63822, 134.5836 -3.52335, 134.76839 -4.40843, 134.95362 -5.29344, 135.13937 -6.17837, 135.32575 -7.06321, 135.51283 -7.94794, 135.70071 -8.83255, 135.8895 -9.71702, 136.07928 -10.60134, 136.27014 -11.48549, 136.46221 -12.36947, 136.65557 -13.25325, 136.85033 -14.13682, 137.04659 -15.02017, 137.24448 -15.90328, 137.44411 -16.78614, 137.64558 -17.66872, 137.84904 -18.55102, 138.05459 -19.43301, 138.26237 -20.31469, 138.47252 -21.19602, 138.68518 -22.077, 138.90049 -22.9576, 139.1186 -23.83781, 139.33968 -24.71761, 139.56389 -25.59697, 139.7914 -26.47588, 140.0224 -27.35431, 140.25706 -28.23225, 140.4956 -29.10966, 140.73822 -29.98653, 140.98515 -30.86282, 141.2366 -31.73851, 141.49283 -32.61358, 141.75409 -33.488, 142.02065 -34.36173, 142.2928 -35.23474, 142.57084 -36.107, 142.8551 -36.97847, 143.14591 -37.84912, 143.44364 -38.71891, 143.74868 -39.58779, 144.06142 -40.45572, 144.38232 -41.32265, 144.71183 -42.18852, 145.05045 -43.0533, 145.39872 -43.91692, 145.7572 -44.77931, 146.12651 -45.64041, 146.5073 -46.50015, 146.90028 -47.35845, 147.3062 -48.21523, 147.72589 -49.0704, 148.16022 -49.92387, 148.61014 -50.77552, 149.0767 -51.62525, 149.56099 -52.47294, 150.06423 -53.31844, 150.58774 -54.16161, 151.13295 -55.00229, 151.7014 -55.84031, 152.29481 -56.67548, 152.91501 -57.50758, 153.56405 -58.33637, 154.24414 -59.16162, 154.95771 -59.98302, 155.70745 -60.80027, 156.49628 -61.61301, 157.32744 -62.42086, 158.2045 -63.2234, 159.13138 -64.02012, 160.11242 -64.8105, 161.15241 -65.59393, 162.25662 -66.36973, 163.43088 -67.13713, 164.68163 -67.89527, 166.01595 -68.64317, 167.44162 -69.37973, 168.96717 -70.10371, 170.60189 -70.81368, 172.35586 -71.50806, 174.2399 -72.18501, 176.26551 -72.8425, 178.4447 -73.4782, -179.21026 -74.08953, -176.68721 -74.67356, -173.97467 -75.22708, -171.06257 -75.74654, -167.94325 -76.22808, -164.6126 -76.66761, -161.07142 -77.06086, -157.32679 -77.40349, -153.39333 -77.69129, -151.6 -77.8))')

    # south-to-north geodesic crossing the antimeridian
    g = QgsGeometry.fromMultiPolylineXY(da.geodesicLine(QgsPointXY(121.4, -76.4), QgsPointXY(-121.6, 76.8),
                                                        1000000, True))
    self.assertEqual(g.asWkt(5),
                     'MultiLineString ((121.4 -76.4, 144.24671 -70.15471, 155.62067 -62.40689, 162.18341 -54.11007, 166.52394 -45.56426, 169.70696 -36.88448, 172.23571 -28.12423, 174.38053 -19.31324, 176.30526 -10.47118, 178.12301 -1.61326, 179.92499 7.24704, 180 7.61121),(-180 7.61121, -178.20071 16.0965, -176.15151 24.92076, -173.78654 33.70222, -170.88367 42.4158, -167.0472 51.01916, -161.47936 59.42708, -152.33578 67.43298, -134.83075 74.42214, -121.6 76.8))')
    g = QgsGeometry.fromMultiPolylineXY(da.geodesicLine(QgsPointXY(121.4, -76.4), QgsPointXY(-121.6, 76.8),
                                                        1000000, False))
    self.assertEqual(g.asWkt(5),
                     'MultiLineString ((121.4 -76.4, 144.24671 -70.15471, 155.62067 -62.40689, 162.18341 -54.11007, 166.52394 -45.56426, 169.70696 -36.88448, 172.23571 -28.12423, 174.38053 -19.31324, 176.30526 -10.47118, 178.12301 -1.61326, 179.92499 7.24704, -178.20071 16.0965, -176.15151 24.92076, -173.78654 33.70222, -170.88367 42.4158, -167.0472 51.01916, -161.47936 59.42708, -152.33578 67.43298, -134.83075 74.42214, -121.6 76.8))')

    # near-equatorial geodesic crossing the antimeridian
    g = QgsGeometry.fromMultiPolylineXY(da.geodesicLine(QgsPointXY(121.4, 6.4), QgsPointXY(-121.6, 2.8),
                                                        1000000, True))
    self.assertEqual(g.asWkt(5),
                     'MultiLineString ((121.4 6.4, 130.40033 7.32484, 139.43407 8.06935, 148.49637 8.61432, 157.57946 8.9455, 166.67342 9.05419, 175.76718 8.93749, 180 8.80684),(-180 8.80684, -175.15031 8.59851, -166.08891 8.04617, -157.05629 7.29488, -148.0572 6.36403, -139.09301 5.2773, -130.16168 4.06195, -121.6 2.8))')
    g = QgsGeometry.fromMultiPolylineXY(da.geodesicLine(QgsPointXY(121.4, 6.4), QgsPointXY(-121.6, 2.8),
                                                        1000000, False))
    self.assertEqual(g.asWkt(5),
                     'MultiLineString ((121.4 6.4, 130.40033 7.32484, 139.43407 8.06935, 148.49637 8.61432, 157.57946 8.9455, 166.67342 9.05419, 175.76718 8.93749, -175.15031 8.59851, -166.08891 8.04617, -157.05629 7.29488, -148.0572 6.36403, -139.09301 5.2773, -130.16168 4.06195, -121.6 2.8))')

    # different ellipsoid, should be respected
    da.setEllipsoid('PARAMETER:6370997:6370997')
    g = QgsGeometry.fromMultiPolylineXY(da.geodesicLine(QgsPointXY(121.4, 6.4), QgsPointXY(-121.6, 2.8),
                                                        1000000, False))
    self.assertEqual(g.asWkt(5),
                     'MultiLineString ((121.4 6.4, 130.41144 7.31297, 139.45604 8.04667, 148.52889 8.58224, 157.62221 8.90571, 166.72609 9.0086, 175.82954 8.88819, -175.07842 8.54766, -166.00754 7.99595, -156.96541 7.24741, -147.95669 6.32128, -138.98271 5.24103, -130.04141 4.03364, -121.6 2.8))')
    da.setEllipsoid("WGS84")

    # with reprojection: source coordinates given in EPSG:3857 meters
    da.setSourceCrs(QgsCoordinateReferenceSystem('EPSG:3857'), QgsProject.instance().transformContext())
    g = QgsGeometry.fromMultiPolylineXY(
        da.geodesicLine(QgsPointXY(-13536427, 14138932), QgsPointXY(13760912, -13248201),
                        1000000, False))
    self.assertEqual(g.asWkt(0),
                     'MultiLineString ((-13536427 14138932, -16514348 11691516, -17948849 9406595, -18744235 7552985, -19255354 6014890, -19622372 4688888, -19909239 3505045, 19925702 2415579, 19712755 1385803, 19513769 388441, 19318507 -600065, 19117459 -1602293, 18899973 -2642347, 18651869 -3748726, 18351356 -4958346, 17960498 -6322823, 17404561 -7918366, 16514601 -9855937, 14851845 -12232940, 13760912 -13248201))')
    g = QgsGeometry.fromMultiPolylineXY(
        da.geodesicLine(QgsPointXY(-13536427, 14138932), QgsPointXY(13760912, -13248201),
                        1000000, True))
    self.assertEqual(g.asWkt(0),
                     'MultiLineString ((-13536427 14138932, -16514348 11691516, -17948849 9406595, -18744235 7552985, -19255354 6014890, -19622372 4688888, -19909239 3505045, -20037508 2933522),(20037508 2933522, 19925702 2415579, 19712755 1385803, 19513769 388441, 19318507 -600065, 19117459 -1602293, 18899973 -2642347, 18651869 -3748726, 18351356 -4958346, 17960498 -6322823, 17404561 -7918366, 16514601 -9855937, 14851845 -12232940, 13760912 -13248201))')
    g = QgsGeometry.fromMultiPolylineXY(
        da.geodesicLine(QgsPointXY(18933544, -5448034), QgsPointXY(-11638480, 3962206),
                        1000000, True))
    self.assertEqual(g.asWkt(0),
                     'MultiLineString ((18933544 -5448034, 20037508 -4772933),(-20037508 -4772933, -20002064 -4748323, -19015781 -3988451, -18153035 -3204936, -17383137 -2416816, -16678635 -1632067, -16015884 -852355, -15374147 -76043, -14734258 699941, -14077193 1478790, -13382634 2262546, -12627598 3050380, -11785404 3835868, -11638480 3962206))')
def testSplitGeometryAtAntimeridian(self):
    """Exercise QgsDistanceArea.splitGeometryAtAntimeridian().

    Splitting inserts vertices where a geodesic segment crosses the +/-180
    meridian and breaks the line into parts there; the crossing latitude is
    computed on the currently configured ellipsoid.
    """
    da = QgsDistanceArea()
    crs = QgsCoordinateReferenceSystem(4326, QgsCoordinateReferenceSystem.EpsgCrsId)
    da.setSourceCrs(crs, QgsProject.instance().transformContext())
    da.setEllipsoid("WGS84")

    # noops: null/point geometries are passed through unchanged
    g = da.splitGeometryAtAntimeridian(QgsGeometry())
    self.assertTrue(g.isNull())
    g = da.splitGeometryAtAntimeridian(QgsGeometry.fromWkt('Point(1 2)'))
    self.assertEqual(g.asWkt(), 'Point (1 2)')
    g = da.splitGeometryAtAntimeridian(QgsGeometry.fromWkt('MultiPoint(1 2, 3 4)'))
    self.assertEqual(g.asWkt(), 'MultiPoint ((1 2),(3 4))')
    g = da.splitGeometryAtAntimeridian(QgsGeometry.fromWkt('PointZ(1 2 3)'))
    self.assertEqual(g.asWkt(), 'PointZ (1 2 3)')
    g = da.splitGeometryAtAntimeridian(QgsGeometry.fromWkt('PointM(1 2 3)'))
    self.assertEqual(g.asWkt(), 'PointM (1 2 3)')
    g = da.splitGeometryAtAntimeridian(QgsGeometry.fromWkt('LineString EMPTY'))
    self.assertEqual(g.asWkt(), 'MultiLineString EMPTY')

    # lines: output is always a MultiLineString, split only when crossing
    g = da.splitGeometryAtAntimeridian(QgsGeometry.fromWkt('LineString(0 0, -170 0)'))
    self.assertEqual(g.asWkt(), 'MultiLineString ((0 0, -170 0))')
    g = da.splitGeometryAtAntimeridian(QgsGeometry.fromWkt('LineString(-170 0, 0 0)'))
    self.assertEqual(g.asWkt(), 'MultiLineString ((-170 0, 0 0))')
    g = da.splitGeometryAtAntimeridian(QgsGeometry.fromWkt('LineString(179 0, -179 0)'))
    self.assertEqual(g.asWkt(), 'MultiLineString ((179 0, 180 0),(-180 0, -179 0))')
    g = da.splitGeometryAtAntimeridian(QgsGeometry.fromWkt('LineString(179 0, 181 0)'))
    self.assertEqual(g.asWkt(), 'MultiLineString ((179 0, 180 0),(-180 0, -179 0))')
    g = da.splitGeometryAtAntimeridian(QgsGeometry.fromWkt('LineString(-179 0, 179 0)'))
    self.assertEqual(g.asWkt(), 'MultiLineString ((-179 0, -180 0),(180 0, 179 0))')
    g = da.splitGeometryAtAntimeridian(QgsGeometry.fromWkt('LineString(181 0, 179 0)'))
    self.assertEqual(g.asWkt(), 'MultiLineString ((-179 0, -180 0),(180 0, 179 0))')
    g = da.splitGeometryAtAntimeridian(QgsGeometry.fromWkt('LineString(179 10, -179 -20)'))
    self.assertEqual(g.asWkt(3), 'MultiLineString ((179 10, 180 -5.362),(-180 -5.362, -179 -20))')
    g = da.splitGeometryAtAntimeridian(QgsGeometry.fromWkt('LineString(179 -80, -179 70)'))
    self.assertEqual(g.asWkt(3), 'MultiLineString ((179 -80, 180 -55.685),(-180 -55.685, -179 70))')

    # multiline input
    g = da.splitGeometryAtAntimeridian(QgsGeometry.fromWkt('MultiLineString((1 10, 50 30),(179 -80, -179 70))'))
    self.assertEqual(g.asWkt(3), 'MultiLineString ((1 10, 50 30),(179 -80, 180 -55.685),(-180 -55.685, -179 70))')
    g = da.splitGeometryAtAntimeridian(QgsGeometry.fromWkt('MultiLineString((1 10, 50 30),(179 -80, 179.99 70))'))
    self.assertEqual(g.asWkt(3), 'MultiLineString ((1 10, 50 30),(179 -80, 179.99 70))')

    # with z/m: interpolated values are added at the split vertex
    g = da.splitGeometryAtAntimeridian(QgsGeometry.fromWkt('LineStringZ(179 -80 1, -179 70 10)'))
    self.assertEqual(g.asWkt(3),
                     'MultiLineStringZ ((179 -80 1, 180 -55.685 2.466),(-180 -55.685 2.466, -179 70 10))')
    g = da.splitGeometryAtAntimeridian(QgsGeometry.fromWkt('LineStringM(179 -80 1, -179 70 10)'))
    self.assertEqual(g.asWkt(3),
                     'MultiLineStringM ((179 -80 1, 180 -55.685 2.466),(-180 -55.685 2.466, -179 70 10))')
    g = da.splitGeometryAtAntimeridian(QgsGeometry.fromWkt('LineStringZM(179 -80 1 -4, -179 70 10 -30)'))
    self.assertEqual(g.asWkt(3),
                     'MultiLineStringZM ((179 -80 1 -4, 180 -55.685 2.466 -8.234),(-180 -55.685 2.466 -8.234, -179 70 10 -30))')
    g = da.splitGeometryAtAntimeridian(
        QgsGeometry.fromWkt('MultiLineStringZ((179 -80 1, -179 70 10),(-170 -5 1, -181 10 5))'))
    self.assertEqual(g.asWkt(3),
                     'MultiLineStringZ ((179 -80 1, 180 -55.685 2.466),(-180 -55.685 2.466, -179 70 10),(-170 -5 1, -181 10 5))')

    # different ellipsoid - should change intersection latitude
    da.setEllipsoid('PARAMETER:6370997:6370997')
    g = da.splitGeometryAtAntimeridian(QgsGeometry.fromWkt('LineString(179 10, -179 -20)'))
    self.assertEqual(g.asWkt(3), 'MultiLineString ((179 10, 180 -5.361),(-180 -5.361, -179 -20))')

    # with reprojection: source coordinates given in EPSG:3857 meters
    da.setEllipsoid("WGS84")
    da.setSourceCrs(QgsCoordinateReferenceSystem('EPSG:3857'), QgsProject.instance().transformContext())
    g = da.splitGeometryAtAntimeridian(QgsGeometry.fromWkt('LineString( -13536427 14138932, 13760912 -13248201)'))
    self.assertEqual(g.asWkt(1),
                     'MultiLineString ((-13536427 14138932, -20037508.3 2933521.7),(20037508.3 2933521.7, 13760912 -13248201))')
if __name__ == '__main__':
    # Run the full test suite when this module is executed directly.
    unittest.main()
| gpl-2.0 |
Brocade-OpenSource/OpenStack-DNRM | dnrm/exceptions/base.py | 1 | 1442 | # vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2013 OpenStack Foundation.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Base exception handling.
"""
# When True, errors raised while %-formatting an exception message are
# re-raised instead of being swallowed (useful when debugging/testing).
_FATAL_EXCEPTION_FORMAT_ERRORS = False
class SupervisorException(Exception):
    """Base RDB Exception.

    Subclasses should override the ``message`` attribute with a
    printf-style format string; keyword arguments passed to the
    constructor are substituted into that template.
    """

    message = _("An unknown exception occurred.")

    def __init__(self, **kwargs):
        try:
            formatted = self.message % kwargs
        except Exception:
            if _FATAL_EXCEPTION_FORMAT_ERRORS:
                raise
            # at least get the core message out if something happened
            formatted = self.message
        super(SupervisorException, self).__init__(formatted)
| apache-2.0 |
csrocha/OpenUpgrade | openerp/addons/base/ir/ir_values.py | 228 | 26238 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2009 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import pickle
from openerp import tools
from openerp.osv import osv, fields
from openerp.osv.orm import except_orm
# Report/search-view fields excluded from processing.
# NOTE(review): not referenced within this chunk -- presumably used when
# copying/serializing action definitions; confirm against the full file.
EXCLUDED_FIELDS = set((
    'report_sxw_content', 'report_rml_content', 'report_sxw', 'report_rml',
    'report_sxw_content_data', 'report_rml_content_data', 'search_view', ))
#: Possible slots to bind an action to with :meth:`~.set_action`
ACTION_SLOTS = [
    "client_action_multi",   # sidebar wizard action
    "client_print_multi",    # sidebar report printing button
    "client_action_relate",  # sidebar related link
    "tree_but_open",         # double-click on item in tree view
    "tree_but_action",       # deprecated: same as tree_but_open
]
class ir_values(osv.osv):
"""Holds internal model-specific action bindings and user-defined default
field values. definitions. This is a legacy internal model, mixing
two different concepts, and will likely be updated or replaced in a
future version by cleaner, separate models. You should not depend
explicitly on it.
The purpose of each ``ir.values`` entry depends on its type, defined
by the ``key`` column:
* 'default': user-defined default values, used when creating new
records of this model:
* 'action': binding of an action to a particular *action slot* of
this model, making the action easily available in the user
interface for this model.
The ``key2`` column acts as a qualifier, further refining the type
of the entry. The possible values are:
* for 'default' entries: an optional condition restricting the
cases where this particular default value will be applicable,
or ``False`` for no condition
* for 'action' entries: the ``key2`` qualifier is one of the available
action slots, defining how this action can be invoked:
* ``'client_print_multi'`` for report printing actions that will
be available on views displaying items from this model
* ``'client_action_multi'`` for assistants (wizards) actions
that will be available in views displaying objects of this model
* ``'client_action_relate'`` for links towards related documents
that should be available in views displaying objects of this model
* ``'tree_but_open'`` for actions that will be triggered when
double-clicking an item from this model in a hierarchical tree view
Each entry is specific to a model (``model`` column), and for ``'actions'``
type, may even be made specific to a given record of that model when the
``res_id`` column contains a record ID (``False`` means it's global for
all records).
The content of the entry is defined by the ``value`` column, which may either
contain an arbitrary value, or a reference string defining the action that
should be executed.
.. rubric:: Usage: default values
The ``'default'`` entries are usually defined manually by the
users, and set by their UI clients calling :meth:`~.set_default`.
These default values are then automatically used by the
ORM every time a new record is about to be created, i.e. when
:meth:`~openerp.osv.osv.osv.default_get`
or :meth:`~openerp.osv.osv.osv.create` are called.
.. rubric:: Usage: action bindings
Business applications will usually bind their actions during
installation, and OpenERP UI clients will apply them as defined,
based on the list of actions included in the result of
:meth:`~openerp.osv.osv.osv.fields_view_get`,
or directly returned by explicit calls to :meth:`~.get_actions`.
"""
# Legacy internal model mixing user defaults and action bindings; the
# historical technical name must be preserved.
_name = 'ir.values'
def _value_unpickle(self, cursor, user, ids, name, arg, context=None):
    """Function-field getter: map record id to a displayable value.

    For ``'default'`` entries the stored value is a pickle payload, so it
    is unpickled for display; all other entries are returned verbatim.
    """
    source_field = name[:-9]  # strip the '_unpickle' suffix -> real column
    result = {}
    for record in self.browse(cursor, user, ids, context=context):
        raw = record[source_field]
        if record.key == 'default' and raw:
            # default values are pickled on the fly
            try:
                raw = str(pickle.loads(raw))
            except Exception:
                # leave the raw payload untouched if it cannot be decoded
                pass
        result[record.id] = raw
    return result
def _value_pickle(self, cursor, user, id, name, value, arg, context=None):
    """Function-field setter: store ``value`` into the real column.

    Values of ``'default'`` entries are pickled before being written.
    """
    ctx = dict(context or {})
    # the concurrency-check marker must not be propagated to write()
    ctx.pop(self.CONCURRENCY_CHECK_FIELD, None)
    record = self.browse(cursor, user, id, context=context)
    stored = value
    if record.key == 'default':
        # default values are pickled on the fly
        stored = pickle.dumps(stored)
    self.write(cursor, user, id, {name[:-9]: stored}, context=ctx)
def onchange_object_id(self, cr, uid, ids, object_id, context=None):
    """UI onchange helper: mirror the chosen ir.model into ``model``."""
    if not object_id:
        return {}
    model_record = self.pool.get('ir.model').browse(cr, uid, object_id, context=context)
    return {'value': {'model': model_record.model}}
def onchange_action_id(self, cr, uid, ids, action_id, context=None):
    """UI onchange helper: turn the chosen action into a 'model,id' reference."""
    if not action_id:
        return {}
    action = self.pool.get('ir.actions.actions').browse(cr, uid, action_id, context=context)
    reference = '%s,%s' % (action.type, action.id)
    return {'value': {'value_unpickle': reference}}
# Column definitions; see the class docstring for the meaning of
# key/key2/model/res_id combinations.
_columns = {
    'name': fields.char('Name', required=True),
    'model': fields.char('Model Name', select=True, required=True,
                         help="Model to which this entry applies"),
    # TODO: model_id and action_id should be read-write function fields
    'model_id': fields.many2one('ir.model', 'Model (change only)', size=128,
                                help="Model to which this entry applies - "
                                     "helper field for setting a model, will "
                                     "automatically set the correct model name"),
    'action_id': fields.many2one('ir.actions.actions', 'Action (change only)',
                                 help="Action bound to this entry - "
                                      "helper field for binding an action, will "
                                      "automatically set the correct reference"),
    'value': fields.text('Value', help="Default value (pickled) or reference to an action"),
    # computed façade over 'value': shows defaults unpickled, see
    # _value_unpickle/_value_pickle
    'value_unpickle': fields.function(_value_unpickle, fnct_inv=_value_pickle,
                                      type='text',
                                      string='Default value or action reference'),
    'key': fields.selection([('action','Action'),('default','Default')],
                            'Type', select=True, required=True,
                            help="- Action: an action attached to one slot of the given model\n"
                                 "- Default: a default value for a model field"),
    'key2': fields.char('Qualifier', select=True,
                        help="For actions, one of the possible action slots: \n"
                             " - client_action_multi\n"
                             " - client_print_multi\n"
                             " - client_action_relate\n"
                             " - tree_but_open\n"
                             "For defaults, an optional condition"
                        ,),
    'res_id': fields.integer('Record ID', select=True,
                             help="Database identifier of the record to which this applies. "
                                  "0 = for all records"),
    'user_id': fields.many2one('res.users', 'User', ondelete='cascade', select=True,
                               help="If set, action binding only applies for this user."),
    'company_id': fields.many2one('res.company', 'Company', ondelete='cascade', select=True,
                                  help="If set, action binding only applies for this company")
}

# New records are action bindings on the tree_but_open slot by default.
_defaults = {
    'key': 'action',
    'key2': 'tree_but_open',
}
def _auto_init(self, cr, context=None):
    """Extend table setup: ensure the composite lookup index exists.

    The (key, model, key2, res_id, user_id) index backs the frequent
    lookups performed by the default/action retrieval queries.
    """
    super(ir_values, self)._auto_init(cr, context)
    cr.execute('SELECT indexname FROM pg_indexes WHERE indexname = \'ir_values_key_model_key2_res_id_user_id_idx\'')
    if cr.fetchone() is None:
        cr.execute('CREATE INDEX ir_values_key_model_key2_res_id_user_id_idx ON ir_values (key, model, key2, res_id, user_id)')
def create(self, cr, uid, vals, context=None):
    """Create an entry and invalidate the cached defaults."""
    new_id = super(ir_values, self).create(cr, uid, vals, context=context)
    self.get_defaults_dict.clear_cache(self)
    return new_id
def write(self, cr, uid, ids, vals, context=None):
    """Update entries and invalidate the cached defaults."""
    result = super(ir_values, self).write(cr, uid, ids, vals, context=context)
    self.get_defaults_dict.clear_cache(self)
    return result
def unlink(self, cr, uid, ids, context=None):
    """Delete entries and invalidate the cached defaults."""
    result = super(ir_values, self).unlink(cr, uid, ids, context=context)
    self.get_defaults_dict.clear_cache(self)
    return result
def set_default(self, cr, uid, model, field_name, value, for_all_users=True, company_id=False, condition=False):
    """Register a default value for ``field_name`` on ``model``.

    Any previous default with the exact same scope (model, field_name,
    for_all_users, company_id, condition) is deleted first, so only one
    entry per scope ever exists.  Later, :meth:`~.get_defaults` returns
    the highest-priority default for each field, more specific scopes
    winning over less specific ones (user+company > user > company >
    global).

    :param string model: model name
    :param string field_name: field the default applies to
    :param value: default value to store (pickled for storage)
    :param bool for_all_users: if False, the default only applies to the
                               calling user
    :param int company_id: company the default is restricted to; ``True``
                           means the current user's company, ``False``
                           means no company restriction
    :param string condition: opaque condition string restricting
                             applicability (trimmed to 200 characters, so
                             values sharing the same first 200 characters
                             always match)
    :return: id of the newly created ir.values entry
    """
    if isinstance(value, unicode):
        value = value.encode('utf8')
    if company_id is True:
        # should be company-specific: resolve the current user's company
        company_id = self.pool.get('res.users').browse(cr, uid, uid).company_id.id

    owner_id = False if for_all_users else uid
    trimmed_condition = condition and condition[:200]

    # remove existing defaults for the same scope
    stale_ids = self.search(cr, uid, [
        ('key', '=', 'default'),
        ('key2', '=', trimmed_condition),
        ('model', '=', model),
        ('name', '=', field_name),
        ('user_id', '=', owner_id),
        ('company_id', '=', company_id),
    ])
    self.unlink(cr, uid, stale_ids)

    return self.create(cr, uid, {
        'name': field_name,
        'value': pickle.dumps(value),
        'model': model,
        'key': 'default',
        'key2': trimmed_condition,
        'user_id': owner_id,
        'company_id': company_id,
    })
def get_default(self, cr, uid, model, field_name, for_all_users=True, company_id=False, condition=False):
    """Return the default registered for exactly this scope, or ``None``.

    Unlike :meth:`~.get_defaults`, no priority resolution is applied: the
    scope (model, field_name, for_all_users, company_id, condition) must
    match exactly.
    """
    matching_ids = self.search(cr, uid, [
        ('key', '=', 'default'),
        ('key2', '=', condition and condition[:200]),
        ('model', '=', model),
        ('name', '=', field_name),
        ('user_id', '=', False if for_all_users else uid),
        ('company_id', '=', company_id),
    ])
    if not matching_ids:
        return None
    entry = self.browse(cr, uid, matching_ids)[0]
    # stored payload is pickled; decode it before returning
    return pickle.loads(entry.value.encode('utf-8'))
def get_defaults(self, cr, uid, model, condition=False):
    """Returns any default values that are defined for the current model and user,
    (and match ``condition``, if specified), previously registered via
    :meth:`~.set_default`.

    Defaults are global to a model, not field-specific, but an optional
    ``condition`` can be provided to restrict matching default values
    to those that were defined for the same condition (usually based
    on another field's value).

    Default values also have priorities depending on whom they apply
    to: only the highest priority value will be returned for any
    field. See :meth:`~.set_default` for more details.

    :param string model: model name
    :param string condition: optional condition specification that can be used to
                             restrict the applicability of the default values
                             (e.g. based on another field's value). This is an
                             opaque string as far as the API is concerned, but client
                             stacks typically use single-field conditions in the
                             form ``'key=stringified_value'``.
                             (Currently, the condition is trimmed to 200 characters,
                             so values that share the same first 200 characters always
                             match)
    :return: list of default values tuples of the form ``(id, field_name, value)``
             (``id`` is the ID of the default entry, usually irrelevant)
    """
    # use a direct SQL query for performance reasons,
    # this is called very often
    # NOTE: two-stage templating -- %%s placeholders survive the first
    # (Python) substitution below as %s parameter markers for cr.execute();
    # the lone %s is replaced with the key2/condition clause first.
    query = """SELECT v.id, v.name, v.value FROM ir_values v
               LEFT JOIN res_users u ON (v.user_id = u.id)
               WHERE v.key = %%s AND v.model = %%s
                   AND (v.user_id = %%s OR v.user_id IS NULL)
                   AND (v.company_id IS NULL OR
                        v.company_id =
                        (SELECT company_id from res_users where id = %%s)
                       )
               %s
               ORDER BY v.user_id, u.company_id"""
    params = ('default', model, uid, uid)
    if condition:
        query %= 'AND v.key2 = %s'
        params += (condition[:200],)
    else:
        query %= 'AND v.key2 is NULL'
    cr.execute(query, params)

    # keep only the highest priority default for each field
    # (the ORDER BY sorts more specific entries first, so setdefault()
    # retains the first -- highest priority -- row per field name)
    defaults = {}
    for row in cr.dictfetchall():
        defaults.setdefault(row['name'],
            (row['id'], row['name'], pickle.loads(row['value'].encode('utf-8'))))
    return defaults.values()
# use ormcache: this is called a lot by BaseModel.default_get()!
@tools.ormcache(skiparg=2)
def get_defaults_dict(self, cr, uid, model, condition=False):
    """Convenience wrapper around :meth:`~.get_defaults` returning the
    defaults as a ``{field_name: value}`` mapping."""
    return {field_name: value
            for _entry_id, field_name, value in self.get_defaults(cr, uid, model, condition)}
def set_action(self, cr, uid, name, action_slot, model, action, res_id=False):
"""Binds an the given action to the given model's action slot - for later
retrieval via :meth:`~.get_actions`. Any existing binding of the same action
to the same slot is first removed, allowing an update of the action's name.
See the class description for more details about the various action
slots: :class:`~ir_values`.
:param string name: action label, usually displayed by UI client
:param string action_slot: the action slot to which the action should be
bound to - one of ``client_action_multi``,
``client_print_multi``, ``client_action_relate``,
``tree_but_open``.
:param string model: model name
:param string action: action reference, in the form ``'model,id'``
:param int res_id: optional record id - will bind the action only to a
specific record of the model, not all records.
:return: id of the newly created ir.values entry
"""
assert isinstance(action, basestring) and ',' in action, \
'Action definition must be an action reference, e.g. "ir.actions.act_window,42"'
assert action_slot in ACTION_SLOTS, \
'Action slot (%s) must be one of: %r' % (action_slot, ACTION_SLOTS)
# remove existing action definition of same slot and value
search_criteria = [
('key', '=', 'action'),
('key2', '=', action_slot),
('model', '=', model),
('res_id', '=', res_id or 0), # int field -> NULL == 0
('value', '=', action),
]
self.unlink(cr, uid, self.search(cr, uid, search_criteria))
return self.create(cr, uid, {
'key': 'action',
'key2': action_slot,
'model': model,
'res_id': res_id,
'name': name,
'value': action,
})
def get_actions(self, cr, uid, action_slot, model, res_id=False, context=None):
"""Retrieves the list of actions bound to the given model's action slot.
See the class description for more details about the various action
slots: :class:`~.ir_values`.
:param string action_slot: the action slot to which the actions should be
bound to - one of ``client_action_multi``,
``client_print_multi``, ``client_action_relate``,
``tree_but_open``.
:param string model: model name
:param int res_id: optional record id - will bind the action only to a
specific record of the model, not all records.
:return: list of action tuples of the form ``(id, name, action_def)``,
where ``id`` is the ID of the default entry, ``name`` is the
action label, and ``action_def`` is a dict containing the
action definition as obtained by calling
:meth:`~openerp.osv.osv.osv.read` on the action record.
"""
assert action_slot in ACTION_SLOTS, 'Illegal action slot value: %s' % action_slot
# use a direct SQL query for performance reasons,
# this is called very often
query = """SELECT v.id, v.name, v.value FROM ir_values v
WHERE v.key = %s AND v.key2 = %s
AND v.model = %s
AND (v.res_id = %s
OR v.res_id IS NULL
OR v.res_id = 0)
ORDER BY v.id"""
cr.execute(query, ('action', action_slot, model, res_id or None))
results = {}
for action in cr.dictfetchall():
if not action['value']:
continue # skip if undefined
action_model_name, action_id = action['value'].split(',')
if action_model_name not in self.pool:
continue # unknow model? skip it
action_model = self.pool[action_model_name]
fields = [field for field in action_model._fields if field not in EXCLUDED_FIELDS]
# FIXME: needs cleanup
try:
action_def = action_model.read(cr, uid, int(action_id), fields, context)
if action_def:
if action_model_name in ('ir.actions.report.xml', 'ir.actions.act_window'):
groups = action_def.get('groups_id')
if groups:
cr.execute('SELECT 1 FROM res_groups_users_rel WHERE gid IN %s AND uid=%s',
(tuple(groups), uid))
if not cr.fetchone():
if action['name'] == 'Menuitem':
raise osv.except_osv('Error!',
'You do not have the permission to perform this operation!!!')
continue
# keep only the first action registered for each action name
results[action['name']] = (action['id'], action['name'], action_def)
except except_orm:
continue
return sorted(results.values())
def _map_legacy_model_list(self, model_list, map_fn, merge_results=False):
"""Apply map_fn to the various models passed, according to
legacy way to specify models/records.
"""
assert isinstance(model_list, (list, tuple)), \
"model_list should be in the form [model,..] or [(model,res_id), ..]"
results = []
for model in model_list:
res_id = False
if isinstance(model, (list, tuple)):
model, res_id = model
result = map_fn(model, res_id)
# some of the functions return one result at a time (tuple or id)
# and some return a list of many of them - care for both
if merge_results:
results.extend(result)
else:
results.append(result)
return results
# Backards-compatibility adapter layer to retrofit into split API
def set(self, cr, uid, key, key2, name, models, value, replace=True, isobject=False, meta=False, preserve_user=False, company=False):
"""Deprecated legacy method to set default values and bind actions to models' action slots.
Now dispatches to the newer API methods according to the value of ``key``: :meth:`~.set_default`
(``key=='default'``) or :meth:`~.set_action` (``key == 'action'``).
:deprecated: As of v6.1, ``set_default()`` or ``set_action()`` should be used directly.
"""
assert key in ['default', 'action'], "ir.values entry keys must be in ['default','action']"
if key == 'default':
def do_set(model,res_id):
return self.set_default(cr, uid, model, field_name=name, value=value,
for_all_users=(not preserve_user), company_id=company,
condition=key2)
elif key == 'action':
def do_set(model,res_id):
return self.set_action(cr, uid, name, action_slot=key2, model=model, action=value, res_id=res_id)
return self._map_legacy_model_list(models, do_set)
def get(self, cr, uid, key, key2, models, meta=False, context=None, res_id_req=False, without_user=True, key2_req=True):
"""Deprecated legacy method to get the list of default values or actions bound to models' action slots.
Now dispatches to the newer API methods according to the value of ``key``: :meth:`~.get_defaults`
(``key=='default'``) or :meth:`~.get_actions` (``key == 'action'``)
:deprecated: As of v6.1, ``get_defaults()`` or ``get_actions()`` should be used directly.
"""
assert key in ['default', 'action'], "ir.values entry keys must be in ['default','action']"
if key == 'default':
def do_get(model,res_id):
return self.get_defaults(cr, uid, model, condition=key2)
elif key == 'action':
def do_get(model,res_id):
return self.get_actions(cr, uid, action_slot=key2, model=model, res_id=res_id, context=context)
return self._map_legacy_model_list(models, do_get, merge_results=True)
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
hangzhang925/WhereHows | wherehows-etl/src/main/resources/jython/requests/packages/chardet/langthaimodel.py | 2930 | 11275 | ######################## BEGIN LICENSE BLOCK ########################
# The Original Code is Mozilla Communicator client code.
#
# The Initial Developer of the Original Code is
# Netscape Communications Corporation.
# Portions created by the Initial Developer are Copyright (C) 1998
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
# Mark Pilgrim - port to Python
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301 USA
######################### END LICENSE BLOCK #########################
# 255: Control characters that usually does not exist in any text
# 254: Carriage/Return
# 253: symbol (punctuation) that does not belong to word
# 252: 0 - 9
# The following result for thai was collected from a limited sample (1M).
# Character Mapping Table:
# Mapping from TIS-620 byte value (index 0-255) to frequency-order rank.
# Sentinel ranks (see the legend above): 255 control chars, 254 CR/LF,
# 253 punctuation/symbols, 252 digits; lower ranks mean more frequent
# characters in the Thai sample corpus.
TIS620CharToOrderMap = (
255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00
255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10
253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20
252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30
253,182,106,107,100,183,184,185,101, 94,186,187,108,109,110,111, # 40
188,189,190, 89, 95,112,113,191,192,193,194,253,253,253,253,253, # 50
253, 64, 72, 73,114, 74,115,116,102, 81,201,117, 90,103, 78, 82, # 60
 96,202, 91, 79, 84,104,105, 97, 98, 92,203,253,253,253,253,253, # 70
209,210,211,212,213, 88,214,215,216,217,218,219,220,118,221,222,
223,224, 99, 85, 83,225,226,227,228,229,230,231,232,233,234,235,
236, 5, 30,237, 24,238, 75, 8, 26, 52, 34, 51,119, 47, 58, 57,
 49, 53, 55, 43, 20, 19, 44, 14, 48, 3, 17, 25, 39, 62, 31, 54,
 45, 9, 16, 2, 61, 15,239, 12, 42, 46, 18, 21, 76, 4, 66, 63,
 22, 10, 1, 36, 23, 13, 40, 27, 32, 35, 86,240,241,242,243,244,
 11, 28, 41, 29, 33,245, 50, 37, 6, 7, 67, 77, 38, 93,246,247,
 68, 56, 59, 65, 69, 60, 70, 80, 71, 87,248,249,250,251,252,253,
)
# Model Table:
# total sequences: 100%
# first 512 sequences: 92.6386%
# first 1024 sequences:7.3177%
# rest sequences: 1.0230%
# negative sequences: 0.0436%
ThaiLangModel = (
0,1,3,3,3,3,0,0,3,3,0,3,3,0,3,3,3,3,3,3,3,3,0,0,3,3,3,0,3,3,3,3,
0,3,3,0,0,0,1,3,0,3,3,2,3,3,0,1,2,3,3,3,3,0,2,0,2,0,0,3,2,1,2,2,
3,0,3,3,2,3,0,0,3,3,0,3,3,0,3,3,3,3,3,3,3,3,3,0,3,2,3,0,2,2,2,3,
0,2,3,0,0,0,0,1,0,1,2,3,1,1,3,2,2,0,1,1,0,0,1,0,0,0,0,0,0,0,1,1,
3,3,3,2,3,3,3,3,3,3,3,3,3,3,3,2,2,2,2,2,2,2,3,3,2,3,2,3,3,2,2,2,
3,1,2,3,0,3,3,2,2,1,2,3,3,1,2,0,1,3,0,1,0,0,1,0,0,0,0,0,0,0,1,1,
3,3,2,2,3,3,3,3,1,2,3,3,3,3,3,2,2,2,2,3,3,2,2,3,3,2,2,3,2,3,2,2,
3,3,1,2,3,1,2,2,3,3,1,0,2,1,0,0,3,1,2,1,0,0,1,0,0,0,0,0,0,1,0,1,
3,3,3,3,3,3,2,2,3,3,3,3,2,3,2,2,3,3,2,2,3,2,2,2,2,1,1,3,1,2,1,1,
3,2,1,0,2,1,0,1,0,1,1,0,1,1,0,0,1,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,
3,3,3,2,3,2,3,3,2,2,3,2,3,3,2,3,1,1,2,3,2,2,2,3,2,2,2,2,2,1,2,1,
2,2,1,1,3,3,2,1,0,1,2,2,0,1,3,0,0,0,1,1,0,0,0,0,0,2,3,0,0,2,1,1,
3,3,2,3,3,2,0,0,3,3,0,3,3,0,2,2,3,1,2,2,1,1,1,0,2,2,2,0,2,2,1,1,
0,2,1,0,2,0,0,2,0,1,0,0,1,0,0,0,1,1,1,1,0,0,0,0,0,0,0,0,0,0,1,0,
3,3,2,3,3,2,0,0,3,3,0,2,3,0,2,1,2,2,2,2,1,2,0,0,2,2,2,0,2,2,1,1,
0,2,1,0,2,0,0,2,0,1,1,0,1,0,0,0,0,0,0,1,0,0,1,0,0,0,0,0,0,0,0,0,
3,3,2,3,2,3,2,0,2,2,1,3,2,1,3,2,1,2,3,2,2,3,0,2,3,2,2,1,2,2,2,2,
1,2,2,0,0,0,0,2,0,1,2,0,1,1,1,0,1,0,3,1,1,0,0,0,0,0,0,0,0,0,1,0,
3,3,2,3,3,2,3,2,2,2,3,2,2,3,2,2,1,2,3,2,2,3,1,3,2,2,2,3,2,2,2,3,
3,2,1,3,0,1,1,1,0,2,1,1,1,1,1,0,1,0,1,1,0,0,0,0,0,0,0,0,0,2,0,0,
1,0,0,3,0,3,3,3,3,3,0,0,3,0,2,2,3,3,3,3,3,0,0,0,1,1,3,0,0,0,0,2,
0,0,1,0,0,0,0,0,0,0,2,3,0,0,0,3,0,2,0,0,0,0,0,3,0,0,0,0,0,0,0,0,
2,0,3,3,3,3,0,0,2,3,0,0,3,0,3,3,2,3,3,3,3,3,0,0,3,3,3,0,0,0,3,3,
0,0,3,0,0,0,0,2,0,0,2,1,1,3,0,0,1,0,0,2,3,0,1,0,0,0,0,0,0,0,1,0,
3,3,3,3,2,3,3,3,3,3,3,3,1,2,1,3,3,2,2,1,2,2,2,3,1,1,2,0,2,1,2,1,
2,2,1,0,0,0,1,1,0,1,0,1,1,0,0,0,0,0,1,1,0,0,1,0,0,0,0,0,0,0,0,0,
3,0,2,1,2,3,3,3,0,2,0,2,2,0,2,1,3,2,2,1,2,1,0,0,2,2,1,0,2,1,2,2,
0,1,1,0,0,0,0,1,0,1,1,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,3,3,3,2,1,3,3,1,1,3,0,2,3,1,1,3,2,1,1,2,0,2,2,3,2,1,1,1,1,1,2,
3,0,0,1,3,1,2,1,2,0,3,0,0,0,1,0,3,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,
3,3,1,1,3,2,3,3,3,1,3,2,1,3,2,1,3,2,2,2,2,1,3,3,1,2,1,3,1,2,3,0,
2,1,1,3,2,2,2,1,2,1,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,
3,3,2,3,2,3,3,2,3,2,3,2,3,3,2,1,0,3,2,2,2,1,2,2,2,1,2,2,1,2,1,1,
2,2,2,3,0,1,3,1,1,1,1,0,1,1,0,2,1,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,3,3,3,2,3,2,2,1,1,3,2,3,2,3,2,0,3,2,2,1,2,0,2,2,2,1,2,2,2,2,1,
3,2,1,2,2,1,0,2,0,1,0,0,1,1,0,0,0,0,0,1,1,0,1,0,0,0,0,0,0,0,0,1,
3,3,3,3,3,2,3,1,2,3,3,2,2,3,0,1,1,2,0,3,3,2,2,3,0,1,1,3,0,0,0,0,
3,1,0,3,3,0,2,0,2,1,0,0,3,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,3,3,2,3,2,3,3,0,1,3,1,1,2,1,2,1,1,3,1,1,0,2,3,1,1,1,1,1,1,1,1,
3,1,1,2,2,2,2,1,1,1,0,0,2,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,
3,2,2,1,1,2,1,3,3,2,3,2,2,3,2,2,3,1,2,2,1,2,0,3,2,1,2,2,2,2,2,1,
3,2,1,2,2,2,1,1,1,1,0,0,1,1,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,3,3,3,3,3,3,3,1,3,3,0,2,1,0,3,2,0,0,3,1,0,1,1,0,1,0,0,0,0,0,1,
1,0,0,1,0,3,2,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,0,2,2,2,3,0,0,1,3,0,3,2,0,3,2,2,3,3,3,3,3,1,0,2,2,2,0,2,2,1,2,
0,2,3,0,0,0,0,1,0,1,0,0,1,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,
3,0,2,3,1,3,3,2,3,3,0,3,3,0,3,2,2,3,2,3,3,3,0,0,2,2,3,0,1,1,1,3,
0,0,3,0,0,0,2,2,0,1,3,0,1,2,2,2,3,0,0,0,0,0,1,0,0,0,0,0,0,0,0,1,
3,2,3,3,2,0,3,3,2,2,3,1,3,2,1,3,2,0,1,2,2,0,2,3,2,1,0,3,0,0,0,0,
3,0,0,2,3,1,3,0,0,3,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,1,3,2,2,2,1,2,0,1,3,1,1,3,1,3,0,0,2,1,1,1,1,2,1,1,1,0,2,1,0,1,
1,2,0,0,0,3,1,1,0,0,0,0,1,0,1,0,0,1,0,1,0,0,0,0,0,3,1,0,0,0,1,0,
3,3,3,3,2,2,2,2,2,1,3,1,1,1,2,0,1,1,2,1,2,1,3,2,0,0,3,1,1,1,1,1,
3,1,0,2,3,0,0,0,3,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,2,3,0,3,3,0,2,0,0,0,0,0,0,0,3,0,0,1,0,0,0,0,0,0,0,0,0,0,0,
0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,2,3,1,3,0,0,1,2,0,0,2,0,3,3,2,3,3,3,2,3,0,0,2,2,2,0,0,0,2,2,
0,0,1,0,0,0,0,3,0,0,0,0,2,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,
0,0,0,3,0,2,0,0,0,0,0,0,0,0,0,0,1,2,3,1,3,3,0,0,1,0,3,0,0,0,0,0,
0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,3,1,2,3,1,2,3,1,0,3,0,2,2,1,0,2,1,1,2,0,1,0,0,1,1,1,1,0,1,0,0,
1,0,0,0,0,1,1,0,3,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,3,3,3,2,1,0,1,1,1,3,1,2,2,2,2,2,2,1,1,1,1,0,3,1,0,1,3,1,1,1,1,
1,1,0,2,0,1,3,1,1,0,0,1,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,2,0,1,
3,0,2,2,1,3,3,2,3,3,0,1,1,0,2,2,1,2,1,3,3,1,0,0,3,2,0,0,0,0,2,1,
0,1,0,0,0,0,1,2,0,1,1,3,1,1,2,2,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,
0,0,3,0,0,1,0,0,0,3,0,0,3,0,3,1,0,1,1,1,3,2,0,0,0,3,0,0,0,0,2,0,
0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,2,0,0,0,0,0,0,0,0,0,
3,3,1,3,2,1,3,3,1,2,2,0,1,2,1,0,1,2,0,0,0,0,0,3,0,0,0,3,0,0,0,0,
3,0,0,1,1,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,0,1,2,0,3,3,3,2,2,0,1,1,0,1,3,0,0,0,2,2,0,0,0,0,3,1,0,1,0,0,0,
0,0,0,0,0,0,0,0,0,1,0,1,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,0,2,3,1,2,0,0,2,1,0,3,1,0,1,2,0,1,1,1,1,3,0,0,3,1,1,0,2,2,1,1,
0,2,0,0,0,0,0,1,0,1,0,0,1,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,0,0,3,1,2,0,0,2,2,0,1,2,0,1,0,1,3,1,2,1,0,0,0,2,0,3,0,0,0,1,0,
0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,0,1,1,2,2,0,0,0,2,0,2,1,0,1,1,0,1,1,1,2,1,0,0,1,1,1,0,2,1,1,1,
0,1,1,0,0,0,0,0,0,1,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,1,0,1,
0,0,0,2,0,1,3,1,1,1,1,0,0,0,0,3,2,0,1,0,0,0,1,2,0,0,0,1,0,0,0,0,
0,0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,3,3,3,3,1,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
1,0,2,3,2,2,0,0,0,1,0,0,0,0,2,3,2,1,2,2,3,0,0,0,2,3,1,0,0,0,1,1,
0,0,1,0,0,0,0,0,0,0,1,0,0,1,0,0,0,0,0,1,1,0,1,0,0,0,0,0,0,0,0,0,
3,3,2,2,0,1,0,0,0,0,2,0,2,0,1,0,0,0,1,1,0,0,0,2,1,0,1,0,1,1,0,0,
0,1,0,2,0,0,1,0,3,0,1,0,0,0,2,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,3,1,0,0,1,0,0,0,0,0,1,1,2,0,0,0,0,1,0,0,1,3,1,0,0,0,0,1,1,0,0,
0,1,0,0,0,0,3,0,0,0,0,0,0,3,0,0,0,0,0,0,0,3,0,0,0,0,0,0,0,0,0,0,
3,3,1,1,1,1,2,3,0,0,2,1,1,1,1,1,0,2,1,1,0,0,0,2,1,0,1,2,1,1,0,1,
2,1,0,3,0,0,0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
1,3,1,0,0,0,0,0,0,0,3,0,0,0,3,0,0,0,0,0,0,0,0,1,1,0,0,0,0,0,0,1,
0,0,0,2,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,3,2,0,0,0,0,0,0,1,2,1,0,1,1,0,2,0,0,1,0,0,2,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,2,0,0,0,1,3,0,1,0,0,0,2,0,0,0,0,0,0,0,1,2,0,0,0,0,0,
3,3,0,0,1,1,2,0,0,1,2,1,0,1,1,1,0,1,1,0,0,2,1,1,0,1,0,0,1,1,1,0,
0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,3,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,
2,2,2,1,0,0,0,0,1,0,0,0,0,3,0,0,0,0,0,0,0,0,0,3,0,0,0,0,0,0,0,0,
2,0,0,0,0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
2,3,0,0,1,1,0,0,0,2,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
1,1,0,1,2,0,1,2,0,0,1,1,0,2,0,1,0,0,1,0,0,0,0,1,0,0,0,2,0,0,0,0,
1,0,0,1,0,1,1,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,1,0,0,0,0,0,0,0,1,1,0,1,1,0,2,1,3,0,0,0,0,1,1,0,0,0,0,0,0,0,3,
1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
2,0,1,0,1,0,0,2,0,0,2,0,0,1,1,2,0,0,1,1,0,0,0,1,0,0,0,1,1,0,0,0,
1,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,
1,0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,1,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,1,1,0,0,0,
2,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,3,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
2,0,0,0,0,2,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,1,0,1,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,1,3,0,0,0,
2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,1,0,0,0,0,
1,0,0,0,0,0,0,0,0,1,0,0,0,0,2,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,1,1,0,0,2,1,0,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
)
# Language-model descriptor consumed by chardet's SBCharSetProber: ties the
# byte->rank map and the bigram precedence matrix together with the tuning
# constants for TIS-620 (Thai) detection.
TIS620ThaiModel = {
  'charToOrderMap': TIS620CharToOrderMap,
  'precedenceMatrix': ThaiLangModel,
  # Fraction of bigrams in the training sample that were "positive"
  # (likely) sequences; used to scale the prober's confidence.
  'mTypicalPositiveRatio': 0.926386,
  # ASCII letters are not evidence for/against Thai, so they are ignored.
  'keepEnglishLetter': False,
  'charsetName': "TIS-620"
}
# flake8: noqa
| apache-2.0 |
ciex/motor | lib/gae_mini_profiler/sampling_profiler.py | 13 | 7408 | """CPU profiler that works by sampling the call stack periodically.
This profiler provides a very simplistic view of where your request is spending
its time. It does this by periodically sampling your request's call stack to
figure out in which functions real time is being spent.
PRO: since the profiler only samples the call stack occasionally, it has much
less overhead than an instrumenting profiler, and avoids biases that
instrumenting profilers have due to instrumentation overhead (which causes
instrumenting profilers to overstate how much time is spent in frequently
called functions, or functions with deep call stacks).
CON: since the profiler only samples, it does not allow you to accurately
answer a question like, "how much time was spent in routine X?", especially if
routine X takes relatively little time. (You *can* answer questions like "what
is the ratio of time spent in routine X vs routine Y," at least if both
routines take a reasonable amount of time.) It is better suited for answering
the question, "Where is the time spent by my app?"
"""
from collections import defaultdict
import logging
import os
import sys
import time
import threading
import traceback
from gae_mini_profiler import util
_is_dev_server = os.environ["SERVER_SOFTWARE"].startswith("Devel")
class InspectingThread(threading.Thread):
    """Background worker that repeatedly asks a Profile to sample stacks.

    The thread loops -- sample, sleep, repeat -- until stop() is called.
    On the single-threaded dev server it bails out after the first
    successful sample, since the request thread cannot make progress
    while being inspected repeatedly.
    """

    SAMPLES_PER_SECOND = 250

    def __init__(self, profile=None):
        super(InspectingThread, self).__init__()
        self._stop_event = threading.Event()
        self.profile = profile

    def stop(self):
        """Ask the sampling loop to terminate after its current iteration."""
        # http://stackoverflow.com/questions/323972/is-there-any-way-to-kill-a-thread-in-python
        self._stop_event.set()

    def should_stop(self):
        return self._stop_event.is_set()

    def run(self):
        """Run periodic inspections until stopped via stop()."""
        pause = 1.0 / InspectingThread.SAMPLES_PER_SECOND
        while not self.should_stop():
            # Snapshot the profiled request thread's call stack...
            self.profile.take_sample()
            # ...then yield so the request itself can do some work.
            time.sleep(pause)
            # Dev server special case: one sample is all we take (see class
            # docstring).
            if _is_dev_server and self.profile.samples:
                break
class ProfileSample(object):
    """Single stack trace sample gathered during a periodic inspection."""

    def __init__(self, stack):
        # Extract (filename, lineno, func_name, source_line) tuples up front
        # so the live frame object is not retained after sampling.
        self.stack_trace = traceback.extract_stack(stack)
class Profile(object):
    """Profiler that periodically inspects a request and logs stack traces."""

    def __init__(self):
        # All saved stack trace samples (list of ProfileSample)
        self.samples = []
        # Thread id for the request thread currently being profiled
        self.current_request_thread_id = None
        # Thread that constantly waits, inspects, waits, inspect, ...
        self.inspecting_thread = None

    def results(self):
        """Return sampling results in a dictionary for template context.

        Aggregates samples per (source line, file:line, function) and reports,
        for each, how many samples it appeared in and the corresponding
        percentage of all samples.
        """
        aggregated_calls = defaultdict(int)
        total_samples = len(self.samples)
        # Count, for every frame of every sample, how often each call site
        # appeared. (If there are no samples, the loop body never runs and the
        # division below is never reached.)
        for sample in self.samples:
            for filename, line_num, function_name, src in sample.stack_trace:
                aggregated_calls["%s\n\n%s:%s (%s)" %
                        (src, filename, line_num, function_name)] += 1
        # Turn aggregated call samples into dictionary of results
        calls = [{
            "func_desc": item[0],
            "func_desc_short": util.short_method_fmt(item[0]),
            "count_samples": item[1],
            "per_samples": "%s%%" % util.decimal_fmt(
                100.0 * item[1] / total_samples),
        } for item in aggregated_calls.items()]
        # Sort call sample results by # of times calls appeared in a sample
        calls = sorted(calls, reverse=True,
                key=lambda call: call["count_samples"])
        return {
            "calls": calls,
            "total_samples": total_samples,
            "is_dev_server": _is_dev_server,
        }

    def take_sample(self):
        """Record one stack sample of the request thread, if identifiable."""
        # Look at stacks of all existing threads...
        # See http://bzimmer.ziclix.com/2008/12/17/python-thread-dumps/
        for thread_id, stack in sys._current_frames().items():
            # ...but only sample from the main request thread.
            if _is_dev_server:
                # In development, current_request_thread_id won't be set
                # properly. threading.current_thread().ident always returns -1
                # in dev. So instead, we just take a peek at the stack's
                # current package to figure out if it is the request thread.
                # Even though the dev server is single-threaded,
                # sys._current_frames will return multiple threads, because
                # some of them are spawned by the App Engine dev server for
                # internal purposes. We don't want to sample these internal dev
                # server threads -- we want to sample the thread that is
                # running the current request. Since the dev server will be
                # running this sampling code immediately from the run() code
                # below, we can spot this thread's stack by looking at its
                # global namespace (f_globals) and making sure it's currently
                # in the gae_mini_profiler package.
                should_sample = (stack.f_globals["__package__"] ==
                        "gae_mini_profiler")
            else:
                # In production, current_request_thread_id will be set properly
                # by threading.current_thread().ident.
                # TODO(kamens): this profiler will need work if we ever
                # actually use multiple threads in a single request and want to
                # profile more than one of them.
                should_sample = thread_id == self.current_request_thread_id
            if should_sample:
                # Grab a sample of this thread's current stack
                self.samples.append(ProfileSample(stack))

    def run(self, fxn):
        """Run function with sampling profiler enabled, saving results.

        Spawns an InspectingThread that samples this (request) thread while
        ``fxn`` executes, and returns ``fxn``'s return value.
        """
        if not hasattr(threading, "current_thread"):
            # Sampling profiler is not supported in Python2.5
            logging.warn("The sampling profiler is not supported in Python2.5")
            return fxn()
        # Store the thread id for the current request's thread. This lets
        # the inspecting thread know which thread to inspect.
        self.current_request_thread_id = threading.current_thread().ident
        # Start the thread that will be periodically inspecting the frame
        # stack of this current request thread
        self.inspecting_thread = InspectingThread(profile=self)
        self.inspecting_thread.start()
        try:
            # Run the request fxn which will be inspected by the inspecting
            # thread.
            return fxn()
        finally:
            # Stop and clear the inspecting thread
            # NOTE(review): the thread is signalled to stop but never
            # join()ed, so it may take one final sample after the request
            # completes -- confirm this is acceptable.
            self.inspecting_thread.stop()
            self.inspecting_thread = None
| apache-2.0 |
zachjanicki/osf.io | admin/meetings/views.py | 1 | 4642 | from __future__ import unicode_literals
from copy import deepcopy
from django.views.generic import ListView, FormView
from django.core.urlresolvers import reverse
from django.http import Http404
from framework.auth.core import get_user
from website.conferences.model import Conference, DEFAULT_FIELD_NAMES
from website.conferences.exceptions import ConferenceError
from admin.base.utils import OSFAdmin
from admin.meetings.forms import MeetingForm
from admin.meetings.serializers import serialize_meeting
class MeetingListView(OSFAdmin, ListView):
    """Paginated admin listing of every Conference record."""

    template_name = 'meetings/list.html'
    paginate_by = 10
    paginate_orphans = 1
    context_object_name = 'meeting'

    def get_queryset(self):
        # All conferences, active or not.
        return Conference.find()

    def get_context_data(self, **kwargs):
        conferences = kwargs.pop('object_list', self.object_list)
        # Serialize only the current page's worth of conferences.
        paginator, page, conferences, is_paginated = self.paginate_queryset(
            conferences, self.get_paginate_by(conferences)
        )
        kwargs.setdefault('meetings', map(serialize_meeting, conferences))
        kwargs.setdefault('page', page)
        return super(MeetingListView, self).get_context_data(**kwargs)
class MeetingFormView(OSFAdmin, FormView):
    """Admin detail/edit view for an existing Conference, keyed by endpoint."""

    template_name = 'meetings/detail.html'
    form_class = MeetingForm

    def dispatch(self, request, *args, **kwargs):
        # Resolve the conference once per request; active=False so inactive
        # meetings remain editable.
        endpoint = self.kwargs.get('endpoint')
        try:
            self.conf = Conference.get_by_endpoint(endpoint, active=False)
        except ConferenceError:
            raise Http404('Meeting with endpoint "{}" not found'.format(
                endpoint
            ))
        return super(MeetingFormView, self).dispatch(request, *args, **kwargs)

    def get_context_data(self, **kwargs):
        kwargs.setdefault('endpoint', self.kwargs.get('endpoint'))
        return super(MeetingFormView, self).get_context_data(**kwargs)

    def get_initial(self):
        # Pre-populate the form from the existing conference; 'edit' flags
        # the form as updating rather than creating.
        self.initial = serialize_meeting(self.conf)
        self.initial.setdefault('edit', True)
        return super(MeetingFormView, self).get_initial()

    def form_valid(self, form):
        # Split submitted data into custom field-name overrides vs. regular
        # conference attributes.
        custom_fields, data = get_custom_fields(form.cleaned_data)
        # Only resolve admin emails to users when the admin list changed.
        if 'admins' in form.changed_data:
            admin_users = get_admin_users(data.get('admins'))
            self.conf.admins = admin_users
        self.conf.name = data.get('name')
        self.conf.info_url = data.get('info_url')
        self.conf.logo_url = data.get('logo_url')
        self.conf.active = data.get('active')
        self.conf.public_projects = data.get('public_projects')
        self.conf.poster = data.get('poster')
        self.conf.talk = data.get('talk')
        self.conf.field_names.update(custom_fields)
        self.conf.save()
        return super(MeetingFormView, self).form_valid(form)

    @property
    def success_url(self):
        # Redirect back to this meeting's detail page after a save.
        return reverse('meetings:detail',
                       kwargs={'endpoint': self.kwargs.get('endpoint')})
class MeetingCreateFormView(OSFAdmin, FormView):
    """Admin view for creating a new Conference."""

    template_name = 'meetings/create.html'
    form_class = MeetingForm

    def get_initial(self):
        # Seed the form with the stock field-name labels; 'edit'=False marks
        # it as a creation form.
        self.initial.update(DEFAULT_FIELD_NAMES)
        self.initial.setdefault('edit', False)
        return super(MeetingCreateFormView, self).get_initial()

    def form_valid(self, form):
        # Split submitted data into custom field-name overrides vs. regular
        # conference attributes.
        custom_fields, data = get_custom_fields(form.cleaned_data)
        endpoint = data.pop('endpoint')
        # Stash the endpoint so get_success_url() can build the detail URL.
        self.kwargs.setdefault('endpoint', endpoint)
        # Form validation already checks emails for existence
        admin_users = get_admin_users(data.pop('admins'))
        # Form validation already catches if a conference endpoint exists
        new_conf = Conference(
            endpoint=endpoint,
            admins=admin_users,
            **data
        )
        new_conf.field_names.update(custom_fields)
        new_conf.save()
        return super(MeetingCreateFormView, self).form_valid(form)

    def get_success_url(self):
        return reverse('meetings:detail',
                       kwargs={'endpoint': self.kwargs.get('endpoint')})
def get_custom_fields(data):
    """Split form data into (field_name_overrides, regular_fields).

    Keys listed in DEFAULT_FIELD_NAMES are the conference's customizable
    field-name labels; everything else is a regular meeting attribute.
    The input dict is left untouched -- both returned dicts are built from
    a deep copy.
    """
    remaining = deepcopy(data)
    field_names = {}
    for key in data:
        if key in DEFAULT_FIELD_NAMES:
            field_names[key] = remaining.pop(key)
    return field_names, remaining
def get_admin_users(admins):
    """Resolve a list of admin email addresses to OSF user objects.

    When used with MeetingForm, validation has already rejected emails
    that do not belong to an existing OSF user, so every lookup here is
    expected to succeed.
    """
    admin_users = []
    for email in admins:
        admin_users.append(get_user(email=email))
    return admin_users
| apache-2.0 |
CyanogenMod/android_kernel_samsung_msm8660-q1 | Documentation/target/tcm_mod_builder.py | 3119 | 42754 | #!/usr/bin/python
# The TCM v4 multi-protocol fabric module generation script for drivers/target/$NEW_MOD
#
# Copyright (c) 2010 Rising Tide Systems
# Copyright (c) 2010 Linux-iSCSI.org
#
# Author: nab@kernel.org
#
import os, sys
import subprocess as sub
import string
import re
import optparse
tcm_dir = ""
fabric_ops = []
fabric_mod_dir = ""
fabric_mod_port = ""
fabric_mod_init_port = ""
def tcm_mod_err(msg):
	# Report a fatal generator error and abort with a non-zero exit code.
	print msg
	sys.exit(1)
def tcm_mod_create_module_subdir(fabric_mod_dir_var):
if os.path.isdir(fabric_mod_dir_var) == True:
return 1
print "Creating fabric_mod_dir: " + fabric_mod_dir_var
ret = os.mkdir(fabric_mod_dir_var)
if ret:
tcm_mod_err("Unable to mkdir " + fabric_mod_dir_var)
return
def tcm_mod_build_FC_include(fabric_mod_dir_var, fabric_mod_name):
	"""Generate <fabric_mod_name>_base.h with FC-flavored fabric structs.

	Emits C struct definitions for the initiator node ACL (nacl), target
	portal group (tpg) and target port (lport), then records the FC
	naming convention in the fabric_mod_port/fabric_mod_init_port globals
	("lport"/"nport") used by the other generator functions.
	"""
	global fabric_mod_port
	global fabric_mod_init_port
	buf = ""
	f = fabric_mod_dir_var + "/" + fabric_mod_name + "_base.h"
	print "Writing file: " + f
	p = open(f, 'w');
	# NOTE(review): open() raises IOError on failure rather than returning a
	# falsy value, so this check is effectively dead -- confirm and clean up.
	if not p:
		tcm_mod_err("Unable to open file: " + f)
	# Build the whole header in one string, then write it out in one call.
	buf = "#define " + fabric_mod_name.upper() + "_VERSION \"v0.1\"\n"
	buf += "#define " + fabric_mod_name.upper() + "_NAMELEN 32\n"
	buf += "\n"
	buf += "struct " + fabric_mod_name + "_nacl {\n"
	buf += " /* Binary World Wide unique Port Name for FC Initiator Nport */\n"
	buf += " u64 nport_wwpn;\n"
	buf += " /* ASCII formatted WWPN for FC Initiator Nport */\n"
	buf += " char nport_name[" + fabric_mod_name.upper() + "_NAMELEN];\n"
	buf += " /* Returned by " + fabric_mod_name + "_make_nodeacl() */\n"
	buf += " struct se_node_acl se_node_acl;\n"
	buf += "};\n"
	buf += "\n"
	buf += "struct " + fabric_mod_name + "_tpg {\n"
	buf += " /* FC lport target portal group tag for TCM */\n"
	buf += " u16 lport_tpgt;\n"
	buf += " /* Pointer back to " + fabric_mod_name + "_lport */\n"
	buf += " struct " + fabric_mod_name + "_lport *lport;\n"
	buf += " /* Returned by " + fabric_mod_name + "_make_tpg() */\n"
	buf += " struct se_portal_group se_tpg;\n"
	buf += "};\n"
	buf += "\n"
	buf += "struct " + fabric_mod_name + "_lport {\n"
	buf += " /* SCSI protocol the lport is providing */\n"
	buf += " u8 lport_proto_id;\n"
	buf += " /* Binary World Wide unique Port Name for FC Target Lport */\n"
	buf += " u64 lport_wwpn;\n"
	buf += " /* ASCII formatted WWPN for FC Target Lport */\n"
	buf += " char lport_name[" + fabric_mod_name.upper() + "_NAMELEN];\n"
	buf += " /* Returned by " + fabric_mod_name + "_make_lport() */\n"
	buf += " struct se_wwn lport_wwn;\n"
	buf += "};\n"
	ret = p.write(buf)
	# NOTE(review): file.write() returns None in Python 2, so this error
	# check can never trigger -- confirm and clean up.
	if ret:
		tcm_mod_err("Unable to write f: " + f)
	p.close()
	# Record the FC port-naming convention for the rest of the generator.
	fabric_mod_port = "lport"
	fabric_mod_init_port = "nport"
	return
def tcm_mod_build_SAS_include(fabric_mod_dir_var, fabric_mod_name):
	"""Generate <fabric_mod_name>_base.h with SAS port structs.

	Same shape as the FC variant but with SAS naming: the initiator side
	is an "iport" and the target side a "tport".  Sets the module-level
	fabric_mod_port / fabric_mod_init_port globals accordingly.
	"""
	global fabric_mod_port
	global fabric_mod_init_port
	buf = ""
	f = fabric_mod_dir_var + "/" + fabric_mod_name + "_base.h"
	print "Writing file: " + f
	p = open(f, 'w');
	# NOTE(review): open() raises on failure; this check never fires.
	if not p:
		tcm_mod_err("Unable to open file: " + f)
	buf = "#define " + fabric_mod_name.upper() + "_VERSION \"v0.1\"\n"
	buf += "#define " + fabric_mod_name.upper() + "_NAMELEN 32\n"
	buf += "\n"
	buf += "struct " + fabric_mod_name + "_nacl {\n"
	buf += "	/* Binary World Wide unique Port Name for SAS Initiator port */\n"
	buf += "	u64 iport_wwpn;\n"
	buf += "	/* ASCII formatted WWPN for Sas Initiator port */\n"
	buf += "	char iport_name[" + fabric_mod_name.upper() + "_NAMELEN];\n"
	buf += "	/* Returned by " + fabric_mod_name + "_make_nodeacl() */\n"
	buf += "	struct se_node_acl se_node_acl;\n"
	buf += "};\n\n"
	buf += "struct " + fabric_mod_name + "_tpg {\n"
	buf += "	/* SAS port target portal group tag for TCM */\n"
	buf += "	u16 tport_tpgt;\n"
	buf += "	/* Pointer back to " + fabric_mod_name + "_tport */\n"
	buf += "	struct " + fabric_mod_name + "_tport *tport;\n"
	buf += "	/* Returned by " + fabric_mod_name + "_make_tpg() */\n"
	buf += "	struct se_portal_group se_tpg;\n"
	buf += "};\n\n"
	buf += "struct " + fabric_mod_name + "_tport {\n"
	buf += "	/* SCSI protocol the tport is providing */\n"
	buf += "	u8 tport_proto_id;\n"
	buf += "	/* Binary World Wide unique Port Name for SAS Target port */\n"
	buf += "	u64 tport_wwpn;\n"
	buf += "	/* ASCII formatted WWPN for SAS Target port */\n"
	buf += "	char tport_name[" + fabric_mod_name.upper() + "_NAMELEN];\n"
	buf += "	/* Returned by " + fabric_mod_name + "_make_tport() */\n"
	buf += "	struct se_wwn tport_wwn;\n"
	buf += "};\n"
	ret = p.write(buf)
	# NOTE(review): write() returns None; dead error check (see FC variant).
	if ret:
		tcm_mod_err("Unable to write f: " + f)
	p.close()
	fabric_mod_port = "tport"
	fabric_mod_init_port = "iport"
	return
def tcm_mod_build_iSCSI_include(fabric_mod_dir_var, fabric_mod_name):
	"""Generate <fabric_mod_name>_base.h with iSCSI port structs.

	iSCSI has no binary WWPN: the nacl carries only an ASCII InitiatorName
	and the tport only an ASCII TargetName (IQN).  Sets the module-level
	fabric_mod_port / fabric_mod_init_port globals to "tport"/"iport".
	"""
	global fabric_mod_port
	global fabric_mod_init_port
	buf = ""
	f = fabric_mod_dir_var + "/" + fabric_mod_name + "_base.h"
	print "Writing file: " + f
	p = open(f, 'w');
	# NOTE(review): open() raises on failure; this check never fires.
	if not p:
		tcm_mod_err("Unable to open file: " + f)
	buf = "#define " + fabric_mod_name.upper() + "_VERSION \"v0.1\"\n"
	buf += "#define " + fabric_mod_name.upper() + "_NAMELEN 32\n"
	buf += "\n"
	buf += "struct " + fabric_mod_name + "_nacl {\n"
	buf += "	/* ASCII formatted InitiatorName */\n"
	buf += "	char iport_name[" + fabric_mod_name.upper() + "_NAMELEN];\n"
	buf += "	/* Returned by " + fabric_mod_name + "_make_nodeacl() */\n"
	buf += "	struct se_node_acl se_node_acl;\n"
	buf += "};\n\n"
	buf += "struct " + fabric_mod_name + "_tpg {\n"
	buf += "	/* iSCSI target portal group tag for TCM */\n"
	buf += "	u16 tport_tpgt;\n"
	buf += "	/* Pointer back to " + fabric_mod_name + "_tport */\n"
	buf += "	struct " + fabric_mod_name + "_tport *tport;\n"
	buf += "	/* Returned by " + fabric_mod_name + "_make_tpg() */\n"
	buf += "	struct se_portal_group se_tpg;\n"
	buf += "};\n\n"
	buf += "struct " + fabric_mod_name + "_tport {\n"
	buf += "	/* SCSI protocol the tport is providing */\n"
	buf += "	u8 tport_proto_id;\n"
	buf += "	/* ASCII formatted TargetName for IQN */\n"
	buf += "	char tport_name[" + fabric_mod_name.upper() + "_NAMELEN];\n"
	buf += "	/* Returned by " + fabric_mod_name + "_make_tport() */\n"
	buf += "	struct se_wwn tport_wwn;\n"
	buf += "};\n"
	ret = p.write(buf)
	# NOTE(review): write() returns None; dead error check (see FC variant).
	if ret:
		tcm_mod_err("Unable to write f: " + f)
	p.close()
	fabric_mod_port = "tport"
	fabric_mod_init_port = "iport"
	return
def tcm_mod_build_base_includes(proto_ident, fabric_mod_dir_val, fabric_mod_name):
if proto_ident == "FC":
tcm_mod_build_FC_include(fabric_mod_dir_val, fabric_mod_name)
elif proto_ident == "SAS":
tcm_mod_build_SAS_include(fabric_mod_dir_val, fabric_mod_name)
elif proto_ident == "iSCSI":
tcm_mod_build_iSCSI_include(fabric_mod_dir_val, fabric_mod_name)
else:
print "Unsupported proto_ident: " + proto_ident
sys.exit(1)
return
def tcm_mod_build_configfs(proto_ident, fabric_mod_dir_var, fabric_mod_name):
	"""Generate <fabric_mod_name>_configfs.c for the new fabric module.

	Emits the configfs glue: make/drop callbacks for node ACLs, TPGs and
	WWN ports, the target_core_fabric_ops table wired to the stubs that
	tcm_mod_dump_fabric_ops() generates, and the module init/exit handlers
	that (de)register the fabric with TCM.  proto_ident ("FC"/"SAS"/"iSCSI")
	selects whether WWPN-handling code is included.
	"""
	buf = ""
	f = fabric_mod_dir_var + "/" + fabric_mod_name + "_configfs.c"
	print "Writing file: " + f
	p = open(f, 'w');
	# NOTE(review): open() raises on failure; this check never fires.
	if not p:
		tcm_mod_err("Unable to open file: " + f)
	# Fixed include preamble of the generated _configfs.c.
	buf = "#include <linux/module.h>\n"
	buf += "#include <linux/moduleparam.h>\n"
	buf += "#include <linux/version.h>\n"
	buf += "#include <generated/utsrelease.h>\n"
	buf += "#include <linux/utsname.h>\n"
	buf += "#include <linux/init.h>\n"
	buf += "#include <linux/slab.h>\n"
	buf += "#include <linux/kthread.h>\n"
	buf += "#include <linux/types.h>\n"
	buf += "#include <linux/string.h>\n"
	buf += "#include <linux/configfs.h>\n"
	buf += "#include <linux/ctype.h>\n"
	buf += "#include <asm/unaligned.h>\n\n"
	buf += "#include <target/target_core_base.h>\n"
	buf += "#include <target/target_core_transport.h>\n"
	buf += "#include <target/target_core_fabric_ops.h>\n"
	buf += "#include <target/target_core_fabric_configfs.h>\n"
	buf += "#include <target/target_core_fabric_lib.h>\n"
	buf += "#include <target/target_core_device.h>\n"
	buf += "#include <target/target_core_tpg.h>\n"
	buf += "#include <target/target_core_configfs.h>\n"
	buf += "#include <target/target_core_base.h>\n"
	buf += "#include <target/configfs_macros.h>\n\n"
	buf += "#include \"" + fabric_mod_name + "_base.h\"\n"
	buf += "#include \"" + fabric_mod_name + "_fabric.h\"\n\n"
	buf += "/* Local pointer to allocated TCM configfs fabric module */\n"
	buf += "struct target_fabric_configfs *" + fabric_mod_name + "_fabric_configfs;\n\n"
	# NodeACL make/drop callbacks; WWPN handling only for FC/SAS.
	buf += "static struct se_node_acl *" + fabric_mod_name + "_make_nodeacl(\n"
	buf += "	struct se_portal_group *se_tpg,\n"
	buf += "	struct config_group *group,\n"
	buf += "	const char *name)\n"
	buf += "{\n"
	buf += "	struct se_node_acl *se_nacl, *se_nacl_new;\n"
	buf += "	struct " + fabric_mod_name + "_nacl *nacl;\n"
	if proto_ident == "FC" or proto_ident == "SAS":
		buf += "	u64 wwpn = 0;\n"
	buf += "	u32 nexus_depth;\n\n"
	buf += "	/* " + fabric_mod_name + "_parse_wwn(name, &wwpn, 1) < 0)\n"
	buf += "		return ERR_PTR(-EINVAL); */\n"
	buf += "	se_nacl_new = " + fabric_mod_name + "_alloc_fabric_acl(se_tpg);\n"
	buf += "	if (!(se_nacl_new))\n"
	buf += "		return ERR_PTR(-ENOMEM);\n"
	buf += "//#warning FIXME: Hardcoded nexus depth in " + fabric_mod_name + "_make_nodeacl()\n"
	buf += "	nexus_depth = 1;\n"
	buf += "	/*\n"
	buf += "	 * se_nacl_new may be released by core_tpg_add_initiator_node_acl()\n"
	buf += "	 * when converting a NodeACL from demo mode -> explict\n"
	buf += "	 */\n"
	buf += "	se_nacl = core_tpg_add_initiator_node_acl(se_tpg, se_nacl_new,\n"
	buf += "				name, nexus_depth);\n"
	buf += "	if (IS_ERR(se_nacl)) {\n"
	buf += "		" + fabric_mod_name + "_release_fabric_acl(se_tpg, se_nacl_new);\n"
	buf += "		return se_nacl;\n"
	buf += "	}\n"
	buf += "	/*\n"
	buf += "	 * Locate our struct " + fabric_mod_name + "_nacl and set the FC Nport WWPN\n"
	buf += "	 */\n"
	buf += "	nacl = container_of(se_nacl, struct " + fabric_mod_name + "_nacl, se_node_acl);\n"
	if proto_ident == "FC" or proto_ident == "SAS":
		buf += "	nacl->" + fabric_mod_init_port + "_wwpn = wwpn;\n"
	buf += "	/* " + fabric_mod_name + "_format_wwn(&nacl->" + fabric_mod_init_port + "_name[0], " + fabric_mod_name.upper() + "_NAMELEN, wwpn); */\n\n"
	buf += "	return se_nacl;\n"
	buf += "}\n\n"
	buf += "static void " + fabric_mod_name + "_drop_nodeacl(struct se_node_acl *se_acl)\n"
	buf += "{\n"
	buf += "	struct " + fabric_mod_name + "_nacl *nacl = container_of(se_acl,\n"
	buf += "				struct " + fabric_mod_name + "_nacl, se_node_acl);\n"
	buf += "	core_tpg_del_initiator_node_acl(se_acl->se_tpg, se_acl, 1);\n"
	buf += "	kfree(nacl);\n"
	buf += "}\n\n"
	# TPG make/drop callbacks.
	buf += "static struct se_portal_group *" + fabric_mod_name + "_make_tpg(\n"
	buf += "	struct se_wwn *wwn,\n"
	buf += "	struct config_group *group,\n"
	buf += "	const char *name)\n"
	buf += "{\n"
	# NOTE(review): missing space before '*' in the generated declaration
	# below ("_lport*lport") -- cosmetic in the C output, kept as-is.
	buf += "	struct " + fabric_mod_name + "_" + fabric_mod_port + "*" + fabric_mod_port + " = container_of(wwn,\n"
	buf += "			struct " + fabric_mod_name + "_" + fabric_mod_port + ", " + fabric_mod_port + "_wwn);\n\n"
	buf += "	struct " + fabric_mod_name + "_tpg *tpg;\n"
	buf += "	unsigned long tpgt;\n"
	buf += "	int ret;\n\n"
	buf += "	if (strstr(name, \"tpgt_\") != name)\n"
	buf += "		return ERR_PTR(-EINVAL);\n"
	buf += "	if (strict_strtoul(name + 5, 10, &tpgt) || tpgt > UINT_MAX)\n"
	buf += "		return ERR_PTR(-EINVAL);\n\n"
	buf += "	tpg = kzalloc(sizeof(struct " + fabric_mod_name + "_tpg), GFP_KERNEL);\n"
	buf += "	if (!(tpg)) {\n"
	buf += "		printk(KERN_ERR \"Unable to allocate struct " + fabric_mod_name + "_tpg\");\n"
	buf += "		return ERR_PTR(-ENOMEM);\n"
	buf += "	}\n"
	buf += "	tpg->" + fabric_mod_port + " = " + fabric_mod_port + ";\n"
	buf += "	tpg->" + fabric_mod_port + "_tpgt = tpgt;\n\n"
	buf += "	ret = core_tpg_register(&" + fabric_mod_name + "_fabric_configfs->tf_ops, wwn,\n"
	buf += "				&tpg->se_tpg, (void *)tpg,\n"
	buf += "				TRANSPORT_TPG_TYPE_NORMAL);\n"
	buf += "	if (ret < 0) {\n"
	buf += "		kfree(tpg);\n"
	buf += "		return NULL;\n"
	buf += "	}\n"
	buf += "	return &tpg->se_tpg;\n"
	buf += "}\n\n"
	buf += "static void " + fabric_mod_name + "_drop_tpg(struct se_portal_group *se_tpg)\n"
	buf += "{\n"
	buf += "	struct " + fabric_mod_name + "_tpg *tpg = container_of(se_tpg,\n"
	buf += "				struct " + fabric_mod_name + "_tpg, se_tpg);\n\n"
	buf += "	core_tpg_deregister(se_tpg);\n"
	buf += "	kfree(tpg);\n"
	buf += "}\n\n"
	# WWN port make/drop callbacks; WWPN handling only for FC/SAS.
	buf += "static struct se_wwn *" + fabric_mod_name + "_make_" + fabric_mod_port + "(\n"
	buf += "	struct target_fabric_configfs *tf,\n"
	buf += "	struct config_group *group,\n"
	buf += "	const char *name)\n"
	buf += "{\n"
	buf += "	struct " + fabric_mod_name + "_" + fabric_mod_port + " *" + fabric_mod_port + ";\n"
	if proto_ident == "FC" or proto_ident == "SAS":
		buf += "	u64 wwpn = 0;\n\n"
	buf += "	/* if (" + fabric_mod_name + "_parse_wwn(name, &wwpn, 1) < 0)\n"
	buf += "		return ERR_PTR(-EINVAL); */\n\n"
	buf += "	" + fabric_mod_port + " = kzalloc(sizeof(struct " + fabric_mod_name + "_" + fabric_mod_port + "), GFP_KERNEL);\n"
	buf += "	if (!(" + fabric_mod_port + ")) {\n"
	buf += "		printk(KERN_ERR \"Unable to allocate struct " + fabric_mod_name + "_" + fabric_mod_port + "\");\n"
	buf += "		return ERR_PTR(-ENOMEM);\n"
	buf += "	}\n"
	if proto_ident == "FC" or proto_ident == "SAS":
		buf += "	" + fabric_mod_port + "->" + fabric_mod_port + "_wwpn = wwpn;\n"
	# NOTE(review): the generated comment below says "__NAMELEN" (double
	# underscore) unlike the "_NAMELEN" macro defined in _base.h -- it is
	# inside a C comment, so harmless; kept byte-identical.
	buf += "	/* " + fabric_mod_name + "_format_wwn(&" + fabric_mod_port + "->" + fabric_mod_port + "_name[0], " + fabric_mod_name.upper() + "__NAMELEN, wwpn); */\n\n"
	buf += "	return &" + fabric_mod_port + "->" + fabric_mod_port + "_wwn;\n"
	buf += "}\n\n"
	buf += "static void " + fabric_mod_name + "_drop_" + fabric_mod_port + "(struct se_wwn *wwn)\n"
	buf += "{\n"
	buf += "	struct " + fabric_mod_name + "_" + fabric_mod_port + " *" + fabric_mod_port + " = container_of(wwn,\n"
	buf += "				struct " + fabric_mod_name + "_" + fabric_mod_port + ", " + fabric_mod_port + "_wwn);\n"
	buf += "	kfree(" + fabric_mod_port + ");\n"
	buf += "}\n\n"
	# Read-only "version" WWN configfs attribute.
	buf += "static ssize_t " + fabric_mod_name + "_wwn_show_attr_version(\n"
	buf += "	struct target_fabric_configfs *tf,\n"
	buf += "	char *page)\n"
	buf += "{\n"
	buf += "	return sprintf(page, \"" + fabric_mod_name.upper() + " fabric module %s on %s/%s\"\n"
	buf += "		\"on \"UTS_RELEASE\"\\n\", " + fabric_mod_name.upper() + "_VERSION, utsname()->sysname,\n"
	buf += "		utsname()->machine);\n"
	buf += "}\n\n"
	buf += "TF_WWN_ATTR_RO(" + fabric_mod_name + ", version);\n\n"
	buf += "static struct configfs_attribute *" + fabric_mod_name + "_wwn_attrs[] = {\n"
	buf += "	&" + fabric_mod_name + "_wwn_version.attr,\n"
	buf += "	NULL,\n"
	buf += "};\n\n"
	# target_core_fabric_ops table: wires TCM callbacks to the stub
	# functions emitted by tcm_mod_dump_fabric_ops().
	buf += "static struct target_core_fabric_ops " + fabric_mod_name + "_ops = {\n"
	buf += "	.get_fabric_name = " + fabric_mod_name + "_get_fabric_name,\n"
	buf += "	.get_fabric_proto_ident = " + fabric_mod_name + "_get_fabric_proto_ident,\n"
	buf += "	.tpg_get_wwn = " + fabric_mod_name + "_get_fabric_wwn,\n"
	buf += "	.tpg_get_tag = " + fabric_mod_name + "_get_tag,\n"
	buf += "	.tpg_get_default_depth = " + fabric_mod_name + "_get_default_depth,\n"
	buf += "	.tpg_get_pr_transport_id = " + fabric_mod_name + "_get_pr_transport_id,\n"
	buf += "	.tpg_get_pr_transport_id_len = " + fabric_mod_name + "_get_pr_transport_id_len,\n"
	buf += "	.tpg_parse_pr_out_transport_id = " + fabric_mod_name + "_parse_pr_out_transport_id,\n"
	buf += "	.tpg_check_demo_mode = " + fabric_mod_name + "_check_false,\n"
	buf += "	.tpg_check_demo_mode_cache = " + fabric_mod_name + "_check_true,\n"
	buf += "	.tpg_check_demo_mode_write_protect = " + fabric_mod_name + "_check_true,\n"
	buf += "	.tpg_check_prod_mode_write_protect = " + fabric_mod_name + "_check_false,\n"
	buf += "	.tpg_alloc_fabric_acl = " + fabric_mod_name + "_alloc_fabric_acl,\n"
	buf += "	.tpg_release_fabric_acl = " + fabric_mod_name + "_release_fabric_acl,\n"
	buf += "	.tpg_get_inst_index = " + fabric_mod_name + "_tpg_get_inst_index,\n"
	buf += "	.release_cmd_to_pool = " + fabric_mod_name + "_release_cmd,\n"
	buf += "	.release_cmd_direct = " + fabric_mod_name + "_release_cmd,\n"
	buf += "	.shutdown_session = " + fabric_mod_name + "_shutdown_session,\n"
	buf += "	.close_session = " + fabric_mod_name + "_close_session,\n"
	buf += "	.stop_session = " + fabric_mod_name + "_stop_session,\n"
	buf += "	.fall_back_to_erl0 = " + fabric_mod_name + "_reset_nexus,\n"
	buf += "	.sess_logged_in = " + fabric_mod_name + "_sess_logged_in,\n"
	buf += "	.sess_get_index = " + fabric_mod_name + "_sess_get_index,\n"
	buf += "	.sess_get_initiator_sid = NULL,\n"
	buf += "	.write_pending = " + fabric_mod_name + "_write_pending,\n"
	buf += "	.write_pending_status = " + fabric_mod_name + "_write_pending_status,\n"
	buf += "	.set_default_node_attributes = " + fabric_mod_name + "_set_default_node_attrs,\n"
	buf += "	.get_task_tag = " + fabric_mod_name + "_get_task_tag,\n"
	buf += "	.get_cmd_state = " + fabric_mod_name + "_get_cmd_state,\n"
	buf += "	.new_cmd_failure = " + fabric_mod_name + "_new_cmd_failure,\n"
	buf += "	.queue_data_in = " + fabric_mod_name + "_queue_data_in,\n"
	buf += "	.queue_status = " + fabric_mod_name + "_queue_status,\n"
	buf += "	.queue_tm_rsp = " + fabric_mod_name + "_queue_tm_rsp,\n"
	buf += "	.get_fabric_sense_len = " + fabric_mod_name + "_get_fabric_sense_len,\n"
	buf += "	.set_fabric_sense_len = " + fabric_mod_name + "_set_fabric_sense_len,\n"
	buf += "	.is_state_remove = " + fabric_mod_name + "_is_state_remove,\n"
	buf += "	.pack_lun = " + fabric_mod_name + "_pack_lun,\n"
	buf += "	/*\n"
	buf += "	 * Setup function pointers for generic logic in target_core_fabric_configfs.c\n"
	buf += "	 */\n"
	buf += "	.fabric_make_wwn = " + fabric_mod_name + "_make_" + fabric_mod_port + ",\n"
	buf += "	.fabric_drop_wwn = " + fabric_mod_name + "_drop_" + fabric_mod_port + ",\n"
	buf += "	.fabric_make_tpg = " + fabric_mod_name + "_make_tpg,\n"
	buf += "	.fabric_drop_tpg = " + fabric_mod_name + "_drop_tpg,\n"
	buf += "	.fabric_post_link = NULL,\n"
	buf += "	.fabric_pre_unlink = NULL,\n"
	buf += "	.fabric_make_np = NULL,\n"
	buf += "	.fabric_drop_np = NULL,\n"
	buf += "	.fabric_make_nodeacl = " + fabric_mod_name + "_make_nodeacl,\n"
	buf += "	.fabric_drop_nodeacl = " + fabric_mod_name + "_drop_nodeacl,\n"
	buf += "};\n\n"
	# configfs (de)registration plus module init/exit.
	buf += "static int " + fabric_mod_name + "_register_configfs(void)\n"
	buf += "{\n"
	buf += "	struct target_fabric_configfs *fabric;\n"
	buf += "	int ret;\n\n"
	buf += "	printk(KERN_INFO \"" + fabric_mod_name.upper() + " fabric module %s on %s/%s\"\n"
	buf += "		\" on \"UTS_RELEASE\"\\n\"," + fabric_mod_name.upper() + "_VERSION, utsname()->sysname,\n"
	buf += "		utsname()->machine);\n"
	buf += "	/*\n"
	buf += "	 * Register the top level struct config_item_type with TCM core\n"
	buf += "	 */\n"
	# fabric_mod_name[4:] strips the "tcm_" prefix for the configfs name.
	buf += "	fabric = target_fabric_configfs_init(THIS_MODULE, \"" + fabric_mod_name[4:] + "\");\n"
	buf += "	if (!(fabric)) {\n"
	buf += "		printk(KERN_ERR \"target_fabric_configfs_init() failed\\n\");\n"
	buf += "		return -ENOMEM;\n"
	buf += "	}\n"
	buf += "	/*\n"
	buf += "	 * Setup fabric->tf_ops from our local " + fabric_mod_name + "_ops\n"
	buf += "	 */\n"
	buf += "	fabric->tf_ops = " + fabric_mod_name + "_ops;\n"
	buf += "	/*\n"
	buf += "	 * Setup default attribute lists for various fabric->tf_cit_tmpl\n"
	buf += "	 */\n"
	buf += "	TF_CIT_TMPL(fabric)->tfc_wwn_cit.ct_attrs = " + fabric_mod_name + "_wwn_attrs;\n"
	buf += "	TF_CIT_TMPL(fabric)->tfc_tpg_base_cit.ct_attrs = NULL;\n"
	buf += "	TF_CIT_TMPL(fabric)->tfc_tpg_attrib_cit.ct_attrs = NULL;\n"
	buf += "	TF_CIT_TMPL(fabric)->tfc_tpg_param_cit.ct_attrs = NULL;\n"
	buf += "	TF_CIT_TMPL(fabric)->tfc_tpg_np_base_cit.ct_attrs = NULL;\n"
	buf += "	TF_CIT_TMPL(fabric)->tfc_tpg_nacl_base_cit.ct_attrs = NULL;\n"
	buf += "	TF_CIT_TMPL(fabric)->tfc_tpg_nacl_attrib_cit.ct_attrs = NULL;\n"
	buf += "	TF_CIT_TMPL(fabric)->tfc_tpg_nacl_auth_cit.ct_attrs = NULL;\n"
	buf += "	TF_CIT_TMPL(fabric)->tfc_tpg_nacl_param_cit.ct_attrs = NULL;\n"
	buf += "	/*\n"
	buf += "	 * Register the fabric for use within TCM\n"
	buf += "	 */\n"
	buf += "	ret = target_fabric_configfs_register(fabric);\n"
	buf += "	if (ret < 0) {\n"
	buf += "		printk(KERN_ERR \"target_fabric_configfs_register() failed\"\n"
	buf += "				\" for " + fabric_mod_name.upper() + "\\n\");\n"
	buf += "		return ret;\n"
	buf += "	}\n"
	buf += "	/*\n"
	buf += "	 * Setup our local pointer to *fabric\n"
	buf += "	 */\n"
	buf += "	" + fabric_mod_name + "_fabric_configfs = fabric;\n"
	buf += "	printk(KERN_INFO \"" + fabric_mod_name.upper() + "[0] - Set fabric -> " + fabric_mod_name + "_fabric_configfs\\n\");\n"
	buf += "	return 0;\n"
	buf += "};\n\n"
	buf += "static void " + fabric_mod_name + "_deregister_configfs(void)\n"
	buf += "{\n"
	buf += "	if (!(" + fabric_mod_name + "_fabric_configfs))\n"
	buf += "		return;\n\n"
	buf += "	target_fabric_configfs_deregister(" + fabric_mod_name + "_fabric_configfs);\n"
	buf += "	" + fabric_mod_name + "_fabric_configfs = NULL;\n"
	buf += "	printk(KERN_INFO \"" + fabric_mod_name.upper() + "[0] - Cleared " + fabric_mod_name + "_fabric_configfs\\n\");\n"
	buf += "};\n\n"
	buf += "static int __init " + fabric_mod_name + "_init(void)\n"
	buf += "{\n"
	buf += "	int ret;\n\n"
	buf += "	ret = " + fabric_mod_name + "_register_configfs();\n"
	buf += "	if (ret < 0)\n"
	buf += "		return ret;\n\n"
	buf += "	return 0;\n"
	buf += "};\n\n"
	buf += "static void " + fabric_mod_name + "_exit(void)\n"
	buf += "{\n"
	buf += "	" + fabric_mod_name + "_deregister_configfs();\n"
	buf += "};\n\n"
	buf += "#ifdef MODULE\n"
	buf += "MODULE_DESCRIPTION(\"" + fabric_mod_name.upper() + " series fabric driver\");\n"
	buf += "MODULE_LICENSE(\"GPL\");\n"
	buf += "module_init(" + fabric_mod_name + "_init);\n"
	buf += "module_exit(" + fabric_mod_name + "_exit);\n"
	buf += "#endif\n"
	ret = p.write(buf)
	# NOTE(review): write() returns None in Python 2; dead error check.
	if ret:
		tcm_mod_err("Unable to write f: " + f)
	p.close()
	return
def tcm_mod_scan_fabric_ops(tcm_dir):
	"""Harvest function-pointer member lines from target_core_fabric_ops.h.

	Appends every line containing a '(*' function-pointer declaration to
	the module-level fabric_ops list; tcm_mod_dump_fabric_ops() later
	matches against those lines to decide which stubs to generate.
	"""
	fabric_ops_api = tcm_dir + "include/target/target_core_fabric_ops.h"
	print "Using tcm_mod_scan_fabric_ops: " + fabric_ops_api
	process_fo = 0;
	p = open(fabric_ops_api, 'r')
	line = p.readline()
	while line:
		# NOTE(review): process_fo flips to 1 on the first line that is
		# not the struct header, so the scan effectively covers the whole
		# file; this only works because '(*' declarations appear solely
		# inside struct target_core_fabric_ops in that header -- confirm
		# if the header layout ever changes.
		if process_fo == 0 and re.search('struct target_core_fabric_ops {', line):
			line = p.readline()
			continue
		if process_fo == 0:
			process_fo = 1;
			line = p.readline()
			# Search for function pointer
			if not re.search('\(\*', line):
				continue
			fabric_ops.append(line.rstrip())
			continue
		line = p.readline()
		# Search for function pointer
		if not re.search('\(\*', line):
			continue
		fabric_ops.append(line.rstrip())
	p.close()
	return
def tcm_mod_dump_fabric_ops(proto_ident, fabric_mod_dir_var, fabric_mod_name):
	"""Generate <fabric_mod_name>_fabric.c stubs and the matching _fabric.h.

	Walks the lines harvested by tcm_mod_scan_fabric_ops() and, for each
	recognized target_core_fabric_ops member, appends a minimal stub
	implementation to buf (_fabric.c) and its prototype to bufi
	(_fabric.h).  proto_ident ("FC"/"SAS"/"iSCSI") selects the transport-ID
	helper calls inside the protocol-dependent stubs.
	"""
	buf = ""
	bufi = ""
	f = fabric_mod_dir_var + "/" + fabric_mod_name + "_fabric.c"
	print "Writing file: " + f
	p = open(f, 'w')
	# NOTE(review): open() raises on failure; these falsy checks never fire.
	if not p:
		tcm_mod_err("Unable to open file: " + f)
	fi = fabric_mod_dir_var + "/" + fabric_mod_name + "_fabric.h"
	print "Writing file: " + fi
	pi = open(fi, 'w')
	if not pi:
		tcm_mod_err("Unable to open file: " + fi)
	# Fixed include preamble of the generated _fabric.c.
	buf = "#include <linux/slab.h>\n"
	buf += "#include <linux/kthread.h>\n"
	buf += "#include <linux/types.h>\n"
	buf += "#include <linux/list.h>\n"
	buf += "#include <linux/types.h>\n"
	buf += "#include <linux/string.h>\n"
	buf += "#include <linux/ctype.h>\n"
	buf += "#include <asm/unaligned.h>\n"
	buf += "#include <scsi/scsi.h>\n"
	buf += "#include <scsi/scsi_host.h>\n"
	buf += "#include <scsi/scsi_device.h>\n"
	buf += "#include <scsi/scsi_cmnd.h>\n"
	buf += "#include <scsi/libfc.h>\n\n"
	buf += "#include <target/target_core_base.h>\n"
	buf += "#include <target/target_core_transport.h>\n"
	buf += "#include <target/target_core_fabric_ops.h>\n"
	buf += "#include <target/target_core_fabric_lib.h>\n"
	buf += "#include <target/target_core_device.h>\n"
	buf += "#include <target/target_core_tpg.h>\n"
	buf += "#include <target/target_core_configfs.h>\n\n"
	buf += "#include \"" + fabric_mod_name + "_base.h\"\n"
	buf += "#include \"" + fabric_mod_name + "_fabric.h\"\n\n"
	# check_true/check_false are always emitted; the configfs ops table
	# references them for the demo-mode predicates.
	buf += "int " + fabric_mod_name + "_check_true(struct se_portal_group *se_tpg)\n"
	buf += "{\n"
	buf += "	return 1;\n"
	buf += "}\n\n"
	bufi += "int " + fabric_mod_name + "_check_true(struct se_portal_group *);\n"
	buf += "int " + fabric_mod_name + "_check_false(struct se_portal_group *se_tpg)\n"
	buf += "{\n"
	buf += "	return 0;\n"
	buf += "}\n\n"
	bufi += "int " + fabric_mod_name + "_check_false(struct se_portal_group *);\n"
	total_fabric_ops = len(fabric_ops)
	i = 0
	# Emit one stub (implementation + prototype) per recognized member.
	while i < total_fabric_ops:
		fo = fabric_ops[i]
		i += 1
#		print "fabric_ops: " + fo
		if re.search('get_fabric_name', fo):
			buf += "char *" + fabric_mod_name + "_get_fabric_name(void)\n"
			buf += "{\n"
			buf += "	return \"" + fabric_mod_name[4:] + "\";\n"
			buf += "}\n\n"
			bufi += "char *" + fabric_mod_name + "_get_fabric_name(void);\n"
			continue
		if re.search('get_fabric_proto_ident', fo):
			buf += "u8 " + fabric_mod_name + "_get_fabric_proto_ident(struct se_portal_group *se_tpg)\n"
			buf += "{\n"
			buf += "	struct " + fabric_mod_name + "_tpg *tpg = container_of(se_tpg,\n"
			buf += "				struct " + fabric_mod_name + "_tpg, se_tpg);\n"
			buf += "	struct " + fabric_mod_name + "_" + fabric_mod_port + " *" + fabric_mod_port + " = tpg->" + fabric_mod_port + ";\n"
			buf += "	u8 proto_id;\n\n"
			buf += "	switch (" + fabric_mod_port + "->" + fabric_mod_port + "_proto_id) {\n"
			if proto_ident == "FC":
				buf += "	case SCSI_PROTOCOL_FCP:\n"
				buf += "	default:\n"
				buf += "		proto_id = fc_get_fabric_proto_ident(se_tpg);\n"
				buf += "		break;\n"
			elif proto_ident == "SAS":
				buf += "	case SCSI_PROTOCOL_SAS:\n"
				buf += "	default:\n"
				buf += "		proto_id = sas_get_fabric_proto_ident(se_tpg);\n"
				buf += "		break;\n"
			elif proto_ident == "iSCSI":
				buf += "	case SCSI_PROTOCOL_ISCSI:\n"
				buf += "	default:\n"
				buf += "		proto_id = iscsi_get_fabric_proto_ident(se_tpg);\n"
				buf += "		break;\n"
			buf += "	}\n\n"
			buf += "	return proto_id;\n"
			buf += "}\n\n"
			bufi += "u8 " + fabric_mod_name + "_get_fabric_proto_ident(struct se_portal_group *);\n"
		if re.search('get_wwn', fo):
			buf += "char *" + fabric_mod_name + "_get_fabric_wwn(struct se_portal_group *se_tpg)\n"
			buf += "{\n"
			buf += "	struct " + fabric_mod_name + "_tpg *tpg = container_of(se_tpg,\n"
			buf += "				struct " + fabric_mod_name + "_tpg, se_tpg);\n"
			buf += "	struct " + fabric_mod_name + "_" + fabric_mod_port + " *" + fabric_mod_port + " = tpg->" + fabric_mod_port + ";\n\n"
			buf += "	return &" + fabric_mod_port + "->" + fabric_mod_port + "_name[0];\n"
			buf += "}\n\n"
			bufi += "char *" + fabric_mod_name + "_get_fabric_wwn(struct se_portal_group *);\n"
		if re.search('get_tag', fo):
			buf += "u16 " + fabric_mod_name + "_get_tag(struct se_portal_group *se_tpg)\n"
			buf += "{\n"
			buf += "	struct " + fabric_mod_name + "_tpg *tpg = container_of(se_tpg,\n"
			buf += "				struct " + fabric_mod_name + "_tpg, se_tpg);\n"
			buf += "	return tpg->" + fabric_mod_port + "_tpgt;\n"
			buf += "}\n\n"
			bufi += "u16 " + fabric_mod_name + "_get_tag(struct se_portal_group *);\n"
		if re.search('get_default_depth', fo):
			buf += "u32 " + fabric_mod_name + "_get_default_depth(struct se_portal_group *se_tpg)\n"
			buf += "{\n"
			buf += "	return 1;\n"
			buf += "}\n\n"
			bufi += "u32 " + fabric_mod_name + "_get_default_depth(struct se_portal_group *);\n"
		if re.search('get_pr_transport_id\)\(', fo):
			buf += "u32 " + fabric_mod_name + "_get_pr_transport_id(\n"
			buf += "	struct se_portal_group *se_tpg,\n"
			buf += "	struct se_node_acl *se_nacl,\n"
			buf += "	struct t10_pr_registration *pr_reg,\n"
			buf += "	int *format_code,\n"
			buf += "	unsigned char *buf)\n"
			buf += "{\n"
			buf += "	struct " + fabric_mod_name + "_tpg *tpg = container_of(se_tpg,\n"
			buf += "				struct " + fabric_mod_name + "_tpg, se_tpg);\n"
			buf += "	struct " + fabric_mod_name + "_" + fabric_mod_port + " *" + fabric_mod_port + " = tpg->" + fabric_mod_port + ";\n"
			buf += "	int ret = 0;\n\n"
			buf += "	switch (" + fabric_mod_port + "->" + fabric_mod_port + "_proto_id) {\n"
			if proto_ident == "FC":
				buf += "	case SCSI_PROTOCOL_FCP:\n"
				buf += "	default:\n"
				buf += "		ret = fc_get_pr_transport_id(se_tpg, se_nacl, pr_reg,\n"
				buf += "					format_code, buf);\n"
				buf += "		break;\n"
			elif proto_ident == "SAS":
				buf += "	case SCSI_PROTOCOL_SAS:\n"
				buf += "	default:\n"
				buf += "		ret = sas_get_pr_transport_id(se_tpg, se_nacl, pr_reg,\n"
				buf += "					format_code, buf);\n"
				buf += "		break;\n"
			elif proto_ident == "iSCSI":
				buf += "	case SCSI_PROTOCOL_ISCSI:\n"
				buf += "	default:\n"
				buf += "		ret = iscsi_get_pr_transport_id(se_tpg, se_nacl, pr_reg,\n"
				buf += "					format_code, buf);\n"
				buf += "		break;\n"
			buf += "	}\n\n"
			buf += "	return ret;\n"
			buf += "}\n\n"
			bufi += "u32 " + fabric_mod_name + "_get_pr_transport_id(struct se_portal_group *,\n"
			bufi += "			struct se_node_acl *, struct t10_pr_registration *,\n"
			bufi += "			int *, unsigned char *);\n"
		if re.search('get_pr_transport_id_len\)\(', fo):
			buf += "u32 " + fabric_mod_name + "_get_pr_transport_id_len(\n"
			buf += "	struct se_portal_group *se_tpg,\n"
			buf += "	struct se_node_acl *se_nacl,\n"
			buf += "	struct t10_pr_registration *pr_reg,\n"
			buf += "	int *format_code)\n"
			buf += "{\n"
			buf += "	struct " + fabric_mod_name + "_tpg *tpg = container_of(se_tpg,\n"
			buf += "				struct " + fabric_mod_name + "_tpg, se_tpg);\n"
			buf += "	struct " + fabric_mod_name + "_" + fabric_mod_port + " *" + fabric_mod_port + " = tpg->" + fabric_mod_port + ";\n"
			buf += "	int ret = 0;\n\n"
			buf += "	switch (" + fabric_mod_port + "->" + fabric_mod_port + "_proto_id) {\n"
			if proto_ident == "FC":
				buf += "	case SCSI_PROTOCOL_FCP:\n"
				buf += "	default:\n"
				buf += "		ret = fc_get_pr_transport_id_len(se_tpg, se_nacl, pr_reg,\n"
				buf += "					format_code);\n"
				buf += "		break;\n"
			elif proto_ident == "SAS":
				buf += "	case SCSI_PROTOCOL_SAS:\n"
				buf += "	default:\n"
				buf += "		ret = sas_get_pr_transport_id_len(se_tpg, se_nacl, pr_reg,\n"
				buf += "					format_code);\n"
				buf += "		break;\n"
			elif proto_ident == "iSCSI":
				buf += "	case SCSI_PROTOCOL_ISCSI:\n"
				buf += "	default:\n"
				buf += "		ret = iscsi_get_pr_transport_id_len(se_tpg, se_nacl, pr_reg,\n"
				buf += "					format_code);\n"
				buf += "		break;\n"
			buf += "	}\n\n"
			buf += "	return ret;\n"
			buf += "}\n\n"
			bufi += "u32 " + fabric_mod_name + "_get_pr_transport_id_len(struct se_portal_group *,\n"
			bufi += "			struct se_node_acl *, struct t10_pr_registration *,\n"
			bufi += "			int *);\n"
		if re.search('parse_pr_out_transport_id\)\(', fo):
			buf += "char *" + fabric_mod_name + "_parse_pr_out_transport_id(\n"
			buf += "	struct se_portal_group *se_tpg,\n"
			buf += "	const char *buf,\n"
			buf += "	u32 *out_tid_len,\n"
			buf += "	char **port_nexus_ptr)\n"
			buf += "{\n"
			buf += "	struct " + fabric_mod_name + "_tpg *tpg = container_of(se_tpg,\n"
			buf += "				struct " + fabric_mod_name + "_tpg, se_tpg);\n"
			buf += "	struct " + fabric_mod_name + "_" + fabric_mod_port + " *" + fabric_mod_port + " = tpg->" + fabric_mod_port + ";\n"
			buf += "	char *tid = NULL;\n\n"
			buf += "	switch (" + fabric_mod_port + "->" + fabric_mod_port + "_proto_id) {\n"
			# NOTE(review): unlike the two stubs above, the generated cases
			# here carry no "break;" before the closing brace -- harmless in
			# the emitted C because each branch is also the default.
			if proto_ident == "FC":
				buf += "	case SCSI_PROTOCOL_FCP:\n"
				buf += "	default:\n"
				buf += "		tid = fc_parse_pr_out_transport_id(se_tpg, buf, out_tid_len,\n"
				buf += "					port_nexus_ptr);\n"
			elif proto_ident == "SAS":
				buf += "	case SCSI_PROTOCOL_SAS:\n"
				buf += "	default:\n"
				buf += "		tid = sas_parse_pr_out_transport_id(se_tpg, buf, out_tid_len,\n"
				buf += "					port_nexus_ptr);\n"
			elif proto_ident == "iSCSI":
				buf += "	case SCSI_PROTOCOL_ISCSI:\n"
				buf += "	default:\n"
				buf += "		tid = iscsi_parse_pr_out_transport_id(se_tpg, buf, out_tid_len,\n"
				buf += "					port_nexus_ptr);\n"
			buf += "	}\n\n"
			buf += "	return tid;\n"
			buf += "}\n\n"
			bufi += "char *" + fabric_mod_name + "_parse_pr_out_transport_id(struct se_portal_group *,\n"
			bufi += "			const char *, u32 *, char **);\n"
		if re.search('alloc_fabric_acl\)\(', fo):
			buf += "struct se_node_acl *" + fabric_mod_name + "_alloc_fabric_acl(struct se_portal_group *se_tpg)\n"
			buf += "{\n"
			buf += "	struct " + fabric_mod_name + "_nacl *nacl;\n\n"
			buf += "	nacl = kzalloc(sizeof(struct " + fabric_mod_name + "_nacl), GFP_KERNEL);\n"
			buf += "	if (!(nacl)) {\n"
			buf += "		printk(KERN_ERR \"Unable to alocate struct " + fabric_mod_name + "_nacl\\n\");\n"
			buf += "		return NULL;\n"
			buf += "	}\n\n"
			buf += "	return &nacl->se_node_acl;\n"
			buf += "}\n\n"
			bufi += "struct se_node_acl *" + fabric_mod_name + "_alloc_fabric_acl(struct se_portal_group *);\n"
		if re.search('release_fabric_acl\)\(', fo):
			buf += "void " + fabric_mod_name + "_release_fabric_acl(\n"
			buf += "	struct se_portal_group *se_tpg,\n"
			buf += "	struct se_node_acl *se_nacl)\n"
			buf += "{\n"
			buf += "	struct " + fabric_mod_name + "_nacl *nacl = container_of(se_nacl,\n"
			buf += "			struct " + fabric_mod_name + "_nacl, se_node_acl);\n"
			buf += "	kfree(nacl);\n"
			buf += "}\n\n"
			bufi += "void " + fabric_mod_name + "_release_fabric_acl(struct se_portal_group *,\n"
			bufi += "			struct se_node_acl *);\n"
		if re.search('tpg_get_inst_index\)\(', fo):
			buf += "u32 " + fabric_mod_name + "_tpg_get_inst_index(struct se_portal_group *se_tpg)\n"
			buf += "{\n"
			buf += "	return 1;\n"
			buf += "}\n\n"
			bufi += "u32 " + fabric_mod_name + "_tpg_get_inst_index(struct se_portal_group *);\n"
		if re.search('release_cmd_to_pool', fo):
			buf += "void " + fabric_mod_name + "_release_cmd(struct se_cmd *se_cmd)\n"
			buf += "{\n"
			buf += "	return;\n"
			buf += "}\n\n"
			bufi += "void " + fabric_mod_name + "_release_cmd(struct se_cmd *);\n"
		if re.search('shutdown_session\)\(', fo):
			buf += "int " + fabric_mod_name + "_shutdown_session(struct se_session *se_sess)\n"
			buf += "{\n"
			buf += "	return 0;\n"
			buf += "}\n\n"
			bufi += "int " + fabric_mod_name + "_shutdown_session(struct se_session *);\n"
		if re.search('close_session\)\(', fo):
			buf += "void " + fabric_mod_name + "_close_session(struct se_session *se_sess)\n"
			buf += "{\n"
			buf += "	return;\n"
			buf += "}\n\n"
			bufi += "void " + fabric_mod_name + "_close_session(struct se_session *);\n"
		if re.search('stop_session\)\(', fo):
			buf += "void " + fabric_mod_name + "_stop_session(struct se_session *se_sess, int sess_sleep , int conn_sleep)\n"
			buf += "{\n"
			buf += "	return;\n"
			buf += "}\n\n"
			bufi += "void " + fabric_mod_name + "_stop_session(struct se_session *, int, int);\n"
		if re.search('fall_back_to_erl0\)\(', fo):
			buf += "void " + fabric_mod_name + "_reset_nexus(struct se_session *se_sess)\n"
			buf += "{\n"
			buf += "	return;\n"
			buf += "}\n\n"
			bufi += "void " + fabric_mod_name + "_reset_nexus(struct se_session *);\n"
		if re.search('sess_logged_in\)\(', fo):
			buf += "int " + fabric_mod_name + "_sess_logged_in(struct se_session *se_sess)\n"
			buf += "{\n"
			buf += "	return 0;\n"
			buf += "}\n\n"
			bufi += "int " + fabric_mod_name + "_sess_logged_in(struct se_session *);\n"
		if re.search('sess_get_index\)\(', fo):
			buf += "u32 " + fabric_mod_name + "_sess_get_index(struct se_session *se_sess)\n"
			buf += "{\n"
			buf += "	return 0;\n"
			buf += "}\n\n"
			bufi += "u32 " + fabric_mod_name + "_sess_get_index(struct se_session *);\n"
		if re.search('write_pending\)\(', fo):
			buf += "int " + fabric_mod_name + "_write_pending(struct se_cmd *se_cmd)\n"
			buf += "{\n"
			buf += "	return 0;\n"
			buf += "}\n\n"
			bufi += "int " + fabric_mod_name + "_write_pending(struct se_cmd *);\n"
		if re.search('write_pending_status\)\(', fo):
			buf += "int " + fabric_mod_name + "_write_pending_status(struct se_cmd *se_cmd)\n"
			buf += "{\n"
			buf += "	return 0;\n"
			buf += "}\n\n"
			bufi += "int " + fabric_mod_name + "_write_pending_status(struct se_cmd *);\n"
		if re.search('set_default_node_attributes\)\(', fo):
			buf += "void " + fabric_mod_name + "_set_default_node_attrs(struct se_node_acl *nacl)\n"
			buf += "{\n"
			buf += "	return;\n"
			buf += "}\n\n"
			bufi += "void " + fabric_mod_name + "_set_default_node_attrs(struct se_node_acl *);\n"
		if re.search('get_task_tag\)\(', fo):
			buf += "u32 " + fabric_mod_name + "_get_task_tag(struct se_cmd *se_cmd)\n"
			buf += "{\n"
			buf += "	return 0;\n"
			buf += "}\n\n"
			bufi += "u32 " + fabric_mod_name + "_get_task_tag(struct se_cmd *);\n"
		if re.search('get_cmd_state\)\(', fo):
			buf += "int " + fabric_mod_name + "_get_cmd_state(struct se_cmd *se_cmd)\n"
			buf += "{\n"
			buf += "	return 0;\n"
			buf += "}\n\n"
			bufi += "int " + fabric_mod_name + "_get_cmd_state(struct se_cmd *);\n"
		if re.search('new_cmd_failure\)\(', fo):
			buf += "void " + fabric_mod_name + "_new_cmd_failure(struct se_cmd *se_cmd)\n"
			buf += "{\n"
			buf += "	return;\n"
			buf += "}\n\n"
			bufi += "void " + fabric_mod_name + "_new_cmd_failure(struct se_cmd *);\n"
		if re.search('queue_data_in\)\(', fo):
			buf += "int " + fabric_mod_name + "_queue_data_in(struct se_cmd *se_cmd)\n"
			buf += "{\n"
			buf += "	return 0;\n"
			buf += "}\n\n"
			bufi += "int " + fabric_mod_name + "_queue_data_in(struct se_cmd *);\n"
		if re.search('queue_status\)\(', fo):
			buf += "int " + fabric_mod_name + "_queue_status(struct se_cmd *se_cmd)\n"
			buf += "{\n"
			buf += "	return 0;\n"
			buf += "}\n\n"
			bufi += "int " + fabric_mod_name + "_queue_status(struct se_cmd *);\n"
		if re.search('queue_tm_rsp\)\(', fo):
			buf += "int " + fabric_mod_name + "_queue_tm_rsp(struct se_cmd *se_cmd)\n"
			buf += "{\n"
			buf += "	return 0;\n"
			buf += "}\n\n"
			bufi += "int " + fabric_mod_name + "_queue_tm_rsp(struct se_cmd *);\n"
		if re.search('get_fabric_sense_len\)\(', fo):
			buf += "u16 " + fabric_mod_name + "_get_fabric_sense_len(void)\n"
			buf += "{\n"
			buf += "	return 0;\n"
			buf += "}\n\n"
			bufi += "u16 " + fabric_mod_name + "_get_fabric_sense_len(void);\n"
		if re.search('set_fabric_sense_len\)\(', fo):
			buf += "u16 " + fabric_mod_name + "_set_fabric_sense_len(struct se_cmd *se_cmd, u32 sense_length)\n"
			buf += "{\n"
			buf += "	return 0;\n"
			buf += "}\n\n"
			bufi += "u16 " + fabric_mod_name + "_set_fabric_sense_len(struct se_cmd *, u32);\n"
		if re.search('is_state_remove\)\(', fo):
			buf += "int " + fabric_mod_name + "_is_state_remove(struct se_cmd *se_cmd)\n"
			buf += "{\n"
			buf += "	return 0;\n"
			buf += "}\n\n"
			bufi += "int " + fabric_mod_name + "_is_state_remove(struct se_cmd *);\n"
		if re.search('pack_lun\)\(', fo):
			buf += "u64 " + fabric_mod_name + "_pack_lun(unsigned int lun)\n"
			buf += "{\n"
			buf += "	WARN_ON(lun >= 256);\n"
			buf += "	/* Caller wants this byte-swapped */\n"
			buf += "	return cpu_to_le64((lun & 0xff) << 8);\n"
			buf += "}\n\n"
			bufi += "u64 " + fabric_mod_name + "_pack_lun(unsigned int);\n"
	ret = p.write(buf)
	# NOTE(review): write() returns None in Python 2; dead error checks.
	if ret:
		tcm_mod_err("Unable to write f: " + f)
	p.close()
	ret = pi.write(bufi)
	if ret:
		tcm_mod_err("Unable to write fi: " + fi)
	pi.close()
	return
def tcm_mod_build_kbuild(fabric_mod_dir_var, fabric_mod_name):
    """Write the new fabric module's Makefile.

    The Makefile links <name>_fabric.o and <name>_configfs.o into a single
    <name>.o kernel module, gated on CONFIG_<NAME>.
    """
    f = fabric_mod_dir_var + "/Makefile"
    print("Writing file: " + f)
    buf = fabric_mod_name + "-objs := " + fabric_mod_name + "_fabric.o \\\n"
    buf += "               " + fabric_mod_name + "_configfs.o\n"
    buf += "obj-$(CONFIG_" + fabric_mod_name.upper() + ") += " + fabric_mod_name + ".o\n"
    # The original checked `if not p:` after open() and the return value of
    # write(); both branches were dead code (open() raises on failure, and
    # file.write() returns None on Python 2).  Let IOError propagate and use
    # a context manager so the handle is always closed.
    with open(f, 'w') as p:
        p.write(buf)
    return
def tcm_mod_build_kconfig(fabric_mod_dir_var, fabric_mod_name):
    """Write the new fabric module's Kconfig entry.

    Emits a tristate CONFIG_<NAME> option that depends on TARGET_CORE and
    CONFIGFS_FS, defaulting to 'n'.
    """
    f = fabric_mod_dir_var + "/Kconfig"
    print("Writing file: " + f)
    buf = "config " + fabric_mod_name.upper() + "\n"
    buf += "	tristate \"" + fabric_mod_name.upper() + " fabric module\"\n"
    buf += "	depends on TARGET_CORE && CONFIGFS_FS\n"
    buf += "	default n\n"
    buf += "	---help---\n"
    buf += "	Say Y here to enable the " + fabric_mod_name.upper() + " fabric module\n"
    # As in tcm_mod_build_kbuild(): the `if not p:` and `if ret:` checks in
    # the original were dead code; rely on exceptions and close via `with`.
    with open(f, 'w') as p:
        p.write(buf)
    return
def tcm_mod_add_kbuild(tcm_dir, fabric_mod_name):
    """Append the new module's obj-$(CONFIG_...) line to drivers/target/Makefile."""
    buf = "obj-$(CONFIG_" + fabric_mod_name.upper() + ") += " + fabric_mod_name.lower() + "/\n"
    kbuild = tcm_dir + "/drivers/target/Makefile"
    # `with` guarantees the handle is flushed and closed even if write() fails.
    with open(kbuild, 'a') as f:
        f.write(buf)
    return
def tcm_mod_add_kconfig(tcm_dir, fabric_mod_name):
    """Append a 'source' line for the new module to drivers/target/Kconfig."""
    buf = "source \"drivers/target/" + fabric_mod_name.lower() + "/Kconfig\"\n"
    kconfig = tcm_dir + "/drivers/target/Kconfig"
    # `with` guarantees the handle is flushed and closed even if write() fails.
    with open(kconfig, 'a') as f:
        f.write(buf)
    return
def main(modname, proto_ident):
    """Generate a skeleton TCM fabric module named *modname*.

    proto_ident selects the transport protocol ("FC", "SAS" or "iSCSI").
    Creates the module directory under drivers/target/, emits the fabric,
    configfs, Makefile and Kconfig files, then interactively offers to wire
    the module into the top-level target Makefile/Kconfig.
    NOTE: Python 2 script (print statements, raw_input).
    """
    # proto_ident = "FC"
    # proto_ident = "SAS"
    # proto_ident = "iSCSI"
    # Assumes the script is run from its own subdirectory two levels below
    # the kernel tree root -- TODO confirm against in-tree location.
    tcm_dir = os.getcwd();
    tcm_dir += "/../../"
    print "tcm_dir: " + tcm_dir
    fabric_mod_name = modname
    fabric_mod_dir = tcm_dir + "drivers/target/" + fabric_mod_name
    print "Set fabric_mod_name: " + fabric_mod_name
    print "Set fabric_mod_dir: " + fabric_mod_dir
    print "Using proto_ident: " + proto_ident
    if proto_ident != "FC" and proto_ident != "SAS" and proto_ident != "iSCSI":
        print "Unsupported proto_ident: " + proto_ident
        sys.exit(1)
    ret = tcm_mod_create_module_subdir(fabric_mod_dir)
    if ret:
        print "tcm_mod_create_module_subdir() failed because module already exists!"
        sys.exit(1)
    # Emit all generated sources for the new module.
    tcm_mod_build_base_includes(proto_ident, fabric_mod_dir, fabric_mod_name)
    tcm_mod_scan_fabric_ops(tcm_dir)
    tcm_mod_dump_fabric_ops(proto_ident, fabric_mod_dir, fabric_mod_name)
    tcm_mod_build_configfs(proto_ident, fabric_mod_dir, fabric_mod_name)
    tcm_mod_build_kbuild(fabric_mod_dir, fabric_mod_name)
    tcm_mod_build_kconfig(fabric_mod_dir, fabric_mod_name)
    # NOTE(review): prompt text is missing a space before "to drivers/..."
    input = raw_input("Would you like to add " + fabric_mod_name + "to drivers/target/Makefile..? [yes,no]: ")
    if input == "yes" or input == "y":
        tcm_mod_add_kbuild(tcm_dir, fabric_mod_name)
    input = raw_input("Would you like to add " + fabric_mod_name + "to drivers/target/Kconfig..? [yes,no]: ")
    if input == "yes" or input == "y":
        tcm_mod_add_kconfig(tcm_dir, fabric_mod_name)
    return
# Command-line interface: both -m/--modulename and -p/--protoident are
# required; optparse has no "required option" support, so presence is
# checked manually below.
parser = optparse.OptionParser()
parser.add_option('-m', '--modulename', help='Module name', dest='modname',
        action='store', nargs=1, type='string')
parser.add_option('-p', '--protoident', help='Protocol Ident', dest='protoident',
        action='store', nargs=1, type='string')

(opts, args) = parser.parse_args()

mandatories = ['modname', 'protoident']
for m in mandatories:
    if not opts.__dict__[m]:
        print "mandatory option is missing\n"
        parser.print_help()
        exit(-1)

if __name__ == "__main__":
    main(str(opts.modname), opts.protoident)
| gpl-2.0 |
danilovaz/ogitrev | node_modules/gulp-sass/node_modules/node-sass/node_modules/node-gyp/gyp/pylib/gyp/generator/gypsh.py | 2779 | 1665 | # Copyright (c) 2011 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""gypsh output module
gypsh is a GYP shell. It's not really a generator per se. All it does is
fire up an interactive Python session with a few local variables set to the
variables passed to the generator. Like gypd, it's intended as a debugging
aid, to facilitate the exploration of .gyp structures after being processed
by the input module.
The expected usage is "gyp -f gypsh -D OS=desired_os".
"""
import code
import sys
# All of this stuff about generator variables was lovingly ripped from gypd.py.
# That module has a much better description of what's going on and why.
# Generator variables that gypsh leaves unexpanded: each one maps to its own
# '<(NAME)' reference so the values survive processing verbatim (same trick
# as the gypd generator).
_generator_identity_variables = [
  'EXECUTABLE_PREFIX',
  'EXECUTABLE_SUFFIX',
  'INTERMEDIATE_DIR',
  'PRODUCT_DIR',
  'RULE_INPUT_ROOT',
  'RULE_INPUT_DIRNAME',
  'RULE_INPUT_EXT',
  'RULE_INPUT_NAME',
  'RULE_INPUT_PATH',
  'SHARED_INTERMEDIATE_DIR',
]

# Populated below; gyp reads this mapping from every generator module.
generator_default_variables = {
}
for v in _generator_identity_variables:
  generator_default_variables[v] = '<(%s)' % v
def GenerateOutput(target_list, target_dicts, data, params):
  """Drop into an interactive Python shell with the processed gyp data.

  Exposes target_list, target_dicts and data as local variables in the
  shell.  'params' is unused but required by the generator interface.
  """
  # Renamed from `locals` so the builtin of the same name is not shadowed.
  shell_locals = {
        'target_list': target_list,
        'target_dicts': target_dicts,
        'data': data,
      }

  # Use a banner that looks like the stock Python one and like what
  # code.interact uses by default, but tack on something to indicate what
  # locals are available, and identify gypsh.
  banner = 'Python %s on %s\nlocals.keys() = %s\ngypsh' % \
           (sys.version, sys.platform, repr(sorted(shell_locals.keys())))

  code.interact(banner, local=shell_locals)
| mit |
jacobian-archive/django-storymarket | setup.py | 1 | 1081 | import os
import sys
from setuptools import setup, find_packages
def read(fname):
    """Return the contents of *fname*, resolved relative to this file's directory.

    Uses a context manager so the file handle is closed deterministically
    (the original leaked the handle until garbage collection).
    """
    with open(os.path.join(os.path.dirname(__file__), fname)) as f:
        return f.read()
# Package metadata for django-storymarket; consumed by setuptools.
setup(
    name = "django-storymarket",
    version = "1.0a3",
    description = "Sync Django models to Storymarket.",
    long_description = read('README.rst'),
    url = 'http://packages.python.org/django-storymarket',
    license = 'BSD',
    author = 'Jacob Kaplan-Moss',
    author_email = 'jacob@jacobian.org',
    # Ship everything except the test suite and the example project.
    packages = find_packages(exclude=['tests', 'example']),
    classifiers = [
        'Development Status :: 4 - Beta',
        'Environment :: Console',
        'Framework :: Django',
        'Intended Audience :: Developers',
        'Intended Audience :: Information Technology',
        'License :: OSI Approved :: BSD License',
        'Operating System :: OS Independent',
        'Programming Language :: Python',
    ],
    install_requires = ['django >= 1.2', 'python-storymarket'],
    # Test-only dependencies; run via `setup.py test`.
    tests_require = ["mock", "nose", "django-nose"],
    test_suite = "django_storymarket.runtests.runtests",
)
gangadharkadam/frappecontribution | frappe/desk/form/assign_to.py | 26 | 4251 | # Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors
# MIT License. See license.txt
from __future__ import unicode_literals
"""assign/unassign to ToDo"""
import frappe
from frappe import _
from frappe.desk.form.load import get_docinfo
def get(args=None):
	"""Return up to five most recently modified open assignments for a document.

	Falls back to the current request's form_dict when no args are given;
	also refreshes the document's docinfo as a side effect.
	"""
	args = args or frappe.local.form_dict
	doc = frappe.get_doc(args.get("doctype"), args.get("name"))
	get_docinfo(doc)
	query = """select owner, description from `tabToDo`
		where reference_type=%(doctype)s and reference_name=%(name)s and status="Open"
		order by modified desc limit 5"""
	return frappe.db.sql(query, args, as_dict=True)
@frappe.whitelist()
def add(args=None):
	"""Assign a document to a user by creating an open ToDo.

	args = {
		"assign_to":   user to assign to,
		"doctype":     reference doctype,
		"name":        reference document name,
		"description": optional note shown to the assignee
	}
	Raises via msgprint if an open assignment for that user already exists.
	"""
	if not args:
		args = frappe.local.form_dict

	if frappe.db.sql("""select owner from `tabToDo`
		where reference_type=%(doctype)s and reference_name=%(name)s and status="Open"
		and owner=%(assign_to)s""", args):
		frappe.msgprint(_("Already in user's To Do list"), raise_exception=True)
		# NOTE(review): unreachable when msgprint raises (raise_exception=True)
		return
	else:
		from frappe.utils import nowdate

		d = frappe.get_doc({
			"doctype":"ToDo",
			"owner": args['assign_to'],
			"reference_type": args['doctype'],
			"reference_name": args['name'],
			"description": args.get('description'),
			"priority": args.get("priority", "Medium"),
			"status": "Open",
			"date": args.get('date', nowdate()),
			"assigned_by": args.get('assigned_by', frappe.session.user),
		}).insert(ignore_permissions=True)

		# set assigned_to if field exists
		if frappe.get_meta(args['doctype']).get_field("assigned_to"):
			frappe.db.set_value(args['doctype'], args['name'], "assigned_to", args['assign_to'])

		# notify the assignee unless the caller suppressed notifications
		if not args.get("no_notification"):
			notify_assignment(d.assigned_by, d.owner, d.reference_type, d.reference_name, action='ASSIGN', description=args.get("description"), notify=args.get('notify'))

	# Return the refreshed assignment list for the document.
	return get(args)
@frappe.whitelist()
def remove(doctype, name, assign_to):
	"""Close the open ToDo assigning (doctype, name) to assign_to.

	Notifies the user who made the assignment, clears the document's
	assigned_to field when present, and returns the updated assignment list.
	"""
	try:
		todo = frappe.db.get_value("ToDo", {"reference_type":doctype, "reference_name":name, "owner":assign_to, "status":"Open"})
		if todo:
			todo = frappe.get_doc("ToDo", todo)
			todo.status = "Closed"
			todo.save(ignore_permissions=True)

			notify_assignment(todo.assigned_by, todo.owner, todo.reference_type, todo.reference_name)
	except frappe.DoesNotExistError:
		# The ToDo vanished between lookup and load; nothing to close.
		pass

	# clear assigned_to if field exists
	if frappe.get_meta(doctype).get_field("assigned_to"):
		frappe.db.set_value(doctype, name, "assigned_to", None)

	return get({"doctype": doctype, "name": name})
def clear(doctype, name):
	"""Close every open assignment (ToDo) against the given document."""
	owners = frappe.db.sql_list("""select owner from `tabToDo`
		where reference_type=%(doctype)s and reference_name=%(name)s""", locals())
	for owner in owners:
		remove(doctype, name, owner)
def notify_assignment(assigned_by, owner, doc_type, doc_name, action='CLOSE',
	description=None, notify=0):
	"""
	Notify the relevant user that an assignment changed.

	action='CLOSE' messages the assigner that the task was closed;
	any other action messages the new owner about the fresh assignment.
	Self-assignments produce no message.
	"""
	if not (assigned_by and owner and doc_type and doc_name): return

	# self assignment / closing - no message
	if assigned_by==owner:
		return

	from frappe.boot import get_fullnames
	user_info = get_fullnames()

	# Build a clickable link to the assigned document.
	from frappe.utils import get_url_to_form
	assignment = get_url_to_form(doc_type, doc_name, label="%s: %s" % (doc_type, doc_name))

	owner_name = user_info.get(owner, {}).get('fullname')
	user_name = user_info.get(frappe.session.get('user'), {}).get('fullname')
	if action=='CLOSE':
		if owner == frappe.session.get('user'):
			# Owner closed their own task: tell the assigner it is done.
			arg = {
				'contact': assigned_by,
				'txt': _("The task {0}, that you assigned to {1}, has been closed.").format(assignment,
					owner_name)
			}
		else:
			# A third party closed it: name who closed it.
			arg = {
				'contact': assigned_by,
				'txt': _("The task {0}, that you assigned to {1}, has been closed by {2}.").format(assignment,
					owner_name, user_name)
			}
	else:
		# New assignment: notify the owner, optionally by email (notify flag).
		description_html = "<p>{0}</p>".format(description)
		arg = {
			'contact': owner,
			'txt': _("A new task, {0}, has been assigned to you by {1}. {2}").format(assignment,
				user_name, description_html),
			'notify': notify
		}
	arg["parenttype"] = "Assignment"
	from frappe.desk.page.messages import messages
	messages.post(**arg)
| mit |
bukun/pycsw | pycsw/plugins/outputschemas/fgdc.py | 2 | 7650 | # -*- coding: utf-8 -*-
# =================================================================
#
# Authors: Tom Kralidis <tomkralidis@gmail.com>
#
# Copyright (c) 2015 Tom Kralidis
#
# Permission is hereby granted, free of charge, to any person
# obtaining a copy of this software and associated documentation
# files (the "Software"), to deal in the Software without
# restriction, including without limitation the rights to use,
# copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following
# conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
# OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
# HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
# OTHER DEALINGS IN THE SOFTWARE.
#
# =================================================================
from pycsw.core import util
from pycsw.core.etree import etree
# CSW output-schema namespace for FGDC CSDGM records.
#NAMESPACE = 'http://www.fgdc.gov/metadata/csdgm'
NAMESPACE = 'http://www.opengis.net/cat/csw/csdgm'
NAMESPACES = {'fgdc': NAMESPACE}

# Maps pycsw's core queryable names to FGDC CSDGM element paths; used both
# for query translation and when serializing records in write_record().
XPATH_MAPPINGS = {
    'pycsw:Identifier': 'idinfo/datasetid',
    'pycsw:Title': 'idinfo/citation/citeinfo/title',
    'pycsw:Creator': 'idinfo/citation/citeinfo/origin',
    'pycsw:Publisher': 'idinfo/citation/citeinfo/publinfo/publish',
    'pycsw:Abstract': 'idinfo/descript/abstract',
    'pycsw:Format': 'idinfo/citation/citeinfo/geoform',
    'pycsw:PublicationDate': 'idinfo/citation/citeinfo/pubdate',
    'pycsw:Keywords': 'idinfo/keywords/theme/themekey',
    'pycsw:TempExtent_begin': 'idinfo/timeperd/timeinfo/rngdates/begdate',
    'pycsw:TempExtent_end': 'idinfo/timeperd/timeinfo/rngdates/enddate',
    'pycsw:Contributor': 'idinfo/datacred',
    'pycsw:AccessConstraints': 'idinfo/accconst',
    'pycsw:Modified': 'metainfo/metd',
    'pycsw:Type': 'spdoinfo/direct',
    'pycsw:Source': 'lineage/srcinfo/srccite/citeinfo/title',
    'pycsw:Relation': 'idinfo/citation/citeinfo/onlink',
}
def write_record(recobj, esn, context, url=None):
    """Serialize a catalogue record as an FGDC CSDGM <metadata> element.

    For esn='full' on a native FGDC record, the stored XML is returned
    verbatim; otherwise the element tree is rebuilt from the core queryables.

    BUG FIX: the temporal range container was created as a second 'timeinfo'
    element; per the CSDGM schema (and XPATH_MAPPINGS above) it must be
    'rngdates' under timeperd/timeinfo.
    """
    typename = util.getqattr(recobj, context.md_core_model['mappings']['pycsw:Typename'])
    if esn == 'full' and typename == 'fgdc:metadata':
        # dump record as is and exit
        return etree.fromstring(util.getqattr(recobj, context.md_core_model['mappings']['pycsw:XML']), context.parser)

    node = etree.Element('metadata')
    node.attrib[util.nspath_eval('xsi:noNamespaceSchemaLocation', context.namespaces)] = \
        'http://www.fgdc.gov/metadata/fgdc-std-001-1998.xsd'

    idinfo = etree.SubElement(node, 'idinfo')
    # identifier
    etree.SubElement(idinfo, 'datasetid').text = util.getqattr(recobj, context.md_core_model['mappings']['pycsw:Identifier'])

    citation = etree.SubElement(idinfo, 'citation')
    citeinfo = etree.SubElement(citation, 'citeinfo')

    # title
    val = util.getqattr(recobj, context.md_core_model['mappings']['pycsw:Title'])
    etree.SubElement(citeinfo, 'title').text = val

    # publisher
    publinfo = etree.SubElement(citeinfo, 'publinfo')
    val = util.getqattr(recobj, context.md_core_model['mappings']['pycsw:Publisher']) or ''
    etree.SubElement(publinfo, 'publish').text = val

    # origin
    val = util.getqattr(recobj, context.md_core_model['mappings']['pycsw:Creator']) or ''
    etree.SubElement(citeinfo, 'origin').text = val

    # keywords (stored comma-delimited)
    val = util.getqattr(recobj, context.md_core_model['mappings']['pycsw:Keywords'])
    if val:
        keywords = etree.SubElement(idinfo, 'keywords')
        theme = etree.SubElement(keywords, 'theme')
        for v in val.split(','):
            etree.SubElement(theme, 'themekey').text = v

    # accessconstraints
    val = util.getqattr(recobj, context.md_core_model['mappings']['pycsw:AccessConstraints']) or ''
    etree.SubElement(idinfo, 'accconst').text = val

    # abstract
    descript = etree.SubElement(idinfo, 'descript')
    val = util.getqattr(recobj, context.md_core_model['mappings']['pycsw:Abstract']) or ''
    etree.SubElement(descript, 'abstract').text = val

    # temporal extent (only when both endpoints are present)
    datebegin = util.getqattr(recobj, context.md_core_model['mappings']['pycsw:TempExtent_begin'])
    dateend = util.getqattr(recobj, context.md_core_model['mappings']['pycsw:TempExtent_end'])
    if all([datebegin, dateend]):
        timeperd = etree.SubElement(idinfo, 'timeperd')
        timeinfo = etree.SubElement(timeperd, 'timeinfo')
        rngdates = etree.SubElement(timeinfo, 'rngdates')  # was wrongly 'timeinfo'
        etree.SubElement(rngdates, 'begdate').text = datebegin
        etree.SubElement(rngdates, 'enddate').text = dateend

    # bbox extent
    val = util.getqattr(recobj, context.md_core_model['mappings']['pycsw:BoundingBox'])
    bboxel = write_extent(val)
    if bboxel is not None:
        idinfo.append(bboxel)

    # contributor
    val = util.getqattr(recobj, context.md_core_model['mappings']['pycsw:Contributor']) or ''
    etree.SubElement(idinfo, 'datacred').text = val

    # direct
    spdoinfo = etree.SubElement(idinfo, 'spdoinfo')
    val = util.getqattr(recobj, context.md_core_model['mappings']['pycsw:Type']) or ''
    etree.SubElement(spdoinfo, 'direct').text = val

    # formname (mirrored into citeinfo/geoform)
    distinfo = etree.SubElement(node, 'distinfo')
    stdorder = etree.SubElement(distinfo, 'stdorder')
    digform = etree.SubElement(stdorder, 'digform')
    digtinfo = etree.SubElement(digform, 'digtinfo')
    val = util.getqattr(recobj, context.md_core_model['mappings']['pycsw:Format']) or ''
    etree.SubElement(digtinfo, 'formname').text = val
    etree.SubElement(citeinfo, 'geoform').text = val

    # source
    lineage = etree.SubElement(node, 'lineage')
    srcinfo = etree.SubElement(lineage, 'srcinfo')
    srccite = etree.SubElement(srcinfo, 'srccite')
    sciteinfo = etree.SubElement(srccite, 'citeinfo')
    val = util.getqattr(recobj, context.md_core_model['mappings']['pycsw:Source']) or ''
    etree.SubElement(sciteinfo, 'title').text = val

    val = util.getqattr(recobj, context.md_core_model['mappings']['pycsw:Relation']) or ''
    etree.SubElement(citeinfo, 'onlink').text = val

    # links are stored as '^'-separated records of ','-separated fields:
    # name,description,protocol,url
    rlinks = util.getqattr(recobj, context.md_core_model['mappings']['pycsw:Links'])
    if rlinks:
        for link in rlinks.split('^'):
            linkset = link.split(',')
            etree.SubElement(citeinfo, 'onlink', type=linkset[2]).text = linkset[-1]

    # metd
    metainfo = etree.SubElement(node, 'metainfo')
    val = util.getqattr(recobj, context.md_core_model['mappings']['pycsw:Modified']) or ''
    etree.SubElement(metainfo, 'metd').text = val

    return node
def write_extent(bbox):
    """Generate an FGDC <spdom> bounding extent from a WKT bbox, or None.

    Returns None when bbox is None or cannot be parsed as a geometry.
    """
    if bbox is not None:
        try:
            bbox2 = util.wkt2geom(bbox)
        except Exception:
            # Narrowed from a bare `except:` which also swallowed
            # KeyboardInterrupt/SystemExit; malformed WKT yields no extent.
            return None
        spdom = etree.Element('spdom')
        bounding = etree.SubElement(spdom, 'bounding')
        # wkt2geom returns (minx, miny, maxx, maxy)
        etree.SubElement(bounding, 'westbc').text = str(bbox2[0])
        etree.SubElement(bounding, 'eastbc').text = str(bbox2[2])
        etree.SubElement(bounding, 'northbc').text = str(bbox2[3])
        etree.SubElement(bounding, 'southbc').text = str(bbox2[1])
        return spdom
    return None
| mit |
truthcoin/blocksize-market | qa/rpc-tests/rawtransactions.py | 10 | 12828 | #!/usr/bin/env python2
# Copyright (c) 2014 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
#
# Test re-org scenarios with a mempool that contains transactions
# that spend (directly or indirectly) coinbase transactions.
#
from test_framework import BitcoinTestFramework
from util import *
from pprint import pprint
from time import sleep
# Create one-input, one-output, no-fee transaction:
class RawTransactionsTest(BitcoinTestFramework):
    """Integration tests for fundrawtransaction against three bitcoind nodes.

    Node 2 is seeded with exactly three spendable coins (1.5, 1.0 and 5.0
    BTC) so each scenario can assert precisely which coins the coin
    selector picks.  NOTE: Python 2 only (`has_key`, `except X,e` syntax).
    """

    def setup_chain(self):
        """Start from a clean 3-node regtest chain."""
        print("Initializing test directory "+self.options.tmpdir)
        initialize_chain_clean(self.options.tmpdir, 3)

    def setup_network(self, split=False):
        """Launch three fully-connected nodes (no network split)."""
        self.nodes = start_nodes(3, self.options.tmpdir)

        # connect to a local machine for debugging
        # url = "http://bitcoinrpc:DP6DvqZtqXarpeNWyN3LZTFchCCyCUuHwNF7E8pX99x1@%s:%d" % ('127.0.0.1', 18332)
        # proxy = AuthServiceProxy(url)
        # proxy.url = url # store URL on proxy for info
        # self.nodes.append(proxy)

        connect_nodes_bi(self.nodes,0,1)
        connect_nodes_bi(self.nodes,1,2)
        connect_nodes_bi(self.nodes,0,2)

        self.is_network_split=False
        self.sync_all()

    def run_test(self):
        """Exercise fundrawtransaction coin selection, change, errors and watch-only funding."""
        # Mature coinbases: node 0 mines past node 2's block so node 0 funds the fixtures.
        self.nodes[2].setgenerate(True, 1)
        self.nodes[0].setgenerate(True, 121)
        self.sync_all()

        # Watch-only fixture: node 1 watches (but cannot spend) this address.
        watchonly_address = self.nodes[0].getnewaddress()
        watchonly_amount = 200
        self.nodes[1].importaddress(watchonly_address, "", True)
        self.nodes[0].sendtoaddress(watchonly_address, watchonly_amount)

        # Give node 2 its three known coins used by every scenario below.
        self.nodes[0].sendtoaddress(self.nodes[2].getnewaddress(),1.5);
        self.nodes[0].sendtoaddress(self.nodes[2].getnewaddress(),1.0);
        self.nodes[0].sendtoaddress(self.nodes[2].getnewaddress(),5.0);
        self.sync_all()
        self.nodes[0].setgenerate(True, 1)
        self.sync_all()

        ###############
        # simple test #
        ###############
        inputs = [ ]
        outputs = { self.nodes[0].getnewaddress() : 1.0 }
        rawtx = self.nodes[2].createrawtransaction(inputs, outputs)
        dec_tx = self.nodes[2].decoderawtransaction(rawtx)
        rawtxfund = self.nodes[2].fundrawtransaction(rawtx)
        fee = rawtxfund['fee']
        dec_tx = self.nodes[2].decoderawtransaction(rawtxfund['hex'])
        totalOut = 0
        for out in dec_tx['vout']:
            totalOut += out['value']
        assert_equal(len(dec_tx['vin']), 1) #one vin coin
        assert_equal(fee*0.00000001+float(totalOut), 1.5) #the 1.5BTC coin must be taken

        ##############################
        # simple test with two coins #
        ##############################
        inputs = [ ]
        outputs = { self.nodes[0].getnewaddress() : 2.2 }
        rawtx = self.nodes[2].createrawtransaction(inputs, outputs)
        dec_tx = self.nodes[2].decoderawtransaction(rawtx)
        rawtxfund = self.nodes[2].fundrawtransaction(rawtx)
        fee = rawtxfund['fee']
        dec_tx = self.nodes[2].decoderawtransaction(rawtxfund['hex'])
        totalOut = 0
        for out in dec_tx['vout']:
            totalOut += out['value']
        assert_equal(len(dec_tx['vin']), 2) #two vin coins
        assert_equal(fee*0.00000001+float(totalOut), 2.5) #the 1.5BTC+1.0BTC coins must have be taken

        ##############################
        # simple test with two coins #
        ##############################
        inputs = [ ]
        outputs = { self.nodes[0].getnewaddress() : 2.6 }
        rawtx = self.nodes[2].createrawtransaction(inputs, outputs)
        dec_tx = self.nodes[2].decoderawtransaction(rawtx)
        rawtxfund = self.nodes[2].fundrawtransaction(rawtx)
        fee = rawtxfund['fee']
        dec_tx = self.nodes[2].decoderawtransaction(rawtxfund['hex'])
        totalOut = 0
        for out in dec_tx['vout']:
            totalOut += out['value']
        assert_equal(len(dec_tx['vin']), 1) #one vin coin
        assert_equal(fee*0.00000001+float(totalOut), 5.0) #the 5.0BTC coin must have be taken

        ################################
        # simple test with two outputs #
        ################################
        inputs = [ ]
        outputs = { self.nodes[0].getnewaddress() : 2.6, self.nodes[1].getnewaddress() : 2.5 }
        rawtx = self.nodes[2].createrawtransaction(inputs, outputs)
        dec_tx = self.nodes[2].decoderawtransaction(rawtx)
        rawtxfund = self.nodes[2].fundrawtransaction(rawtx)
        fee = rawtxfund['fee']
        dec_tx = self.nodes[2].decoderawtransaction(rawtxfund['hex'])
        totalOut = 0
        for out in dec_tx['vout']:
            totalOut += out['value']
        assert_equal(len(dec_tx['vin']), 2) #two vin coins
        assert_equal(fee*0.00000001+float(totalOut), 6.0) #the 5.0BTC + 1.0BTC coins must have be taken

        #########################################################################
        # test a fundrawtransaction with a VIN greater than the required amount #
        #########################################################################
        utx = False
        listunspent = self.nodes[2].listunspent()
        for aUtx in listunspent:
            if aUtx['amount'] == 5.0:
                utx = aUtx
                break;

        assert_equal(utx!=False, True)

        inputs = [ {'txid' : utx['txid'], 'vout' : utx['vout']}]
        outputs = { self.nodes[0].getnewaddress() : 1.0 }
        rawtx = self.nodes[2].createrawtransaction(inputs, outputs)
        dec_tx = self.nodes[2].decoderawtransaction(rawtx)
        assert_equal(utx['txid'], dec_tx['vin'][0]['txid'])

        rawtxfund = self.nodes[2].fundrawtransaction(rawtx)
        fee = rawtxfund['fee']
        dec_tx = self.nodes[2].decoderawtransaction(rawtxfund['hex'])
        totalOut = 0
        for out in dec_tx['vout']:
            totalOut += out['value']

        assert_equal(fee*0.00000001+float(totalOut), utx['amount']) #compare vin total and totalout+fee

        #########################################################################
        # test a fundrawtransaction with a VIN smaller than the required amount #
        #########################################################################
        utx = False
        listunspent = self.nodes[2].listunspent()
        for aUtx in listunspent:
            if aUtx['amount'] == 1.0:
                utx = aUtx
                break;

        assert_equal(utx!=False, True)

        inputs = [ {'txid' : utx['txid'], 'vout' : utx['vout']}]
        outputs = { self.nodes[0].getnewaddress() : 1.0 }
        rawtx = self.nodes[2].createrawtransaction(inputs, outputs)
        dec_tx = self.nodes[2].decoderawtransaction(rawtx)
        assert_equal(utx['txid'], dec_tx['vin'][0]['txid'])

        rawtxfund = self.nodes[2].fundrawtransaction(rawtx)
        fee = rawtxfund['fee']
        dec_tx = self.nodes[2].decoderawtransaction(rawtxfund['hex'])
        totalOut = 0
        matchingOuts = 0
        for out in dec_tx['vout']:
            totalOut += out['value']
            if outputs.has_key(out['scriptPubKey']['addresses'][0]):
                matchingOuts+=1

        assert_equal(matchingOuts, 1)
        assert_equal(len(dec_tx['vout']), 2)
        assert_equal(fee*0.00000001+float(totalOut), 2.5) #this tx must use the 1.0BTC and the 1.5BTC coin

        ###########################################
        # test a fundrawtransaction with two VINs #
        ###########################################
        utx = False
        utx2 = False
        listunspent = self.nodes[2].listunspent()
        for aUtx in listunspent:
            if aUtx['amount'] == 1.0:
                utx = aUtx
            if aUtx['amount'] == 5.0:
                utx2 = aUtx

        assert_equal(utx!=False, True)

        inputs = [ {'txid' : utx['txid'], 'vout' : utx['vout']},{'txid' : utx2['txid'], 'vout' : utx2['vout']} ]
        outputs = { self.nodes[0].getnewaddress() : 6.0 }
        rawtx = self.nodes[2].createrawtransaction(inputs, outputs)
        dec_tx = self.nodes[2].decoderawtransaction(rawtx)
        assert_equal(utx['txid'], dec_tx['vin'][0]['txid'])

        rawtxfund = self.nodes[2].fundrawtransaction(rawtx)
        fee = rawtxfund['fee']
        dec_tx = self.nodes[2].decoderawtransaction(rawtxfund['hex'])
        totalOut = 0
        matchingOuts = 0
        for out in dec_tx['vout']:
            totalOut += out['value']
            if outputs.has_key(out['scriptPubKey']['addresses'][0]):
                matchingOuts+=1

        assert_equal(matchingOuts, 1)
        assert_equal(len(dec_tx['vout']), 2)

        matchingIns = 0
        for vinOut in dec_tx['vin']:
            for vinIn in inputs:
                if vinIn['txid'] == vinOut['txid']:
                    matchingIns+=1

        assert_equal(matchingIns, 2) #we now must see two vins identical to vins given as params
        assert_equal(fee*0.00000001+float(totalOut), 7.5) #this tx must use the 1.0BTC and the 1.5BTC coin

        #########################################################
        # test a fundrawtransaction with two VINs and two vOUTs #
        #########################################################
        utx = False
        utx2 = False
        listunspent = self.nodes[2].listunspent()
        for aUtx in listunspent:
            if aUtx['amount'] == 1.0:
                utx = aUtx
            if aUtx['amount'] == 5.0:
                utx2 = aUtx

        assert_equal(utx!=False, True)

        inputs = [ {'txid' : utx['txid'], 'vout' : utx['vout']},{'txid' : utx2['txid'], 'vout' : utx2['vout']} ]
        outputs = { self.nodes[0].getnewaddress() : 6.0, self.nodes[0].getnewaddress() : 1.0 }
        rawtx = self.nodes[2].createrawtransaction(inputs, outputs)
        dec_tx = self.nodes[2].decoderawtransaction(rawtx)
        assert_equal(utx['txid'], dec_tx['vin'][0]['txid'])

        rawtxfund = self.nodes[2].fundrawtransaction(rawtx)
        fee = rawtxfund['fee']
        dec_tx = self.nodes[2].decoderawtransaction(rawtxfund['hex'])
        totalOut = 0
        matchingOuts = 0
        for out in dec_tx['vout']:
            totalOut += out['value']
            if outputs.has_key(out['scriptPubKey']['addresses'][0]):
                matchingOuts+=1

        assert_equal(matchingOuts, 2)
        assert_equal(len(dec_tx['vout']), 3)
        assert_equal(fee*0.00000001+float(totalOut), 7.5) #this tx must use the 1.0BTC and the 1.5BTC coin

        ##############################################
        # test a fundrawtransaction with invalid vin #
        ##############################################
        listunspent = self.nodes[2].listunspent()
        inputs = [ {'txid' : "1c7f966dab21119bac53213a2bc7532bff1fa844c124fd750a7d0b1332440bd1", 'vout' : 0} ] #invalid vin!
        outputs = { self.nodes[0].getnewaddress() : 1.0}
        rawtx = self.nodes[2].createrawtransaction(inputs, outputs)
        dec_tx = self.nodes[2].decoderawtransaction(rawtx)

        errorString = ""
        try:
            rawtxfund = self.nodes[2].fundrawtransaction(rawtx)
        except JSONRPCException,e:
            errorString = e.error['message']

        assert_equal("Insufficient" in errorString, True);

        ##################################################
        # test a fundrawtransaction using only watchonly #
        ##################################################
        inputs = []
        outputs = {self.nodes[2].getnewaddress() : watchonly_amount / 2.0}
        rawtx = self.nodes[0].createrawtransaction(inputs, outputs)
        result = self.nodes[1].fundrawtransaction(rawtx, True)
        assert_equal("hex" in result.keys(), True)
        assert_equal("fee" in result.keys(), True)

        ###############################################################
        # test fundrawtransaction using the entirety of watched funds #
        ###############################################################
        inputs = []
        outputs = {self.nodes[2].getnewaddress() : watchonly_amount / 1.0}
        rawtx = self.nodes[0].createrawtransaction(inputs, outputs)
        result = self.nodes[1].fundrawtransaction(rawtx, True)
        assert_equal("hex" in result.keys(), True)
        assert_equal("fee" in result.keys(), True)
if __name__ == '__main__':
RawTransactionsTest().main()
| mit |
jimberlage/servo | tests/wpt/web-platform-tests/xhr/resources/access-control-preflight-denied.py | 23 | 1608 | def main(request, response):
def fail(message):
response.content = "FAIL: " + str(message)
response.status = 400
def getState(token):
server_state = request.server.stash.take(token)
if not server_state:
return "Uninitialized"
return server_state
def setState(token, state):
request.server.stash.put(token, state)
def resetState(token):
setState(token, "")
response.headers.set("Cache-Control", "no-store")
response.headers.set("Access-Control-Allow-Origin", request.headers.get("origin"))
response.headers.set("Access-Control-Max-Age", 1)
token = request.GET.first("token", None)
state = getState(token)
command = request.GET.first("command", None)
if command == "reset":
if request.method == "GET":
resetState(token)
response.content = "Server state reset"
else:
fail("Invalid Method.")
elif state == "Uninitialized":
if request.method == "OPTIONS":
response.content = "This request should not be displayed."
setState(token, "Denied")
else:
fail(state)
elif state == "Denied":
if request.method == "GET" and command == "complete":
resetState(token)
response.content = "Request successfully blocked."
else:
setState("Deny Ignored")
fail("The request was not denied.")
elif state == "Deny Ignored":
resetState(token)
fail(state)
else:
resetState(token)
fail("Unknown Error.")
| mpl-2.0 |
haxoza/django | tests/auth_tests/models/invalid_models.py | 26 | 1208 | from django.contrib.auth.models import AbstractBaseUser, UserManager
from django.db import models
class CustomUserNonUniqueUsername(AbstractBaseUser):
    """
    A user with a non-unique username.

    This model is not invalid if it is used with a custom authentication
    backend which supports non-unique usernames.
    """
    # Deliberately NOT unique=True: exercises the auth.E003/W004 system checks.
    username = models.CharField(max_length=30)
    email = models.EmailField(blank=True)
    is_staff = models.BooleanField(default=False)
    is_superuser = models.BooleanField(default=False)

    USERNAME_FIELD = 'username'
    REQUIRED_FIELDS = ['email']

    objects = UserManager()
class CustomUserNonListRequiredFields(AbstractBaseUser):
    "A user with a non-list REQUIRED_FIELDS"
    username = models.CharField(max_length=30, unique=True)
    date_of_birth = models.DateField()

    USERNAME_FIELD = 'username'
    # Deliberately a plain string, not a list: triggers the auth.E001 check.
    REQUIRED_FIELDS = 'date_of_birth'
class CustomUserBadRequiredFields(AbstractBaseUser):
    "A user with a USERNAME_FIELD that appears in REQUIRED_FIELDS (invalid)"
    username = models.CharField(max_length=30, unique=True)
    date_of_birth = models.DateField()

    USERNAME_FIELD = 'username'
    # Deliberately includes USERNAME_FIELD: triggers the auth.E002 check.
    REQUIRED_FIELDS = ['username', 'date_of_birth']
| bsd-3-clause |
FiveEye/ml-notebook | dlp/ch5_1_dogsvscats_copy_data.py | 1 | 1485 | import os, shutil
# Source dataset layout (Kaggle dogs-vs-cats); the 'small' subset mirrors the
# book's Deep Learning with Python cats-vs-dogs example.
org_dataset_dir = '/home/han/code/data/dogsvscats'
org_dataset_train_dir = os.path.join(org_dataset_dir, 'train')

base_dir = os.path.join(org_dataset_dir, 'small')
# mkdir calls are commented out because the directories already exist after
# the first run; uncomment for a fresh setup.
# os.mkdir(base_dir)

train_dir = os.path.join(base_dir, 'train')
val_dir = os.path.join(base_dir, 'validation')
test_dir = os.path.join(base_dir, 'test')
# os.mkdir(train_dir)
# os.mkdir(val_dir)
# os.mkdir(test_dir)

# Each split has one subdirectory per class, as expected by Keras'
# flow_from_directory-style loaders.
train_cats_dir = os.path.join(train_dir, 'cats')
train_dogs_dir = os.path.join(train_dir, 'dogs')
val_cats_dir = os.path.join(val_dir, 'cats')
val_dogs_dir = os.path.join(val_dir, 'dogs')
test_cats_dir = os.path.join(test_dir, 'cats')
test_dogs_dir = os.path.join(test_dir, 'dogs')
# os.mkdir(train_cats_dir)
# os.mkdir(val_cats_dir)
# os.mkdir(test_cats_dir)
# os.mkdir(train_dogs_dir)
# os.mkdir(val_dogs_dir)
# os.mkdir(test_dogs_dir)
def copy_data(dir, beg, end):
sub_dir = os.path.join(dir, 'cats')
fnames = ['cat.{}.jpg'.format(i) for i in range(beg, end)]
for fname in fnames:
s = os.path.join(org_dataset_train_dir, fname)
d = os.path.join(sub_dir, fname)
shutil.copyfile(s, d)
sub_dir = os.path.join(dir, 'dogs')
fnames = ['dog.{}.jpg'.format(i) for i in range(beg, end)]
for fname in fnames:
s = os.path.join(org_dataset_train_dir, fname)
d = os.path.join(sub_dir, fname)
shutil.copyfile(s, d)
copy_data(train_dir, 0, 1000)
copy_data(val_dir, 1000, 1500)
copy_data(test_dir, 1500, 2000) | mit |
PongPi/isl-odoo | openerp/report/custom.py | 338 | 25091 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2009 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import os
import time
import openerp
import openerp.tools as tools
from openerp.tools.safe_eval import safe_eval as eval
import print_xml
import render
from interface import report_int
import common
from openerp.osv.osv import except_osv
from openerp.osv.orm import BaseModel
from pychart import *
import misc
import cStringIO
from lxml import etree
from openerp.tools.translate import _
class external_pdf(render.render):
def __init__(self, pdf):
render.render.__init__(self)
self.pdf = pdf
self.output_type='pdf'
def _render(self):
return self.pdf
theme.use_color = 1
#TODO: devrait heriter de report_rml a la place de report_int
# -> pourrait overrider que create_xml a la place de tout create
# heuu, ca marche pas ds tous les cas car graphs sont generes en pdf directment
# par pychart, et on passe donc pas par du rml
class report_custom(report_int):
def __init__(self, name):
report_int.__init__(self, name)
#
# PRE:
# fields = [['address','city'],['name'], ['zip']]
# conditions = [[('zip','==','3'),(,)],(,),(,)] #same structure as fields
# row_canvas = ['Rue', None, None]
# POST:
# [ ['ville','name','zip'] ]
#
def _row_get(self, cr, uid, objs, fields, conditions, row_canvas=None, group_by=None):
result = []
for obj in objs:
tobreak = False
for cond in conditions:
if cond and cond[0]:
c = cond[0]
temp = c[0](eval('obj.'+c[1],{'obj': obj}))
if not eval('\''+temp+'\''+' '+c[2]+' '+'\''+str(c[3])+'\''):
tobreak = True
if tobreak:
break
levels = {}
row = []
for i in range(len(fields)):
if not fields[i]:
row.append(row_canvas and row_canvas[i])
if row_canvas[i]:
row_canvas[i]=False
elif len(fields[i])==1:
if obj:
row.append(str(eval('obj.'+fields[i][0],{'obj': obj})))
else:
row.append(None)
else:
row.append(None)
levels[fields[i][0]]=True
if not levels:
result.append(row)
else:
# Process group_by data first
key = []
if group_by is not None and fields[group_by] is not None:
if fields[group_by][0] in levels.keys():
key.append(fields[group_by][0])
for l in levels.keys():
if l != fields[group_by][0]:
key.append(l)
else:
key = levels.keys()
for l in key:
objs = eval('obj.'+l,{'obj': obj})
if not isinstance(objs, (BaseModel, list)):
objs = [objs]
field_new = []
cond_new = []
for f in range(len(fields)):
if (fields[f] and fields[f][0])==l:
field_new.append(fields[f][1:])
cond_new.append(conditions[f][1:])
else:
field_new.append(None)
cond_new.append(None)
if len(objs):
result += self._row_get(cr, uid, objs, field_new, cond_new, row, group_by)
else:
result.append(row)
return result
def create(self, cr, uid, ids, datas, context=None):
if not context:
context={}
self.pool = openerp.registry(cr.dbname)
report = self.pool['ir.report.custom'].browse(cr, uid, [datas['report_id']])[0]
datas['model'] = report.model_id.model
if report.menu_id:
ids = self.pool[report.model_id.model].search(cr, uid, [])
datas['ids'] = ids
report_id = datas['report_id']
report = self.pool['ir.report.custom'].read(cr, uid, [report_id], context=context)[0]
fields = self.pool['ir.report.custom.fields'].read(cr, uid, report['fields_child0'], context=context)
fields.sort(lambda x,y : x['sequence'] - y['sequence'])
if report['field_parent']:
parent_field = self.pool['ir.model.fields'].read(cr, uid, [report['field_parent'][0]], ['model'])
model_name = self.pool['ir.model'].read(cr, uid, [report['model_id'][0]], ['model'], context=context)[0]['model']
fct = {
'id': lambda x: x,
'gety': lambda x: x.split('-')[0],
'in': lambda x: x.split(',')
}
new_fields = []
new_cond = []
for f in fields:
row = []
cond = []
for i in range(4):
field_child = f['field_child'+str(i)]
if field_child:
row.append(
self.pool['ir.model.fields'].read(cr, uid, [field_child[0]], ['name'], context=context)[0]['name']
)
if f['fc'+str(i)+'_operande']:
fct_name = 'id'
cond_op = f['fc'+str(i)+'_op']
if len(f['fc'+str(i)+'_op'].split(',')) == 2:
cond_op = f['fc'+str(i)+'_op'].split(',')[1]
fct_name = f['fc'+str(i)+'_op'].split(',')[0]
cond.append((fct[fct_name], f['fc'+str(i)+'_operande'][1], cond_op, f['fc'+str(i)+'_condition']))
else:
cond.append(None)
new_fields.append(row)
new_cond.append(cond)
objs = self.pool[model_name].browse(cr, uid, ids)
# Group by
groupby = None
idx = 0
for f in fields:
if f['groupby']:
groupby = idx
idx += 1
results = []
if report['field_parent']:
level = []
def build_tree(obj, level, depth):
res = self._row_get(cr, uid,[obj], new_fields, new_cond)
level.append(depth)
new_obj = eval('obj.'+report['field_parent'][1],{'obj': obj})
if not isinstance(new_obj, list) :
new_obj = [new_obj]
for o in new_obj:
if o:
res += build_tree(o, level, depth+1)
return res
for obj in objs:
results += build_tree(obj, level, 0)
else:
results = self._row_get(cr, uid,objs, new_fields, new_cond, group_by=groupby)
fct = {
'calc_sum': lambda l: reduce(lambda x,y: float(x)+float(y), filter(None, l), 0),
'calc_avg': lambda l: reduce(lambda x,y: float(x)+float(y), filter(None, l), 0) / (len(filter(None, l)) or 1.0),
'calc_max': lambda l: reduce(lambda x,y: max(x,y), [(i or 0.0) for i in l], 0),
'calc_min': lambda l: reduce(lambda x,y: min(x,y), [(i or 0.0) for i in l], 0),
'calc_count': lambda l: len(filter(None, l)),
'False': lambda l: '\r\n'.join(filter(None, l)),
'groupby': lambda l: reduce(lambda x,y: x or y, l)
}
new_res = []
prev = None
if groupby is not None:
res_dic = {}
for line in results:
if not line[groupby] and prev in res_dic:
res_dic[prev].append(line)
else:
prev = line[groupby]
res_dic.setdefault(line[groupby], [])
res_dic[line[groupby]].append(line)
#we use the keys in results since they are ordered, whereas in res_dic.heys() they aren't
for key in filter(None, [x[groupby] for x in results]):
row = []
for col in range(len(fields)):
if col == groupby:
row.append(fct['groupby'](map(lambda x: x[col], res_dic[key])))
else:
row.append(fct[str(fields[col]['operation'])](map(lambda x: x[col], res_dic[key])))
new_res.append(row)
results = new_res
if report['type']=='table':
if report['field_parent']:
res = self._create_tree(uid, ids, report, fields, level, results, context)
else:
sort_idx = 0
for idx in range(len(fields)):
if fields[idx]['name'] == report['sortby']:
sort_idx = idx
break
try :
results.sort(lambda x,y : cmp(float(x[sort_idx]),float(y[sort_idx])))
except :
results.sort(lambda x,y : cmp(x[sort_idx],y[sort_idx]))
if report['limitt']:
results = results[:int(report['limitt'])]
res = self._create_table(uid, ids, report, fields, None, results, context)
elif report['type'] in ('pie','bar', 'line'):
results2 = []
prev = False
for r in results:
row = []
for j in range(len(r)):
if j == 0 and not r[j]:
row.append(prev)
elif j == 0 and r[j]:
prev = r[j]
row.append(r[j])
else:
try:
row.append(float(r[j]))
except Exception:
row.append(r[j])
results2.append(row)
if report['type']=='pie':
res = self._create_pie(cr,uid, ids, report, fields, results2, context)
elif report['type']=='bar':
res = self._create_bars(cr,uid, ids, report, fields, results2, context)
elif report['type']=='line':
res = self._create_lines(cr,uid, ids, report, fields, results2, context)
return self.obj.get(), 'pdf'
def _create_tree(self, uid, ids, report, fields, level, results, context):
pageSize=common.pageSize.get(report['print_format'], [210.0,297.0])
if report['print_orientation']=='landscape':
pageSize=[pageSize[1],pageSize[0]]
new_doc = etree.Element('report')
config = etree.SubElement(new_doc, 'config')
def _append_node(name, text):
n = etree.SubElement(config, name)
n.text = text
_append_node('date', time.strftime('%d/%m/%Y'))
_append_node('PageFormat', '%s' % report['print_format'])
_append_node('PageSize', '%.2fmm,%.2fmm' % tuple(pageSize))
_append_node('PageWidth', '%.2f' % (pageSize[0] * 2.8346,))
_append_node('PageHeight', '%.2f' %(pageSize[1] * 2.8346,))
length = pageSize[0]-30-reduce(lambda x,y:x+(y['width'] or 0), fields, 0)
count = 0
for f in fields:
if not f['width']: count+=1
for f in fields:
if not f['width']:
f['width']=round((float(length)/count)-0.5)
_append_node('tableSize', '%s' % ','.join(map(lambda x: '%.2fmm' % (x['width'],), fields)))
_append_node('report-header', '%s' % (report['title'],))
_append_node('report-footer', '%s' % (report['footer'],))
header = etree.SubElement(new_doc, 'header')
for f in fields:
field = etree.SubElement(header, 'field')
field.text = f['name']
lines = etree.SubElement(new_doc, 'lines')
level.reverse()
for line in results:
shift = level.pop()
node_line = etree.SubElement(lines, 'row')
prefix = '+'
for f in range(len(fields)):
col = etree.SubElement(node_line, 'col')
if f == 0:
col.attrib.update(para='yes',
tree='yes',
space=str(3*shift)+'mm')
if line[f] is not None:
col.text = prefix+str(line[f]) or ''
else:
col.text = '/'
prefix = ''
transform = etree.XSLT(
etree.parse(os.path.join(tools.config['root_path'],
'addons/base/report/custom_new.xsl')))
rml = etree.tostring(transform(new_doc))
self.obj = render.rml(rml)
self.obj.render()
return True
def _create_lines(self, cr, uid, ids, report, fields, results, context):
pool = openerp.registry(cr.dbname)
pdf_string = cStringIO.StringIO()
can = canvas.init(fname=pdf_string, format='pdf')
can.show(80,380,'/16/H'+report['title'])
ar = area.T(size=(350,350),
#x_coord = category_coord.T(['2005-09-01','2005-10-22'],0),
x_axis = axis.X(label = fields[0]['name'], format="/a-30{}%s"),
y_axis = axis.Y(label = ', '.join(map(lambda x : x['name'], fields[1:]))))
process_date = {
'D': lambda x: reduce(lambda xx, yy: xx + '-' + yy, x.split('-')[1:3]),
'M': lambda x: x.split('-')[1],
'Y': lambda x: x.split('-')[0]
}
order_date = {
'D': lambda x: time.mktime((2005, int(x.split('-')[0]), int(x.split('-')[1]), 0, 0, 0, 0, 0, 0)),
'M': lambda x: x,
'Y': lambda x: x
}
abscissa = []
idx = 0
date_idx = None
fct = {}
for f in fields:
field_id = (f['field_child3'] and f['field_child3'][0]) or (f['field_child2'] and f['field_child2'][0]) or (f['field_child1'] and f['field_child1'][0]) or (f['field_child0'] and f['field_child0'][0])
if field_id:
type = pool['ir.model.fields'].read(cr, uid, [field_id],['ttype'])
if type[0]['ttype'] == 'date':
date_idx = idx
fct[idx] = process_date[report['frequency']]
else:
fct[idx] = lambda x : x
else:
fct[idx] = lambda x : x
idx+=1
# plots are usually displayed year by year
# so we do so if the first field is a date
data_by_year = {}
if date_idx is not None:
for r in results:
key = process_date['Y'](r[date_idx])
if key not in data_by_year:
data_by_year[key] = []
for i in range(len(r)):
r[i] = fct[i](r[i])
data_by_year[key].append(r)
else:
data_by_year[''] = results
idx0 = 0
nb_bar = len(data_by_year)*(len(fields)-1)
colors = map(lambda x:line_style.T(color=x), misc.choice_colors(nb_bar))
abscissa = {}
for line in data_by_year.keys():
fields_bar = []
# sum data and save it in a list. An item for a fields
for d in data_by_year[line]:
for idx in range(len(fields)-1):
fields_bar.append({})
if d[0] in fields_bar[idx]:
fields_bar[idx][d[0]] += d[idx+1]
else:
fields_bar[idx][d[0]] = d[idx+1]
for idx in range(len(fields)-1):
data = {}
for k in fields_bar[idx].keys():
if k in data:
data[k] += fields_bar[idx][k]
else:
data[k] = fields_bar[idx][k]
data_cum = []
prev = 0.0
keys = data.keys()
keys.sort()
# cumulate if necessary
for k in keys:
data_cum.append([k, float(data[k])+float(prev)])
if fields[idx+1]['cumulate']:
prev += data[k]
idx0 = 0
plot = line_plot.T(label=fields[idx+1]['name']+' '+str(line), data = data_cum, line_style=colors[idx0*(len(fields)-1)+idx])
ar.add_plot(plot)
abscissa.update(fields_bar[idx])
idx0 += 1
abscissa = map(lambda x : [x, None], abscissa)
ar.x_coord = category_coord.T(abscissa,0)
ar.draw(can)
can.close()
self.obj = external_pdf(pdf_string.getvalue())
self.obj.render()
pdf_string.close()
return True
def _create_bars(self, cr, uid, ids, report, fields, results, context):
pool = openerp.registry(cr.dbname)
pdf_string = cStringIO.StringIO()
can = canvas.init(fname=pdf_string, format='pdf')
can.show(80,380,'/16/H'+report['title'])
process_date = {
'D': lambda x: reduce(lambda xx, yy: xx + '-' + yy, x.split('-')[1:3]),
'M': lambda x: x.split('-')[1],
'Y': lambda x: x.split('-')[0]
}
order_date = {
'D': lambda x: time.mktime((2005, int(x.split('-')[0]), int(x.split('-')[1]), 0, 0, 0, 0, 0, 0)),
'M': lambda x: x,
'Y': lambda x: x
}
ar = area.T(size=(350,350),
x_axis = axis.X(label = fields[0]['name'], format="/a-30{}%s"),
y_axis = axis.Y(label = ', '.join(map(lambda x : x['name'], fields[1:]))))
idx = 0
date_idx = None
fct = {}
for f in fields:
field_id = (f['field_child3'] and f['field_child3'][0]) or (f['field_child2'] and f['field_child2'][0]) or (f['field_child1'] and f['field_child1'][0]) or (f['field_child0'] and f['field_child0'][0])
if field_id:
type = pool['ir.model.fields'].read(cr, uid, [field_id],['ttype'])
if type[0]['ttype'] == 'date':
date_idx = idx
fct[idx] = process_date[report['frequency']]
else:
fct[idx] = lambda x : x
else:
fct[idx] = lambda x : x
idx+=1
# plot are usually displayed year by year
# so we do so if the first field is a date
data_by_year = {}
if date_idx is not None:
for r in results:
key = process_date['Y'](r[date_idx])
if key not in data_by_year:
data_by_year[key] = []
for i in range(len(r)):
r[i] = fct[i](r[i])
data_by_year[key].append(r)
else:
data_by_year[''] = results
nb_bar = len(data_by_year)*(len(fields)-1)
colors = map(lambda x:fill_style.Plain(bgcolor=x), misc.choice_colors(nb_bar))
abscissa = {}
for line in data_by_year.keys():
fields_bar = []
# sum data and save it in a list. An item for a fields
for d in data_by_year[line]:
for idx in range(len(fields)-1):
fields_bar.append({})
if d[0] in fields_bar[idx]:
fields_bar[idx][d[0]] += d[idx+1]
else:
fields_bar[idx][d[0]] = d[idx+1]
for idx in range(len(fields)-1):
data = {}
for k in fields_bar[idx].keys():
if k in data:
data[k] += fields_bar[idx][k]
else:
data[k] = fields_bar[idx][k]
data_cum = []
prev = 0.0
keys = data.keys()
keys.sort()
# cumulate if necessary
for k in keys:
data_cum.append([k, float(data[k])+float(prev)])
if fields[idx+1]['cumulate']:
prev += data[k]
idx0 = 0
plot = bar_plot.T(label=fields[idx+1]['name']+' '+str(line), data = data_cum, cluster=(idx0*(len(fields)-1)+idx,nb_bar), fill_style=colors[idx0*(len(fields)-1)+idx])
ar.add_plot(plot)
abscissa.update(fields_bar[idx])
idx0 += 1
abscissa = map(lambda x : [x, None], abscissa)
abscissa.sort()
ar.x_coord = category_coord.T(abscissa,0)
ar.draw(can)
can.close()
self.obj = external_pdf(pdf_string.getvalue())
self.obj.render()
pdf_string.close()
return True
def _create_pie(self, cr, uid, ids, report, fields, results, context):
pdf_string = cStringIO.StringIO()
can = canvas.init(fname=pdf_string, format='pdf')
ar = area.T(size=(350,350), legend=legend.T(),
x_grid_style = None, y_grid_style = None)
colors = map(lambda x:fill_style.Plain(bgcolor=x), misc.choice_colors(len(results)))
if reduce(lambda x,y : x+y, map(lambda x : x[1],results)) == 0.0:
raise except_osv(_('Error'), _("The sum of the data (2nd field) is null.\nWe can't draw a pie chart !"))
plot = pie_plot.T(data=results, arc_offsets=[0,10,0,10],
shadow = (2, -2, fill_style.gray50),
label_offset = 25,
arrow_style = arrow.a3,
fill_styles=colors)
ar.add_plot(plot)
ar.draw(can)
can.close()
self.obj = external_pdf(pdf_string.getvalue())
self.obj.render()
pdf_string.close()
return True
def _create_table(self, uid, ids, report, fields, tree, results, context):
pageSize=common.pageSize.get(report['print_format'], [210.0,297.0])
if report['print_orientation']=='landscape':
pageSize=[pageSize[1],pageSize[0]]
new_doc = etree.Element('report')
config = etree.SubElement(new_doc, 'config')
def _append_node(name, text):
n = etree.SubElement(config, name)
n.text = text
_append_node('date', time.strftime('%d/%m/%Y'))
_append_node('PageSize', '%.2fmm,%.2fmm' % tuple(pageSize))
_append_node('PageFormat', '%s' % report['print_format'])
_append_node('PageWidth', '%.2f' % (pageSize[0] * 2.8346,))
_append_node('PageHeight', '%.2f' %(pageSize[1] * 2.8346,))
length = pageSize[0]-30-reduce(lambda x,y:x+(y['width'] or 0), fields, 0)
count = 0
for f in fields:
if not f['width']: count+=1
for f in fields:
if not f['width']:
f['width']=round((float(length)/count)-0.5)
_append_node('tableSize', '%s' % ','.join(map(lambda x: '%.2fmm' % (x['width'],), fields)))
_append_node('report-header', '%s' % (report['title'],))
_append_node('report-footer', '%s' % (report['footer'],))
header = etree.SubElement(new_doc, 'header')
for f in fields:
field = etree.SubElement(header, 'field')
field.text = f['name']
lines = etree.SubElement(new_doc, 'lines')
for line in results:
node_line = etree.SubElement(lines, 'row')
for f in range(len(fields)):
col = etree.SubElement(node_line, 'col', tree='no')
if line[f] is not None:
col.text = line[f] or ''
else:
col.text = '/'
transform = etree.XSLT(
etree.parse(os.path.join(tools.config['root_path'],
'addons/base/report/custom_new.xsl')))
rml = etree.tostring(transform(new_doc))
self.obj = render.rml(rml)
self.obj.render()
return True
report_custom('report.custom')
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
lovingmage/ART4SQLi | TF-IDF Based Approach/test_case_feature.py | 1 | 3341 | # Test Case TF-IDF Stat Package
# Author : C.H. Wang
# Department of Computer Science at Harbin Engineering University
# This is the test case feature vector extraction script
#------------------------------------------------------------------
#------------------------------------------------------------------
# Import part , should import two modules in the toolkit file
import file_operation
import testcase_stat
import os
import json
#------------------------------------------------------------------
#
# Init part, this section can be described as the initiation part
# of test cases
# All test cases need to be counted should be loaded in this part
#
#-------------------------------------------------------------------
filename = './Input_data/test.txt'
#This part should be replaced as user input or configure files
test_case_list = file_operation.fileopt_line(filename)
full_list = file_operation.fileopt_all(filename)
full_stat_list = testcase_stat.stat_all(full_list)
test_case_stat_list = testcase_stat.stat(test_case_list)
json.dump(full_stat_list, open('./statistics/full_stat_list.json', 'w'))
json.dump(test_case_stat_list, open('./statistics/test_case_stat_list.json', 'w'))
#-------------------------------------------------------------------
# test_case_list is the list stored the test cases read from files
# Sample : [
# ['alice', 'dod', 'alice', 'bob', 'alice', 'tom'],
# ['alice', 'dod', 'alice', 'bob', 'alice', 'tom']
# ]
#
# full_list is the total list without split
# full_stat_list is used to stat full list
# test_case_stat_list is the split test cases list statistic list
#
#-------------------------------------------------------------------
#print test_case_stat_list
tf_matrix = testcase_stat.compute_tf(test_case_stat_list)
#computing tf value and store in tf_matrix
#print tf_matrix
#Important this place
# In this place we should recompute the test_case_stat_list value
# If not the consequence will go wrong
test_case_stat_list = testcase_stat.stat(test_case_list)
#print test_case_stat_list
# Recompute the test_case_stat_list and then compute the idf value
idf_matrix = testcase_stat.compute_idf(test_case_stat_list, full_stat_list)
#print idf_matrix
#combine tf_matrix and idf_matrix
#Compute tf_idf_list
tf_idf_list = testcase_stat.compute_tf_idf(tf_matrix, idf_matrix)
#print tf_idf_list
#Put them into feature vector and store in
#test_case_feature_list
#----------------------------------------------------------------------------------------------
test_case_feature_list = testcase_stat.feature_vector_quantify(tf_idf_list, full_stat_list)
#----------------------------------------------------------------------------------------------
json.dump(test_case_feature_list, open('./statistics/test_case_feature.json', 'w'))
#demoDictList is the value we want format to output
#jsonDumpsTFIDF = json.dumps(tf_idf_list, indent=1)
#--------------------------------------------------------------------
#jsonDumpsFuature = json.dumps(test_case_feature_list, indent=1)
#json.dump(test_case_feature_list, open('./feature_list.json', 'w'))
#--------------------------------------------------------------------
#print jsonDumpsTFIDF
##print jsonDumpsFuature
#print "-------------------------------------"
#print test_case_list
#print full_list
#print full_stat_list
| gpl-2.0 |
Haynie-Research-and-Development/jarvis | deps/lib/python3.4/site-packages/sqlalchemy/event/api.py | 33 | 5990 | # event/api.py
# Copyright (C) 2005-2017 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
"""Public API functions for the event system.
"""
from __future__ import absolute_import
from .. import util, exc
from .base import _registrars
from .registry import _EventKey
CANCEL = util.symbol('CANCEL')
NO_RETVAL = util.symbol('NO_RETVAL')
def _event_key(target, identifier, fn):
for evt_cls in _registrars[identifier]:
tgt = evt_cls._accept_with(target)
if tgt is not None:
return _EventKey(target, identifier, fn, tgt)
else:
raise exc.InvalidRequestError("No such event '%s' for target '%s'" %
(identifier, target))
def listen(target, identifier, fn, *args, **kw):
"""Register a listener function for the given target.
e.g.::
from sqlalchemy import event
from sqlalchemy.schema import UniqueConstraint
def unique_constraint_name(const, table):
const.name = "uq_%s_%s" % (
table.name,
list(const.columns)[0].name
)
event.listen(
UniqueConstraint,
"after_parent_attach",
unique_constraint_name)
A given function can also be invoked for only the first invocation
of the event using the ``once`` argument::
def on_config():
do_config()
event.listen(Mapper, "before_configure", on_config, once=True)
.. versionadded:: 0.9.4 Added ``once=True`` to :func:`.event.listen`
and :func:`.event.listens_for`.
.. note::
The :func:`.listen` function cannot be called at the same time
that the target event is being run. This has implications
for thread safety, and also means an event cannot be added
from inside the listener function for itself. The list of
events to be run are present inside of a mutable collection
that can't be changed during iteration.
Event registration and removal is not intended to be a "high
velocity" operation; it is a configurational operation. For
systems that need to quickly associate and deassociate with
events at high scale, use a mutable structure that is handled
from inside of a single listener.
.. versionchanged:: 1.0.0 - a ``collections.deque()`` object is now
used as the container for the list of events, which explicitly
disallows collection mutation while the collection is being
iterated.
.. seealso::
:func:`.listens_for`
:func:`.remove`
"""
_event_key(target, identifier, fn).listen(*args, **kw)
def listens_for(target, identifier, *args, **kw):
"""Decorate a function as a listener for the given target + identifier.
e.g.::
from sqlalchemy import event
from sqlalchemy.schema import UniqueConstraint
@event.listens_for(UniqueConstraint, "after_parent_attach")
def unique_constraint_name(const, table):
const.name = "uq_%s_%s" % (
table.name,
list(const.columns)[0].name
)
A given function can also be invoked for only the first invocation
of the event using the ``once`` argument::
@event.listens_for(Mapper, "before_configure", once=True)
def on_config():
do_config()
.. versionadded:: 0.9.4 Added ``once=True`` to :func:`.event.listen`
and :func:`.event.listens_for`.
.. seealso::
:func:`.listen` - general description of event listening
"""
def decorate(fn):
listen(target, identifier, fn, *args, **kw)
return fn
return decorate
def remove(target, identifier, fn):
"""Remove an event listener.
The arguments here should match exactly those which were sent to
:func:`.listen`; all the event registration which proceeded as a result
of this call will be reverted by calling :func:`.remove` with the same
arguments.
e.g.::
# if a function was registered like this...
@event.listens_for(SomeMappedClass, "before_insert", propagate=True)
def my_listener_function(*arg):
pass
# ... it's removed like this
event.remove(SomeMappedClass, "before_insert", my_listener_function)
Above, the listener function associated with ``SomeMappedClass`` was also
propagated to subclasses of ``SomeMappedClass``; the :func:`.remove`
function will revert all of these operations.
.. versionadded:: 0.9.0
.. note::
The :func:`.remove` function cannot be called at the same time
that the target event is being run. This has implications
for thread safety, and also means an event cannot be removed
from inside the listener function for itself. The list of
events to be run are present inside of a mutable collection
that can't be changed during iteration.
Event registration and removal is not intended to be a "high
velocity" operation; it is a configurational operation. For
systems that need to quickly associate and deassociate with
events at high scale, use a mutable structure that is handled
from inside of a single listener.
.. versionchanged:: 1.0.0 - a ``collections.deque()`` object is now
used as the container for the list of events, which explicitly
disallows collection mutation while the collection is being
iterated.
.. seealso::
:func:`.listen`
"""
_event_key(target, identifier, fn).remove()
def contains(target, identifier, fn):
"""Return True if the given target/ident/fn is set up to listen.
.. versionadded:: 0.9.0
"""
return _event_key(target, identifier, fn).contains()
| gpl-2.0 |
Refefer/pylearn2 | pylearn2/models/autoencoder.py | 1 | 27560 | """
Autoencoders, denoising autoencoders, and stacked DAEs.
"""
# Standard library imports
import functools
import operator
# Third-party imports
import numpy
import theano
from theano import tensor
from theano.compat.six.moves import zip as izip, reduce
# Local imports
from pylearn2.blocks import Block, StackedBlocks
from pylearn2.models import Model
from pylearn2.utils import sharedX
from pylearn2.utils.theano_graph import is_pure_elemwise
from pylearn2.utils.rng import make_np_rng, make_theano_rng
from pylearn2.space import VectorSpace
theano.config.warn.sum_div_dimshuffle_bug = False
class AbstractAutoencoder(Model, Block):
"""
Abstract class for autoencoders.
"""
def __init__(self):
super(AbstractAutoencoder, self).__init__()
def encode(self, inputs):
"""
Map inputs through the encoder function.
Parameters
----------
inputs : tensor_like or list of tensor_likes
Theano symbolic (or list thereof) representing the input
minibatch(es) to be encoded. Assumed to be 2-tensors, with the
first dimension indexing training examples and the second
indexing data dimensions.
Returns
-------
encoded : tensor_like or list of tensor_like
Theano symbolic (or list thereof) representing the corresponding
minibatch(es) after encoding.
"""
raise NotImplementedError(
str(type(self)) + " does not implement encode.")
def decode(self, hiddens):
"""
Map inputs through the encoder function.
Parameters
----------
hiddens : tensor_like or list of tensor_likes
Theano symbolic (or list thereof) representing the input
minibatch(es) to be encoded. Assumed to be 2-tensors, with the
first dimension indexing training examples and the second
indexing data dimensions.
Returns
-------
decoded : tensor_like or list of tensor_like
Theano symbolic (or list thereof) representing the corresponding
minibatch(es) after decoding.
"""
raise NotImplementedError(
str(type(self)) + " does not implement decode.")
def reconstruct(self, inputs):
"""
Reconstruct (decode) the inputs after mapping through the encoder.
Parameters
----------
inputs : tensor_like or list of tensor_likes
Theano symbolic (or list thereof) representing the input
minibatch(es) to be encoded and reconstructed. Assumed to be
2-tensors, with the first dimension indexing training examples
and the second indexing data dimensions.
Returns
-------
reconstructed : tensor_like or list of tensor_like
Theano symbolic (or list thereof) representing the corresponding
reconstructed minibatch(es) after encoding/decoding.
"""
return self.decode(self.encode(inputs))
def __call__(self, inputs):
"""
Forward propagate (symbolic) input through this module, obtaining
a representation to pass on to layers above.
This just aliases the `encode()` function for syntactic
sugar/convenience.
"""
return self.encode(inputs)
class Autoencoder(AbstractAutoencoder):
"""
Base class implementing ordinary autoencoders.
More exotic variants (denoising, contracting autoencoders) can inherit
much of the necessary functionality and override what they need.
Parameters
----------
nvis : int
Number of visible units (input dimensions) in this model.
A value of 0 indicates that this block will be left partially
initialized until later (e.g., when the dataset is loaded and
its dimensionality is known). Note: There is currently a bug
when nvis is set to 0. For now, you should not set nvis to 0.
nhid : int
Number of hidden units in this model.
act_enc : callable or string
Activation function (elementwise nonlinearity) to use for the
encoder. Strings (e.g. 'tanh' or 'sigmoid') will be looked up as
functions in `theano.tensor.nnet` and `theano.tensor`. Use `None`
for linear units.
act_dec : callable or string
Activation function (elementwise nonlinearity) to use for the
decoder. Strings (e.g. 'tanh' or 'sigmoid') will be looked up as
functions in `theano.tensor.nnet` and `theano.tensor`. Use `None`
for linear units.
tied_weights : bool, optional
If `False` (default), a separate set of weights will be allocated
(and learned) for the encoder and the decoder function. If
`True`, the decoder weight matrix will be constrained to be equal
to the transpose of the encoder weight matrix.
irange : float, optional
Width of the initial range around 0 from which to sample initial
values for the weights.
rng : RandomState object or seed, optional
NumPy random number generator object (or seed to create one) used
to initialize the model parameters.
"""
def __init__(self, nhid, act_enc, act_dec,
tied_weights=False, irange=1e-3, rng=9001,
nvis=None):
"""
WRITEME
"""
super(Autoencoder, self).__init__()
self.nhid = nhid
self.irange = irange
self.tied_weights = tied_weights
self.rng = make_np_rng(rng, which_method="randn")
self.act_enc = self._resolve_callable(act_enc)
self.act_dec = self._resolve_callable(act_dec)
self.output_space = VectorSpace(nhid)
self._params = []
def set_input_space(self, input_space):
    """
    Configure the model from a Space object (instead of an explicit
    visible-unit count) and allocate the parameters.
    """
    dim = input_space.get_total_dimension()
    flat_space = VectorSpace(dim=dim)
    self.input_space = input_space
    self.desired_input_space = flat_space
    # Reformatting is only needed when the caller's space is not already
    # the flat vector space the autoencoder operates on internally.
    self.needs_reformat = (input_space != flat_space)
    self._initialize_params(dim)
def format_enc(self, X):
    """Convert `X` from the declared input space to the flat internal space."""
    if not self.needs_reformat:
        return X
    return self.input_space.format_as(X, self.desired_input_space)
def format_dec(self, res):
    """Convert a decoded batch back from the flat internal space to the input space."""
    if not self.needs_reformat:
        return res
    return self.desired_input_space.format_as(res, self.input_space)
def _initialize_params(self, nvis):
# Save a few parameters needed for resizing
self._initialize_hidden(nvis)
self._initialize_visible(nvis)
def _initialize_hidden(self, nvis):
self._initialize_hidbias()
self._initialize_weights(nvis)
self._params.extend([
self.hidbias,
self.weights
])
def _initialize_visible(self, nvis):
self._initialize_visbias(nvis)
if self.tied_weights and self.weights is not None:
self.w_prime = self.weights.T
else:
self._initialize_w_prime(nvis)
self._params.append(self.visbias)
if not self.tied_weights:
self._params.append(self.w_prime)
def _resolve_callable(self, activation):
"""
.. todo::
WRITEME
"""
if activation is None or activation == "linear":
return None
# If it's a callable, use it directly.
if hasattr(activation, '__call__'):
return activation
if isinstance(activation, basestring):
if hasattr(tensor.nnet, activation):
return getattr(tensor.nnet, activation)
elif hasattr(tensor, activation):
return getattr(tensor, activation)
raise ValueError("Couldn't interpret value: '%s'" % activation)
def _initialize_weights(self, nvis, rng=None, irange=None):
    """
    Allocate the nvis x nhid encoder weight matrix as a shared variable,
    sampled uniformly from the interval of width `irange` centered at 0.
    """
    rng = self.rng if rng is None else rng
    irange = self.irange if irange is None else irange
    # TODO: use weight scaling factor if provided, Xavier's default else
    init = (.5 - rng.rand(nvis, self.nhid)) * irange
    self.weights = sharedX(init, name='W', borrow=True)
def _initialize_hidbias(self):
    """Allocate the hidden-unit bias vector (length nhid), initialized to zeros."""
    zeros = numpy.zeros(self.nhid)
    self.hidbias = sharedX(zeros, name='hb', borrow=True)
def _initialize_visbias(self, nvis):
    """Allocate the visible-unit bias vector (length nvis), initialized to zeros."""
    zeros = numpy.zeros(nvis)
    self.visbias = sharedX(zeros, name='vb', borrow=True)
def _initialize_w_prime(self, nvis, rng=None, irange=None):
    """
    Allocate an independent nhid x nvis decoder weight matrix, sampled
    uniformly from the interval of width `irange` centered at 0.

    Must not be called on tied-weights models, which reuse `weights.T`.
    """
    assert not self.tied_weights, (
        "Can't initialize w_prime in tied weights model; "
        "this method shouldn't have been called"
    )
    rng = self.rng if rng is None else rng
    irange = self.irange if irange is None else irange
    init = (.5 - rng.rand(self.nhid, nvis)) * irange
    self.w_prime = sharedX(init, name='Wprime', borrow=True)
def set_visible_size(self, nvis, rng=None):
    """
    Create and initialize the parameters needed to accept
    `nvis`-dimensional inputs.

    Parameters
    ----------
    nvis : int
        Number of visible units for the model.
    rng : RandomState object or seed, optional
        Random number generator used to initialize the parameters. When
        omitted, the generator stored at construction time is used.

    Raises
    ------
    ValueError
        If the model's parameters were already initialized.
    """
    if self.weights is not None:
        raise ValueError('parameters of this model already initialized; '
                         'create a new object instead')
    if rng is None:
        rng = self.rng
    else:
        # Remember the caller-supplied generator for later use.
        self.rng = rng
    self._initialize_visbias(nvis)
    self._initialize_weights(nvis, rng)
    if not self.tied_weights:
        self._initialize_w_prime(nvis, rng)
    # NOTE(review): `_set_params` is not defined in this class as shown —
    # presumably provided by the base Model class; confirm.
    self._set_params()
def _hidden_activation(self, x):
"""
Single minibatch activation function.
Parameters
----------
x : tensor_like
Theano symbolic representing the input minibatch.
Returns
-------
y : tensor_like
(Symbolic) hidden unit activations given the input.
"""
if self.act_enc is None:
act_enc = lambda x: x
else:
act_enc = self.act_enc
return act_enc(self._hidden_input(x))
def _hidden_input(self, x):
    """
    Compute the pre-nonlinearity input to the hidden layer, b + x W.

    Parameters
    ----------
    x : tensor_like
        Theano symbolic representing the input minibatch.

    Returns
    -------
    y : tensor_like
        (Symbolic) input flowing into the hidden layer nonlinearity.
    """
    projection = tensor.dot(x, self.weights)
    return self.hidbias + projection
def upward_pass(self, inputs):
    """
    Alias for `encode`; invoked when the autoencoder is wrapped by
    mlp.PretrainedLayer.

    Parameters
    ----------
    inputs : tensor_like or list of tensor_likes
        Minibatch(es) to encode.

    Returns
    -------
    tensor_like or list of tensor_likes
        The encoded minibatch(es).
    """
    return self.encode(inputs)
def encode(self, inputs):
    """
    Map inputs through the encoder function.

    Parameters
    ----------
    inputs : tensor_like or list of tensor_likes
        Theano symbolic (or list thereof) representing the input
        minibatch(es) to be encoded. Assumed to be 2-tensors with
        examples along the first dimension.

    Returns
    -------
    encoded : tensor_like or list of tensor_likes
        The corresponding minibatch(es) after encoding.
    """
    if isinstance(inputs, tensor.Variable):
        # Single symbolic batch: reformat, then apply the encoder.
        return self._hidden_activation(self.format_enc(inputs))
    # A collection of batches: encode each one recursively.
    return [self.encode(v) for v in inputs]
def decode(self, hiddens):
    """
    Map hidden representations through the decoder function.

    Parameters
    ----------
    hiddens : tensor_like or list of tensor_likes
        Theano symbolic (or list thereof) representing the hidden
        minibatch(es) to be decoded. Assumed to be 2-tensors with
        examples along the first dimension.

    Returns
    -------
    decoded : tensor_like or list of tensor_likes
        The corresponding minibatch(es) after decoding.
    """
    if not isinstance(hiddens, tensor.Variable):
        # A collection of batches: decode each one recursively.
        return [self.decode(v) for v in hiddens]
    # Linear decoder when no nonlinearity was configured.
    act_dec = self.act_dec if self.act_dec is not None else (lambda x: x)
    reconstruction = act_dec(self.visbias + tensor.dot(hiddens,
                                                       self.w_prime))
    return self.format_dec(reconstruction)
def get_weights(self, borrow=False):
    """Return the current value of the encoder weight matrix."""
    # Delegate directly to the shared variable's accessor.
    return self.weights.get_value(borrow=borrow)
def get_weights_format(self):
    """Return the axis labels of the weight matrix: visible, then hidden."""
    return ['v', 'h']
class DenoisingAutoencoder(Autoencoder):
    """
    A denoising autoencoder learns a representation of the input by
    reconstructing a noisy version of it.

    Parameters
    ----------
    corruptor : object
        Instance of a corruptor object to use for corrupting the input.
    nvis : int
        Number of visible units.
    nhid : int
        Number of hidden units.
    act_enc : callable, str or None
        Encoder nonlinearity.
    act_dec : callable, str or None
        Decoder nonlinearity.
    tied_weights : bool, optional
        Whether the decoder weights are the transpose of the encoder's.
    irange : float, optional
        Width of the initial weight sampling interval.
    rng : RandomState object or seed, optional
        Random number generator for initialization.

    Notes
    -----
    The remaining parameters are identical to those of the constructor
    for the Autoencoder class; see the `Autoencoder.__init__` docstring
    for details.
    """
    def __init__(self, corruptor, nvis, nhid, act_enc, act_dec,
                 tied_weights=False, irange=1e-3, rng=9001):
        # BUG FIX: Autoencoder.__init__ takes (nhid, act_enc, act_dec,
        # tied_weights, irange, rng, nvis=None); the previous purely
        # positional call started with `nvis`, shifting every argument
        # by one (nvis was used as nhid, nhid as act_enc, etc.).
        super(DenoisingAutoencoder, self).__init__(
            nhid,
            act_enc,
            act_dec,
            tied_weights,
            irange,
            rng,
            nvis=nvis
        )
        self.corruptor = corruptor

    def reconstruct(self, inputs):
        """
        Reconstruct the inputs after corrupting and mapping through the
        encoder and decoder.

        Parameters
        ----------
        inputs : tensor_like or list of tensor_likes
            Theano symbolic (or list thereof) representing the input
            minibatch(es) to be corrupted and reconstructed. Assumed to
            be 2-tensors with examples along the first dimension.

        Returns
        -------
        reconstructed : tensor_like or list of tensor_likes
            The corresponding reconstructed minibatch(es) after
            corruption and encoding/decoding.
        """
        # NOTE(review): `reconstruct` on the parent is not visible in this
        # chunk — presumably defined on Autoencoder elsewhere; confirm.
        corrupted = self.corruptor(inputs)
        return super(DenoisingAutoencoder, self).reconstruct(corrupted)
class ContractiveAutoencoder(Autoencoder):
    """
    A contracting autoencoder works like a regular autoencoder but adds an
    extra term to its cost function: a penalty on the Frobenius norm of
    the Jacobian of the encoder with respect to its input.
    """
    @functools.wraps(Autoencoder.__init__)
    def __init__(self, *args, **kwargs):
        super(ContractiveAutoencoder, self).__init__(*args, **kwargs)
        # The Jacobian shortcut used below is only valid for encoders that
        # act elementwise on Wx + b, so reject anything else up front.
        probe = tensor.matrix()
        if not is_pure_elemwise(self.act_enc(probe), [probe]):
            raise ValueError("Invalid encoder activation function: "
                             "not an elementwise function of its input")

    def _activation_grad(self, inputs):
        """
        Symbolically compute dh/da: the derivative of each hidden unit
        with respect to its presynaptic input a = Wx + b.

        Parameters
        ----------
        inputs : tensor_like or list of tensor_likes
            2-tensor minibatch(es), examples along the first dimension.

        Returns
        -------
        act_grad : tensor_like
            2-dimensional tensor of dh/da for every pre/postsynaptic
            pair. Taking the gradient of hiddens.sum() w.r.t. the
            presynaptic activity works because the gradient of the sum
            with respect to hiddens is a matrix of ones.

        Notes
        -----
        Theano cannot (efficiently) evaluate the full Jacobian
        automatically, so this "semi-automatic" trick is used instead.
        It depends *critically* on `_hidden_input` implementing only an
        affine transformation (Wx + b) and on `self.act_enc` being an
        independent, elementwise operation.
        """
        # Pre-nonlinearity input to the hidden units.
        presynaptic = self._hidden_input(inputs)
        hiddens = self.act_enc(presynaptic)
        return tensor.grad(hiddens.sum(), presynaptic)

    def jacobian_h_x(self, inputs):
        """
        Symbolically compute the encoder's Jacobian for each example.

        Parameters
        ----------
        inputs : tensor_like or list of tensor_likes
            2-tensor minibatch(es), examples along the first dimension.

        Returns
        -------
        jacobian : tensor_like
            3-dimensional tensor holding, per mini-batch example, the
            Jacobian matrix of the encoder transformation. Apply any
            penalty you want to it, or use `contraction_penalty` for the
            default one.
        """
        # For an elementwise act_enc(Wx + b), the Jacobian factors into
        # the weight matrix scaled per-example by dh/da.
        grads = self._activation_grad(inputs)
        return self.weights * grads.dimshuffle(0, 'x', 1)

    def contraction_penalty(self, data):
        """
        Symbolically compute the contraction penalty term.

        Parameters
        ----------
        data : tuple containing one tensor_like or list of tensor_likes
            2-tensor minibatch(es), examples along the first dimension.

        Returns
        -------
        tensor_like
            Scalar penalty (per-example squared Frobenius norm of the
            Jacobian, averaged over the batch). Add it to the output of
            a Cost object, such as SquaredError, to penalize it.
        """
        X = data
        grads = self._activation_grad(X)
        # Squared Frobenius norm of the Jacobian, without materializing it.
        frob_norm = tensor.dot(tensor.sqr(grads),
                               tensor.sqr(self.weights).sum(axis=0))
        penalty = frob_norm.sum() / X.shape[0]
        return tensor.cast(penalty, X.dtype)

    def contraction_penalty_data_specs(self):
        """Return the (space, source) pair that `contraction_penalty` expects."""
        return (self.get_input_space(), self.get_input_source())
class HigherOrderContractiveAutoencoder(ContractiveAutoencoder):
    """
    Higher order contractive autoencoder: adds a stochastic approximation
    of higher-order (Hessian) regularization on top of the contraction
    penalty.

    Parameters
    ----------
    corruptor : object
        Instance of a corruptor object to use for corrupting the input.
    num_corruptions : int
        Number of corrupted copies of each input used in the stochastic
        Hessian approximation.
    nvis : int
        Number of visible units.
    nhid : int
        Number of hidden units.
    act_enc : callable, str or None
        Encoder nonlinearity.
    act_dec : callable, str or None
        Decoder nonlinearity.
    tied_weights : bool, optional
    irange : float, optional
    rng : RandomState object or seed, optional

    Notes
    -----
    The remaining parameters are identical to those of the constructor
    for the Autoencoder class; see the `ContractiveAutoEncoder.__init__`
    docstring for details.
    """
    def __init__(self, corruptor, num_corruptions, nvis, nhid, act_enc,
                 act_dec, tied_weights=False, irange=1e-3, rng=9001):
        # BUG FIX: the inherited __init__ (Autoencoder, via
        # ContractiveAutoencoder's *args pass-through) takes
        # (nhid, act_enc, act_dec, tied_weights, irange, rng, nvis=None);
        # the previous purely positional call started with `nvis`,
        # shifting every argument by one.
        super(HigherOrderContractiveAutoencoder, self).__init__(
            nhid,
            act_enc,
            act_dec,
            tied_weights,
            irange,
            rng,
            nvis=nvis
        )
        self.corruptor = corruptor
        self.num_corruptions = num_corruptions

    def higher_order_penalty(self, data):
        """
        Stochastic approximation of the Hessian Frobenius norm.

        Parameters
        ----------
        data : tensor_like
            2-tensor minibatch, examples along the first dimension.

        Returns
        -------
        tensor_like
            Scalar: the mean squared difference between the encoder
            Jacobian at the clean input and at each of
            `num_corruptions` corrupted copies.
        """
        X = data
        corrupted_inputs = [self.corruptor(X) for times in
                            range(self.num_corruptions)]
        # Finite-difference estimate of second-order structure: compare
        # the Jacobian at X against the Jacobian at each corrupted copy.
        hessian = tensor.concatenate(
            [self.jacobian_h_x(X) - self.jacobian_h_x(corrupted)
             for corrupted in corrupted_inputs])
        return (hessian ** 2).mean()

    def higher_order_penalty_data_specs(self):
        """Return the (space, source) pair that `higher_order_penalty` expects."""
        return (self.get_input_space(), self.get_input_source())
class UntiedAutoencoder(Autoencoder):
    """
    Build an untied-weights autoencoder from a trained tied-weights one:
    the decoder weights start out equal to the transposed encoder
    weights, but are stored as an independent parameter that can diverge
    during further training.

    Parameters
    ----------
    base : Autoencoder
        A tied-weights autoencoder whose parameter values are copied.
    """
    def __init__(self, base):
        if not (isinstance(base, Autoencoder) and base.tied_weights):
            raise ValueError("%s is not a tied-weights autoencoder" %
                             str(base))
        # NOTE(review): `base.nvis` is not set by Autoencoder.__init__ as
        # shown in this file — confirm the attribute exists on `base`.
        # NOTE(review): `tensor.shared` below is unusual — Theano's shared
        # constructor normally lives at `theano.shared`; confirm the
        # module aliasing used by this file's imports.
        super(UntiedAutoencoder, self).__init__(
            nvis=base.nvis, nhid=base.nhid, act_enc=base.act_enc,
            act_dec=base.act_dec, tied_weights=True, irange=base.irange,
            rng=base.rng)
        # Deep-copy every parameter value so the new model is independent
        # of `base`.
        self.weights = tensor.shared(base.weights.get_value(borrow=False),
                                     name='weights')
        self.visbias = tensor.shared(base.visbias.get_value(borrow=False),
                                     name='vb')
        # BUG FIX: the hidden bias must be copied from base.hidbias
        # (length nhid); it was previously copied from base.visbias
        # (length nvis), which is both the wrong parameter and the wrong
        # dimension.
        self.hidbias = tensor.shared(base.hidbias.get_value(borrow=False),
                                     name='hb')
        self.w_prime = tensor.shared(base.weights.get_value(borrow=False).T,
                                     name='w_prime')
        self._params = [self.visbias, self.hidbias, self.weights,
                        self.w_prime]
class DeepComposedAutoencoder(AbstractAutoencoder):
    """
    A deep autoencoder composed of several single-layer autoencoders:
    encoding applies each layer's encoder in order, and decoding applies
    each layer's decoder in reverse order.

    Parameters
    ----------
    autoencoders : list
        A list of autoencoder objects whose spaces chain together (each
        layer's output dimension equals the next layer's input dimension).
    """
    def __init__(self, autoencoders):
        super(DeepComposedAutoencoder, self).__init__()
        self.fn = None
        self.cpu_only = False
        # Adjacent layers must agree on dimensionality.
        assert all(pre.get_output_space().dim == post.get_input_space().dim
                   for pre, post in izip(autoencoders[:-1],
                                         autoencoders[1:]))
        self.autoencoders = list(autoencoders)
        self.input_space = autoencoders[0].get_input_space()
        self.output_space = autoencoders[-1].get_output_space()

    @functools.wraps(Autoencoder.encode)
    def encode(self, inputs):
        """Encode by feeding each layer's output into the next layer."""
        result = inputs
        for layer in self.autoencoders:
            result = layer.encode(result)
        return result

    @functools.wraps(Autoencoder.decode)
    def decode(self, hiddens):
        """Decode by applying each layer's decoder, deepest layer first."""
        result = hiddens
        for layer in self.autoencoders[::-1]:
            result = layer.decode(result)
        return result

    @functools.wraps(Model.get_params)
    def get_params(self):
        """Concatenate the parameter lists of all constituent autoencoders."""
        return reduce(operator.add,
                      [layer.get_params() for layer in self.autoencoders])

    def _modify_updates(self, updates):
        """Let every constituent autoencoder censor the updates dictionary."""
        for autoencoder in self.autoencoders:
            autoencoder.modify_updates(updates)
def build_stacked_ae(nvis, nhids, act_enc, act_dec,
                     tied_weights=False, irange=1e-3, rng=None,
                     corruptor=None, contracting=False):
    """
    Allocate a stack of autoencoders, one per entry in `nhids`.

    Parameters
    ----------
    nvis : int
        Number of visible units of the first layer.
    nhids : list of int
        Hidden-layer sizes; layer k's visible size is layer k-1's hidden
        size.
    act_enc, act_dec : callable, str, None, or sequence thereof
        Encoder/decoder nonlinearities (per-layer if a sequence).
    tied_weights : bool or sequence of bool, optional
        Per-layer weight tying.
    irange : float or sequence of float, optional
        Per-layer initial weight ranges.
    rng : RandomState object or seed, optional
        Random number generator shared by all layers.
    corruptor : Corruptor or sequence, optional
        A non-None entry makes that layer a DenoisingAutoencoder.
    contracting : bool or sequence of bool, optional
        A true entry makes that layer a ContractiveAutoencoder.

    Returns
    -------
    StackedBlocks
        The stack of allocated layers.

    Raises
    ------
    ValueError
        If a layer is asked to be both denoising and contracting.
    """
    rng = make_np_rng(rng, which_method='randn')
    layers = []
    final = {}
    # "Broadcast" arguments if they are singular, or accept sequences if
    # they are the same length as nhids
    for c in ['corruptor', 'contracting', 'act_enc', 'act_dec',
              'tied_weights', 'irange']:
        if type(locals()[c]) is not str and hasattr(locals()[c], '__len__'):
            assert len(nhids) == len(locals()[c])
            final[c] = locals()[c]
        else:
            final[c] = [locals()[c]] * len(nhids)
    # The number of visible units in each layer is the initial input
    # size and the first k-1 hidden unit sizes.
    nviss = [nvis] + nhids[:-1]
    seq = izip(nhids, nviss,
               final['act_enc'],
               final['act_dec'],
               final['corruptor'],
               final['contracting'],
               final['tied_weights'],
               final['irange'],)
    # Create each layer.
    for (nhid, nvis, act_enc, act_dec, corr, cae, tied, ir) in seq:
        if cae and corr is not None:
            raise ValueError("Can't specify denoising and contracting "
                             "objectives simultaneously")
        elif cae:
            # BUG FIX: Autoencoder.__init__ (which ContractiveAutoencoder
            # forwards to via *args) takes (nhid, act_enc, act_dec,
            # tied_weights, irange, rng, nvis=None); the previous shared
            # positional tuple started with nvis, shifting every argument
            # by one. DenoisingAutoencoder's own signature still takes
            # (corruptor, nvis, nhid, ...), so its call is unchanged.
            autoenc = ContractiveAutoencoder(nhid, act_enc, act_dec,
                                             tied, ir, rng, nvis=nvis)
        elif corr is not None:
            autoenc = DenoisingAutoencoder(corr, nvis, nhid, act_enc,
                                           act_dec, tied, ir, rng)
        else:
            autoenc = Autoencoder(nhid, act_enc, act_dec,
                                  tied, ir, rng, nvis=nvis)
        layers.append(autoenc)
    # Create the stack
    return StackedBlocks(layers)
| bsd-3-clause |
1tush/sentry | src/sentry/migrations/0115_auto__del_projectcountbyminute__del_unique_projectcountbyminute_projec.py | 36 | 24582 | # -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
    """Apply the migration: drop both per-minute counter tables."""
    # Drops model 'ProjectCountByMinute'.
    db.delete_table(u'sentry_projectcountbyminute')
    # Drops model 'GroupCountByMinute' (legacy table name).
    db.delete_table('sentry_messagecountbyminute')
def backwards(self, orm):
    """Reverse the migration: recreate the 'ProjectCountByMinute' and
    'GroupCountByMinute' tables (dropped by `forwards`) with their
    original columns and unique constraints."""
    # Adding model 'ProjectCountByMinute'
    db.create_table(u'sentry_projectcountbyminute', (
        ('time_spent_count', self.gf('django.db.models.fields.PositiveIntegerField')(default=0)),
        ('times_seen', self.gf('django.db.models.fields.PositiveIntegerField')(default=0)),
        ('project', self.gf('sentry.db.models.fields.FlexibleForeignKey')(to=orm['sentry.Project'], null=True)),
        ('time_spent_total', self.gf('django.db.models.fields.PositiveIntegerField')(default=0)),
        ('date', self.gf('django.db.models.fields.DateTimeField')()),
        ('id', self.gf('sentry.db.models.fields.bounded.BoundedBigAutoField')(primary_key=True)),
    ))
    db.send_create_signal('sentry', ['ProjectCountByMinute'])
    # Adding unique constraint on 'ProjectCountByMinute', fields ['project', 'date']
    db.create_unique(u'sentry_projectcountbyminute', ['project_id', 'date'])
    # Adding model 'GroupCountByMinute'
    db.create_table('sentry_messagecountbyminute', (
        ('project', self.gf('sentry.db.models.fields.FlexibleForeignKey')(to=orm['sentry.Project'], null=True)),
        ('time_spent_total', self.gf('django.db.models.fields.PositiveIntegerField')(default=0)),
        ('group', self.gf('sentry.db.models.fields.FlexibleForeignKey')(to=orm['sentry.Group'])),
        ('time_spent_count', self.gf('django.db.models.fields.PositiveIntegerField')(default=0)),
        ('date', self.gf('django.db.models.fields.DateTimeField')(db_index=True)),
        ('times_seen', self.gf('django.db.models.fields.PositiveIntegerField')(default=0)),
        ('id', self.gf('sentry.db.models.fields.bounded.BoundedBigAutoField')(primary_key=True)),
    ))
    db.send_create_signal('sentry', ['GroupCountByMinute'])
    # Adding unique constraint on 'GroupCountByMinute', fields ['project', 'group', 'date']
    db.create_unique('sentry_messagecountbyminute', ['project_id', 'group_id', 'date'])
models = {
'sentry.accessgroup': {
'Meta': {'unique_together': "(('team', 'name'),)", 'object_name': 'AccessGroup'},
'data': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'managed': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'members': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['sentry.User']", 'symmetrical': 'False'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
'projects': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['sentry.Project']", 'symmetrical': 'False'}),
'team': ('sentry.db.models.fields.FlexibleForeignKey', [], {'to': "orm['sentry.Team']"}),
'type': ('django.db.models.fields.IntegerField', [], {'default': '50'})
},
'sentry.activity': {
'Meta': {'object_name': 'Activity'},
'data': ('django.db.models.fields.TextField', [], {'null': 'True'}),
'datetime': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'event': ('sentry.db.models.fields.FlexibleForeignKey', [], {'to': "orm['sentry.Event']", 'null': 'True'}),
'group': ('sentry.db.models.fields.FlexibleForeignKey', [], {'to': "orm['sentry.Group']", 'null': 'True'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'ident': ('django.db.models.fields.CharField', [], {'max_length': '64', 'null': 'True'}),
'project': ('sentry.db.models.fields.FlexibleForeignKey', [], {'to': "orm['sentry.Project']"}),
'type': ('django.db.models.fields.PositiveIntegerField', [], {}),
'user': ('sentry.db.models.fields.FlexibleForeignKey', [], {'to': "orm['sentry.User']", 'null': 'True'})
},
'sentry.alert': {
'Meta': {'object_name': 'Alert'},
'data': ('django.db.models.fields.TextField', [], {'null': 'True'}),
'datetime': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'group': ('sentry.db.models.fields.FlexibleForeignKey', [], {'to': "orm['sentry.Group']", 'null': 'True'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'message': ('django.db.models.fields.TextField', [], {}),
'project': ('sentry.db.models.fields.FlexibleForeignKey', [], {'to': "orm['sentry.Project']"}),
'related_groups': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'related_alerts'", 'symmetrical': 'False', 'through': "orm['sentry.AlertRelatedGroup']", 'to': "orm['sentry.Group']"}),
'status': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0', 'db_index': 'True'})
},
'sentry.alertrelatedgroup': {
'Meta': {'unique_together': "(('group', 'alert'),)", 'object_name': 'AlertRelatedGroup'},
'alert': ('sentry.db.models.fields.FlexibleForeignKey', [], {'to': "orm['sentry.Alert']"}),
'data': ('django.db.models.fields.TextField', [], {'null': 'True'}),
'group': ('sentry.db.models.fields.FlexibleForeignKey', [], {'to': "orm['sentry.Group']"}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'})
},
'sentry.event': {
'Meta': {'unique_together': "(('project', 'event_id'),)", 'object_name': 'Event', 'db_table': "'sentry_message'"},
'checksum': ('django.db.models.fields.CharField', [], {'max_length': '32', 'db_index': 'True'}),
'culprit': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'db_column': "'view'", 'blank': 'True'}),
'data': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'datetime': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'db_index': 'True'}),
'event_id': ('django.db.models.fields.CharField', [], {'max_length': '32', 'null': 'True', 'db_column': "'message_id'"}),
'group': ('sentry.db.models.fields.FlexibleForeignKey', [], {'blank': 'True', 'related_name': "'event_set'", 'null': 'True', 'to': "orm['sentry.Group']"}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'level': ('django.db.models.fields.PositiveIntegerField', [], {'default': '40', 'db_index': 'True', 'blank': 'True'}),
'logger': ('django.db.models.fields.CharField', [], {'default': "'root'", 'max_length': '64', 'db_index': 'True', 'blank': 'True'}),
'message': ('django.db.models.fields.TextField', [], {}),
'num_comments': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0', 'null': 'True'}),
'platform': ('django.db.models.fields.CharField', [], {'max_length': '64', 'null': 'True'}),
'project': ('sentry.db.models.fields.FlexibleForeignKey', [], {'to': "orm['sentry.Project']", 'null': 'True'}),
'server_name': ('django.db.models.fields.CharField', [], {'max_length': '128', 'null': 'True', 'db_index': 'True'}),
'site': ('django.db.models.fields.CharField', [], {'max_length': '128', 'null': 'True', 'db_index': 'True'}),
'time_spent': ('django.db.models.fields.IntegerField', [], {'null': 'True'})
},
'sentry.eventmapping': {
'Meta': {'unique_together': "(('project', 'event_id'),)", 'object_name': 'EventMapping'},
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'event_id': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
'group': ('sentry.db.models.fields.FlexibleForeignKey', [], {'to': "orm['sentry.Group']"}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'project': ('sentry.db.models.fields.FlexibleForeignKey', [], {'to': "orm['sentry.Project']"})
},
'sentry.group': {
'Meta': {'unique_together': "(('project', 'checksum'),)", 'object_name': 'Group', 'db_table': "'sentry_groupedmessage'"},
'active_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'db_index': 'True'}),
'checksum': ('django.db.models.fields.CharField', [], {'max_length': '32', 'db_index': 'True'}),
'culprit': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'db_column': "'view'", 'blank': 'True'}),
'data': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'first_seen': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'db_index': 'True'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'is_public': ('django.db.models.fields.NullBooleanField', [], {'default': 'False', 'null': 'True', 'blank': 'True'}),
'last_seen': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'db_index': 'True'}),
'level': ('django.db.models.fields.PositiveIntegerField', [], {'default': '40', 'db_index': 'True', 'blank': 'True'}),
'logger': ('django.db.models.fields.CharField', [], {'default': "'root'", 'max_length': '64', 'db_index': 'True', 'blank': 'True'}),
'message': ('django.db.models.fields.TextField', [], {}),
'num_comments': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0', 'null': 'True'}),
'platform': ('django.db.models.fields.CharField', [], {'max_length': '64', 'null': 'True'}),
'project': ('sentry.db.models.fields.FlexibleForeignKey', [], {'to': "orm['sentry.Project']", 'null': 'True'}),
'resolved_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'db_index': 'True'}),
'score': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'status': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0', 'db_index': 'True'}),
'time_spent_count': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'time_spent_total': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'times_seen': ('django.db.models.fields.PositiveIntegerField', [], {'default': '1', 'db_index': 'True'})
},
'sentry.groupbookmark': {
'Meta': {'unique_together': "(('project', 'user', 'group'),)", 'object_name': 'GroupBookmark'},
'group': ('sentry.db.models.fields.FlexibleForeignKey', [], {'related_name': "'bookmark_set'", 'to': "orm['sentry.Group']"}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'project': ('sentry.db.models.fields.FlexibleForeignKey', [], {'related_name': "'bookmark_set'", 'to': "orm['sentry.Project']"}),
'user': ('sentry.db.models.fields.FlexibleForeignKey', [], {'related_name': "'sentry_bookmark_set'", 'to': "orm['sentry.User']"})
},
'sentry.groupmeta': {
'Meta': {'unique_together': "(('group', 'key'),)", 'object_name': 'GroupMeta'},
'group': ('sentry.db.models.fields.FlexibleForeignKey', [], {'to': "orm['sentry.Group']"}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'key': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
'value': ('django.db.models.fields.TextField', [], {})
},
'sentry.groupseen': {
'Meta': {'unique_together': "(('user', 'group'),)", 'object_name': 'GroupSeen'},
'group': ('sentry.db.models.fields.FlexibleForeignKey', [], {'to': "orm['sentry.Group']"}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'last_seen': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'project': ('sentry.db.models.fields.FlexibleForeignKey', [], {'to': "orm['sentry.Project']"}),
'user': ('sentry.db.models.fields.FlexibleForeignKey', [], {'to': "orm['sentry.User']", 'db_index': 'False'})
},
'sentry.grouptagkey': {
'Meta': {'unique_together': "(('project', 'group', 'key'),)", 'object_name': 'GroupTagKey'},
'group': ('sentry.db.models.fields.FlexibleForeignKey', [], {'to': "orm['sentry.Group']"}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'key': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
'project': ('sentry.db.models.fields.FlexibleForeignKey', [], {'to': "orm['sentry.Project']", 'null': 'True'}),
'values_seen': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'})
},
'sentry.grouptagvalue': {
'Meta': {'unique_together': "(('project', 'key', 'value', 'group'),)", 'object_name': 'GroupTagValue', 'db_table': "'sentry_messagefiltervalue'"},
'first_seen': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'null': 'True', 'db_index': 'True'}),
'group': ('sentry.db.models.fields.FlexibleForeignKey', [], {'related_name': "'grouptag'", 'to': "orm['sentry.Group']"}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'key': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
'last_seen': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'null': 'True', 'db_index': 'True'}),
'project': ('sentry.db.models.fields.FlexibleForeignKey', [], {'related_name': "'grouptag'", 'null': 'True', 'to': "orm['sentry.Project']"}),
'times_seen': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'value': ('django.db.models.fields.CharField', [], {'max_length': '200'})
},
'sentry.lostpasswordhash': {
'Meta': {'object_name': 'LostPasswordHash'},
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'hash': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'user': ('sentry.db.models.fields.FlexibleForeignKey', [], {'to': "orm['sentry.User']", 'unique': 'True'})
},
'sentry.option': {
'Meta': {'object_name': 'Option'},
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'key': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '64'}),
'value': ('sentry.db.models.fields.pickle.UnicodePickledObjectField', [], {})
},
'sentry.pendingteammember': {
'Meta': {'unique_together': "(('team', 'email'),)", 'object_name': 'PendingTeamMember'},
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'team': ('sentry.db.models.fields.FlexibleForeignKey', [], {'related_name': "'pending_member_set'", 'to': "orm['sentry.Team']"}),
'type': ('django.db.models.fields.IntegerField', [], {'default': '50'})
},
'sentry.project': {
'Meta': {'unique_together': "(('team', 'slug'),)", 'object_name': 'Project'},
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'owner': ('sentry.db.models.fields.FlexibleForeignKey', [], {'related_name': "'sentry_owned_project_set'", 'null': 'True', 'to': "orm['sentry.User']"}),
'platform': ('django.db.models.fields.CharField', [], {'max_length': '32', 'null': 'True'}),
'public': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'slug': ('django.db.models.fields.SlugField', [], {'max_length': '50', 'null': 'True'}),
'status': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0', 'db_index': 'True'}),
'team': ('sentry.db.models.fields.FlexibleForeignKey', [], {'to': "orm['sentry.Team']", 'null': 'True'})
},
'sentry.projectkey': {
'Meta': {'object_name': 'ProjectKey'},
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'null': 'True'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'project': ('sentry.db.models.fields.FlexibleForeignKey', [], {'related_name': "'key_set'", 'to': "orm['sentry.Project']"}),
'public_key': ('django.db.models.fields.CharField', [], {'max_length': '32', 'unique': 'True', 'null': 'True'}),
'roles': ('django.db.models.fields.BigIntegerField', [], {'default': '1'}),
'secret_key': ('django.db.models.fields.CharField', [], {'max_length': '32', 'unique': 'True', 'null': 'True'}),
'user': ('sentry.db.models.fields.FlexibleForeignKey', [], {'to': "orm['sentry.User']", 'null': 'True'}),
'user_added': ('sentry.db.models.fields.FlexibleForeignKey', [], {'related_name': "'keys_added_set'", 'null': 'True', 'to': "orm['sentry.User']"})
},
'sentry.projectoption': {
'Meta': {'unique_together': "(('project', 'key'),)", 'object_name': 'ProjectOption', 'db_table': "'sentry_projectoptions'"},
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'key': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
'project': ('sentry.db.models.fields.FlexibleForeignKey', [], {'to': "orm['sentry.Project']"}),
'value': ('sentry.db.models.fields.pickle.UnicodePickledObjectField', [], {})
},
'sentry.tagkey': {
'Meta': {'unique_together': "(('project', 'key'),)", 'object_name': 'TagKey', 'db_table': "'sentry_filterkey'"},
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'key': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
'label': ('django.db.models.fields.CharField', [], {'max_length': '64', 'null': 'True'}),
'project': ('sentry.db.models.fields.FlexibleForeignKey', [], {'to': "orm['sentry.Project']"}),
'values_seen': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'})
},
'sentry.tagvalue': {
'Meta': {'unique_together': "(('project', 'key', 'value'),)", 'object_name': 'TagValue', 'db_table': "'sentry_filtervalue'"},
'data': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'first_seen': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'null': 'True', 'db_index': 'True'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'key': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
'last_seen': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'null': 'True', 'db_index': 'True'}),
'project': ('sentry.db.models.fields.FlexibleForeignKey', [], {'to': "orm['sentry.Project']", 'null': 'True'}),
'times_seen': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'value': ('django.db.models.fields.CharField', [], {'max_length': '200'})
},
'sentry.team': {
'Meta': {'object_name': 'Team'},
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'null': 'True'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'members': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'team_memberships'", 'symmetrical': 'False', 'through': "orm['sentry.TeamMember']", 'to': "orm['sentry.User']"}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
'owner': ('sentry.db.models.fields.FlexibleForeignKey', [], {'to': "orm['sentry.User']"}),
'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '50'}),
'status': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'})
},
'sentry.teammember': {
'Meta': {'unique_together': "(('team', 'user'),)", 'object_name': 'TeamMember'},
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'team': ('sentry.db.models.fields.FlexibleForeignKey', [], {'related_name': "'member_set'", 'to': "orm['sentry.Team']"}),
'type': ('django.db.models.fields.IntegerField', [], {'default': '50'}),
'user': ('sentry.db.models.fields.FlexibleForeignKey', [], {'related_name': "'sentry_teammember_set'", 'to': "orm['sentry.User']"})
},
'sentry.user': {
'Meta': {'object_name': 'User', 'db_table': "'auth_user'"},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '128'})
}, 'sentry.useroption': {
'Meta': {'unique_together': "(('user', 'project', 'key'),)", 'object_name': 'UserOption'},
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'key': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
'project': ('sentry.db.models.fields.FlexibleForeignKey', [], {'to': "orm['sentry.Project']", 'null': 'True'}),
'user': ('sentry.db.models.fields.FlexibleForeignKey', [], {'to': "orm['sentry.User']"}),
'value': ('sentry.db.models.fields.pickle.UnicodePickledObjectField', [], {})
}
}
complete_apps = ['sentry']
| bsd-3-clause |
jpautom/scikit-learn | examples/cluster/plot_birch_vs_minibatchkmeans.py | 333 | 3694 | """
=================================
Compare BIRCH and MiniBatchKMeans
=================================
This example compares the timing of Birch (with and without the global
clustering step) and MiniBatchKMeans on a synthetic dataset having
100,000 samples and 2 features generated using make_blobs.
If ``n_clusters`` is set to None, the data is reduced from 100,000
samples to a set of 158 clusters. This can be viewed as a preprocessing
step before the final (global) clustering step that further reduces these
158 clusters to 100 clusters.
"""
# Authors: Manoj Kumar <manojkumarsivaraj334@gmail.com>
# Alexandre Gramfort <alexandre.gramfort@telecom-paristech.fr>
# License: BSD 3 clause
print(__doc__)

from itertools import cycle
from time import time
import numpy as np
import matplotlib.pyplot as plt
import matplotlib.colors as colors

from sklearn.preprocessing import StandardScaler
from sklearn.cluster import Birch, MiniBatchKMeans
from sklearn.datasets.samples_generator import make_blobs

# Generate centers for the blobs so that it forms a 10 X 10 grid.
xx = np.linspace(-22, 22, 10)
yy = np.linspace(-22, 22, 10)
xx, yy = np.meshgrid(xx, yy)
n_centres = np.hstack((np.ravel(xx)[:, np.newaxis],
                       np.ravel(yy)[:, np.newaxis]))

# Generate blobs to do a comparison between MiniBatchKMeans and Birch.
X, y = make_blobs(n_samples=100000, centers=n_centres, random_state=0)

# Use all colors that matplotlib provides by default.
colors_ = cycle(colors.cnames.keys())

fig = plt.figure(figsize=(12, 4))
fig.subplots_adjust(left=0.04, right=0.98, bottom=0.1, top=0.9)

# Compute clustering with Birch with and without the final clustering step
# and plot.
birch_models = [Birch(threshold=1.7, n_clusters=None),
                Birch(threshold=1.7, n_clusters=100)]
final_step = ['without global clustering', 'with global clustering']

for ind, (birch_model, info) in enumerate(zip(birch_models, final_step)):
    t = time()
    birch_model.fit(X)
    # BUGFIX: the elapsed time was stored in time_ but the print statement
    # recomputed time() - t, so the stored value was never used and the two
    # figures could drift apart. Report the stored value instead.
    time_ = time() - t
    print("Birch %s as the final step took %0.2f seconds" % (info, time_))

    # Plot result
    labels = birch_model.labels_
    centroids = birch_model.subcluster_centers_
    n_clusters = np.unique(labels).size
    print("n_clusters : %d" % n_clusters)

    ax = fig.add_subplot(1, 3, ind + 1)
    for this_centroid, k, col in zip(centroids, range(n_clusters), colors_):
        mask = labels == k
        ax.plot(X[mask, 0], X[mask, 1], 'w',
                markerfacecolor=col, marker='.')
        if birch_model.n_clusters is None:
            ax.plot(this_centroid[0], this_centroid[1], '+', markerfacecolor=col,
                    markeredgecolor='k', markersize=5)
    ax.set_ylim([-25, 25])
    ax.set_xlim([-25, 25])
    ax.set_autoscaley_on(False)
    ax.set_title('Birch %s' % info)

# Compute clustering with MiniBatchKMeans.
mbk = MiniBatchKMeans(init='k-means++', n_clusters=100, batch_size=100,
                      n_init=10, max_no_improvement=10, verbose=0,
                      random_state=0)
t0 = time()
mbk.fit(X)
t_mini_batch = time() - t0
print("Time taken to run MiniBatchKMeans %0.2f seconds" % t_mini_batch)
mbk_means_labels_unique = np.unique(mbk.labels_)

ax = fig.add_subplot(1, 3, 3)
# NOTE: n_clusters here carries over from the last Birch model above (100,
# since that model performed global clustering into 100 clusters).
for this_centroid, k, col in zip(mbk.cluster_centers_,
                                 range(n_clusters), colors_):
    mask = mbk.labels_ == k
    ax.plot(X[mask, 0], X[mask, 1], 'w', markerfacecolor=col, marker='.')
    ax.plot(this_centroid[0], this_centroid[1], '+', markeredgecolor='k',
            markersize=5)
ax.set_xlim([-25, 25])
ax.set_ylim([-25, 25])
ax.set_title("MiniBatchKMeans")
ax.set_autoscaley_on(False)
plt.show()
| bsd-3-clause |
tcalmant/ipopo | docs/conf.py | 1 | 9992 | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
#
# iPOPO documentation build configuration file, created by
# sphinx-quickstart on Thu Oct 13 16:39:47 2016.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
import os
import sys
sys.path.insert(0, os.path.abspath(".."))
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#
# needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
"sphinx.ext.autodoc",
"sphinx.ext.coverage",
]
# Autodoc configuration
autodoc_member_order = "groupwise"
autoclass_content = "both"
# Add any paths that contain templates here, relative to this directory.
templates_path = ["_templates"]
# The suffix(es) of source filenames.
# You can specify multiple suffix as a list of string:
#
# source_suffix = ['.rst', '.md']
source_suffix = ".rst"
# The encoding of source files.
#
# source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = "index"
# General information about the project.
project = "iPOPO"
copyright = "2020, Thomas Calmant"
author = "Thomas Calmant"
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = "1.0.1"
# The full version, including alpha/beta/rc tags.
release = "1.0.1"
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#
# today = ''
#
# Else, today_fmt is used as the format for a strftime call.
#
# today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# This patterns also effect to html_static_path and html_extra_path
exclude_patterns = ["_build", "Thumbs.db", ".DS_Store"]
# The reST default role (used for this markup: `text`) to use for all
# documents.
#
# default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#
# add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#
# add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#
# show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = "sphinx"
# A list of ignored prefixes for module index sorting.
# modindex_common_prefix = []
# If true, keep warnings as "system message" paragraphs in the built documents.
# keep_warnings = False
# If true, `todo` and `todoList` produce output, else they produce nothing.
todo_include_todos = False
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
#
html_theme = "sphinx_rtd_theme"
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#
# html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
# html_theme_path = []
# The name for this set of Sphinx documents.
# "<project> v<release> documentation" by default.
#
# html_title = 'iPOPO v0.6.4'
# A shorter title for the navigation bar. Default is the same as html_title.
#
# html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#
# html_logo = None
# The name of an image file (relative to this directory) to use as a favicon of
# the docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#
# html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ["_static"]
# Add any extra paths that contain custom files (such as robots.txt or
# .htaccess) here, relative to this directory. These files are copied
# directly to the root of the documentation.
#
# html_extra_path = []
# If not None, a 'Last updated on:' timestamp is inserted at every page
# bottom, using the given strftime format.
# The empty string is equivalent to '%b %d, %Y'.
#
# html_last_updated_fmt = None
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#
# html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#
# html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#
# html_additional_pages = {}
# If false, no module index is generated.
#
# html_domain_indices = True
# If false, no index is generated.
#
# html_use_index = True
# If true, the index is split into individual pages for each letter.
#
# html_split_index = False
# If true, links to the reST sources are added to the pages.
#
# html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#
# html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#
# html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#
# html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
# html_file_suffix = None
# Language to be used for generating the HTML full-text search index.
# Sphinx supports the following languages:
# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja'
# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr', 'zh'
#
# html_search_language = 'en'
# A dictionary with options for the search language support, empty by default.
# 'ja' uses this config value.
# 'zh' user can custom change `jieba` dictionary path.
#
# html_search_options = {'type': 'default'}
# The name of a javascript file (relative to the configuration directory) that
# implements a search results scorer. If empty, the default will be used.
#
# html_search_scorer = 'scorer.js'
# Output file base name for HTML help builder.
htmlhelp_basename = "iPOPOdoc"
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#
# 'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#
# 'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#
# 'preamble': '',
# Latex figure (float) alignment
#
# 'figure_align': 'htbp',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
(
master_doc,
"iPOPO.tex",
"iPOPO Documentation",
"Thomas Calmant",
"manual",
),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#
# latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#
# latex_use_parts = False
# If true, show page references after internal links.
#
# latex_show_pagerefs = False
# If true, show URL addresses after external links.
#
# latex_show_urls = False
# Documents to append as an appendix to all manuals.
#
# latex_appendices = []
# If false, will not define \strong, \code, \titleref, \crossref ... but only
# \sphinxstrong, ..., \sphinxtitleref, ... To help avoid clash with user added
# packages.
#
# latex_keep_old_macro_names = True
# If false, no module index is generated.
#
# latex_domain_indices = True
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [(master_doc, "ipopo", "iPOPO Documentation", [author], 1)]
# If true, show URL addresses after external links.
#
# man_show_urls = False
# -- Options for Texinfo output -------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
(
master_doc,
"iPOPO",
"iPOPO Documentation",
author,
"iPOPO",
"A Service-Oriented Component Model for Python",
"Miscellaneous",
),
]
# Documents to append as an appendix to all manuals.
#
# texinfo_appendices = []
# If false, no module index is generated.
#
# texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#
# texinfo_show_urls = 'footnote'
# If true, do not generate a @detailmenu in the "Top" node's menu.
#
# texinfo_no_detailmenu = False
| apache-2.0 |
sanchousese/Flask-User-starter-app | app/core/models.py | 5 | 2664 | # Copyright 2014 SolidBuilds.com. All rights reserved
#
# Authors: Ling Thio <ling.thio@gmail.com>
from flask_user import UserMixin
from flask_user.forms import RegisterForm
from flask_wtf import Form
from wtforms import StringField, SubmitField, validators
from app import db
# Define the User data model. Make sure to add the flask_user.UserMixin !!
class User(db.Model, UserMixin):
    """User account model for Flask-User: authentication data plus profile info."""
    __tablename__ = 'users'
    id = db.Column(db.Integer, primary_key=True)

    # User authentication information (required for Flask-User)
    email = db.Column(db.Unicode(255), nullable=False, server_default=u'', unique=True)
    confirmed_at = db.Column(db.DateTime())
    password = db.Column(db.String(255), nullable=False, server_default='')
    reset_password_token = db.Column(db.String(100), nullable=False, server_default='')

    # BUGFIX: 'active' was defined twice; the earlier plain Boolean column was
    # silently shadowed by this later assignment (which maps the attribute to
    # the 'is_active' DB column), so only the surviving definition is kept.
    active = db.Column('is_active', db.Boolean(), nullable=False, server_default='0')

    # User information
    first_name = db.Column(db.Unicode(50), nullable=False, server_default=u'')
    last_name = db.Column(db.Unicode(50), nullable=False, server_default=u'')

    # Relationships
    roles = db.relationship('Role', secondary='users_roles',
                            backref=db.backref('users', lazy='dynamic'))
# Define the Role data model
class Role(db.Model):
    """A named role (e.g. 'admin') that can be granted to users."""
    __tablename__ = 'roles'
    id = db.Column(db.Integer(), primary_key=True)
    name = db.Column(db.String(50), nullable=False, server_default=u'', unique=True)  # for @roles_accepted()
    label = db.Column(db.Unicode(255), server_default=u'')  # for display purposes
# Define the UserRoles association model
class UsersRoles(db.Model):
    """Association table for the many-to-many User <-> Role relationship."""
    __tablename__ = 'users_roles'
    id = db.Column(db.Integer(), primary_key=True)
    user_id = db.Column(db.Integer(), db.ForeignKey('users.id', ondelete='CASCADE'))
    role_id = db.Column(db.Integer(), db.ForeignKey('roles.id', ondelete='CASCADE'))
# Define the User registration form
# It augments the Flask-User RegisterForm with additional fields
class MyRegisterForm(RegisterForm):
    """Registration form: Flask-User's RegisterForm plus required name fields."""
    first_name = StringField('First name', validators=[
        validators.DataRequired('First name is required')])
    last_name = StringField('Last name', validators=[
        validators.DataRequired('Last name is required')])
# Define the User profile form
class UserProfileForm(Form):
    """Profile-editing form: first/last name with a save button."""
    first_name = StringField('First name', validators=[
        validators.DataRequired('First name is required')])
    last_name = StringField('Last name', validators=[
        validators.DataRequired('Last name is required')])
    submit = SubmitField('Save')
| bsd-2-clause |
trondth/master | opinionexpressions.py | 1 | 45577 | ##!/usr/bin/env python
# -*- coding: utf-8 -*-
from masters_project_config import *
from subprocess import Popen, PIPE
from collections import namedtuple
from collections import OrderedDict
import xml.etree.ElementTree as etree
import copy
import codecs
import os
import sys
import re
import json
import io
import pickle
import subjlex
import repp
import postag
import mpqa
import lth_srl
import bohnet_nivre
import argparse
# TODO - as class?
def createfile(filename="out.txt", testset=False, devset=True, opinionexp=True, opinionholder=False, doclistfile=False):
"""
Runs pipeline and writes training file
@param filename: Output filename, relative from DATA_PREFIX
"""
if doclistfile:
with open(DATA_PREFIX + doclistfile) as f:
filelist = [line[:-1] for line in f]
elif devset:
if testset:
with open(DATA_PREFIX + config.get('doclist', 'DEVPATH') +
'/' + config.get('doclist', 'DEVTESTSET')) as f:
filelist = [line[:-1] for line in f]
else:
with open(DATA_PREFIX + config.get('doclist', 'DEVPATH') +
'/' + config.get('doclist', 'DEVTRAINSET')) as f:
filelist = [line[:-1] for line in f]
else:
if testset:
with open(DATA_PREFIX + config.get('doclist', 'PATH') +
'/' + config.get('doclist', 'TESTSET')) as f:
filelist = [line[:-1] for line in f]
else:
with open(DATA_PREFIX + config.get('doclist', 'PATH') +
'/' + config.get('doclist', 'TRAINSET')) as f:
filelist = [line[:-1] for line in f]
if opinionexp:
for file in filelist:
print "FIL: {}".format(file)
iob2str, ignored_exp_str = getopinionexp_iob2(file)
if testset:
with open(filename + '-ignore', 'a') as ign_f:
for line in ignored_exp_str:
ign_f.write(line.encode('utf-8'))
#for (line, ignore_line) in zip(iob2str, ignored_exp_str):
with open(filename, 'a') as f:
for line in iob2str:
f.write(line.encode('utf-8'))
#if testset:
# f.write(line.encode('utf-8'))
# ign_f.write(ignore_line.encode('utf-8'))
# #f.write(lastcol_re.sub("", line).encode('utf-8')) # u'\u2019' # TODO - takler wapiti dette?
#else:
# f.write(line.encode('utf-8'))
elif opinionholder:
tmp = []
for file in filelist:
#print "FIL: {}".format(file)
tmp.extend(getopinionholder(file))
#break #TODO
#with open(filename, 'w') as outfile:
# json.dump(tmp, outfile)
#b = writeconll(tmp, DATA_PREFIX + "/out/tmp.conll")
#lth = lth_srl.Lth_srl()
#conlloutput = lth.run(data_prefix + "/out/tmp.conll")
#c = readconlltolst(a, conlloutput)
#d = getholdercandidates(c)
return tmp
class PickleEncoder(json.JSONEncoder):
    """JSON encoder that falls back to pickling values JSON cannot represent.

    Non-basic objects are wrapped as {'_pickle_object': <pickle string>} so
    that pickle_object() can restore them when the file is loaded again.
    """
    def default(self, obj):
        if isinstance(obj, (list, dict, str, unicode, int, float, bool, type(None))):
            # BUGFIX: was 'JSONEncoder.default(...)', which is a NameError
            # because only the json module itself is imported in this file.
            return json.JSONEncoder.default(self, obj)
        return {'_pickle_object': pickle.dumps(obj)}
def pickle_object(dct):
    """json object_hook that restores objects serialized by PickleEncoder.

    Dicts tagged with a '_pickle_object' key are unpickled; every other dict
    is passed through untouched.
    """
    if '_pickle_object' not in dct:
        # Plain JSON object - leave it as-is.
        return dct
    return pickle.loads(str(dct['_pickle_object']))
class SliceEncoder(json.JSONEncoder):
    """JSON encoder that serializes slice objects as {'start', 'stop'} dicts."""
    def default(self, obj):
        # Only slices get special treatment; defer everything else to the
        # base encoder (which raises TypeError for unknown types).
        if not isinstance(obj, slice):
            return json.JSONEncoder.default(self, obj)
        return {'start': str(obj.start), 'stop': str(obj.stop)}
def json_slice(dct):
    """json object_hook that rebuilds a slice under dct['slice'].

    Accepts either the repr-style string "slice(a, b)" or the
    {'start': ..., 'stop': ...} dict form produced by SliceEncoder.
    Mutates and returns dct.
    """
    #print "slice"
    if ('slice' in dct):
        # NOTE: 'str' shadows the builtin here; Python 2 only (basestring).
        str = dct['slice']
        if isinstance(str, basestring) and str[:6] == 'slice(':
            #print "str/unicode"
            # Parse "slice(a, b)" -> ['a', ' b'] -> slice(a, b).
            str = str[6:-1].split(',')
            dct['slice'] = slice(int(str[0]), int(str[1]))
        elif ('start' in dct['slice'] and
              'stop' in dct['slice']):
            #start = dct['slice']['start']
            tmp = slice(int(dct['slice']['start']), int(dct['slice']['stop']))
            dct['slice'] = tmp
    return dct
def dump_jsonfile(lst, filename):
    """
    Dump a list of sentences to a JSON file using PickleEncoder.

    Prompts interactively before overwriting an existing file.

    @param lst: List of sentences (anything PickleEncoder can serialize)
    @param filename: Destination path
    @return: -1 if the user declined to overwrite, otherwise None
    """
    if os.path.exists(filename):
        overwrite_ok = raw_input('File exists. Overwrite? (y/N)')
        if overwrite_ok != 'y' and overwrite_ok != 'j':
            return -1
    # BUGFIX/idiom: use a context manager so the handle is closed even if
    # json.dump raises (the old open/close pair leaked it on error).
    with open(filename, 'w') as f:
        json.dump(lst, f, cls=PickleEncoder)
def read_jsonfile(filename, object_hook=pickle_object): #json_slice):#pickle_object):
    """
    Load a JSON file written by dump_jsonfile.

    @param filename: Path to the JSON file
    @param object_hook: Hook applied to every decoded dict
                        (defaults to pickle_object, which undoes PickleEncoder)
    @return lst of sents
    """
    # NOTE: the loop variable 'file' shadows the (Python 2) builtin.
    with open(filename, 'r') as file:
        lst = json.load(file, object_hook=object_hook)
    return lst
#class SliceDecoder(json.JSONDecoder):
# def decode(self, json_string):
# default_obj = super(TemplateJSONDecoder, self).decode(json_string)
# if ('slice' in default_obj and
# 'start' in default_obj['slice'] and
# 'stop' in default_obj['start']):
# return slice(default_obj['start'], default_obj['stop'])
# return default_obj
def find_expr_heads(lst):
    """
    For each expression, find token with head outside expression span.

    @param lst: List of sentences; each sentence is a list of token dicts
                with 'head' (1-based, 0 = root) and 'GATE' annotations.
    @return: One OrderedDict per sentence mapping expression line-id to the
             list of its tokens whose head lies outside the expression.
             Entries with more than one token flag multi-headed expressions.
    Side effect: numbers every token via token['nr'] (1-based).
    """
    sent_exp_head_dict = []
    for sent in lst:
        expression_head_dict = OrderedDict()
        for i, token in enumerate(sent):
            token['nr'] = i + 1
            gate_line_ids = _is_part_of_expression(token)
            if gate_line_ids:
                for line_id in gate_line_ids:
                    head_idx = int(token['head']) - 1
                    # A root head (head == 0) is by definition outside any
                    # span; the old sent[-1] indexing checked the wrong token.
                    if head_idx < 0 or not _is_part_of_expression(sent[head_idx], gate_line_id=line_id):
                        # BUGFIX: was "'line_id' in expression_head_dict",
                        # which tested for the literal string key and so
                        # silently overwrote earlier tokens of the same
                        # expression instead of appending.
                        if line_id in expression_head_dict:
                            expression_head_dict[line_id].append(token)
                        else:
                            expression_head_dict[line_id] = [token]
        sent_exp_head_dict.append(expression_head_dict)
    return sent_exp_head_dict
def _is_part_of_expression(token, gate_line_id=False):
expression_lst = []
for gate in token['GATE']:
if gate_line_id:
if gate['line_id'] == gate_line_id:
return True
else:
if (gate['ann_type'] == 'GATE_objective-speech-event' or
gate['ann_type'] == 'GATE_expressive-subjectivity' or
gate['ann_type'] == 'GATE_direct-subjective'):
expression_lst.extend(gate['line_id'])
if expression_lst:
return expression_lst
return False
def find_possible_paths(lst):
    """
    For each token in sent, find path to all other tokens in sent.

    NOTE: not implemented yet - this is a stub that returns None.
    """
    pass
def printopinionexpression(lst, ignorelst=['GATE_attitude', 'GATE_inside', 'GATE_target']):
    """Pretty-print tokens with head/POS and their non-ignored GATE spans.

    Each token line shows form, head (if any) and POS, '[C]' for holder
    candidates, and every non-empty GATE span whose ann_type is not in
    ignorelst. Sentences are separated by a blank line.
    NOTE: ignorelst is a mutable default argument; it is never mutated here,
    so this is safe but fragile.
    """
    for sent in lst:
        for i, token in enumerate(sent):
            #print token
            # NOTE: this first assignment is dead - it is immediately
            # overwritten by the next line, so the index i is never printed.
            prstr = "{}\t".format(i)
            prstr = "{}\t".format(token['form'])
            if 'head' in token:
                prstr += "{}\t".format(token['head'])
            prstr += "{}".format(token['pos'])
            if 'holder_candidate' in token:
                prstr += "[C]"
            for gate in token['GATE']:
                #print gate
                #for gate in gates:
                # Skip zero-length spans and ignored annotation types.
                if gate['ann_type'] not in ignorelst and gate['slice'].start - gate['slice'].stop != 0:
                    prstr += "\t[{}-{}]".format(gate['slice'].start, gate['slice'].stop)
                    prstr += "{}".format(gate['ann_type'])
                    if 'nested-source' in gate:
                        prstr += '({})'.format(gate['nested-source'])
            print prstr
        print "\n"
def holders(annotations):
    """Collect opinion-holder annotations keyed by span start offset.

    @param annotations: Annotation tuples; lst[1] is a slice, lst[3] the type
                        string matched against the module-level hold_re regex
                        (defined elsewhere in this module - TODO confirm).
    @return: Dict mapping span start offset -> annotation tuple. Zero-length
             spans are skipped; a duplicate start offset is reported but the
             first annotation is kept.
    """
    d = {}
    for lst in annotations:
        if hold_re.match(lst[3]):
            # moved from iob2():
            if lst[1].start != lst[1].stop:
                if lst[1].start in d:
                    print "{} exists.\n{}\n{}".format(lst[1], d[lst[1].start], lst)
                else:
                    d[lst[1].start] = lst
            # TODO
            #else:
            #    print lst[1].start, lst[1].stop
            #    print lst[1].start != lst[1].stop
            #    print "[expressions()] ignoring none-span-holder: {}".format(lst)
    return d
def getopinionholder(filename, examplerun=False):
    """
    Build per-sentence token dicts for an MPQA document.

    Runs the tokenize/POS-tag pipeline on the document, then merges in the
    GATE annotations so each token carries its covering annotations plus
    holder/ese/ose/dse flags.

    @param filename: Document path; unless examplerun is set it must start
                     with 'database.mpqa.2.0/docs/' (the Johansson/Moschitti
                     list format), otherwise the call is a no-op.
    @param examplerun: Skip the filename-format check.
    @return: List of sentences; each sentence is a list of token dicts with
             keys form/lemma/pos/slice, boolean holder/ese/ose/dse flags and
             a 'GATE' list of covering annotations. Returns None for
             rejected filenames.
    """
    if not examplerun and (len(filename) < 23 or filename[:23] != 'database.mpqa.2.0/docs/'):
        return # Not in the format from the Johansson/Moschitti list; probably a trailing empty line
    docid = filename[23:] # strips the 'database.mpqa.2.0/docs/' prefix
    sents = t_mpqa.sentences_from_file(filename)
    sents = t_tokenizer.runrepponsentences(sents)
    #return tokens
    sents = t_postagger.pos_tag_sents_lst(sents, filename)
    #tokens = t_postagger.pos_tag_sents_lst(tokens)
    annotations = t_mpqa.annotation_tuples_from_file(filename)
    gatedic = gates(annotations)
    holder = holders(annotations)
    #print holder
    #return holder
    expr = expressions_all(annotations)[0] # todo
    #return holder
    #cur = None
    #cur_end = None
    return_lst = []
    """
    [
     {tokens: [
       {form, lemma, pos, att, holder: [], ese:[], dse:[] , ose: []}, ...
      ]}, ...
    ]
    """
    #print gatedic.keys()
    #print gatedic[1045]
    for sent in sents:
        token_lst = []
        # current_gate maps span end offset -> list of open gate annotations.
        current_gate = {}
        for token in sent:
            #print token.slice.start
            token_dic = {'form': token.token,
                         'lemma': token.lemma,
                         'pos': token.postag,
                         'holder': False,
                         'ese': False,
                         'ose': False,
                         'dse': False,
                         'slice': str(token.slice)}
            #if cur_end and cur_end <= int(token.slice.start): # TODO ?
            #    #print "her"
            #    cur = None
            #    cur_end = None
            # Drop gates whose span ended before this token. NOTE: popping
            # while iterating items() is only safe on Python 2, where
            # items() returns a list.
            for id, obj in current_gate.items():
                if id < token.slice.start:
                    current_gate.pop(id)
                #print id, obj
            # Open any gates starting at this token (GATE_inside excluded).
            if token.slice.start in gatedic:
                tmp = gatedic[token.slice.start]
                #if token.slice.start == 1045: print tmp
                for gate in tmp:
                    #if token.slice.start == 1045: print "G: ", gate
                    if gate['ann_type'] != 'GATE_inside':
                        if gate['slice'].stop in current_gate:
                            current_gate[gate['slice'].stop].append(gate)
                        else:
                            current_gate[gate['slice'].stop] = [gate]
                #token_dic['GATE'] = tmp
                #current_gate[gatedic[token.slice.start]['line_id']] = token.slice.stop
            # Flatten all currently open gates onto this token.
            token_dic['GATE'] = []
            for i in current_gate.values():
                for j in i:
                    token_dic['GATE'].append(j)
            #if token.slice.start == 1045: print "Gate: ", current_gate
            ## TODO - gate > lst
            if token.slice.stop in current_gate:
                current_gate.pop(token.slice.stop)
            if int(token.slice.start) in holder:
                token_dic['holder'] = True
            #if int(token.slice.start) in expr:
            #for gate in token_dic['GATE']:
            # Set the expression-type flags from the covering gates.
            for gate in token_dic['GATE']:
                # TODO: Error - includes none-span-expr.
                tmp = gate['ann_type']
                #tmp = expr[int(token.slice.start)]
                if tmp == 'GATE_objective-speech-event':
                    token_dic['ose'] = True #tmp[4]
                elif tmp == 'GATE_expressive-subjectivity':
                    token_dic['ese'] = True #tmp[4]
                elif tmp == 'GATE_direct-subjective':
                    token_dic['dse'] = True #tmp[4]
                else:
                    # Other ann_type
                    pass #print "FEIL. {}".format(tmp)
            #if token.slice.start == 1045: print token_dic
            token_lst.append(token_dic)
        #return_lst.append({'tokens':token_lst})
        return_lst.append(token_lst)
    return return_lst
    #return expressions(annotations)
    #print expressions(annotations)
    #print tokens
    #print iob2(tokens, expressions(annotations))
    #return (tokens, expressions(annotations))
    #return iob2(tokens, expressions(annotations))
#TODO change nested-source to set
def getopinionholderpairs(lst):
    """Debug helper: print holder/expression pairings per sentence.

    For each sentence, collects holder-candidate tokens per nested source
    (from GATE_agent gates) and the first token of every opinion expression,
    then prints both along with each expression's nested sources.
    Output is diagnostic only; nothing is returned.
    """
    for sent in lst:
        # nested-source id -> set of token indices that are holder candidates
        holders = {}
        # expression line-id -> (first token index, gate), in document order
        expressions = OrderedDict()
        for i, token in enumerate(sent):
            #print i, token
            #if token['form'] == 'He': print "t....", token
            for gate in token['GATE']:
                #print "G: {}".format(gate)
                if gate['ann_type'] == 'GATE_agent':
                    # TODO - exptype
                    if 'holder_candidate' in token and token['holder_candidate']:
                        #(token['pos'][:2] == 'NN' or token['pos'][:3] == 'PRP'):
                        if 'nested_source_split' in gate:
                            tmp = gate['nested_source_split']
                        #elif 'line_id' in gate:
                        #    tmp = gate['line_id']
                        # NOTE(review): if 'nested_source_split' is missing,
                        # tmp keeps its value from a previous iteration (or is
                        # unbound on the first one) - looks like a latent bug;
                        # confirm against the data.
                        for s in tmp:
                            if s in holders:
                                holders[s].add(i)
                            else:
                                holders[s] = set([i])
                #if token['slice'].start == 1045: print token, gate
                if (gate['line_id'] not in expressions and
                    (gate['ann_type'] == 'GATE_objective-speech-event' or
                     gate['ann_type'] == 'GATE_expressive-subjectivity' or
                     gate['ann_type'] == 'GATE_direct-subjective')):
                    #print "..."
                    expressions[gate['line_id']] = (i, gate)
        print expressions
        print holders
        for i, gate in expressions.values():
            #print "...", token_nr, sent[token_nr]
            if 'nested_source_split' in gate:
                print "G: {}".format(gate['nested_source_split'])
            else:
                print "FEIL: {}, {}".format(i, gate)
#for exp in expressions
#b = getopinionholderpairs(a)
def getholdercandidates(lst):
    # Deprecated: raises immediately; everything below the raise is dead code
    # kept for reference. It used to flag NP/PRP tokens whose dependency head
    # is not itself an NP/PRP as 'holder_candidate'.
    # Deprec.
    raise Exception
    for sent in lst:
        count = 0
        for i, token in enumerate(sent):
            # Skip tokens inside any opinion expression.
            if True and not (token['dse'] or token['ese'] or token['ose']):
                if ('head' in token and (token['pos'][:2] == 'NN' or token['pos'][:3] == 'PRP')):
                    #print token['form']
                    head_id = int(token['head'])  # 1-based CoNLL head index, 0 = root
                    count += 1
                    token['np_num'] = count
                    if head_id != 0 and len(sent) >= head_id:
                        tmp_token = sent[head_id-1]
                        # Candidate only if the head is not an NP/PRP itself
                        # (i.e. this token heads the noun phrase).
                        if not ('head' in tmp_token and (tmp_token['pos'][:2] == 'NN' or tmp_token['pos'][:3] == 'PRP')):
                            token['holder_candidate'] = True
                    else:
                        token['holder_candidate'] = True
def getopinionexp_iob2(filename):
    """
    Produce IOB2-labelled opinion expressions for one MPQA document.

    Pipeline: read sentences, tokenize (repp), POS-tag, read the GATE
    annotation tuples, drop overlapping expressions, render IOB2.

    @param filename Document path; must start with 'database.mpqa.2.0/docs/'.
    @return (iob2 string, ignored-expressions string) from iob2(), or None
            for paths outside the expected layout.
    """
    if len(filename) < 23 or filename[:23] != 'database.mpqa.2.0/docs/':
        return # Not in the Johansson/Moschitti doclist format; probably a trailing blank line.
    docid = filename[23:] # Strip the 'database.mpqa.2.0/docs/' prefix. NOTE(review): unused.
    sents = t_mpqa.sentences_from_file(filename)
    sents = t_tokenizer.runrepponsentences(sents)
    sents = t_postagger.pos_tag_sents_lst(sents, filename)
    #tokens = t_postagger.pos_tag_sents_lst(tokens)
    annotations = t_mpqa.annotation_tuples_from_file(filename)
    #print expressions(annotations)
    #print tokens
    #print iob2(tokens, expressions(annotations))
    #return (tokens, expressions(annotations))
    #todo overlaps
    expressions, ignored_expr = expressions_all(annotations)
    return iob2(sents, expressions, ignored_expr)
def gates(annotations):
    """Index GATE annotations by the start offset of their span.

    @param annotations Iterable of annotation tuples (see annlsttodic).
    @return dict mapping each span start offset to the list of annotation
            dicts that begin at that offset, in input order.
    """
    by_start = {}
    for ann in annotations:
        by_start.setdefault(ann[1].start, []).append(annlsttodic(ann))
    return by_start
def annlsttodic(ann):
    """Convert a GATE annotation tuple into an attribute dict.

    The first four tuple members become 'line_id', 'slice', 'data_type' and
    'ann_type'. When a fifth member (an attribute dict) is present it is
    extended *in place* and returned, so callers see the same object. A
    'nested-source' attribute is additionally split on commas into
    'nested_source_split'.
    """
    attrs = ann[4] if len(ann) >= 5 else {}
    attrs['line_id'] = ann[0]
    attrs['slice'] = ann[1]
    attrs['data_type'] = ann[2]
    attrs['ann_type'] = ann[3]
    if 'nested-source' in attrs:
        attrs['nested_source_split'] = [part.strip() for part in attrs['nested-source'].split(',')]
    return attrs
def expressions(annotations):
    """Map span start offset -> annotation tuple for opinion expressions.

    Keeps only annotations whose type matches expr_re (OSE/ESE/DSE).
    Zero-width spans are skipped; when two expressions start at the same
    offset the first one wins and a warning is printed.
    """
    d = {}
    for lst in annotations:
        if expr_re.match(lst[3]):
            # moved from iob2():
            if lst[1].start != lst[1].stop:
                if lst[1].start in d:
                    print "{} exists.\n{}\n{}".format(lst[1], d[lst[1].start], lst)
                else:
                    d[lst[1].start] = lst
    return d
def expressions_all(annotations):
    """Split opinion-expression annotations into kept vs. overlap-ignored.

    Annotations are scanned in input order. An expression whose span starts
    before the previously kept expression has ended is recorded as ignored;
    zero-width spans are dropped entirely.

    @return (dict start->annotation, OrderedDict start->[annotations])
            with the ignored dict sorted by start offset.
    """
    kept = {}
    ignored = {}
    prev_end = 0
    for ann in annotations:
        if not expr_re.match(ann[3]):
            continue
        start = ann[1].start
        end = ann[1].stop
        if start == end:
            continue  # do not return 0-span expressions
        if prev_end <= start:  # no overlap with the previous kept span
            prev_end = end
            kept[start] = ann
        else:
            ignored.setdefault(start, []).append(ann)
    return kept, OrderedDict(sorted(ignored.items(), key=lambda t: t[0]))
def iob2(sents, expressions, ignored=OrderedDict()):
    """Render tokenized sentences as IOB2-tagged text.

    @param sents Sentences of repp tokens (objects with .token and .slice).
    @param expressions dict span-start -> kept annotation (expressions_all).
    @param ignored OrderedDict span-start -> [overlapping annotations];
           each gets its own extra column in the second return value.
           NOTE(review): mutable default argument -- safe only because it is
           never mutated here.
    @return (iob2 string, ignored-expressions string).
    """
    iob_str = ""
    ignored_str = ""
    cur = None        # annotation currently being labelled I-...
    cur_label = None  # its OSE/ESE/DSE label
    cur_end = None    # its span end offset
    for sent in sents:
        for token in sent:
            # iob2
            # NOTE(review): when cur_end is None this comparison relies on
            # Python 2's None < int ordering; it would raise on Python 3.
            if cur_end <= int(token.slice.start):
                cur = None
                cur_label = None
                cur_end = None
            # Overlapping expression while one is still open: should not
            # happen with expressions_all() output, so fail loudly.
            if int(token.slice.start) in expressions and cur:
                print cur
                raise
            if int(token.slice.start) in expressions:
                cur = expressions[int(token.slice.start)]
                cur_label = gate2label(cur)
                cur_end = cur[1].stop
                print cur_label
                iob_str += u'{}\tB-{}\n'.format(token2iob(token), cur_label)
            elif cur:
                iob_str += u'{}\tI-{}\n'.format(token2iob(token), cur_label)
            else:
                iob_str += u'{}\tO\n'.format(token2iob(token))
            # ignored: one column per overlap-dropped annotation
            ignored_line = u'{}'.format(token.token)
            for it in ignored.values():
                for gate in it:
                    #print gate #[1].start
                    if gate[1].start == int(token.slice.start):
                        ignored_line += u'\tB-' + gate2label(gate)
                    elif gate[1].stop > int(token.slice.start) and gate[1].start < int(token.slice.start):
                        ignored_line += u'\tI-' + gate2label(gate)
                    else:
                        ignored_line += u'\t_'
            ignored_str += u'\n' + ignored_line
        iob_str += u'\n'
        ignored_str += u'\n'
    return iob_str, ignored_str
def gate2label(gate):
    """Map a GATE expression annotation to its uppercase IOB2 label.

    @param gate Annotation sequence whose 4th element is the GATE type.
    @return 'OSE', 'ESE' or 'DSE'.
    @raises ValueError for any other annotation type.
    """
    ann_type = gate[3]
    if ann_type == 'GATE_objective-speech-event':
        return 'OSE'
    elif ann_type == 'GATE_expressive-subjectivity':
        return 'ESE'
    elif ann_type == 'GATE_direct-subjective':
        return 'DSE'
    # Bug fix: the old error path formatted an undefined name ``tmp``
    # (NameError) and then used a bare ``raise`` with no active exception.
    raise ValueError("FEIL. {}".format(ann_type))
def gatestr2label(gatestr):
    """Map a GATE expression type string to its lowercase label.

    @param gatestr GATE annotation type, e.g. 'GATE_direct-subjective'.
    @return 'ose', 'ese' or 'dse'.
    @raises ValueError for any other type string.
    """
    if gatestr == 'GATE_objective-speech-event':
        return 'ose'
    elif gatestr == 'GATE_expressive-subjectivity':
        return 'ese'
    elif gatestr == 'GATE_direct-subjective':
        return 'dse'
    # Bug fix: the old error path formatted an undefined name ``tmp``
    # (NameError) and then used a bare ``raise`` with no active exception.
    raise ValueError("FEIL. {}".format(gatestr))
#def iob2(tokens, expressions):
# return_str = ""
# return_ignored = ""
# cur = None
# cur_end = None
# ignored = {}
# ignored_end = {}
# for sent in tokens:
# for token in sent:
# #print token
# #print "{}-{}".format(token.token, token.slice.start)
# #print expressions[1089]
# #print int(token.slice.start) in expressions
# if cur_end and cur_end <= int(token.slice.start): # TODO ?
# cur = None
# cur_end = None
# if int(token.slice.start) in expressions and cur:
# print "ignoring overlap: {}\n(continues {})".format(token, lasttoken)
# # TODO Counter - overlap
# #return_ignored += u'{}\tI-{}'.format(expressions[cur])
# return_str += u'{}\tI-{}\n'.format(token2iob(token), cur)
# elif int(token.slice.start) in expressions:
# tmp = expressions[int(token.slice.start)]
# #print "token: {} tmp: {}".format(token, tmp)
# ## Following unessesary - test moved to expressions()
# #if tmp[1].start == tmp[1].stop:
# # print tmp[1].start, tmp[1].stop
# # print "[iob2()] ignoring none-span-expression: {}".format(token)
# # return_str += u'{}\tO\n'.format(token2iob(tmp))
# if True: #else:
# if tmp[3] == 'GATE_objective-speech-event':
# cur = 'OSE'
# elif tmp[3] == 'GATE_expressive-subjectivity':
# cur = 'ESE'
# elif tmp[3] == 'GATE_direct-subjective':
# cur = 'DSE'
# else:
# print "FEIL. {}".format(tmp)
# cur_end = tmp[1].stop
# return_str += u'{}\tB-{}\n'.format(token2iob(token), cur)
# lasttoken = token
# elif cur:
# return_str += u'{}\tI-{}\n'.format(token2iob(token), cur)
# else:
# return_str += u'{}\tO\n'.format(token2iob(token))
# return_str += u'\n'
# return return_str, return_ignored
def token2iob(token):
    """Render one repp token as tab-separated IOB2 feature columns:
    form, lemma, POS tag and subjectivity-lexicon attribute.

    A literal '#' form is escaped as '\#' so it is not read back as a
    comment line.
    """
    subjectivity = t_subjlex.getsubj(token)
    form = '\#' if token.token == '#' else token.token
    return u'{}\t{}\t{}\t{}'.format(form, token.lemma, token.postag, subjectivity)
# Strips the final tab-separated column from a line.
lastcol_re = re.compile(r'\t[^\t]*$')
# Matches the three GATE opinion-expression annotation types.
# NOTE(review): 'direct-subjectiv'/'expressive-subjectiv' lack the final 'e';
# this still works because re.match() anchors at the start only, so the full
# type names match as prefixes.
expr_re = re.compile(r'GATE_(objective-speech-event|direct-subjectiv|expressive-subjectiv)')
# Matches GATE opinion-holder (agent) annotations.
hold_re = re.compile(r'GATE_agent')
# Shared tool singletons used throughout this module.
t_mpqa = mpqa.MPQA()
t_tokenizer = repp.Tokenize()
t_postagger = postag.Postag()
t_subjlex = subjlex.Subjlex()
def readiob2(file, cols=('form', 'lemma', 'pos', 'att', 'gold_label', 'label', 'label/score')):
    """
    Read an IOB2 file into a list of sentences.

    @param file Path to the IOB2 file (sentences separated by blank lines).
    @param cols Column names; each data line must have exactly this many
                whitespace-separated fields (see splitiob2).
    @return List of sentences, each a list of token dicts keyed by *cols*.
            Lines starting with '#' whose field count does not match are
            ignored as comments.
    """
    sents = []
    current = []
    # Bug fix: use a context manager so the handle is closed even when
    # splitiob2 raises (the original leaked the open file on error).
    with open(file) as f:
        for line in f:
            if line == "\n":
                sents.append(current)
                current = []
            elif len(line.split()) != len(cols) and line[0] == '#':
                # Comment line with a mismatched column count - skip it.
                pass
            else:
                current.append(splitiob2(line, cols))
    if current:
        # Keep the final sentence when the file lacks a trailing blank line.
        sents.append(current)
    return sents
def splitiob2(line, cols):
    """Split one whitespace-separated IOB2 line into a dict keyed by *cols*.

    @raises ValueError when the field count does not match len(cols).
    """
    fields = line.split()
    if len(fields) != len(cols):
        raise ValueError('Wrong number of columns.')
    return dict(zip(cols, fields))
def writeconll2009(lst, file):
    """Write sentences to *file* in CoNLL-2009 format.

    Only ID, FORM, (gold) LEMMA and (gold) POS are filled in; the other
    columns are '_' placeholders for downstream tools to complete.

    @param lst List of sentences, each a list of token dicts with at least
               'form', 'lemma' and 'pos' keys.
    @param file Output path.
    @return The output path, unchanged.
    """
    # Columns: ID FORM LEMMA PLEMMA POS PPOS FEAT PFEAT HEAD PHEAD DEPREL PDEPREL ARG
    # Bug fix: 'with' guarantees the handle is closed even if a token dict
    # is missing a key (the original leaked the handle on error).
    with io.open(file, 'w') as f:
        for sent in lst:
            for i, token in enumerate(sent):
                f.write(u"{}\t{}\t{}\t{}\t{}\t{}\t{}\t{}\t{}\t{}\t{}\t{}\t{}\n".format(
                    i + 1,           # word id
                    token['form'],   # word form
                    token['lemma'],  # gold lemma
                    u"_",            # pred lemma
                    token['pos'],    # gold pos
                    u"_",            # pred pos
                    u"_",            # gold feat
                    u"_",            # pred feat
                    u"_",            # gold head
                    u"_",            # pred head
                    u"_",            # gold label
                    u"_",            # pred label
                    u"_"             # arg
                ))
            f.write(u"\n")
    return file
def writeconll(lst, file):
    """Write sentences to *file* in CoNLL-X-style format for the SRL tools.

    FORM/LEMMA/POS are duplicated into the split-form columns, HEAD is 0 and
    DEPREL is '_' so a parser can fill them in later.

    @param lst List of sentences with list of dics representing each word
    @param file Output path.
    """
    # Columns: ID FORM LEMMA GPOS PPOS SPLIT_FORM SPLIT_LEMMA PPOSS HEAD DEPREL
    # Bug fix: 'with' guarantees the handle is closed even on error
    # (the original leaked the handle if a token dict was malformed).
    with io.open(file, 'w') as f:
        for sent in lst:
            for i, token in enumerate(sent):
                f.write(u"{}\t{}\t{}\t{}\t{}\t{}\t{}\t{}\t{}\t{}\n".format(
                    i + 1,
                    token['form'],
                    token['lemma'],
                    u"_",
                    token['pos'],
                    token['form'],
                    token['lemma'],
                    token['pos'],
                    u"0",
                    u"_"
                ))
            f.write(u"\n")
#def readconlltolst_dic(lst, file):
# """
# @param lst List of sentences with list of dics representing each token
# """
# # ID FORM LEMMA GPOS PPOS SPLIT FORM SPLIT LEMMA PPOSS HEAD DEPREL PRED ARG
# f = open(file, 'r')
# tokens = None
# count = 0
# lcount = 0
# newlst = []
# for sent in lst:
# count += 1
# #print count
# line = False
# newsent = []
# for i, token in enumerate(sent):
# #check alignment
# while not line or line == "\n":
# lcount += 1
# line = f.readline()
# #print("L: {} «{}»".format(lcount, line))
# #print("Empty line? {}\n{}".format(linesplit, token))
# #print ".."
# linesplit = line.split()
# if linesplit[1] != token['form']:
# print("Not aligned! l.{}\nconll: {}\ntoken: {}".format(lcount, linesplit, token))
# return lst
# if len(linesplit) > 8:
# token['head'] = linesplit[8]
# #print linesplit
# #print token
# if len(linesplit) > 9:
# token['deprel'] = linesplit[9]
# if len(linesplit) > 10:
# token['pred'] = linesplit[10]
# token['arg'] = linesplit[11:]
# newsent.append(token)
# #print token
# line = False
# newlst.append(newsent)
# f.close()
# return lst
def readconll2009(filename):
    """Read a CoNLL-2009 file into a list of sentences.

    @param filename Path to the file; sentences are separated by blank lines.
    @return List of sentences, each a list of token dicts produced by
            parseconll2009line.
    """
    print("open {}".format(filename))
    sentences = []
    current = []
    # Bug fix: 'with' closes the handle (the original never closed it).
    with io.open(filename, 'r') as f:
        for line in f:
            if line == '\n':
                sentences.append(current)
                current = []
            else:
                current.append(parseconll2009line(line))
    # Bug fix: keep the final sentence when the file has no trailing blank
    # line (matches the behavior of readiob2).
    if current:
        sentences.append(current)
    return sentences
def readconll(filename):
    """Read a CoNLL file into a list of sentences.

    @param filename Path to the file; sentences are separated by blank lines.
    @return List of sentences, each a list of token dicts produced by
            parseconllline.
    """
    print("open {}".format(filename))
    sentences = []
    current = []
    # Bug fix: 'with' closes the handle (the original never closed it).
    with io.open(filename, 'r') as f:
        for line in f:
            if line == '\n':
                sentences.append(current)
                current = []
            else:
                current.append(parseconllline(line))
    # Bug fix: keep the final sentence when the file has no trailing blank
    # line (matches the behavior of readiob2).
    if current:
        sentences.append(current)
    return sentences
def parseconllline(line):
    """Parse one tab-separated CoNLL line into a token dict.

    Expected columns (>= 11): ID FORM LEMMA GPOS PPOS ... with HEAD at
    index 8, DEPREL at 9, PRED at 10 and the remaining columns as ARG.

    @return dict with keys 'id', 'form', 'lemma', 'pos' (column 4, the
            Stanford tagger POS), 'head', 'deprel', 'pred' and 'arg'.
    @raises ValueError on a line with fewer than 11 columns.
    """
    fields = line.split('\t')
    if len(fields) < 11:
        # Bug fix: the old code wrapped this check in a try/except that
        # printed and re-raised a bare Exception; report the bad line once.
        raise ValueError("Not correct format: {!r}".format(fields))
    token = {}
    token['id'] = fields[0]
    token['form'] = fields[1]
    token['lemma'] = fields[2]
    token['pos'] = fields[4]  # pos from stanford postagger
    # The length guard above makes these indexes safe unconditionally.
    token['head'] = fields[8]
    token['deprel'] = fields[9]
    token['pred'] = fields[10]
    token['arg'] = fields[11:]
    return token
def parseconll2009line(line):
    """Parse one tab-separated CoNLL-2009 line into a token dict.

    Expected columns (>= 13): ID FORM LEMMA PLEMMA POS PPOS FEAT PFEAT
    HEAD PHEAD DEPREL PDEPREL ... The *predicted* head/label columns are
    used: PHEAD (index 9) and PDEPREL (index 11).

    @return dict with keys 'id', 'form', 'lemma', 'pos' (column 4, the
            Stanford tagger POS), 'head' and 'deprel'.
    @raises ValueError on a line with fewer than 13 columns.
    """
    fields = line.split('\t')
    if len(fields) < 13:
        # Bug fix: the old code wrapped this check in a try/except that
        # printed and re-raised a bare Exception; report the bad line once.
        raise ValueError("Not correct format: {!r}".format(fields))
    token = {}
    token['id'] = fields[0]
    token['form'] = fields[1]
    token['lemma'] = fields[2]
    token['pos'] = fields[4]  # pos from stanford postagger
    # The length guard above makes these indexes safe unconditionally.
    token['head'] = fields[9]     # PHEAD
    token['deprel'] = fields[11]  # PDEPREL
    return token
def readconll2009tolst(lst, filename):
    """
    Merge CoNLL-2009 parser output back onto tokenized sentences.

    Walks *lst* and *filename* in lockstep, checking that each CoNLL FORM
    column matches the token's 'form', and copies the predicted head
    (PHEAD, column 9) and label (PDEPREL, column 11) onto deep copies of
    the tokens.

    @param lst List of sentences with list of dics representing each word
    @param filename CoNLL-2009 file aligned token-for-token with *lst*
    @return New list of sentences with enriched token copies; on an
            alignment mismatch the partially merged list is returned early.
            NOTE(review): the file handle is not closed on that early
            return path.
    """
    # ID FORM LEMMA GPOS PPOS SPLIT FORM SPLIT LEMMA PPOSS HEAD DEPREL PRED ARG
    # ID FORM LEMMA PLEMMA POS PPOS FEAT PFEAT HEAD PHEAD LABEL PLABEL ARG
    #f = open(filename, 'r')
    print "open {}".format(filename)
    f = io.open(filename, 'r') #, encoding='utf-8')
    tokens = None
    count = 0   # sentence counter (progress reporting only)
    lcount = 0  # CoNLL line counter (for error messages)
    newlst = []
    for sent in lst:
        count += 1
        if count == 10 or count == 100 or count % 1000 == 0:
            # Coarse progress indicator for long files.
            print "setning: {}".format(count)
        #print "setning:", count
        line = False
        newsent = []
        # f.write(u"{}\t{}\t{}\t{}\t{}\t{}\t{}\t{}\t{}\t{}\t{}\t{}\t{}\n".format(
        #     i+1,            # word id        0
        #     token['form'],  # word form      1
        #     token['lemma'], # pred. lemma    2
        #     u"_",           # gold lemma     3
        #     token['pos'],   # pred. pos      4
        #     u"_",           # gold pos       5
        #     u"_",           # gold feat      6
        #     u"_",           # pred feat      7
        #     u"_",           # gold head      8
        #     u"_",           # pred head      9
        #     u"_",           # gold label    10
        #     u"_",           # pred label    11
        #     u"_"            # attributes
        # ))
        for i, token_origin in enumerate(sent):
            # Deep copy so the merged output does not mutate the input tokens.
            token = copy.deepcopy(token_origin)
            #check alignment
            # Skip sentence-separator blank lines in the CoNLL stream.
            while not line or line == "\n":
                lcount += 1
                line = f.readline() #.decode('utf-8')
                #print("L: {} «{}»".format(lcount, line))
                #print("Empty line? {}\n{}".format(linesplit, token))
                #print ".."
            linesplit = line.split('\t')
            try:
                if linesplit[1] != token['form']:
                    #print("Not aligned! {}\n{}".format(linesplit, token))
                    print("Not aligned! l.{}\nconll: {}\ntoken: {}".format(lcount, linesplit, token))
                    return newlst
            except:
                print linesplit
                raise
            if len(linesplit) > 8:
                token['head'] = linesplit[9]   # PHEAD column
                #print linesplit
                #print token
            if len(linesplit) > 9:
                token['deprel'] = linesplit[11]  # PDEPREL column
            #if len(linesplit) > 10:
            #    token['pred'] = linesplit[10]
            #    token['arg'] = linesplit[11:]
            newsent.append(token)
            #print token
            line = False
        newlst.append(newsent)
    f.close()
    #print newlst[0][0]
    return newlst
def readconlltolst(lst, file):
    """
    Merge CoNLL parser output back onto tokenized sentences, in place.

    Like readconll2009tolst but for the CoNLL-X-style layout (HEAD at
    column 8, DEPREL at 9, PRED at 10, ARG from 11) and *without* copying:
    the token dicts in *lst* are mutated directly.

    @param lst List of sentences with list of dics representing each word
    @param file CoNLL file aligned token-for-token with *lst*
    @return *lst* itself (tokens mutated in place).
            NOTE(review): 'newlst' is built but never returned on success --
            the tokens it holds are the same mutated dicts, so returning
            'lst' is equivalent; the handle is not closed on the early
            misalignment return.
    """
    # ID FORM LEMMA GPOS PPOS SPLIT FORM SPLIT LEMMA PPOSS HEAD DEPREL PRED ARG
    #f = open(file, 'r')
    f = io.open(file, 'r') #, encoding='utf-8')
    #f = open(file, 'r')
    tokens = None
    count = 0   # sentence counter (progress reporting only)
    lcount = 0  # CoNLL line counter (for error messages)
    newlst = []
    for sent in lst:
        count += 1
        if count % 1000 == 0:
            print "setning: {}".format(count)
        #print count
        line = False
        newsent = []
        for i, token in enumerate(sent):
            #print token
            #check alignment
            # Skip sentence-separator blank lines in the CoNLL stream.
            while not line or line == "\n":
                lcount += 1
                line = f.readline() #.decode('utf-8')
                #print("L: {} «{}»".format(lcount, line))
                #print("Empty line? {}\n{}".format(linesplit, token))
                #print ".."
            linesplit = line.split('\t')
            if linesplit[1] != token['form']:
                #print("Not aligned! {}\n{}".format(linesplit, token))
                print("Not aligned! l.{}\nconll: {}\ntoken: {}".format(lcount, linesplit, token))
                return lst
            if len(linesplit) > 8:
                token['head'] = linesplit[8]
                #print linesplit
                #print token
            if len(linesplit) > 9:
                token['deprel'] = linesplit[9]
            if len(linesplit) > 10:
                token['pred'] = linesplit[10]
                token['arg'] = linesplit[11:]
            newsent.append(token)
            #print token
            line = False
        newlst.append(newsent)
    f.close()
    return lst
def writeiob2(lst, filename):
    """Write SRL-enriched sentences back out as extended IOB2.

    For each token the single non-'_' entry in its 'arg' list is paired
    with the sentence's predicate at the same position, producing a
    'predicate:arglabel' column; the gold label is wrapped in dashes.

    @param lst List of sentences; each token dict needs 'form', 'lemma',
               'pos', 'att', 'head', 'deprel', 'pred', 'arg' (list) and
               'gold_label'.
    @param filename Output path.
    """
    # Bug fix: 'with' guarantees the handle is closed even if a token is
    # malformed (the original leaked the handle on error).
    with open(filename, 'w') as fw:
        for sent in lst:
            # Predicates of this sentence, in column order, so arg column i
            # maps to predicates[i].
            predicates = []
            for token in sent:
                pred = token['pred']
                if pred != '_':
                    predicates.append(pred)
            for token in sent:
                pred = '_'
                argl = '_'
                for i, arg in enumerate(token['arg']):
                    if arg != '_':
                        if pred != '_':
                            # A token should fill at most one argument slot.
                            print("Mer enn et pred: {}\n{}".format(token['form'], token['arg']))
                        pred = predicates[i]
                        argl = arg
                fw.write("{}\t{}\t{}\t{}\t{}\t{}\t{}\t{}\t-{}-\n".format(
                    token['form'],
                    token['lemma'],
                    token['pos'],
                    token['att'],
                    token['head'],
                    token['deprel'],
                    token['pred'],
                    pred + ':' + argl,
                    token['gold_label']
                ))
            fw.write("\n")
def sentwords_in_sent(sent):
    """Return the 1-based indexes of tokens that carry a sentiment-lexicon
    attribute (any 'att' value other than '-')."""
    return [pos for pos, tok in enumerate(sent, start=1) if tok['att'] != '-']
def deptree_from_sent(sent):
    """Build the dependency tree maps for a parsed sentence.

    @param sent List of token dicts with a 'head' key (CoNLL head id).
    @return (deptree, inv_deptree): deptree maps a head id to the list of
            its children's 1-based indexes (in sentence order); inv_deptree
            maps each token's 1-based index, as a string, to its head id.
    """
    children = {}
    head_of = {}
    for idx, tok in enumerate(sent, start=1):
        children.setdefault(tok['head'], []).append(idx)
        head_of[str(idx)] = tok['head']
    return children, head_of
# sentiment word proximity
def sentword_prox(sent):
    """Annotate every token with 'sentword_prox', a bucketed distance to
    the nearest sentiment word: '0' (on it), '1' (1-2 away), '3' (3-4),
    '5' (further), or '-' when the sentence has no sentiment words.
    Tokens are modified in place.
    """
    sentwords = sentwords_in_sent(sent)
    for i, token in enumerate(sent):
        # Bug fix: the empty check must come before min() -- min() on an
        # empty sequence raises ValueError, so the '-' branch was unreachable.
        if not sentwords:
            token['sentword_prox'] = '-'
            continue
        # NOTE(review): sentwords holds 1-based indexes while i is 0-based,
        # so distances are shifted by one; preserved as-is pending
        # confirmation against the original training data.
        nearest = min(sentwords, key=lambda x: abs(x - i))
        dist = abs(nearest - i)
        if dist > 4:
            token['sentword_prox'] = '5'
        elif dist > 2:
            token['sentword_prox'] = '3'
        elif dist > 0:
            token['sentword_prox'] = '1'
        else:
            token['sentword_prox'] = '0'
# sentiment word dep distance
def sentword_dep_dist(sent):
    # NOTE(review): work in progress -- 'count' is never incremented, so
    # every recorded distance is 0; the result is only printed and the
    # function always returns None. The root walk also assumes every chain
    # reaches head id '0' (a cycle or missing head would loop/KeyError).
    # Do not rely on this yet.
    distances = {}
    sentwords = sentwords_in_sent(sent)
    deptree, inv_deptree = deptree_from_sent(sent)
    for sentword in sentwords:
        count = 0
        distances[sentword] = count
        tmp = sentword
        # Walk from the sentiment word up the dependency chain to the root.
        while tmp != '0':
            print tmp
            tmp = inv_deptree[str(tmp)]
            distances[tmp] = count
    print distances
    return
if __name__ == "__main__":
    # Command-line front end: each flag (re)generates one dataset file or
    # merges parser output back into an IOB2 file.
    parser = argparse.ArgumentParser()
#    parser.add_argument("-i", dest="input_filename",
#                        help="Input file", metavar="FILE",
#                        type=lambda x: is_valid_file(parser, x))
    parser.add_argument("-dev-train", "--devel-train", dest="devtrainset",
                        help="Create training file for devel. with devtrainset",
                        action='store_true')
    parser.add_argument("-dev-test", "--devel-test", dest="devtestset",
                        help="Create test file for devel. with devtestset",
                        action='store_true')
    parser.add_argument("-train", "--train", dest="trainset",
                        help="Create training file for final test with trainset",
                        action='store_true')
    parser.add_argument("-test", "--test", dest="testset",
                        help="Create test file for final test with testset",
                        action='store_true')
    parser.add_argument("-resultfile", "--resultfile", dest="resultfile",
                        help="iob2 input file is a resultfile",
                        action='store_true')
    parser.add_argument("-conll-input", dest="conllfile",
                        help="conll input file for merging with iob2 with gold labels as last (3rd with resultfile) column",
                        metavar="FILE"
                        )
    parser.add_argument("-iob2-input", dest="iob2file",
                        help="iob2 input file with gold labels as last (3rd with resultfile) column",
                        metavar="FILE"
                        )
    parser.add_argument("-add-srl-dep", "--add-srl-dep", dest="iob2_filename",
                        help="Reads file, writes conll-version (with suffix conll)", metavar="FILE")
    # The three flags below all set the same 'interactive' dest so the
    # script behaves under plain python, --pylab and --automagic launches.
    parser.add_argument("-i", "--interactive", dest="interactive",
                        help="For interactive development",
                        action='store_true')
    parser.add_argument("--pylab", dest="interactive",
                        help="For interactive development",
                        action='store_true')
    parser.add_argument("--automagic", dest="interactive",
                        help="For interactive development",
                        action='store_true')
    args = parser.parse_args()
    if args.interactive:
        # Scratchpad of past experiments kept for interactive sessions
        # (ipython -i); nothing here runs except the banner print.
        print "Interactive"
        #test = createtrainingfile(DATA_PREFIX + "/out/trening.txt")
        #test = getopinionexp_iob2("database.mpqa.2.0/docs/20020510/21.50.13-28912")
        #test2 = getopinionexp_iob2("database.mpqa.2.0/docs/20020510/21.50.13-28912")
        #test = getopinionholder("database.mpqa.2.0/docs/20010630/00.48.42-17806")
        #test = getopinionexp_iob2("database.mpqa.2.0/docs/ula/HistoryGreek")
        #test2 = getopinionexp_iob2("database.mpqa.2.0/docs/ula/HistoryGreek")
        #testdoc = 'database.mpqa.2.0/docs/20020315/20.42.26-19148'
        #test = getopinionexp_iob2(testdoc)
        #test = "database.mpqa.2.0/docs/20010630/00.48.42-17806"
        #test = "database.mpqa.2.0/docs/20020304/20.42.01-25605" # ex 4a, 5a, 5b
        #test = "database.mpqa.2.0/docs/20020331/21.09.25-22686" # own example - terrorism .. hate
        # #test = "database.mpqa.2.0/docs/non_fbis/15.13.45-21190" # ex 4b:However
        # #test = "database.mpqa.2.0/docs/20011231/21.05.45-10422" # ex 4d, 5c
        # #test = "database.mpqa.2.0/docs/non_fbis/04.33.07-17094" # ex 6a
        # #test = "database.mpqa.2.0/docs/ula/114CUL057" # ex 6b
        #test = "database.mpqa.2.0/docs/20020510/21.50.13-28912" # SRI
        #test = 'database.mpqa.2.0/docs/20011007/03.17.23-6711'
        #a = getopinionholder(test)
        #b = writeconll(a, DATA_PREFIX + "/out/tmp2.conll")
        # lth = lth_srl.Lth_srl()
        #conlloutput = lth.run(DATA_PREFIX + "/out/tmp2.conll")
        #conlloutput = DATA_PREFIX + '/out/tmp2.conll'
        #c = readconlltolst(a, conlloutput)
        #d = getholdercandidates(c)
        #printopinionexpression(a)
        #'database.mpqa.2.0/docs/20020315/20.42.26-19148'
        #ex = expressions(s)
        #ex, ignored = getopinionexp_iob2(test)
        #foo = expressions(test2)
        #iobstr = token2iob(token)
        #iob = iob2(test[0], test[1])
        #cols=('form', 'lemma', 'pos', 'att', 'gold_label')
        #sentlst = createfile(testset=True, opinionexp=False, opinionholder=True)
        #sentlst = readiob2('/hom/trondth/Dropbox/devtrain.txt', cols)
        #newlst = readconlltolst(sentlst, '/hom/trondth/Dropbox/devtest.txt.conll.out')
        #testlst = newlst[:3]
        ###writeiob2(tstlst, "test.txt")
        #testlst = newlst[0:1]
        #sentword_dep_dist(testlst[0])
        #a = createfile("foo", opinionexp=False, opinionholder=True)
        #a = createfile("foo", opinionexp=False, opinionholder=True, testset=True)
        #sentlst = createfile("devtest.json", testset=True, opinionexp=False, opinionholder=True)
        #sentlst = createfile("devtest.json", testset=False, opinionexp=False, opinionholder=True)
        #newlst = readconlltolst(sentlst, '/home/tt/Dropbox/devtest.txt.conll.out')
        #cols=('form', 'lemma', 'pos', 'att', 'gold_label', 'label', 'label/score')
        ##exprsentlst = readiob2('/hom/trondth/master/out/minidevresult.txt', cols)
        #writeconll2009(exprsentlst, "minidevresult.conll")
        #trainsentlst = createfile(opinionexp=False, opinionholder=True, doclistfile="/config/doclists/minitrainset.txt")
        #trainsentlst_sb = readconll2009tolst(trainsentlst, "minidevtrain.conll.sb")
        #trainsentlst_dt = readconll2009tolst(trainsentlst, "minidevtrain.conll.dt")
        #trainsentlst_conll = readconll2009tolst(trainsentlst, "minidevtrain.conll.conll")
        ##writeconll2009(trainsentlst, "minidevtrain.conll")
        #testsentlst = createfile(opinionexp=False, opinionholder=True, doclistfile="/config/doclists/minitestset.txt")
        ##writeconll2009(testsentlst, "minidevtest.conll")
        #testsentlst_sb = readconll2009tolst(testsentlst, "minidevtest.conll.sb")
        #testsentlst_dt = readconll2009tolst(testsentlst, "minidevtest.conll.dt")
        #testsentlst_conll = readconll2009tolst(testsentlst, "minidevtest.conll.conll")
        #writeconll2009(a, "ex-thrives.conll2009")
        #writeconll(a, "ex-thrives.conll")
        pass
    # Result files carry two extra columns (predicted label and score).
    if args.resultfile:
        cols=('form', 'lemma', 'pos', 'att', 'gold_label', 'label', 'label/score')
    else:
        cols=('form', 'lemma', 'pos', 'att', 'gold_label')
    # Merge CoNLL parser output into an existing IOB2 file.
    if args.conllfile and args.iob2file:
        iob2lst = readiob2(args.iob2file, cols)
        fulllst = readconlltolst(iob2lst, args.conllfile)
        writeiob2(fulllst, args.iob2file + '-extended.txt')
    # Convert an IOB2 file to CoNLL for the SRL/dependency tools.
    if args.iob2_filename:
        tmp = readiob2(args.iob2_filename, cols)
        writeconll(tmp, args.iob2_filename + ".conll")
    # NOTE(review): argparse defines no 'opinionholder' option, so this
    # branch raises AttributeError whenever -conll-input is given; it also
    # uses args.iob2file which may be None here. Looks like dead/stale code.
    if args.conllfile and args.opinionholder:
        sentlst = createfile(testset=True, opinionexp=False, opinionholder=True)
        fulllst = readconlltolst(sentlst, args.conllfile)
        writeiob2(fulllst, args.iob2file + '-extended.txt')
    # Dataset generation flags (createfile is defined earlier in this file).
    if args.devtestset:
        createfile("devtest.txt", testset=True)
    if args.devtrainset:
        createfile("devtrain.txt", testset=False)
    if args.testset:
        createfile("test.txt", testset=True, devset=False)
    if args.trainset:
        createfile("train.txt", testset=False, devset=False)
| mit |
hachard/Cra-Magnet | flask/lib/python3.5/site-packages/setuptools/command/install_egg_info.py | 17 | 5027 | from distutils import log, dir_util
import os
import sys

from distutils.errors import DistutilsOptionError

from setuptools.extern.six.moves import map

from setuptools import Command
from setuptools.archive_util import unpack_archive
import pkg_resources
class install_egg_info(Command):
    """Install an .egg-info directory for the package"""

    description = "Install an .egg-info directory for the package"

    user_options = [
        ('install-dir=', 'd', "directory to install to"),
    ]

    def initialize_options(self):
        # All options start unset and are resolved in finalize_options.
        self.install_dir = None
        self.install_layout = None
        self.prefix_option = None

    def finalize_options(self):
        """Resolve the target path and the Debian-style layout options."""
        # Inherit the install directory from install_lib and the layout /
        # prefix settings from the top-level install command.
        self.set_undefined_options('install_lib',
                                   ('install_dir', 'install_dir'))
        self.set_undefined_options('install',('install_layout','install_layout'))
        if sys.hexversion > 0x2060000:
            self.set_undefined_options('install',('prefix_option','prefix_option'))
        ei_cmd = self.get_finalized_command("egg_info")
        basename = pkg_resources.Distribution(
            None, None, ei_cmd.egg_name, ei_cmd.egg_version
        ).egg_name() + '.egg-info'
        if self.install_layout:
            if not self.install_layout.lower() in ['deb']:
                # Bug fix: DistutilsOptionError was raised here without ever
                # being imported (NameError); it is now imported at module
                # level from distutils.errors.
                raise DistutilsOptionError("unknown value for --install-layout")
            self.install_layout = self.install_layout.lower()
            # Debian layout drops the '-pyX.Y' version tag from the name.
            basename = basename.replace('-py%s' % pkg_resources.PY_MAJOR, '')
        elif self.prefix_option or 'real_prefix' in sys.__dict__:
            # don't modify for virtualenv
            pass
        else:
            basename = basename.replace('-py%s' % pkg_resources.PY_MAJOR, '')
        self.source = ei_cmd.egg_info
        self.target = os.path.join(self.install_dir, basename)
        self.outputs = []

    def run(self):
        """Copy the freshly built egg-info tree into the install dir."""
        self.run_command('egg_info')
        # Remove any stale target first (directory, file or symlink).
        if os.path.isdir(self.target) and not os.path.islink(self.target):
            dir_util.remove_tree(self.target, dry_run=self.dry_run)
        elif os.path.exists(self.target):
            self.execute(os.unlink, (self.target,), "Removing " + self.target)
        if not self.dry_run:
            pkg_resources.ensure_directory(self.target)
        self.execute(
            self.copytree, (), "Copying %s to %s" % (self.source, self.target)
        )
        self.install_namespaces()

    def get_outputs(self):
        return self.outputs

    def copytree(self):
        # Copy the .egg-info tree to site-packages
        def skimmer(src, dst):
            # filter out source-control directories; note that 'src' is always
            # a '/'-separated path, regardless of platform. 'dst' is a
            # platform-specific path.
            for skip in '.svn/', 'CVS/':
                if src.startswith(skip) or '/' + skip in src:
                    return None
            # Debian layout omits SOURCES.txt from the installed metadata.
            if self.install_layout and self.install_layout in ['deb'] and src.startswith('SOURCES.txt'):
                log.info("Skipping SOURCES.txt")
                return None
            self.outputs.append(dst)
            log.debug("Copying %s to %s", src, dst)
            return dst
        unpack_archive(self.source, self.target, skimmer)

    def install_namespaces(self):
        """Write the -nspkg.pth file that bootstraps namespace packages."""
        nsp = self._get_all_ns_packages()
        if not nsp:
            return
        filename, ext = os.path.splitext(self.target)
        filename += '-nspkg.pth'
        self.outputs.append(filename)
        log.info("Installing %s", filename)
        lines = map(self._gen_nspkg_line, nsp)
        if self.dry_run:
            # always generate the lines, even in dry run
            list(lines)
            return
        with open(filename, 'wt') as f:
            f.writelines(lines)

    # Template statements executed by site.py while reading the .pth file;
    # %(pth)r / %(pkg)r are substituted per namespace package.
    _nspkg_tmpl = (
        "import sys, types, os",
        "p = os.path.join(sys._getframe(1).f_locals['sitedir'], *%(pth)r)",
        "ie = os.path.exists(os.path.join(p,'__init__.py'))",
        "m = not ie and "
        "sys.modules.setdefault(%(pkg)r, types.ModuleType(%(pkg)r))",
        "mp = (m or []) and m.__dict__.setdefault('__path__',[])",
        "(p not in mp) and mp.append(p)",
    )
    "lines for the namespace installer"

    _nspkg_tmpl_multi = (
        'm and setattr(sys.modules[%(parent)r], %(child)r, m)',
    )
    "additional line(s) when a parent package is indicated"

    @classmethod
    def _gen_nspkg_line(cls, pkg):
        """Render one .pth line (a single ';'-joined statement list)."""
        # ensure pkg is not a unicode string under Python 2.7
        pkg = str(pkg)
        pth = tuple(pkg.split('.'))
        tmpl_lines = cls._nspkg_tmpl
        parent, sep, child = pkg.rpartition('.')
        if parent:
            # Also attach the child module onto its parent package object.
            tmpl_lines += cls._nspkg_tmpl_multi
        return ';'.join(tmpl_lines) % locals() + '\n'

    def _get_all_ns_packages(self):
        """Return sorted list of all package namespaces"""
        nsp = set()
        for pkg in self.distribution.namespace_packages or []:
            pkg = pkg.split('.')
            while pkg:
                # Include every ancestor namespace ('a.b.c' -> a, a.b, a.b.c).
                nsp.add('.'.join(pkg))
                pkg.pop()
        return sorted(nsp)
| gpl-3.0 |
sunnyzwh/readthedocs.org | readthedocs/bookmarks/views.py | 28 | 6492 | from django.contrib.auth.decorators import login_required
from django.http import HttpResponse, HttpResponseRedirect
from django.http import HttpResponseBadRequest
from django.shortcuts import get_object_or_404, render_to_response
from django.views.generic import ListView, View
from django.core.urlresolvers import reverse
from django.template import RequestContext
from django.utils.decorators import method_decorator
from django.core.exceptions import ObjectDoesNotExist
from django.views.decorators.csrf import csrf_exempt
import simplejson
from readthedocs.bookmarks.models import Bookmark
from readthedocs.projects.models import Project
# These views are CSRF exempt because of Django's CSRF middleware failing here
# https://github.com/django/django/blob/stable/1.6.x/django/middleware/csrf.py#L135-L159
# We don't have a valid referrer because we're on a subdomain
class BookmarkExistsView(View):
    """Check whether a bookmark exists for a (project, version, page) triple.

    CSRF exempt because requests come from documentation subdomains where
    Django's referer-based CSRF check fails (see module comment above).
    """

    @method_decorator(csrf_exempt)
    def dispatch(self, *args, **kwargs):
        return super(BookmarkExistsView, self).dispatch(*args, **kwargs)

    def get(self, request):
        # The lookup payload only travels via POST; reject GET explicitly.
        return HttpResponse(
            content=simplejson.dumps(
                {'error': 'You must POST!'}
            ),
            content_type='application/json',
            status=405
        )

    def post(self, request, *args, **kwargs):
        """
        Returns:
            200 response with exists = True in json if bookmark exists.
            404 with exists = False in json if no matching bookmark is found.
            400 if the body is not valid JSON or is missing any one of:
            project, version, page.
        """
        try:
            post_json = simplejson.loads(request.body)
            project = post_json['project']
            version = post_json['version']
            page = post_json['page']
        except (ValueError, KeyError):
            # Bug fix: a malformed JSON body used to escape as a 500;
            # simplejson raises JSONDecodeError (a ValueError subclass),
            # which is a client error just like a missing key.
            return HttpResponseBadRequest(
                content=simplejson.dumps({'error': 'Invalid parameters'})
            )
        try:
            Bookmark.objects.get(
                project__slug=project,
                version__slug=version,
                page=page
            )
        except ObjectDoesNotExist:
            return HttpResponse(
                content=simplejson.dumps({'exists': False}),
                status=404,
                content_type="application/json"
            )
        return HttpResponse(
            content=simplejson.dumps({'exists': True}),
            status=200,
            content_type="application/json"
        )
class BookmarkListView(ListView):
    """Display every bookmark belonging to the logged-in user."""

    model = Bookmark

    @method_decorator(login_required)
    def dispatch(self, *args, **kwargs):
        return super(BookmarkListView, self).dispatch(*args, **kwargs)

    def get_queryset(self):
        # Restrict the listing to the requesting user's own bookmarks.
        queryset = Bookmark.objects.filter(user=self.request.user)
        return queryset
class BookmarkAddView(View):
    """Create a bookmark for the logged-in user from a JSON POST body."""

    @method_decorator(login_required)
    @method_decorator(csrf_exempt)
    def dispatch(self, *args, **kwargs):
        return super(BookmarkAddView, self).dispatch(*args, **kwargs)

    def get(self, request):
        # The add endpoint is POST-only; tell GET callers so explicitly.
        return HttpResponse(
            content=simplejson.dumps(
                {'error': 'You must POST!'}
            ),
            content_type='application/json',
            status=405
        )

    def post(self, request, *args, **kwargs):
        """Add a new bookmark for the current user to point at
        ``project``, ``version``, ``page``, and ``url``.

        Returns 201 on success; 400 when the body is not valid JSON, a key
        is missing, or the project/version does not exist.
        """
        try:
            post_json = simplejson.loads(request.body)
            project_slug = post_json['project']
            version_slug = post_json['version']
            page_slug = post_json['page']
            url = post_json['url']
        except (ValueError, KeyError):
            # Bug fix: a malformed JSON body used to escape as a 500;
            # treat it as a bad request, the same as a missing key.
            return HttpResponseBadRequest(
                content=simplejson.dumps({'error': "Invalid parameters"})
            )
        try:
            project = Project.objects.get(slug=project_slug)
            version = project.versions.get(slug=version_slug)
        except ObjectDoesNotExist:
            return HttpResponseBadRequest(
                content=simplejson.dumps(
                    {'error': "Project or Version does not exist"}
                )
            )
        # get_or_create makes repeated adds of the same bookmark idempotent.
        Bookmark.objects.get_or_create(
            user=request.user,
            url=url,
            project=project,
            version=version,
            page=page_slug,
        )
        return HttpResponse(
            simplejson.dumps({'added': True}),
            status=201,
            content_type='application/json'
        )
class BookmarkRemoveView(View):
    """
    Deletes a user's bookmark in response to a POST request.
    Renders a delete? confirmation page in response to a GET request.
    """

    @method_decorator(login_required)
    @method_decorator(csrf_exempt)
    def dispatch(self, *args, **kwargs):
        return super(BookmarkRemoveView, self).dispatch(*args, **kwargs)

    def get(self, request, *args, **kwargs):
        # Confirmation page only; the actual deletion happens on POST.
        return render_to_response(
            'bookmarks/bookmark_delete.html',
            context_instance=RequestContext(request)
        )

    def post(self, request, *args, **kwargs):
        """
        Delete a bookmark identified either by the ``bookmark_pk`` URL
        kwarg (redirects to the list view) or by JSON giving project,
        version, url and page (returns JSON).
        """
        if 'bookmark_pk' in kwargs:
            bookmark = get_object_or_404(Bookmark, pk=kwargs['bookmark_pk'])
            bookmark.delete()
            return HttpResponseRedirect(reverse('bookmark_list'))
        else:
            try:
                post_json = simplejson.loads(request.body)
                project = Project.objects.get(slug=post_json['project'])
                version = project.versions.get(slug=post_json['version'])
                url = post_json['url']
                page = post_json['page']
            except (KeyError, ValueError):
                # Bug fix: a malformed JSON body used to escape as a 500;
                # it is a client error, the same as a missing key.
                # NOTE(review): the Project/Version lookups in this try can
                # still raise DoesNotExist and produce a 500 - confirm
                # whether that should also become a 400.
                return HttpResponseBadRequest(
                    simplejson.dumps({'error': "Invalid parameters"})
                )
            bookmark = get_object_or_404(
                Bookmark,
                user=request.user,
                url=url,
                project=project,
                version=version,
                page=page
            )
            bookmark.delete()
            return HttpResponse(
                simplejson.dumps({'removed': True}),
                status=200,
                content_type="application/json"
            )
| mit |
ManageIQ/integration_tests | cfme/fixtures/rbac.py | 2 | 11638 | """RBAC Role based parametrization and checking
The purpose of this fixture is to allow tests to be run within the context of multiple different
users, without the hastle or modifying the test. To this end, the RBAC module and fixture do not
require any modifications to the test body.
The RBAC fixture starts by receiving a list of roles and associated errors from the test metadata.
This data is in YAML format and an example can be seen below.
.. code-block:: yaml
Metadata:
test_flag: provision
suite: infra_provisioning
rbac:
roles:
default:
evmgroup-super_administrator:
evmgroup-administrator:
evmgroup-operator: NoSuchElementException
evmgroup-auditor: NoSuchElementException
Let's assume also we have a test that looks like the following::
def test_rbac(rbac_role):
if rbac_role != 'evmgroup-superadministrator' or rbac_role != 'evmgroup-operator':
1 / 0
This metadata defines the roles to be tested, and associates with them the exceptions that are
expected for that particular test, or blank if no Exception is expected. In this way we can have
5 states of test result.
* **Test Passed** - This was expected - We do nothing to this and exit early. In the example above
evmgroup-super_administrator fulfills this, as it expects no Exception.
* **Test Failed** - This was expected - We consume the Exception and change the result of the test
to be a pass. In the example, this is fulfilled by evmgroup-auditor as it was expected to fail
with the ZeroDivisionError.
* **Test Failed** - This was unexpected - We consume the Exception and raise another informing that
the test should have passed. In the example above, evmgroup-administrator satisfies this
condition as it didn't expect a failure, but got one.
* **Test Failed** - This was expected, but the wrong Exception appeared - We consume the Exception
throw another stating that the Exception wasn't of the expected type. In the example above, the
default user satifies this as it receives the ZeroDivisionError, but expects MonkeyError.
* **Test Passed** - This was unexpected - We have Exception to consume, but we raise an Exception
of our own as the test should have failed. In the example above, evmgroup-operator satisfies
this as it should have received the ZeroDivisionError, but actually passes with no error.
When a test is configured to run against the RBAC suite, it will first parametrize the test with
the associated roles from the metadata. The test will then be wrapped and before it begins
we login as the *new* user. This process is also two fold. The ``pytest_store`` holds the current
user, and logging in is performed with whatever this user value is set to. So we first replace this
value with our new user. This ensures that if the browser fails during a navigation, we get
the opportunity to log in again with the *right* user. Once the user is set, we attempt to login.
When the test finishes, we set the user back to ``default`` before moving on to handling the outcome
of the test with the wrapped hook handler. This ensures that the next test will have the correct
user at login, even if the test fails horribly, and even if the inspection of the outcome should
fail.
To configure a test to use RBAC is simple. We simply need to add ``rbac_role`` to the list of
fixtures and the addition and the ldap configuration fixture also. Below is a complete
example of adding RBAC to a test.
.. code-block:: python
import pytest
def test_rbac(rbac_role):
\"\"\" Tests provisioning from a template
Metadata:
rbac:
roles:
default:
evmgroup-super_administrator:
evmgroup-administrator:
evmgroup-operator: NoSuchElementException
evmgroup-auditor: NoSuchElementException
\"\"\"
if rbac_role != 'evmgroup-superadministrator' or rbac_role != 'evmgroup-operator':
1 / 0
Exception matching is done with a simple string startswith match.
Currently there is no provision for skipping a role for a certain test, though this is easy to
implement. There is also no provision, for tests that have multiple parameters, to change the
expectation of the test, with relation to a parameter. For example, if there was a parameter
called *rhos* and one called *ec2* we could not change the expected exception to be different
depending on if the test was run against *rhos* or *ec2*.
"""
import traceback
import fauxfactory
import pytest
from cfme.fixtures.artifactor_plugin import fire_art_test_hook
from cfme.fixtures.pytest_store import store
from cfme.utils import conf
from cfme.utils import testgen
from cfme.utils.appliance import current_appliance
from cfme.utils.browser import browser
from cfme.utils.browser import ensure_browser_open
from cfme.utils.browser import take_screenshot
from cfme.utils.log import logger
enable_rbac = False
def save_traceback_file(node, contents):
    """Ship an RBAC traceback to artifactor as a file dump.

    Args:
        node: the pytest node the traceback belongs to
        contents: the traceback text to store
    """
    fire_art_test_hook(
        node,
        'filedump',
        description="RBAC Traceback",
        contents=contents,
        file_type="rbac",
        group_id="RBAC",
        slaveid=store.slaveid,
    )
def save_screenshot(node, ss, sse):
    # Send the captured screenshot and/or the screenshot-error text to
    # artifactor. ``ss`` is the base64-encoded image, ``sse`` the error
    # text; either may be falsy, in which case that artifact is skipped.
    if ss:
        fire_art_test_hook(
            node, 'filedump',
            description="RBAC Screenshot", file_type="rbac_screenshot", mode="wb",
            contents_base64=True, contents=ss, display_glyph="camera", group_id="RBAC",
            slaveid=store.slaveid)
    if sse:
        fire_art_test_hook(
            node, 'filedump',
            description="RBAC Screenshot error", file_type="rbac_screenshot_error", mode="w",
            contents_base64=False, contents=sse, display_type="danger", group_id="RBAC",
            slaveid=store.slaveid)
def really_logout():
    """A convenience function logging out

    This function simply ensures that we are logged out and that a new browser is loaded
    ready for use.
    """
    try:
        current_appliance.server.logout()
    except AttributeError:
        # No usable server object to log out from; fall back to the browser.
        try:
            browser().quit()
        except AttributeError:
            # NOTE(review): a fresh browser is only opened when quit() also
            # fails; after a successful quit() no new browser is started,
            # despite what the docstring says — confirm this is intended.
            ensure_browser_open()
@pytest.hookimpl(hookwrapper=True)
def pytest_pyfunc_call(pyfuncitem):
    """Inspects and consumes certain exceptions

    Runs the test while logged in as the user from the ``rbac_role``
    fixture, then reconciles the outcome with the expectations declared in
    the test metadata (see the module documentation).

    Args:
        pyfuncitem: A pytest test item.
    """
    # BUG FIX: a hookwrapper must yield exactly once. The original only
    # yielded inside ``if 'rbac_role' in pyfuncitem.fixturenames``, so with
    # RBAC enabled a test without that fixture made the wrapper exit
    # without yielding, which pytest reports as a plugin error. Fold both
    # pass-through conditions into one guard.
    if not enable_rbac or 'rbac_role' not in pyfuncitem.fixturenames:
        yield
        return

    # Login as the "new" user to run the test under
    user = pyfuncitem._request.getfixturevalue('rbac_role')
    really_logout()
    logger.info(f"setting user to {user}")
    user_obj = current_appliance.collections.users.instantiate(
        username=conf.credentials[user]['username'],
        password=conf.credentials[user]['password']
    )

    # Actually perform the test. outcome is set to be a result object from the test
    with user_obj:
        outcome = yield

    screenshot, screenshot_error = take_screenshot()

    # Handle the Exception
    logger.error(pyfuncitem.location[0])
    loc = "{}/{}".format(pyfuncitem.location[0], pyfuncitem.location[2])
    logger.error(loc)
    # Expected-error mapping: role name -> exception-name prefix (or empty
    # when the role is expected to pass).
    errors = pyfuncitem.function.meta.kwargs['from_docs']['rbac']['roles']
    if errors:
        user = pyfuncitem.funcargs['rbac_role']
        if errors[user]:
            if not outcome.excinfo:
                # Expected a failure but the test passed.
                logger.error("RBAC: Test should fail!")
                raise Exception("RBAC: You should fail!")
            else:
                # Prefix-match the repr of the raised exception against the
                # expected exception name.
                if outcome.excinfo[1].__repr__().startswith(errors[user]):
                    logger.info("RBAC: Test failed as expected")
                    outcome.force_result(True)
                else:
                    contents = "".join(traceback.format_list(
                        traceback.extract_tb(outcome.excinfo[2])))
                    save_traceback_file(pyfuncitem, contents)
                    save_screenshot(pyfuncitem, screenshot, screenshot_error)
                    logger.error("RBAC: Test failed with the wrong exception")
                    raise Exception("RBAC: You should fail with {}!".format(errors[user]))
        else:
            if not outcome.excinfo:
                logger.info("RBAC: Test passed as expected")
            else:
                # Expected a pass but got a failure: archive the evidence.
                logger.error("RBAC: Test should have passed!")
                contents = "".join(traceback.format_list(
                    traceback.extract_tb(outcome.excinfo[2])))
                save_traceback_file(pyfuncitem, contents)
                save_screenshot(pyfuncitem, screenshot, screenshot_error)
                raise Exception("RBAC: Test should have passed!")
@pytest.hookimpl(hookwrapper=True)
def pytest_generate_tests(metafunc):
    """Parametrize the ``rbac_role`` fixture.

    With RBAC enabled the role list comes from the test's metadata;
    otherwise the single role ``default`` is used.

    Args:
        metafunc: the pytest metafunc for the test being collected.

    Raises:
        ValueError: when RBAC is enabled and the test has no RBAC metadata.
    """
    yield
    if 'rbac_role' in metafunc.fixturenames:
        if enable_rbac:
            try:
                meta_data = metafunc.function.meta
                roles = list(meta_data.kwargs['from_docs']['rbac']['roles'].keys())
            except (KeyError, AttributeError):
                # BUG FIX: the test name was never interpolated into the
                # message, so it always displayed a literal "{}".
                raise ValueError(
                    "Test {} should have metadata describing RBAC roles".format(
                        metafunc.function.__name__))
        else:
            roles = ['default']
        testgen.parametrize(metafunc, 'rbac_role', roles)
def pytest_addoption(parser):
    """Register the ``--rbac`` command line flag."""
    # Create the cfme option group for use in other plugins
    parser.getgroup('cfme')
    parser.addoption("--rbac", action="store_true", default=False,
                     help="enable rbac testing")
def pytest_configure(config):
    """ Filters the list of providers as part of pytest configuration. """
    global enable_rbac

    # Flip the module-level switch the other hooks in this module consult.
    if config.getoption('rbac'):
        enable_rbac = True
@pytest.fixture
def role_with_all_features(appliance):
    # Role granting every product feature; deleted after the test if it
    # still exists.
    collection = appliance.collections.roles
    new_role = collection.create(name=fauxfactory.gen_alphanumeric(start="role_"),
                                 vm_restriction=None, product_features=[(['Everything'], True)])
    yield new_role
    new_role.delete_if_exists()
@pytest.fixture
def group(appliance, role_with_all_features):
    # Group bound to the all-features role under the "My Company" tenant.
    collection = appliance.collections.groups
    new_group = collection.create(description=fauxfactory.gen_alphanumeric(start="group_"),
                                  role=role_with_all_features.name, tenant="My Company")
    yield new_group
    new_group.delete_if_exists()
@pytest.fixture
def user(appliance, group, new_credential):
    # User created inside the fixture group; removed after the test.
    collection = appliance.collections.users
    new_user = collection.create(name=fauxfactory.gen_alphanumeric(start="user_"),
                                 credential=new_credential,
                                 email='xyz@redhat.com',
                                 groups=group,
                                 cost_center='Workload',
                                 value_assign='Database')
    yield new_user
    new_user.delete_if_exists()
| gpl-2.0 |
robk5uj/invenio | modules/websubmit/lib/functions/Send_SRV_Mail.py | 33 | 4194 | ## This file is part of Invenio.
## Copyright (C) 2004, 2005, 2006, 2007, 2008, 2010, 2011 CERN.
##
## Invenio is free software; you can redistribute it and/or
## modify it under the terms of the GNU General Public License as
## published by the Free Software Foundation; either version 2 of the
## License, or (at your option) any later version.
##
## Invenio is distributed in the hope that it will be useful, but
## WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
## General Public License for more details.
##
## You should have received a copy of the GNU General Public License
## along with Invenio; if not, write to the Free Software Foundation, Inc.,
## 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
__revision__ = "$Id$"
## Description: function Send_SRV_Mail
## This function sends an email confirming the revision
## has been carried on with success
## Author: T.Baron
## PARAMETERS: addressesSRV: list of addresses to send this email to.
## categformatDAM: variable used to derive the category of
## the document from its reference. This value might then
## be used to derive the list of addresses
## emailFile: name of the file in which the user's email is
## noteFile: name of the file containing a note from the user
import os
from invenio.config import CFG_SITE_URL, \
CFG_SITE_NAME, \
CFG_SITE_SUPPORT_EMAIL, \
CFG_SITE_RECORD
from invenio.websubmit_config import CFG_WEBSUBMIT_COPY_MAILS_TO_ADMIN
from invenio.mailutils import send_email
from invenio.websubmit_functions.Retrieve_Data import Get_Field
def Send_SRV_Mail(parameters, curdir, form, user_info=None):
    """
    This function sends an email to warn people a revision has been
    carried out.

    Parameters:

       * notefile: name of the file in which the note can be found

       * emailfile: name of the file containing the submitter's email

       * addressesSRV: email addresses of the people who will receive
                       this email (comma separated list). this
                       parameter may contain the <CATEG> string. In
                       which case the variable computed from the
                       [categformatDAM] parameter replaces this
                       string.
                       eg.:"<CATEG>-email@cern.ch"

       * categformatDAM: contains a regular expression used to compute
                         the category of the document given the
                         reference of the document.
                         eg.: if [categformatAFP]="TEST-<CATEG>-.*"
                         and the reference of the document is
                         "TEST-CATEGORY1-2001-001", then the computed
                         category equals "CATEGORY1"
    """
    # Module-level state set by the WebSubmit engine before this is called.
    global rn,doctype,sysno
    # variables declaration
    FROMADDR = '%s Submission Engine <%s>' % (CFG_SITE_NAME,CFG_SITE_SUPPORT_EMAIL)
    # NOTE(review): ``addresses`` is read from [addressesSRV] but never used
    # below; the mail goes only to the submitter address(es) taken from
    # [emailFile], and the <CATEG> substitution described in the docstring
    # is not implemented here. Confirm whether this is intentional.
    addresses = parameters['addressesSRV']
    addresses = addresses.strip()
    # Read the submitter's email address(es), one per line, if the file exists.
    if parameters['emailFile'] is not None and parameters['emailFile']!="" and os.path.exists("%s/%s" % (curdir,parameters['emailFile'])):
        fp = open("%s/%s" % (curdir,parameters['emailFile']), "r")
        SuE = fp.read()
        fp.close()
    else:
        SuE = ""
    # One address per line -> comma-separated recipient list.
    SuE = SuE.replace("\n",",")
    # Read the optional note left by the submitter.
    if parameters['noteFile'] is not None and parameters['noteFile']!= "" and os.path.exists("%s/%s" % (curdir,parameters['noteFile'])):
        fp = open("%s/%s" % (curdir,parameters['noteFile']), "r")
        note = fp.read()
        fp.close()
    else:
        note = ""
    # Title and author(s) come from the record's MARC fields 245/100/700.
    title = Get_Field("245__a",sysno)
    author = Get_Field('100__a',sysno)
    author += Get_Field('700__a',sysno)
    # create message
    message = "A revised version of document %s has been submitted.\n\nTitle: %s\nAuthor(s): %s\nURL: <%s/%s/%s>%s" % (rn,title,author,CFG_SITE_URL,CFG_SITE_RECORD,sysno,note)
    # send the email
    send_email(FROMADDR, SuE, "%s revised" % rn, message, copy_to_admin=CFG_WEBSUBMIT_COPY_MAILS_TO_ADMIN)
    return ""
| gpl-2.0 |
jacquerie/inspire-next | inspirehep/modules/orcid/utils.py | 1 | 8326 | # -*- coding: utf-8 -*-
#
# This file is part of INSPIRE.
# Copyright (C) 2018 CERN.
#
# INSPIRE is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# INSPIRE is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with INSPIRE. If not, see <http://www.gnu.org/licenses/>.
#
# In applying this license, CERN does not waive the privileges and immunities
# granted to it by virtue of its status as an Intergovernmental Organization
# or submit itself to any jurisdiction.
"""ORCID utils."""
from __future__ import absolute_import, division, print_function
import hashlib
import re
from itertools import chain
from elasticsearch_dsl import Q
from flask import current_app
from six.moves.urllib.parse import urljoin
from StringIO import StringIO
from sqlalchemy import type_coerce
from sqlalchemy.dialects.postgresql import JSONB
from invenio_db import db
from invenio_oauthclient.models import (
UserIdentity,
RemoteAccount,
RemoteToken,
)
from invenio_oauthclient.utils import oauth_link_external_id
from invenio_pidstore.models import PersistentIdentifier
from invenio_records.models import RecordMetadata
from inspire_dojson.utils import get_recid_from_ref
from inspire_utils.logging import getStackTraceLogger
from inspire_utils.record import get_values_for_schema
from inspire_utils.urls import ensure_scheme
from inspirehep.modules.search.api import LiteratureSearch
from inspirehep.utils.record_getter import get_db_records
LOGGER = getStackTraceLogger(__name__)
RECID_FROM_INSPIRE_URL = re.compile(
r"https?://(?:labs\.)?inspirehep\.net/(?:record|literature)/(\d+)",
re.IGNORECASE
)
WORKS_BULK_QUERY_LIMIT = 50
def _split_lists(sequence, chunk_size):
"""Get a list created by splitting the original list every n-th element
Args:
sequence (List[Any]): a list to be split
chunk_size (int): how bit one chunk should be (n)
Returns:
List[List[Any]]: the split list
"""
return [
sequence[i:i + chunk_size] for i in range(0, len(sequence), chunk_size)
]
def _get_api_url_for_recid(server_name, api_endpoint, recid):
    """Build the API URL of a record.

    Args:
        server_name (string): server authority
        api_endpoint (string): api path
        recid (string): record ID

    Returns:
        string: API URL for the record
    """
    # A trailing slash is required so urljoin treats the endpoint as a
    # directory and appends the recid instead of replacing the last segment.
    endpoint = api_endpoint if api_endpoint.endswith('/') else api_endpoint + '/'
    base_url = urljoin(ensure_scheme(server_name), endpoint)
    return urljoin(base_url, recid)
def get_orcid_recid_key(orcid, rec_id):
    """Return the string 'orcidcache:``orcid_value``:``rec_id``'"""
    return ':'.join(['orcidcache', str(orcid), str(rec_id)])
def _get_account_and_token(orcid):
    # Join RemoteAccount -> RemoteToken -> UserIdentity and return the
    # (account, token) pair belonging to the given ORCID.
    # ``.one()`` raises if the ORCID is unknown or matches several rows.
    account_token_join = db.session.query(RemoteAccount, RemoteToken).join("remote_tokens")
    account_token_user_join = account_token_join.join(UserIdentity, UserIdentity.id_user == RemoteAccount.user_id)
    account, remote_token = account_token_user_join.filter(UserIdentity.id == orcid).one()
    return account, remote_token
def get_push_access_tokens(orcids):
    """Return the remote tokens of the given ORCIDs that allow pushing.

    Only accounts whose ``extra_data`` has ``allow_push`` set contribute
    their tokens.
    """
    accounts = db.session.query(RemoteAccount)\
        .filter(RemoteAccount.user_id == UserIdentity.id_user)\
        .filter(UserIdentity.id.in_(orcids)).all()
    return [
        token
        for account in accounts
        if account.extra_data.get('allow_push', False)
        for token in account.remote_tokens
    ]
def account_setup(remote, token, resp):
    """Perform additional setup after user have been logged in.

    This is a modified version of
    :ref:`invenio_oauthclient.contrib.orcid.account_setup` that stores
    additional metadata.

    :param remote: The remote application.
    :param token: The token value.
    :param resp: The response.
    """
    # begin_nested opens a SAVEPOINT: the extra_data update and the
    # external-id link are committed or rolled back together.
    with db.session.begin_nested():
        # Retrieve ORCID from response.
        orcid = resp.get('orcid')
        full_name = resp.get('name')

        # Set ORCID in extra_data.
        token.remote_account.extra_data = {
            'orcid': orcid,
            'full_name': full_name,
            # Pushing to ORCID is opt-in; the default comes from app config.
            'allow_push': current_app.config.get('ORCID_ALLOW_PUSH_DEFAULT', False)
        }

        user = token.remote_account.user

        # Create user <-> external id link.
        oauth_link_external_id(user, {'id': orcid, 'method': 'orcid'})
def get_orcids_for_push(record):
    """Obtain the ORCIDs associated to the list of authors in the Literature record.

    The ORCIDs are looked up both in the ``ids`` of the ``authors`` and in the
    Author records that have claimed the paper.

    Args:
        record(dict): metadata from a Literature record

    Returns:
        Iterator[str]: all ORCIDs associated to these authors
    """
    orcids_on_record = []
    author_recids_with_claims = []
    for author in record.get('authors', []):
        orcids_in_author = get_values_for_schema(author.get('ids', []), 'ORCID')
        if orcids_in_author:
            # ORCID stored directly on the literature record's author entry.
            orcids_on_record.extend(orcids_in_author)
        elif author.get('curated_relation') is True and 'record' in author:
            # Claimed author without an inline ORCID: resolve it via the
            # referenced Author record instead.
            author_recids_with_claims.append(get_recid_from_ref(author['record']))

    # Fetch all claimed Author records in one call and collect their ORCIDs.
    author_records = get_db_records('aut', author_recids_with_claims)
    all_ids = (author.get('ids', []) for author in author_records)
    orcids_in_authors = chain.from_iterable(get_values_for_schema(ids, 'ORCID') for ids in all_ids)

    # Lazy iterator; duplicates are not removed.
    return chain(orcids_on_record, orcids_in_authors)
def hash_xml_element(element):
    """Compute a hash for XML element comparison.

    Args:
        element (lxml.etree._Element): the XML node

    Return:
        string: hash, prefixed with the digest algorithm name ('sha1:')
    """
    canonical_string = canonicalize_xml_element(element)
    # Renamed from ``hash``: don't shadow the builtin of the same name.
    digest = hashlib.sha1(canonical_string)
    return 'sha1:' + digest.hexdigest()
def canonicalize_xml_element(element):
    """Serialize *element* in exclusive C14N form, comments stripped.

    Args:
        element (lxml.etree._Element): the XML node

    Return:
        string: canonical representation
    """
    output_buffer = StringIO()
    element.getroottree().write_c14n(
        output_buffer,
        with_comments=False,
        exclusive=True,
    )
    return output_buffer.getvalue()
def get_literature_recids_for_orcid(orcid):
    """Return the Literature recids that were claimed by an ORCiD.

    We record the fact that the Author record X has claimed the Literature
    record Y by storing in Y an author object with a ``$ref`` pointing to X
    and the key ``curated_relation`` set to ``True``. Therefore this method
    first searches the DB for the Author records for the one containing the
    given ORCiD, and then uses its recid to search in ES for the Literature
    records that satisfy the above property.

    Args:
        orcid (str): the ORCiD.

    Return:
        list(int): the recids of the Literature records that were claimed
        by that ORCiD.
    """
    orcid_object = '[{"schema": "ORCID", "value": "%s"}]' % orcid
    # this first query is written in a way that can use the index on (json -> ids)
    author_rec_uuid = db.session.query(RecordMetadata.id)\
        .filter(type_coerce(RecordMetadata.json, JSONB)['ids'].contains(orcid_object)).one().id
    # Map the record UUID to the human-facing recid through the PID table.
    author_recid = db.session.query(PersistentIdentifier.pid_value).filter(
        PersistentIdentifier.object_type == 'rec',
        PersistentIdentifier.object_uuid == author_rec_uuid,
        PersistentIdentifier.pid_type == 'aut',
    ).one().pid_value
    # Nested ES query: literature records whose author entry both points at
    # this recid and is marked curated (i.e. claimed).
    query = Q('match', authors__curated_relation=True) & Q('match', authors__recid=author_recid)
    search_by_curated_author = LiteratureSearch().query('nested', path='authors', query=query)\
        .params(_source=['control_number'], size=9999)

    return [el['control_number'] for el in search_by_curated_author]
| gpl-3.0 |
vmware/tosca-vcloud-plugin | tests/unittests/test_mock_network_plugin_security_group.py | 1 | 20343 | # Copyright (c) 2014 GigaSpaces Technologies Ltd. All rights reserved
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# * See the License for the specific language governing permissions and
# * limitations under the License.
import mock
import unittest
from cloudify import exceptions as cfy_exc
from tests.unittests import test_mock_base
from network_plugin import security_group
import vcloud_plugin_common
class NetworkPluginSecurityGroupMockTestCase(test_mock_base.TestBase):
def test_get_gateway_name_from_params(self):
    """edge_gateway from the security_group properties wins."""
    self.assertEqual(
        security_group._get_gateway_name({
            'security_group': {
                'edge_gateway': 'some_edge_gateway'
            }
        }),
        'some_edge_gateway'
    )
def test_get_gateway_name_from_ctx(self):
    """Fall back to vcloud_config when security_group has no gateway."""
    fake_ctx = self.generate_node_context(
        properties={
            'vcloud_config': {
                'edge_gateway': 'some_edge_gateway'
            }
        }
    )
    with mock.patch('vcloud_plugin_common.ctx', fake_ctx):
        self.assertEqual(
            security_group._get_gateway_name({
                'security_group': {}
            }),
            'some_edge_gateway'
        )
def generate_context_for_security_group(self):
    # Relationship context whose *source* node carries the vcloud_config
    # consumed by the security_group operations under test.
    fake_ctx = self.generate_relation_context()
    fake_ctx._source.node.properties = {
        'vcloud_config': {
            'edge_gateway': 'some_edge_gateway',
            'vdc': 'vdc_name'
        }
    }
    return fake_ctx
def check_rule_operation(self, rule_type, rules, vms_networks=None):
    """Run _rule_operation(rule_type) over *rules* and return the mocked
    gateway so callers can assert on add_fw_rule/delete_fw_rule calls."""
    if not vms_networks:
        vms_networks = []
    fake_client = self.generate_client(vms_networks=vms_networks)
    fake_ctx = self.generate_context_for_security_group()
    fake_ctx._target.node.properties = {
        'rules': rules
    }
    # any calls for save configuration will be success
    gateway = fake_client._vdc_gateway
    self.set_services_conf_result(
        gateway, vcloud_plugin_common.TASK_STATUS_SUCCESS
    )
    # for check calls for add/delete rule
    gateway.add_fw_rule = mock.MagicMock(return_value=None)
    gateway.delete_fw_rule = mock.MagicMock(return_value=None)
    # any networks will be routed
    self.set_network_routed_in_client(fake_client)
    with mock.patch('network_plugin.security_group.ctx', fake_ctx):
        with mock.patch('vcloud_plugin_common.ctx', fake_ctx):
            security_group._rule_operation(
                rule_type, fake_client
            )
    return gateway
def check_rule_operation_fail(self, rule_type, rules):
    """Verify the operation requests a retry when the gateway is busy."""
    fake_client = self.generate_client()
    fake_ctx = self.generate_context_for_security_group()
    fake_ctx._target.node.properties = {
        'rules': rules
    }
    # check busy
    gateway = fake_client._vdc_gateway
    self.set_gateway_busy(gateway)
    self.prepare_retry(fake_ctx)
    self.set_services_conf_result(
        fake_client._vdc_gateway, None
    )
    with mock.patch('network_plugin.security_group.ctx', fake_ctx):
        with mock.patch('vcloud_plugin_common.ctx', fake_ctx):
            security_group._rule_operation(
                rule_type, fake_client
            )
    self.check_retry_realy_called(fake_ctx)
def test_rule_operation_empty_rule(self):
    """No rules: configuration is saved, no fw rule is added/deleted."""
    for rule_type in [
        security_group.CREATE_RULE, security_group.DELETE_RULE
    ]:
        gateway = self.check_rule_operation(rule_type, [])
        gateway.save_services_configuration.assert_called_once_with()
        self.check_rule_operation_fail(rule_type, [])
        self.assertFalse(gateway.add_fw_rule.called)
        self.assertFalse(gateway.delete_fw_rule.called)
def test_rule_operation_default_rule(self):
    """An empty rule dict falls back to the documented defaults."""
    for rule_type in [
        security_group.CREATE_RULE, security_group.DELETE_RULE
    ]:
        gateway = self.check_rule_operation(rule_type, [{}])
        gateway.save_services_configuration.assert_called_once_with()
        if rule_type == security_group.CREATE_RULE:
            gateway.add_fw_rule.assert_called_once_with(
                True, 'Rule added by pyvcloud', 'allow', 'Any',
                'any', 'external', 'any', 'external', False
            )
            self.assertFalse(gateway.delete_fw_rule.called)
        else:
            gateway.delete_fw_rule.assert_called_once_with(
                'Any', 'any', 'external', 'any', 'external'
            )
            self.assertFalse(gateway.add_fw_rule.called)
        self.check_rule_operation_fail(rule_type, [{}])
def test_rule_operation_internal_rule(self):
    """Rule targeting the internal network.

    NOTE(review): the rule uses the key 'source_ip' (other tests use
    'source'), and the expected gateway call carries the default source
    'external' — confirm the key name is intentional.
    """
    for rule_type in [
        security_group.CREATE_RULE, security_group.DELETE_RULE
    ]:
        rules = [
            {
                'description': 'description',
                'source_ip': 'internal',
                'source_port': 22,
                "destination": 'internal',
                'destination_port': 40,
                'protocol': 'tcp',
                'action': 'deny',
                'log_traffic': True
            }
        ]
        gateway = self.check_rule_operation(rule_type, rules)
        gateway.save_services_configuration.assert_called_once_with()
        if rule_type == security_group.CREATE_RULE:
            gateway.add_fw_rule.assert_called_once_with(
                True, 'description', 'deny', 'Tcp', '40',
                'internal', '22', 'external', True
            )
            self.assertFalse(gateway.delete_fw_rule.called)
        else:
            gateway.delete_fw_rule.assert_called_once_with(
                'Tcp', '40', 'internal', '22', 'external'
            )
            self.assertFalse(gateway.add_fw_rule.called)
        self.check_rule_operation_fail(rule_type, rules)
def test_rule_operation_icmp_rule(self):
    """Fully specified ICMP rule is forwarded to the gateway verbatim."""
    for rule_type in [
        security_group.CREATE_RULE, security_group.DELETE_RULE
    ]:
        rules = [
            {
                'description': 'ip',
                'source': '1.2.3.4',
                'source_port': 60,
                "destination": '5.6.7.8',
                'destination_port': 22,
                'protocol': 'icmp',
                'action': 'deny',
                'log_traffic': True
            }
        ]
        gateway = self.check_rule_operation(rule_type, rules)
        gateway.save_services_configuration.assert_called_once_with()
        if rule_type == security_group.CREATE_RULE:
            gateway.add_fw_rule.assert_called_once_with(
                True, 'ip', 'deny', 'Icmp', '22', '5.6.7.8',
                '60', '1.2.3.4', True
            )
            self.assertFalse(gateway.delete_fw_rule.called)
        else:
            gateway.delete_fw_rule.assert_called_once_with(
                'Icmp', '22', '5.6.7.8', '60', '1.2.3.4'
            )
            self.assertFalse(gateway.add_fw_rule.called)
        self.check_rule_operation_fail(rule_type, rules)
def test_rule_operation_tcp_rule(self):
    """Fully specified TCP rule is forwarded to the gateway verbatim."""
    for rule_type in [
        security_group.CREATE_RULE, security_group.DELETE_RULE
    ]:
        rules = [
            {
                'description': 'ip',
                'source': '1.2.3.4',
                'source_port': 60,
                'destination': '5.6.7.8',
                'destination_port': 22,
                'protocol': 'tcp',
                'action': 'deny',
                'log_traffic': True
            }
        ]
        gateway = self.check_rule_operation(rule_type, rules)
        gateway.save_services_configuration.assert_called_once_with()
        if rule_type == security_group.CREATE_RULE:
            gateway.add_fw_rule.assert_called_once_with(
                True, 'ip', 'deny', 'Tcp', '22', '5.6.7.8', '60',
                '1.2.3.4', True
            )
            self.assertFalse(gateway.delete_fw_rule.called)
        else:
            gateway.delete_fw_rule.assert_called_once_with(
                'Tcp', '22', '5.6.7.8', '60', '1.2.3.4'
            )
            self.assertFalse(gateway.add_fw_rule.called)
        self.check_rule_operation_fail(rule_type, rules)
def test_rule_operation_host_rule(self):
    """'host' in source/destination is replaced by the VM's primary IP."""
    for rule_type in [
        security_group.CREATE_RULE, security_group.DELETE_RULE
    ]:
        # source
        rules = [
            {
                'description': 'ip',
                'source': 'host',
                'source_port': 60,
                'destination': '5.6.7.8',
                'destination_port': 22,
                'protocol': 'tcp',
                'action': 'deny',
                'log_traffic': True
            }
        ]
        gateway = self.check_rule_operation(
            rule_type, rules,
            [{
                'is_connected': True,
                'network_name': 'network_name',
                'is_primary': True,
                'ip': '1.1.1.1'
            }]
        )
        gateway.save_services_configuration.assert_called_once_with()
        if rule_type == security_group.CREATE_RULE:
            gateway.add_fw_rule.assert_called_once_with(
                True, 'ip', 'deny', 'Tcp', '22', '5.6.7.8', '60',
                '1.1.1.1', True
            )
            self.assertFalse(gateway.delete_fw_rule.called)
        else:
            gateway.delete_fw_rule.assert_called_once_with(
                'Tcp', '22', '5.6.7.8', '60', '1.1.1.1'
            )
            self.assertFalse(gateway.add_fw_rule.called)
        # destination
        rules = [
            {
                'description': 'ip',
                'source': '1.2.3.4',
                'source_port': 60,
                'destination': 'host',
                'destination_port': 22,
                'protocol': 'tcp',
                'action': 'deny',
                'log_traffic': True
            }
        ]
        gateway = self.check_rule_operation(
            rule_type, rules,
            [{
                'is_connected': True,
                'is_primary': True,
                'network_name': 'network_name',
                'ip': '1.1.1.1'
            }]
        )
        gateway.save_services_configuration.assert_called_once_with()
        if rule_type == security_group.CREATE_RULE:
            gateway.add_fw_rule.assert_called_once_with(
                True, 'ip', 'deny', 'Tcp', '22', '1.1.1.1', '60',
                '1.2.3.4', True
            )
            self.assertFalse(gateway.delete_fw_rule.called)
        else:
            gateway.delete_fw_rule.assert_called_once_with(
                'Tcp', '22', '1.1.1.1', '60', '1.2.3.4'
            )
            self.assertFalse(gateway.add_fw_rule.called)
def test_rule_operation_error_ip_rule(self):
    """Malformed IPv4 in source or destination raises NonRecoverableError."""
    for rule_type in [
        security_group.CREATE_RULE, security_group.DELETE_RULE
    ]:
        rules = [
            {
                'description': 'ip',
                'source': '300.1.3.4',
                'source_port': 60,
                'destination': '5.6.7.8',
                'destination_port': 22,
                'protocol': 'tcp',
                'action': 'deny',
                'log_traffic': True
            }
        ]
        with self.assertRaises(cfy_exc.NonRecoverableError):
            self.check_rule_operation(rule_type, rules)
        rules = [
            {
                'description': 'ip',
                'source': '2.1.3.4',
                'source_port': 60,
                'destination': '5.6.7.300',
                'destination_port': 22,
                'protocol': 'tcp',
                'action': 'deny',
                'log_traffic': True
            }
        ]
        with self.assertRaises(cfy_exc.NonRecoverableError):
            self.check_rule_operation(rule_type, rules)
def test_create(self):
    """create() succeeds with an empty rules list."""
    fake_ctx = self.generate_context_for_security_group()
    fake_client = self.generate_client()
    # empty rules list
    fake_ctx._target.node.properties = {
        'rules': []
    }
    self.set_services_conf_result(
        fake_client._vdc_gateway, vcloud_plugin_common.TASK_STATUS_SUCCESS
    )
    with mock.patch(
        'vcloud_plugin_common.VcloudAirClient.get',
        mock.MagicMock(return_value=fake_client)
    ):
        security_group.create(ctx=fake_ctx)
def test_delete(self):
    """delete() succeeds with an empty rules list."""
    fake_ctx = self.generate_context_for_security_group()
    fake_client = self.generate_client()
    # empty rules list
    fake_ctx._target.node.properties = {
        'rules': []
    }
    self.set_services_conf_result(
        fake_client._vdc_gateway, vcloud_plugin_common.TASK_STATUS_SUCCESS
    )
    with mock.patch(
        'vcloud_plugin_common.VcloudAirClient.get',
        mock.MagicMock(return_value=fake_client)
    ):
        security_group.delete(ctx=fake_ctx)
def check_creation_validation(self, rule):
fake_client = self.generate_client()
with mock.patch(
'vcloud_plugin_common.VcloudAirClient.get',
mock.MagicMock(return_value=fake_client)
):
fake_ctx = self.generate_node_context(
properties={
'vcloud_config': {
'edge_gateway': 'some_edge_gateway',
'vdc': 'vdc_name'
},
'rules': [rule]
}
)
security_group.creation_validation(ctx=fake_ctx)
def test_creation_validation(self):
fake_client = self.generate_client()
with mock.patch(
'vcloud_plugin_common.VcloudAirClient.get',
mock.MagicMock(return_value=fake_client)
):
fake_ctx = self.generate_node_context(
properties={
'vcloud_config': {
'edge_gateway': 'some_edge_gateway',
'vdc': 'vdc_name'
}
}
)
fake_client._vdc_gateway.is_fw_enabled = mock.MagicMock(
return_value=False
)
# Gateway firewall is disabled
with self.assertRaises(cfy_exc.NonRecoverableError):
security_group.creation_validation(ctx=fake_ctx)
fake_client._vdc_gateway.is_fw_enabled = mock.MagicMock(
return_value=True
)
# no rules
with self.assertRaises(cfy_exc.NonRecoverableError):
security_group.creation_validation(ctx=fake_ctx)
# wrong description
with self.assertRaises(cfy_exc.NonRecoverableError):
self.check_creation_validation({
"description": 10
})
# wrong source
with self.assertRaises(cfy_exc.NonRecoverableError):
self.check_creation_validation({
"description": 'a',
"source": 11
})
with self.assertRaises(cfy_exc.NonRecoverableError):
security_group.creation_validation(ctx=fake_ctx)
# wrong ip
with self.assertRaises(cfy_exc.NonRecoverableError):
self.check_creation_validation({
"description": 'a',
"source": '1.2.3.1111'
})
# wrong port
with self.assertRaises(cfy_exc.NonRecoverableError):
self.check_creation_validation({
"description": 'a',
"source": '1.2.3.11',
"source_port": 1234
})
# wrong destination
with self.assertRaises(cfy_exc.NonRecoverableError):
self.check_creation_validation({
"description": 'a',
"source": '1.2.3.11',
"source_port": 1234,
"destination": 123
})
# wrong destination ip
with self.assertRaises(cfy_exc.NonRecoverableError):
self.check_creation_validation({
"description": 'a',
"source": '1.2.3.11',
"source_port": 1234,
"destination": "123.1"
})
# wrong destination_port
with self.assertRaises(cfy_exc.NonRecoverableError):
self.check_creation_validation({
"description": 'a',
"source": '1.2.3.11',
"source_port": 1234,
"destination": "123.12.1.1",
'destination_port': 1111111
})
# wrong protocol
with self.assertRaises(cfy_exc.NonRecoverableError):
self.check_creation_validation({
"description": 'a',
"source": '1.2.3.11',
"source_port": 1234,
"destination": "123.12.1.1",
'destination_port': 1111,
"protocol": 'someone'
})
# wrong action
with self.assertRaises(cfy_exc.NonRecoverableError):
self.check_creation_validation({
"description": 'a',
"source": '1.2.3.11',
"source_port": 1234,
"destination": "123.12.1.1",
'destination_port': 1111,
"protocol": 'any',
"action": 'some'
})
# wrong action
with self.assertRaises(cfy_exc.NonRecoverableError):
self.check_creation_validation({
"description": 'a',
"source": '1.2.3.11',
"source_port": 1234,
"destination": "123.12.1.1",
'destination_port': 1111,
"protocol": 'any',
"action": 'allow',
'log_traffic': 'somevalue'
})
# correct
self.check_creation_validation({
"description": 'a',
"source": '1.2.3.11',
"source_port": 1234,
"destination": "123.12.1.1",
'destination_port': 1111,
"protocol": 'any',
"action": 'allow',
'log_traffic': True
})
self.check_creation_validation({
"description": 'a',
"source": '1.2.3.11',
"source_port": 1234,
"destination": "123.12.1.1",
'destination_port': 1111,
"protocol": 'any',
"action": 'allow',
'log_traffic': False
})
self.check_creation_validation({
"action": 'allow'
})
# Allow running this test module directly from the command line.
if __name__ == '__main__':
    unittest.main()
| apache-2.0 |
AllanDoensen/BitcoinUnlimited | qa/rpc-tests/signrawtransactions.py | 7 | 4690 | #!/usr/bin/env python3
# Copyright (c) 2015 The Bitcoin Core developers
# Copyright (c) 2015-2017 The Bitcoin Unlimited developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import *
class SignRawTransactionsTest(BitcoinTestFramework):
    """Tests transaction signing via RPC command "signrawtransaction"."""
    # Single-node setup: signing is local RPC work, no peers are needed.
    def setup_chain(self):
        print('Initializing test directory ' + self.options.tmpdir)
        initialize_chain_clean(self.options.tmpdir, 1)
    def setup_network(self, split=False):
        self.nodes = start_nodes(1, self.options.tmpdir)
        self.is_network_split = False
    def successful_signing_test(self):
        """Creates and signs a valid raw transaction with one input.
        Expected results:
        1) The transaction has a complete set of signatures
        2) No script verification error occurred"""
        privKeys = ['cUeKHd5orzT3mz8P9pxyREHfsWtVfgsfDjiZZBcjUBAaGk1BTj7N']
        inputs = [
            # Valid pay-to-pubkey script
            {'txid': '9b907ef1e3c26fc71fe4a4b3580bc75264112f95050014157059c736f0202e71', 'vout': 0,
             'scriptPubKey': '76a91460baa0f494b38ce3c940dea67f3804dc52d1fb9488ac'}
        ]
        outputs = {'mpLQjfK79b7CCV4VMJWEWAj5Mpx8Up5zxB': 0.1}
        rawTx = self.nodes[0].createrawtransaction(inputs, outputs)
        # Passing `inputs` again supplies the scriptPubKey for an outpoint
        # the node's wallet does not know about; `privKeys` supplies the
        # matching key so signing can complete.
        rawTxSigned = self.nodes[0].signrawtransaction(rawTx, inputs, privKeys)
        # 1) The transaction has a complete set of signatures
        assert 'complete' in rawTxSigned
        assert_equal(rawTxSigned['complete'], True)
        # 2) No script verification error occurred
        assert 'errors' not in rawTxSigned
    def script_verification_error_test(self):
        """Creates and signs a raw transaction with valid (vin 0), invalid (vin 1) and one missing (vin 2) input script.
        Expected results:
        3) The transaction has no complete set of signatures
        4) Two script verification errors occurred
        5) Script verification errors have certain properties ("txid", "vout", "scriptSig", "sequence", "error")
        6) The verification errors refer to the invalid (vin 1) and missing input (vin 2)"""
        privKeys = ['cUeKHd5orzT3mz8P9pxyREHfsWtVfgsfDjiZZBcjUBAaGk1BTj7N']
        inputs = [
            # Valid pay-to-pubkey script
            {'txid': '9b907ef1e3c26fc71fe4a4b3580bc75264112f95050014157059c736f0202e71', 'vout': 0},
            # Invalid script
            {'txid': '5b8673686910442c644b1f4993d8f7753c7c8fcb5c87ee40d56eaeef25204547', 'vout': 7},
            # Missing scriptPubKey
            {'txid': '9b907ef1e3c26fc71fe4a4b3580bc75264112f95050014157059c736f0202e71', 'vout': 1},
        ]
        # Only the first two inputs get a scriptPubKey here; vin 2 is left
        # without one on purpose to provoke a verification error.
        scripts = [
            # Valid pay-to-pubkey script
            {'txid': '9b907ef1e3c26fc71fe4a4b3580bc75264112f95050014157059c736f0202e71', 'vout': 0,
             'scriptPubKey': '76a91460baa0f494b38ce3c940dea67f3804dc52d1fb9488ac'},
            # Invalid script
            {'txid': '5b8673686910442c644b1f4993d8f7753c7c8fcb5c87ee40d56eaeef25204547', 'vout': 7,
             'scriptPubKey': 'badbadbadbad'}
        ]
        outputs = {'mpLQjfK79b7CCV4VMJWEWAj5Mpx8Up5zxB': 0.1}
        rawTx = self.nodes[0].createrawtransaction(inputs, outputs)
        rawTxSigned = self.nodes[0].signrawtransaction(rawTx, scripts, privKeys)
        # 3) The transaction has no complete set of signatures
        assert 'complete' in rawTxSigned
        assert_equal(rawTxSigned['complete'], False)
        # 4) Two script verification errors occurred
        assert 'errors' in rawTxSigned
        assert_equal(len(rawTxSigned['errors']), 2)
        # 5) Script verification errors have certain properties
        assert 'txid' in rawTxSigned['errors'][0]
        assert 'vout' in rawTxSigned['errors'][0]
        assert 'scriptSig' in rawTxSigned['errors'][0]
        assert 'sequence' in rawTxSigned['errors'][0]
        assert 'error' in rawTxSigned['errors'][0]
        # 6) The verification errors refer to the invalid (vin 1) and missing input (vin 2)
        # NOTE(review): this assumes the node reports errors in input order
        # (errors[0] -> vin 1, errors[1] -> vin 2) -- confirm against the
        # signrawtransaction RPC implementation.
        assert_equal(rawTxSigned['errors'][0]['txid'], inputs[1]['txid'])
        assert_equal(rawTxSigned['errors'][0]['vout'], inputs[1]['vout'])
        assert_equal(rawTxSigned['errors'][1]['txid'], inputs[2]['txid'])
        assert_equal(rawTxSigned['errors'][1]['vout'], inputs[2]['vout'])
    def run_test(self):
        self.successful_signing_test()
        self.script_verification_error_test()
# Standard Bitcoin test-framework entry point.
if __name__ == '__main__':
    SignRawTransactionsTest().main()
| mit |
jcoady9/python-for-android | python3-alpha/extra_modules/gdata/media/data.py | 132 | 3803 | #!/usr/bin/python
#
# Copyright (C) 2009 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Contains the data classes of the Yahoo! Media RSS Extension"""
__author__ = 'j.s@google.com (Jeff Scudder)'
import atom.core
MEDIA_TEMPLATE = '{http://search.yahoo.com/mrss//}%s'
class MediaCategory(atom.core.XmlElement):
  """Describes a media category (the media:category element)."""
  _qname = MEDIA_TEMPLATE % 'category'
  # NOTE(review): throughout this module, string-valued class attributes
  # presumably name XML attributes of the element and class-valued ones
  # reference child elements -- confirm in atom.core.XmlElement.
  scheme = 'scheme'
  label = 'label'
class MediaCopyright(atom.core.XmlElement):
  """Describes a media copyright (the media:copyright element)."""
  _qname = MEDIA_TEMPLATE % 'copyright'
  url = 'url'
class MediaCredit(atom.core.XmlElement):
  """Describes a media credit (the media:credit element)."""
  _qname = MEDIA_TEMPLATE % 'credit'
  role = 'role'
  scheme = 'scheme'
class MediaDescription(atom.core.XmlElement):
  """Describes a media description (the media:description element)."""
  _qname = MEDIA_TEMPLATE % 'description'
  type = 'type'
class MediaHash(atom.core.XmlElement):
  """Describes a media hash (the media:hash element)."""
  _qname = MEDIA_TEMPLATE % 'hash'
  # Hash algorithm identifier carried in the 'algo' attribute.
  algo = 'algo'
class MediaKeywords(atom.core.XmlElement):
  """Describes media keywords (the media:keywords element, text only)."""
  _qname = MEDIA_TEMPLATE % 'keywords'
class MediaPlayer(atom.core.XmlElement):
  """Describes a media player (the media:player element)."""
  _qname = MEDIA_TEMPLATE % 'player'
  height = 'height'
  width = 'width'
  url = 'url'
class MediaRating(atom.core.XmlElement):
  """Describes a media rating (the media:rating element)."""
  _qname = MEDIA_TEMPLATE % 'rating'
  scheme = 'scheme'
class MediaRestriction(atom.core.XmlElement):
  """Describes a media restriction (the media:restriction element)."""
  _qname = MEDIA_TEMPLATE % 'restriction'
  relationship = 'relationship'
  type = 'type'
class MediaText(atom.core.XmlElement):
  """Describes a media text (the media:text element)."""
  _qname = MEDIA_TEMPLATE % 'text'
  end = 'end'
  lang = 'lang'
  type = 'type'
  start = 'start'
class MediaThumbnail(atom.core.XmlElement):
  """Describes a media thumbnail (the media:thumbnail element)."""
  _qname = MEDIA_TEMPLATE % 'thumbnail'
  time = 'time'
  url = 'url'
  width = 'width'
  height = 'height'
class MediaTitle(atom.core.XmlElement):
  """Describes a media title (the media:title element)."""
  _qname = MEDIA_TEMPLATE % 'title'
  type = 'type'
class MediaContent(atom.core.XmlElement):
  """Describes a media content (the media:content element).

  Class-valued attributes (e.g. hash, player, title) reference the
  element classes defined above; list-valued ones (e.g. credit, text)
  are repeatable children. String-valued attributes name the element's
  XML attributes.
  """
  _qname = MEDIA_TEMPLATE % 'content'
  bitrate = 'bitrate'
  is_default = 'isDefault'
  medium = 'medium'
  height = 'height'
  credit = [MediaCredit]
  language = 'language'
  hash = MediaHash
  width = 'width'
  player = MediaPlayer
  url = 'url'
  file_size = 'fileSize'
  channels = 'channels'
  expression = 'expression'
  text = [MediaText]
  samplingrate = 'samplingrate'
  title = MediaTitle
  category = [MediaCategory]
  rating = [MediaRating]
  type = 'type'
  description = MediaDescription
  framerate = 'framerate'
  thumbnail = [MediaThumbnail]
  duration = 'duration'
  copyright = MediaCopyright
  keywords = MediaKeywords
  restriction = [MediaRestriction]
class MediaGroup(atom.core.XmlElement):
  """Describes a media group (the media:group element).

  Groups multiple media:content children together with shared metadata;
  list-valued attributes are repeatable child elements, the rest are
  single child elements (classes defined above).
  """
  _qname = MEDIA_TEMPLATE % 'group'
  credit = [MediaCredit]
  content = [MediaContent]
  copyright = MediaCopyright
  description = MediaDescription
  category = [MediaCategory]
  player = MediaPlayer
  rating = [MediaRating]
  hash = MediaHash
  title = MediaTitle
  keywords = MediaKeywords
  restriction = [MediaRestriction]
  thumbnail = [MediaThumbnail]
  text = [MediaText]
| apache-2.0 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.