repo_name stringlengths 5 100 | path stringlengths 4 294 | copies stringclasses 990
values | size stringlengths 4 7 | content stringlengths 666 1M | license stringclasses 15
values |
|---|---|---|---|---|---|
ndardenne/pymatgen | pymatgen/analysis/elasticity/tests/test_stress.py | 2 | 2818 | from __future__ import absolute_import
import unittest2 as unittest
import numpy as np
import warnings
from pymatgen.analysis.elasticity.stress import Stress
from pymatgen.analysis.elasticity.strain import Deformation
from pymatgen.util.testing import PymatgenTest
class StressTest(PymatgenTest):
    """Unit tests for pymatgen's Stress tensor class."""

    def setUp(self):
        # Three fixtures: a random (generally non-symmetric) tensor, a
        # fixed symmetric tensor, and a deliberately non-symmetric one.
        self.rand_stress = Stress(np.random.randn(3, 3))
        self.symm_stress = Stress([[0.51, 2.29, 2.42],
                                   [2.29, 5.14, 5.07],
                                   [2.42, 5.07, 5.33]])
        self.non_symm = Stress([[0.1, 0.2, 0.3],
                                [0.4, 0.5, 0.6],
                                [0.2, 0.5, 0.5]])

    def test_properties(self):
        # mean_stress: average of the diagonal entries
        self.assertEqual(self.rand_stress.mean_stress,
                         1. / 3. * (self.rand_stress[0, 0] +
                                    self.rand_stress[1, 1] +
                                    self.rand_stress[2, 2]))
        self.assertAlmostEqual(self.symm_stress.mean_stress, 3.66)
        # deviator_stress: stress minus its hydrostatic (mean) part
        self.assertArrayAlmostEqual(self.symm_stress.deviator_stress,
                                    Stress([[-3.15, 2.29, 2.42],
                                            [2.29, 1.48, 5.07],
                                            [2.42, 5.07, 1.67]]))
        self.assertArrayAlmostEqual(self.non_symm.deviator_stress,
                                    [[-0.2666666667, 0.2, 0.3],
                                     [0.4, 0.133333333, 0.6],
                                     [0.2, 0.5, 0.133333333]])
        # deviator_principal_invariants (first invariant is 0 by construction)
        self.assertArrayAlmostEqual(self.symm_stress.dev_principal_invariants,
                                    [0, 44.2563, 111.953628])
        # von_mises equivalent stress
        self.assertAlmostEqual(self.symm_stress.von_mises,
                               11.52253878275)
        # piola_kirchoff 1, 2: stress measures under a small shear deformation
        f = Deformation.from_index_amount((0, 1), 0.03)
        self.assertArrayAlmostEqual(self.symm_stress.piola_kirchoff_1(f),
                                    [[0.4413, 2.29, 2.42],
                                     [2.1358, 5.14, 5.07],
                                     [2.2679, 5.07, 5.33]])
        self.assertArrayAlmostEqual(self.symm_stress.piola_kirchoff_2(f),
                                    [[0.377226, 2.1358, 2.2679],
                                     [2.1358, 5.14, 5.07],
                                     [2.2679, 5.07, 5.33]])
        # voigt: 6-component reduced notation of a symmetric tensor
        self.assertArrayEqual(self.symm_stress.voigt,
                              [0.51, 5.14, 5.33, 5.07, 2.42, 2.29])
        # Accessing .voigt on a non-symmetric tensor should emit one warning.
        with warnings.catch_warnings(record=True) as w:
            self.non_symm.voigt
            self.assertEqual(len(w), 1)
if __name__ == '__main__':
    # Allow running this test module directly.
    unittest.main()
| mit |
groovecoder/kuma | kuma/wiki/views/legacy.py | 10 | 4321 | # -*- coding: utf-8 -*-
from django.conf import settings
from django.http import Http404
from django.shortcuts import redirect
from ..models import Document, Revision
# Legacy MindTouch redirects.
# Legacy MindTouch redirects.

# Page namespaces that get special-case redirect handling in
# mindtouch_namespace_redirect().
MINDTOUCH_NAMESPACES = (
    'Help',
    'Help_talk',
    'Project',
    'Project_talk',
    'Special',
    'Talk',
    'Template',
    'Template_talk',
    'User',
)

# MindTouch locale codes that the locale middleware cannot resolve on its
# own, mapped to the Kuma locale each should redirect to.
MINDTOUCH_PROBLEM_LOCALES = {
    'cn': 'zh-CN',
    'en': 'en-US',
    'zh_cn': 'zh-CN',
    'zh_tw': 'zh-TW',
}
def mindtouch_namespace_redirect(request, namespace, slug):
    """
    For URLs in special namespaces (like Talk:, User:, etc.), redirect
    if possible to the appropriate new URL in the appropriate
    locale. If the locale cannot be correctly determined, fall back to
    en-US.

    Returns a permanent redirect response.
    """
    new_locale = new_slug = None
    if namespace in ('Talk', 'Project', 'Project_talk'):
        # These namespaces carry the old locale in their URL, which
        # simplifies figuring out where to send them.
        locale, _, doc_slug = slug.partition('/')
        new_locale = settings.MT_TO_KUMA_LOCALE_MAP.get(locale, 'en-US')
        new_slug = '%s:%s' % (namespace, doc_slug)
    elif namespace == 'User':
        # For users, we look up the latest revision and get the locale
        # from there.
        new_slug = '%s:%s' % (namespace, slug)
        try:
            rev = (Revision.objects.filter(document__slug=new_slug)
                                   .latest('created'))
            new_locale = rev.document.locale
        except Revision.DoesNotExist:
            # If that doesn't work, bail out to en-US.
            new_locale = 'en-US'
    else:
        # Templates, etc. don't actually have a locale, so we give
        # them the default.
        new_locale = 'en-US'
        new_slug = '%s:%s' % (namespace, slug)
    if new_locale:
        # BUG FIX: the URL was previously built from request.locale, which
        # silently discarded the locale determined above and contradicted
        # this function's documented behavior.
        new_url = '/%s/docs/%s' % (new_locale, new_slug)
        return redirect(new_url, permanent=True)
def mindtouch_to_kuma_redirect(request, path):
    """
    Given a request to a Mindtouch-generated URL, generate a redirect
    to the correct corresponding kuma URL.

    Raises Http404 when no corresponding document can be found.
    """
    new_locale = None
    if path.startswith('Template:MindTouch'):
        # MindTouch's default templates. There shouldn't be links to
        # them anywhere in the wild, but just in case we 404 them.
        raise Http404
    if path.endswith('/'):
        # If there's a trailing slash, snip it off.
        path = path[:-1]
    if ':' in path:
        namespace, _, slug = path.partition(':')
        # The namespaces (Talk:, User:, etc.) get their own
        # special-case handling.
        if namespace in MINDTOUCH_NAMESPACES:
            return mindtouch_namespace_redirect(request, namespace, slug)
    doc = None
    if '/' in path:
        maybe_locale, _, slug = path.partition('/')
        # There are three problematic locales that MindTouch had which
        # can still be in the path we see after the locale
        # middleware's done its bit. Since those are easy, we check
        # them first.
        if maybe_locale in MINDTOUCH_PROBLEM_LOCALES:
            new_locale = MINDTOUCH_PROBLEM_LOCALES[maybe_locale]
            # We do not preserve UI locale here -- these locales won't
            # be picked up correctly by the locale middleware, and
            # anyone trying to view the document in its locale with
            # their own UI locale will have the correct starting URL
            # anyway.
            new_url = '/%s/docs/%s' % (new_locale, slug)
            if 'view' in request.GET:
                new_url = '%s$%s' % (new_url, request.GET['view'])
            return redirect(new_url, permanent=True)
        # Next we try looking up a Document with the possible locale
        # we've pulled out.
        try:
            doc = Document.objects.get(slug=slug, locale=maybe_locale)
        except Document.DoesNotExist:
            pass
    if doc is None:
        # Last attempt: we try the request locale as the document locale,
        # and see if that matches something.
        # BUG FIX: previously this lookup ran unconditionally, so a
        # successful lookup by maybe_locale above was discarded and could
        # even be turned into a 404.
        try:
            doc = Document.objects.get(slug=path, locale=request.locale)
        except Document.DoesNotExist:
            raise Http404
    location = doc.get_absolute_url()
    if 'view' in request.GET:
        location = '%s$%s' % (location, request.GET['view'])
    return redirect(location, permanent=True)
| mpl-2.0 |
nelmiux/CarnotKE | jyhton/Lib/test/test_fileio.py | 10 | 14566 | # Adapted from test_file.py by Daniel Stutzbach
from __future__ import unicode_literals
import sys
import os
import errno
import unittest
from array import array
from weakref import proxy
from functools import wraps
from test.test_support import (TESTFN, check_warnings, run_unittest,
make_bad_fd, is_jython, gc_collect)
from test.test_support import py3k_bytes as bytes
from test.script_helper import run_python
from _io import FileIO as _FileIO
"""
XXX: ignoring ValueError on Jython for now as the ValueError/IOError thing is
too mixed up right now. Needs investigation especially in Jython3 -- we
should get this cleaned up if possible.
"""
class AutoFileTests(unittest.TestCase):
    # file tests for which a test file is automatically set up

    def setUp(self):
        # Fresh FileIO opened for writing; tearDown closes and removes it.
        self.f = _FileIO(TESTFN, 'w')

    def tearDown(self):
        if self.f:
            self.f.close()
        os.remove(TESTFN)

    def testWeakRefs(self):
        # verify weak references
        p = proxy(self.f)
        p.write(bytes(range(10)))
        self.assertEqual(self.f.tell(), p.tell())
        self.f.close()
        self.f = None
        # gc_collect() is needed on runtimes without refcounting (Jython)
        # before the weak proxy actually goes dead.
        gc_collect()
        self.assertRaises(ReferenceError, getattr, p, 'tell')

    def testSeekTell(self):
        self.f.write(bytes(range(20)))
        self.assertEqual(self.f.tell(), 20)
        self.f.seek(0)
        self.assertEqual(self.f.tell(), 0)
        self.f.seek(10)
        self.assertEqual(self.f.tell(), 10)
        # whence=1: seek relative to the current position
        self.f.seek(5, 1)
        self.assertEqual(self.f.tell(), 15)
        self.f.seek(-5, 1)
        self.assertEqual(self.f.tell(), 10)
        # whence=2: seek relative to the end (20 bytes were written)
        self.f.seek(-5, 2)
        self.assertEqual(self.f.tell(), 15)

    def testAttributes(self):
        # verify expected attributes exist
        f = self.f
        self.assertEqual(f.mode, "wb")
        self.assertEqual(f.closed, False)
        # verify the attributes are readonly
        for attr in 'mode', 'closed':
            self.assertRaises((AttributeError, TypeError),
                              setattr, f, attr, 'oops')

    def testReadinto(self):
        # verify readinto
        self.f.write(b"\x01\x02")
        self.f.close()
        a = array(b'b', b'x'*10)
        self.f = _FileIO(TESTFN, 'r')
        n = self.f.readinto(a)
        # Only the first n slots of the buffer are overwritten.
        self.assertEqual(array(b'b', [1, 2]), a[:n])

    def test_none_args(self):
        # read/readline/readlines must treat a None size as "no limit".
        self.f.write(b"hi\nbye\nabc")
        self.f.close()
        self.f = _FileIO(TESTFN, 'r')
        self.assertEqual(self.f.read(None), b"hi\nbye\nabc")
        self.f.seek(0)
        self.assertEqual(self.f.readline(None), b"hi\n")
        self.assertEqual(self.f.readlines(None), [b"bye\n", b"abc"])

    def testRepr(self):
        self.assertEqual(repr(self.f), "<_io.FileIO name=%r mode='%s'>"
                         % (self.f.name, self.f.mode))
        # After deleting the name attribute, repr falls back to the fd.
        del self.f.name
        self.assertEqual(repr(self.f), "<_io.FileIO fd=%r mode='%s'>"
                         % (self.f.fileno(), self.f.mode))
        self.f.close()
        self.assertEqual(repr(self.f), "<_io.FileIO [closed]>")

    def testErrors(self):
        f = self.f
        self.assertTrue(not f.isatty())
        self.assertTrue(not f.closed)
        #self.assertEqual(f.name, TESTFN)
        self.assertRaises(ValueError, f.read, 10)  # Open for writing
        f.close()
        self.assertTrue(f.closed)
        f = self.f = _FileIO(TESTFN, 'r')
        self.assertRaises(TypeError, f.readinto, "")
        self.assertTrue(not f.closed)
        f.close()
        self.assertTrue(f.closed)
        # These methods all accept a call with 0 arguments
        methods = ['fileno', 'isatty', 'read',
                   'tell', 'truncate', 'seekable',
                   'readable', 'writable']
        if sys.platform.startswith('atheos'):
            methods.remove('truncate')
        self.f.close()
        self.assertTrue(self.f.closed)
        for methodname in methods:
            method = getattr(self.f, methodname)
            # should raise on closed file
            self.assertRaises(ValueError, method)
        # These other methods should be tested using a specific call
        # in case the test for number of arguments comes first.
        b = bytearray()
        self.assertRaises(ValueError, self.f.readinto, b )
        self.assertRaises(ValueError, self.f.seek, 0)
        self.assertRaises(ValueError, self.f.write, b )

    def testOpendir(self):
        # Issue 3703: opening a directory should fill the errno
        # Windows always returns "[Errno 13]: Permission denied
        # Unix calls dircheck() and returns "[Errno 21]: Is a directory"
        try:
            _FileIO('.', 'r')
        except IOError as e:
            self.assertNotEqual(e.errno, 0)
            self.assertEqual(e.filename, ".")
        else:
            self.fail("Should have raised IOError")

    #A set of functions testing that we get expected behaviour if someone has
    #manually closed the internal file descriptor.  First, a decorator:
    def ClosedFD(func):
        # Decorator: run `func` against a file whose underlying fd has been
        # closed behind its back; the operation is expected NOT to raise
        # (Jython's ValueError is tolerated per the module-level XXX note).
        @wraps(func)
        def wrapper(self):
            #forcibly close the fd before invoking the problem function
            f = self.f
            os.close(f.fileno())
            try:
                func(self, f)
            except ValueError:
                if not is_jython:
                    self.fail("ValueError only on Jython")
            finally:
                try:
                    self.f.close()
                except IOError:
                    pass
                except ValueError:
                    if not is_jython:
                        self.fail("ValueError only on Jython")
        return wrapper

    def ClosedFDRaises(func):
        # Decorator: like ClosedFD, but the operation is expected to raise
        # IOError with errno EBADF (bad file descriptor).
        @wraps(func)
        def wrapper(self):
            #forcibly close the fd before invoking the problem function
            f = self.f
            os.close(f.fileno())
            try:
                func(self, f)
            except IOError as e:
                self.assertEqual(e.errno, errno.EBADF)
            except ValueError as e:
                if not is_jython:
                    self.fail("ValueError only on Jython")
            else:
                self.fail("Should have raised IOError")
            finally:
                try:
                    self.f.close()
                except IOError:
                    pass
                except ValueError:
                    if not is_jython:
                        self.fail("ValueError only on Jython")
        return wrapper

    @ClosedFDRaises
    def testErrnoOnClose(self, f):
        f.close()

    @ClosedFDRaises
    def testErrnoOnClosedWrite(self, f):
        f.write('a')

    @ClosedFDRaises
    def testErrnoOnClosedSeek(self, f):
        f.seek(0)

    @ClosedFDRaises
    def testErrnoOnClosedTell(self, f):
        f.tell()

    @ClosedFDRaises
    def testErrnoOnClosedTruncate(self, f):
        f.truncate(0)

    @ClosedFD
    def testErrnoOnClosedSeekable(self, f):
        f.seekable()

    @ClosedFD
    def testErrnoOnClosedReadable(self, f):
        f.readable()

    @ClosedFD
    def testErrnoOnClosedWritable(self, f):
        f.writable()

    @ClosedFD
    def testErrnoOnClosedFileno(self, f):
        f.fileno()

    @ClosedFD
    def testErrnoOnClosedIsatty(self, f):
        self.assertEqual(f.isatty(), False)

    def ReopenForRead(self):
        # Helper: reopen TESTFN for reading, then close its fd behind its
        # back so the subsequent read operation hits EBADF.
        try:
            self.f.close()
        except IOError:
            pass
        self.f = _FileIO(TESTFN, 'r')
        os.close(self.f.fileno())
        return self.f

    @ClosedFDRaises
    def testErrnoOnClosedRead(self, f):
        f = self.ReopenForRead()
        f.read(1)

    @ClosedFDRaises
    def testErrnoOnClosedReadall(self, f):
        f = self.ReopenForRead()
        f.readall()

    @ClosedFDRaises
    def testErrnoOnClosedReadinto(self, f):
        f = self.ReopenForRead()
        a = array(b'b', b'x'*10)
        f.readinto(a)
class OtherFileTests(unittest.TestCase):
    # file tests for which a test file is not created but cleaned up
    # This introduced by Jython, to prevent the cascade of errors when
    # a test exits leaving an open file. Also a CPython problem.

    def setUp(self):
        # Tests open their own files; tearDown closes/removes whatever
        # the test left in self.f / on disk.
        self.f = None

    def tearDown(self):
        if self.f:
            self.f.close()
        if os.path.exists(TESTFN):
            os.remove(TESTFN)

    def testAbles(self):
        # readable()/writable()/seekable() must reflect the open mode.
        f = self.f = _FileIO(TESTFN, "w")
        self.assertEqual(f.readable(), False)
        self.assertEqual(f.writable(), True)
        self.assertEqual(f.seekable(), True)
        f.close()

        f = self.f = _FileIO(TESTFN, "r")
        self.assertEqual(f.readable(), True)
        self.assertEqual(f.writable(), False)
        self.assertEqual(f.seekable(), True)
        f.close()

        f = self.f = _FileIO(TESTFN, "a+")
        self.assertEqual(f.readable(), True)
        self.assertEqual(f.writable(), True)
        self.assertEqual(f.seekable(), True)
        self.assertEqual(f.isatty(), False)
        f.close()

        # Jython specific issues:
        # On OSX, FileIO("/dev/tty", "w").isatty() is False
        # On Ubuntu, FileIO("/dev/tty", "w").isatty() throws IOError: Illegal seek
        #
        # Much like we see on other platforms, we cannot reliably
        # determine it is not seekable (or special).
        #
        # Related bug: http://bugs.jython.org/issue1945
        if sys.platform != "win32" and not is_jython:
            try:
                f = self.f = _FileIO("/dev/tty", "a")
            except EnvironmentError:
                # When run in a cron job there just aren't any
                # ttys, so skip the test. This also handles other
                # OS'es that don't support /dev/tty.
                pass
            else:
                self.assertEqual(f.readable(), False)
                self.assertEqual(f.writable(), True)
                if sys.platform != "darwin" and \
                   'bsd' not in sys.platform and \
                   not sys.platform.startswith('sunos'):
                    # Somehow /dev/tty appears seekable on some BSDs
                    self.assertEqual(f.seekable(), False)
                self.assertEqual(f.isatty(), True)

    def testModeStrings(self):
        # check invalid mode strings
        for mode in ("", "aU", "wU+", "rw", "rt"):
            try:
                f = self.f = _FileIO(TESTFN, mode)
            except ValueError:
                pass
            else:
                self.fail('%r is an invalid file mode' % mode)

    def testUnicodeOpen(self):
        # verify repr works for unicode too
        f = self.f = _FileIO(str(TESTFN), "w")

    def testBytesOpen(self):
        # Opening a bytes filename
        try:
            fn = TESTFN.encode("ascii")
        except UnicodeEncodeError:
            # Skip test
            return
        f = self.f = _FileIO(fn, "w")
        f.write(b"abc")
        f.close()
        with open(TESTFN, "rb") as f:
            self.f = f
            self.assertEqual(f.read(), b"abc")

    def testInvalidFd(self):
        if is_jython:
            self.assertRaises(TypeError, _FileIO, -10)  # file descriptor not int in Jython
        else:
            self.assertRaises(ValueError, _FileIO, -10)
        self.assertRaises(OSError, _FileIO, make_bad_fd())
        if sys.platform == 'win32':
            import msvcrt
            self.assertRaises(IOError, msvcrt.get_osfhandle, make_bad_fd())

    def testBadModeArgument(self):
        # verify that we get a sensible error message for bad mode argument
        bad_mode = "qwerty"
        try:
            f = self.f = _FileIO(TESTFN, bad_mode)
        except ValueError as msg:
            if msg.args[0] != 0:
                s = str(msg)
                # The message should mention the bad mode, not the filename.
                if TESTFN in s or bad_mode not in s:
                    self.fail("bad error message for invalid mode: %s" % s)
            # if msg.args[0] == 0, we're probably on Windows where there may be
            # no obvious way to discover why open() failed.
        else:
            f.close()
            self.fail("no error for invalid mode: %s" % bad_mode)

    def testTruncate(self):
        f = self.f = _FileIO(TESTFN, 'w')
        f.write(bytes(bytearray(range(10))))
        self.assertEqual(f.tell(), 10)
        # truncate() must not move the file position, only resize the file.
        f.truncate(5)
        self.assertEqual(f.tell(), 10)
        self.assertEqual(f.seek(0, os.SEEK_END), 5)
        f.truncate(15)
        self.assertEqual(f.tell(), 5)
        #XXX: next assert not working in Jython:
        #self.assertEqual(f.seek(0, os.SEEK_END), 15)
        f.close()

    def testTruncateOnWindows(self):
        def bug801631():
            # SF bug <http://www.python.org/sf/801631>
            # "file.truncate fault on windows"
            f = self.f = _FileIO(TESTFN, 'w')
            f.write(bytes(range(11)))
            f.close()
            f = self.f = _FileIO(TESTFN,'r+')
            data = f.read(5)
            if data != bytes(range(5)):
                self.fail("Read on file opened for update failed %r" % data)
            if f.tell() != 5:
                self.fail("File pos after read wrong %d" % f.tell())
            # truncate() with no argument truncates at the current position.
            f.truncate()
            if f.tell() != 5:
                self.fail("File pos after ftruncate wrong %d" % f.tell())
            f.close()
            size = os.path.getsize(TESTFN)
            if size != 5:
                self.fail("File size after ftruncate wrong %d" % size)
        # Test for bug 801631
        bug801631()

    def testAppend(self):
        # Append mode must preserve existing content and write at the end.
        f = self.f = open(TESTFN, 'wb')
        f.write(b'spam')
        f.close()
        f = self.f = open(TESTFN, 'ab')
        f.write(b'eggs')
        f.close()
        f = self.f = open(TESTFN, 'rb')
        d = f.read()
        f.close()
        self.assertEqual(d, b'spameggs')

    def testInvalidInit(self):
        self.assertRaises(TypeError, _FileIO, "1", 0, 0)

    def testWarnings(self):
        # Bad constructor arguments must raise without emitting warnings.
        with check_warnings(quiet=True) as w:
            self.assertEqual(w.warnings, [])
            self.assertRaises(TypeError, _FileIO, [])
            self.assertEqual(w.warnings, [])
            self.assertRaises(ValueError, _FileIO, "/some/invalid/name", "rt")
            self.assertEqual(w.warnings, [])
def test_main():
    """Run both suites, always removing TESTFN afterwards.

    Historically these tests have been sloppy about cleaning up TESTFN,
    so it is removed unconditionally once the suites have run.
    """
    try:
        run_unittest(AutoFileTests, OtherFileTests)
    finally:
        if os.path.exists(TESTFN):
            os.remove(TESTFN)
if __name__ == '__main__':
    # test_main() takes care of TESTFN cleanup itself.
    test_main()
| apache-2.0 |
pcm17/tensorflow | tensorflow/python/ops/script_ops.py | 15 | 7020 | # Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Script Language Operators. See the @{python/script_ops} guide.
@@py_func
"""
# pylint: disable=g-bad-name
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import threading
import numpy as np
from tensorflow.python import pywrap_tensorflow
from tensorflow.python.framework import ops
from tensorflow.python.ops import gen_script_ops
class FuncRegistry(object):
  """A helper class to keep track of registered py functions.

  FuncRegistry keeps a map from unique tokens (string) to python
  functions, which takes numpy arrays and outputs numpy arrays.
  """

  def __init__(self):
    self._lock = threading.Lock()
    self._unique_id = 0  # GUARDED_BY(self._lock)
    # Map from token string to registered callable.
    self._funcs = {}

  def insert(self, func):
    """Registers `func` and returns a unique token for this entry."""
    token = self._next_unique_token()
    self._funcs[token] = func
    return token

  def remove(self, token):
    """Removes the registered function corresponding to `token`.

    Removing an unknown token is a no-op.
    """
    self._funcs.pop(token, None)

  @staticmethod
  def _convert(value):
    """Converts an arg to numpy, avoiding dangerous string and unicode dtypes.

    Numpy pads with zeros when using string and unicode dtypes if different
    components of a tensor have different lengths.  This is bad: ignoring the
    padding is wrong for text data, and removing the padding is wrong for binary
    data.  To avoid this bug, we redo the conversion using an object dtype.

    Args:
      value: Value to convert to a numpy array.

    Returns:
      A numpy array.
    """
    result = np.asarray(value, order="C")
    if result.dtype.char in "SU" and result is not value:
      return np.asarray(value, order="C", dtype=object)
    return result

  def __call__(self, token, args):
    """Calls the registered function for `token` with args.

    Raises:
      ValueError: if `token` is not registered.
    """
    # FIX: use .get() instead of indexing -- a plain self._funcs[token]
    # raised KeyError for unknown tokens, making the ValueError branch
    # below unreachable.
    func = self._funcs.get(token, None)
    if func is None:
      raise ValueError("callback %s is not found" % token)
    ret = func(*args)
    # Ensures that we return either a single numpy array or a list of numpy
    # arrays.
    if isinstance(ret, (tuple, list)):
      return [self._convert(x) for x in ret]
    else:
      return self._convert(ret)

  def size(self):
    """Returns how many functions are currently registered."""
    return len(self._funcs)

  def _next_unique_token(self):
    """Returns a unique token."""
    with self._lock:
      uid = self._unique_id
      self._unique_id += 1
    return "pyfunc_%d" % uid
# Global registry for py functions.
_py_funcs = FuncRegistry()

# Hand the registry to the native runtime so registered Python callbacks
# can be invoked from the PyFunc op's trampoline.
pywrap_tensorflow.InitializePyTrampoline(_py_funcs)
class CleanupFunc(object):
  """A helper class to remove a registered function from _py_funcs."""

  def __init__(self, token):
    # Token of the registered function this object is responsible for.
    self._token = token

  def __del__(self):
    # Deregister when the holder (e.g. a Graph) is garbage-collected.
    _py_funcs.remove(self._token)
def py_func(func, inp, Tout, stateful=True, name=None):
  """Wraps a python function and uses it as a TensorFlow op.

  Given a python function `func`, which takes numpy arrays as its
  inputs and returns numpy arrays as its outputs, wrap this function as an
  operation in a TensorFlow graph. The following snippet constructs a simple
  TensorFlow graph that invokes the `np.sinh()` NumPy function as a operation
  in the graph:

  ```python
  def my_func(x):
    # x will be a numpy array with the contents of the placeholder below
    return np.sinh(x)
  inp = tf.placeholder(tf.float32)
  y = tf.py_func(my_func, [inp], tf.float32)
  ```

  **N.B.** The `tf.py_func()` operation has the following known limitations:

  * The body of the function (i.e. `func`) will not be serialized in a
    `GraphDef`. Therefore, you should not use this function if you need to
    serialize your model and restore it in a different environment.

  * The operation must run in the same address space as the Python program
    that calls `tf.py_func()`. If you are using distributed TensorFlow, you
    must run a `tf.train.Server` in the same process as the program that calls
    `tf.py_func()` and you must pin the created operation to a device in that
    server (e.g. using `with tf.device():`).

  Args:
    func: A Python function, which accepts a list of NumPy `ndarray` objects
      having element types that match the corresponding `tf.Tensor` objects
      in `inp`, and returns a list of `ndarray` objects (or a single `ndarray`)
      having element types that match the corresponding values in `Tout`.
    inp: A list of `Tensor` objects.
    Tout: A list or tuple of tensorflow data types or a single tensorflow data
      type if there is only one, indicating what `func` returns.
    stateful: (Boolean.) If True, the function should be considered stateful.
      If a function is stateless, when given the same input it will return the
      same output and have no observable side effects. Optimizations such as
      common subexpression elimination are only performed on stateless
      operations.
    name: A name for the operation (optional).

  Returns:
    A list of `Tensor` or a single `Tensor` which `func` computes.
  """
  token = _py_funcs.insert(func)
  # We tie the registered function's life-time with the current
  # default graph. I.e., when the current graph is destroyed, we
  # should remove its py funcs.
  cleanup = CleanupFunc(token)
  g = ops.get_default_graph()
  # pylint: disable=protected-access
  #
  # TODO(zhifengc): Consider adding a Graph method to collect
  # `cleanup` objects in one of its member.
  if not hasattr(g, "_cleanup_py_funcs_used_in_graph"):
    g._cleanup_py_funcs_used_in_graph = []
  # When g is destroyed, elements in _cleanup_py_funcs_used_in_graph
  # will be destroyed and their __del__ will remove the 'token' from
  # the funcs registry.
  g._cleanup_py_funcs_used_in_graph.append(cleanup)
  # Normalize Tout to a list for the op builder, remembering whether the
  # caller passed a single dtype so the return value can be unwrapped.
  if isinstance(Tout, (list, tuple)):
    is_list_or_tuple = True
  else:
    Tout = [Tout]
    is_list_or_tuple = False
  if stateful:
    result = gen_script_ops._py_func(
        input=inp, token=token, Tout=Tout, name=name)
    # pylint: enable=protected-access
  else:
    result = gen_script_ops._py_func_stateless(
        input=inp, token=token, Tout=Tout, name=name)
    # pylint: enable=protected-access
  # Single-dtype callers get a single Tensor back, not a one-element list.
  return result if is_list_or_tuple else result[0]
# Mark both PyFunc op variants as having no registered gradient.
ops.NotDifferentiable("PyFunc")
ops.NotDifferentiable("PyFuncStateless")
| apache-2.0 |
RealTimeWeb/wikisite | MoinMoin/auth/mysql_group.py | 1 | 2750 | # -*- coding: iso-8859-1 -*-
"""
MoinMoin - auth plugin doing a check against MySQL group db
@copyright: 2006 Nick Phillips,
2007 MoinMoin:JohannesBerg,
2008 MoinMoin:ThomasWaldmann
@license: GNU GPL, see COPYING for details.
"""
import MySQLdb
from MoinMoin import log
logging = log.getLogger(__name__)
from MoinMoin.auth import BaseAuth, CancelLogin, ContinueLogin
class MysqlGroupAuth(BaseAuth):
    """ Authorize via MySQL group DB.

    We require an already-authenticated user_obj and
    check that the user is part of an authorized group.
    """
    def __init__(self, host, user, passwd, dbname, query):
        """
        :param host: MySQL server host
        :param user: MySQL user name
        :param passwd: MySQL password
        :param dbname: database holding the group table
        :param query: SQL query with one %s placeholder that receives the
                      authenticated user name
        """
        BaseAuth.__init__(self)
        self.mysql_group_query = query
        self.host = host
        self.user = user
        self.passwd = passwd
        self.dbname = dbname

    def login(self, request, user_obj, **kw):
        _ = request.getText
        logging.debug("got: user_obj=%r" % user_obj)
        if not (user_obj and user_obj.valid):
            # No other method succeeded, so we cannot authorize
            # but maybe some following auth methods can still "fix" that.
            logging.debug("did not get valid user from previous auth method")
            return ContinueLogin(user_obj)
        # Got a valid user object - we can do stuff!
        logging.debug("got valid user (name=%r) from previous auth method" % user_obj.auth_username)
        # XXX Check auth_username for dodgy chars (should be none as it is authenticated, but...)
        # shouldn't really be necessary since execute() quotes them all...
        # OK, now check mysql!
        try:
            m = MySQLdb.connect(host=self.host, user=self.user,
                                passwd=self.passwd, db=self.dbname)
        except Exception:
            # FIX: was a bare "except:", which also swallowed SystemExit
            # and KeyboardInterrupt.
            logging.exception("authorization failed due to exception when connecting to DB, traceback follows...")
            return CancelLogin(_('Failed to connect to database.'))
        try:
            c = m.cursor()
            # FIX: pass the query parameter as a tuple (the form DB-API 2.0
            # documents for execute()).
            c.execute(self.mysql_group_query, (user_obj.auth_username, ))
            results = c.fetchall()
            c.close()
        finally:
            # FIX: always close the connection -- previously every login
            # leaked one MySQL server connection.
            m.close()
        if results:
            # Checked out OK
            logging.debug("got %d results -- authorized!" % len(results))
            return ContinueLogin(user_obj)
        else:
            logging.debug("did not get match from DB -- not authorized")
            return CancelLogin(_("Invalid username or password."))

    # XXX do we really want this? could it be enough to check when they log in?
    # of course then when you change the DB people who are logged in can still do stuff...
    def request(self, request, user_obj, **kw):
        retval = self.login(request, user_obj, **kw)
        return retval.user_obj, retval.continue_flag
| apache-2.0 |
spark0001/spark2.1.1 | examples/src/main/python/ml/quantile_discretizer_example.py | 123 | 1707 | #
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from __future__ import print_function
# $example on$
from pyspark.ml.feature import QuantileDiscretizer
# $example off$
from pyspark.sql import SparkSession
if __name__ == "__main__":
    spark = SparkSession\
        .builder\
        .appName("QuantileDiscretizerExample")\
        .getOrCreate()

    # $example on$
    data = [(0, 18.0), (1, 19.0), (2, 8.0), (3, 5.0), (4, 2.2)]
    df = spark.createDataFrame(data, ["id", "hour"])
    # $example off$

    # Output of QuantileDiscretizer for such small datasets can depend on the number of
    # partitions. Here we force a single partition to ensure consistent results.
    # Note this is not necessary for normal use cases
    df = df.repartition(1)

    # $example on$
    # Bin the continuous "hour" column into 3 quantile-based buckets.
    discretizer = QuantileDiscretizer(numBuckets=3, inputCol="hour", outputCol="result")

    result = discretizer.fit(df).transform(df)
    result.show()
    # $example off$

    spark.stop()
| apache-2.0 |
sodexis/odoo | addons/account/wizard/account_report_partner_balance.py | 364 | 2199 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp.osv import fields, osv
class account_partner_balance(osv.osv_memory):
    """
    This wizard will provide the partner balance report by periods, between any two dates.
    """
    _inherit = 'account.common.partner.report'
    _name = 'account.partner.balance'
    _description = 'Print Account Partner Balance'
    _columns = {
        # Whether partners with a zero balance are included in the report.
        'display_partner': fields.selection([('non-zero_balance', 'With balance is not equal to 0'), ('all', 'All Partners')]
                            , 'Display Partners'),
        'journal_ids': fields.many2many('account.journal', 'account_partner_balance_journal_rel', 'account_id', 'journal_id', 'Journals', required=True),
    }
    _defaults = {
        'display_partner': 'non-zero_balance',
    }

    def _print_report(self, cr, uid, ids, data, context=None):
        # Merge the values collected by the common-report wizard, add this
        # wizard's own field, and hand off to the report action.
        if context is None:
            context = {}
        data = self.pre_print_report(cr, uid, ids, data, context=context)
        data['form'].update(self.read(cr, uid, ids, ['display_partner'])[0])
        return self.pool['report'].get_action(cr, uid, [], 'account.report_partnerbalance', data=data, context=context)

# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
mmnelemane/neutron | neutron/service.py | 5 | 11342 | # Copyright 2011 VMware, Inc
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import inspect
import os
import random
from oslo_concurrency import processutils
from oslo_config import cfg
from oslo_log import log as logging
from oslo_messaging import server as rpc_server
from oslo_service import loopingcall
from oslo_service import service as common_service
from oslo_utils import excutils
from oslo_utils import importutils
from neutron.common import config
from neutron.common import rpc as n_rpc
from neutron import context
from neutron.db import api as session
from neutron.i18n import _LE, _LI
from neutron import manager
from neutron import wsgi
# Configuration options registered for all Neutron services.
service_opts = [
    cfg.IntOpt('periodic_interval',
               default=40,
               help=_('Seconds between running periodic tasks')),
    cfg.IntOpt('api_workers',
               help=_('Number of separate API worker processes for service. '
                      'If not specified, the default is equal to the number '
                      'of CPUs available for best performance.')),
    cfg.IntOpt('rpc_workers',
               default=0,
               help=_('Number of RPC worker processes for service')),
    cfg.IntOpt('periodic_fuzzy_delay',
               default=5,
               help=_('Range of seconds to randomly delay when starting the '
                      'periodic task scheduler to reduce stampeding. '
                      '(Disable by setting to 0)')),
]

CONF = cfg.CONF
CONF.register_opts(service_opts)

LOG = logging.getLogger(__name__)
class WsgiService(object):
    """Base class for WSGI based services.

    For each api you define, you must also define these flags:
    :<api>_listen: The address on which to listen
    :<api>_listen_port: The port on which to listen
    """

    def __init__(self, app_name):
        self.app_name = app_name
        self.wsgi_app = None  # populated by start()

    def start(self):
        """Launch the WSGI application for this service."""
        self.wsgi_app = _run_wsgi(self.app_name)

    def wait(self):
        """Block until the WSGI application finishes."""
        self.wsgi_app.wait()
class NeutronApiService(WsgiService):
    """Class for neutron-api service."""

    @classmethod
    def create(cls, app_name='neutron'):
        """Build a NeutronApiService after initializing logging.

        Logging must be configured before the service starts so startup
        messages are not lost.
        """
        # Setup logging early, supplying both the CLI options and the
        # configuration mapping from the config file
        # We only update the conf dict for the verbose and debug
        # flags. Everything else must be set up in the conf file...
        # Log the options used when starting if we're in debug mode...
        config.setup_logging()
        service = cls(app_name)
        return service
def serve_wsgi(cls):
    """Create and start a WsgiService of type *cls*; return it.

    Any exception during startup is logged and re-raised so the caller
    (the process entry point) can terminate.
    """
    try:
        service = cls.create()
        service.start()
    except Exception:
        with excutils.save_and_reraise_exception():
            LOG.exception(_LE('Unrecoverable error: please check log '
                              'for details.'))
    return service
class RpcWorker(common_service.ServiceBase):
    """Wraps a worker to be handled by ProcessLauncher"""

    def __init__(self, plugin):
        self._plugin = plugin
        self._servers = []

    def start(self):
        # The plugin returns the list of servers this worker manages.
        self._servers = self._plugin.start_rpc_listeners()

    def wait(self):
        """Wait on all servers; log and re-raise any failure."""
        try:
            self._wait()
        except Exception:
            LOG.exception(_LE('done with wait'))
            raise

    def _wait(self):
        LOG.debug('calling RpcWorker wait()')
        # Plugins may return objects that are not real RPC servers; only
        # MessageHandlingServer instances support wait()/stop().
        for server in self._servers:
            if isinstance(server, rpc_server.MessageHandlingServer):
                LOG.debug('calling wait on %s', server)
                server.wait()
            else:
                LOG.debug('NOT calling wait on %s', server)
        LOG.debug('returning from RpcWorker wait()')

    def stop(self):
        """Stop every managed MessageHandlingServer."""
        LOG.debug('calling RpcWorker stop()')
        for server in self._servers:
            if isinstance(server, rpc_server.MessageHandlingServer):
                LOG.debug('calling stop on %s', server)
                server.stop()

    @staticmethod
    def reset():
        # Invoked by ProcessLauncher on SIGHUP to reload configuration.
        config.reset_service()
def serve_rpc():
    """Start the plugin's RPC listeners.

    Returns either a started RpcWorker (rpc_workers < 1, in-process) or a
    ProcessLauncher managing rpc_workers subprocesses.

    :raises NotImplementedError: when the active plugin does not implement
        start_rpc_listeners.
    """
    plugin = manager.NeutronManager.get_plugin()

    # If 0 < rpc_workers then start_rpc_listeners would be called in a
    # subprocess and we cannot simply catch the NotImplementedError. It is
    # simpler to check this up front by testing whether the plugin supports
    # multiple RPC workers.
    if not plugin.rpc_workers_supported():
        LOG.debug("Active plugin doesn't implement start_rpc_listeners")
        if 0 < cfg.CONF.rpc_workers:
            LOG.error(_LE("'rpc_workers = %d' ignored because "
                          "start_rpc_listeners is not implemented."),
                      cfg.CONF.rpc_workers)
        raise NotImplementedError()

    try:
        rpc = RpcWorker(plugin)

        if cfg.CONF.rpc_workers < 1:
            LOG.debug('starting rpc directly, workers=%s',
                      cfg.CONF.rpc_workers)
            rpc.start()
            return rpc
        else:
            # dispose the whole pool before os.fork, otherwise there will
            # be shared DB connections in child processes which may cause
            # DB errors.
            LOG.debug('using launcher for rpc, workers=%s',
                      cfg.CONF.rpc_workers)
            session.dispose()
            launcher = common_service.ProcessLauncher(cfg.CONF,
                                                      wait_interval=1.0)
            launcher.launch_service(rpc, workers=cfg.CONF.rpc_workers)
            return launcher
    except Exception:
        with excutils.save_and_reraise_exception():
            LOG.exception(_LE('Unrecoverable error: please check log for '
                              'details.'))
def _get_api_workers():
    """Return the number of API worker processes to spawn.

    Uses the ``api_workers`` option when set; otherwise falls back to one
    worker per available CPU as reported by oslo processutils.
    """
    configured = cfg.CONF.api_workers
    return processutils.get_worker_count() if configured is None else configured
def _run_wsgi(app_name):
    """Load paste app *app_name* and serve it; return the wsgi.Server.

    Returns None (after logging an error) when no application can be
    loaded from the paste configuration.
    """
    app = config.load_paste_app(app_name)
    if not app:
        LOG.error(_LE('No known API applications configured.'))
        return
    server = wsgi.Server("Neutron")
    server.start(app, cfg.CONF.bind_port, cfg.CONF.bind_host,
                 workers=_get_api_workers())
    # Dump the initial option values
    LOG.info(_LI("Neutron service started, listening on %(host)s:%(port)s"),
             {'host': cfg.CONF.bind_host, 'port': cfg.CONF.bind_port})
    return server
class Service(n_rpc.Service):
    """Service object for binaries running on hosts.

    A service takes a manager and enables rpc by listening to queues based
    on topic. It also periodically runs tasks on the manager.
    """

    def __init__(self, host, binary, topic, manager, report_interval=None,
                 periodic_interval=None, periodic_fuzzy_delay=None,
                 *args, **kwargs):
        # *manager* is the dotted path of the manager class; it is imported
        # and instantiated here, extra args/kwargs are forwarded to it.
        self.binary = binary
        self.manager_class_name = manager
        manager_class = importutils.import_class(self.manager_class_name)
        self.manager = manager_class(host=host, *args, **kwargs)
        self.report_interval = report_interval
        self.periodic_interval = periodic_interval
        self.periodic_fuzzy_delay = periodic_fuzzy_delay
        self.saved_args, self.saved_kwargs = args, kwargs
        self.timers = []
        super(Service, self).__init__(host, topic, manager=self.manager)

    def start(self):
        """Start RPC consumption and the report/periodic timer loops."""
        self.manager.init_host()
        super(Service, self).start()
        if self.report_interval:
            pulse = loopingcall.FixedIntervalLoopingCall(self.report_state)
            pulse.start(interval=self.report_interval,
                        initial_delay=self.report_interval)
            self.timers.append(pulse)

        if self.periodic_interval:
            # Fuzzy delay spreads the first run across processes to avoid
            # stampeding (see the periodic_fuzzy_delay option).
            if self.periodic_fuzzy_delay:
                initial_delay = random.randint(0, self.periodic_fuzzy_delay)
            else:
                initial_delay = None

            periodic = loopingcall.FixedIntervalLoopingCall(
                self.periodic_tasks)
            periodic.start(interval=self.periodic_interval,
                           initial_delay=initial_delay)
            self.timers.append(periodic)
        self.manager.after_start()

    def __getattr__(self, key):
        # Delegate unknown attribute access to the manager instance.
        manager = self.__dict__.get('manager', None)
        return getattr(manager, key)

    @classmethod
    def create(cls, host=None, binary=None, topic=None, manager=None,
               report_interval=None, periodic_interval=None,
               periodic_fuzzy_delay=None):
        """Instantiates class and passes back application object.

        :param host: defaults to CONF.host
        :param binary: defaults to basename of executable
        :param topic: defaults to bin_name - 'neutron-' part
        :param manager: defaults to CONF.<topic>_manager
        :param report_interval: defaults to CONF.report_interval
        :param periodic_interval: defaults to CONF.periodic_interval
        :param periodic_fuzzy_delay: defaults to CONF.periodic_fuzzy_delay
        """
        if not host:
            host = CONF.host
        if not binary:
            # Derive the binary name from the outermost stack frame's file.
            binary = os.path.basename(inspect.stack()[-1][1])
        if not topic:
            topic = binary.rpartition('neutron-')[2]
            topic = topic.replace("-", "_")
        if not manager:
            manager = CONF.get('%s_manager' % topic, None)
        if report_interval is None:
            report_interval = CONF.report_interval
        if periodic_interval is None:
            periodic_interval = CONF.periodic_interval
        if periodic_fuzzy_delay is None:
            periodic_fuzzy_delay = CONF.periodic_fuzzy_delay
        service_obj = cls(host, binary, topic, manager,
                          report_interval=report_interval,
                          periodic_interval=periodic_interval,
                          periodic_fuzzy_delay=periodic_fuzzy_delay)

        return service_obj

    def kill(self):
        """Destroy the service object."""
        self.stop()

    def stop(self):
        """Stop RPC consumption and every running timer."""
        super(Service, self).stop()
        for x in self.timers:
            try:
                x.stop()
            except Exception:
                # Best effort: keep stopping the remaining timers.
                LOG.exception(_LE("Exception occurs when timer stops"))
        self.timers = []

    def wait(self):
        """Wait for RPC consumers and all timers to finish."""
        super(Service, self).wait()
        for x in self.timers:
            try:
                x.wait()
            except Exception:
                LOG.exception(_LE("Exception occurs when waiting for timer"))

    def reset(self):
        # Invoked on SIGHUP to reload configuration.
        config.reset_service()

    def periodic_tasks(self, raise_on_error=False):
        """Tasks to be run at a periodic interval."""
        ctxt = context.get_admin_context()
        self.manager.periodic_tasks(ctxt, raise_on_error=raise_on_error)

    def report_state(self):
        """Update the state of this service."""
        # Todo(gongysh) report state to neutron server
        pass
| apache-2.0 |
sogelink/ansible | lib/ansible/modules/web_infrastructure/ansible_tower/tower_job_wait.py | 34 | 4325 | #!/usr/bin/python
# coding: utf-8 -*-
# (c) 2017, Wayne Witzel III <wayne@riotousliving.com>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: tower_job_wait
version_added: "2.3"
author: "Wayne Witzel III (@wwitzel3)"
short_description: Wait for Ansible Tower job to finish.
description:
- Wait for Ansible Tower job to finish and report success or failure. See
U(https://www.ansible.com/tower) for an overview.
options:
job_id:
description:
- ID of the job to monitor.
required: True
min_interval:
description:
- Minimum interval in seconds, to request an update from Tower.
default: 1
max_interval:
description:
- Maximum interval in seconds, to request an update from Tower.
default: 30
timeout:
description:
- Maximum time in seconds to wait for a job to finish.
default: null
extends_documentation_fragment: tower
'''
# Usage examples rendered by ansible-doc.  The job id from the registered
# launch result must be Jinja2-templated ("{{ job.id }}"); a bare job.id
# would be passed to the module as the literal string "job.id" and fail
# the type='int' check.
EXAMPLES = '''
- name: Launch a job
  tower_job_launch:
    job_template: "My Job Template"
  register: job

- name: Wait for job max 120s
  tower_job_wait:
    job_id: "{{ job.id }}"
    timeout: 120
'''
RETURN = '''
id:
description: job id that is being waited on
returned: success
type: int
sample: 99
elapsed:
description: total time in seconds the job took to run
returned: success
type: float
sample: 10.879
started:
description: timestamp of when the job started running
returned: success
type: string
sample: "2017-03-01T17:03:53.200234Z"
finished:
description: timestamp of when the job finished running
returned: success
type: string
sample: "2017-03-01T17:04:04.078782Z"
status:
description: current status of job
returned: success
type: string
sample: successful
'''
from ansible.module_utils.ansible_tower import tower_auth_config, tower_check_mode, tower_argument_spec, HAS_TOWER_CLI
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.six.moves import cStringIO as StringIO
try:
import tower_cli
import tower_cli.utils.exceptions as exc
from tower_cli.conf import settings
except ImportError:
pass
def main():
    """Wait for a Tower job to finish and report its final state."""
    argument_spec = tower_argument_spec()
    argument_spec.update(dict(
        job_id=dict(type='int', required=True),
        timeout=dict(type='int'),
        min_interval=dict(type='float', default=1),
        max_interval=dict(type='float', default=30),
    ))

    module = AnsibleModule(
        argument_spec,
        supports_check_mode=True
    )

    if not HAS_TOWER_CLI:
        module.fail_json(msg='ansible-tower-cli required for this module')

    json_output = {}
    fail_json = None

    tower_auth = tower_auth_config(module)
    with settings.runtime_values(**tower_auth):
        tower_check_mode(module)
        job = tower_cli.get_resource('job')
        params = module.params.copy()
        # tower-cli gets very noisy when monitoring.
        # We pass in our outfile to suppress the out during our monitor call.
        outfile = StringIO()
        params['outfile'] = outfile
        job_id = params.get('job_id')
        try:
            # NOTE(review): the full params dict (including job_id and the
            # min/max_interval options) is forwarded to monitor() as
            # **kwargs - presumably tower-cli tolerates the extras; confirm
            # against the tower-cli monitor() signature.
            result = job.monitor(job_id, **params)
        except exc.Timeout as excinfo:
            # On timeout, report the job's last known status rather than
            # failing outright (json_output['timeout'] flags the condition).
            result = job.status(job_id)
            result['id'] = job_id
            json_output['msg'] = 'Timeout waiting for job to finish.'
            json_output['timeout'] = True
        except exc.NotFound as excinfo:
            fail_json = dict(msg='Unable to wait, no job_id {0} found: {1}'.format(job_id, excinfo), changed=False)
        except (exc.ConnectionError, exc.BadRequest) as excinfo:
            fail_json = dict(msg='Unable to wait for job: {0}'.format(excinfo), changed=False)

    # fail_json() must be called outside the settings context manager.
    if fail_json is not None:
        module.fail_json(**fail_json)

    json_output['success'] = True
    for k in ('id', 'status', 'elapsed', 'started', 'finished'):
        json_output[k] = result.get(k)

    module.exit_json(**json_output)


if __name__ == '__main__':
    main()
| gpl-3.0 |
sharkykh/SickRage | lib/hachoir_parser/video/asf.py | 86 | 12869 | """
Advanced Streaming Format (ASF) parser, format used by Windows Media Video
(WMF) and Windows Media Audio (WMA).
Informations:
- http://www.microsoft.com/windows/windowsmedia/forpros/format/asfspec.aspx
- http://swpat.ffii.org/pikta/xrani/asf/index.fr.html
Author: Victor Stinner
Creation: 5 august 2006
"""
from hachoir_parser import Parser
from hachoir_core.field import (FieldSet, ParserError,
UInt16, UInt32, UInt64,
TimestampWin64, TimedeltaWin64,
String, PascalString16, Enum,
Bit, Bits, PaddingBits,
PaddingBytes, NullBytes, RawBytes)
from hachoir_core.endian import LITTLE_ENDIAN
from hachoir_core.text_handler import (
displayHandler, filesizeHandler)
from hachoir_core.tools import humanBitRate
from itertools import izip
from hachoir_parser.video.fourcc import audio_codec_name, video_fourcc_name
from hachoir_parser.common.win32 import BitmapInfoHeader, GUID
# Sanity bound on the ASF header object size, used by AsfFile.validate().
MAX_HEADER_SIZE = 100 * 1024 # bytes
class AudioHeader(FieldSet):
    """ASF audio stream properties (WAVEFORMATEX-like layout)."""
    guid = "F8699E40-5B4D-11CF-A8FD-00805F5C442B"

    def createFields(self):
        # Field order mirrors the on-disk layout; do not reorder.
        yield Enum(UInt16(self, "twocc"), audio_codec_name)
        yield UInt16(self, "channels")
        yield UInt32(self, "sample_rate")
        yield UInt32(self, "bit_rate")
        yield UInt16(self, "block_align")
        yield UInt16(self, "bits_per_sample")
        yield UInt16(self, "codec_specific_size")
        size = self["codec_specific_size"].value
        if size:
            yield RawBytes(self, "codec_specific", size)
class BitrateMutualExclusion(FieldSet):
    """Lists streams that are mutually exclusive (by language, bitrate, ...)."""
    guid = "D6E229DC-35DA-11D1-9034-00A0C90349BE"
    # Known exclusion-type GUIDs.
    mutex_name = {
        "D6E22A00-35DA-11D1-9034-00A0C90349BE": "Language",
        "D6E22A01-35DA-11D1-9034-00A0C90349BE": "Bitrate",
        "D6E22A02-35DA-11D1-9034-00A0C90349BE": "Unknown",
    }

    def createFields(self):
        yield Enum(GUID(self, "exclusion_type"), self.mutex_name)
        yield UInt16(self, "nb_stream")
        for index in xrange(self["nb_stream"].value):
            yield UInt16(self, "stream[]")
class VideoHeader(FieldSet):
    """ASF video stream properties (BITMAPINFOHEADER based layout)."""
    guid = "BC19EFC0-5B4D-11CF-A8FD-00805F5C442B"

    def createFields(self):
        # The previous implementation carried a dead "if False:" branch
        # with an alternative flat layout; it was unreachable and has been
        # removed.  Encoded image size precedes a reserved byte and the
        # BITMAPINFOHEADER format data.
        yield UInt32(self, "width")
        yield UInt32(self, "height")
        yield PaddingBytes(self, "reserved[]", 1)
        yield UInt16(self, "format_data_size")
        # A BITMAPINFOHEADER is at least 40 bytes.
        if self["format_data_size"].value < 40:
            raise ParserError("Unknown format data size")
        yield BitmapInfoHeader(self, "bmp_info", use_fourcc=True)
class FileProperty(FieldSet):
    """Global file properties: size, timestamps, durations, packet bounds."""
    guid = "8CABDCA1-A947-11CF-8EE4-00C00C205365"

    def createFields(self):
        yield GUID(self, "guid")
        yield filesizeHandler(UInt64(self, "file_size"))
        yield TimestampWin64(self, "creation_date")
        yield UInt64(self, "pckt_count")
        yield TimedeltaWin64(self, "play_duration")
        yield TimedeltaWin64(self, "send_duration")
        yield UInt64(self, "preroll")
        yield Bit(self, "broadcast", "Is broadcast?")
        yield Bit(self, "seekable", "Seekable stream?")
        yield PaddingBits(self, "reserved[]", 30)
        yield filesizeHandler(UInt32(self, "min_pckt_size"))
        yield filesizeHandler(UInt32(self, "max_pckt_size"))
        yield displayHandler(UInt32(self, "max_bitrate"), humanBitRate)
class HeaderExtension(FieldSet):
    """Header extension object; its payload is kept as raw bytes."""
    guid = "5FBF03B5-A92E-11CF-8EE3-00C00C205365"

    def createFields(self):
        yield GUID(self, "reserved[]")
        yield UInt16(self, "reserved[]")
        yield UInt32(self, "size")
        if self["size"].value:
            yield RawBytes(self, "data", self["size"].value)
class Header(FieldSet):
    """Top-level header object: a counted sequence of sub-objects."""
    guid = "75B22630-668E-11CF-A6D9-00AA0062CE6C"

    def createFields(self):
        yield UInt32(self, "obj_count")
        yield PaddingBytes(self, "reserved[]", 2)
        for index in xrange(self["obj_count"].value):
            yield Object(self, "object[]")
class Metadata(FieldSet):
    """Content description: five UTF-16 strings prefixed by their sizes."""
    guid = "75B22633-668E-11CF-A6D9-00AA0062CE6C"
    # NOTE(review): per the ASF spec the five entries are title, author,
    # copyright, description and rating - "xxx"/"yyy" look like placeholder
    # names; confirm before renaming (field names are part of the parsed
    # output and may be relied on by callers).
    names = ("title", "author", "copyright", "xxx", "yyy")

    def createFields(self):
        # Five sizes come first, then the strings in the same order.
        for index in xrange(5):
            yield UInt16(self, "size[]")
        for name, size in izip(self.names, self.array("size")):
            if size.value:
                yield String(self, name, size.value, charset="UTF-16-LE", strip=" \0")
class Descriptor(FieldSet):
    """
    One name/typed-value entry.  See ExtendedContentDescription class.
    """
    TYPE_BYTE_ARRAY = 1
    TYPE_NAME = {
        0: "Unicode",
        1: "Byte array",
        2: "BOOL (32 bits)",
        3: "DWORD (32 bits)",
        4: "QWORD (64 bits)",
        5: "WORD (16 bits)"
    }

    def createFields(self):
        yield PascalString16(self, "name", "Name", charset="UTF-16-LE", strip="\0")
        yield Enum(UInt16(self, "type"), self.TYPE_NAME)
        yield UInt16(self, "value_length")
        type = self["type"].value
        size = self["value_length"].value
        name = "value"
        # Decode the value according to its declared type; odd-length
        # "Unicode" values and unhandled types fall through to RawBytes.
        if type == 0 and (size % 2) == 0:
            yield String(self, name, size, charset="UTF-16-LE", strip="\0")
        elif type in (2, 3):
            yield UInt32(self, name)
        elif type == 4:
            yield UInt64(self, name)
        else:
            yield RawBytes(self, name, size)
class ExtendedContentDescription(FieldSet):
    """Counted list of name/value Descriptor entries."""
    guid = "D2D0A440-E307-11D2-97F0-00A0C95EA850"

    def createFields(self):
        yield UInt16(self, "count")
        for index in xrange(self["count"].value):
            yield Descriptor(self, "descriptor[]")
class Codec(FieldSet):
    """
    One codec entry.  See CodecList class.
    """
    type_name = {
        1: "video",
        2: "audio"
    }

    def createFields(self):
        yield Enum(UInt16(self, "type"), self.type_name)
        # Name/description lengths are counted in UTF-16 code units
        # (2 bytes each), hence the *2 below.
        yield UInt16(self, "name_len", "Name length in character (byte=len*2)")
        if self["name_len"].value:
            yield String(self, "name", self["name_len"].value*2, "Name", charset="UTF-16-LE", strip=" \0")
        yield UInt16(self, "desc_len", "Description length in character (byte=len*2)")
        if self["desc_len"].value:
            yield String(self, "desc", self["desc_len"].value*2, "Description", charset="UTF-16-LE", strip=" \0")
        yield UInt16(self, "info_len")
        if self["info_len"].value:
            yield RawBytes(self, "info", self["info_len"].value)
class CodecList(FieldSet):
    """Counted list of Codec entries."""
    guid = "86D15240-311D-11D0-A3A4-00A0C90348F6"

    def createFields(self):
        yield GUID(self, "reserved[]")
        yield UInt32(self, "count")
        for index in xrange(self["count"].value):
            yield Codec(self, "codec[]")
class SimpleIndexEntry(FieldSet):
    """
    One (packet number, packet count) pair.  See SimpleIndex class.
    """
    def createFields(self):
        yield UInt32(self, "pckt_number")
        yield UInt16(self, "pckt_count")
class SimpleIndex(FieldSet):
    """Seek index: fixed time interval mapped to packet positions."""
    guid = "33000890-E5B1-11CF-89F4-00A0C90349CB"

    def createFields(self):
        yield GUID(self, "file_id")
        yield TimedeltaWin64(self, "entry_interval")
        yield UInt32(self, "max_pckt_count")
        yield UInt32(self, "entry_count")
        for index in xrange(self["entry_count"].value):
            yield SimpleIndexEntry(self, "entry[]")
class BitRate(FieldSet):
    """
    Average bitrate of one stream.  See BitRateList class.
    """
    def createFields(self):
        yield Bits(self, "stream_index", 7)
        yield PaddingBits(self, "reserved", 9)
        yield displayHandler(UInt32(self, "avg_bitrate"), humanBitRate)
class BitRateList(FieldSet):
    """Counted list of per-stream BitRate records."""
    guid = "7BF875CE-468D-11D1-8D82-006097C9A2B2"

    def createFields(self):
        yield UInt16(self, "count")
        for index in xrange(self["count"].value):
            yield BitRate(self, "bit_rate[]")
class Data(FieldSet):
    """Data object holding the media packets; payload kept as raw bytes."""
    guid = "75B22636-668E-11CF-A6D9-00AA0062CE6C"

    def createFields(self):
        yield GUID(self, "file_id")
        yield UInt64(self, "packet_count")
        yield PaddingBytes(self, "reserved", 2)
        # Remaining bits of this field set, converted to bytes.  Integer
        # division under Python 2 (this module uses izip/xrange).
        size = (self.size - self.current_size) / 8
        yield RawBytes(self, "data", size)
class StreamProperty(FieldSet):
    """Per-stream properties; embeds a type-specific header (audio/video)."""
    guid = "B7DC0791-A9B7-11CF-8EE6-00C00C205365"

    def createFields(self):
        yield GUID(self, "type")
        yield GUID(self, "error_correction")
        yield UInt64(self, "time_offset")
        yield UInt32(self, "data_len")
        yield UInt32(self, "error_correct_len")
        yield Bits(self, "stream_index", 7)
        yield Bits(self, "reserved[]", 8)
        yield Bit(self, "encrypted", "Content is encrypted?")
        yield UInt32(self, "reserved[]")
        size = self["data_len"].value
        if size:
            # Dispatch on the stream-type GUID to a registered parser
            # (e.g. AudioHeader/VideoHeader); unknown types stay raw.
            tag = self["type"].value
            if tag in Object.TAG_INFO:
                name, parser = Object.TAG_INFO[tag][0:2]
                yield parser(self, name, size=size*8)
            else:
                yield RawBytes(self, "data", size)
        size = self["error_correct_len"].value
        if size:
            yield RawBytes(self, "error_correct", size)
class Object(FieldSet):
    """Generic ASF top-level object: GUID + size + type-specific content."""
    # This list is converted to a dictionnary later where the key is the GUID
    TAG_INFO = (
        ("header", Header, "Header object"),
        ("file_prop", FileProperty, "File property"),
        ("header_ext", HeaderExtension, "Header extension"),
        ("codec_list", CodecList, "Codec list"),
        ("simple_index", SimpleIndex, "Simple index"),
        ("data", Data, "Data object"),
        ("stream_prop[]", StreamProperty, "Stream properties"),
        ("bit_rates", BitRateList, "Bit rate list"),
        ("ext_desc", ExtendedContentDescription, "Extended content description"),
        ("metadata", Metadata, "Metadata"),
        ("video_header", VideoHeader, "Video"),
        ("audio_header", AudioHeader, "Audio"),
        ("bitrate_mutex", BitrateMutualExclusion, "Bitrate mutual exclusion"),
    )

    def __init__(self, *args, **kw):
        FieldSet.__init__(self, *args, **kw)
        # The leading GUID selects the content parser; unknown GUIDs get
        # handler=None and their content is kept as raw bytes.
        tag = self["guid"].value
        if tag not in self.TAG_INFO:
            self.handler = None
            return
        info = self.TAG_INFO[tag]
        self._name = info[0]
        self.handler = info[1]

    def createFields(self):
        yield GUID(self, "guid")
        yield filesizeHandler(UInt64(self, "size"))
        # "size" covers the whole object including the 24-byte preamble
        # already consumed above.
        size = self["size"].value - self.current_size/8
        if 0 < size:
            if self.handler:
                yield self.handler(self, "content", size=size*8)
            else:
                yield RawBytes(self, "content", size)
# Rebuild Object.TAG_INFO as a GUID -> (name, parser, description) mapping;
# the original tuple is preserved in tag_info_list.
tag_info_list = Object.TAG_INFO
Object.TAG_INFO = dict( (parser[1].guid, parser) for parser in tag_info_list )
class AsfFile(Parser):
    """Top-level parser for ASF containers (WMV video / WMA audio)."""
    # 16-byte GUID of the ASF header object, used as the file magic.
    MAGIC = "\x30\x26\xB2\x75\x8E\x66\xCF\x11\xA6\xD9\x00\xAA\x00\x62\xCE\x6C"
    PARSER_TAGS = {
        "id": "asf",
        "category": "video",
        "file_ext": ("wmv", "wma", "asf"),
        "mime": (u"video/x-ms-asf", u"video/x-ms-wmv", u"audio/x-ms-wma"),
        "min_size": 24*8,
        "description": "Advanced Streaming Format (ASF), used for WMV (video) and WMA (audio)",
        "magic": ((MAGIC, 0),),
    }
    # Maps detected MIME type to (filename suffix, human description).
    FILE_TYPE = {
        "video/x-ms-wmv": (".wmv", u"Window Media Video (wmv)"),
        "video/x-ms-asf": (".asf", u"ASF container"),
        "audio/x-ms-wma": (".wma", u"Window Media Audio (wma)"),
    }
    endian = LITTLE_ENDIAN

    def validate(self):
        """Check the header GUID and a sane header object size."""
        magic = self.MAGIC
        if self.stream.readBytes(0, len(magic)) != magic:
            return "Invalid magic"
        header = self[0]
        if not(30 <= header["size"].value <= MAX_HEADER_SIZE):
            return "Invalid header size (%u)" % header["size"].value
        return True

    def createMimeType(self):
        # WMV if any video stream exists; else WMA if audio; else plain ASF.
        audio = False
        for prop in self.array("header/content/stream_prop"):
            guid = prop["content/type"].value
            if guid == VideoHeader.guid:
                return u"video/x-ms-wmv"
            if guid == AudioHeader.guid:
                audio = True
        if audio:
            return u"audio/x-ms-wma"
        else:
            return u"video/x-ms-asf"

    def createFields(self):
        # The file is a flat sequence of top-level objects.
        while not self.eof:
            yield Object(self, "object[]")

    def createDescription(self):
        return self.FILE_TYPE[self.mime_type][1]

    def createFilenameSuffix(self):
        return self.FILE_TYPE[self.mime_type][0]

    def createContentSize(self):
        # Only trustworthy when the file starts with a header object.
        if self[0].name != "header":
            return None
        return self["header/content/file_prop/content/file_size"].value * 8
| gpl-3.0 |
Designist/sympy | sympy/polys/tests/test_ring_series.py | 5 | 22003 | from sympy.polys.domains import QQ, EX, RR
from sympy.polys.rings import ring
from sympy.polys.ring_series import (_invert_monoms, rs_integrate,
rs_trunc, rs_mul, rs_square, rs_pow, _has_constant_term, rs_hadamard_exp,
rs_series_from_list, rs_exp, rs_log, rs_newton, rs_series_inversion,
rs_compose_add, rs_asin, rs_atan, rs_atanh, rs_tan, rs_cot, rs_sin, rs_cos,
rs_cos_sin, rs_sinh, rs_cosh, rs_tanh, _tan1, rs_fun, rs_nth_root,
rs_LambertW, rs_series_reversion, rs_is_puiseux)
from sympy.utilities.pytest import raises
from sympy.core.compatibility import range
from sympy.core.symbol import symbols
from sympy.functions import (sin, cos, exp, tan, cot, atan, asin, atanh,
tanh, log, sqrt)
from sympy.core.numbers import Rational
def is_close(a, b, tol=10**(-10)):
    """Assert that ``a`` and ``b`` differ by less than ``tol``.

    Kept as an assertion helper (returns None) so existing callers that
    rely on the AssertionError behaviour keep working; the tolerance is
    now a keyword parameter (default 1e-10) instead of a hard-coded local.
    """
    assert abs(a - b) < tol
def test_ring_series1():
    """Basic helpers: _invert_monoms, rs_hadamard_exp and rs_integrate."""
    R, x = ring('x', QQ)
    p = x**4 + 2*x**3 + 3*x + 4
    assert _invert_monoms(p) == 4*x**4 + 3*x**3 + 2*x + 1
    assert rs_hadamard_exp(p) == x**4/24 + x**3/3 + 3*x + 4
    R, x = ring('x', QQ)
    p = x**4 + 2*x**3 + 3*x + 4
    assert rs_integrate(p, x) == x**5/5 + x**4/2 + 3*x**2/2 + 4*x
    R, x, y = ring('x, y', QQ)
    p = x**2*y**2 + x + 1
    assert rs_integrate(p, x) == x**3*y**2/3 + x**2/2 + x
    assert rs_integrate(p, y) == x**2*y**3/3 + x*y + y
def test_trunc():
    """rs_trunc drops terms of degree >= prec in the given generator."""
    R, x, y, t = ring('x, y, t', QQ)
    p = (y + t*x)**4
    p1 = rs_trunc(p, x, 3)
    assert p1 == y**4 + 4*y**3*t*x + 6*y**2*t**2*x**2
def test_mul_trunc():
    """rs_mul: truncated multiplication, including a cross-ring error case."""
    R, x, y, t = ring('x, y, t', QQ)
    p = 1 + t*x + t*y
    for i in range(2):
        p = rs_mul(p, p, t, 3)

    assert p == 6*x**2*t**2 + 12*x*y*t**2 + 6*y**2*t**2 + 4*x*t + 4*y*t + 1
    p = 1 + t*x + t*y + t**2*x*y
    p1 = rs_mul(p, p, t, 2)
    assert p1 == 1 + 2*t*x + 2*t*y
    R1, z = ring('z', QQ)

    # Multiplying elements of different rings must raise ValueError.
    def test1(p):
        p2 = rs_mul(p, z, x, 2)

    raises(ValueError, lambda: test1(p))
    p1 = 2 + 2*x + 3*x**2
    p2 = 3 + x**2
    assert rs_mul(p1, p2, x, 4) == 2*x**3 + 11*x**2 + 6*x + 6
def test_square_trunc():
    """rs_square agrees with rs_mul(p, p, ...)."""
    R, x, y, t = ring('x, y, t', QQ)
    p = (1 + t*x + t*y)*2
    p1 = rs_mul(p, p, x, 3)
    p2 = rs_square(p, x, 3)
    assert p1 == p2
    p = 1 + x + x**2 + x**3
    assert rs_square(p, x, 4) == 4*x**3 + 3*x**2 + 2*x + 1
def test_pow_trunc():
    """rs_pow: integer, zero, negative and rational exponents."""
    R, x, y, z = ring('x, y, z', QQ)
    p0 = y + x*z
    p = p0**16
    for xx in (x, y, z):
        p1 = rs_trunc(p, xx, 8)
        p2 = rs_pow(p0, 16, xx, 8)
        assert p1 == p2

    p = 1 + x
    p1 = rs_pow(p, 3, x, 2)
    assert p1 == 1 + 3*x
    assert rs_pow(p, 0, x, 2) == 1
    assert rs_pow(p, -2, x, 2) == 1 - 2*x
    p = x + y
    assert rs_pow(p, 3, y, 3) == x**3 + 3*x**2*y + 3*x*y**2
    assert rs_pow(1 + x, Rational(2, 3), x, 4) == 4*x**3/81 - x**2/9 + 2*x/3 + 1
def test_has_constant_term():
    """_has_constant_term: detect a term free of the given generator."""
    R, x, y, z = ring('x, y, z', QQ)
    p = y + x*z
    assert _has_constant_term(p, x)
    p = x + x**4
    assert not _has_constant_term(p, x)
    p = 1 + x + x**4
    assert _has_constant_term(p, x)
    p = x + y + x*z
    # NOTE(review): no assertion follows this last assignment - it looks
    # like a missing "assert _has_constant_term(p, x)"; confirm intent.
def test_inversion():
    """rs_series_inversion: p * p**-1 == 1 mod x**n, plus error cases."""
    R, x = ring('x', QQ)
    p = 2 + x + 2*x**2
    n = 5
    p1 = rs_series_inversion(p, x, n)
    assert rs_trunc(p*p1, x, n) == 1
    R, x, y = ring('x, y', QQ)
    p = 2 + x + 2*x**2 + y*x + x**2*y
    p1 = rs_series_inversion(p, x, n)
    assert rs_trunc(p*p1, x, n) == 1

    R, x, y = ring('x, y', QQ)
    p = 1 + x + y

    # A constant term involving another generator is not supported.
    def test2(p):
        p1 = rs_series_inversion(p, x, 4)

    raises(NotImplementedError, lambda: test2(p))

    p = R.zero

    # The zero series has no inverse.
    def test3(p):
        p1 = rs_series_inversion(p, x, 3)

    raises(ZeroDivisionError, lambda: test3(p))
def test_series_reversion():
    """rs_series_reversion: compositional inverse (tan <-> atan, sin <-> asin)."""
    R, x, y = ring('x, y', QQ)
    p = rs_tan(x, x, 10)
    # NOTE(review): r1/r2 are computed but unused - the assert below
    # repeats the same comparison directly.
    r1 = rs_series_reversion(p, x, 8, y)
    r2 = rs_atan(y, y, 8)
    assert rs_series_reversion(p, x, 8, y) == rs_atan(y, y, 8)

    p = rs_sin(x, x, 10)
    assert rs_series_reversion(p, x, 8, y) == 5*y**7/112 + 3*y**5/40 + \
        y**3/6 + y
def test_series_from_list():
    """rs_series_from_list: evaluate sum(c[i]*p**i) truncated at prec."""
    R, x = ring('x', QQ)
    p = 1 + 2*x + x**2 + 3*x**3
    c = [1, 2, 0, 4, 4]
    r = rs_series_from_list(p, c, x, 5)
    pc = R.from_list(list(reversed(c)))
    r1 = rs_trunc(pc.compose(x, p), x, 5)
    assert r == r1
    R, x, y = ring('x, y', QQ)
    c = [1, 3, 5, 7]
    p1 = rs_series_from_list(x + y, c, x, 3, concur=0)
    p2 = rs_trunc((1 + 3*(x+y) + 5*(x+y)**2 + 7*(x+y)**3), x, 3)
    assert p1 == p2
    R, x = ring('x', QQ)
    h = 25
    p = rs_exp(x, x, h) - 1
    p1 = rs_series_from_list(p, c, x, h)
    # Cross-check against the naive power-by-power accumulation.
    p2 = 0
    for i, cx in enumerate(c):
        p2 += cx*rs_pow(p, i, x, h)
    assert p1 == p2
def test_log():
    """rs_log: univariate, multivariate and symbolic-constant-term series."""
    R, x = ring('x', QQ)
    p = 1 + x
    p1 = rs_log(p, x, 4)
    assert p1 == x - x**2/2 + x**3/3
    p = 1 + x + 2*x**2/3
    p1 = rs_log(p, x, 9)
    assert p1 == -17*x**8/648 + 13*x**7/189 - 11*x**6/162 - x**5/45 + \
        7*x**4/36 - x**3/3 + x**2/6 + x
    # log(1/p) == -log(p)
    p2 = rs_series_inversion(p, x, 9)
    p3 = rs_log(p2, x, 9)
    assert p3 == -p1

    R, x, y = ring('x, y', QQ)
    p = 1 + x + 2*y*x**2
    p1 = rs_log(p, x, 6)
    assert p1 == (4*x**5*y**2 - 2*x**5*y - 2*x**4*y**2 + x**5/5 + 2*x**4*y -
                  x**4/4 - 2*x**3*y + x**3/3 + 2*x**2*y - x**2/2 + x)

    # Constant term in series
    a = symbols('a')
    R, x, y = ring('x, y', EX)
    assert rs_log(x + a, x, 5) == -EX(1/(4*a**4))*x**4 + EX(1/(3*a**3))*x**3 \
        - EX(1/(2*a**2))*x**2 + EX(1/a)*x + EX(log(a))
    assert rs_log(x + x**2*y + a, x, 4) == -EX(a**(-2))*x**3*y + \
        EX(1/(3*a**3))*x**3 + EX(1/a)*x**2*y - EX(1/(2*a**2))*x**2 + \
        EX(1/a)*x + EX(log(a))

    p = x + x**2 + 3
    assert rs_log(p, x, 10).compose(x, 5) == EX(log(3) + 19281291595/9920232)
def test_exp():
    """rs_exp: round-trips with rs_log and symbolic constant terms."""
    R, x = ring('x', QQ)
    p = x + x**4
    for h in [10, 30]:
        # log(exp(q)) == q round-trip at two precisions.
        q = rs_series_inversion(1 + p, x, h) - 1
        p1 = rs_exp(q, x, h)
        q1 = rs_log(p1, x, h)
        assert q1 == q

    p1 = rs_exp(p, x, 30)
    assert p1.coeff(x**29) == QQ(74274246775059676726972369, 353670479749588078181744640000)
    prec = 21
    p = rs_log(1 + x, x, prec)
    p1 = rs_exp(p, x, prec)
    assert p1 == x + 1

    # Constant term in series
    a = symbols('a')
    R, x, y = ring('x, y', QQ[exp(a), a])
    assert rs_exp(x + a, x, 5) == exp(a)*x**4/24 + exp(a)*x**3/6 + \
        exp(a)*x**2/2 + exp(a)*x + exp(a)
    assert rs_exp(x + x**2*y + a, x, 5) == exp(a)*x**4*y**2/2 + \
        exp(a)*x**4*y/2 + exp(a)*x**4/24 + exp(a)*x**3*y + \
        exp(a)*x**3/6 + exp(a)*x**2*y + exp(a)*x**2/2 + exp(a)*x + exp(a)

    R, x, y = ring('x, y', EX)
    assert rs_exp(x + a, x, 5) == EX(exp(a)/24)*x**4 + EX(exp(a)/6)*x**3 + \
        EX(exp(a)/2)*x**2 + EX(exp(a))*x + EX(exp(a))
    assert rs_exp(x + x**2*y + a, x, 5) == EX(exp(a)/2)*x**4*y**2 + \
        EX(exp(a)/2)*x**4*y + EX(exp(a)/24)*x**4 + EX(exp(a))*x**3*y + \
        EX(exp(a)/6)*x**3 + EX(exp(a))*x**2*y + EX(exp(a)/2)*x**2 + \
        EX(exp(a))*x + EX(exp(a))
def test_newton():
    """rs_newton: Newton sums series of a polynomial's roots.

    The unused local ``f = [1, 0, -2]`` (the coefficient list of p) in the
    original test has been removed; it was never referenced.
    """
    R, x = ring('x', QQ)
    p = x**2 - 2
    r = rs_newton(p, x, 4)
    assert r == 8*x**4 + 4*x**2 + 2
def test_compose_add():
    """rs_compose_add: polynomial whose roots are sums of roots of p1 and p2."""
    R, x = ring('x', QQ)
    p1 = x**3 - 1
    p2 = x**2 - 2
    assert rs_compose_add(p1, p2) == x**6 - 6*x**4 - 2*x**3 + 12*x**2 - 12*x - 7
def test_fun():
    """rs_fun applied to tan helpers matches calling them directly."""
    R, x, y = ring('x, y', QQ)
    p = x*y + x**2*y**3 + x**5*y
    assert rs_fun(p, rs_tan, x, 10) == rs_tan(p, x, 10)
    assert rs_fun(p, _tan1, x, 10) == _tan1(p, x, 10)
def test_nth_root():
    """rs_nth_root: integer and Puiseux (fractional power) expansions."""
    R, x, y = ring('x, y', QQ)
    r1 = rs_nth_root(1 + x**2*y, 4, x, 10)
    assert rs_nth_root(1 + x**2*y, 4, x, 10) == -77*x**8*y**4/2048 + \
        7*x**6*y**3/128 - 3*x**4*y**2/32 + x**2*y/4 + 1
    assert rs_nth_root(1 + x*y + x**2*y**3, 3, x, 5) == -x**4*y**6/9 + \
        5*x**4*y**5/27 - 10*x**4*y**4/243 - 2*x**3*y**4/9 + 5*x**3*y**3/81 + \
        x**2*y**3/3 - x**2*y**2/9 + x*y/3 + 1
    # Puiseux series: fractional exponents via QQ.
    assert rs_nth_root(8*x, 3, x, 3) == 2*x**QQ(1, 3)
    assert rs_nth_root(8*x + x**2 + x**3, 3, x, 3) == x**QQ(4,3)/12 + 2*x**QQ(1,3)
    r = rs_nth_root(8*x + x**2*y + x**3, 3, x, 4)
    assert r == -x**QQ(7,3)*y**2/288 + x**QQ(7,3)/12 + x**QQ(4,3)*y/12 + 2*x**QQ(1,3)

    # Constant term in series
    a = symbols('a')
    R, x, y = ring('x, y', EX)
    assert rs_nth_root(x + a, 3, x, 4) == EX(5/(81*a**QQ(8, 3)))*x**3 - \
        EX(1/(9*a**QQ(5, 3)))*x**2 + EX(1/(3*a**QQ(2, 3)))*x + EX(a**QQ(1, 3))
    assert rs_nth_root(x**QQ(2, 3) + x**2*y + 5, 2, x, 3) == -EX(sqrt(5)/100)*\
        x**QQ(8, 3)*y - EX(sqrt(5)/16000)*x**QQ(8, 3) + EX(sqrt(5)/10)*x**2*y + \
        EX(sqrt(5)/2000)*x**2 - EX(sqrt(5)/200)*x**QQ(4, 3) + \
        EX(sqrt(5)/10)*x**QQ(2, 3) + EX(sqrt(5))
def test_atan():
    """rs_atan: univariate, multivariate and symbolic constant term."""
    R, x, y = ring('x, y', QQ)
    assert rs_atan(x, x, 9) == -x**7/7 + x**5/5 - x**3/3 + x
    assert rs_atan(x*y + x**2*y**3, x, 9) == 2*x**8*y**11 - x**8*y**9 + \
        2*x**7*y**9 - x**7*y**7/7 - x**6*y**9/3 + x**6*y**7 - x**5*y**7 + \
        x**5*y**5/5 - x**4*y**5 - x**3*y**3/3 + x**2*y**3 + x*y

    # Constant term in series
    a = symbols('a')
    R, x, y = ring('x, y', EX)
    assert rs_atan(x + a, x, 5) == -EX((a**3 - a)/(a**8 + 4*a**6 + 6*a**4 + \
        4*a**2 + 1))*x**4 + EX((3*a**2 - 1)/(3*a**6 + 9*a**4 + \
        9*a**2 + 3))*x**3 - EX(a/(a**4 + 2*a**2 + 1))*x**2 + \
        EX(1/(a**2 + 1))*x + EX(atan(a))
    assert rs_atan(x + x**2*y + a, x, 4) == -EX(2*a/(a**4 + 2*a**2 + 1)) \
        *x**3*y + EX((3*a**2 - 1)/(3*a**6 + 9*a**4 + 9*a**2 + 3))*x**3 + \
        EX(1/(a**2 + 1))*x**2*y - EX(a/(a**4 + 2*a**2 + 1))*x**2 + EX(1/(a**2 \
        + 1))*x + EX(atan(a))
def test_asin():
    """rs_asin on univariate and multivariate arguments."""
    R, x, y = ring('x, y', QQ)
    assert rs_asin(x + x*y, x, 5) == x**3*y**3/6 + x**3*y**2/2 + x**3*y/2 + \
        x**3/6 + x*y + x
    assert rs_asin(x*y + x**2*y**3, x, 6) == x**5*y**7/2 + 3*x**5*y**5/40 + \
        x**4*y**5/2 + x**3*y**3/6 + x**2*y**3 + x*y
def test_tan():
    """rs_tan over QQ, QQ[tan(a), a] and EX coefficient domains."""
    R, x, y = ring('x, y', QQ)
    assert rs_tan(x, x, 9) == \
        x + x**3/3 + 2*x**5/15 + 17*x**7/315
    assert rs_tan(x*y + x**2*y**3, x, 9) == 4*x**8*y**11/3 + 17*x**8*y**9/45 + \
        4*x**7*y**9/3 + 17*x**7*y**7/315 + x**6*y**9/3 + 2*x**6*y**7/3 + \
        x**5*y**7 + 2*x**5*y**5/15 + x**4*y**5 + x**3*y**3/3 + x**2*y**3 + x*y

    # Constant term in series
    a = symbols('a')
    R, x, y = ring('x, y', QQ[tan(a), a])
    assert rs_tan(x + a, x, 5) == (tan(a)**5 + 5*tan(a)**3/3 + \
        2*tan(a)/3)*x**4 + (tan(a)**4 + 4*tan(a)**2/3 + 1/3)*x**3 + \
        (tan(a)**3 + tan(a))*x**2 + (tan(a)**2 + 1)*x + tan(a)
    assert rs_tan(x + x**2*y + a, x, 4) == (2*tan(a)**3 + 2*tan(a))*x**3*y + \
        (tan(a)**4 + 4/3*tan(a)**2 + 1/3)*x**3 + (tan(a)**2 + 1)*x**2*y + \
        (tan(a)**3 + tan(a))*x**2 + (tan(a)**2 + 1)*x + tan(a)

    R, x, y = ring('x, y', EX)
    assert rs_tan(x + a, x, 5) == EX(tan(a)**5 + 5*tan(a)**3/3 + \
        2*tan(a)/3)*x**4 + EX(tan(a)**4 + 4*tan(a)**2/3 + EX(1)/3)*x**3 + \
        EX(tan(a)**3 + tan(a))*x**2 + EX(tan(a)**2 + 1)*x + EX(tan(a))
    assert rs_tan(x + x**2*y + a, x, 4) == EX(2*tan(a)**3 + \
        2*tan(a))*x**3*y + EX(tan(a)**4 + 4*tan(a)**2/3 + EX(1)/3)*x**3 + \
        EX(tan(a)**2 + 1)*x**2*y + EX(tan(a)**3 + tan(a))*x**2 + \
        EX(tan(a)**2 + 1)*x + EX(tan(a))

    # NOTE(review): this trailing rs_atan check looks like it belongs in
    # test_atan rather than test_tan - confirm before relocating.
    p = x + x**2 + 5
    assert rs_atan(p, x, 10).compose(x, 10) == EX(atan(5) + 67701870330562640/ \
        668083460499)
def test_cot():
    """rs_cot: Laurent-type series with negative exponents."""
    R, x, y = ring('x, y', QQ)
    assert rs_cot(x**6 + x**7, x, 8) == x**-6 - x**-5 + x**-4 - x**-3 + \
        x**-2 - x**-1 + 1 - x + x**2 - x**3 + x**4 - x**5 + 2*x**6/3 - 4*x**7/3
    assert rs_cot(x + x**2*y, x, 5) == -x**4*y**5 - x**4*y/15 + x**3*y**4 - \
        x**3/45 - x**2*y**3 - x**2*y/3 + x*y**2 - x/3 - y + x**-1
def test_sin():
    """sin series over QQ, then with a symbolic constant term over
    QQ[sin(a), cos(a), a] and over EX."""
    R, x, y = ring('x, y', QQ)
    assert rs_sin(x, x, 9) == \
        x - x**3/6 + x**5/120 - x**7/5040
    assert rs_sin(x*y + x**2*y**3, x, 9) == x**8*y**11/12 - \
        x**8*y**9/720 + x**7*y**9/12 - x**7*y**7/5040 - x**6*y**9/6 + \
        x**6*y**7/24 - x**5*y**7/2 + x**5*y**5/120 - x**4*y**5/2 - \
        x**3*y**3/6 + x**2*y**3 + x*y

    # Constant term in series
    a = symbols('a')
    R, x, y = ring('x, y', QQ[sin(a), cos(a), a])
    assert rs_sin(x + a, x, 5) == sin(a)*x**4/24 - cos(a)*x**3/6 - \
        sin(a)*x**2/2 + cos(a)*x + sin(a)
    assert rs_sin(x + x**2*y + a, x, 5) == -sin(a)*x**4*y**2/2 - \
        cos(a)*x**4*y/2 + sin(a)*x**4/24 - sin(a)*x**3*y - cos(a)*x**3/6 + \
        cos(a)*x**2*y - sin(a)*x**2/2 + cos(a)*x + sin(a)

    R, x, y = ring('x, y', EX)
    assert rs_sin(x + a, x, 5) == EX(sin(a)/24)*x**4 - EX(cos(a)/6)*x**3 - \
        EX(sin(a)/2)*x**2 + EX(cos(a))*x + EX(sin(a))
    assert rs_sin(x + x**2*y + a, x, 5) == -EX(sin(a)/2)*x**4*y**2 - \
        EX(cos(a)/2)*x**4*y + EX(sin(a)/24)*x**4 - EX(sin(a))*x**3*y - \
        EX(cos(a)/6)*x**3 + EX(cos(a))*x**2*y - EX(sin(a)/2)*x**2 + \
        EX(cos(a))*x + EX(sin(a))
def test_cos():
    """cos series over QQ, then with a symbolic constant term over
    QQ[sin(a), cos(a), a] and over EX."""
    R, x, y = ring('x, y', QQ)
    assert rs_cos(x, x, 9) == \
        x**8/40320 - x**6/720 + x**4/24 - x**2/2 + 1
    assert rs_cos(x*y + x**2*y**3, x, 9) == x**8*y**12/24 - \
        x**8*y**10/48 + x**8*y**8/40320 + x**7*y**10/6 - \
        x**7*y**8/120 + x**6*y**8/4 - x**6*y**6/720 + x**5*y**6/6 - \
        x**4*y**6/2 + x**4*y**4/24 - x**3*y**4 - x**2*y**2/2 + 1

    # Constant term in series
    a = symbols('a')
    R, x, y = ring('x, y', QQ[sin(a), cos(a), a])
    assert rs_cos(x + a, x, 5) == cos(a)*x**4/24 + sin(a)*x**3/6 - \
        cos(a)*x**2/2 - sin(a)*x + cos(a)
    assert rs_cos(x + x**2*y + a, x, 5) == -cos(a)*x**4*y**2/2 + \
        sin(a)*x**4*y/2 + cos(a)*x**4/24 - cos(a)*x**3*y + sin(a)*x**3/6 - \
        sin(a)*x**2*y - cos(a)*x**2/2 - sin(a)*x + cos(a)

    R, x, y = ring('x, y', EX)
    assert rs_cos(x + a, x, 5) == EX(cos(a)/24)*x**4 + EX(sin(a)/6)*x**3 - \
        EX(cos(a)/2)*x**2 - EX(sin(a))*x + EX(cos(a))
    assert rs_cos(x + x**2*y + a, x, 5) == -EX(cos(a)/2)*x**4*y**2 + \
        EX(sin(a)/2)*x**4*y + EX(cos(a)/24)*x**4 - EX(cos(a))*x**3*y + \
        EX(sin(a)/6)*x**3 - EX(sin(a))*x**2*y - EX(cos(a)/2)*x**2 - \
        EX(sin(a))*x + EX(cos(a))
def test_cos_sin():
    """rs_cos_sin returns the pair (cos series, sin series) in one call;
    check it against the individual rs_cos/rs_sin results."""
    R, x, y = ring('x, y', QQ)
    # The locals shadow the imported cos/sin names inside this function only.
    cos, sin = rs_cos_sin(x, x, 9)
    assert cos == rs_cos(x, x, 9)
    assert sin == rs_sin(x, x, 9)
    cos, sin = rs_cos_sin(x + x*y, x, 5)
    assert cos == rs_cos(x + x*y, x, 5)
    assert sin == rs_sin(x + x*y, x, 5)
def test_atanh():
    """atanh series over QQ, then with a symbolic constant term over EX."""
    R, x, y = ring('x, y', QQ)
    assert rs_atanh(x, x, 9) == x**7/7 + x**5/5 + x**3/3 + x
    assert rs_atanh(x*y + x**2*y**3, x, 9) == 2*x**8*y**11 + x**8*y**9 + \
        2*x**7*y**9 + x**7*y**7/7 + x**6*y**9/3 + x**6*y**7 + x**5*y**7 + \
        x**5*y**5/5 + x**4*y**5 + x**3*y**3/3 + x**2*y**3 + x*y

    # Constant term in series
    a = symbols('a')
    R, x, y = ring('x, y', EX)
    assert rs_atanh(x + a, x, 5) == EX((a**3 + a)/(a**8 - 4*a**6 + 6*a**4 - \
        4*a**2 + 1))*x**4 - EX((3*a**2 + 1)/(3*a**6 - 9*a**4 + \
        9*a**2 - 3))*x**3 + EX(a/(a**4 - 2*a**2 + 1))*x**2 - EX(1/(a**2 - \
        1))*x + EX(atanh(a))
    assert rs_atanh(x + x**2*y + a, x, 4) == EX(2*a/(a**4 - 2*a**2 + \
        1))*x**3*y - EX((3*a**2 + 1)/(3*a**6 - 9*a**4 + 9*a**2 - 3))*x**3 - \
        EX(1/(a**2 - 1))*x**2*y + EX(a/(a**4 - 2*a**2 + 1))*x**2 - \
        EX(1/(a**2 - 1))*x + EX(atanh(a))

    p = x + x**2 + 5
    assert rs_atanh(p, x, 10).compose(x, 10) == EX(-733442653682135/5079158784 \
        + atanh(5))
def test_sinh():
    """sinh series over QQ with multivariate arguments."""
    R, x, y = ring('x, y', QQ)
    assert rs_sinh(x, x, 9) == x**7/5040 + x**5/120 + x**3/6 + x
    assert rs_sinh(x*y + x**2*y**3, x, 9) == x**8*y**11/12 + \
        x**8*y**9/720 + x**7*y**9/12 + x**7*y**7/5040 + x**6*y**9/6 + \
        x**6*y**7/24 + x**5*y**7/2 + x**5*y**5/120 + x**4*y**5/2 + \
        x**3*y**3/6 + x**2*y**3 + x*y
def test_cosh():
    """cosh series over QQ with multivariate arguments."""
    R, x, y = ring('x, y', QQ)
    assert rs_cosh(x, x, 9) == x**8/40320 + x**6/720 + x**4/24 + \
        x**2/2 + 1
    assert rs_cosh(x*y + x**2*y**3, x, 9) == x**8*y**12/24 + \
        x**8*y**10/48 + x**8*y**8/40320 + x**7*y**10/6 + \
        x**7*y**8/120 + x**6*y**8/4 + x**6*y**6/720 + x**5*y**6/6 + \
        x**4*y**6/2 + x**4*y**4/24 + x**3*y**4 + x**2*y**2/2 + 1
def test_tanh():
    """tanh series over QQ, then with a symbolic constant term over EX."""
    R, x, y = ring('x, y', QQ)
    assert rs_tanh(x, x, 9) == -17*x**7/315 + 2*x**5/15 - x**3/3 + x
    assert rs_tanh(x*y + x**2*y**3, x, 9) == 4*x**8*y**11/3 - \
        17*x**8*y**9/45 + 4*x**7*y**9/3 - 17*x**7*y**7/315 - x**6*y**9/3 + \
        2*x**6*y**7/3 - x**5*y**7 + 2*x**5*y**5/15 - x**4*y**5 - \
        x**3*y**3/3 + x**2*y**3 + x*y

    # Constant term in series
    a = symbols('a')
    R, x, y = ring('x, y', EX)
    assert rs_tanh(x + a, x, 5) == EX(tanh(a)**5 - 5*tanh(a)**3/3 + \
        2*tanh(a)/3)*x**4 + EX(-tanh(a)**4 + 4*tanh(a)**2/3 - QQ(1, 3))*x**3 + \
        EX(tanh(a)**3 - tanh(a))*x**2 + EX(-tanh(a)**2 + 1)*x + EX(tanh(a))

    # Evaluate the truncated series at x=10, y=5 and compare the scalar.
    p = rs_tanh(x + x**2*y + a, x, 4)
    assert (p.compose(x, 10)).compose(y, 5) == EX(-1000*tanh(a)**4 + \
        10100*tanh(a)**3 + 2470*tanh(a)**2/3 - 10099*tanh(a) + QQ(530, 3))
def test_RR():
    """Series over the RR (real) domain agree numerically with sympy's
    own series expansion."""
    rs_funcs = [rs_sin, rs_cos, rs_tan, rs_cot, rs_atan, rs_tanh]
    sympy_funcs = [sin, cos, tan, cot, atan, tanh]
    R, x, y = ring('x, y', RR)
    a = symbols('a')
    for rs_func, sympy_func in zip(rs_funcs, sympy_funcs):
        # Both sides expand func(2 + t) to order 5 and evaluate at t = 5.
        p = rs_func(2 + x, x, 5).compose(x, 5)
        q = sympy_func(2 + a).series(a, 0, 5).removeO()
        is_close(p.as_expr(), q.subs(a, 5).n())

    # Same comparison for the 5th root.
    p = rs_nth_root(2 + x, 5, x, 5).compose(x, 5)
    q = ((2 + a)**QQ(1, 5)).series(a, 0, 5).removeO()
    is_close(p.as_expr(), q.subs(a, 5).n())
def test_is_regular():
    """rs_is_puiseux detects fractional exponents with respect to the
    given generator only."""
    R, x, y = ring('x, y', QQ)
    p = 1 + 2*x + x**2 + 3*x**3
    assert not rs_is_puiseux(p, x)

    # Fractional power of x: puiseux in x, regular in y.
    p = x + x**QQ(1,5)*y
    assert rs_is_puiseux(p, x)
    assert not rs_is_puiseux(p, y)

    # Fractional power only in y: regular in x.
    p = x + x**2*y**QQ(1,5)*y
    assert not rs_is_puiseux(p, x)
def test_puiseux():
    """Elementary series operations applied to a Puiseux series
    (fractional exponents) in one generator."""
    R, x, y = ring('x, y', QQ)
    p = x**QQ(2,5) + x**QQ(2,3) + x

    r = rs_series_inversion(p, x, 1)
    r1 = -x**QQ(14,15) + x**QQ(4,5) - 3*x**QQ(11,15) + x**QQ(2,3) + \
        2*x**QQ(7,15) - x**QQ(2,5) - x**QQ(1,5) + x**QQ(2,15) - x**QQ(-2,15) \
        + x**QQ(-2,5)
    assert r == r1

    r = rs_nth_root(1 + p, 3, x, 1)
    assert r == -x**QQ(4,5)/9 + x**QQ(2,3)/3 + x**QQ(2,5)/3 + 1

    r = rs_log(1 + p, x, 1)
    assert r == -x**QQ(4,5)/2 + x**QQ(2,3) + x**QQ(2,5)

    r = rs_LambertW(p, x, 1)
    assert r == -x**QQ(4,5) + x**QQ(2,3) + x**QQ(2,5)

    r = rs_exp(p, x, 1)
    assert r == x**QQ(4,5)/2 + x**QQ(2,3) + x**QQ(2,5) + 1

    p1 = x + x**QQ(1,5)*y
    r = rs_exp(p1, x, 1)
    assert r == x**QQ(4,5)*y**4/24 + x**QQ(3,5)*y**3/6 + x**QQ(2,5)*y**2/2 + \
        x**QQ(1,5)*y + 1

    r = rs_atan(p, x, 2)
    assert r == -x**QQ(9,5) - x**QQ(26,15) - x**QQ(22,15) - x**QQ(6,5)/3 + \
        x + x**QQ(2,3) + x**QQ(2,5)

    r = rs_atan(p1, x, 2)
    assert r == x**QQ(9,5)*y**9/9 + x**QQ(9,5)*y**4 - x**QQ(7,5)*y**7/7 - \
        x**QQ(7,5)*y**2 + x*y**5/5 + x - x**QQ(3,5)*y**3/3 + x**QQ(1,5)*y

    r = rs_asin(p, x, 2)
    assert r == x**QQ(9,5)/2 + x**QQ(26,15)/2 + x**QQ(22,15)/2 + \
        x**QQ(6,5)/6 + x + x**QQ(2,3) + x**QQ(2,5)

    r = rs_tan(p, x, 2)
    assert r == x**QQ(9,5) + x**QQ(26,15) + x**QQ(22,15) + x**QQ(6,5)/3 + \
        x + x**QQ(2,3) + x**QQ(2,5)

    r = rs_cot(p, x, 1)
    assert r == -x**QQ(14,15) + x**QQ(4,5) - 3*x**QQ(11,15) + \
        2*x**QQ(2,3)/3 + 2*x**QQ(7,15) - 4*x**QQ(2,5)/3 - x**QQ(1,5) + \
        x**QQ(2,15) - x**QQ(-2,15) + x**QQ(-2,5)

    r = rs_sin(p, x, 2)
    assert r == -x**QQ(9,5)/2 - x**QQ(26,15)/2 - x**QQ(22,15)/2 - \
        x**QQ(6,5)/6 + x + x**QQ(2,3) + x**QQ(2,5)

    r = rs_cos(p, x, 2)
    assert r == x**QQ(28,15)/6 - x**QQ(5,3) + x**QQ(8,5)/24 - x**QQ(7,5) - \
        x**QQ(4,3)/2 - x**QQ(16,15) - x**QQ(4,5)/2 + 1

    # rs_cos_sin must agree with the individual cos/sin results above.
    r = rs_cos_sin(p, x, 2)
    assert r[0] == x**QQ(28,15)/6 - x**QQ(5,3) + x**QQ(8,5)/24 - x**QQ(7,5) - \
        x**QQ(4,3)/2 - x**QQ(16,15) - x**QQ(4,5)/2 + 1
    assert r[1] == -x**QQ(9,5)/2 - x**QQ(26,15)/2 - x**QQ(22,15)/2 - \
        x**QQ(6,5)/6 + x + x**QQ(2,3) + x**QQ(2,5)

    r = rs_atanh(p, x, 2)
    assert r == x**QQ(9,5) + x**QQ(26,15) + x**QQ(22,15) + x**QQ(6,5)/3 + x + \
        x**QQ(2,3) + x**QQ(2,5)

    r = rs_sinh(p, x, 2)
    assert r == x**QQ(9,5)/2 + x**QQ(26,15)/2 + x**QQ(22,15)/2 + \
        x**QQ(6,5)/6 + x + x**QQ(2,3) + x**QQ(2,5)

    r = rs_cosh(p, x, 2)
    assert r == x**QQ(28,15)/6 + x**QQ(5,3) + x**QQ(8,5)/24 + x**QQ(7,5) + \
        x**QQ(4,3)/2 + x**QQ(16,15) + x**QQ(4,5)/2 + 1

    r = rs_tanh(p, x, 2)
    assert r == -x**QQ(9,5) - x**QQ(26,15) - x**QQ(22,15) - x**QQ(6,5)/3 + \
        x + x**QQ(2,3) + x**QQ(2,5)
def test1():
    """Assorted single-generator checks mixing Laurent (negative) and
    Puiseux (fractional) exponents.

    NOTE(review): the name 'test1' is uninformative; consider renaming.
    """
    R, x = ring('x', QQ)

    # Multiplying by x**-5 shifts the sin series into Laurent territory.
    r = rs_sin(x, x, 15)*x**(-5)
    assert r == x**8/6227020800 - x**6/39916800 + x**4/362880 - x**2/5040 + \
        QQ(1,120) - x**-2/6 + x**-4

    p = rs_sin(x, x, 10)
    r = rs_nth_root(p, 2, x, 10)
    assert r == -67*x**QQ(17,2)/29030400 - x**QQ(13,2)/24192 + \
        x**QQ(9,2)/1440 - x**QQ(5,2)/12 + x**QQ(1,2)

    # sin(x)**(5/7) via nth_root followed by pow.
    p = rs_sin(x, x, 10)
    r = rs_nth_root(p, 7, x, 10)
    r = rs_pow(r, 5, x, 10)
    assert r == -97*x**QQ(61,7)/124467840 - x**QQ(47,7)/16464 + \
        11*x**QQ(33,7)/3528 - 5*x**QQ(19,7)/42 + x**QQ(5,7)

    r = rs_exp(x**QQ(1,2), x, 10)
    assert r == x**QQ(19,2)/121645100408832000 + x**9/6402373705728000 + \
        x**QQ(17,2)/355687428096000 + x**8/20922789888000 + \
        x**QQ(15,2)/1307674368000 + x**7/87178291200 + \
        x**QQ(13,2)/6227020800 + x**6/479001600 + x**QQ(11,2)/39916800 + \
        x**5/3628800 + x**QQ(9,2)/362880 + x**4/40320 + x**QQ(7,2)/5040 + \
        x**3/720 + x**QQ(5,2)/120 + x**2/24 + x**QQ(3,2)/6 + x/2 + \
        x**QQ(1,2) + 1
def test_puiseux2():
    """Puiseux series where the coefficient domain is itself a
    polynomial ring (QQ[y])."""
    R, y = ring('y', QQ)
    S, x = ring('x', R)
    p = x + x**QQ(1,5)*y
    r = rs_atan(p, x, 3)
    assert r == (y**13/13 + y**8 + 2*y**3)*x**QQ(13,5) - (y**11/11 + y**6 +
        y)*x**QQ(11,5) + (y**9/9 + y**4)*x**QQ(9,5) - (y**7/7 +
        y**2)*x**QQ(7,5) + (y**5/5 + 1)*x - y**3*x**QQ(3,5)/3 + y*x**QQ(1,5)
| bsd-3-clause |
cgstudiomap/cgstudiomap | main/eggs/Python_Chart-1.39-py2.7.egg/pychart/afm/Times_BoldItalic.py | 12 | 1491 | # AFM font Times-BoldItalic (path: /usr/share/fonts/afms/adobe/ptmbi8a.afm).
# Derived from Ghostscript distribution.
# Go to www.cs.wisc.edu/~ghost to get the Ghostcript source code.
import dir
dir.afm["Times-BoldItalic"] = (500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 250, 389, 555, 500, 500, 833, 778, 333, 333, 333, 500, 570, 250, 333, 250, 278, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 333, 333, 570, 570, 570, 500, 832, 667, 667, 667, 722, 667, 667, 722, 778, 389, 500, 667, 611, 889, 722, 722, 611, 722, 667, 556, 611, 722, 667, 889, 667, 611, 611, 333, 278, 333, 570, 500, 333, 500, 500, 444, 500, 444, 333, 500, 556, 278, 278, 500, 278, 778, 556, 500, 500, 500, 389, 389, 278, 556, 444, 667, 500, 444, 389, 348, 220, 348, 570, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 389, 500, 500, 167, 500, 500, 500, 500, 278, 500, 500, 333, 333, 556, 556, 500, 500, 500, 500, 250, 500, 500, 350, 333, 500, 500, 500, 1000, 1000, 500, 500, 500, 333, 333, 333, 333, 333, 333, 333, 333, 500, 333, 333, 500, 333, 333, 333, 1000, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 944, 500, 266, 500, 500, 500, 500, 611, 722, 944, 300, 500, 500, 500, 500, 500, 722, 500, 500, 500, 278, 500, 500, 278, 500, 722, 500, )
| agpl-3.0 |
oihane/odoo | openerp/netsvc.py | 220 | 9506 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2014 OpenERP SA (<http://www.openerp.com>)
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import logging
import logging.handlers
import os
import platform
import pprint
import release
import sys
import threading
import psycopg2
import openerp
import sql_db
import tools
# Module-level logger for this file.
_logger = logging.getLogger(__name__)
def log(logger, level, prefix, msg, depth=None):
    """Log *msg* pretty-printed, one log record per line.

    The first line carries *prefix*; continuation lines are padded with
    spaces of the same width so the output stays visually aligned.
    *depth* is forwarded to pprint.pformat to limit nesting.
    """
    pad = ' ' * len(prefix)
    text = prefix + pprint.pformat(msg, depth=depth)
    for lineno, line in enumerate(text.split('\n')):
        logger.log(level, line if lineno == 0 else pad + line)
def LocalService(name):
    """
    The openerp.netsvc.LocalService() function is deprecated. It still works
    in two cases: workflows and reports. For workflows, instead of using
    LocalService('workflow'), openerp.workflow should be used (better yet,
    methods on openerp.osv.orm.Model should be used). For reports,
    openerp.report.render_report() should be used (methods on the Model should
    be provided too in the future).
    """
    assert openerp.conf.deprecation.allow_local_service
    _logger.warning("LocalService() is deprecated since march 2013 (it was called with '%s')." % name)

    if name == 'workflow':
        return openerp.workflow

    if name.startswith('report.'):
        # In-memory registered reports take precedence; otherwise fall back
        # to a database lookup on the current thread's registry.
        report = openerp.report.interface.report_int._reports.get(name)
        if report:
            return report
        else:
            dbname = getattr(threading.currentThread(), 'dbname', None)
            if dbname:
                registry = openerp.modules.registry.RegistryManager.get(dbname)
                with registry.cursor() as cr:
                    return registry['ir.actions.report.xml']._lookup_report(cr, name[len('report.'):])
    # NOTE(review): any other service name falls through and implicitly
    # returns None -- callers must handle that.
# Absolute path of the package root; used below to shorten record pathnames.
path_prefix = os.path.realpath(os.path.dirname(os.path.dirname(__file__)))
class PostgreSQLHandler(logging.Handler):
    """ PostgreSQL Logging Handler will store logs in the database, by default
    the current database, can be set using --log-db=DBNAME
    """
    def emit(self, record):
        ct = threading.current_thread()
        ct_db = getattr(ct, 'dbname', None)
        # An explicit --log-db wins unless it is the '%d' placeholder, in
        # which case fall back to the thread's current database.
        dbname = tools.config['log_db'] if tools.config['log_db'] and tools.config['log_db'] != '%d' else ct_db
        if not dbname:
            return
        # Best effort: ignore() swallows any failure and mute_logger keeps
        # the INSERT below from being logged (and thus re-entering emit).
        with tools.ignore(Exception), tools.mute_logger('openerp.sql_db'), sql_db.db_connect(dbname, allow_uri=True).cursor() as cr:
            cr.autocommit(True)
            msg = tools.ustr(record.msg)
            if record.args:
                msg = msg % record.args
            traceback = getattr(record, 'exc_text', '')
            if traceback:
                msg = "%s\n%s" % (msg, traceback)
            # we do not use record.levelname because it may have been changed by ColoredFormatter.
            levelname = logging.getLevelName(record.levelno)
            val = ('server', ct_db, record.name, levelname, msg, record.pathname[len(path_prefix)+1:], record.lineno, record.funcName)
            cr.execute("""
                INSERT INTO ir_logging(create_date, type, dbname, name, level, message, path, line, func)
                VALUES (NOW() at time zone 'UTC', %s, %s, %s, %s, %s, %s, %s, %s)
            """, val)
BLACK, RED, GREEN, YELLOW, BLUE, MAGENTA, CYAN, WHITE, _NOTHING, DEFAULT = range(10)
# The background is set with 40 plus the number of the color, and the foreground with 30
# These are the sequences needed to get colored output
RESET_SEQ = "\033[0m"
COLOR_SEQ = "\033[1;%dm"
BOLD_SEQ = "\033[1m"
COLOR_PATTERN = "%s%s%%s%s" % (COLOR_SEQ, COLOR_SEQ, RESET_SEQ)
# (foreground, background) ANSI color pair per logging level.
LEVEL_COLOR_MAPPING = {
    logging.DEBUG: (BLUE, DEFAULT),
    logging.INFO: (GREEN, DEFAULT),
    logging.WARNING: (YELLOW, DEFAULT),
    logging.ERROR: (RED, DEFAULT),
    logging.CRITICAL: (WHITE, RED),
}
class DBFormatter(logging.Formatter):
    """Formatter that makes %(pid)s and %(dbname)s usable in format strings.

    The database name is read from the current thread; '?' is used when
    the thread carries none.
    """

    def format(self, record):
        record.pid = os.getpid()
        current = threading.currentThread()
        record.dbname = getattr(current, 'dbname', '?')
        return logging.Formatter.format(self, record)
class ColoredFormatter(DBFormatter):
    """DBFormatter that wraps the level name in ANSI color escape codes."""

    def format(self, record):
        fg, bg = LEVEL_COLOR_MAPPING.get(record.levelno, (GREEN, DEFAULT))
        # ANSI convention: foreground = 30 + color, background = 40 + color.
        record.levelname = COLOR_PATTERN % (30 + fg, 40 + bg, record.levelname)
        return DBFormatter.format(self, record)
# Guard so init_logger() configures the root logger only once.
_logger_init = False

def init_logger():
    """Configure logging handlers/formatters/levels from tools.config.

    Picks a handler (syslog, rotating/watched logfile, or stdout), colors
    output when writing to a posix tty, optionally adds the PostgreSQL
    handler, and applies per-logger levels.
    """
    global _logger_init
    if _logger_init:
        return
    _logger_init = True

    # NOTE(review): registers level 25 under the name "INFO" -- presumably
    # so an intermediate level renders as INFO; confirm intent.
    logging.addLevelName(25, "INFO")

    from tools.translate import resetlocale
    resetlocale()

    # create a format for log messages and dates
    format = '%(asctime)s %(pid)s %(levelname)s %(dbname)s %(name)s: %(message)s'

    if tools.config['syslog']:
        # SysLog Handler
        if os.name == 'nt':
            handler = logging.handlers.NTEventLogHandler("%s %s" % (release.description, release.version))
        elif platform.system() == 'Darwin':
            handler = logging.handlers.SysLogHandler('/var/run/log')
        else:
            handler = logging.handlers.SysLogHandler('/dev/log')
        # Syslog supplies its own timestamp, so use a shorter format.
        format = '%s %s' % (release.description, release.version) \
                + ':%(dbname)s:%(levelname)s:%(name)s:%(message)s'

    elif tools.config['logfile']:
        # LogFile Handler
        logf = tools.config['logfile']
        try:
            # We check we have the right location for the log files
            dirname = os.path.dirname(logf)
            if dirname and not os.path.isdir(dirname):
                os.makedirs(dirname)
            if tools.config['logrotate'] is not False:
                handler = logging.handlers.TimedRotatingFileHandler(filename=logf, when='D', interval=1, backupCount=30)
            elif os.name == 'posix':
                handler = logging.handlers.WatchedFileHandler(logf)
            else:
                handler = logging.FileHandler(logf)
        except Exception:
            sys.stderr.write("ERROR: couldn't create the logfile directory. Logging to the standard output.\n")
            handler = logging.StreamHandler(sys.stdout)
    else:
        # Normal Handler on standard output
        handler = logging.StreamHandler(sys.stdout)

    # Check that handler.stream has a fileno() method: when running OpenERP
    # behind Apache with mod_wsgi, handler.stream will have type mod_wsgi.Log,
    # which has no fileno() method. (mod_wsgi.Log is what is being bound to
    # sys.stderr when the logging.StreamHandler is being constructed above.)
    def is_a_tty(stream):
        return hasattr(stream, 'fileno') and os.isatty(stream.fileno())

    if os.name == 'posix' and isinstance(handler, logging.StreamHandler) and is_a_tty(handler.stream):
        formatter = ColoredFormatter(format)
    else:
        formatter = DBFormatter(format)
    handler.setFormatter(formatter)

    logging.getLogger().addHandler(handler)

    if tools.config['log_db']:
        # --log-db-level accepts either a symbolic name or a numeric level.
        db_levels = {
            'debug': logging.DEBUG,
            'info': logging.INFO,
            'warning': logging.WARNING,
            'error': logging.ERROR,
            'critical': logging.CRITICAL,
        }
        postgresqlHandler = PostgreSQLHandler()
        postgresqlHandler.setLevel(int(db_levels.get(tools.config['log_db_level'], tools.config['log_db_level'])))
        logging.getLogger().addHandler(postgresqlHandler)

    # Configure loggers levels
    pseudo_config = PSEUDOCONFIG_MAPPER.get(tools.config['log_level'], [])

    logconfig = tools.config['log_handler']

    # Later entries override earlier ones ("logger_name:LEVEL" strings).
    logging_configurations = DEFAULT_LOG_CONFIGURATION + pseudo_config + logconfig
    for logconfig_item in logging_configurations:
        loggername, level = logconfig_item.split(':')
        level = getattr(logging, level, logging.INFO)
        logger = logging.getLogger(loggername)
        logger.setLevel(level)

    for logconfig_item in logging_configurations:
        _logger.debug('logger level set: "%s"', logconfig_item)
# Baseline per-logger levels, applied before any --log-level pseudo-config.
DEFAULT_LOG_CONFIGURATION = [
    'openerp.workflow.workitem:WARNING',
    'openerp.http.rpc.request:INFO',
    'openerp.http.rpc.response:INFO',
    'openerp.addons.web.http:INFO',
    'openerp.sql_db:INFO',
    ':INFO',
]
# Maps each --log-level shortcut to the logger:LEVEL entries it implies.
PSEUDOCONFIG_MAPPER = {
    'debug_rpc_answer': ['openerp:DEBUG','openerp.http.rpc.request:DEBUG', 'openerp.http.rpc.response:DEBUG'],
    'debug_rpc': ['openerp:DEBUG','openerp.http.rpc.request:DEBUG'],
    'debug': ['openerp:DEBUG'],
    'debug_sql': ['openerp.sql_db:DEBUG'],
    'info': [],
    'warn': ['openerp:WARNING', 'werkzeug:WARNING'],
    'error': ['openerp:ERROR', 'werkzeug:ERROR'],
    'critical': ['openerp:CRITICAL', 'werkzeug:CRITICAL'],
}
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
LingxiaoJIA/gem5 | ext/ply/test/yacc_badprec3.py | 174 | 1530 | # -----------------------------------------------------------------------------
# yacc_badprec3.py
#
# Bad precedence
# -----------------------------------------------------------------------------
import sys
if ".." not in sys.path: sys.path.insert(0,"..")
import ply.yacc as yacc
from calclex import tokens
# Parsing rules
# NOTE: this fixture is deliberately broken -- 'MINUS' appears in two
# precedence levels, which is the error condition this test exercises
# (hence the file name yacc_badprec3). Do not "fix" the duplication.
precedence = (
    ('left','PLUS','MINUS'),
    ('left','TIMES','DIVIDE','MINUS'),
    ('right','UMINUS'),
    )

# dictionary of names
names = { }
def p_statement_assign(t):
    'statement : NAME EQUALS expression'
    # Store the evaluated expression under the given name.
    names[t[1]] = t[3]
def p_statement_expr(t):
    'statement : expression'
    # A bare expression statement prints its value.
    print(t[1])
def p_expression_binop(t):
    '''expression : expression PLUS expression
                  | expression MINUS expression
                  | expression TIMES expression
                  | expression DIVIDE expression'''
    # t[2] is the operator token; t[1] and t[3] are the operand values.
    # BUG FIX: the division branch tested t[3] (the right operand) instead
    # of the operator t[2], so '/' expressions silently produced None.
    if t[2] == '+':
        t[0] = t[1] + t[3]
    elif t[2] == '-':
        t[0] = t[1] - t[3]
    elif t[2] == '*':
        t[0] = t[1] * t[3]
    elif t[2] == '/':
        t[0] = t[1] / t[3]
def p_expression_uminus(t):
    'expression : MINUS expression %prec UMINUS'
    # %prec UMINUS gives unary minus its own precedence level.
    t[0] = -t[2]
def p_expression_group(t):
    'expression : LPAREN expression RPAREN'
    # Parenthesized expression: pass the inner value through.
    t[0] = t[2]
def p_expression_number(t):
    'expression : NUMBER'
    # Numeric literal: the token value is the result.
    t[0] = t[1]
def p_expression_name(t):
    'expression : NAME'
    # Look the name up in the global symbol table; undefined names
    # evaluate to 0 after printing a diagnostic.
    try:
        t[0] = names[t[1]]
    except LookupError:
        print("Undefined name '%s'" % t[1])
        t[0] = 0
def p_error(t):
    # Called by the parser on a token that matches no grammar rule.
    print("Syntax error at '%s'" % t.value)

# Building the parser is expected to complain about the duplicated
# 'MINUS' precedence entry declared above.
yacc.yacc()
| bsd-3-clause |
hgl888/chromium-crosswalk-efl | tools/perf/page_sets/page_cycler/moz2.py | 34 | 3545 | # Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from telemetry.page import page as page_module
from telemetry.page import page_set as page_set_module
class Moz2Page(page_module.Page):
  """A plain page-cycler page: no interaction beyond loading the URL."""

  def __init__(self, url, page_set):
    super(Moz2Page, self).__init__(url=url, page_set=page_set)
class Moz2PageSet(page_set_module.PageSet):

  """ Description: Moz2 page_cycler benchmark """

  def __init__(self):
    super(Moz2PageSet, self).__init__(
      serving_dirs=set(['../../../../data/page_cycler/moz2']),
      bucket=page_set_module.PARTNER_BUCKET)

    # Every page lives under the same local data directory; build the
    # file:// URLs from the bare site names instead of repeating the
    # prefix 41 times.
    prefix = 'file://../../../../data/page_cycler/moz2/'
    site_names = [
      'bugzilla.mozilla.org',
      'espn.go.com',
      'home.netscape.com',
      'hotwired.lycos.com',
      'lxr.mozilla.org',
      'my.netscape.com',
      'news.cnet.com',
      'slashdot.org',
      'vanilla-page',
      'web.icq.com',
      'www.altavista.com',
      'www.amazon.com',
      'www.aol.com',
      'www.apple.com',
      'www.cnn.com',
      'www.compuserve.com',
      'www.digitalcity.com',
      'www.ebay.com',
      'www.excite.com',
      'www.expedia.com',
      'www.google.com',
      'www.iplanet.com',
      'www.mapquest.com',
      'www.microsoft.com',
      'www.moviefone.com',
      'www.msn.com',
      'www.msnbc.com',
      'www.nytimes.com',
      'www.nytimes.com_Table',
      'www.quicken.com',
      'www.spinner.com',
      'www.sun.com',
      'www.time.com',
      'www.tomshardware.com',
      'www.travelocity.com',
      'www.voodooextreme.com',
      'www.w3.org_DOML2Core',
      'www.wired.com',
      'www.yahoo.com',
      'www.zdnet.com',
      'www.zdnet.com_Gamespot.com',
    ]

    for name in site_names:
      self.AddPage(Moz2Page(prefix + name + '/', self))
| bsd-3-clause |
rahushen/ansible | lib/ansible/modules/notification/sendgrid.py | 20 | 8766 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# (c) 2015, Matt Makai <matthew.makai@gmail.com>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
version_added: "2.0"
module: sendgrid
short_description: Sends an email with the SendGrid API
description:
- "Sends an email with a SendGrid account through their API, not through
the SMTP service."
notes:
- "This module is non-idempotent because it sends an email through the
external API. It is idempotent only in the case that the module fails."
- "Like the other notification modules, this one requires an external
dependency to work. In this case, you'll need an active SendGrid
account."
- "In order to use api_key, cc, bcc, attachments, from_name, html_body, headers
you must pip install sendgrid"
- "since 2.2 username and password are not required if you supply an api_key"
requirements:
- sendgrid python library
options:
username:
description:
- username for logging into the SendGrid account.
- Since 2.2 it is only required if api_key is not supplied.
required: false
default: null
password:
description:
- password that corresponds to the username
- Since 2.2 it is only required if api_key is not supplied.
required: false
default: null
from_address:
description:
- the address in the "from" field for the email
required: true
to_addresses:
description:
- a list with one or more recipient email addresses
required: true
subject:
description:
- the desired subject for the email
required: true
api_key:
description:
- sendgrid API key to use instead of username/password
version_added: 2.2
required: false
default: null
cc:
description:
- a list of email addresses to cc
version_added: 2.2
required: false
default: null
bcc:
description:
- a list of email addresses to bcc
version_added: 2.2
required: false
default: null
attachments:
description:
- a list of relative or explicit paths of files you want to attach (7MB limit as per SendGrid docs)
version_added: 2.2
required: false
default: null
from_name:
description:
- the name you want to appear in the from field, i.e 'John Doe'
version_added: 2.2
required: false
default: null
html_body:
description:
- whether the body is html content that should be rendered
version_added: 2.2
required: false
default: false
headers:
description:
- a dict to pass on as headers
version_added: 2.2
required: false
default: null
author: "Matt Makai (@makaimc)"
'''
EXAMPLES = '''
# send an email to a single recipient that the deployment was successful
- sendgrid:
username: "{{ sendgrid_username }}"
password: "{{ sendgrid_password }}"
from_address: "ansible@mycompany.com"
to_addresses:
- "ops@mycompany.com"
subject: "Deployment success."
body: "The most recent Ansible deployment was successful."
delegate_to: localhost
# send an email to more than one recipient that the build failed
- sendgrid:
username: "{{ sendgrid_username }}"
password: "{{ sendgrid_password }}"
from_address: "build@mycompany.com"
to_addresses:
- "ops@mycompany.com"
- "devteam@mycompany.com"
subject: "Build failure!."
body: "Unable to pull source repository from Git server."
delegate_to: localhost
'''
# =======================================
# sendgrid module support methods
#
import os

try:
    import sendgrid
    HAS_SENDGRID = True
except ImportError:
    # The library is optional; without it only the raw HTTP API path
    # (username/password, plain-text body) is available.
    HAS_SENDGRID = False
def post_sendgrid_api(module, username, password, from_address, to_addresses,
        subject, body, api_key=None, cc=None, bcc=None, attachments=None,
        html_body=False, from_name=None, headers=None):
    """Send an email through SendGrid.

    Without the sendgrid library, posts directly to the HTTP API
    (username/password, plain-text body only) and returns fetch_url's
    (response, info) pair. With the library, the richer feature set
    (api_key, cc/bcc, attachments, headers, display name, HTML body) is
    available and the client's (status, message) pair is returned.
    """
    if not HAS_SENDGRID:
        SENDGRID_URI = "https://api.sendgrid.com/api/mail.send.json"
        AGENT = "Ansible"
        data = {'api_user': username, 'api_key': password,
                'from': from_address, 'subject': subject, 'text': body}
        encoded_data = urlencode(data)
        # The to[] parameter repeats once per recipient.
        to_addresses_api = ''
        for recipient in to_addresses:
            recipient = to_bytes(recipient, errors='surrogate_or_strict')
            to_addresses_api += '&to[]=%s' % recipient
        encoded_data += to_addresses_api

        headers = {'User-Agent': AGENT,
                   'Content-type': 'application/x-www-form-urlencoded',
                   'Accept': 'application/json'}
        return fetch_url(module, SENDGRID_URI, data=encoded_data, headers=headers, method='POST')
    else:
        if api_key:
            sg = sendgrid.SendGridClient(api_key)
        else:
            sg = sendgrid.SendGridClient(username, password)

        message = sendgrid.Mail()
        message.set_subject(subject)

        for recip in to_addresses:
            message.add_to(recip)

        if cc:
            for recip in cc:
                message.add_cc(recip)
        if bcc:
            for recip in bcc:
                message.add_bcc(recip)

        if headers:
            message.set_headers(headers)

        if attachments:
            for f in attachments:
                name = os.path.basename(f)
                message.add_attachment(name, f)

        if from_name:
            # BUG FIX: the RFC 5322 name-addr form is "Display Name <addr>";
            # the original built "Display Name <addr." (a period instead of
            # the closing angle bracket), yielding an invalid From header.
            message.set_from('%s <%s>' % (from_name, from_address))
        else:
            message.set_from(from_address)

        if html_body:
            message.set_html(body)
        else:
            message.set_text(body)

        return sg.send(message)
# =======================================
# Main
#
def main():
    """Ansible entry point: validate parameters and send the email."""
    module = AnsibleModule(
        argument_spec=dict(
            username=dict(required=False),
            password=dict(required=False, no_log=True),
            api_key=dict(required=False, no_log=True),
            bcc=dict(required=False, type='list'),
            cc=dict(required=False, type='list'),
            headers=dict(required=False, type='dict'),
            from_address=dict(required=True),
            from_name=dict(required=False),
            to_addresses=dict(required=True, type='list'),
            subject=dict(required=True),
            body=dict(required=True),
            html_body=dict(required=False, default=False, type='bool'),
            attachments=dict(required=False, type='list')
        ),
        supports_check_mode=True,
        mutually_exclusive=[
            ['api_key', 'password'],
            ['api_key', 'username']
        ],
        required_together=[['username', 'password']],
    )

    username = module.params['username']
    password = module.params['password']
    api_key = module.params['api_key']
    bcc = module.params['bcc']
    cc = module.params['cc']
    headers = module.params['headers']
    from_name = module.params['from_name']
    from_address = module.params['from_address']
    to_addresses = module.params['to_addresses']
    subject = module.params['subject']
    body = module.params['body']
    html_body = module.params['html_body']
    attachments = module.params['attachments']

    # These options only work through the sendgrid library, not the raw
    # HTTP fallback. BUG FIX: html_body defaults to False (never None), so
    # including it in the 'is not None' test made this check always fire
    # and the module could never run without the library; test it by
    # truth value instead.
    sendgrid_lib_args = [api_key, bcc, cc, headers, from_name, attachments]

    if (any(lib_arg is not None for lib_arg in sendgrid_lib_args) or html_body) and not HAS_SENDGRID:
        module.fail_json(msg='You must install the sendgrid python library if you want to use any of the following arguments: '
                             'api_key, bcc, cc, headers, from_name, html_body, attachments')

    response, info = post_sendgrid_api(module, username, password,
                                       from_address, to_addresses, subject, body, attachments=attachments,
                                       bcc=bcc, cc=cc, headers=headers, html_body=html_body, api_key=api_key)

    # The two transport paths report status differently.
    if not HAS_SENDGRID:
        if info['status'] != 200:
            module.fail_json(msg="unable to send email through SendGrid API: %s" % info['msg'])
    else:
        if response != 200:
            module.fail_json(msg="unable to send email through SendGrid API: %s" % info['message'])

    module.exit_json(msg=subject, changed=False)


if __name__ == '__main__':
    main()
| gpl-3.0 |
VagrantApe/flaskMicroblog | venv/lib/python2.7/site-packages/whoosh/filedb/structfile.py | 96 | 12453 | # Copyright 2009 Matt Chaput. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# 1. Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
#
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY MATT CHAPUT ``AS IS'' AND ANY EXPRESS OR
# IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
# EVENT SHALL MATT CHAPUT OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA,
# OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE,
# EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# The views and conclusions contained in the software and documentation are
# those of the authors and should not be interpreted as representing official
# policies, either expressed or implied, of Matt Chaput.
from array import array
from copy import copy
from struct import calcsize
from whoosh.compat import BytesIO, bytes_type
from whoosh.compat import dump as dump_pickle
from whoosh.compat import load as load_pickle
from whoosh.compat import array_frombytes, array_tobytes
from whoosh.system import _INT_SIZE, _SHORT_SIZE, _FLOAT_SIZE, _LONG_SIZE
from whoosh.system import IS_LITTLE
from whoosh.system import pack_byte, unpack_byte, pack_sbyte, unpack_sbyte
from whoosh.system import pack_ushort, unpack_ushort
from whoosh.system import pack_ushort_le, unpack_ushort_le
from whoosh.system import pack_int, unpack_int, pack_uint, unpack_uint
from whoosh.system import pack_uint_le, unpack_uint_le
from whoosh.system import pack_long, unpack_long, pack_ulong, unpack_ulong
from whoosh.system import pack_float, unpack_float
from whoosh.util.varints import varint, read_varint
from whoosh.util.varints import signed_varint, decode_signed_varint
# Number of bytes occupied by each struct typecode (e.g. "i" -> 4).
_SIZEMAP = dict((typecode, calcsize(typecode)) for typecode in "bBiIhHqQf")
# Maps sys.byteorder names to struct byte-order prefix characters.
_ORDERMAP = {"little": "<", "big": ">"}
# (name, struct typecode) pairs for the numeric types handled below.
_types = (("sbyte", "b"), ("ushort", "H"), ("int", "i"),
          ("long", "q"), ("float", "f"))
# Structured-file wrapper classes
class StructFile(object):
    """Returns a "structured file" object that wraps the given file object and
    provides numerous additional methods for writing structured data, such as
    "write_varint" and "write_long".
    """

    def __init__(self, fileobj, name=None, onclose=None):
        self.file = fileobj
        self._name = name
        self.onclose = onclose
        self.is_closed = False

        # "Real" files expose an OS-level descriptor; array I/O can then use
        # the faster tofile()/fromfile() paths.
        self.is_real = hasattr(fileobj, "fileno")
        if self.is_real:
            self.fileno = fileobj.fileno

    def __repr__(self):
        return "%s(%r)" % (self.__class__.__name__, self._name)

    def __str__(self):
        return self._name

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        self.close()

    def __iter__(self):
        return iter(self.file)

    def raw_file(self):
        return self.file

    def read(self, *args, **kwargs):
        return self.file.read(*args, **kwargs)

    def readline(self, *args, **kwargs):
        return self.file.readline(*args, **kwargs)

    def write(self, *args, **kwargs):
        return self.file.write(*args, **kwargs)

    def tell(self, *args, **kwargs):
        return self.file.tell(*args, **kwargs)

    def seek(self, *args, **kwargs):
        return self.file.seek(*args, **kwargs)

    def truncate(self, *args, **kwargs):
        return self.file.truncate(*args, **kwargs)

    def flush(self):
        """Flushes the buffer of the wrapped file. This is a no-op if the
        wrapped file does not have a flush method.
        """

        if hasattr(self.file, "flush"):
            self.file.flush()

    def close(self):
        """Closes the wrapped file.

        Calls the ``onclose`` callback (if any) first, then closes the
        underlying file. Raises an Exception if already closed.
        """

        if self.is_closed:
            raise Exception("This file is already closed")
        if self.onclose:
            self.onclose(self)
        if hasattr(self.file, "close"):
            self.file.close()
        self.is_closed = True

    def subset(self, offset, length, name=None):
        """Returns a new StructFile that exposes only ``length`` bytes of this
        file starting at ``offset``.
        """

        from whoosh.filedb.compound import SubFile

        name = name or self._name
        return StructFile(SubFile(self.file, offset, length), name=name)

    def write_string(self, s):
        """Writes a string to the wrapped file. This method writes the length
        of the string first, so you can read the string back without having to
        know how long it was.
        """

        self.write_varint(len(s))
        self.write(s)

    def write_string2(self, s):
        # Length prefix as an unsigned 16-bit int.
        self.write(pack_ushort(len(s)) + s)

    def write_string4(self, s):
        # Length prefix as a 32-bit int.
        self.write(pack_int(len(s)) + s)

    def read_string(self):
        """Reads a string from the wrapped file.
        """

        return self.read(self.read_varint())

    def read_string2(self):
        l = self.read_ushort()
        return self.read(l)

    def read_string4(self):
        l = self.read_int()
        return self.read(l)

    def get_string2(self, pos):
        # Random-access read: returns (string, position after the string).
        l = self.get_ushort(pos)
        base = pos + _SHORT_SIZE
        return self.get(base, l), base + l

    def get_string4(self, pos):
        # Random-access read: returns (string, position after the string).
        l = self.get_int(pos)
        base = pos + _INT_SIZE
        return self.get(base, l), base + l

    def skip_string(self):
        l = self.read_varint()
        self.seek(l, 1)

    def write_varint(self, i):
        """Writes a variable-length unsigned integer to the wrapped file.
        """

        self.write(varint(i))

    def write_svarint(self, i):
        """Writes a variable-length signed integer to the wrapped file.
        """

        self.write(signed_varint(i))

    def read_varint(self):
        """Reads a variable-length encoded unsigned integer from the wrapped
        file.
        """

        return read_varint(self.read)

    def read_svarint(self):
        """Reads a variable-length encoded signed integer from the wrapped
        file.
        """

        return decode_signed_varint(read_varint(self.read))

    def write_tagint(self, i):
        """Writes a sometimes-compressed unsigned integer to the wrapped file.
        This is similar to the varint methods but uses a less compressed but
        faster format.
        """

        # Store numbers 0-253 in one byte. Byte 254 means "an unsigned 16-bit
        # int follows." Byte 255 means "An unsigned 32-bit int follows."
        # BUGFIX: use pack_byte() and bytes literals instead of chr() and str
        # concatenation, so the data written is bytes on Python 3 as well
        # (chr() returns str there, which binary files reject).
        if i <= 253:
            self.write(pack_byte(i))
        elif i <= 65535:
            self.write(b"\xFE" + pack_ushort(i))
        else:
            self.write(b"\xFF" + pack_uint(i))

    def read_tagint(self):
        """Reads a sometimes-compressed unsigned integer from the wrapped file.
        This is similar to the varint methods but uses a less compressed but
        faster format.
        """

        tb = ord(self.read(1))
        if tb == 254:
            return self.read_ushort()
        elif tb == 255:
            return self.read_uint()
        else:
            return tb

    def write_byte(self, n):
        """Writes a single byte to the wrapped file, shortcut for
        ``file.write(pack_byte(n))``.
        """

        self.write(pack_byte(n))

    def read_byte(self):
        return ord(self.read(1))

    def write_pickle(self, obj, protocol=-1):
        """Writes a pickled representation of obj to the wrapped file.
        """

        dump_pickle(obj, self.file, protocol)

    def read_pickle(self):
        """Reads a pickled object from the wrapped file.
        """

        return load_pickle(self.file)

    def write_sbyte(self, n):
        self.write(pack_sbyte(n))

    def write_int(self, n):
        self.write(pack_int(n))

    def write_uint(self, n):
        self.write(pack_uint(n))

    def write_uint_le(self, n):
        self.write(pack_uint_le(n))

    def write_ushort(self, n):
        self.write(pack_ushort(n))

    def write_ushort_le(self, n):
        self.write(pack_ushort_le(n))

    def write_long(self, n):
        self.write(pack_long(n))

    def write_ulong(self, n):
        self.write(pack_ulong(n))

    def write_float(self, n):
        self.write(pack_float(n))

    def write_array(self, arry):
        # Arrays are stored big-endian; byteswap a *copy* on little-endian
        # machines so the caller's array is left untouched.
        if IS_LITTLE:
            arry = copy(arry)
            arry.byteswap()
        if self.is_real:
            arry.tofile(self.file)
        else:
            self.write(array_tobytes(arry))

    def read_sbyte(self):
        return unpack_sbyte(self.read(1))[0]

    def read_int(self):
        return unpack_int(self.read(_INT_SIZE))[0]

    def read_uint(self):
        return unpack_uint(self.read(_INT_SIZE))[0]

    def read_uint_le(self):
        return unpack_uint_le(self.read(_INT_SIZE))[0]

    def read_ushort(self):
        return unpack_ushort(self.read(_SHORT_SIZE))[0]

    def read_ushort_le(self):
        return unpack_ushort_le(self.read(_SHORT_SIZE))[0]

    def read_long(self):
        return unpack_long(self.read(_LONG_SIZE))[0]

    def read_ulong(self):
        return unpack_ulong(self.read(_LONG_SIZE))[0]

    def read_float(self):
        return unpack_float(self.read(_FLOAT_SIZE))[0]

    def read_array(self, typecode, length):
        a = array(typecode)
        if self.is_real:
            a.fromfile(self.file, length)
        else:
            array_frombytes(a, self.read(length * _SIZEMAP[typecode]))
        # Stored big-endian; swap back on little-endian machines.
        if IS_LITTLE:
            a.byteswap()
        return a

    def get(self, position, length):
        # Random-access read: seek to ``position`` and read ``length`` bytes.
        self.seek(position)
        return self.read(length)

    def get_byte(self, position):
        return unpack_byte(self.get(position, 1))[0]

    def get_sbyte(self, position):
        return unpack_sbyte(self.get(position, 1))[0]

    def get_int(self, position):
        return unpack_int(self.get(position, _INT_SIZE))[0]

    def get_uint(self, position):
        return unpack_uint(self.get(position, _INT_SIZE))[0]

    def get_ushort(self, position):
        return unpack_ushort(self.get(position, _SHORT_SIZE))[0]

    def get_long(self, position):
        return unpack_long(self.get(position, _LONG_SIZE))[0]

    def get_ulong(self, position):
        return unpack_ulong(self.get(position, _LONG_SIZE))[0]

    def get_float(self, position):
        return unpack_float(self.get(position, _FLOAT_SIZE))[0]

    def get_array(self, position, typecode, length):
        self.seek(position)
        return self.read_array(typecode, length)
class BufferFile(StructFile):
    """A StructFile backed by an in-memory bytes buffer.

    Random-access reads (``get*``) slice the buffer directly instead of
    seeking in the wrapped stream.
    """

    def __init__(self, buf, name=None, onclose=None):
        self._buf = buf
        self._name = name
        self.onclose = onclose
        # Wrap the buffer so the inherited sequential-read methods work too.
        self.file = BytesIO(buf)
        self.is_real = False
        self.is_closed = False

    def subset(self, position, length, name=None):
        # A subset of a buffer is simply another BufferFile over the slice.
        return BufferFile(self.get(position, length),
                          name=name or self._name)

    def get(self, position, length):
        end = position + length
        return bytes_type(self._buf[position:end])

    def get_array(self, position, typecode, length):
        nbytes = length * _SIZEMAP[typecode]
        arry = array(typecode)
        array_frombytes(arry, self.get(position, nbytes))
        # Stored big-endian; swap on little-endian machines.
        if IS_LITTLE:
            arry.byteswap()
        return arry
class ChecksumFile(StructFile):
    """A StructFile proxy that maintains a running CRC32 checksum of every
    byte read from or written to the wrapped file.

    Seeking is disallowed because it would make the running checksum
    meaningless.
    """

    def __init__(self, *args, **kwargs):
        StructFile.__init__(self, *args, **kwargs)
        # Import zlib explicitly instead of via __import__() -- clearer and
        # behaviorally identical; done locally to keep the dependency scoped
        # to checksumming code paths.
        from zlib import crc32

        self._check = 0
        self._crc32 = crc32

    def __iter__(self):
        # Fold every line read through iteration into the checksum.
        for line in self.file:
            self._check = self._crc32(line, self._check)
            yield line

    def seek(self, *args):
        raise Exception("Cannot seek on a ChecksumFile")

    def read(self, *args, **kwargs):
        b = self.file.read(*args, **kwargs)
        self._check = self._crc32(b, self._check)
        return b

    def write(self, b):
        self._check = self._crc32(b, self._check)
        self.file.write(b)

    def checksum(self):
        """Returns the current checksum as an unsigned 32-bit integer."""
        # zlib.crc32 can return a signed value on Python 2; mask to the
        # unsigned 32-bit range for a stable, portable result.
        return self._check & 0xffffffff
| bsd-3-clause |
Parallel-in-Time/pySDC | pySDC/implementations/problem_classes/AllenCahn_MPIFFT.py | 1 | 10392 | import numpy as np
from mpi4py import MPI
from mpi4py_fft import PFFT
from pySDC.core.Errors import ParameterError, ProblemError
from pySDC.core.Problem import ptype
from pySDC.implementations.datatype_classes.mesh import mesh, imex_mesh
from mpi4py_fft import newDistArray
class allencahn_imex(ptype):
    """
    Example implementing Allen-Cahn equation in 2-3D using mpi4py-fft for solving linear parts, IMEX time-stepping

    mpi4py-fft: https://mpi4py-fft.readthedocs.io/en/latest/

    Attributes:
        fft: fft object
        X: grid coordinates in real space
        K2: Laplace operator in spectral space
        dx: mesh width in x direction
        dy: mesh width in y direction
    """

    def __init__(self, problem_params, dtype_u=mesh, dtype_f=imex_mesh):
        """
        Initialization routine

        Args:
            problem_params (dict): custom parameters for the example
            dtype_u: fft data type (will be passed to parent class)
            dtype_f: fft data type with implicit and explicit parts (will be passed to parent class)
        """
        # Fill in optional parameters with their defaults
        if 'L' not in problem_params:
            problem_params['L'] = 1.0
        if 'init_type' not in problem_params:
            problem_params['init_type'] = 'circle'
        if 'comm' not in problem_params:
            problem_params['comm'] = None
        if 'dw' not in problem_params:
            problem_params['dw'] = 0.0

        # these parameters will be used later, so assert their existence
        essential_keys = ['nvars', 'eps', 'L', 'radius', 'dw', 'spectral']
        for key in essential_keys:
            if key not in problem_params:
                msg = 'need %s to instantiate problem, only got %s' % (key, str(problem_params.keys()))
                raise ParameterError(msg)

        if not (isinstance(problem_params['nvars'], tuple) and len(problem_params['nvars']) > 1):
            raise ProblemError('Need at least two dimensions')

        # Creating FFT structure
        # NOTE: use np.float64 instead of the deprecated alias np.float
        # (removed in NumPy 1.24); both denote the same C double type.
        ndim = len(problem_params['nvars'])
        axes = tuple(range(ndim))
        self.fft = PFFT(problem_params['comm'], list(problem_params['nvars']), axes=axes, dtype=np.float64,
                        collapse=True)

        # get test data to figure out type and dimensions
        tmp_u = newDistArray(self.fft, problem_params['spectral'])

        # invoke super init, passing the communicator and the local dimensions as init
        super(allencahn_imex, self).__init__(init=(tmp_u.shape, problem_params['comm'], tmp_u.dtype),
                                             dtype_u=dtype_u, dtype_f=dtype_f, params=problem_params)

        L = np.array([self.params.L] * ndim, dtype=float)

        # get local mesh (physical coordinates of this rank's slab)
        X = np.ogrid[self.fft.local_slice(False)]
        N = self.fft.global_shape()
        for i in range(len(N)):
            X[i] = (X[i] * L[i] / N[i])
        self.X = [np.broadcast_to(x, self.fft.shape(False)) for x in X]

        # get local wavenumbers and Laplace operator
        s = self.fft.local_slice()
        N = self.fft.global_shape()
        # last axis uses rfftfreq because the real transform stores only
        # non-negative frequencies there
        k = [np.fft.fftfreq(n, 1. / n).astype(int) for n in N[:-1]]
        k.append(np.fft.rfftfreq(N[-1], 1. / N[-1]).astype(int))
        K = [ki[si] for ki, si in zip(k, s)]
        Ks = np.meshgrid(*K, indexing='ij', sparse=True)
        Lp = 2 * np.pi / L
        for i in range(ndim):
            Ks[i] = (Ks[i] * Lp[i]).astype(float)
        K = [np.broadcast_to(k, self.fft.shape(True)) for k in Ks]
        K = np.array(K).astype(float)
        # K2 = |k|^2, i.e. the (negative) Laplacian in spectral space
        self.K2 = np.sum(K * K, 0, dtype=float)

        # Need this for diagnostics
        self.dx = self.params.L / problem_params['nvars'][0]
        self.dy = self.params.L / problem_params['nvars'][1]

    def eval_f(self, u, t):
        """
        Routine to evaluate the RHS

        Args:
            u (dtype_u): current values
            t (float): current time

        Returns:
            dtype_f: the RHS (implicit part: Laplacian; explicit part: nonlinearity)
        """
        f = self.dtype_f(self.init)

        if self.params.spectral:
            # u lives in spectral space: Laplacian is a pointwise multiply
            f.impl = -self.K2 * u
            if self.params.eps > 0:
                # nonlinearity must be evaluated in real space
                tmp = self.fft.backward(u)
                tmpf = - 2.0 / self.params.eps ** 2 * tmp * (1.0 - tmp) * (1.0 - 2.0 * tmp) - \
                    6.0 * self.params.dw * tmp * (1.0 - tmp)
                f.expl[:] = self.fft.forward(tmpf)
        else:
            # u lives in real space: go through spectral space for the Laplacian
            u_hat = self.fft.forward(u)
            lap_u_hat = -self.K2 * u_hat
            f.impl[:] = self.fft.backward(lap_u_hat, f.impl)
            if self.params.eps > 0:
                f.expl = - 2.0 / self.params.eps ** 2 * u * (1.0 - u) * (1.0 - 2.0 * u) - \
                    6.0 * self.params.dw * u * (1.0 - u)

        return f

    def solve_system(self, rhs, factor, u0, t):
        """
        Simple FFT solver for the diffusion part

        Args:
            rhs (dtype_f): right-hand side for the linear system
            factor (float) : abbrev. for the node-to-node stepsize (or any other factor required)
            u0 (dtype_u): initial guess for the iterative solver (not used here so far)
            t (float): current time (e.g. for time-dependent BCs)

        Returns:
            dtype_u: solution as mesh
        """
        # (I - factor * Laplacian) is diagonal in spectral space
        if self.params.spectral:
            me = rhs / (1.0 + factor * self.K2)
        else:
            me = self.dtype_u(self.init)
            rhs_hat = self.fft.forward(rhs)
            rhs_hat /= (1.0 + factor * self.K2)
            me[:] = self.fft.backward(rhs_hat)

        return me

    def u_exact(self, t):
        """
        Routine to compute the exact solution at time t

        Args:
            t (float): current time

        Returns:
            dtype_u: exact solution
        """
        assert t == 0, 'ERROR: u_exact only valid for t=0'

        me = self.dtype_u(self.init, val=0.0)
        if self.params.init_type == 'circle':
            # single circular interface centered in the domain
            r2 = (self.X[0] - 0.5) ** 2 + (self.X[1] - 0.5) ** 2
            if self.params.spectral:
                tmp = 0.5 * (1.0 + np.tanh((self.params.radius - np.sqrt(r2)) / (np.sqrt(2) * self.params.eps)))
                me[:] = self.fft.forward(tmp)
            else:
                me[:] = 0.5 * (1.0 + np.tanh((self.params.radius - np.sqrt(r2)) / (np.sqrt(2) * self.params.eps)))
        elif self.params.init_type == 'circle_rand':
            ndim = len(me.shape)
            L = int(self.params.L)
            # get random radii for circles/spheres (fixed seed for reproducibility)
            np.random.seed(1)
            lbound = 3.0 * self.params.eps
            ubound = 0.5 - self.params.eps
            rand_radii = (ubound - lbound) * np.random.random_sample(size=tuple([L] * ndim)) + lbound
            # distribute circles/spheres (only the 2D case is implemented)
            tmp = newDistArray(self.fft, False)
            if ndim == 2:
                for i in range(0, L):
                    for j in range(0, L):
                        # build radius
                        r2 = (self.X[0] + i - L + 0.5) ** 2 + (self.X[1] + j - L + 0.5) ** 2
                        # add this blob, shifted by 1 to avoid issues with adding up negative contributions
                        tmp += np.tanh((rand_radii[i, j] - np.sqrt(r2)) / (np.sqrt(2) * self.params.eps)) + 1
            # normalize to [0,1]
            tmp *= 0.5
            assert np.all(tmp <= 1.0)
            if self.params.spectral:
                me[:] = self.fft.forward(tmp)
            else:
                me[:] = tmp[:]
        else:
            raise NotImplementedError('type of initial value not implemented, got %s' % self.params.init_type)

        return me
class allencahn_imex_timeforcing(allencahn_imex):
    """
    Example implementing Allen-Cahn equation in 2-3D using mpi4py-fft for solving linear parts, IMEX time-stepping,
    time-dependent forcing
    """

    def eval_f(self, u, t):
        """
        Routine to evaluate the RHS

        The driving-force coefficient dw is chosen at each evaluation so that
        the global sum of the RHS (without forcing) is balanced by the forcing
        term, i.e. dw = sum(RHS) / sum(6 u (1 - u)).

        Args:
            u (dtype_u): current values
            t (float): current time

        Returns:
            dtype_f: the RHS
        """
        f = self.dtype_f(self.init)

        if self.params.spectral:
            f.impl = -self.K2 * u

            tmp = newDistArray(self.fft, False)
            tmp[:] = self.fft.backward(u, tmp)

            if self.params.eps > 0:
                tmpf = -2.0 / self.params.eps ** 2 * tmp * (1.0 - tmp) * (1.0 - 2.0 * tmp)
            else:
                tmpf = self.dtype_f(self.init, val=0.0)

            # build sum over RHS without driving force
            Rt_local = float(np.sum(self.fft.backward(f.impl) + tmpf))
            if self.params.comm is not None:
                Rt_global = self.params.comm.allreduce(sendobj=Rt_local, op=MPI.SUM)
            else:
                Rt_global = Rt_local

            # build sum over driving force term
            Ht_local = float(np.sum(6.0 * tmp * (1.0 - tmp)))
            if self.params.comm is not None:
                Ht_global = self.params.comm.allreduce(sendobj=Ht_local, op=MPI.SUM)
            else:
                # BUGFIX: the serial fallback must use Ht_local here
                # (previously assigned Rt_local, corrupting dw when comm is None)
                Ht_global = Ht_local

            # add/substract time-dependent driving force
            if Ht_global != 0.0:
                dw = Rt_global / Ht_global
            else:
                dw = 0.0

            tmpf -= 6.0 * dw * tmp * (1.0 - tmp)
            f.expl[:] = self.fft.forward(tmpf)

        else:
            u_hat = self.fft.forward(u)
            lap_u_hat = -self.K2 * u_hat
            f.impl[:] = self.fft.backward(lap_u_hat, f.impl)

            if self.params.eps > 0:
                f.expl = -2.0 / self.params.eps ** 2 * u * (1.0 - u) * (1.0 - 2.0 * u)

            # build sum over RHS without driving force
            Rt_local = float(np.sum(f.impl + f.expl))
            if self.params.comm is not None:
                Rt_global = self.params.comm.allreduce(sendobj=Rt_local, op=MPI.SUM)
            else:
                Rt_global = Rt_local

            # build sum over driving force term
            Ht_local = float(np.sum(6.0 * u * (1.0 - u)))
            if self.params.comm is not None:
                Ht_global = self.params.comm.allreduce(sendobj=Ht_local, op=MPI.SUM)
            else:
                # BUGFIX: the serial fallback must use Ht_local here
                # (previously assigned Rt_local, corrupting dw when comm is None)
                Ht_global = Ht_local

            # add/substract time-dependent driving force
            if Ht_global != 0.0:
                dw = Rt_global / Ht_global
            else:
                dw = 0.0

            f.expl -= 6.0 * dw * u * (1.0 - u)

        return f
| bsd-2-clause |
modulexcite/PTVS | Python/Tests/TestData/VirtualEnv/env/Lib/encodings/cp1255.py | 93 | 13029 | """ Python Character Mapping Codec cp1255 generated from 'MAPPINGS/VENDORS/MICSFT/WINDOWS/CP1255.TXT' with gencodec.py.
"""#"
import codecs
### Codec APIs
class Codec(codecs.Codec):
    # Stateless codec API: each character is mapped through the cp1255
    # tables defined at module level (file generated by gencodec.py).

    def encode(self,input,errors='strict'):
        return codecs.charmap_encode(input,errors,encoding_table)

    def decode(self,input,errors='strict'):
        return codecs.charmap_decode(input,errors,decoding_table)
class IncrementalEncoder(codecs.IncrementalEncoder):
    # Charmap encoding is stateless, so each chunk is encoded independently.
    def encode(self, input, final=False):
        return codecs.charmap_encode(input,self.errors,encoding_table)[0]
class IncrementalDecoder(codecs.IncrementalDecoder):
    # Charmap decoding is stateless, so each chunk is decoded independently.
    def decode(self, input, final=False):
        return codecs.charmap_decode(input,self.errors,decoding_table)[0]
class StreamWriter(Codec,codecs.StreamWriter):
    # Stream API: everything is inherited from Codec / codecs.StreamWriter.
    pass
class StreamReader(Codec,codecs.StreamReader):
    # Stream API: everything is inherited from Codec / codecs.StreamReader.
    pass
### encodings module API
def getregentry():
    # Registry hook used by the encodings package: bundles all codec entry
    # points for "cp1255" into a single CodecInfo record.
    return codecs.CodecInfo(
        name='cp1255',
        encode=Codec().encode,
        decode=Codec().decode,
        incrementalencoder=IncrementalEncoder,
        incrementaldecoder=IncrementalDecoder,
        streamreader=StreamReader,
        streamwriter=StreamWriter,
    )
### Decoding Table
decoding_table = (
u'\x00' # 0x00 -> NULL
u'\x01' # 0x01 -> START OF HEADING
u'\x02' # 0x02 -> START OF TEXT
u'\x03' # 0x03 -> END OF TEXT
u'\x04' # 0x04 -> END OF TRANSMISSION
u'\x05' # 0x05 -> ENQUIRY
u'\x06' # 0x06 -> ACKNOWLEDGE
u'\x07' # 0x07 -> BELL
u'\x08' # 0x08 -> BACKSPACE
u'\t' # 0x09 -> HORIZONTAL TABULATION
u'\n' # 0x0A -> LINE FEED
u'\x0b' # 0x0B -> VERTICAL TABULATION
u'\x0c' # 0x0C -> FORM FEED
u'\r' # 0x0D -> CARRIAGE RETURN
u'\x0e' # 0x0E -> SHIFT OUT
u'\x0f' # 0x0F -> SHIFT IN
u'\x10' # 0x10 -> DATA LINK ESCAPE
u'\x11' # 0x11 -> DEVICE CONTROL ONE
u'\x12' # 0x12 -> DEVICE CONTROL TWO
u'\x13' # 0x13 -> DEVICE CONTROL THREE
u'\x14' # 0x14 -> DEVICE CONTROL FOUR
u'\x15' # 0x15 -> NEGATIVE ACKNOWLEDGE
u'\x16' # 0x16 -> SYNCHRONOUS IDLE
u'\x17' # 0x17 -> END OF TRANSMISSION BLOCK
u'\x18' # 0x18 -> CANCEL
u'\x19' # 0x19 -> END OF MEDIUM
u'\x1a' # 0x1A -> SUBSTITUTE
u'\x1b' # 0x1B -> ESCAPE
u'\x1c' # 0x1C -> FILE SEPARATOR
u'\x1d' # 0x1D -> GROUP SEPARATOR
u'\x1e' # 0x1E -> RECORD SEPARATOR
u'\x1f' # 0x1F -> UNIT SEPARATOR
u' ' # 0x20 -> SPACE
u'!' # 0x21 -> EXCLAMATION MARK
u'"' # 0x22 -> QUOTATION MARK
u'#' # 0x23 -> NUMBER SIGN
u'$' # 0x24 -> DOLLAR SIGN
u'%' # 0x25 -> PERCENT SIGN
u'&' # 0x26 -> AMPERSAND
u"'" # 0x27 -> APOSTROPHE
u'(' # 0x28 -> LEFT PARENTHESIS
u')' # 0x29 -> RIGHT PARENTHESIS
u'*' # 0x2A -> ASTERISK
u'+' # 0x2B -> PLUS SIGN
u',' # 0x2C -> COMMA
u'-' # 0x2D -> HYPHEN-MINUS
u'.' # 0x2E -> FULL STOP
u'/' # 0x2F -> SOLIDUS
u'0' # 0x30 -> DIGIT ZERO
u'1' # 0x31 -> DIGIT ONE
u'2' # 0x32 -> DIGIT TWO
u'3' # 0x33 -> DIGIT THREE
u'4' # 0x34 -> DIGIT FOUR
u'5' # 0x35 -> DIGIT FIVE
u'6' # 0x36 -> DIGIT SIX
u'7' # 0x37 -> DIGIT SEVEN
u'8' # 0x38 -> DIGIT EIGHT
u'9' # 0x39 -> DIGIT NINE
u':' # 0x3A -> COLON
u';' # 0x3B -> SEMICOLON
u'<' # 0x3C -> LESS-THAN SIGN
u'=' # 0x3D -> EQUALS SIGN
u'>' # 0x3E -> GREATER-THAN SIGN
u'?' # 0x3F -> QUESTION MARK
u'@' # 0x40 -> COMMERCIAL AT
u'A' # 0x41 -> LATIN CAPITAL LETTER A
u'B' # 0x42 -> LATIN CAPITAL LETTER B
u'C' # 0x43 -> LATIN CAPITAL LETTER C
u'D' # 0x44 -> LATIN CAPITAL LETTER D
u'E' # 0x45 -> LATIN CAPITAL LETTER E
u'F' # 0x46 -> LATIN CAPITAL LETTER F
u'G' # 0x47 -> LATIN CAPITAL LETTER G
u'H' # 0x48 -> LATIN CAPITAL LETTER H
u'I' # 0x49 -> LATIN CAPITAL LETTER I
u'J' # 0x4A -> LATIN CAPITAL LETTER J
u'K' # 0x4B -> LATIN CAPITAL LETTER K
u'L' # 0x4C -> LATIN CAPITAL LETTER L
u'M' # 0x4D -> LATIN CAPITAL LETTER M
u'N' # 0x4E -> LATIN CAPITAL LETTER N
u'O' # 0x4F -> LATIN CAPITAL LETTER O
u'P' # 0x50 -> LATIN CAPITAL LETTER P
u'Q' # 0x51 -> LATIN CAPITAL LETTER Q
u'R' # 0x52 -> LATIN CAPITAL LETTER R
u'S' # 0x53 -> LATIN CAPITAL LETTER S
u'T' # 0x54 -> LATIN CAPITAL LETTER T
u'U' # 0x55 -> LATIN CAPITAL LETTER U
u'V' # 0x56 -> LATIN CAPITAL LETTER V
u'W' # 0x57 -> LATIN CAPITAL LETTER W
u'X' # 0x58 -> LATIN CAPITAL LETTER X
u'Y' # 0x59 -> LATIN CAPITAL LETTER Y
u'Z' # 0x5A -> LATIN CAPITAL LETTER Z
u'[' # 0x5B -> LEFT SQUARE BRACKET
u'\\' # 0x5C -> REVERSE SOLIDUS
u']' # 0x5D -> RIGHT SQUARE BRACKET
u'^' # 0x5E -> CIRCUMFLEX ACCENT
u'_' # 0x5F -> LOW LINE
u'`' # 0x60 -> GRAVE ACCENT
u'a' # 0x61 -> LATIN SMALL LETTER A
u'b' # 0x62 -> LATIN SMALL LETTER B
u'c' # 0x63 -> LATIN SMALL LETTER C
u'd' # 0x64 -> LATIN SMALL LETTER D
u'e' # 0x65 -> LATIN SMALL LETTER E
u'f' # 0x66 -> LATIN SMALL LETTER F
u'g' # 0x67 -> LATIN SMALL LETTER G
u'h' # 0x68 -> LATIN SMALL LETTER H
u'i' # 0x69 -> LATIN SMALL LETTER I
u'j' # 0x6A -> LATIN SMALL LETTER J
u'k' # 0x6B -> LATIN SMALL LETTER K
u'l' # 0x6C -> LATIN SMALL LETTER L
u'm' # 0x6D -> LATIN SMALL LETTER M
u'n' # 0x6E -> LATIN SMALL LETTER N
u'o' # 0x6F -> LATIN SMALL LETTER O
u'p' # 0x70 -> LATIN SMALL LETTER P
u'q' # 0x71 -> LATIN SMALL LETTER Q
u'r' # 0x72 -> LATIN SMALL LETTER R
u's' # 0x73 -> LATIN SMALL LETTER S
u't' # 0x74 -> LATIN SMALL LETTER T
u'u' # 0x75 -> LATIN SMALL LETTER U
u'v' # 0x76 -> LATIN SMALL LETTER V
u'w' # 0x77 -> LATIN SMALL LETTER W
u'x' # 0x78 -> LATIN SMALL LETTER X
u'y' # 0x79 -> LATIN SMALL LETTER Y
u'z' # 0x7A -> LATIN SMALL LETTER Z
u'{' # 0x7B -> LEFT CURLY BRACKET
u'|' # 0x7C -> VERTICAL LINE
u'}' # 0x7D -> RIGHT CURLY BRACKET
u'~' # 0x7E -> TILDE
u'\x7f' # 0x7F -> DELETE
u'\u20ac' # 0x80 -> EURO SIGN
u'\ufffe' # 0x81 -> UNDEFINED
u'\u201a' # 0x82 -> SINGLE LOW-9 QUOTATION MARK
u'\u0192' # 0x83 -> LATIN SMALL LETTER F WITH HOOK
u'\u201e' # 0x84 -> DOUBLE LOW-9 QUOTATION MARK
u'\u2026' # 0x85 -> HORIZONTAL ELLIPSIS
u'\u2020' # 0x86 -> DAGGER
u'\u2021' # 0x87 -> DOUBLE DAGGER
u'\u02c6' # 0x88 -> MODIFIER LETTER CIRCUMFLEX ACCENT
u'\u2030' # 0x89 -> PER MILLE SIGN
u'\ufffe' # 0x8A -> UNDEFINED
u'\u2039' # 0x8B -> SINGLE LEFT-POINTING ANGLE QUOTATION MARK
u'\ufffe' # 0x8C -> UNDEFINED
u'\ufffe' # 0x8D -> UNDEFINED
u'\ufffe' # 0x8E -> UNDEFINED
u'\ufffe' # 0x8F -> UNDEFINED
u'\ufffe' # 0x90 -> UNDEFINED
u'\u2018' # 0x91 -> LEFT SINGLE QUOTATION MARK
u'\u2019' # 0x92 -> RIGHT SINGLE QUOTATION MARK
u'\u201c' # 0x93 -> LEFT DOUBLE QUOTATION MARK
u'\u201d' # 0x94 -> RIGHT DOUBLE QUOTATION MARK
u'\u2022' # 0x95 -> BULLET
u'\u2013' # 0x96 -> EN DASH
u'\u2014' # 0x97 -> EM DASH
u'\u02dc' # 0x98 -> SMALL TILDE
u'\u2122' # 0x99 -> TRADE MARK SIGN
u'\ufffe' # 0x9A -> UNDEFINED
u'\u203a' # 0x9B -> SINGLE RIGHT-POINTING ANGLE QUOTATION MARK
u'\ufffe' # 0x9C -> UNDEFINED
u'\ufffe' # 0x9D -> UNDEFINED
u'\ufffe' # 0x9E -> UNDEFINED
u'\ufffe' # 0x9F -> UNDEFINED
u'\xa0' # 0xA0 -> NO-BREAK SPACE
u'\xa1' # 0xA1 -> INVERTED EXCLAMATION MARK
u'\xa2' # 0xA2 -> CENT SIGN
u'\xa3' # 0xA3 -> POUND SIGN
u'\u20aa' # 0xA4 -> NEW SHEQEL SIGN
u'\xa5' # 0xA5 -> YEN SIGN
u'\xa6' # 0xA6 -> BROKEN BAR
u'\xa7' # 0xA7 -> SECTION SIGN
u'\xa8' # 0xA8 -> DIAERESIS
u'\xa9' # 0xA9 -> COPYRIGHT SIGN
u'\xd7' # 0xAA -> MULTIPLICATION SIGN
u'\xab' # 0xAB -> LEFT-POINTING DOUBLE ANGLE QUOTATION MARK
u'\xac' # 0xAC -> NOT SIGN
u'\xad' # 0xAD -> SOFT HYPHEN
u'\xae' # 0xAE -> REGISTERED SIGN
u'\xaf' # 0xAF -> MACRON
u'\xb0' # 0xB0 -> DEGREE SIGN
u'\xb1' # 0xB1 -> PLUS-MINUS SIGN
u'\xb2' # 0xB2 -> SUPERSCRIPT TWO
u'\xb3' # 0xB3 -> SUPERSCRIPT THREE
u'\xb4' # 0xB4 -> ACUTE ACCENT
u'\xb5' # 0xB5 -> MICRO SIGN
u'\xb6' # 0xB6 -> PILCROW SIGN
u'\xb7' # 0xB7 -> MIDDLE DOT
u'\xb8' # 0xB8 -> CEDILLA
u'\xb9' # 0xB9 -> SUPERSCRIPT ONE
u'\xf7' # 0xBA -> DIVISION SIGN
u'\xbb' # 0xBB -> RIGHT-POINTING DOUBLE ANGLE QUOTATION MARK
u'\xbc' # 0xBC -> VULGAR FRACTION ONE QUARTER
u'\xbd' # 0xBD -> VULGAR FRACTION ONE HALF
u'\xbe' # 0xBE -> VULGAR FRACTION THREE QUARTERS
u'\xbf' # 0xBF -> INVERTED QUESTION MARK
u'\u05b0' # 0xC0 -> HEBREW POINT SHEVA
u'\u05b1' # 0xC1 -> HEBREW POINT HATAF SEGOL
u'\u05b2' # 0xC2 -> HEBREW POINT HATAF PATAH
u'\u05b3' # 0xC3 -> HEBREW POINT HATAF QAMATS
u'\u05b4' # 0xC4 -> HEBREW POINT HIRIQ
u'\u05b5' # 0xC5 -> HEBREW POINT TSERE
u'\u05b6' # 0xC6 -> HEBREW POINT SEGOL
u'\u05b7' # 0xC7 -> HEBREW POINT PATAH
u'\u05b8' # 0xC8 -> HEBREW POINT QAMATS
u'\u05b9' # 0xC9 -> HEBREW POINT HOLAM
u'\ufffe' # 0xCA -> UNDEFINED
u'\u05bb' # 0xCB -> HEBREW POINT QUBUTS
u'\u05bc' # 0xCC -> HEBREW POINT DAGESH OR MAPIQ
u'\u05bd' # 0xCD -> HEBREW POINT METEG
u'\u05be' # 0xCE -> HEBREW PUNCTUATION MAQAF
u'\u05bf' # 0xCF -> HEBREW POINT RAFE
u'\u05c0' # 0xD0 -> HEBREW PUNCTUATION PASEQ
u'\u05c1' # 0xD1 -> HEBREW POINT SHIN DOT
u'\u05c2' # 0xD2 -> HEBREW POINT SIN DOT
u'\u05c3' # 0xD3 -> HEBREW PUNCTUATION SOF PASUQ
u'\u05f0' # 0xD4 -> HEBREW LIGATURE YIDDISH DOUBLE VAV
u'\u05f1' # 0xD5 -> HEBREW LIGATURE YIDDISH VAV YOD
u'\u05f2' # 0xD6 -> HEBREW LIGATURE YIDDISH DOUBLE YOD
u'\u05f3' # 0xD7 -> HEBREW PUNCTUATION GERESH
u'\u05f4' # 0xD8 -> HEBREW PUNCTUATION GERSHAYIM
u'\ufffe' # 0xD9 -> UNDEFINED
u'\ufffe' # 0xDA -> UNDEFINED
u'\ufffe' # 0xDB -> UNDEFINED
u'\ufffe' # 0xDC -> UNDEFINED
u'\ufffe' # 0xDD -> UNDEFINED
u'\ufffe' # 0xDE -> UNDEFINED
u'\ufffe' # 0xDF -> UNDEFINED
u'\u05d0' # 0xE0 -> HEBREW LETTER ALEF
u'\u05d1' # 0xE1 -> HEBREW LETTER BET
u'\u05d2' # 0xE2 -> HEBREW LETTER GIMEL
u'\u05d3' # 0xE3 -> HEBREW LETTER DALET
u'\u05d4' # 0xE4 -> HEBREW LETTER HE
u'\u05d5' # 0xE5 -> HEBREW LETTER VAV
u'\u05d6' # 0xE6 -> HEBREW LETTER ZAYIN
u'\u05d7' # 0xE7 -> HEBREW LETTER HET
u'\u05d8' # 0xE8 -> HEBREW LETTER TET
u'\u05d9' # 0xE9 -> HEBREW LETTER YOD
u'\u05da' # 0xEA -> HEBREW LETTER FINAL KAF
u'\u05db' # 0xEB -> HEBREW LETTER KAF
u'\u05dc' # 0xEC -> HEBREW LETTER LAMED
u'\u05dd' # 0xED -> HEBREW LETTER FINAL MEM
u'\u05de' # 0xEE -> HEBREW LETTER MEM
u'\u05df' # 0xEF -> HEBREW LETTER FINAL NUN
u'\u05e0' # 0xF0 -> HEBREW LETTER NUN
u'\u05e1' # 0xF1 -> HEBREW LETTER SAMEKH
u'\u05e2' # 0xF2 -> HEBREW LETTER AYIN
u'\u05e3' # 0xF3 -> HEBREW LETTER FINAL PE
u'\u05e4' # 0xF4 -> HEBREW LETTER PE
u'\u05e5' # 0xF5 -> HEBREW LETTER FINAL TSADI
u'\u05e6' # 0xF6 -> HEBREW LETTER TSADI
u'\u05e7' # 0xF7 -> HEBREW LETTER QOF
u'\u05e8' # 0xF8 -> HEBREW LETTER RESH
u'\u05e9' # 0xF9 -> HEBREW LETTER SHIN
u'\u05ea' # 0xFA -> HEBREW LETTER TAV
u'\ufffe' # 0xFB -> UNDEFINED
u'\ufffe' # 0xFC -> UNDEFINED
u'\u200e' # 0xFD -> LEFT-TO-RIGHT MARK
u'\u200f' # 0xFE -> RIGHT-TO-LEFT MARK
u'\ufffe' # 0xFF -> UNDEFINED
)
### Encoding table
encoding_table=codecs.charmap_build(decoding_table)
| apache-2.0 |
Altaf-Mahdi/android_kernel_oneplus_msm8994 | tools/perf/scripts/python/event_analyzing_sample.py | 4719 | 7393 | # event_analyzing_sample.py: general event handler in python
#
# Current perf report is already very powerful with the annotation integrated,
# and this script is not trying to be as powerful as perf report, but
# providing end user/developer a flexible way to analyze the events other
# than trace points.
#
# The 2 database related functions in this script just show how to gather
# the basic information, and users can modify and write their own functions
# according to their specific requirement.
#
# The first function "show_general_events" just does a basic grouping for all
# generic events with the help of sqlite, and the 2nd one "show_pebs_ll" is
# for a x86 HW PMU event: PEBS with load latency data.
#
import os
import sys
import math
import struct
import sqlite3
sys.path.append(os.environ['PERF_EXEC_PATH'] + \
'/scripts/python/Perf-Trace-Util/lib/Perf/Trace')
from perf_trace_context import *
from EventClass import *
#
# If the perf.data has a big number of samples, then the insert operation
# will be very time consuming (about 10+ minutes for 10000 samples) if the
# .db database is on disk. Move the .db file to RAM based FS to speedup
# the handling, which will cut the time down to several seconds.
#
con = sqlite3.connect("/dev/shm/perf.db")
# isolation_level None = autocommit mode (see sqlite3 docs): each insert is
# committed immediately, no explicit transaction management needed.
con.isolation_level = None
def trace_begin():
    """Called once by perf before processing samples: create the tables."""
    print "In trace_begin:\n"

    #
    # Will create several tables at the start, pebs_ll is for PEBS data with
    # load latency info, while gen_events is for general event.
    #
    con.execute("""
            create table if not exists gen_events (
                    name text,
                    symbol text,
                    comm text,
                    dso text
            );""")
    con.execute("""
            create table if not exists pebs_ll (
                    name text,
                    symbol text,
                    comm text,
                    dso text,
                    flags integer,
                    ip integer,
                    status integer,
                    dse integer,
                    dla integer,
                    lat integer
            );""")
#
# Create and insert event object to a database so that user could
# do more analysis with simple database commands.
#
def process_event(param_dict):
    """Per-sample hook called by perf: build an event object from the sample
    dict and store it in the database.

    Args:
        param_dict: dict supplied by perf with keys "attr", "sample",
            "raw_buf", "comm", "ev_name" and (when resolved) "dso"/"symbol".
    """
    event_attr = param_dict["attr"]
    sample = param_dict["sample"]
    raw_buf = param_dict["raw_buf"]
    comm = param_dict["comm"]
    name = param_dict["ev_name"]

    # Symbol and dso info are not always resolved; fall back to placeholders.
    # dict.get() replaces dict.has_key(), which was removed in Python 3.
    dso = param_dict.get("dso", "Unknown_dso")
    symbol = param_dict.get("symbol", "Unknown_symbol")

    # Create the event object and insert it to the right table in database
    event = create_event(name, comm, dso, symbol, raw_buf)
    insert_db(event)
def insert_db(event):
    """Insert an event object into the table matching its event type."""
    if event.ev_type == EVTYPE_GENERIC:
        con.execute("insert into gen_events values(?, ?, ?, ?)",
                        (event.name, event.symbol, event.comm, event.dso))
    elif event.ev_type == EVTYPE_PEBS_LL:
        # Clear the top bit of the 64-bit addresses -- presumably so the
        # values fit SQLite's signed 64-bit INTEGER column; verify against
        # the PEBS record layout.
        event.ip &= 0x7fffffffffffffff
        event.dla &= 0x7fffffffffffffff
        con.execute("insert into pebs_ll values (?, ?, ?, ?, ?, ?, ?, ?, ?, ?)",
                        (event.name, event.symbol, event.comm, event.dso, event.flags,
                                event.ip, event.status, event.dse, event.dla, event.lat))
def trace_end():
    """Called once by perf after all samples: report and close the database."""
    print "In trace_end:\n"
    # We show the basic info for the 2 type of event classes
    show_general_events()
    show_pebs_ll()
    con.close()
#
# As the event number may be very big, so we can't use linear way
# to show the histogram in real number, but use a log2 algorithm.
#
def num2sym(num):
    """Map a count to a '#' bar whose length grows with log2 of the count."""
    # floor(log2(num)) + 1 characters, so every count >= 1 gets at least one '#'.
    width = int(math.log(num, 2)) + 1
    return "#" * width
def show_general_events():
    """Print histograms of the generic events grouped by comm/symbol/dso."""

    # Check the total record number in the table
    count = con.execute("select count(*) from gen_events")
    for t in count:
        print "There is %d records in gen_events table" % t[0]
        if t[0] == 0:
            return

    print "Statistics about the general events grouped by thread/symbol/dso: \n"

    # Group by thread
    commq = con.execute("select comm, count(comm) from gen_events group by comm order by -count(comm)")
    print "\n%16s %8s %16s\n%s" % ("comm", "number", "histogram", "="*42)
    for row in commq:
         print "%16s %8d %s" % (row[0], row[1], num2sym(row[1]))

    # Group by symbol
    print "\n%32s %8s %16s\n%s" % ("symbol", "number", "histogram", "="*58)
    symbolq = con.execute("select symbol, count(symbol) from gen_events group by symbol order by -count(symbol)")
    for row in symbolq:
         print "%32s %8d %s" % (row[0], row[1], num2sym(row[1]))

    # Group by dso
    print "\n%40s %8s %16s\n%s" % ("dso", "number", "histogram", "="*74)
    dsoq = con.execute("select dso, count(dso) from gen_events group by dso order by -count(dso)")
    for row in dsoq:
         print "%40s %8d %s" % (row[0], row[1], num2sym(row[1]))
#
# This function just shows the basic info, and we could do more with the
# data in the tables, like checking the function parameters when some
# big latency events happen.
#
def show_pebs_ll():
	"""Print per-comm/symbol/dse/latency histograms for the pebs_ll table."""
	count = con.execute("select count(*) from pebs_ll")
	for t in count:
		print "There is %d records in pebs_ll table" % t[0]
		# Nothing to report for an empty capture.
		if t[0] == 0:
			return
	print "Statistics about the PEBS Load Latency events grouped by thread/symbol/dse/latency: \n"
	# Group by thread
	commq = con.execute("select comm, count(comm) from pebs_ll group by comm order by -count(comm)")
	print "\n%16s %8s %16s\n%s" % ("comm", "number", "histogram", "="*42)
	for row in commq:
		print "%16s %8d %s" % (row[0], row[1], num2sym(row[1]))
	# Group by symbol
	print "\n%32s %8s %16s\n%s" % ("symbol", "number", "histogram", "="*58)
	symbolq = con.execute("select symbol, count(symbol) from pebs_ll group by symbol order by -count(symbol)")
	for row in symbolq:
		print "%32s %8d %s" % (row[0], row[1], num2sym(row[1]))
	# Group by dse
	dseq = con.execute("select dse, count(dse) from pebs_ll group by dse order by -count(dse)")
	print "\n%32s %8s %16s\n%s" % ("dse", "number", "histogram", "="*58)
	for row in dseq:
		print "%32s %8d %s" % (row[0], row[1], num2sym(row[1]))
	# Group by latency: unlike the groups above, this one is sorted by the
	# latency value itself (ascending), not by frequency.
	latq = con.execute("select lat, count(lat) from pebs_ll group by lat order by lat")
	print "\n%32s %8s %16s\n%s" % ("latency", "number", "histogram", "="*58)
	for row in latq:
		print "%32s %8d %s" % (row[0], row[1], num2sym(row[1]))
def trace_unhandled(event_name, context, event_fields_dict):
	"""perf script hook: dump any event this script has no dedicated handler for."""
	print ' '.join(['%s=%s'%(k,str(v))for k,v in sorted(event_fields_dict.items())])
| gpl-2.0 |
greypanda/sitepipe | senders/smssender.py | 1 | 2335 | #!/usr/bin/python3
"""
Subscribes to the mqtt topic sent by Naemon/Nagios notification command.
Parses the message and constructs an SMS message.
Using the Twilio client API, sends a text message to the user.
This script runs forever so it should be monitored by supervisord
TODO:
send the message id to mqtt
DEPENDENCIES:
The twilio library must be at 6.4.3+ or the create function will generate a json error,
must be str not bytes.
DEPENDENCIES:
For python 3, twilio 6.4.3 is required
"""
import paho.mqtt.client as mqtt
import configparser
import json
from twilio.rest import Client
DEBUG = True
""" Call back from mqtt connect """
def on_connect(client,config,flags,rc):
print("Connected with result code "+str(rc))
# Subscribing in on_connect() means that if we lose the connection and
# reconnect then subscriptions will be renewed.
client.subscribe(Config.get('smssender','mqtt_subscribe_sms_send'))
""" Connects to twilio and sends the sms message """
def smsout(sms,fromuser,subject,message,config):
# Note to self: these will eventually be moved to the config file
# Your Account SID from twilio.com/console
account_sid = config.get('twilio','account_sid')
auth_token = config.get('twilio','auth_token')
sms_phone = config.get('twilio','phone_number')
client = Client(account_sid, auth_token)
message = client.messages.create(
to=sms,
from_=sms_phone,
body=message)
print(message.sid)
def on_message(client, config, msg):
    """mqtt message callback: parse the JSON payload and send it as an SMS.

    Assumes the payload is a JSON object with at least "sms" (destination
    number) and "message" (body) keys -- TODO confirm against the
    Naemon/Nagios notification command that publishes it.
    """
    # Decode the payload once and reuse it (the original decoded it twice).
    payload = msg.payload.decode('utf8')
    if DEBUG:
        print(msg.topic + " " + payload)
    dmsg = json.loads(payload)
    if DEBUG:
        print(dmsg)
    smsout(dmsg["sms"], "smssender <smssender@pandavista.org>", "broken", dmsg['message'], config)
# --- module-level wiring: read config, connect to the broker, loop forever ---
Config = configparser.ConfigParser()
Config.read("/opt/sitepipe/etc/sitepipe/config.ini")
MQTT_BROKER = Config.get('client','mqtt_broker')
MQTT_PORT = Config.getint('client','mqtt_port')
MQTT_KEEPALIVE = Config.getint('client','mqtt_keepalive')
# NOTE(review): read but never used below; on_connect() looks the topic up
# itself from the userdata config -- confirm whether this is still needed.
MQTT_SUBSCRIBE_MAIL_SEND = Config.get('smssender','mqtt_subscribe_sms_send')
# The parsed config is handed to every callback as mqtt "userdata".
client = mqtt.Client(userdata=Config)
client.on_connect = on_connect
client.on_message = on_message
client.connect(MQTT_BROKER,MQTT_PORT,MQTT_KEEPALIVE)
# Blocks forever; this script is intended to run under supervisord.
client.loop_forever()
| mit |
lvdongr/spark | python/pyspark/mllib/recommendation.py | 28 | 11729 | #
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import array
import sys
from collections import namedtuple
from pyspark import SparkContext, since
from pyspark.rdd import RDD
from pyspark.mllib.common import JavaModelWrapper, callMLlibFunc, inherit_doc
from pyspark.mllib.util import JavaLoader, JavaSaveable
from pyspark.sql import DataFrame
__all__ = ['MatrixFactorizationModel', 'ALS', 'Rating']
class Rating(namedtuple("Rating", ["user", "product", "rating"])):
    """
    Represents a (user, product, rating) tuple.
    >>> r = Rating(1, 2, 5.0)
    >>> (r.user, r.product, r.rating)
    (1, 2, 5.0)
    >>> (r[0], r[1], r[2])
    (1, 2, 5.0)
    .. versionadded:: 1.2.0
    """
    def __reduce__(self):
        # Pickle as (callable, args) with the field types normalized, so an
        # unpickled Rating always carries (int, int, float).
        user, product, rating = self
        return (Rating, (int(user), int(product), float(rating)))
@inherit_doc
class MatrixFactorizationModel(JavaModelWrapper, JavaSaveable, JavaLoader):
    """A matrix factorisation model trained by regularized alternating
    least-squares.
    >>> r1 = (1, 1, 1.0)
    >>> r2 = (1, 2, 2.0)
    >>> r3 = (2, 1, 2.0)
    >>> ratings = sc.parallelize([r1, r2, r3])
    >>> model = ALS.trainImplicit(ratings, 1, seed=10)
    >>> model.predict(2, 2)
    0.4...
    >>> testset = sc.parallelize([(1, 2), (1, 1)])
    >>> model = ALS.train(ratings, 2, seed=0)
    >>> model.predictAll(testset).collect()
    [Rating(user=1, product=1, rating=1.0...), Rating(user=1, product=2, rating=1.9...)]
    >>> model = ALS.train(ratings, 4, seed=10)
    >>> model.userFeatures().collect()
    [(1, array('d', [...])), (2, array('d', [...]))]
    >>> model.recommendUsers(1, 2)
    [Rating(user=2, product=1, rating=1.9...), Rating(user=1, product=1, rating=1.0...)]
    >>> model.recommendProducts(1, 2)
    [Rating(user=1, product=2, rating=1.9...), Rating(user=1, product=1, rating=1.0...)]
    >>> model.rank
    4
    >>> first_user = model.userFeatures().take(1)[0]
    >>> latents = first_user[1]
    >>> len(latents)
    4
    >>> model.productFeatures().collect()
    [(1, array('d', [...])), (2, array('d', [...]))]
    >>> first_product = model.productFeatures().take(1)[0]
    >>> latents = first_product[1]
    >>> len(latents)
    4
    >>> products_for_users = model.recommendProductsForUsers(1).collect()
    >>> len(products_for_users)
    2
    >>> products_for_users[0]
    (1, (Rating(user=1, product=2, rating=...),))
    >>> users_for_products = model.recommendUsersForProducts(1).collect()
    >>> len(users_for_products)
    2
    >>> users_for_products[0]
    (1, (Rating(user=2, product=1, rating=...),))
    >>> model = ALS.train(ratings, 1, nonnegative=True, seed=10)
    >>> model.predict(2, 2)
    3.73...
    >>> df = sqlContext.createDataFrame([Rating(1, 1, 1.0), Rating(1, 2, 2.0), Rating(2, 1, 2.0)])
    >>> model = ALS.train(df, 1, nonnegative=True, seed=10)
    >>> model.predict(2, 2)
    3.73...
    >>> model = ALS.trainImplicit(ratings, 1, nonnegative=True, seed=10)
    >>> model.predict(2, 2)
    0.4...
    >>> import os, tempfile
    >>> path = tempfile.mkdtemp()
    >>> model.save(sc, path)
    >>> sameModel = MatrixFactorizationModel.load(sc, path)
    >>> sameModel.predict(2, 2)
    0.4...
    >>> sameModel.predictAll(testset).collect()
    [Rating(...
    >>> from shutil import rmtree
    >>> try:
    ...     rmtree(path)
    ... except OSError:
    ...     pass
    .. versionadded:: 0.9.0
    """
    @since("0.9.0")
    def predict(self, user, product):
        """
        Predicts rating for the given user and product.
        """
        # Direct delegation to the wrapped JVM model.
        return self._java_model.predict(int(user), int(product))
    @since("0.9.0")
    def predictAll(self, user_product):
        """
        Returns a list of predicted ratings for input user and product
        pairs.
        """
        assert isinstance(user_product, RDD), "user_product should be RDD of (user, product)"
        first = user_product.first()
        assert len(first) == 2, "user_product should be RDD of (user, product)"
        # Coerce both ids to int so the JVM side receives an RDD of (Int, Int).
        user_product = user_product.map(lambda u_p: (int(u_p[0]), int(u_p[1])))
        return self.call("predict", user_product)
    @since("1.2.0")
    def userFeatures(self):
        """
        Returns a paired RDD, where the first element is the user and the
        second is an array of features corresponding to that user.
        """
        # The JVM side returns a double[]; expose it as a Python array('d').
        return self.call("getUserFeatures").mapValues(lambda v: array.array('d', v))
    @since("1.2.0")
    def productFeatures(self):
        """
        Returns a paired RDD, where the first element is the product and the
        second is an array of features corresponding to that product.
        """
        # Same double[] -> array('d') conversion as userFeatures().
        return self.call("getProductFeatures").mapValues(lambda v: array.array('d', v))
    @since("1.4.0")
    def recommendUsers(self, product, num):
        """
        Recommends the top "num" number of users for a given product and
        returns a list of Rating objects sorted by the predicted rating in
        descending order.
        """
        return list(self.call("recommendUsers", product, num))
    @since("1.4.0")
    def recommendProducts(self, user, num):
        """
        Recommends the top "num" number of products for a given user and
        returns a list of Rating objects sorted by the predicted rating in
        descending order.
        """
        return list(self.call("recommendProducts", user, num))
    def recommendProductsForUsers(self, num):
        """
        Recommends the top "num" number of products for all users. The
        number of recommendations returned per user may be less than "num".
        """
        return self.call("wrappedRecommendProductsForUsers", num)
    def recommendUsersForProducts(self, num):
        """
        Recommends the top "num" number of users for all products. The
        number of recommendations returned per product may be less than
        "num".
        """
        return self.call("wrappedRecommendUsersForProducts", num)
    @property
    @since("1.4.0")
    def rank(self):
        """Rank for the features in this model"""
        return self.call("rank")
    @classmethod
    @since("1.3.1")
    def load(cls, sc, path):
        """Load a model from the given path"""
        model = cls._load_java(sc, path)
        # Re-wrap the raw JVM model so the Python-side helpers
        # (predictAll, recommend*ForUsers/Products) keep working.
        wrapper = sc._jvm.org.apache.spark.mllib.api.python.MatrixFactorizationModelWrapper(model)
        return MatrixFactorizationModel(wrapper)
class ALS(object):
    """Alternating Least Squares matrix factorization
    .. versionadded:: 0.9.0
    """
    @classmethod
    def _prepare(cls, ratings):
        # Normalize the input to an RDD of Rating objects before handing it
        # to the JVM trainers.  Note Rating is itself a tuple subclass, so
        # the Rating check must win over the plain tuple/list check.
        if isinstance(ratings, DataFrame):
            ratings = ratings.rdd
        elif not isinstance(ratings, RDD):
            raise TypeError("Ratings should be represented by either an RDD or a DataFrame, "
                            "but got %s." % type(ratings))
        sample = ratings.first()
        if isinstance(sample, Rating):
            return ratings
        if isinstance(sample, (tuple, list)):
            return ratings.map(lambda row: Rating(*row))
        raise TypeError("Expect a Rating or a tuple/list, but got %s." % type(sample))
    @classmethod
    @since("0.9.0")
    def train(cls, ratings, rank, iterations=5, lambda_=0.01, blocks=-1, nonnegative=False,
              seed=None):
        """
        Train a matrix factorization model given an RDD of ratings by users
        for a subset of products. The ratings matrix is approximated as the
        product of two lower-rank matrices of a given rank (number of
        features). To solve for these features, ALS is run iteratively with
        a configurable level of parallelism.
        :param ratings:
          RDD of `Rating` or (userID, productID, rating) tuple.
        :param rank:
          Number of features to use (also referred to as the number of latent factors).
        :param iterations:
          Number of iterations of ALS.
          (default: 5)
        :param lambda_:
          Regularization parameter.
          (default: 0.01)
        :param blocks:
          Number of blocks used to parallelize the computation. A value
          of -1 will use an auto-configured number of blocks.
          (default: -1)
        :param nonnegative:
          A value of True will solve least-squares with nonnegativity
          constraints.
          (default: False)
        :param seed:
          Random seed for initial matrix factorization model. A value
          of None will use system time as the seed.
          (default: None)
        """
        prepared = cls._prepare(ratings)
        java_model = callMLlibFunc("trainALSModel", prepared, rank, iterations,
                                   lambda_, blocks, nonnegative, seed)
        return MatrixFactorizationModel(java_model)
    @classmethod
    @since("0.9.0")
    def trainImplicit(cls, ratings, rank, iterations=5, lambda_=0.01, blocks=-1, alpha=0.01,
                      nonnegative=False, seed=None):
        """
        Train a matrix factorization model given an RDD of 'implicit
        preferences' of users for a subset of products. The ratings matrix
        is approximated as the product of two lower-rank matrices of a
        given rank (number of features). To solve for these features, ALS
        is run iteratively with a configurable level of parallelism.
        :param ratings:
          RDD of `Rating` or (userID, productID, rating) tuple.
        :param rank:
          Number of features to use (also referred to as the number of latent factors).
        :param iterations:
          Number of iterations of ALS.
          (default: 5)
        :param lambda_:
          Regularization parameter.
          (default: 0.01)
        :param blocks:
          Number of blocks used to parallelize the computation. A value
          of -1 will use an auto-configured number of blocks.
          (default: -1)
        :param alpha:
          A constant used in computing confidence.
          (default: 0.01)
        :param nonnegative:
          A value of True will solve least-squares with nonnegativity
          constraints.
          (default: False)
        :param seed:
          Random seed for initial matrix factorization model. A value
          of None will use system time as the seed.
          (default: None)
        """
        prepared = cls._prepare(ratings)
        java_model = callMLlibFunc("trainImplicitALSModel", prepared, rank, iterations,
                                   lambda_, blocks, alpha, nonnegative, seed)
        return MatrixFactorizationModel(java_model)
def _test():
    """Run this module's doctests against a throwaway local SparkContext."""
    import doctest
    import pyspark.mllib.recommendation
    from pyspark.sql import SQLContext
    globs = pyspark.mllib.recommendation.__dict__.copy()
    globs['sc'] = SparkContext('local[4]', 'PythonTest')
    globs['sqlContext'] = SQLContext(globs['sc'])
    failure_count, test_count = doctest.testmod(globs=globs,
                                                optionflags=doctest.ELLIPSIS)
    globs['sc'].stop()
    if failure_count:
        sys.exit(-1)
if __name__ == "__main__":
    _test()
| apache-2.0 |
uberlaggydarwin/useless | Documentation/target/tcm_mod_builder.py | 4981 | 41422 | #!/usr/bin/python
# The TCM v4 multi-protocol fabric module generation script for drivers/target/$NEW_MOD
#
# Copyright (c) 2010 Rising Tide Systems
# Copyright (c) 2010 Linux-iSCSI.org
#
# Author: nab@kernel.org
#
import os, sys
import subprocess as sub
import string
import re
import optparse
tcm_dir = ""
fabric_ops = []
fabric_mod_dir = ""
fabric_mod_port = ""
fabric_mod_init_port = ""
def tcm_mod_err(msg):
	"""Report a fatal generator error and abort with exit status 1.

	Uses the parenthesized single-argument print form, which behaves
	identically on Python 2 (where the rest of this script was written)
	and also parses on Python 3.
	"""
	print(msg)
	sys.exit(1)
def tcm_mod_create_module_subdir(fabric_mod_dir_var):
	"""Create the output directory for the generated fabric module.

	Returns 1 if the directory already exists (nothing to do), otherwise
	creates it and returns None.  A failed mkdir aborts the script via
	tcm_mod_err().
	"""
	if os.path.isdir(fabric_mod_dir_var):
		return 1
	print("Creating fabric_mod_dir: " + fabric_mod_dir_var)
	# os.mkdir() returns None and reports failure by raising OSError, so the
	# original "ret = os.mkdir(...); if ret:" error check could never fire.
	# Route the failure path explicitly instead.
	try:
		os.mkdir(fabric_mod_dir_var)
	except OSError:
		tcm_mod_err("Unable to mkdir " + fabric_mod_dir_var)
	return
def tcm_mod_build_FC_include(fabric_mod_dir_var, fabric_mod_name):
	"""Generate <fabric_mod_name>_base.h for a Fibre Channel fabric.

	Emits the nacl/tpg/lport struct skeletons and sets the module-level
	naming globals (fabric_mod_port="lport", fabric_mod_init_port="nport")
	consumed by the later template generators.
	"""
	global fabric_mod_port
	global fabric_mod_init_port
	buf = ""
	f = fabric_mod_dir_var + "/" + fabric_mod_name + "_base.h"
	print "Writing file: " + f
	p = open(f, 'w');
	if not p:
		tcm_mod_err("Unable to open file: " + f)
	buf = "#define " + fabric_mod_name.upper() + "_VERSION \"v0.1\"\n"
	buf += "#define " + fabric_mod_name.upper() + "_NAMELEN 32\n"
	buf += "\n"
	buf += "struct " + fabric_mod_name + "_nacl {\n"
	buf += " /* Binary World Wide unique Port Name for FC Initiator Nport */\n"
	buf += " u64 nport_wwpn;\n"
	buf += " /* ASCII formatted WWPN for FC Initiator Nport */\n"
	buf += " char nport_name[" + fabric_mod_name.upper() + "_NAMELEN];\n"
	buf += " /* Returned by " + fabric_mod_name + "_make_nodeacl() */\n"
	buf += " struct se_node_acl se_node_acl;\n"
	buf += "};\n"
	buf += "\n"
	buf += "struct " + fabric_mod_name + "_tpg {\n"
	buf += " /* FC lport target portal group tag for TCM */\n"
	buf += " u16 lport_tpgt;\n"
	buf += " /* Pointer back to " + fabric_mod_name + "_lport */\n"
	buf += " struct " + fabric_mod_name + "_lport *lport;\n"
	buf += " /* Returned by " + fabric_mod_name + "_make_tpg() */\n"
	buf += " struct se_portal_group se_tpg;\n"
	buf += "};\n"
	buf += "\n"
	buf += "struct " + fabric_mod_name + "_lport {\n"
	buf += " /* SCSI protocol the lport is providing */\n"
	buf += " u8 lport_proto_id;\n"
	buf += " /* Binary World Wide unique Port Name for FC Target Lport */\n"
	buf += " u64 lport_wwpn;\n"
	buf += " /* ASCII formatted WWPN for FC Target Lport */\n"
	buf += " char lport_name[" + fabric_mod_name.upper() + "_NAMELEN];\n"
	buf += " /* Returned by " + fabric_mod_name + "_make_lport() */\n"
	buf += " struct se_wwn lport_wwn;\n"
	buf += "};\n"
	ret = p.write(buf)
	# NOTE(review): on Python 2 file.write() returns None, so this check can
	# never fire; a failed write raises IOError instead -- confirm intent.
	if ret:
		tcm_mod_err("Unable to write f: " + f)
	p.close()
	fabric_mod_port = "lport"
	fabric_mod_init_port = "nport"
	return
def tcm_mod_build_SAS_include(fabric_mod_dir_var, fabric_mod_name):
	"""Generate <fabric_mod_name>_base.h for a SAS fabric.

	Emits the nacl/tpg/tport struct skeletons and sets the module-level
	naming globals (fabric_mod_port="tport", fabric_mod_init_port="iport")
	consumed by the later template generators.
	"""
	global fabric_mod_port
	global fabric_mod_init_port
	buf = ""
	f = fabric_mod_dir_var + "/" + fabric_mod_name + "_base.h"
	print "Writing file: " + f
	p = open(f, 'w');
	if not p:
		tcm_mod_err("Unable to open file: " + f)
	buf = "#define " + fabric_mod_name.upper() + "_VERSION \"v0.1\"\n"
	buf += "#define " + fabric_mod_name.upper() + "_NAMELEN 32\n"
	buf += "\n"
	buf += "struct " + fabric_mod_name + "_nacl {\n"
	buf += " /* Binary World Wide unique Port Name for SAS Initiator port */\n"
	buf += " u64 iport_wwpn;\n"
	buf += " /* ASCII formatted WWPN for Sas Initiator port */\n"
	buf += " char iport_name[" + fabric_mod_name.upper() + "_NAMELEN];\n"
	buf += " /* Returned by " + fabric_mod_name + "_make_nodeacl() */\n"
	buf += " struct se_node_acl se_node_acl;\n"
	buf += "};\n\n"
	buf += "struct " + fabric_mod_name + "_tpg {\n"
	buf += " /* SAS port target portal group tag for TCM */\n"
	buf += " u16 tport_tpgt;\n"
	buf += " /* Pointer back to " + fabric_mod_name + "_tport */\n"
	buf += " struct " + fabric_mod_name + "_tport *tport;\n"
	buf += " /* Returned by " + fabric_mod_name + "_make_tpg() */\n"
	buf += " struct se_portal_group se_tpg;\n"
	buf += "};\n\n"
	buf += "struct " + fabric_mod_name + "_tport {\n"
	buf += " /* SCSI protocol the tport is providing */\n"
	buf += " u8 tport_proto_id;\n"
	buf += " /* Binary World Wide unique Port Name for SAS Target port */\n"
	buf += " u64 tport_wwpn;\n"
	buf += " /* ASCII formatted WWPN for SAS Target port */\n"
	buf += " char tport_name[" + fabric_mod_name.upper() + "_NAMELEN];\n"
	buf += " /* Returned by " + fabric_mod_name + "_make_tport() */\n"
	buf += " struct se_wwn tport_wwn;\n"
	buf += "};\n"
	ret = p.write(buf)
	# NOTE(review): on Python 2 file.write() returns None, so this check can
	# never fire; a failed write raises IOError instead -- confirm intent.
	if ret:
		tcm_mod_err("Unable to write f: " + f)
	p.close()
	fabric_mod_port = "tport"
	fabric_mod_init_port = "iport"
	return
def tcm_mod_build_iSCSI_include(fabric_mod_dir_var, fabric_mod_name):
	"""Generate <fabric_mod_name>_base.h for an iSCSI fabric.

	Emits the nacl/tpg/tport struct skeletons (name-based, no WWPN fields)
	and sets the module-level naming globals (fabric_mod_port="tport",
	fabric_mod_init_port="iport") consumed by the later generators.
	"""
	global fabric_mod_port
	global fabric_mod_init_port
	buf = ""
	f = fabric_mod_dir_var + "/" + fabric_mod_name + "_base.h"
	print "Writing file: " + f
	p = open(f, 'w');
	if not p:
		tcm_mod_err("Unable to open file: " + f)
	buf = "#define " + fabric_mod_name.upper() + "_VERSION \"v0.1\"\n"
	buf += "#define " + fabric_mod_name.upper() + "_NAMELEN 32\n"
	buf += "\n"
	buf += "struct " + fabric_mod_name + "_nacl {\n"
	buf += " /* ASCII formatted InitiatorName */\n"
	buf += " char iport_name[" + fabric_mod_name.upper() + "_NAMELEN];\n"
	buf += " /* Returned by " + fabric_mod_name + "_make_nodeacl() */\n"
	buf += " struct se_node_acl se_node_acl;\n"
	buf += "};\n\n"
	buf += "struct " + fabric_mod_name + "_tpg {\n"
	buf += " /* iSCSI target portal group tag for TCM */\n"
	buf += " u16 tport_tpgt;\n"
	buf += " /* Pointer back to " + fabric_mod_name + "_tport */\n"
	buf += " struct " + fabric_mod_name + "_tport *tport;\n"
	buf += " /* Returned by " + fabric_mod_name + "_make_tpg() */\n"
	buf += " struct se_portal_group se_tpg;\n"
	buf += "};\n\n"
	buf += "struct " + fabric_mod_name + "_tport {\n"
	buf += " /* SCSI protocol the tport is providing */\n"
	buf += " u8 tport_proto_id;\n"
	buf += " /* ASCII formatted TargetName for IQN */\n"
	buf += " char tport_name[" + fabric_mod_name.upper() + "_NAMELEN];\n"
	buf += " /* Returned by " + fabric_mod_name + "_make_tport() */\n"
	buf += " struct se_wwn tport_wwn;\n"
	buf += "};\n"
	ret = p.write(buf)
	# NOTE(review): on Python 2 file.write() returns None, so this check can
	# never fire; a failed write raises IOError instead -- confirm intent.
	if ret:
		tcm_mod_err("Unable to write f: " + f)
	p.close()
	fabric_mod_port = "tport"
	fabric_mod_init_port = "iport"
	return
def tcm_mod_build_base_includes(proto_ident, fabric_mod_dir_val, fabric_mod_name):
	"""Dispatch to the protocol-specific *_base.h generator for *proto_ident*."""
	builders = {
		"FC": tcm_mod_build_FC_include,
		"SAS": tcm_mod_build_SAS_include,
		"iSCSI": tcm_mod_build_iSCSI_include,
	}
	builder = builders.get(proto_ident)
	if builder is None:
		print("Unsupported proto_ident: " + proto_ident)
		sys.exit(1)
	builder(fabric_mod_dir_val, fabric_mod_name)
	return
def tcm_mod_build_configfs(proto_ident, fabric_mod_dir_var, fabric_mod_name):
	"""Generate <fabric_mod_name>_configfs.c: the TCM configfs glue code.

	Emits make/drop callbacks for nodeacls, TPGs and WWN ports, the
	target_core_fabric_ops table wired to the <fabric_mod_name>_* stubs, and
	the module init/exit that (de)registers the fabric with TCM.  The
	emitted C text must stay byte-exact; this function only accumulates it.
	"""
	buf = ""
	f = fabric_mod_dir_var + "/" + fabric_mod_name + "_configfs.c"
	print "Writing file: " + f
	p = open(f, 'w');
	if not p:
		tcm_mod_err("Unable to open file: " + f)
	buf = "#include <linux/module.h>\n"
	buf += "#include <linux/moduleparam.h>\n"
	buf += "#include <linux/version.h>\n"
	buf += "#include <generated/utsrelease.h>\n"
	buf += "#include <linux/utsname.h>\n"
	buf += "#include <linux/init.h>\n"
	buf += "#include <linux/slab.h>\n"
	buf += "#include <linux/kthread.h>\n"
	buf += "#include <linux/types.h>\n"
	buf += "#include <linux/string.h>\n"
	buf += "#include <linux/configfs.h>\n"
	buf += "#include <linux/ctype.h>\n"
	buf += "#include <asm/unaligned.h>\n\n"
	buf += "#include <target/target_core_base.h>\n"
	buf += "#include <target/target_core_fabric.h>\n"
	buf += "#include <target/target_core_fabric_configfs.h>\n"
	buf += "#include <target/target_core_configfs.h>\n"
	buf += "#include <target/configfs_macros.h>\n\n"
	buf += "#include \"" + fabric_mod_name + "_base.h\"\n"
	buf += "#include \"" + fabric_mod_name + "_fabric.h\"\n\n"
	buf += "/* Local pointer to allocated TCM configfs fabric module */\n"
	buf += "struct target_fabric_configfs *" + fabric_mod_name + "_fabric_configfs;\n\n"
	buf += "static struct se_node_acl *" + fabric_mod_name + "_make_nodeacl(\n"
	buf += " struct se_portal_group *se_tpg,\n"
	buf += " struct config_group *group,\n"
	buf += " const char *name)\n"
	buf += "{\n"
	buf += " struct se_node_acl *se_nacl, *se_nacl_new;\n"
	buf += " struct " + fabric_mod_name + "_nacl *nacl;\n"
	# WWPN parsing only applies to the WWN-addressed transports (FC/SAS);
	# iSCSI node ACLs are name-based.
	if proto_ident == "FC" or proto_ident == "SAS":
		buf += " u64 wwpn = 0;\n"
	buf += " u32 nexus_depth;\n\n"
	buf += " /* " + fabric_mod_name + "_parse_wwn(name, &wwpn, 1) < 0)\n"
	buf += " return ERR_PTR(-EINVAL); */\n"
	buf += " se_nacl_new = " + fabric_mod_name + "_alloc_fabric_acl(se_tpg);\n"
	buf += " if (!se_nacl_new)\n"
	buf += " return ERR_PTR(-ENOMEM);\n"
	buf += "//#warning FIXME: Hardcoded nexus depth in " + fabric_mod_name + "_make_nodeacl()\n"
	buf += " nexus_depth = 1;\n"
	buf += " /*\n"
	buf += " * se_nacl_new may be released by core_tpg_add_initiator_node_acl()\n"
	buf += " * when converting a NodeACL from demo mode -> explict\n"
	buf += " */\n"
	buf += " se_nacl = core_tpg_add_initiator_node_acl(se_tpg, se_nacl_new,\n"
	buf += " name, nexus_depth);\n"
	buf += " if (IS_ERR(se_nacl)) {\n"
	buf += " " + fabric_mod_name + "_release_fabric_acl(se_tpg, se_nacl_new);\n"
	buf += " return se_nacl;\n"
	buf += " }\n"
	buf += " /*\n"
	buf += " * Locate our struct " + fabric_mod_name + "_nacl and set the FC Nport WWPN\n"
	buf += " */\n"
	buf += " nacl = container_of(se_nacl, struct " + fabric_mod_name + "_nacl, se_node_acl);\n"
	if proto_ident == "FC" or proto_ident == "SAS":
		buf += " nacl->" + fabric_mod_init_port + "_wwpn = wwpn;\n"
	buf += " /* " + fabric_mod_name + "_format_wwn(&nacl->" + fabric_mod_init_port + "_name[0], " + fabric_mod_name.upper() + "_NAMELEN, wwpn); */\n\n"
	buf += " return se_nacl;\n"
	buf += "}\n\n"
	buf += "static void " + fabric_mod_name + "_drop_nodeacl(struct se_node_acl *se_acl)\n"
	buf += "{\n"
	buf += " struct " + fabric_mod_name + "_nacl *nacl = container_of(se_acl,\n"
	buf += " struct " + fabric_mod_name + "_nacl, se_node_acl);\n"
	buf += " core_tpg_del_initiator_node_acl(se_acl->se_tpg, se_acl, 1);\n"
	buf += " kfree(nacl);\n"
	buf += "}\n\n"
	buf += "static struct se_portal_group *" + fabric_mod_name + "_make_tpg(\n"
	buf += " struct se_wwn *wwn,\n"
	buf += " struct config_group *group,\n"
	buf += " const char *name)\n"
	buf += "{\n"
	buf += " struct " + fabric_mod_name + "_" + fabric_mod_port + "*" + fabric_mod_port + " = container_of(wwn,\n"
	buf += " struct " + fabric_mod_name + "_" + fabric_mod_port + ", " + fabric_mod_port + "_wwn);\n\n"
	buf += " struct " + fabric_mod_name + "_tpg *tpg;\n"
	buf += " unsigned long tpgt;\n"
	buf += " int ret;\n\n"
	buf += " if (strstr(name, \"tpgt_\") != name)\n"
	buf += " return ERR_PTR(-EINVAL);\n"
	buf += " if (strict_strtoul(name + 5, 10, &tpgt) || tpgt > UINT_MAX)\n"
	buf += " return ERR_PTR(-EINVAL);\n\n"
	buf += " tpg = kzalloc(sizeof(struct " + fabric_mod_name + "_tpg), GFP_KERNEL);\n"
	buf += " if (!tpg) {\n"
	buf += " printk(KERN_ERR \"Unable to allocate struct " + fabric_mod_name + "_tpg\");\n"
	buf += " return ERR_PTR(-ENOMEM);\n"
	buf += " }\n"
	buf += " tpg->" + fabric_mod_port + " = " + fabric_mod_port + ";\n"
	buf += " tpg->" + fabric_mod_port + "_tpgt = tpgt;\n\n"
	buf += " ret = core_tpg_register(&" + fabric_mod_name + "_fabric_configfs->tf_ops, wwn,\n"
	buf += " &tpg->se_tpg, (void *)tpg,\n"
	buf += " TRANSPORT_TPG_TYPE_NORMAL);\n"
	buf += " if (ret < 0) {\n"
	buf += " kfree(tpg);\n"
	buf += " return NULL;\n"
	buf += " }\n"
	buf += " return &tpg->se_tpg;\n"
	buf += "}\n\n"
	buf += "static void " + fabric_mod_name + "_drop_tpg(struct se_portal_group *se_tpg)\n"
	buf += "{\n"
	buf += " struct " + fabric_mod_name + "_tpg *tpg = container_of(se_tpg,\n"
	buf += " struct " + fabric_mod_name + "_tpg, se_tpg);\n\n"
	buf += " core_tpg_deregister(se_tpg);\n"
	buf += " kfree(tpg);\n"
	buf += "}\n\n"
	buf += "static struct se_wwn *" + fabric_mod_name + "_make_" + fabric_mod_port + "(\n"
	buf += " struct target_fabric_configfs *tf,\n"
	buf += " struct config_group *group,\n"
	buf += " const char *name)\n"
	buf += "{\n"
	buf += " struct " + fabric_mod_name + "_" + fabric_mod_port + " *" + fabric_mod_port + ";\n"
	if proto_ident == "FC" or proto_ident == "SAS":
		buf += " u64 wwpn = 0;\n\n"
	buf += " /* if (" + fabric_mod_name + "_parse_wwn(name, &wwpn, 1) < 0)\n"
	buf += " return ERR_PTR(-EINVAL); */\n\n"
	buf += " " + fabric_mod_port + " = kzalloc(sizeof(struct " + fabric_mod_name + "_" + fabric_mod_port + "), GFP_KERNEL);\n"
	buf += " if (!" + fabric_mod_port + ") {\n"
	buf += " printk(KERN_ERR \"Unable to allocate struct " + fabric_mod_name + "_" + fabric_mod_port + "\");\n"
	buf += " return ERR_PTR(-ENOMEM);\n"
	buf += " }\n"
	if proto_ident == "FC" or proto_ident == "SAS":
		buf += " " + fabric_mod_port + "->" + fabric_mod_port + "_wwpn = wwpn;\n"
	buf += " /* " + fabric_mod_name + "_format_wwn(&" + fabric_mod_port + "->" + fabric_mod_port + "_name[0], " + fabric_mod_name.upper() + "_NAMELEN, wwpn); */\n\n"
	buf += " return &" + fabric_mod_port + "->" + fabric_mod_port + "_wwn;\n"
	buf += "}\n\n"
	buf += "static void " + fabric_mod_name + "_drop_" + fabric_mod_port + "(struct se_wwn *wwn)\n"
	buf += "{\n"
	buf += " struct " + fabric_mod_name + "_" + fabric_mod_port + " *" + fabric_mod_port + " = container_of(wwn,\n"
	buf += " struct " + fabric_mod_name + "_" + fabric_mod_port + ", " + fabric_mod_port + "_wwn);\n"
	buf += " kfree(" + fabric_mod_port + ");\n"
	buf += "}\n\n"
	buf += "static ssize_t " + fabric_mod_name + "_wwn_show_attr_version(\n"
	buf += " struct target_fabric_configfs *tf,\n"
	buf += " char *page)\n"
	buf += "{\n"
	buf += " return sprintf(page, \"" + fabric_mod_name.upper() + " fabric module %s on %s/%s\"\n"
	buf += " \"on \"UTS_RELEASE\"\\n\", " + fabric_mod_name.upper() + "_VERSION, utsname()->sysname,\n"
	buf += " utsname()->machine);\n"
	buf += "}\n\n"
	buf += "TF_WWN_ATTR_RO(" + fabric_mod_name + ", version);\n\n"
	buf += "static struct configfs_attribute *" + fabric_mod_name + "_wwn_attrs[] = {\n"
	buf += " &" + fabric_mod_name + "_wwn_version.attr,\n"
	buf += " NULL,\n"
	buf += "};\n\n"
	# The fabric ops table: every entry points at a stub emitted separately
	# by tcm_mod_dump_fabric_ops() into <fabric_mod_name>_fabric.c.
	buf += "static struct target_core_fabric_ops " + fabric_mod_name + "_ops = {\n"
	buf += " .get_fabric_name = " + fabric_mod_name + "_get_fabric_name,\n"
	buf += " .get_fabric_proto_ident = " + fabric_mod_name + "_get_fabric_proto_ident,\n"
	buf += " .tpg_get_wwn = " + fabric_mod_name + "_get_fabric_wwn,\n"
	buf += " .tpg_get_tag = " + fabric_mod_name + "_get_tag,\n"
	buf += " .tpg_get_default_depth = " + fabric_mod_name + "_get_default_depth,\n"
	buf += " .tpg_get_pr_transport_id = " + fabric_mod_name + "_get_pr_transport_id,\n"
	buf += " .tpg_get_pr_transport_id_len = " + fabric_mod_name + "_get_pr_transport_id_len,\n"
	buf += " .tpg_parse_pr_out_transport_id = " + fabric_mod_name + "_parse_pr_out_transport_id,\n"
	buf += " .tpg_check_demo_mode = " + fabric_mod_name + "_check_false,\n"
	buf += " .tpg_check_demo_mode_cache = " + fabric_mod_name + "_check_true,\n"
	buf += " .tpg_check_demo_mode_write_protect = " + fabric_mod_name + "_check_true,\n"
	buf += " .tpg_check_prod_mode_write_protect = " + fabric_mod_name + "_check_false,\n"
	buf += " .tpg_alloc_fabric_acl = " + fabric_mod_name + "_alloc_fabric_acl,\n"
	buf += " .tpg_release_fabric_acl = " + fabric_mod_name + "_release_fabric_acl,\n"
	buf += " .tpg_get_inst_index = " + fabric_mod_name + "_tpg_get_inst_index,\n"
	buf += " .release_cmd = " + fabric_mod_name + "_release_cmd,\n"
	buf += " .shutdown_session = " + fabric_mod_name + "_shutdown_session,\n"
	buf += " .close_session = " + fabric_mod_name + "_close_session,\n"
	buf += " .stop_session = " + fabric_mod_name + "_stop_session,\n"
	buf += " .fall_back_to_erl0 = " + fabric_mod_name + "_reset_nexus,\n"
	buf += " .sess_logged_in = " + fabric_mod_name + "_sess_logged_in,\n"
	buf += " .sess_get_index = " + fabric_mod_name + "_sess_get_index,\n"
	buf += " .sess_get_initiator_sid = NULL,\n"
	buf += " .write_pending = " + fabric_mod_name + "_write_pending,\n"
	buf += " .write_pending_status = " + fabric_mod_name + "_write_pending_status,\n"
	buf += " .set_default_node_attributes = " + fabric_mod_name + "_set_default_node_attrs,\n"
	buf += " .get_task_tag = " + fabric_mod_name + "_get_task_tag,\n"
	buf += " .get_cmd_state = " + fabric_mod_name + "_get_cmd_state,\n"
	buf += " .queue_data_in = " + fabric_mod_name + "_queue_data_in,\n"
	buf += " .queue_status = " + fabric_mod_name + "_queue_status,\n"
	buf += " .queue_tm_rsp = " + fabric_mod_name + "_queue_tm_rsp,\n"
	buf += " .get_fabric_sense_len = " + fabric_mod_name + "_get_fabric_sense_len,\n"
	buf += " .set_fabric_sense_len = " + fabric_mod_name + "_set_fabric_sense_len,\n"
	buf += " .is_state_remove = " + fabric_mod_name + "_is_state_remove,\n"
	buf += " /*\n"
	buf += " * Setup function pointers for generic logic in target_core_fabric_configfs.c\n"
	buf += " */\n"
	buf += " .fabric_make_wwn = " + fabric_mod_name + "_make_" + fabric_mod_port + ",\n"
	buf += " .fabric_drop_wwn = " + fabric_mod_name + "_drop_" + fabric_mod_port + ",\n"
	buf += " .fabric_make_tpg = " + fabric_mod_name + "_make_tpg,\n"
	buf += " .fabric_drop_tpg = " + fabric_mod_name + "_drop_tpg,\n"
	buf += " .fabric_post_link = NULL,\n"
	buf += " .fabric_pre_unlink = NULL,\n"
	buf += " .fabric_make_np = NULL,\n"
	buf += " .fabric_drop_np = NULL,\n"
	buf += " .fabric_make_nodeacl = " + fabric_mod_name + "_make_nodeacl,\n"
	buf += " .fabric_drop_nodeacl = " + fabric_mod_name + "_drop_nodeacl,\n"
	buf += "};\n\n"
	buf += "static int " + fabric_mod_name + "_register_configfs(void)\n"
	buf += "{\n"
	buf += " struct target_fabric_configfs *fabric;\n"
	buf += " int ret;\n\n"
	buf += " printk(KERN_INFO \"" + fabric_mod_name.upper() + " fabric module %s on %s/%s\"\n"
	buf += " \" on \"UTS_RELEASE\"\\n\"," + fabric_mod_name.upper() + "_VERSION, utsname()->sysname,\n"
	buf += " utsname()->machine);\n"
	buf += " /*\n"
	buf += " * Register the top level struct config_item_type with TCM core\n"
	buf += " */\n"
	# fabric_mod_name[4:] strips the "tcm_" prefix for the configfs name.
	buf += " fabric = target_fabric_configfs_init(THIS_MODULE, \"" + fabric_mod_name[4:] + "\");\n"
	buf += " if (IS_ERR(fabric)) {\n"
	buf += " printk(KERN_ERR \"target_fabric_configfs_init() failed\\n\");\n"
	buf += " return PTR_ERR(fabric);\n"
	buf += " }\n"
	buf += " /*\n"
	buf += " * Setup fabric->tf_ops from our local " + fabric_mod_name + "_ops\n"
	buf += " */\n"
	buf += " fabric->tf_ops = " + fabric_mod_name + "_ops;\n"
	buf += " /*\n"
	buf += " * Setup default attribute lists for various fabric->tf_cit_tmpl\n"
	buf += " */\n"
	buf += " TF_CIT_TMPL(fabric)->tfc_wwn_cit.ct_attrs = " + fabric_mod_name + "_wwn_attrs;\n"
	buf += " TF_CIT_TMPL(fabric)->tfc_tpg_base_cit.ct_attrs = NULL;\n"
	buf += " TF_CIT_TMPL(fabric)->tfc_tpg_attrib_cit.ct_attrs = NULL;\n"
	buf += " TF_CIT_TMPL(fabric)->tfc_tpg_param_cit.ct_attrs = NULL;\n"
	buf += " TF_CIT_TMPL(fabric)->tfc_tpg_np_base_cit.ct_attrs = NULL;\n"
	buf += " TF_CIT_TMPL(fabric)->tfc_tpg_nacl_base_cit.ct_attrs = NULL;\n"
	buf += " TF_CIT_TMPL(fabric)->tfc_tpg_nacl_attrib_cit.ct_attrs = NULL;\n"
	buf += " TF_CIT_TMPL(fabric)->tfc_tpg_nacl_auth_cit.ct_attrs = NULL;\n"
	buf += " TF_CIT_TMPL(fabric)->tfc_tpg_nacl_param_cit.ct_attrs = NULL;\n"
	buf += " /*\n"
	buf += " * Register the fabric for use within TCM\n"
	buf += " */\n"
	buf += " ret = target_fabric_configfs_register(fabric);\n"
	buf += " if (ret < 0) {\n"
	buf += " printk(KERN_ERR \"target_fabric_configfs_register() failed\"\n"
	buf += " \" for " + fabric_mod_name.upper() + "\\n\");\n"
	buf += " return ret;\n"
	buf += " }\n"
	buf += " /*\n"
	buf += " * Setup our local pointer to *fabric\n"
	buf += " */\n"
	buf += " " + fabric_mod_name + "_fabric_configfs = fabric;\n"
	buf += " printk(KERN_INFO \"" + fabric_mod_name.upper() + "[0] - Set fabric -> " + fabric_mod_name + "_fabric_configfs\\n\");\n"
	buf += " return 0;\n"
	buf += "};\n\n"
	buf += "static void __exit " + fabric_mod_name + "_deregister_configfs(void)\n"
	buf += "{\n"
	buf += " if (!" + fabric_mod_name + "_fabric_configfs)\n"
	buf += " return;\n\n"
	buf += " target_fabric_configfs_deregister(" + fabric_mod_name + "_fabric_configfs);\n"
	buf += " " + fabric_mod_name + "_fabric_configfs = NULL;\n"
	buf += " printk(KERN_INFO \"" + fabric_mod_name.upper() + "[0] - Cleared " + fabric_mod_name + "_fabric_configfs\\n\");\n"
	buf += "};\n\n"
	buf += "static int __init " + fabric_mod_name + "_init(void)\n"
	buf += "{\n"
	buf += " int ret;\n\n"
	buf += " ret = " + fabric_mod_name + "_register_configfs();\n"
	buf += " if (ret < 0)\n"
	buf += " return ret;\n\n"
	buf += " return 0;\n"
	buf += "};\n\n"
	buf += "static void __exit " + fabric_mod_name + "_exit(void)\n"
	buf += "{\n"
	buf += " " + fabric_mod_name + "_deregister_configfs();\n"
	buf += "};\n\n"
	buf += "MODULE_DESCRIPTION(\"" + fabric_mod_name.upper() + " series fabric driver\");\n"
	buf += "MODULE_LICENSE(\"GPL\");\n"
	buf += "module_init(" + fabric_mod_name + "_init);\n"
	buf += "module_exit(" + fabric_mod_name + "_exit);\n"
	ret = p.write(buf)
	# NOTE(review): on Python 2 file.write() returns None, so this check can
	# never fire; a failed write raises IOError instead -- confirm intent.
	if ret:
		tcm_mod_err("Unable to write f: " + f)
	p.close()
	return
def tcm_mod_scan_fabric_ops(tcm_dir):
fabric_ops_api = tcm_dir + "include/target/target_core_fabric.h"
print "Using tcm_mod_scan_fabric_ops: " + fabric_ops_api
process_fo = 0;
p = open(fabric_ops_api, 'r')
line = p.readline()
while line:
if process_fo == 0 and re.search('struct target_core_fabric_ops {', line):
line = p.readline()
continue
if process_fo == 0:
process_fo = 1;
line = p.readline()
# Search for function pointer
if not re.search('\(\*', line):
continue
fabric_ops.append(line.rstrip())
continue
line = p.readline()
# Search for function pointer
if not re.search('\(\*', line):
continue
fabric_ops.append(line.rstrip())
p.close()
return
def tcm_mod_dump_fabric_ops(proto_ident, fabric_mod_dir_var, fabric_mod_name):
    """Generate the <mod>_fabric.c and <mod>_fabric.h skeleton files.

    Walks the function-pointer lines previously collected into the
    module-level ``fabric_ops`` list (see tcm_mod_scan_fabric_ops) and,
    for every recognized target_core_fabric_ops callback, appends a stub
    C implementation to ``buf`` (written to <mod>_fabric.c) and a
    matching prototype to ``bufi`` (written to <mod>_fabric.h).

    Also reads the module-level ``fabric_mod_port`` name set up by the
    caller.  ``proto_ident`` selects which transport helper family
    ("FC", "SAS" or "iSCSI") the protocol-dependent stubs delegate to.
    """
    buf = ""
    bufi = ""

    f = fabric_mod_dir_var + "/" + fabric_mod_name + "_fabric.c"
    print "Writing file: " + f

    p = open(f, 'w')
    if not p:
        tcm_mod_err("Unable to open file: " + f)

    fi = fabric_mod_dir_var + "/" + fabric_mod_name + "_fabric.h"
    print "Writing file: " + fi

    pi = open(fi, 'w')
    if not pi:
        tcm_mod_err("Unable to open file: " + fi)

    # Fixed preamble of the generated .c file: kernel / SCSI /
    # target-core headers plus the module's own base and fabric headers.
    buf = "#include <linux/slab.h>\n"
    buf += "#include <linux/kthread.h>\n"
    buf += "#include <linux/types.h>\n"
    buf += "#include <linux/list.h>\n"
    buf += "#include <linux/types.h>\n"
    buf += "#include <linux/string.h>\n"
    buf += "#include <linux/ctype.h>\n"
    buf += "#include <asm/unaligned.h>\n"
    buf += "#include <scsi/scsi.h>\n"
    buf += "#include <scsi/scsi_host.h>\n"
    buf += "#include <scsi/scsi_device.h>\n"
    buf += "#include <scsi/scsi_cmnd.h>\n"
    buf += "#include <scsi/libfc.h>\n\n"
    buf += "#include <target/target_core_base.h>\n"
    buf += "#include <target/target_core_fabric.h>\n"
    buf += "#include <target/target_core_configfs.h>\n\n"
    buf += "#include \"" + fabric_mod_name + "_base.h\"\n"
    buf += "#include \"" + fabric_mod_name + "_fabric.h\"\n\n"

    # Unconditional helpers always emitted: trivial true/false predicates.
    buf += "int " + fabric_mod_name + "_check_true(struct se_portal_group *se_tpg)\n"
    buf += "{\n"
    buf += " return 1;\n"
    buf += "}\n\n"
    bufi += "int " + fabric_mod_name + "_check_true(struct se_portal_group *);\n"

    buf += "int " + fabric_mod_name + "_check_false(struct se_portal_group *se_tpg)\n"
    buf += "{\n"
    buf += " return 0;\n"
    buf += "}\n\n"
    bufi += "int " + fabric_mod_name + "_check_false(struct se_portal_group *);\n"

    # One stub per recognized fabric_ops callback.  Each 'if' below keys
    # on the callback's name appearing in the scanned pointer line.
    total_fabric_ops = len(fabric_ops)
    i = 0
    while i < total_fabric_ops:
        fo = fabric_ops[i]
        i += 1
        # print "fabric_ops: " + fo

        if re.search('get_fabric_name', fo):
            buf += "char *" + fabric_mod_name + "_get_fabric_name(void)\n"
            buf += "{\n"
            # NOTE: strips the assumed "tcm_" prefix from the module name.
            buf += " return \"" + fabric_mod_name[4:] + "\";\n"
            buf += "}\n\n"
            bufi += "char *" + fabric_mod_name + "_get_fabric_name(void);\n"
            continue

        if re.search('get_fabric_proto_ident', fo):
            buf += "u8 " + fabric_mod_name + "_get_fabric_proto_ident(struct se_portal_group *se_tpg)\n"
            buf += "{\n"
            buf += " struct " + fabric_mod_name + "_tpg *tpg = container_of(se_tpg,\n"
            buf += " struct " + fabric_mod_name + "_tpg, se_tpg);\n"
            buf += " struct " + fabric_mod_name + "_" + fabric_mod_port + " *" + fabric_mod_port + " = tpg->" + fabric_mod_port + ";\n"
            buf += " u8 proto_id;\n\n"
            buf += " switch (" + fabric_mod_port + "->" + fabric_mod_port + "_proto_id) {\n"
            # Protocol-dependent delegate; only the selected transport's
            # case is emitted (it doubles as the default).
            if proto_ident == "FC":
                buf += " case SCSI_PROTOCOL_FCP:\n"
                buf += " default:\n"
                buf += " proto_id = fc_get_fabric_proto_ident(se_tpg);\n"
                buf += " break;\n"
            elif proto_ident == "SAS":
                buf += " case SCSI_PROTOCOL_SAS:\n"
                buf += " default:\n"
                buf += " proto_id = sas_get_fabric_proto_ident(se_tpg);\n"
                buf += " break;\n"
            elif proto_ident == "iSCSI":
                buf += " case SCSI_PROTOCOL_ISCSI:\n"
                buf += " default:\n"
                buf += " proto_id = iscsi_get_fabric_proto_ident(se_tpg);\n"
                buf += " break;\n"
            buf += " }\n\n"
            buf += " return proto_id;\n"
            buf += "}\n\n"
            bufi += "u8 " + fabric_mod_name + "_get_fabric_proto_ident(struct se_portal_group *);\n"

        if re.search('get_wwn', fo):
            buf += "char *" + fabric_mod_name + "_get_fabric_wwn(struct se_portal_group *se_tpg)\n"
            buf += "{\n"
            buf += " struct " + fabric_mod_name + "_tpg *tpg = container_of(se_tpg,\n"
            buf += " struct " + fabric_mod_name + "_tpg, se_tpg);\n"
            buf += " struct " + fabric_mod_name + "_" + fabric_mod_port + " *" + fabric_mod_port + " = tpg->" + fabric_mod_port + ";\n\n"
            buf += " return &" + fabric_mod_port + "->" + fabric_mod_port + "_name[0];\n"
            buf += "}\n\n"
            bufi += "char *" + fabric_mod_name + "_get_fabric_wwn(struct se_portal_group *);\n"

        if re.search('get_tag', fo):
            buf += "u16 " + fabric_mod_name + "_get_tag(struct se_portal_group *se_tpg)\n"
            buf += "{\n"
            buf += " struct " + fabric_mod_name + "_tpg *tpg = container_of(se_tpg,\n"
            buf += " struct " + fabric_mod_name + "_tpg, se_tpg);\n"
            buf += " return tpg->" + fabric_mod_port + "_tpgt;\n"
            buf += "}\n\n"
            bufi += "u16 " + fabric_mod_name + "_get_tag(struct se_portal_group *);\n"

        if re.search('get_default_depth', fo):
            buf += "u32 " + fabric_mod_name + "_get_default_depth(struct se_portal_group *se_tpg)\n"
            buf += "{\n"
            buf += " return 1;\n"
            buf += "}\n\n"
            bufi += "u32 " + fabric_mod_name + "_get_default_depth(struct se_portal_group *);\n"

        if re.search('get_pr_transport_id\)\(', fo):
            buf += "u32 " + fabric_mod_name + "_get_pr_transport_id(\n"
            buf += " struct se_portal_group *se_tpg,\n"
            buf += " struct se_node_acl *se_nacl,\n"
            buf += " struct t10_pr_registration *pr_reg,\n"
            buf += " int *format_code,\n"
            buf += " unsigned char *buf)\n"
            buf += "{\n"
            buf += " struct " + fabric_mod_name + "_tpg *tpg = container_of(se_tpg,\n"
            buf += " struct " + fabric_mod_name + "_tpg, se_tpg);\n"
            buf += " struct " + fabric_mod_name + "_" + fabric_mod_port + " *" + fabric_mod_port + " = tpg->" + fabric_mod_port + ";\n"
            buf += " int ret = 0;\n\n"
            buf += " switch (" + fabric_mod_port + "->" + fabric_mod_port + "_proto_id) {\n"
            if proto_ident == "FC":
                buf += " case SCSI_PROTOCOL_FCP:\n"
                buf += " default:\n"
                buf += " ret = fc_get_pr_transport_id(se_tpg, se_nacl, pr_reg,\n"
                buf += " format_code, buf);\n"
                buf += " break;\n"
            elif proto_ident == "SAS":
                buf += " case SCSI_PROTOCOL_SAS:\n"
                buf += " default:\n"
                buf += " ret = sas_get_pr_transport_id(se_tpg, se_nacl, pr_reg,\n"
                buf += " format_code, buf);\n"
                buf += " break;\n"
            elif proto_ident == "iSCSI":
                buf += " case SCSI_PROTOCOL_ISCSI:\n"
                buf += " default:\n"
                buf += " ret = iscsi_get_pr_transport_id(se_tpg, se_nacl, pr_reg,\n"
                buf += " format_code, buf);\n"
                buf += " break;\n"
            buf += " }\n\n"
            buf += " return ret;\n"
            buf += "}\n\n"
            bufi += "u32 " + fabric_mod_name + "_get_pr_transport_id(struct se_portal_group *,\n"
            bufi += " struct se_node_acl *, struct t10_pr_registration *,\n"
            bufi += " int *, unsigned char *);\n"

        if re.search('get_pr_transport_id_len\)\(', fo):
            buf += "u32 " + fabric_mod_name + "_get_pr_transport_id_len(\n"
            buf += " struct se_portal_group *se_tpg,\n"
            buf += " struct se_node_acl *se_nacl,\n"
            buf += " struct t10_pr_registration *pr_reg,\n"
            buf += " int *format_code)\n"
            buf += "{\n"
            buf += " struct " + fabric_mod_name + "_tpg *tpg = container_of(se_tpg,\n"
            buf += " struct " + fabric_mod_name + "_tpg, se_tpg);\n"
            buf += " struct " + fabric_mod_name + "_" + fabric_mod_port + " *" + fabric_mod_port + " = tpg->" + fabric_mod_port + ";\n"
            buf += " int ret = 0;\n\n"
            buf += " switch (" + fabric_mod_port + "->" + fabric_mod_port + "_proto_id) {\n"
            if proto_ident == "FC":
                buf += " case SCSI_PROTOCOL_FCP:\n"
                buf += " default:\n"
                buf += " ret = fc_get_pr_transport_id_len(se_tpg, se_nacl, pr_reg,\n"
                buf += " format_code);\n"
                buf += " break;\n"
            elif proto_ident == "SAS":
                buf += " case SCSI_PROTOCOL_SAS:\n"
                buf += " default:\n"
                buf += " ret = sas_get_pr_transport_id_len(se_tpg, se_nacl, pr_reg,\n"
                buf += " format_code);\n"
                buf += " break;\n"
            elif proto_ident == "iSCSI":
                buf += " case SCSI_PROTOCOL_ISCSI:\n"
                buf += " default:\n"
                buf += " ret = iscsi_get_pr_transport_id_len(se_tpg, se_nacl, pr_reg,\n"
                buf += " format_code);\n"
                buf += " break;\n"
            buf += " }\n\n"
            buf += " return ret;\n"
            buf += "}\n\n"
            bufi += "u32 " + fabric_mod_name + "_get_pr_transport_id_len(struct se_portal_group *,\n"
            bufi += " struct se_node_acl *, struct t10_pr_registration *,\n"
            bufi += " int *);\n"

        if re.search('parse_pr_out_transport_id\)\(', fo):
            buf += "char *" + fabric_mod_name + "_parse_pr_out_transport_id(\n"
            buf += " struct se_portal_group *se_tpg,\n"
            buf += " const char *buf,\n"
            buf += " u32 *out_tid_len,\n"
            buf += " char **port_nexus_ptr)\n"
            buf += "{\n"
            buf += " struct " + fabric_mod_name + "_tpg *tpg = container_of(se_tpg,\n"
            buf += " struct " + fabric_mod_name + "_tpg, se_tpg);\n"
            buf += " struct " + fabric_mod_name + "_" + fabric_mod_port + " *" + fabric_mod_port + " = tpg->" + fabric_mod_port + ";\n"
            buf += " char *tid = NULL;\n\n"
            buf += " switch (" + fabric_mod_port + "->" + fabric_mod_port + "_proto_id) {\n"
            # NOTE(review): unlike the other switch emitters above, no
            # "break;" line is generated here; harmless in the generated C
            # since the single case falls straight to the closing brace.
            if proto_ident == "FC":
                buf += " case SCSI_PROTOCOL_FCP:\n"
                buf += " default:\n"
                buf += " tid = fc_parse_pr_out_transport_id(se_tpg, buf, out_tid_len,\n"
                buf += " port_nexus_ptr);\n"
            elif proto_ident == "SAS":
                buf += " case SCSI_PROTOCOL_SAS:\n"
                buf += " default:\n"
                buf += " tid = sas_parse_pr_out_transport_id(se_tpg, buf, out_tid_len,\n"
                buf += " port_nexus_ptr);\n"
            elif proto_ident == "iSCSI":
                buf += " case SCSI_PROTOCOL_ISCSI:\n"
                buf += " default:\n"
                buf += " tid = iscsi_parse_pr_out_transport_id(se_tpg, buf, out_tid_len,\n"
                buf += " port_nexus_ptr);\n"
            buf += " }\n\n"
            buf += " return tid;\n"
            buf += "}\n\n"
            bufi += "char *" + fabric_mod_name + "_parse_pr_out_transport_id(struct se_portal_group *,\n"
            bufi += " const char *, u32 *, char **);\n"

        if re.search('alloc_fabric_acl\)\(', fo):
            buf += "struct se_node_acl *" + fabric_mod_name + "_alloc_fabric_acl(struct se_portal_group *se_tpg)\n"
            buf += "{\n"
            buf += " struct " + fabric_mod_name + "_nacl *nacl;\n\n"
            buf += " nacl = kzalloc(sizeof(struct " + fabric_mod_name + "_nacl), GFP_KERNEL);\n"
            buf += " if (!nacl) {\n"
            buf += " printk(KERN_ERR \"Unable to allocate struct " + fabric_mod_name + "_nacl\\n\");\n"
            buf += " return NULL;\n"
            buf += " }\n\n"
            buf += " return &nacl->se_node_acl;\n"
            buf += "}\n\n"
            bufi += "struct se_node_acl *" + fabric_mod_name + "_alloc_fabric_acl(struct se_portal_group *);\n"

        if re.search('release_fabric_acl\)\(', fo):
            buf += "void " + fabric_mod_name + "_release_fabric_acl(\n"
            buf += " struct se_portal_group *se_tpg,\n"
            buf += " struct se_node_acl *se_nacl)\n"
            buf += "{\n"
            buf += " struct " + fabric_mod_name + "_nacl *nacl = container_of(se_nacl,\n"
            buf += " struct " + fabric_mod_name + "_nacl, se_node_acl);\n"
            buf += " kfree(nacl);\n"
            buf += "}\n\n"
            bufi += "void " + fabric_mod_name + "_release_fabric_acl(struct se_portal_group *,\n"
            bufi += " struct se_node_acl *);\n"

        if re.search('tpg_get_inst_index\)\(', fo):
            buf += "u32 " + fabric_mod_name + "_tpg_get_inst_index(struct se_portal_group *se_tpg)\n"
            buf += "{\n"
            buf += " return 1;\n"
            buf += "}\n\n"
            bufi += "u32 " + fabric_mod_name + "_tpg_get_inst_index(struct se_portal_group *);\n"

        # The remaining callbacks are emitted as no-op / zero-returning
        # stubs with matching prototypes.
        if re.search('\*release_cmd\)\(', fo):
            buf += "void " + fabric_mod_name + "_release_cmd(struct se_cmd *se_cmd)\n"
            buf += "{\n"
            buf += " return;\n"
            buf += "}\n\n"
            bufi += "void " + fabric_mod_name + "_release_cmd(struct se_cmd *);\n"

        if re.search('shutdown_session\)\(', fo):
            buf += "int " + fabric_mod_name + "_shutdown_session(struct se_session *se_sess)\n"
            buf += "{\n"
            buf += " return 0;\n"
            buf += "}\n\n"
            bufi += "int " + fabric_mod_name + "_shutdown_session(struct se_session *);\n"

        if re.search('close_session\)\(', fo):
            buf += "void " + fabric_mod_name + "_close_session(struct se_session *se_sess)\n"
            buf += "{\n"
            buf += " return;\n"
            buf += "}\n\n"
            bufi += "void " + fabric_mod_name + "_close_session(struct se_session *);\n"

        if re.search('stop_session\)\(', fo):
            buf += "void " + fabric_mod_name + "_stop_session(struct se_session *se_sess, int sess_sleep , int conn_sleep)\n"
            buf += "{\n"
            buf += " return;\n"
            buf += "}\n\n"
            bufi += "void " + fabric_mod_name + "_stop_session(struct se_session *, int, int);\n"

        if re.search('fall_back_to_erl0\)\(', fo):
            buf += "void " + fabric_mod_name + "_reset_nexus(struct se_session *se_sess)\n"
            buf += "{\n"
            buf += " return;\n"
            buf += "}\n\n"
            bufi += "void " + fabric_mod_name + "_reset_nexus(struct se_session *);\n"

        if re.search('sess_logged_in\)\(', fo):
            buf += "int " + fabric_mod_name + "_sess_logged_in(struct se_session *se_sess)\n"
            buf += "{\n"
            buf += " return 0;\n"
            buf += "}\n\n"
            bufi += "int " + fabric_mod_name + "_sess_logged_in(struct se_session *);\n"

        if re.search('sess_get_index\)\(', fo):
            buf += "u32 " + fabric_mod_name + "_sess_get_index(struct se_session *se_sess)\n"
            buf += "{\n"
            buf += " return 0;\n"
            buf += "}\n\n"
            bufi += "u32 " + fabric_mod_name + "_sess_get_index(struct se_session *);\n"

        if re.search('write_pending\)\(', fo):
            buf += "int " + fabric_mod_name + "_write_pending(struct se_cmd *se_cmd)\n"
            buf += "{\n"
            buf += " return 0;\n"
            buf += "}\n\n"
            bufi += "int " + fabric_mod_name + "_write_pending(struct se_cmd *);\n"

        if re.search('write_pending_status\)\(', fo):
            buf += "int " + fabric_mod_name + "_write_pending_status(struct se_cmd *se_cmd)\n"
            buf += "{\n"
            buf += " return 0;\n"
            buf += "}\n\n"
            bufi += "int " + fabric_mod_name + "_write_pending_status(struct se_cmd *);\n"

        if re.search('set_default_node_attributes\)\(', fo):
            buf += "void " + fabric_mod_name + "_set_default_node_attrs(struct se_node_acl *nacl)\n"
            buf += "{\n"
            buf += " return;\n"
            buf += "}\n\n"
            bufi += "void " + fabric_mod_name + "_set_default_node_attrs(struct se_node_acl *);\n"

        if re.search('get_task_tag\)\(', fo):
            buf += "u32 " + fabric_mod_name + "_get_task_tag(struct se_cmd *se_cmd)\n"
            buf += "{\n"
            buf += " return 0;\n"
            buf += "}\n\n"
            bufi += "u32 " + fabric_mod_name + "_get_task_tag(struct se_cmd *);\n"

        if re.search('get_cmd_state\)\(', fo):
            buf += "int " + fabric_mod_name + "_get_cmd_state(struct se_cmd *se_cmd)\n"
            buf += "{\n"
            buf += " return 0;\n"
            buf += "}\n\n"
            bufi += "int " + fabric_mod_name + "_get_cmd_state(struct se_cmd *);\n"

        if re.search('queue_data_in\)\(', fo):
            buf += "int " + fabric_mod_name + "_queue_data_in(struct se_cmd *se_cmd)\n"
            buf += "{\n"
            buf += " return 0;\n"
            buf += "}\n\n"
            bufi += "int " + fabric_mod_name + "_queue_data_in(struct se_cmd *);\n"

        if re.search('queue_status\)\(', fo):
            buf += "int " + fabric_mod_name + "_queue_status(struct se_cmd *se_cmd)\n"
            buf += "{\n"
            buf += " return 0;\n"
            buf += "}\n\n"
            bufi += "int " + fabric_mod_name + "_queue_status(struct se_cmd *);\n"

        if re.search('queue_tm_rsp\)\(', fo):
            buf += "int " + fabric_mod_name + "_queue_tm_rsp(struct se_cmd *se_cmd)\n"
            buf += "{\n"
            buf += " return 0;\n"
            buf += "}\n\n"
            bufi += "int " + fabric_mod_name + "_queue_tm_rsp(struct se_cmd *);\n"

        if re.search('get_fabric_sense_len\)\(', fo):
            buf += "u16 " + fabric_mod_name + "_get_fabric_sense_len(void)\n"
            buf += "{\n"
            buf += " return 0;\n"
            buf += "}\n\n"
            bufi += "u16 " + fabric_mod_name + "_get_fabric_sense_len(void);\n"

        if re.search('set_fabric_sense_len\)\(', fo):
            buf += "u16 " + fabric_mod_name + "_set_fabric_sense_len(struct se_cmd *se_cmd, u32 sense_length)\n"
            buf += "{\n"
            buf += " return 0;\n"
            buf += "}\n\n"
            bufi += "u16 " + fabric_mod_name + "_set_fabric_sense_len(struct se_cmd *, u32);\n"

        if re.search('is_state_remove\)\(', fo):
            buf += "int " + fabric_mod_name + "_is_state_remove(struct se_cmd *se_cmd)\n"
            buf += "{\n"
            buf += " return 0;\n"
            buf += "}\n\n"
            bufi += "int " + fabric_mod_name + "_is_state_remove(struct se_cmd *);\n"

    # Write out both generated files.  NOTE(review): in Python 2
    # file.write() returns None, so these "if ret" error checks can
    # never fire; kept as-is to preserve behavior.
    ret = p.write(buf)
    if ret:
        tcm_mod_err("Unable to write f: " + f)
    p.close()

    ret = pi.write(bufi)
    if ret:
        tcm_mod_err("Unable to write fi: " + fi)
    pi.close()
    return
def tcm_mod_build_kbuild(fabric_mod_dir_var, fabric_mod_name):
buf = ""
f = fabric_mod_dir_var + "/Makefile"
print "Writing file: " + f
p = open(f, 'w')
if not p:
tcm_mod_err("Unable to open file: " + f)
buf += fabric_mod_name + "-objs := " + fabric_mod_name + "_fabric.o \\\n"
buf += " " + fabric_mod_name + "_configfs.o\n"
buf += "obj-$(CONFIG_" + fabric_mod_name.upper() + ") += " + fabric_mod_name + ".o\n"
ret = p.write(buf)
if ret:
tcm_mod_err("Unable to write f: " + f)
p.close()
return
def tcm_mod_build_kconfig(fabric_mod_dir_var, fabric_mod_name):
buf = ""
f = fabric_mod_dir_var + "/Kconfig"
print "Writing file: " + f
p = open(f, 'w')
if not p:
tcm_mod_err("Unable to open file: " + f)
buf = "config " + fabric_mod_name.upper() + "\n"
buf += " tristate \"" + fabric_mod_name.upper() + " fabric module\"\n"
buf += " depends on TARGET_CORE && CONFIGFS_FS\n"
buf += " default n\n"
buf += " ---help---\n"
buf += " Say Y here to enable the " + fabric_mod_name.upper() + " fabric module\n"
ret = p.write(buf)
if ret:
tcm_mod_err("Unable to write f: " + f)
p.close()
return
def tcm_mod_add_kbuild(tcm_dir, fabric_mod_name):
    """Append the new fabric module's obj-$(CONFIG_...) entry to the
    in-tree drivers/target/Makefile."""
    kbuild = tcm_dir + "/drivers/target/Makefile"
    entry = "obj-$(CONFIG_" + fabric_mod_name.upper() + ") += " + fabric_mod_name.lower() + "/\n"

    makefile = open(kbuild, 'a')
    makefile.write(entry)
    makefile.close()
    return
def tcm_mod_add_kconfig(tcm_dir, fabric_mod_name):
    """Append a 'source' line for the new fabric module's Kconfig to the
    in-tree drivers/target/Kconfig."""
    kconfig = tcm_dir + "/drivers/target/Kconfig"
    entry = "source \"drivers/target/" + fabric_mod_name.lower() + "/Kconfig\"\n"

    cfg = open(kconfig, 'a')
    cfg.write(entry)
    cfg.close()
    return
def main(modname, proto_ident):
# proto_ident = "FC"
# proto_ident = "SAS"
# proto_ident = "iSCSI"
tcm_dir = os.getcwd();
tcm_dir += "/../../"
print "tcm_dir: " + tcm_dir
fabric_mod_name = modname
fabric_mod_dir = tcm_dir + "drivers/target/" + fabric_mod_name
print "Set fabric_mod_name: " + fabric_mod_name
print "Set fabric_mod_dir: " + fabric_mod_dir
print "Using proto_ident: " + proto_ident
if proto_ident != "FC" and proto_ident != "SAS" and proto_ident != "iSCSI":
print "Unsupported proto_ident: " + proto_ident
sys.exit(1)
ret = tcm_mod_create_module_subdir(fabric_mod_dir)
if ret:
print "tcm_mod_create_module_subdir() failed because module already exists!"
sys.exit(1)
tcm_mod_build_base_includes(proto_ident, fabric_mod_dir, fabric_mod_name)
tcm_mod_scan_fabric_ops(tcm_dir)
tcm_mod_dump_fabric_ops(proto_ident, fabric_mod_dir, fabric_mod_name)
tcm_mod_build_configfs(proto_ident, fabric_mod_dir, fabric_mod_name)
tcm_mod_build_kbuild(fabric_mod_dir, fabric_mod_name)
tcm_mod_build_kconfig(fabric_mod_dir, fabric_mod_name)
input = raw_input("Would you like to add " + fabric_mod_name + "to drivers/target/Makefile..? [yes,no]: ")
if input == "yes" or input == "y":
tcm_mod_add_kbuild(tcm_dir, fabric_mod_name)
input = raw_input("Would you like to add " + fabric_mod_name + "to drivers/target/Kconfig..? [yes,no]: ")
if input == "yes" or input == "y":
tcm_mod_add_kconfig(tcm_dir, fabric_mod_name)
return
# Command-line entry point: both -m/--modulename and -p/--protoident are
# mandatory; a missing option prints usage and exits non-zero.
parser = optparse.OptionParser()
parser.add_option('-m', '--modulename', help='Module name', dest='modname',
                  action='store', nargs=1, type='string')
parser.add_option('-p', '--protoident', help='Protocol Ident', dest='protoident',
                  action='store', nargs=1, type='string')

(opts, args) = parser.parse_args()

# NOTE(review): this validation runs at import time, not under the
# __main__ guard below — intentional for a standalone script.
mandatories = ['modname', 'protoident']
for m in mandatories:
    if not opts.__dict__[m]:
        print "mandatory option is missing\n"
        parser.print_help()
        exit(-1)

if __name__ == "__main__":
    main(str(opts.modname), opts.protoident)
| gpl-2.0 |
mKeRix/home-assistant | homeassistant/components/bmw_connected_drive/binary_sensor.py | 6 | 8402 | """Reads vehicle status from BMW connected drive portal."""
import logging
from bimmer_connected.state import ChargingState, LockState
from homeassistant.components.binary_sensor import BinarySensorEntity
from homeassistant.const import ATTR_ATTRIBUTION, LENGTH_KILOMETERS
from . import DOMAIN as BMW_DOMAIN
from .const import ATTRIBUTION
_LOGGER = logging.getLogger(__name__)
# Binary sensors available on every vehicle.
# Each entry maps an attribute key to [sensor name suffix, device class, icon].
SENSOR_TYPES = {
    "lids": ["Doors", "opening", "mdi:car-door-lock"],
    "windows": ["Windows", "opening", "mdi:car-door"],
    "door_lock_state": ["Door lock state", "lock", "mdi:car-key"],
    "lights_parking": ["Parking lights", "light", "mdi:car-parking-lights"],
    "condition_based_services": ["Condition based services", "problem", "mdi:wrench"],
    "check_control_messages": ["Control messages", "problem", "mdi:car-tire-alert"],
}

# Extra sensors for vehicles with a high-voltage battery; extended below
# so it also contains every common SENSOR_TYPES entry.
SENSOR_TYPES_ELEC = {
    "charging_status": ["Charging status", "power", "mdi:ev-station"],
    "connection_status": ["Connection status", "plug", "mdi:car-electric"],
}

SENSOR_TYPES_ELEC.update(SENSOR_TYPES)
def setup_platform(hass, config, add_entities, discovery_info=None):
    """Set up the BMW sensors.

    For every vehicle of every configured account, create one
    BMWConnectedDriveSensor per supported attribute: electrified
    vehicles (has_hv_battery) get the extended SENSOR_TYPES_ELEC set,
    combustion-only vehicles the common SENSOR_TYPES set.

    Fix: the two branches previously duplicated the sensor-construction
    loop verbatim; the branch now only selects the sensor-type dict.
    """
    accounts = hass.data[BMW_DOMAIN]
    _LOGGER.debug("Found BMW accounts: %s", ", ".join([a.name for a in accounts]))
    devices = []
    for account in accounts:
        for vehicle in account.account.vehicles:
            if vehicle.has_hv_battery:
                _LOGGER.debug("BMW with a high voltage battery")
                sensor_types = SENSOR_TYPES_ELEC
            elif vehicle.has_internal_combustion_engine:
                _LOGGER.debug("BMW with an internal combustion engine")
                sensor_types = SENSOR_TYPES
            else:
                # Vehicle reports neither drivetrain flag: no binary
                # sensors (matches the original if/elif behavior).
                continue
            for key, value in sorted(sensor_types.items()):
                if key in vehicle.available_attributes:
                    device = BMWConnectedDriveSensor(
                        account, vehicle, key, value[0], value[1], value[2]
                    )
                    devices.append(device)
    add_entities(devices, True)
class BMWConnectedDriveSensor(BinarySensorEntity):
    """Representation of a BMW vehicle binary sensor."""

    def __init__(
        self, account, vehicle, attribute: str, sensor_name, device_class, icon
    ):
        """Initialize sensor.

        account/vehicle are the connected-drive account wrapper and the
        bimmer_connected vehicle; attribute is the SENSOR_TYPES key this
        entity reports on.
        """
        self._account = account
        self._vehicle = vehicle
        # Which vehicle-state attribute this sensor tracks (SENSOR_TYPES key).
        self._attribute = attribute
        self._name = f"{self._vehicle.name} {self._attribute}"
        # VIN + attribute gives a stable per-vehicle unique id.
        self._unique_id = f"{self._vehicle.vin}-{self._attribute}"
        self._sensor_name = sensor_name
        self._device_class = device_class
        self._icon = icon
        # Current binary state; None until the first update() runs.
        self._state = None

    @property
    def should_poll(self) -> bool:
        """Return False.

        Data update is triggered from BMWConnectedDriveEntity.
        """
        return False

    @property
    def unique_id(self):
        """Return the unique ID of the binary sensor."""
        return self._unique_id

    @property
    def name(self):
        """Return the name of the binary sensor."""
        return self._name

    @property
    def icon(self):
        """Icon to use in the frontend, if any."""
        return self._icon

    @property
    def device_class(self):
        """Return the class of the binary sensor."""
        return self._device_class

    @property
    def is_on(self):
        """Return the state of the binary sensor."""
        return self._state

    @property
    def device_state_attributes(self):
        """Return the state attributes of the binary sensor.

        The attribute payload depends on which vehicle attribute this
        sensor tracks (per-lid/window states, lock state, CBS reports,
        check-control messages, or charging/connection details).
        """
        vehicle_state = self._vehicle.state
        result = {
            "car": self._vehicle.name,
            ATTR_ATTRIBUTION: ATTRIBUTION,
        }

        if self._attribute == "lids":
            # One attribute per lid (doors, trunk, hood, ...).
            for lid in vehicle_state.lids:
                result[lid.name] = lid.state.value
        elif self._attribute == "windows":
            for window in vehicle_state.windows:
                result[window.name] = window.state.value
        elif self._attribute == "door_lock_state":
            result["door_lock_state"] = vehicle_state.door_lock_state.value
            result["last_update_reason"] = vehicle_state.last_update_reason
        elif self._attribute == "lights_parking":
            result["lights_parking"] = vehicle_state.parking_lights.value
        elif self._attribute == "condition_based_services":
            # Flatten every CBS report into "<service> status/date/distance"
            # entries (see _format_cbs_report).
            for report in vehicle_state.condition_based_services:
                result.update(self._format_cbs_report(report))
        elif self._attribute == "check_control_messages":
            check_control_messages = vehicle_state.check_control_messages
            has_check_control_messages = vehicle_state.has_check_control_messages
            if has_check_control_messages:
                cbs_list = []
                for message in check_control_messages:
                    cbs_list.append(message["ccmDescriptionShort"])
                result["check_control_messages"] = cbs_list
            else:
                result["check_control_messages"] = "OK"
        elif self._attribute == "charging_status":
            result["charging_status"] = vehicle_state.charging_status.value
            result["last_charging_end_result"] = vehicle_state.last_charging_end_result
        elif self._attribute == "connection_status":
            result["connection_status"] = vehicle_state.connection_status

        return sorted(result.items())

    def update(self):
        """Read new state data from the library.

        Maps the tracked vehicle attribute onto the binary on/off
        convention of this sensor's device class.
        """
        vehicle_state = self._vehicle.state

        # device class opening: On means open, Off means closed
        if self._attribute == "lids":
            _LOGGER.debug("Status of lid: %s", vehicle_state.all_lids_closed)
            self._state = not vehicle_state.all_lids_closed
        if self._attribute == "windows":
            self._state = not vehicle_state.all_windows_closed
        # device class lock: On means unlocked, Off means locked
        if self._attribute == "door_lock_state":
            # Possible values: LOCKED, SECURED, SELECTIVE_LOCKED, UNLOCKED
            self._state = vehicle_state.door_lock_state not in [
                LockState.LOCKED,
                LockState.SECURED,
            ]
        # device class light: On means light detected, Off means no light
        if self._attribute == "lights_parking":
            self._state = vehicle_state.are_parking_lights_on
        # device class problem: On means problem detected, Off means no problem
        if self._attribute == "condition_based_services":
            self._state = not vehicle_state.are_all_cbs_ok
        if self._attribute == "check_control_messages":
            self._state = vehicle_state.has_check_control_messages
        # device class power: On means power detected, Off means no power
        if self._attribute == "charging_status":
            self._state = vehicle_state.charging_status in [ChargingState.CHARGING]
        # device class plug: On means device is plugged in,
        # Off means device is unplugged
        if self._attribute == "connection_status":
            self._state = vehicle_state.connection_status == "CONNECTED"

    def _format_cbs_report(self, report):
        """Flatten one condition-based-service report into attribute entries.

        Produces "<service> status", and optionally "<service> date" and
        "<service> distance" (converted to the configured unit system).
        """
        result = {}
        service_type = report.service_type.lower().replace("_", " ")
        result[f"{service_type} status"] = report.state.value
        if report.due_date is not None:
            result[f"{service_type} date"] = report.due_date.strftime("%Y-%m-%d")
        if report.due_distance is not None:
            distance = round(
                self.hass.config.units.length(report.due_distance, LENGTH_KILOMETERS)
            )
            result[
                f"{service_type} distance"
            ] = f"{distance} {self.hass.config.units.length_unit}"
        return result

    def update_callback(self):
        """Schedule a state update."""
        self.schedule_update_ha_state(True)

    async def async_added_to_hass(self):
        """Add callback after being added to hass.

        Show latest data after startup.
        """
        self._account.add_update_listener(self.update_callback)
| mit |
vad/django-cms | cms/tests/menu_page_viewperm_staff.py | 24 | 18485 | # -*- coding: utf-8 -*-
from __future__ import with_statement
from django.test.utils import override_settings
from django.contrib.auth import get_user_model
from cms.tests.menu_page_viewperm import ViewPermissionTests
__all__ = [
'ViewPermissionComplexMenuStaffNodeTests',
]
@override_settings(
CMS_PERMISSION=True,
CMS_PUBLIC_FOR='staff',
)
class ViewPermissionComplexMenuStaffNodeTests(ViewPermissionTests):
"""
Test CMS_PUBLIC_FOR=staff group access and menu nodes rendering
"""
def test_public_pages_anonymous_norestrictions(self):
"""
All pages are INVISIBLE to an anonymous user
"""
all_pages = self._setup_tree_pages()
granted = []
self.assertGrantedVisibility(all_pages, granted)
def test_public_menu_anonymous_user(self):
"""
Anonymous sees nothing, as he is no staff
"""
self._setup_user_groups()
all_pages = self._setup_tree_pages()
self._setup_view_restrictions()
granted = []
self.assertGrantedVisibility(all_pages, granted)
def test_node_staff_access_page_and_children_group_1(self):
"""
simulate behaviour of group b member
group_b_ACCESS_PAGE_AND_CHILDREN to page_b
staff user
"""
self._setup_user_groups()
all_pages = self._setup_tree_pages()
self._setup_view_restrictions()
granted = ['page_a',
'page_b',
'page_b_a',
'page_b_b',
'page_b_c',
'page_b_d',
'page_c',
'page_c_a',
'page_c_b',
'page_d_a',
'page_d_b',
'page_d_c',
'page_d_d',
]
self.assertGrantedVisibility(all_pages, granted, username='user_1')
# user 1 is member of group_b_access_page_and_children
if get_user_model().USERNAME_FIELD == 'email':
user = get_user_model().objects.get(email='user_1@django-cms.org')
else:
user = get_user_model().objects.get(username='user_1')
urls = self.get_url_dict(all_pages)
# call /
self.assertViewAllowed(urls["/en/page_b/"], user)
self.assertViewAllowed(urls["/en/page_b/page_b_a/"], user)
self.assertViewAllowed(urls["/en/page_b/page_b_b/"], user)
self.assertViewNotAllowed(urls["/en/page_b/page_b_b/page_b_b_a/"], user)
self.assertViewAllowed(urls["/en/page_c/"], user)
self.assertViewAllowed(urls["/en/page_d/page_d_a/"], user)
def test_node_staff_access_page_and_children_group_1_no_staff(self):
"""
simulate behaviour of group b member
group_b_ACCESS_PAGE_AND_CHILDREN to page_b
no staff user
"""
self._setup_user_groups()
all_pages = self._setup_tree_pages()
self._setup_view_restrictions()
granted = [
'page_b',
'page_b_a',
'page_b_b',
'page_b_c',
'page_b_d',
]
self.assertGrantedVisibility(all_pages, granted, username='user_1_nostaff')
if get_user_model().USERNAME_FIELD == 'email':
user = get_user_model().objects.get(email='user_1_nostaff@django-cms.org')
else:
user = get_user_model().objects.get(username='user_1_nostaff')
urls = self.get_url_dict(all_pages)
self.assertViewAllowed(urls["/en/page_b/page_b_a/"], user)
self.assertViewAllowed(urls["/en/page_b/page_b_b/"], user)
self.assertViewNotAllowed(urls["/en/page_b/page_b_b/page_b_b_a/"], user)
self.assertViewAllowed(urls["/en/page_b/page_b_c/"], user)
self.assertViewAllowed(urls["/en/page_b/page_b_d/"], user)
self.assertViewNotAllowed(urls["/en/page_c/"], user)
self.assertViewNotAllowed(urls["/en/page_d/"], user)
self.assertViewNotAllowed(urls["/en/page_d/page_d_a/"], user)
def test_node_staff_access_children_group_2(self):
"""
simulate behaviour of group 2 member
GROUPNAME_2 = 'group_b_b_ACCESS_CHILDREN'
to page_b_b and user is staff
"""
self._setup_user_groups()
all_pages = self._setup_tree_pages()
self._setup_view_restrictions()
granted = ['page_a',
'page_b_b_a',
'page_b_b_b',
'page_b_b_c',
'page_c',
'page_c_a',
'page_c_b',
'page_d_a',
'page_d_b',
'page_d_c',
'page_d_d',
]
self.assertGrantedVisibility(all_pages, granted, username='user_2')
if get_user_model().USERNAME_FIELD == 'email':
user = get_user_model().objects.get(email='user_2@django-cms.org')
else:
user = get_user_model().objects.get(username='user_2')
urls = self.get_url_dict(all_pages)
self.assertViewNotAllowed(urls['/en/page_b/'], user)
self.assertViewNotAllowed(urls['/en/page_b/page_b_b/'], user)
self.assertViewAllowed(urls['/en/page_b/page_b_b/page_b_b_a/'], user)
self.assertViewAllowed(urls['/en/page_b/page_b_b/page_b_b_b/'], user)
self.assertViewNotAllowed(urls['/en/page_b/page_b_b/page_b_b_a/page_b_b_a_a/'], user)
self.assertViewNotAllowed(urls['/en/page_d/'], user)
self.assertViewAllowed(urls['/en/page_d/page_d_a/'], user)
#
def test_node_staff_access_children_group_2_nostaff(self):
"""
simulate behaviour of group 2 member
GROUPNAME_2 = 'group_b_b_ACCESS_CHILDREN'
to page_b_b and user is no staff
"""
self._setup_user_groups()
all_pages = self._setup_tree_pages()
self._setup_view_restrictions()
granted = ['page_b_b_a',
'page_b_b_b',
'page_b_b_c',
]
self.assertGrantedVisibility(all_pages, granted, username='user_2_nostaff')
if get_user_model().USERNAME_FIELD == 'email':
user = get_user_model().objects.get(email='user_2_nostaff@django-cms.org')
else:
user = get_user_model().objects.get(username='user_2_nostaff')
urls = self.get_url_dict(all_pages)
# member of group that has access to this page
self.assertViewAllowed(urls["/en/page_b/page_b_b/page_b_b_a/"], user)
self.assertViewNotAllowed(urls["/en/page_b/page_b_b/page_b_b_a/page_b_b_a_a/"], user)
self.assertViewNotAllowed(urls["/en/page_b/"], user)
self.assertViewNotAllowed(urls["/en/page_b/page_b_a/"], user)
self.assertViewNotAllowed(urls["/en/page_b/page_b_b/"], user)
self.assertViewNotAllowed(urls["/en/page_c/"], user)
self.assertViewNotAllowed(urls["/en/page_d/"], user)
self.assertViewNotAllowed(urls["/en/page_d/page_d_a/"], user)
def test_node_staff_access_page_and_descendants_group_3(self):
"""
simulate behaviour of group 3 member
group_b_ACCESS_PAGE_AND_DESCENDANTS to page_b
and user is staff
"""
self._setup_user_groups()
all_pages = self._setup_tree_pages()
self._setup_view_restrictions()
granted = ['page_a',
'page_b',
'page_b_a',
'page_b_b',
'page_b_b_a',
'page_b_b_a_a',
'page_b_b_b',
'page_b_b_c',
'page_b_c',
'page_b_d',
'page_b_d_a',
'page_b_d_b',
'page_b_d_c',
'page_c',
'page_c_a',
'page_c_b',
'page_d_a',
'page_d_b',
'page_d_c',
'page_d_d',
]
self.assertGrantedVisibility(all_pages, granted, username='user_3')
if get_user_model().USERNAME_FIELD == 'email':
user = get_user_model().objects.get(email='user_3@django-cms.org')
else:
user = get_user_model().objects.get(username='user_3')
urls = self.get_url_dict(all_pages)
url = self.get_pages_root()
self.assertViewAllowed(urls[url], user)
self.assertViewAllowed(urls["/en/page_b/"], user)
self.assertViewAllowed(urls["/en/page_b/page_b_a/"], user)
self.assertViewAllowed(urls["/en/page_b/page_b_b/"], user)
self.assertViewAllowed(urls["/en/page_b/page_b_c/"], user)
self.assertViewAllowed(urls["/en/page_b/page_b_d/"], user)
self.assertViewAllowed(urls["/en/page_b/page_b_b/page_b_b_a/"], user)
self.assertViewAllowed(urls["/en/page_b/page_b_b/page_b_b_a/page_b_b_a_a/"], user)
self.assertViewAllowed(urls["/en/page_c/"], user)
self.assertViewNotAllowed(urls["/en/page_d/"], user)
self.assertViewAllowed(urls["/en/page_d/page_d_a/"], user)
self.assertViewAllowed(urls["/en/page_d/page_d_b/"], user)
self.assertViewAllowed(urls["/en/page_d/page_d_c/"], user)
    def test_node_staff_access_page_and_descendants_group_3_nostaff(self):
        """
        Simulate behaviour of a group 3 member.

        group_b_ACCESS_PAGE_AND_DESCENDANTS is attached to page_b, but
        the user is NOT staff, so only the page_b subtree is visible —
        none of the staff-only/unrestricted pages outside it.
        """
        self._setup_user_groups()
        all_pages = self._setup_tree_pages()
        self._setup_view_restrictions()
        # Only page_b and its descendants; no page_a/page_c/page_d at all.
        granted = ['page_b',
                   'page_b_a',
                   'page_b_b',
                   'page_b_b_a',
                   'page_b_b_a_a',
                   'page_b_b_b',
                   'page_b_b_c',
                   'page_b_c',
                   'page_b_d',
                   'page_b_d_a',
                   'page_b_d_b',
                   'page_b_d_c',
                   ]
        self.assertGrantedVisibility(all_pages, granted, username='user_3_nostaff')
        # Custom user models may use the email as the username field.
        if get_user_model().USERNAME_FIELD == 'email':
            user = get_user_model().objects.get(email='user_3_nostaff@django-cms.org')
        else:
            user = get_user_model().objects.get(username='user_3_nostaff')
        urls = self.get_url_dict(all_pages)
        # call /
        url = self.get_pages_root()
        self.assertViewNotAllowed(urls[url], user)
        self.assertViewAllowed(urls["/en/page_b/"], user)
        self.assertViewAllowed(urls["/en/page_b/page_b_a/"], user)
        self.assertViewAllowed(urls["/en/page_b/page_b_b/"], user)
        self.assertViewAllowed(urls["/en/page_b/page_b_c/"], user)
        self.assertViewAllowed(urls["/en/page_b/page_b_d/"], user)
        self.assertViewAllowed(urls["/en/page_b/page_b_b/page_b_b_a/"], user)
        self.assertViewAllowed(urls["/en/page_b/page_b_b/page_b_b_a/page_b_b_a_a/"], user)
        self.assertViewNotAllowed(urls["/en/page_c/"], user)
        self.assertViewNotAllowed(urls["/en/page_d/"], user)
        self.assertViewNotAllowed(urls["/en/page_d/page_d_a/"], user)
        self.assertViewNotAllowed(urls["/en/page_d/page_d_b/"], user)
        self.assertViewNotAllowed(urls["/en/page_d/page_d_c/"], user)
    def test_node_staff_access_descendants_group_4(self):
        """
        Simulate behaviour of a group 4 member.

        group_b_b_ACCESS_DESCENDANTS is attached to page_b_b, and the
        user is staff: descendants of page_b_b are visible (but NOT
        page_b_b itself — ACCESS_DESCENDANTS excludes the anchor page),
        plus the pages a staff user can see anyway.
        """
        self._setup_user_groups()
        all_pages = self._setup_tree_pages()
        self._setup_view_restrictions()
        # Note: page_b_b itself is absent — only its descendants are granted.
        granted = ['page_a',
                   'page_b_b_a',
                   'page_b_b_a_a',
                   'page_b_b_b',
                   'page_b_b_c',
                   'page_c',
                   'page_c_a',
                   'page_c_b',
                   'page_d_a',
                   'page_d_b',
                   'page_d_c',
                   'page_d_d',
                   ]
        self.assertGrantedVisibility(all_pages, granted, username='user_4')
        # Custom user models may use the email as the username field.
        if get_user_model().USERNAME_FIELD == 'email':
            user = get_user_model().objects.get(email='user_4@django-cms.org')
        else:
            user = get_user_model().objects.get(username='user_4')
        urls = self.get_url_dict(all_pages)
        # call /
        url = self.get_pages_root()
        self.assertViewAllowed(urls[url], user)
        self.assertViewNotAllowed(urls["/en/page_b/"], user)
        self.assertViewNotAllowed(urls["/en/page_b/page_b_a/"], user)
        # the anchor page of the ACCESS_DESCENDANTS grant stays hidden
        self.assertViewNotAllowed(urls["/en/page_b/page_b_b/"], user)
        self.assertViewNotAllowed(urls["/en/page_b/page_b_c/"], user)
        self.assertViewNotAllowed(urls["/en/page_b/page_b_d/"], user)
        # not a direct child
        self.assertViewAllowed(urls["/en/page_b/page_b_b/page_b_b_a/"], user)
        self.assertViewAllowed(urls["/en/page_b/page_b_b/page_b_b_a/page_b_b_a_a/"], user)
        self.assertViewAllowed(urls["/en/page_c/"], user)
        self.assertViewNotAllowed(urls["/en/page_d/"], user)
        self.assertViewAllowed(urls["/en/page_d/page_d_a/"], user)
        self.assertViewAllowed(urls["/en/page_d/page_d_b/"], user)
        self.assertViewAllowed(urls["/en/page_d/page_d_c/"], user)
        self.assertViewAllowed(urls["/en/page_d/page_d_d/"], user)
    def test_node_staff_access_descendants_group_4_nostaff(self):
        """
        Simulate behaviour of a group 4 member.

        group_b_b_ACCESS_DESCENDANTS is attached to page_b_b and the
        user is NOT staff: only the descendants of page_b_b are visible
        (not page_b_b itself, nothing else in the tree).
        """
        self._setup_user_groups()
        all_pages = self._setup_tree_pages()
        self._setup_view_restrictions()
        # Descendants of page_b_b only; the anchor page itself is excluded.
        granted = [
            'page_b_b_a',
            'page_b_b_a_a',
            'page_b_b_b',
            'page_b_b_c',
        ]
        self.assertGrantedVisibility(all_pages, granted, username='user_4_nostaff')
        # Custom user models may use the email as the username field.
        if get_user_model().USERNAME_FIELD == 'email':
            user = get_user_model().objects.get(email='user_4_nostaff@django-cms.org')
        else:
            user = get_user_model().objects.get(username='user_4_nostaff')
        urls = self.get_url_dict(all_pages)
        url = self.get_pages_root()
        self.assertViewNotAllowed(urls[url], user)
        self.assertViewNotAllowed(urls["/en/page_b/"], user)
        self.assertViewNotAllowed(urls["/en/page_b/page_b_a/"], user)
        self.assertViewNotAllowed(urls["/en/page_b/page_b_b/"], user)
        self.assertViewNotAllowed(urls["/en/page_b/page_b_c/"], user)
        self.assertViewNotAllowed(urls["/en/page_b/page_b_d/"], user)
        self.assertViewAllowed(urls["/en/page_b/page_b_b/page_b_b_a/"], user)
        self.assertViewAllowed(urls["/en/page_b/page_b_b/page_b_b_a/page_b_b_a_a/"], user)
        self.assertViewNotAllowed(urls["/en/page_c/"], user)
        self.assertViewNotAllowed(urls["/en/page_d/"], user)
        self.assertViewNotAllowed(urls["/en/page_d/page_d_a/"], user)
        self.assertViewNotAllowed(urls["/en/page_d/page_d_b/"], user)
        self.assertViewNotAllowed(urls["/en/page_d/page_d_c/"], user)
        self.assertViewNotAllowed(urls["/en/page_d/page_d_d/"], user)
    def test_node_staff_access_page_group_5(self):
        """
        Simulate behaviour of a group 5 member.

        group_d_ACCESS_PAGE is attached to page_d, and the user is
        staff: page_d itself becomes visible (ACCESS_PAGE grants only
        the page, not its subtree), plus the staff-visible pages.

        (Docstring previously said "group b member", which was a
        copy/paste mistake — this test is about group 5 / group_d.)
        """
        self._setup_user_groups()
        all_pages = self._setup_tree_pages()
        self._setup_view_restrictions()
        granted = ['page_a',
                   'page_c',
                   'page_c_a',
                   'page_c_b',
                   'page_d',
                   'page_d_a',
                   'page_d_b',
                   'page_d_c',
                   'page_d_d',
                   ]
        self.assertGrantedVisibility(all_pages, granted, username='user_5')
        # Custom user models may use the email as the username field.
        if get_user_model().USERNAME_FIELD == 'email':
            user = get_user_model().objects.get(email='user_5@django-cms.org')
        else:
            user = get_user_model().objects.get(username='user_5')
        urls = self.get_url_dict(all_pages)
        url = self.get_pages_root()
        self.assertViewAllowed(urls[url], user)
        # the whole page_b subtree belongs to other groups
        self.assertViewNotAllowed(urls["/en/page_b/"], user)
        self.assertViewNotAllowed(urls["/en/page_b/page_b_a/"], user)
        self.assertViewNotAllowed(urls["/en/page_b/page_b_c/"], user)
        self.assertViewNotAllowed(urls["/en/page_b/page_b_d/"], user)
        self.assertViewNotAllowed(urls["/en/page_b/page_b_b/page_b_b_a/"], user)
        self.assertViewNotAllowed(urls["/en/page_b/page_b_b/page_b_b_a/page_b_b_a_a/"], user)
        self.assertViewAllowed(urls["/en/page_c/"], user)
        self.assertViewAllowed(urls["/en/page_d/"], user)
        self.assertViewAllowed(urls["/en/page_d/page_d_a/"], user)
def test_node_staff_access_page_group_5_nostaff(self):
"""
simulate behaviour of group b member
group_d_ACCESS_PAGE to page_d
nostaff user
"""
self._setup_user_groups()
all_pages = self._setup_tree_pages()
self._setup_view_restrictions()
granted = ['page_d',]
self.assertGrantedVisibility(all_pages, granted, username='user_5_nostaff')
if get_user_model().USERNAME_FIELD == 'email':
user = get_user_model().objects.get(email='user_5_nostaff@django-cms.org')
else:
user = get_user_model().objects.get(username='user_5_nostaff')
urls = self.get_url_dict(all_pages)
url = self.get_pages_root()
self.assertViewNotAllowed(urls[url], user)
self.assertViewAllowed(urls["/en/page_d/"], user)
self.assertViewNotAllowed(urls["/en/page_b/"], user)
self.assertViewNotAllowed(urls["/en/page_b/page_b_a/"], user)
self.assertViewNotAllowed(urls["/en/page_b/page_b_b/"], user)
self.assertViewNotAllowed(urls["/en/page_b/page_b_c/"], user)
self.assertViewNotAllowed(urls["/en/page_b/page_b_d/"], user)
self.assertViewNotAllowed(urls["/en/page_b/page_b_b/page_b_b_a/"], user)
self.assertViewNotAllowed(urls["/en/page_c/"], user)
self.assertViewAllowed(urls["/en/page_d/"], user)
self.assertViewNotAllowed(urls["/en/page_d/page_d_a/"], user)
self.assertViewNotAllowed(urls["/en/page_d/page_d_b/"], user)
self.assertViewNotAllowed(urls["/en/page_d/page_d_c/"], user)
self.assertViewNotAllowed(urls["/en/page_d/page_d_d/"], user)
| bsd-3-clause |
bencharb/AutobahnPython | examples/twisted/wamp/rpc/slowsquare/backend.py | 2 | 2306 | ###############################################################################
#
# The MIT License (MIT)
#
# Copyright (c) Tavendo GmbH
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
#
###############################################################################
from os import environ
from twisted.internet.defer import inlineCallbacks, \
returnValue
from autobahn.twisted.wamp import ApplicationSession, ApplicationRunner
from autobahn.twisted.util import sleep
class Component(ApplicationSession):
    """
    A math service application component.

    After joining the WAMP realm it registers two remote procedures:
    ``com.math.square`` (returns immediately) and
    ``com.math.slowsquare`` (resolves after an artificial delay).
    """
    @inlineCallbacks
    def onJoin(self, details):
        # Called by autobahn once the WAMP session is established.
        print("session attached")
        def square(x):
            # Synchronous procedure: result is returned immediately.
            return x * x
        yield self.register(square, 'com.math.square')
        @inlineCallbacks
        def slowsquare(x, delay=1):
            # Asynchronous procedure: sleeps `delay` seconds (default 1)
            # before producing the result via returnValue (Twisted-style
            # return from an inlineCallbacks generator).
            yield sleep(delay)
            returnValue(x * x)
        yield self.register(slowsquare, 'com.math.slowsquare')
        print("procedures registered")
if __name__ == '__main__':
    # Connect to a local Crossbar router by default; the router URL can
    # be overridden through the AUTOBAHN_DEMO_ROUTER environment variable.
    runner = ApplicationRunner(
        environ.get("AUTOBAHN_DEMO_ROUTER", "ws://127.0.0.1:8080/ws"),
        u"crossbardemo",
        debug_wamp=False,  # optional; log many WAMP details
        debug=False,  # optional; log even more details
    )
    runner.run(Component)
| mit |
xiaoyaozi5566/DiamondCache | src/arch/x86/isa/insts/simd64/floating_point/compare/compare_and_write_mask.py | 91 | 2172 | # Copyright (c) 2007 The Hewlett-Packard Development Company
# All rights reserved.
#
# The license below extends only to copyright in the software and shall
# not be construed as granting a license to any other intellectual
# property including but not limited to intellectual property relating
# to a hardware implementation of the functionality of the software
# licensed hereunder. You may use the software subject to the license
# terms below provided that you ensure that this notice is replicated
# unmodified and in its entirety in all distributions of the software,
# modified or unmodified, in source code or in binary form.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met: redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer;
# redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution;
# neither the name of the copyright holders nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# Authors: Gabe Black
# Placeholder microcode for the 3DNow! packed-float compare-and-write-mask
# instructions.  The '#'-prefixed lines are *inside* the string and act as
# comments in the microcode assembly language — the instructions listed
# here are named but not yet implemented.
microcode = '''
# PFCMPEQ
# PFCMPGT
# PFCMPGE
'''
| bsd-3-clause |
wanghao524151/scrapy_joy | open_loan/tasks.py | 1 | 7173 | #!/usr/bin/python
# -*- coding: utf-8 -*-
from __future__ import absolute_import
import os, sys, django
# Make the project package importable when this module is executed as a
# script, then bootstrap Django so the ORM models below can be imported.
sys.path.append(os.path.dirname(os.path.dirname(__file__)))
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "scrapy_joy.settings")
django.setup()
from datetime import date, timedelta
from celery.task import task
from dynamic_scraper.utils.task_utils import TaskUtils
from django.db import transaction
from django.db.models import Sum, Avg
from django.core import mail
from django.template import Context, loader
from scrapy_joy import settings
from open_loan.models import LoanScraper, Loan, StaDayData, LoanCategory, LoanWebsite, SubscribeEmail
@task()
def run_spiders():
    # Celery entry point: delegate to dynamic_scraper's TaskUtils, which
    # schedules a 'loan_spider' run for every due LoanScraper record
    # (using its 'scraper' config and 'scraper_runtime' bookkeeping fields).
    t = TaskUtils()
    t.run_spiders(LoanScraper, 'scraper', 'scraper_runtime', 'loan_spider')
@task()
def run_checkers():
    # Celery entry point: run the 'loan_checker' spider over existing Loan
    # rows to detect items that have disappeared from their source site.
    t = TaskUtils()
    t.run_checkers(Loan, 'loan_website__scraper', 'checker_runtime', 'loan_checker')
def _loan_term_bucket(loan):
    """Return the statistics term bucket for a loan, in months.

    Demand-deposit style products (category1 containing u'活期') always
    fall into the 1-month bucket; fixed-term loans are bucketed by their
    duration in days.
    """
    if u'活期' in loan.category1.fullname:
        return 1
    # (exclusive upper bound in days, bucket in months), checked in order.
    for days, months in ((90, 1), (180, 3), (270, 6), (360, 9),
                         (540, 12), (720, 18), (1080, 24)):
        if loan.duration < days:
            return months
    return 36


def _save_sta_rows(sta_day, rate_map, site_dict, cat_dict, category_field):
    """Persist one StaDayData row per (category, term, site) combination.

    ``rate_map`` is {category fullname: {term: {site name: [year rates]}}}
    and ``category_field`` is 'category1' or 'category2' (the StaDayData
    field the category object is stored on).
    """
    for cat_name, terms in rate_map.items():
        for term, sites in terms.items():
            for site_name, rates in sites.items():
                row = {
                    'sta_date': sta_day,
                    'site': site_dict.get(site_name),
                    category_field: cat_dict.get(cat_name),
                    'term': term,
                    'term_unit': StaDayData.TERM_UNIT_CHOICES[1][0],
                    'rate': sum(rates) / len(rates),
                    'sta_cnt': len(rates),
                }
                StaDayData(**row).save()


@task()
@transaction.atomic
def run_sta_day_data(sta_day=None):
    """Aggregate one day's Loan rows into StaDayData statistics.

    ``sta_day`` defaults to yesterday.  The old signature used
    ``sta_day=(date.today() - timedelta(days=1))``, which was evaluated
    once at import time and therefore went stale in long-running celery
    workers; ``None`` now means "yesterday at call time".
    """
    if sta_day is None:
        sta_day = date.today() - timedelta(days=1)
    # Drop any previous statistics for this day so re-runs don't duplicate rows.
    StaDayData.objects.filter(sta_date=sta_day).delete()
    site_dict = dict([(i.name, i) for i in LoanWebsite.objects.all()])
    cat_dict = dict([(i.fullname, i) for i in LoanCategory.objects.all()])
    # Collect the day's raw loans and group their annual rates by
    # (category fullname, term bucket, site name) for both category levels.
    loans = Loan.objects.filter(created__range=(sta_day, sta_day + timedelta(days=1)))
    cat1_dict, cat2_dict = {}, {}
    for loan in loans:
        term = _loan_term_bucket(loan)
        for cat_name, rate_map in ((loan.category1.fullname, cat1_dict),
                                   (loan.category2.fullname, cat2_dict)):
            rate_map.setdefault(cat_name, {}) \
                    .setdefault(term, {}) \
                    .setdefault(loan.site.name, []) \
                    .append(loan.year_rate)
    _save_sta_rows(sta_day, cat1_dict, site_dict, cat_dict, 'category1')
    _save_sta_rows(sta_day, cat2_dict, site_dict, cat_dict, 'category2')
@task()
def send_week_email(mail_list=None, today=None):
    """Send the weekly index email (周指数) to subscribers.

    ``mail_list`` defaults to every SubscribeEmail address.  ``today``
    defaults to the current date — the old ``today=date.today()`` default
    was evaluated once at import time and went stale in long-running
    celery workers.
    """
    if today is None:
        today = date.today()
    if not mail_list:
        mail_list = [obj.email for obj in SubscribeEmail.objects.all()]
    subject = u'【Kaisa利率】周指数'
    # Last week's Monday (inclusive) .. this week's Monday (exclusive).
    last_week_start_date = today + timedelta(-7 - today.weekday())
    last_week_end_date = today + timedelta(-today.weekday())
    last_week_loans = Loan.objects.filter(created__range=(last_week_start_date, last_week_end_date))
    last_week_loan_cnt = last_week_loans.count()
    last_week_loan_rate = last_week_loans.aggregate(avg=Avg('year_rate'))['avg'] or 0
    last_week_loan_amount = last_week_loans.aggregate(sum=Sum('amount'))['sum'] or 0
    host_name = settings.HOST_NAME
    email_template_name = 'week_email.html'
    t = loader.get_template(email_template_name)
    # The template reads the statistics from the local variables above.
    html_content = t.render(Context(locals()))
    conn = mail.get_connection()
    conn.open()
    try:
        msg_list = []
        for to in mail_list:
            msg = mail.EmailMultiAlternatives(subject, html_content, settings.DEFAULT_FROM_EMAIL, [to, ])
            msg.attach_alternative(html_content, "text/html")
            msg_list.append(msg)
        conn.send_messages(msg_list)
    finally:
        # Always release the SMTP connection, even if sending raises.
        conn.close()
@task()
def send_month_email(mail_list=None, today=None):
    """Send the monthly index email (月指数) to subscribers.

    ``mail_list`` defaults to every SubscribeEmail address.  ``today``
    defaults to the current date — the old ``today=date.today()`` default
    was evaluated once at import time and went stale in long-running
    celery workers.
    """
    if today is None:
        today = date.today()
    if not mail_list:
        mail_list = [obj.email for obj in SubscribeEmail.objects.all()]
    subject = u'【Kaisa利率】月指数'
    # First day of the previous month (wrapping to December of last year
    # when called in January).
    if today.month <= 1:
        last_month_start_date = date(day=1, month=12, year=today.year-1)
    else:
        last_month_start_date = date(day=1, month=today.month-1, year=today.year)
    # Last day of the previous month.
    last_month_end_date = date(day=1, month=today.month, year=today.year) - timedelta(days=1)
    last_month_loans = Loan.objects.filter(created__range=(last_month_start_date, last_month_end_date))
    last_month_loan_cnt = last_month_loans.count()
    last_month_loan_rate = last_month_loans.aggregate(avg=Avg('year_rate'))['avg'] or 0
    last_month_loan_amount = last_month_loans.aggregate(sum=Sum('amount'))['sum'] or 0
    host_name = settings.HOST_NAME
    email_template_name = 'month_email.html'
    t = loader.get_template(email_template_name)
    # The template reads the statistics from the local variables above.
    html_content = t.render(Context(locals()))
    conn = mail.get_connection()
    conn.open()
    try:
        msg_list = []
        for to in mail_list:
            msg = mail.EmailMultiAlternatives(subject, html_content, settings.DEFAULT_FROM_EMAIL, [to, ])
            msg.attach_alternative(html_content, "text/html")
            msg_list.append(msg)
        conn.send_messages(msg_list)
    finally:
        # Always release the SMTP connection, even if sending raises.
        conn.close()
if __name__ == "__main__":
year = int(raw_input("请输入年份:"))
month = int(raw_input("请输入月份:"))
# 开始时间
start_day = date(year=year, month=month, day=1)
# 结束时间
if 0 < month < 12:
end_day = date(year=year, month=(month+1), day=1)
elif month == 12:
end_day = date(year=(year+1), month=1, day=1)
else:
print "输入月份" + str(month) + "有误"
while start_day < end_day:
run_sta_day_data(start_day)
start_day = start_day + timedelta(days=1) | apache-2.0 |
jangorecki/h2o-3 | h2o-py/tests/testdir_munging/pyunit_cbind.py | 6 | 2740 | #!/usr/bin/python
# -*- encoding: utf-8 -*-
import h2o
from h2o.exceptions import H2OTypeError, H2OValueError
from tests import pyunit_utils
def test_cbind():
    """Test H2OFrame.cbind() method.

    Covers non-mutating column binds (frame/vec/constant/logical
    operands), the error cases (unequal row counts, bad argument types)
    and rebinding usage, checking frame shape and column names after
    each operation.
    """
    # 12x4 base frame; otherhdf has a different row count and is used
    # below to provoke the unequal-rows error.
    hdf = h2o.import_file(path=pyunit_utils.locate('smalldata/jira/pub-180.csv'))
    otherhdf = h2o.import_file(path=pyunit_utils.locate('smalldata/jira/v-11.csv'))
    rows, cols = hdf.shape
    assert rows == 12 and cols == 4, "unexpected dimensions in original"
    ##################################
    #     non-mutating h2o.cbind     #
    ##################################
    # frame to frame
    hdf2 = hdf.cbind(hdf)
    rows2, cols2 = hdf2.dim
    assert hdf2.shape == (12, 8)
    # shape is re-checked after touching frame_id — presumably to ensure
    # the bind survives materialization on the backend; confirm intent.
    print(hdf2.frame_id)
    assert hdf2.shape == (12, 8)
    # vec to vec
    xx = hdf[0]
    yy = hdf[1]
    hdf3 = xx.cbind(yy)
    assert hdf3.shape == (12, 2)
    assert hdf3.names == ['colgroup', 'colgroup2']
    print(hdf3.frame_id)
    assert hdf3.shape == (12, 2)
    assert hdf3.names == ['colgroup', 'colgroup2']
    # vec to frame (both argument orders give the same shape)
    hdf4 = hdf.cbind(yy)
    hdf5 = yy.cbind(hdf)
    assert hdf4.shape == hdf5.shape == (12, 5)
    # logical expressions
    hdf6 = (hdf[2] <= 5).cbind(hdf[3] >= 4)
    assert hdf6.shape == (12, 2)
    # unequal rows should fail
    try:
        hdf.cbind(otherhdf)
        assert False, "Expected an error"
    except H2OValueError:
        pass
    # cbinding of wrong types should fail
    try:
        hdf.cbind("hello")
        assert False
    except H2OTypeError:
        pass
    try:
        hdf.cbind([hdf, {"x": hdf}])
        assert False
    except H2OTypeError:
        pass
    # cbinding of multiple columns
    hdf7 = xx.cbind([xx, xx, xx])
    assert hdf7.shape == (12, 4)
    print(hdf7.frame_id)
    assert hdf7.shape == (12, 4)
    # cbinding of constants (each constant becomes a new column)
    hdf8 = xx.cbind([1, -1])
    assert hdf8.shape == (12, 3)
    print(hdf8.frame_id)
    assert hdf8.shape == (12, 3)
    ###################################
    #     mutating H2OFrame.cbind     #
    ###################################
    # frame to frame
    hdf = hdf.cbind(hdf)
    assert hdf.shape == (12, 8)
    print(hdf.frame_id)
    assert hdf.shape == (12, 8)
    # frame to vec
    hdf = hdf.cbind(yy)
    assert hdf.shape == (12, 9)
    print(hdf.frame_id)
    assert hdf.shape == (12, 9)
    # logical expressions; duplicate column names get numeric suffixes
    hdf = hdf.cbind(hdf[2] <= 5)
    assert hdf.shape == (12, 10)
    assert hdf.names == ['colgroup', 'colgroup2', 'col1', 'col2',
                         'colgroup0', 'colgroup20', 'col10', 'col20', 'colgroup21', 'col11']
    print(hdf.frame_id)
    assert hdf.shape == (12, 10)
# Run under the pyunit standalone harness when executed as a script,
# otherwise (imported by the test runner) call the test directly.
if __name__ == "__main__":
    pyunit_utils.standalone_test(test_cbind)
else:
    test_cbind()
| apache-2.0 |
roadhead/satchmo | satchmo/make-messages.py | 16 | 6741 | #!/usr/bin/env python
# Modified version of Django's make-messages.py
# This is from Django revision 6445.
# It has been modified to parse ".txt" and ".rml" files in addition to its
# normal behavior of parsing ".py" and ".html" files.
# Need to ensure that the i18n framework is enabled
# (settings.configure sets up a minimal standalone Django configuration,
# since this script runs outside any project settings module).
from django.conf import settings
settings.configure(USE_I18N = True)
from django.utils.translation import templatize
import re
import os
import sys
import getopt
from itertools import dropwhile
# Matches JS '//' line comments so they can be rewritten as '#' comments
# before feeding the file to xgettext in Perl mode (djangojs domain).
pythonize_re = re.compile(r'\n\s*//')
def make_messages():
    """Collect translatable strings into .pot/.po message catalogs.

    Behaves like Django's original make-messages.py, but additionally
    templatizes ``.txt`` and ``.rml`` files (besides ``.html``) before
    handing them to xgettext.  Requires the GNU gettext tools
    (xgettext, msguniq, msgmerge) on PATH and must be run from a tree
    containing a ``conf/locale`` or ``locale`` directory.

    Command line: -l <lang> one language, -a all languages,
    -d <domain> ('django' or 'djangojs'), -v verbose.
    """
    # Locate the locale directory: Django tree layout first, then app/project.
    localedir = None
    if os.path.isdir(os.path.join('conf', 'locale')):
        localedir = os.path.abspath(os.path.join('conf', 'locale'))
    elif os.path.isdir('locale'):
        localedir = os.path.abspath('locale')
    else:
        print "This script should be run from the django svn tree or your project or app tree."
        print "If you did indeed run it from the svn checkout or your project or application,"
        print "maybe you are just missing the conf/locale (in the django tree) or locale (for project"
        print "and application) directory?"
        print "make-messages.py doesn't create it automatically, you have to create it by hand if"
        print "you want to enable i18n for your project or application."
        sys.exit(1)
    (opts, args) = getopt.getopt(sys.argv[1:], 'l:d:va')
    lang = None
    domain = 'django'
    verbose = False
    # NOTE(review): 'all' shadows the builtin all() for the rest of this function.
    all = False
    for o, v in opts:
        if o == '-l':
            lang = v
        elif o == '-d':
            domain = v
        elif o == '-v':
            verbose = True
        elif o == '-a':
            all = True
    if domain not in ('django', 'djangojs'):
        print "currently make-messages.py only supports domains 'django' and 'djangojs'"
        sys.exit(1)
    if (lang is None and not all) or domain is None:
        print "usage: make-messages.py -l <language>"
        print "   or: make-messages.py -a"
        sys.exit(1)
    # Build the list of languages to process: one (-l) or every
    # subdirectory of the locale dir (-a).
    languages = []
    if lang is not None:
        languages.append(lang)
    elif all:
        languages = [el for el in os.listdir(localedir) if not el.startswith('.')]
    for lang in languages:
        print "processing language", lang
        basedir = os.path.join(localedir, lang, 'LC_MESSAGES')
        if not os.path.isdir(basedir):
            os.makedirs(basedir)
        pofile = os.path.join(basedir, '%s.po' % domain)
        potfile = os.path.join(basedir, '%s.pot' % domain)
        # Start from a clean .pot; it is appended to file by file below.
        if os.path.exists(potfile):
            os.unlink(potfile)
        all_files = []
        for (dirpath, dirnames, filenames) in os.walk("."):
            all_files.extend([(dirpath, f) for f in filenames])
        all_files.sort()
        for dirpath, file in all_files:
            if domain == 'djangojs' and file.endswith('.js'):
                # JS files: turn '//' comments into '#' and run xgettext in
                # Perl mode on a temporary <name>.js.py copy.
                if verbose: sys.stdout.write('processing file %s in %s\n' % (file, dirpath))
                src = open(os.path.join(dirpath, file), "rb").read()
                src = pythonize_re.sub('\n#', src)
                open(os.path.join(dirpath, '%s.py' % file), "wb").write(src)
                thefile = '%s.py' % file
                cmd = 'xgettext %s -d %s -L Perl --keyword=gettext_noop --keyword=gettext_lazy --keyword=ngettext_lazy:1,2 --from-code UTF-8 -o - "%s"' % (
                    os.path.exists(potfile) and '--omit-header' or '', domain, os.path.join(dirpath, thefile))
                (stdin, stdout, stderr) = os.popen3(cmd, 't')
                msgs = stdout.read()
                errors = stderr.read()
                if errors:
                    print "errors happened while running xgettext on %s" % file
                    print errors
                    sys.exit(8)
                # Point the '#:' location comments at the real file, not the
                # temporary .py copy.
                old = '#: '+os.path.join(dirpath, thefile)[2:]
                new = '#: '+os.path.join(dirpath, file)[2:]
                msgs = msgs.replace(old, new)
                if msgs:
                    open(potfile, 'ab').write(msgs)
                os.unlink(os.path.join(dirpath, thefile))
            elif domain == 'django' and (file.endswith('.py') or file.endswith('.html') or file.endswith('.txt') or file.endswith('.rml')):
                thefile = file
                # Template-like files are converted to Python-parsable form
                # first (this is the .txt/.rml extension of the original script).
                if file.endswith('.html') or file.endswith('.txt') or file.endswith('.rml'):
                    src = open(os.path.join(dirpath, file), "rb").read()
                    thefile = '%s.py' % file
                    open(os.path.join(dirpath, thefile), "wb").write(templatize(src))
                if verbose:
                    sys.stdout.write('processing file %s in %s\n' % (file, dirpath))
                cmd = 'xgettext -d %s -L Python --keyword=gettext_noop --keyword=gettext_lazy --keyword=ngettext_lazy:1,2 --keyword=ugettext_noop --keyword=ugettext_lazy --keyword=ungettext_lazy:1,2 --from-code UTF-8 -o - "%s"' % (
                    domain, os.path.join(dirpath, thefile))
                (stdin, stdout, stderr) = os.popen3(cmd, 't')
                msgs = stdout.read()
                errors = stderr.read()
                if errors:
                    print "errors happened while running xgettext on %s" % file
                    print errors
                    sys.exit(8)
                if thefile != file:
                    old = '#: '+os.path.join(dirpath, thefile)[2:]
                    new = '#: '+os.path.join(dirpath, file)[2:]
                    msgs = msgs.replace(old, new)
                if os.path.exists(potfile):
                    # Strip the header
                    msgs = '\n'.join(dropwhile(len, msgs.split('\n')))
                else:
                    msgs = msgs.replace('charset=CHARSET', 'charset=UTF-8')
                if msgs:
                    open(potfile, 'ab').write(msgs)
                if thefile != file:
                    os.unlink(os.path.join(dirpath, thefile))
        # Deduplicate the .pot with msguniq, then merge into the existing
        # .po (if any) with msgmerge; the .pot is temporary and removed.
        if os.path.exists(potfile):
            (stdin, stdout, stderr) = os.popen3('msguniq --to-code=utf-8 "%s"' % potfile, 'b')
            msgs = stdout.read()
            errors = stderr.read()
            if errors:
                print "errors happened while running msguniq"
                print errors
                sys.exit(8)
            open(potfile, 'w').write(msgs)
            if os.path.exists(pofile):
                (stdin, stdout, stderr) = os.popen3('msgmerge -q "%s" "%s"' % (pofile, potfile), 'b')
                msgs = stdout.read()
                errors = stderr.read()
                if errors:
                    print "errors happened while running msgmerge"
                    print errors
                    sys.exit(8)
            open(pofile, 'wb').write(msgs)
            os.unlink(potfile)
# Script entry point.
if __name__ == "__main__":
    make_messages()
| bsd-3-clause |
fernandog/Medusa | ext/stevedore/tests/test_sphinxext.py | 65 | 4050 | # Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Tests for the sphinx extension
"""
from __future__ import unicode_literals
from stevedore import extension
from stevedore import sphinxext
from stevedore.tests import utils
import mock
import pkg_resources
def _make_ext(name, docstring):
    """Build a stub Extension whose plugin carries *docstring*.

    The fake entry point reports '<name>_module' as its module and
    renders as 'ENTRY_POINT(<name>)' when stringified, which the
    sphinxext tests assert against.
    """
    def plugin():
        pass
    plugin.__doc__ = docstring
    entry_point = mock.Mock(spec=pkg_resources.EntryPoint)
    entry_point.module_name = '%s_module' % name
    entry_point.__str__ = mock.Mock(return_value='ENTRY_POINT(%s)' % name)
    return extension.Extension(name, entry_point, plugin, None)
class TestSphinxExt(utils.TestCase):
    """Tests for the list-rendering helpers of stevedore.sphinxext.

    Each helper yields (text, module_name) tuples that sphinx turns into
    documentation lines; the tests check both list styles, with and
    without plugin docstrings.
    """
    def setUp(self):
        super(TestSphinxExt, self).setUp()
        # Two stub extensions: one-line and multi-paragraph docstrings.
        self.exts = [
            _make_ext('test1', 'One-line docstring'),
            _make_ext('test2', 'Multi-line docstring\n\nAnother para'),
        ]
        self.em = extension.ExtensionManager.make_test_instance(self.exts)
    def test_simple_list(self):
        # The simple list keeps only the first docstring line.
        results = list(sphinxext._simple_list(self.em))
        self.assertEqual(
            [
                ('* test1 -- One-line docstring', 'test1_module'),
                ('* test2 -- Multi-line docstring', 'test2_module'),
            ],
            results,
        )
    def test_simple_list_no_docstring(self):
        # A missing docstring renders as an empty description.
        ext = [_make_ext('nodoc', None)]
        em = extension.ExtensionManager.make_test_instance(ext)
        results = list(sphinxext._simple_list(em))
        self.assertEqual(
            [
                ('* nodoc -- ', 'nodoc_module'),
            ],
            results,
        )
    def test_detailed_list(self):
        # The detailed list emits a section title (default '-' underline)
        # followed by the full docstring.
        results = list(sphinxext._detailed_list(self.em))
        self.assertEqual(
            [
                ('test1', 'test1_module'),
                ('-----', 'test1_module'),
                ('\n', 'test1_module'),
                ('One-line docstring', 'test1_module'),
                ('\n', 'test1_module'),
                ('test2', 'test2_module'),
                ('-----', 'test2_module'),
                ('\n', 'test2_module'),
                ('Multi-line docstring\n\nAnother para', 'test2_module'),
                ('\n', 'test2_module'),
            ],
            results,
        )
    def test_detailed_list_format(self):
        # over/under control the characters drawn above/below the title.
        results = list(sphinxext._detailed_list(self.em, over='+', under='+'))
        self.assertEqual(
            [
                ('+++++', 'test1_module'),
                ('test1', 'test1_module'),
                ('+++++', 'test1_module'),
                ('\n', 'test1_module'),
                ('One-line docstring', 'test1_module'),
                ('\n', 'test1_module'),
                ('+++++', 'test2_module'),
                ('test2', 'test2_module'),
                ('+++++', 'test2_module'),
                ('\n', 'test2_module'),
                ('Multi-line docstring\n\nAnother para', 'test2_module'),
                ('\n', 'test2_module'),
            ],
            results,
        )
    def test_detailed_list_no_docstring(self):
        # Missing docstrings produce a sphinx warning directive instead.
        ext = [_make_ext('nodoc', None)]
        em = extension.ExtensionManager.make_test_instance(ext)
        results = list(sphinxext._detailed_list(em))
        self.assertEqual(
            [
                ('nodoc', 'nodoc_module'),
                ('-----', 'nodoc_module'),
                ('\n', 'nodoc_module'),
                ('.. warning:: No documentation found in ENTRY_POINT(nodoc)',
                 'nodoc_module'),
                ('\n', 'nodoc_module'),
            ],
            results,
        )
| gpl-3.0 |
ZihengJiang/mxnet | example/neural-style/end_to_end/boost_inference.py | 25 | 1780 | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
# Chained style-transfer inference: each generator module consumes the
# previous generator's output, boosting the stylization stage by stage.
import sys
sys.path.insert(0, "../mxnet/python")
import mxnet as mx
import numpy as np
#import basic
import data_processing
import gen_v3
import gen_v4
# Fixed input layout: batch 1, RGB, 480x640.
dshape = (1, 3, 480, 640)
# NOTE(review): clip_norm is unused in this script — presumably shared
# with the training script; confirm before removing.
clip_norm = 1.0 * np.prod(dshape)
model_prefix = "./model/"
ctx = mx.gpu(0)
# generator
gens = [gen_v4.get_module("g0", dshape, ctx),
        gen_v3.get_module("g1", dshape, ctx),
        gen_v3.get_module("g2", dshape, ctx),
        gen_v4.get_module("g3", dshape, ctx)]
for i in range(len(gens)):
    gens[i].load_params("./model/%d/v3_0002-0026000.params" % i)
content_np = data_processing.PreprocessContentImage("../IMG_4343.jpg", min(dshape[2:]), dshape)
data = [mx.nd.array(content_np)]
for i in range(len(gens)):
    # Feed the latest output back in; save each intermediate image.
    gens[i].forward(mx.io.DataBatch([data[-1]], [0]), is_train=False)
    new_img = gens[i].get_outputs()[0]
    data.append(new_img.copyto(mx.cpu()))
    data_processing.SaveImage(new_img.asnumpy(), "out_%d.jpg" % i)
import os
# Bundle the outputs. NOTE(review): shells out via os.system with fixed
# strings; fine for a demo, but subprocess with an argument list would be
# safer if paths ever become variable.
os.system("rm -rf out.zip")
os.system("zip out.zip out_*")
uoaerg/linux-ecn | tools/perf/python/twatch.py | 1565 | 1316 | #! /usr/bin/python
# -*- python -*-
# -*- coding: utf-8 -*-
# twatch - Experimental use of the perf python interface
# Copyright (C) 2011 Arnaldo Carvalho de Melo <acme@redhat.com>
#
# This application is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; version 2.
#
# This application is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
import perf
def main():
    # Watch task/comm events on every CPU for every thread, printing one
    # line per sampled event (cpu, pid, tid) plus the event itself.
    cpus = perf.cpu_map()
    threads = perf.thread_map()
    # One event selector: sample period, tid and cpu; wake up the reader
    # on every event (wakeup_events=1, watermark=1).
    evsel = perf.evsel(task = 1, comm = 1, mmap = 0,
                       wakeup_events = 1, watermark = 1,
                       sample_id_all = 1,
                       sample_type = perf.SAMPLE_PERIOD | perf.SAMPLE_TID | perf.SAMPLE_CPU)
    evsel.open(cpus = cpus, threads = threads);
    evlist = perf.evlist(cpus, threads)
    evlist.add(evsel)
    evlist.mmap()
    # Block until events arrive, then drain each CPU's ring buffer.
    while True:
        evlist.poll(timeout = -1)
        for cpu in cpus:
            event = evlist.read_on_cpu(cpu)
            if not event:
                continue
            print "cpu: %2d, pid: %4d, tid: %4d" % (event.sample_cpu,
                                                    event.sample_pid,
                                                    event.sample_tid),
            print event
# Script entry point.
if __name__ == '__main__':
    main()
| gpl-2.0 |
Huyuwei/tvm | nnvm/tests/python/frontend/mxnet/model_zoo/vgg.py | 5 | 4240 | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""References:
Simonyan, Karen, and Andrew Zisserman. "Very deep convolutional networks for
large-scale image recognition." arXiv preprint arXiv:1409.1556 (2014).
"""
import mxnet as mx
import numpy as np
def get_feature(internel_layer, layers, filters, batch_norm=False, **kwargs):
    """Build the VGG convolutional feature extractor.

    Stage ``i`` applies ``layers[i]`` 3x3 convolutions with ``filters[i]``
    output channels (each optionally batch-normalized, always followed by
    ReLU) and ends with a 2x2/stride-2 max pooling.
    """
    net = internel_layer
    for stage, depth in enumerate(layers):
        stage_id = stage + 1
        for block in range(1, depth + 1):
            suffix = "%s_%s" % (stage_id, block)
            net = mx.sym.Convolution(data=net, kernel=(3, 3), pad=(1, 1),
                                     num_filter=filters[stage],
                                     name="conv" + suffix)
            if batch_norm:
                net = mx.symbol.BatchNorm(data=net, name="bn" + suffix)
            net = mx.sym.Activation(data=net, act_type="relu",
                                    name="relu" + suffix)
        net = mx.sym.Pooling(data=net, pool_type="max", kernel=(2, 2),
                             stride=(2, 2), name="pool%s" % stage_id)
    return net
def get_classifier(input_data, num_classes, **kwargs):
    """Build the VGG classifier head (flatten -> fc6 -> fc7 -> fc8).

    Newer MXNet versions accept a ``flatten`` keyword on FullyConnected;
    older ones raise when given it, so we retry without the keyword.

    :param input_data: symbol produced by the feature extractor.
    :param num_classes: number of output classes for the final fc8 layer.
    :return: the fc8 output symbol (pre-softmax logits).
    """
    flatten = mx.sym.Flatten(data=input_data, name="flatten")
    try:
        fc6 = mx.sym.FullyConnected(data=flatten, num_hidden=4096, name="fc6", flatten=False)
        relu6 = mx.sym.Activation(data=fc6, act_type="relu", name="relu6")
        drop6 = mx.sym.Dropout(data=relu6, p=0.5, name="drop6")
        fc7 = mx.sym.FullyConnected(data=drop6, num_hidden=4096, name="fc7", flatten=False)
        relu7 = mx.sym.Activation(data=fc7, act_type="relu", name="relu7")
        drop7 = mx.sym.Dropout(data=relu7, p=0.5, name="drop7")
        fc8 = mx.sym.FullyConnected(data=drop7, num_hidden=num_classes, name="fc8", flatten=False)
    # Was a bare `except:`, which also swallowed SystemExit/KeyboardInterrupt.
    # Narrowed to Exception: only the fallback for MXNet versions whose
    # FullyConnected rejects the `flatten` keyword.
    except Exception:
        fc6 = mx.sym.FullyConnected(data=flatten, num_hidden=4096, name="fc6")
        relu6 = mx.sym.Activation(data=fc6, act_type="relu", name="relu6")
        drop6 = mx.sym.Dropout(data=relu6, p=0.5, name="drop6")
        fc7 = mx.sym.FullyConnected(data=drop6, num_hidden=4096, name="fc7")
        relu7 = mx.sym.Activation(data=fc7, act_type="relu", name="relu7")
        drop7 = mx.sym.Dropout(data=relu7, p=0.5, name="drop7")
        fc8 = mx.sym.FullyConnected(data=drop7, num_hidden=num_classes, name="fc8")
    return fc8
def get_symbol(num_classes, num_layers=11, batch_norm=False, dtype='float32', **kwargs):
    """Build a complete VGG symbol (features + classifier + softmax).

    Parameters
    ----------
    num_classes : int
        Number of classification classes.
    num_layers : int, default 11
        Number of layers for the variant of VGG. Options are 11, 13, 16, 19.
    batch_norm : bool, default False
        Use batch normalization.
    dtype : str, float32 or float16
        Data precision.

    Raises
    ------
    ValueError
        If ``num_layers`` is not one of the supported VGG depths.
    """
    vgg_spec = {11: ([1, 1, 2, 2, 2], [64, 128, 256, 512, 512]),
                13: ([2, 2, 2, 2, 2], [64, 128, 256, 512, 512]),
                16: ([2, 2, 3, 3, 3], [64, 128, 256, 512, 512]),
                19: ([2, 2, 4, 4, 4], [64, 128, 256, 512, 512])}
    if num_layers not in vgg_spec:
        # Fixed typo ("Invalide") and missing spaces in the original message.
        raise ValueError("Invalid num_layers {}. Possible choices are 11, 13, 16, 19.".format(num_layers))
    layers, filters = vgg_spec[num_layers]
    data = mx.sym.Variable(name="data")
    if dtype == 'float16':
        # Cast the input down so the whole network runs in half precision.
        data = mx.sym.Cast(data=data, dtype=np.float16)
    feature = get_feature(data, layers, filters, batch_norm)
    classifier = get_classifier(feature, num_classes)
    if dtype == 'float16':
        # Cast back up so softmax runs in float32.
        classifier = mx.sym.Cast(data=classifier, dtype=np.float32)
    symbol = mx.sym.softmax(data=classifier, name='softmax')
    return symbol
| apache-2.0 |
Azure/azure-sdk-for-python | sdk/storage/azure-storage-file-datalake/azure/storage/filedatalake/_path_client.py | 1 | 48699 | # -------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
# --------------------------------------------------------------------------
from datetime import datetime
from typing import Any, Dict, Union
try:
from urllib.parse import urlparse, quote
except ImportError:
from urlparse import urlparse # type: ignore
from urllib2 import quote # type: ignore
import six
from azure.core.exceptions import AzureError, HttpResponseError
from azure.storage.blob import BlobClient
from ._data_lake_lease import DataLakeLeaseClient
from ._deserialize import process_storage_error
from ._generated import AzureDataLakeStorageRESTAPI
from ._models import LocationMode, DirectoryProperties, AccessControlChangeResult, AccessControlChanges, \
AccessControlChangeCounters, AccessControlChangeFailure
from ._serialize import convert_dfs_url_to_blob_url, get_mod_conditions, \
get_path_http_headers, add_metadata_headers, get_lease_id, get_source_mod_conditions, get_access_conditions
from ._shared.base_client import StorageAccountHostsMixin, parse_query
from ._shared.response_handlers import return_response_headers, return_headers_and_deserialized
# Error message raised by path operations (create/rename) when the caller has
# enabled client-side encryption flags, which these operations do not support.
_ERROR_UNSUPPORTED_METHOD_FOR_ENCRYPTION = (
    'The require_encryption flag is set, but encryption is not supported'
    ' for this method.')
class PathClient(StorageAccountHostsMixin):
    def __init__(
        self, account_url,  # type: str
        file_system_name,  # type: str
        path_name,  # type: str
        credential=None,  # type: Optional[Any]
        **kwargs  # type: Any
    ):
        # type: (...) -> None
        """Create a client for a single path (file or directory) in a file system.

        :param str account_url: URI to the storage account (dfs endpoint);
            "https://" is prepended when no scheme is given.
        :param str file_system_name: Name of the file system containing the path.
        :param str path_name: The file or directory path (root directory is "/").
        :param credential: Optional credential used to authorize requests.
        :raises ValueError: If the URL is not a string, has no host, or either
            name component is empty.
        """
        try:
            if not account_url.lower().startswith('http'):
                account_url = "https://" + account_url
        except AttributeError:
            raise ValueError("Account URL must be a string.")
        parsed_url = urlparse(account_url.rstrip('/'))
        # remove the preceding/trailing delimiter from the path components
        file_system_name = file_system_name.strip('/')
        # the name of root directory is /
        if path_name != '/':
            path_name = path_name.strip('/')
        if not (file_system_name and path_name):
            raise ValueError("Please specify a file system name and file path.")
        if not parsed_url.netloc:
            raise ValueError("Invalid URL: {}".format(account_url))
        # A parallel BlobClient bound to the equivalent ".blob." endpoint URL.
        blob_account_url = convert_dfs_url_to_blob_url(account_url)
        self._blob_account_url = blob_account_url
        datalake_hosts = kwargs.pop('_hosts', None)
        blob_hosts = None
        if datalake_hosts:
            # Derive the blob-side primary host from the dfs primary host;
            # the secondary host is left empty (ADLS has no secondary).
            blob_primary_account_url = convert_dfs_url_to_blob_url(datalake_hosts[LocationMode.PRIMARY])
            blob_hosts = {LocationMode.PRIMARY: blob_primary_account_url, LocationMode.SECONDARY: ""}
        self._blob_client = BlobClient(blob_account_url, file_system_name, path_name,
                                       credential=credential, _hosts=blob_hosts, **kwargs)
        _, sas_token = parse_query(parsed_url.query)
        self.file_system_name = file_system_name
        self.path_name = path_name
        self._query_str, self._raw_credential = self._format_query_string(sas_token, credential)
        super(PathClient, self).__init__(parsed_url, service='dfs', credential=self._raw_credential,
                                         _hosts=datalake_hosts, **kwargs)
        # ADLS doesn't support secondary endpoint, make sure it's empty
        self._hosts[LocationMode.SECONDARY] = ""
        self._client = AzureDataLakeStorageRESTAPI(self.url, file_system=file_system_name, path=path_name,
                                                   pipeline=self._pipeline)
        # Second generated client, bound to the blob endpoint URL but sharing
        # this client's pipeline.
        self._datalake_client_for_blob_operation = AzureDataLakeStorageRESTAPI(
            self._blob_client.url,
            file_system=file_system_name,
            path=path_name,
            pipeline=self._pipeline)
    def __exit__(self, *args):
        # Close the companion blob client before the base class tears down
        # this client's own transport.
        self._blob_client.close()
        super(PathClient, self).__exit__(*args)
    def close(self):
        # type: () -> None
        """ This method is to close the sockets opened by the client.
        It need not be used when using with a context manager.
        """
        self._blob_client.close()
        # Delegate to __exit__, which closes the blob client (again) and runs
        # the base class teardown.
        self.__exit__()
def _format_url(self, hostname):
file_system_name = self.file_system_name
if isinstance(file_system_name, six.text_type):
file_system_name = file_system_name.encode('UTF-8')
return "{}://{}/{}/{}{}".format(
self.scheme,
hostname,
quote(file_system_name),
quote(self.path_name, safe='~'),
self._query_str)
def _create_path_options(self, resource_type, content_settings=None, metadata=None, **kwargs):
# type: (Optional[ContentSettings], Optional[Dict[str, str]], **Any) -> Dict[str, Any]
if self.require_encryption or (self.key_encryption_key is not None):
raise ValueError(_ERROR_UNSUPPORTED_METHOD_FOR_ENCRYPTION)
access_conditions = get_access_conditions(kwargs.pop('lease', None))
mod_conditions = get_mod_conditions(kwargs)
path_http_headers = None
if content_settings:
path_http_headers = get_path_http_headers(content_settings)
options = {
'resource': resource_type,
'properties': add_metadata_headers(metadata),
'permissions': kwargs.pop('permissions', None),
'umask': kwargs.pop('umask', None),
'path_http_headers': path_http_headers,
'lease_access_conditions': access_conditions,
'modified_access_conditions': mod_conditions,
'timeout': kwargs.pop('timeout', None),
'cls': return_response_headers}
options.update(kwargs)
return options
    def _create(self, resource_type, content_settings=None, metadata=None, **kwargs):
        # type: (...) -> Dict[str, Union[str, datetime]]
        """
        Create directory or file

        :param resource_type:
            Required for Create File and Create Directory.
            The value must be "file" or "directory". Possible values include:
            'directory', 'file'
        :type resource_type: str
        :param ~azure.storage.filedatalake.ContentSettings content_settings:
            ContentSettings object used to set path properties.
        :param metadata:
            Name-value pairs associated with the file/directory as metadata.
        :type metadata: dict(str, str)
        :keyword lease:
            Required if the file/directory has an active lease. Value can be a LeaseClient object
            or the lease ID as a string.
        :paramtype lease: ~azure.storage.filedatalake.DataLakeLeaseClient or str
        :keyword str umask:
            Optional and only valid if Hierarchical Namespace is enabled for the account.
            When creating a file or directory and the parent folder does not have a default ACL,
            the umask restricts the permissions of the file or directory to be created.
            The resulting permission is given by p & ^u, where p is the permission and u is the umask.
            For example, if p is 0777 and u is 0057, then the resulting permission is 0720.
            The default permission is 0777 for a directory and 0666 for a file. The default umask is 0027.
            The umask must be specified in 4-digit octal notation (e.g. 0766).
        :keyword permissions:
            Optional and only valid if Hierarchical Namespace
            is enabled for the account. Sets POSIX access permissions for the file
            owner, the file owning group, and others. Each class may be granted
            read, write, or execute permission. The sticky bit is also supported.
            Both symbolic (rwxrw-rw-) and 4-digit octal notation (e.g. 0766) are
            supported.
        :type permissions: str
        :keyword ~datetime.datetime if_modified_since:
            A DateTime value. Azure expects the date value passed in to be UTC.
            If timezone is included, any non-UTC datetimes will be converted to UTC.
            If a date is passed in without timezone info, it is assumed to be UTC.
            Specify this header to perform the operation only
            if the resource has been modified since the specified time.
        :keyword ~datetime.datetime if_unmodified_since:
            A DateTime value. Azure expects the date value passed in to be UTC.
            If timezone is included, any non-UTC datetimes will be converted to UTC.
            If a date is passed in without timezone info, it is assumed to be UTC.
            Specify this header to perform the operation only if
            the resource has not been modified since the specified date/time.
        :keyword str etag:
            An ETag value, or the wildcard character (*). Used to check if the resource has changed,
            and act according to the condition specified by the `match_condition` parameter.
        :keyword ~azure.core.MatchConditions match_condition:
            The match condition to use upon the etag.
        :keyword int timeout:
            The timeout parameter is expressed in seconds.
        :return: Dict[str, Union[str, datetime]] of response headers.
        """
        options = self._create_path_options(
            resource_type,
            content_settings=content_settings,
            metadata=metadata,
            **kwargs)
        try:
            return self._client.path.create(**options)
        except HttpResponseError as error:
            # Translate the raw service error via the shared handler.
            process_storage_error(error)
@staticmethod
def _delete_path_options(**kwargs):
# type: (Optional[ContentSettings], Optional[Dict[str, str]], **Any) -> Dict[str, Any]
access_conditions = get_access_conditions(kwargs.pop('lease', None))
mod_conditions = get_mod_conditions(kwargs)
options = {
'lease_access_conditions': access_conditions,
'modified_access_conditions': mod_conditions,
'cls': return_response_headers,
'timeout': kwargs.pop('timeout', None)}
options.update(kwargs)
return options
    def _delete(self, **kwargs):
        # type: (**Any) -> Dict[str, Union[datetime, str]]
        """
        Marks the specified path for deletion.

        :keyword lease:
            Required if the file/directory has an active lease. Value can be a LeaseClient object
            or the lease ID as a string.
        :paramtype lease: ~azure.storage.filedatalake.DataLakeLeaseClient or str
        :keyword ~datetime.datetime if_modified_since:
            A DateTime value. Azure expects the date value passed in to be UTC.
            If timezone is included, any non-UTC datetimes will be converted to UTC.
            If a date is passed in without timezone info, it is assumed to be UTC.
            Specify this header to perform the operation only
            if the resource has been modified since the specified time.
        :keyword ~datetime.datetime if_unmodified_since:
            A DateTime value. Azure expects the date value passed in to be UTC.
            If timezone is included, any non-UTC datetimes will be converted to UTC.
            If a date is passed in without timezone info, it is assumed to be UTC.
            Specify this header to perform the operation only if
            the resource has not been modified since the specified date/time.
        :keyword str etag:
            An ETag value, or the wildcard character (*). Used to check if the resource has changed,
            and act according to the condition specified by the `match_condition` parameter.
        :keyword ~azure.core.MatchConditions match_condition:
            The match condition to use upon the etag.
        :keyword int timeout:
            The timeout parameter is expressed in seconds.
        :return: response headers dict (the `cls` callback returns headers).
        """
        options = self._delete_path_options(**kwargs)
        try:
            return self._client.path.delete(**options)
        except HttpResponseError as error:
            # Translate the raw service error via the shared handler.
            process_storage_error(error)
@staticmethod
def _set_access_control_options(owner=None, group=None, permissions=None, acl=None, **kwargs):
# type: (Optional[ContentSettings], Optional[Dict[str, str]], **Any) -> Dict[str, Any]
access_conditions = get_access_conditions(kwargs.pop('lease', None))
mod_conditions = get_mod_conditions(kwargs)
options = {
'owner': owner,
'group': group,
'permissions': permissions,
'acl': acl,
'lease_access_conditions': access_conditions,
'modified_access_conditions': mod_conditions,
'timeout': kwargs.pop('timeout', None),
'cls': return_response_headers}
options.update(kwargs)
return options
    def set_access_control(self, owner=None,  # type: Optional[str]
                           group=None,  # type: Optional[str]
                           permissions=None,  # type: Optional[str]
                           acl=None,  # type: Optional[str]
                           **kwargs):
        # type: (...) -> Dict[str, Union[str, datetime]]
        """
        Set the owner, group, permissions, or access control list for a path.

        :param owner:
            Optional. The owner of the file or directory.
        :type owner: str
        :param group:
            Optional. The owning group of the file or directory.
        :type group: str
        :param permissions:
            Optional and only valid if Hierarchical Namespace
            is enabled for the account. Sets POSIX access permissions for the file
            owner, the file owning group, and others. Each class may be granted
            read, write, or execute permission. The sticky bit is also supported.
            Both symbolic (rwxrw-rw-) and 4-digit octal notation (e.g. 0766) are
            supported.
            permissions and acl are mutually exclusive.
        :type permissions: str
        :param acl:
            Sets POSIX access control rights on files and directories.
            The value is a comma-separated list of access control entries. Each
            access control entry (ACE) consists of a scope, a type, a user or
            group identifier, and permissions in the format
            "[scope:][type]:[id]:[permissions]".
            permissions and acl are mutually exclusive.
        :type acl: str
        :keyword lease:
            Required if the file/directory has an active lease. Value can be a LeaseClient object
            or the lease ID as a string.
        :paramtype lease: ~azure.storage.filedatalake.DataLakeLeaseClient or str
        :keyword ~datetime.datetime if_modified_since:
            A DateTime value. Azure expects the date value passed in to be UTC.
            If timezone is included, any non-UTC datetimes will be converted to UTC.
            If a date is passed in without timezone info, it is assumed to be UTC.
            Specify this header to perform the operation only
            if the resource has been modified since the specified time.
        :keyword ~datetime.datetime if_unmodified_since:
            A DateTime value. Azure expects the date value passed in to be UTC.
            If timezone is included, any non-UTC datetimes will be converted to UTC.
            If a date is passed in without timezone info, it is assumed to be UTC.
            Specify this header to perform the operation only if
            the resource has not been modified since the specified date/time.
        :keyword str etag:
            An ETag value, or the wildcard character (*). Used to check if the resource has changed,
            and act according to the condition specified by the `match_condition` parameter.
        :keyword ~azure.core.MatchConditions match_condition:
            The match condition to use upon the etag.
        :keyword int timeout:
            The timeout parameter is expressed in seconds.
        :return: response dict (Etag and last modified).
        """
        if not any([owner, group, permissions, acl]):
            raise ValueError("At least one parameter should be set for set_access_control API")
        options = self._set_access_control_options(owner=owner, group=group, permissions=permissions, acl=acl, **kwargs)
        try:
            return self._client.path.set_access_control(**options)
        except HttpResponseError as error:
            # Translate the raw service error via the shared handler.
            process_storage_error(error)
@staticmethod
def _get_access_control_options(upn=None, # type: Optional[bool]
**kwargs):
# type: (...) -> Dict[str, Any]
access_conditions = get_access_conditions(kwargs.pop('lease', None))
mod_conditions = get_mod_conditions(kwargs)
options = {
'action': 'getAccessControl',
'upn': upn if upn else False,
'lease_access_conditions': access_conditions,
'modified_access_conditions': mod_conditions,
'timeout': kwargs.pop('timeout', None),
'cls': return_response_headers}
options.update(kwargs)
return options
    def get_access_control(self, upn=None,  # type: Optional[bool]
                           **kwargs):
        # type: (...) -> Dict[str, Any]
        """
        Get the owner, group, permissions, and access control list for a path.

        :param upn: Optional.
            Valid only when Hierarchical Namespace is
            enabled for the account. If "true", the user identity values returned
            in the x-ms-owner, x-ms-group, and x-ms-acl response headers will be
            transformed from Azure Active Directory Object IDs to User Principal
            Names. If "false", the values will be returned as Azure Active
            Directory Object IDs. The default value is false. Note that group and
            application Object IDs are not translated because they do not have
            unique friendly names.
        :type upn: bool
        :keyword lease:
            Required if the file/directory has an active lease. Value can be a LeaseClient object
            or the lease ID as a string.
        :paramtype lease: ~azure.storage.filedatalake.DataLakeLeaseClient or str
        :keyword ~datetime.datetime if_modified_since:
            A DateTime value. Azure expects the date value passed in to be UTC.
            If timezone is included, any non-UTC datetimes will be converted to UTC.
            If a date is passed in without timezone info, it is assumed to be UTC.
            Specify this header to perform the operation only
            if the resource has been modified since the specified time.
        :keyword ~datetime.datetime if_unmodified_since:
            A DateTime value. Azure expects the date value passed in to be UTC.
            If timezone is included, any non-UTC datetimes will be converted to UTC.
            If a date is passed in without timezone info, it is assumed to be UTC.
            Specify this header to perform the operation only if
            the resource has not been modified since the specified date/time.
        :keyword str etag:
            An ETag value, or the wildcard character (*). Used to check if the resource has changed,
            and act according to the condition specified by the `match_condition` parameter.
        :keyword ~azure.core.MatchConditions match_condition:
            The match condition to use upon the etag.
        :keyword int timeout:
            The timeout parameter is expressed in seconds.
        :return: response dict.
        """
        options = self._get_access_control_options(upn=upn, **kwargs)
        try:
            # The ACL information is carried on the get_properties response.
            return self._client.path.get_properties(**options)
        except HttpResponseError as error:
            process_storage_error(error)
@staticmethod
def _set_access_control_recursive_options(mode, acl, **kwargs):
# type: (str, str, **Any) -> Dict[str, Any]
options = {
'mode': mode,
'force_flag': kwargs.pop('continue_on_failure', None),
'timeout': kwargs.pop('timeout', None),
'continuation': kwargs.pop('continuation_token', None),
'max_records': kwargs.pop('batch_size', None),
'acl': acl,
'cls': return_headers_and_deserialized}
options.update(kwargs)
return options
    def set_access_control_recursive(self,
                                     acl,
                                     **kwargs):
        # type: (str, **Any) -> AccessControlChangeResult
        """
        Sets the Access Control on a path and sub-paths.

        :param acl:
            Sets POSIX access control rights on files and directories.
            The value is a comma-separated list of access control entries. Each
            access control entry (ACE) consists of a scope, a type, a user or
            group identifier, and permissions in the format
            "[scope:][type]:[id]:[permissions]".
        :type acl: str
        :keyword func(~azure.storage.filedatalake.AccessControlChanges) progress_hook:
            Callback where the caller can track progress of the operation
            as well as collect paths that failed to change Access Control.
        :keyword str continuation_token:
            Optional continuation token that can be used to resume previously stopped operation.
        :keyword int batch_size:
            Optional. If data set size exceeds batch size then operation will be split into multiple
            requests so that progress can be tracked. Batch size should be between 1 and 2000.
            The default when unspecified is 2000.
        :keyword int max_batches:
            Optional. Defines maximum number of batches that single change Access Control operation can execute.
            If maximum is reached before all sub-paths are processed,
            then continuation token can be used to resume operation.
            Empty value indicates that maximum number of batches is unbounded and operation continues till end.
        :keyword bool continue_on_failure:
            If set to False, the operation will terminate quickly on encountering user errors (4XX).
            If True, the operation will ignore user errors and proceed with the operation on other sub-entities of
            the directory.
            Continuation token will only be returned when continue_on_failure is True in case of user errors.
            If not set the default value is False for this.
        :keyword int timeout:
            The timeout parameter is expressed in seconds.
        :return: A summary of the recursive operations, including the count of successes and failures,
            as well as a continuation token in case the operation was terminated prematurely.
        :rtype: :class:`~azure.storage.filedatalake.AccessControlChangeResult`
        :raises ~azure.core.exceptions.AzureError:
            User can restart the operation using continuation_token field of AzureError if the token is available.
        """
        if not acl:
            raise ValueError("The Access Control List must be set for this operation")
        # progress_hook/max_batches are handled by the shared driver, not the service call.
        progress_hook = kwargs.pop('progress_hook', None)
        max_batches = kwargs.pop('max_batches', None)
        options = self._set_access_control_recursive_options(mode='set', acl=acl, **kwargs)
        return self._set_access_control_internal(options=options, progress_hook=progress_hook,
                                                 max_batches=max_batches)
    def update_access_control_recursive(self,
                                        acl,
                                        **kwargs):
        # type: (str, **Any) -> AccessControlChangeResult
        """
        Modifies the Access Control on a path and sub-paths.

        :param acl:
            Modifies POSIX access control rights on files and directories.
            The value is a comma-separated list of access control entries. Each
            access control entry (ACE) consists of a scope, a type, a user or
            group identifier, and permissions in the format
            "[scope:][type]:[id]:[permissions]".
        :type acl: str
        :keyword func(~azure.storage.filedatalake.AccessControlChanges) progress_hook:
            Callback where the caller can track progress of the operation
            as well as collect paths that failed to change Access Control.
        :keyword str continuation_token:
            Optional continuation token that can be used to resume previously stopped operation.
        :keyword int batch_size:
            Optional. If data set size exceeds batch size then operation will be split into multiple
            requests so that progress can be tracked. Batch size should be between 1 and 2000.
            The default when unspecified is 2000.
        :keyword int max_batches:
            Optional. Defines maximum number of batches that single change Access Control operation can execute.
            If maximum is reached before all sub-paths are processed,
            then continuation token can be used to resume operation.
            Empty value indicates that maximum number of batches is unbounded and operation continues till end.
        :keyword bool continue_on_failure:
            If set to False, the operation will terminate quickly on encountering user errors (4XX).
            If True, the operation will ignore user errors and proceed with the operation on other sub-entities of
            the directory.
            Continuation token will only be returned when continue_on_failure is True in case of user errors.
            If not set the default value is False for this.
        :keyword int timeout:
            The timeout parameter is expressed in seconds.
        :return: A summary of the recursive operations, including the count of successes and failures,
            as well as a continuation token in case the operation was terminated prematurely.
        :rtype: :class:`~azure.storage.filedatalake.AccessControlChangeResult`
        :raises ~azure.core.exceptions.AzureError:
            User can restart the operation using continuation_token field of AzureError if the token is available.
        """
        if not acl:
            raise ValueError("The Access Control List must be set for this operation")
        # progress_hook/max_batches are handled by the shared driver, not the service call.
        progress_hook = kwargs.pop('progress_hook', None)
        max_batches = kwargs.pop('max_batches', None)
        options = self._set_access_control_recursive_options(mode='modify', acl=acl, **kwargs)
        return self._set_access_control_internal(options=options, progress_hook=progress_hook,
                                                 max_batches=max_batches)
    def remove_access_control_recursive(self,
                                        acl,
                                        **kwargs):
        # type: (str, **Any) -> AccessControlChangeResult
        """
        Removes the Access Control on a path and sub-paths.

        :param acl:
            Removes POSIX access control rights on files and directories.
            The value is a comma-separated list of access control entries. Each
            access control entry (ACE) consists of a scope, a type, and a user or
            group identifier in the format "[scope:][type]:[id]".
        :type acl: str
        :keyword func(~azure.storage.filedatalake.AccessControlChanges) progress_hook:
            Callback where the caller can track progress of the operation
            as well as collect paths that failed to change Access Control.
        :keyword str continuation_token:
            Optional continuation token that can be used to resume previously stopped operation.
        :keyword int batch_size:
            Optional. If data set size exceeds batch size then operation will be split into multiple
            requests so that progress can be tracked. Batch size should be between 1 and 2000.
            The default when unspecified is 2000.
        :keyword int max_batches:
            Optional. Defines maximum number of batches that single change Access Control operation can execute.
            If maximum is reached before all sub-paths are processed,
            then continuation token can be used to resume operation.
            Empty value indicates that maximum number of batches is unbounded and operation continues till end.
        :keyword bool continue_on_failure:
            If set to False, the operation will terminate quickly on encountering user errors (4XX).
            If True, the operation will ignore user errors and proceed with the operation on other sub-entities of
            the directory.
            Continuation token will only be returned when continue_on_failure is True in case of user errors.
            If not set the default value is False for this.
        :keyword int timeout:
            The timeout parameter is expressed in seconds.
        :return: A summary of the recursive operations, including the count of successes and failures,
            as well as a continuation token in case the operation was terminated prematurely.
        :rtype: :class:`~azure.storage.filedatalake.AccessControlChangeResult`
        :raises ~azure.core.exceptions.AzureError:
            User can restart the operation using continuation_token field of AzureError if the token is available.
        """
        if not acl:
            raise ValueError("The Access Control List must be set for this operation")
        # progress_hook/max_batches are handled by the shared driver, not the service call.
        progress_hook = kwargs.pop('progress_hook', None)
        max_batches = kwargs.pop('max_batches', None)
        options = self._set_access_control_recursive_options(mode='remove', acl=acl, **kwargs)
        return self._set_access_control_internal(options=options, progress_hook=progress_hook,
                                                 max_batches=max_batches)
    def _set_access_control_internal(self, options, progress_hook, max_batches=None):
        """Batched driver shared by set/update/remove_access_control_recursive.

        Calls the generated client repeatedly until the service stops
        returning a continuation token or ``max_batches`` batches have run,
        accumulating per-batch counters into aggregate totals and invoking
        ``progress_hook`` (when given) after every batch.  On error the
        resume token is attached to the raised exception.
        """
        try:
            continue_on_failure = options.get('force_flag')
            total_directories_successful = 0
            total_files_success = 0
            total_failure_count = 0
            batch_count = 0
            last_continuation_token = None
            current_continuation_token = None
            continue_operation = True
            while continue_operation:
                headers, resp = self._client.path.set_access_control_recursive(**options)
                # make a running tally so that we can report the final results
                total_directories_successful += resp.directories_successful
                total_files_success += resp.files_successful
                total_failure_count += resp.failure_count
                batch_count += 1
                current_continuation_token = headers['continuation']
                # Remember the last non-empty token so a later failure can be resumed.
                if current_continuation_token is not None:
                    last_continuation_token = current_continuation_token
                if progress_hook is not None:
                    # Report both this batch's counters and the running totals.
                    progress_hook(AccessControlChanges(
                        batch_counters=AccessControlChangeCounters(
                            directories_successful=resp.directories_successful,
                            files_successful=resp.files_successful,
                            failure_count=resp.failure_count,
                        ),
                        aggregate_counters=AccessControlChangeCounters(
                            directories_successful=total_directories_successful,
                            files_successful=total_files_success,
                            failure_count=total_failure_count,
                        ),
                        batch_failures=[AccessControlChangeFailure(
                            name=failure.name,
                            is_directory=failure.type == 'DIRECTORY',
                            error_message=failure.error_message) for failure in resp.failed_entries],
                        continuation=last_continuation_token))
                # update the continuation token, if there are more operations that cannot be completed in a single call
                max_batches_satisfied = (max_batches is not None and batch_count == max_batches)
                continue_operation = bool(current_continuation_token) and not max_batches_satisfied
                options['continuation'] = current_continuation_token
            # currently the service stops on any failure, so we should send back the last continuation token
            # for the user to retry the failed updates
            # otherwise we should just return what the service gave us
            return AccessControlChangeResult(counters=AccessControlChangeCounters(
                directories_successful=total_directories_successful,
                files_successful=total_files_success,
                failure_count=total_failure_count),
                continuation=last_continuation_token
                if total_failure_count > 0 and not continue_on_failure else current_continuation_token)
        except HttpResponseError as error:
            # Attach the resume token so callers can restart the operation.
            error.continuation_token = last_continuation_token
            process_storage_error(error)
        except AzureError as error:
            # Attach the resume token so callers can restart the operation.
            error.continuation_token = last_continuation_token
            raise error
def _rename_path_options(self, rename_source, content_settings=None, metadata=None, **kwargs):
# type: (Optional[ContentSettings], Optional[Dict[str, str]], **Any) -> Dict[str, Any]
if self.require_encryption or (self.key_encryption_key is not None):
raise ValueError(_ERROR_UNSUPPORTED_METHOD_FOR_ENCRYPTION)
if metadata or kwargs.pop('permissions', None) or kwargs.pop('umask', None):
raise ValueError("metadata, permissions, umask is not supported for this operation")
access_conditions = get_access_conditions(kwargs.pop('lease', None))
source_lease_id = get_lease_id(kwargs.pop('source_lease', None))
mod_conditions = get_mod_conditions(kwargs)
source_mod_conditions = get_source_mod_conditions(kwargs)
path_http_headers = None
if content_settings:
path_http_headers = get_path_http_headers(content_settings)
options = {
'rename_source': rename_source,
'path_http_headers': path_http_headers,
'lease_access_conditions': access_conditions,
'source_lease_id': source_lease_id,
'modified_access_conditions': mod_conditions,
'source_modified_access_conditions': source_mod_conditions,
'timeout': kwargs.pop('timeout', None),
'mode': 'legacy',
'cls': return_response_headers}
options.update(kwargs)
return options
def _rename_path(self, rename_source, **kwargs):
    # type: (str, **Any) -> Dict[str, Any]
    """Rename a directory or file via the service's path-create operation.

    :param str rename_source:
        The source to rename from, in the format "/{filesystem}/{path}".
    :keyword ~azure.storage.filedatalake.ContentSettings content_settings:
        ContentSettings object used to set path properties.
    :keyword source_lease:
        A lease ID for the source path. If specified, the source path must
        have an active lease and the lease ID must match.
    :paramtype source_lease: ~azure.storage.filedatalake.DataLakeLeaseClient or str
    :keyword lease:
        Required if the file/directory has an active lease. Value can be a
        LeaseClient object or the lease ID as a string.
    :paramtype lease: ~azure.storage.filedatalake.DataLakeLeaseClient or str
    :keyword ~datetime.datetime if_modified_since:
        Perform only if the destination was modified since this UTC time.
    :keyword ~datetime.datetime if_unmodified_since:
        Perform only if the destination was not modified since this UTC time.
    :keyword str etag:
        An ETag value, or the wildcard character (*), checked against the
        destination per ``match_condition``.
    :keyword ~azure.core.MatchConditions match_condition:
        The match condition to use upon the etag.
    :keyword ~datetime.datetime source_if_modified_since:
        Perform only if the source was modified since this UTC time.
    :keyword ~datetime.datetime source_if_unmodified_since:
        Perform only if the source was not modified since this UTC time.
    :keyword str source_etag:
        The source ETag value, or the wildcard character (*), checked against
        the source per ``source_match_condition``.
    :keyword ~azure.core.MatchConditions source_match_condition:
        The source match condition to use upon the etag.
    :keyword int timeout:
        The timeout parameter is expressed in seconds.
    """
    try:
        # The rename is implemented as a "create" of the destination path
        # that names the source to move from.
        return self._client.path.create(
            **self._rename_path_options(rename_source, **kwargs))
    except HttpResponseError as error:
        process_storage_error(error)
def _get_path_properties(self, **kwargs):
    # type: (**Any) -> Union[FileProperties, DirectoryProperties]
    """Return user-defined metadata, standard HTTP properties, and system
    properties for the file or directory.

    Does not return the content of the directory or file; delegates directly
    to the underlying blob client.

    :keyword lease:
        Required if the directory or file has an active lease. Value can be a
        DataLakeLeaseClient object or the lease ID as a string.
    :paramtype lease: ~azure.storage.filedatalake.DataLakeLeaseClient or str
    :keyword ~datetime.datetime if_modified_since:
        Perform only if the resource was modified since this UTC time.
    :keyword ~datetime.datetime if_unmodified_since:
        Perform only if the resource was not modified since this UTC time.
    :keyword str etag:
        An ETag value, or the wildcard character (*), checked per
        ``match_condition``.
    :keyword ~azure.core.MatchConditions match_condition:
        The match condition to use upon the etag.
    :keyword int timeout:
        The timeout parameter is expressed in seconds.
    :rtype: DirectoryProperties or FileProperties
    """
    return self._blob_client.get_blob_properties(**kwargs)
def _exists(self, **kwargs):
    # type: (**Any) -> bool
    """Return True if the path exists, False otherwise.

    Delegates to the underlying blob client's existence check.

    :kwarg int timeout:
        The timeout parameter is expressed in seconds.
    :returns: boolean
    """
    blob_exists = self._blob_client.exists(**kwargs)
    return blob_exists
def set_metadata(self, metadata,  # type: Dict[str, str]
                 **kwargs):
    # type: (...) -> Dict[str, Union[str, datetime]]
    """Set one or more user-defined name-value pairs on this path.

    Each call replaces all existing metadata attached to the resource.
    To remove all metadata, call this operation with an empty metadata dict.

    :param metadata:
        A dict of name-value pairs to associate with the resource as
        metadata. Example: {'category': 'test'}
    :type metadata: dict[str, str]
    :keyword lease:
        If specified, the operation only succeeds if the resource's lease is
        active and matches this ID.
    :paramtype lease: ~azure.storage.filedatalake.DataLakeLeaseClient or str
    :keyword ~datetime.datetime if_modified_since:
        Perform only if the resource was modified since this UTC time.
    :keyword ~datetime.datetime if_unmodified_since:
        Perform only if the resource was not modified since this UTC time.
    :keyword str etag:
        An ETag value, or the wildcard character (*), checked per
        ``match_condition``.
    :keyword ~azure.core.MatchConditions match_condition:
        The match condition to use upon the etag.
    :keyword int timeout:
        The timeout parameter is expressed in seconds.
    :returns: updated property dict (Etag and last modified).
    """
    blob_client = self._blob_client
    return blob_client.set_blob_metadata(metadata=metadata, **kwargs)
def set_http_headers(self, content_settings=None,  # type: Optional[ContentSettings]
                     **kwargs):
    # type: (...) -> Dict[str, Any]
    """Set system properties (HTTP headers) on the file or directory.

    If one property is set through ``content_settings``, all of the
    properties are overridden.

    :param ~azure.storage.filedatalake.ContentSettings content_settings:
        ContentSettings object used to set file/directory properties.
    :keyword lease:
        If specified, the operation only succeeds if the resource's lease is
        active and matches this ID.
    :paramtype lease: ~azure.storage.filedatalake.DataLakeLeaseClient or str
    :keyword ~datetime.datetime if_modified_since:
        Perform only if the resource was modified since this UTC time.
    :keyword ~datetime.datetime if_unmodified_since:
        Perform only if the resource was not modified since this UTC time.
    :keyword str etag:
        An ETag value, or the wildcard character (*), checked per
        ``match_condition``.
    :keyword ~azure.core.MatchConditions match_condition:
        The match condition to use upon the etag.
    :keyword int timeout:
        The timeout parameter is expressed in seconds.
    :returns: updated property dict (Etag and last modified)
    :rtype: Dict[str, Any]
    """
    blob_client = self._blob_client
    return blob_client.set_http_headers(content_settings=content_settings, **kwargs)
def acquire_lease(self, lease_duration=-1,  # type: Optional[int]
                  lease_id=None,            # type: Optional[str]
                  **kwargs):
    # type: (...) -> DataLakeLeaseClient
    """Request a new lease on the file or directory.

    If the path does not have an active lease, the DataLake service creates
    a lease on it and returns a new lease ID.

    :param int lease_duration:
        Duration of the lease in seconds, or -1 for a lease that never
        expires. A non-infinite lease can be between 15 and 60 seconds.
        The duration cannot be changed with renew or change.
        Default is -1 (infinite lease).
    :param str lease_id:
        Proposed lease ID, in GUID string format. The service returns
        400 (Invalid request) if it is not in the correct format.
    :keyword ~datetime.datetime if_modified_since:
        Perform only if the resource was modified since this UTC time.
    :keyword ~datetime.datetime if_unmodified_since:
        Perform only if the resource was not modified since this UTC time.
    :keyword str etag:
        An ETag value, or the wildcard character (*), checked per
        ``match_condition``.
    :keyword ~azure.core.MatchConditions match_condition:
        The match condition to use upon the etag.
    :keyword int timeout:
        The timeout parameter is expressed in seconds.
    :returns: A DataLakeLeaseClient object that can be run in a context manager.
    :rtype: ~azure.storage.filedatalake.DataLakeLeaseClient
    """
    # Build the lease client first, then perform the acquire call on it so
    # the caller receives an object with an already-active lease.
    new_lease = DataLakeLeaseClient(self, lease_id=lease_id)  # type: ignore
    new_lease.acquire(lease_duration=lease_duration, **kwargs)
    return new_lease
| mit |
jeremiahyan/lammps | tools/moltemplate/src/ttree_matrix_stack.py | 19 | 29921 | # Author: Andrew Jewett (jewett.aij@gmail.com)
# http://www.chem.ucsb.edu/~sheagroup
# License: 3-clause BSD License (See LICENSE.TXT)
# Copyright (c) 2012, Regents of the University of California
# All rights reserved.
from collections import deque
from array import array
from ttree_lex import *
#import sys
def MultMat(dest, A, B):
    """Multiply matrix A by matrix B, storing the product in "dest".

    "dest" must already be sized (len(A) x len(B[0])) and must not alias
    A or B.
    """
    n_rows = len(A)
    n_cols = len(B[0])
    n_inner = len(B)          # == len(A[0])
    for r in range(n_rows):
        for c in range(n_cols):
            total = 0.0
            for k in range(n_inner):
                total += A[r][k] * B[k][c]
            dest[r][c] = total
def MatToStr(M):
    """Return a printable multi-line string representation of matrix M
    (entries separated by spaces, one row per line)."""
    pieces = []
    for row in M:
        for entry in row:
            pieces.append(str(entry) + ' ')
        pieces.append('\n')
    return ''.join(pieces)
def LinTransform(dest, M, x):
    """Multiply matrix M by the 1-dimensional array (vector) "x" from the
    right, storing the result in the 1-dimensional array "dest".

    Here "x" and "dest" are treated as column vectors (not row vectors).
    "dest" must already have len(M) entries and should not alias "x".
    """
    I = len(M)  # BUG FIX: this previously read "len(A)"; "A" is not defined
                # anywhere in this function, so every call raised NameError.
    J = len(x)
    for i in range(0, I):
        dest[i] = 0.0
        for j in range(0, J):
            dest[i] += M[i][j] * x[j]
def AffineTransform(dest, M, x):
    """Apply the affine transformation encoded in the 3x4 matrix M to the
    3-dimensional vector "x", storing the result in "dest":

        dest[i] = M[i][0]*x[0] + M[i][1]*x[1] + M[i][2]*x[2] + M[i][3]

    i.e. the first three columns of M are a linear map and the final column
    is a translation offset.  "dest" should not alias "x".
    """
    dim = len(M)
    #assert(len(M[0]) == dim+1)
    for row in range(dim):
        acc = 0.0
        for col in range(dim):
            acc += M[row][col] * x[col]
        # Add the translation offset stored in the final column last.
        dest[row] = acc + M[row][dim]
def AffineCompose(dest, M2, M1):
    """Compose two affine transformations (each stored as a 3x4 matrix).

    Multiplication of 3x4 matrices is technically undefined; what this
    computes is the affine transformation (M2 M1), defined so that applying
    it to a vector x gives M2 (M1 x): first M1 is applied to x, then M2 is
    applied to the result.  The product is stored in "dest", which must not
    alias M1 or M2.
    """
    dim = len(M1)
    #assert(len(M1[0]) == dim+1)
    #assert(len(M2[0]) == dim+1)
    for r in range(dim):
        # Columns 0..dim-1: ordinary matrix product of the linear parts.
        # Column dim: M2's linear part applied to M1's offset...
        for c in range(dim + 1):
            total = 0.0
            for k in range(dim):
                total += M2[r][k] * M1[k][c]
            dest[r][c] = total
        # ...plus M2's own translation offset.
        dest[r][dim] += M2[r][dim]
def CopyMat(dest, source):
    """Copy the entries of matrix "source" into "dest", element by element
    (the row lists of "dest" are reused, not replaced)."""
    for r, row in enumerate(source):
        for c, value in enumerate(row):
            dest[r][c] = value
class AffineStack(object):
    """
    This class defines a matrix stack used to define compositions of affine
    transformations of 3 dimensional coordinates (rotation and translation).
    Affine transformations are represented using 3x4 matrices.
    (Coordinates of atoms are thought of as column vectors: [[x],[y],[z]],
    although they are represented internally in the more ordinary way
    [x,y,z].  To apply an affine transformation to a vector, multiply the
    vector by the matrix, from the left-hand side, as explained in the
    comments for AffineTransform(dest, M, x).)
    Note: The last column of the 3x4 matrix stores a translational offset.
    This bears similarity with the original OpenGL matrix stack
    http://content.gpwiki.org/index.php/OpenGL:Tutorials:Theory
    (OpenGL uses 4x4 matrices.  We don't need the final row of these
    matrices, because in OpenGL these rows are used for perspective
    transformations.)
    http://en.wikipedia.org/wiki/Homogeneous_coordinates#Use_in_computer_graphics
    """

    def __init__(self):
        # self.stack: history of previous cumulative matrices (a deque)
        # self.M:     current cumulative 3x4 transformation matrix
        # self._tmp:  scratch matrix reused by PushRight()/PushLeft()
        self.stack = None
        self.M = None
        self._tmp = None
        self.Clear()

    def Clear(self):
        # Reset to a single identity transformation with no saved history.
        self.stack = deque([])
        self.M = [[1.0, 0.0, 0.0, 0.0],
                  [0.0, 1.0, 0.0, 0.0],
                  [0.0, 0.0, 1.0, 0.0]]   # (identity, initially)
        self._tmp = [[1.0, 0.0, 0.0, 0.0],
                     [0.0, 1.0, 0.0, 0.0],
                     [0.0, 0.0, 1.0, 0.0]]

    def PushRight(self, M):
        # Push a copy of matrix self.M onto the stack.
        # We make no distinction between "right" and "left" here:
        # all transformations are pushed onto the stack in the same way.
        # ("Right" and "left" refer to which side of the cumulative matrix
        # product the new matrix is multiplied on.  Because not all matrices
        # need be invertible, matrices must be popped from the stack in the
        # reverse order they were pushed; this is why pushing and popping at
        # arbitrary ends -- append()/appendleft()/pop()/popleft() -- is not
        # supported.)
        self.stack.append([[self.M[i][j] for j in range(0,len(self.M[i]))]
                           for i in range(0,len(self.M))])
        # Compose the new matrix on the RIGHT of the running product:
        AffineCompose(self._tmp, self.M, M)   # afterwards, self._tmp = self.M * M
        CopyMat(self.M, self._tmp)            # copy self._tmp into self.M

    def PushLeft(self, M):
        # Push a copy of matrix self.M onto the stack.
        # (Same bookkeeping as PushRight(); see the comments there.)
        self.stack.append([[self.M[i][j] for j in range(0,len(self.M[i]))]
                           for i in range(0,len(self.M))])
        # Compose the new matrix on the LEFT of the running product:
        AffineCompose(self._tmp, M, self.M)   # afterwards, self._tmp = M * self.M
        CopyMat(self.M, self._tmp)            # copy self._tmp into self.M

    def Pop(self):
        # Restore the cumulative matrix saved by the most recent Push*().
        CopyMat(self.M, self.stack.pop())
        # (No need to return a matrix, "self.M", after popping.
        #  The caller can directly access self.M later.)

    def PopRight(self):
        # Pops must occur in the reverse order of pushes, so popping a
        # "right" push is the same operation as popping a "left" push.
        self.Pop()

    def PopLeft(self):
        self.Pop()

    def PushCommandsRight(self,
                          text,  # text containing affine transformation commands
                          # The next two arguments are optional:
                          src_loc = OSrcLoc(),  # for debugging
                          xcm = None):          # position of center of object
        """Generate affine transformation matrices from simple text commands
        (such as \"rot(90,0,0,1)\" and \"move(0,5.0,0)\") and push the
        resulting single composed matrix onto the right of this stack.
        Chains of commands can be strung together, for example:
            \"rot(90,0,0,1).move(0,5.0,0)\"
        (Commands ending in \"cm\" -- carried out relative to the object's
        center of mass \"xcm\" -- are currently disabled; see
        CommandsToMatrix().)
        """
        self.PushRight(AffineStack.CommandsToMatrix(text, src_loc, xcm))

    def PushCommandsLeft(self,
                         text,  # text containing affine transformation commands
                         # The next two arguments are optional:
                         src_loc = OSrcLoc(),  # for debugging
                         xcm = None):          # position of center of object
        # Same as PushCommandsRight(), but composes on the left.
        self.PushLeft(AffineStack.CommandsToMatrix(text, src_loc, xcm))

    def __len__(self):
        # Number of matrices held: the saved history plus the current one.
        return 1 + len(self.stack)

    @staticmethod
    def CommandsToMatrix(text,  # text containing affine transformation commands
                         src_loc = OSrcLoc(),  # for debugging
                         xcm = None):          # position of center of object
        """Parse a chain of transformation commands (separated by ').')
        such as \"rot(90,0,0,1).move(0,5.0,0)\" and return the single 3x4
        matrix representing their composition.  Commands appearing earlier
        in the chain are applied to the coordinates first.  Supported:
          move(dx,dy,dz)
          rot(angleDeg, axisX, axisY, axisZ [, centX, centY, centZ])
          rotvv(Xold,Yold,Zold, Xnew,Ynew,Znew [, centX, centY, centZ])
          scale(ratio) / scale(rx,ry,rz), optionally followed by a center
        Raises InputError on a malformed or unknown command.
        (\"xcm\" belonged to the disabled \"movecm\"/\"rotcm\"/\"scalecm\"
        commands and is currently unused.)
        """
        # Mdest accumulates the composition; M holds each parsed command;
        # Mtmp is scratch space for AffineCompose().
        Mdest = [[1.0, 0.0, 0.0, 0.0], [0.0, 1.0, 0.0, 0.0], [0.0, 0.0, 1.0, 0.0]]
        M     = [[1.0, 0.0, 0.0, 0.0], [0.0, 1.0, 0.0, 0.0], [0.0, 0.0, 1.0, 0.0]]
        Mtmp  = [[1.0, 0.0, 0.0, 0.0], [0.0, 1.0, 0.0, 0.0], [0.0, 0.0, 1.0, 0.0]]
        transform_commands = text.split(').')
        for transform_str in transform_commands:
            if transform_str.find('move(') == 0:
                i_paren_close = transform_str.find(')')
                if i_paren_close == -1:
                    i_paren_close = len(transform_str)
                args = transform_str[5:i_paren_close].split(',')
                if (len(args) != 3):
                    raise InputError('Error near '+ErrorLeader(src_loc.infile, src_loc.lineno)+':\n'
                                     ' Invalid command: \"'+transform_str+'\"\n'
                                     ' This command requires 3 numerical arguments.')
                # Pure translation by (args[0], args[1], args[2]):
                M = [[1.0, 0.0, 0.0, float(args[0])],
                     [0.0, 1.0, 0.0, float(args[1])],
                     [0.0, 0.0, 1.0, float(args[2])]]
                AffineCompose(Mtmp, M, Mdest)
                CopyMat(Mdest, Mtmp)
            # (A commented-out "movecm" variant, translating relative to the
            #  object's center of mass "xcm", is currently disabled.)

            elif transform_str.find('rot(') == 0:
                i_paren_close = transform_str.find(')')
                if i_paren_close == -1:
                    i_paren_close = len(transform_str)
                args = transform_str[4:i_paren_close].split(',')
                center_v = None
                if (len(args) == 7):
                    # Optional trailing 3 arguments: the center of rotation.
                    center_v = [float(args[4]), float(args[5]), float(args[6])]
                elif (len(args) != 4):
                    raise InputError('Error near '+ErrorLeader(src_loc.infile, src_loc.lineno)+':\n'
                                     ' Invalid command: \"'+transform_str+'\"\n'
                                     ' This command requires either 4 or 7 numerical arguments. Either:\n'
                                     ' rot(angle, axisX, axisY, axiZ) or \n'
                                     ' rot(angle, axisX, axisY, axiZ, centerX, centerY, centerZ)')
                M[0][3] = 0.0  # RotMatAXYZ() only modifies the 3x3 submatrix of M;
                M[1][3] = 0.0  # the remaining final column must be zeroed by hand.
                M[2][3] = 0.0
                RotMatAXYZ(M,
                           float(args[0])*math.pi/180.0,
                           float(args[1]),
                           float(args[2]),
                           float(args[3]))
                if (center_v == None):
                    AffineCompose(Mtmp, M, Mdest)
                    CopyMat(Mdest, Mtmp)
                else:
                    # Move "center_v" to the origin
                    moveCentToOrig = [[1.0, 0.0, 0.0, -center_v[0]],
                                      [0.0, 1.0, 0.0, -center_v[1]],
                                      [0.0, 0.0, 1.0, -center_v[2]]]
                    AffineCompose(Mtmp, moveCentToOrig, Mdest)
                    CopyMat(Mdest, Mtmp)
                    # Rotate the coordinates (relative to the origin)
                    AffineCompose(Mtmp, M, Mdest)   # M is the rotation matrix
                    CopyMat(Mdest, Mtmp)
                    # Move the origin back to center_v
                    moveCentBack = [[1.0, 0.0, 0.0, center_v[0]],
                                    [0.0, 1.0, 0.0, center_v[1]],
                                    [0.0, 0.0, 1.0, center_v[2]]]
                    AffineCompose(Mtmp, moveCentBack, Mdest)
                    CopyMat(Mdest, Mtmp)
            # (A commented-out "rotcm" variant, rotating about the object's
            #  center of mass "xcm", is currently disabled.)

            elif transform_str.find('rotvv(') == 0:
                i_paren_close = transform_str.find(')')
                if i_paren_close == -1:
                    i_paren_close = len(transform_str)
                args = transform_str[6:i_paren_close].split(',')
                center_v = None
                if (len(args) == 9):
                    # Optional trailing 3 arguments: the center of rotation.
                    center_v = [float(args[6]), float(args[7]), float(args[8])]
                elif (len(args) != 6):
                    raise InputError('Error near '+ErrorLeader(src_loc.infile, src_loc.lineno)+':\n'
                                     ' Invalid command: \"'+transform_str+'\"\n'
                                     ' This command requires either 6 or 9 numerical arguments. Either:\n'
                                     ' rotvv(Xold,Yold,Zold,Xnew,Ynew,Znew) or \n'
                                     ' rotvv(Xold,Yold,Zold,Xnew,Ynew,Znew,centerX,centerY,centerZ)')
                M[0][3] = 0.0  # RotMatXYZXYZ() only modifies the 3x3 submatrix of M;
                M[1][3] = 0.0  # the remaining final column must be zeroed by hand.
                M[2][3] = 0.0
                # Rotation taking direction (Xold,Yold,Zold) to (Xnew,Ynew,Znew):
                RotMatXYZXYZ(M,
                             float(args[0]),
                             float(args[1]),
                             float(args[2]),
                             float(args[3]),
                             float(args[4]),
                             float(args[5]))
                if (center_v == None):
                    AffineCompose(Mtmp, M, Mdest)
                    CopyMat(Mdest, Mtmp)
                else:
                    # Move "center_v" to the origin
                    moveCentToOrig = [[1.0, 0.0, 0.0, -center_v[0]],
                                      [0.0, 1.0, 0.0, -center_v[1]],
                                      [0.0, 0.0, 1.0, -center_v[2]]]
                    AffineCompose(Mtmp, moveCentToOrig, Mdest)
                    CopyMat(Mdest, Mtmp)
                    # Rotate the coordinates (relative to the origin)
                    AffineCompose(Mtmp, M, Mdest)   # M is the rotation matrix
                    CopyMat(Mdest, Mtmp)
                    # Move the origin back to center_v
                    moveCentBack = [[1.0, 0.0, 0.0, center_v[0]],
                                    [0.0, 1.0, 0.0, center_v[1]],
                                    [0.0, 0.0, 1.0, center_v[2]]]
                    AffineCompose(Mtmp, moveCentBack, Mdest)
                    CopyMat(Mdest, Mtmp)

            elif transform_str.find('scale(') == 0:
                i_paren_close = transform_str.find(')')
                if i_paren_close == -1:
                    i_paren_close = len(transform_str)
                args = transform_str[6:i_paren_close].split(',')
                # Accept a uniform or per-axis ratio, optionally followed by
                # the fixed point ("center") of the scaling:
                if (len(args) == 1):
                    scale_v = [float(args[0]), float(args[0]), float(args[0])]
                    center_v = [0.0, 0.0, 0.0]
                elif (len(args) == 3):
                    scale_v = [float(args[0]), float(args[1]), float(args[2])]
                    center_v = [0.0, 0.0, 0.0]
                elif (len(args) == 4):
                    scale_v = [float(args[0]), float(args[0]), float(args[0])]
                    center_v = [float(args[1]), float(args[2]), float(args[3])]
                elif (len(args) == 6):
                    scale_v = [float(args[0]), float(args[1]), float(args[2])]
                    center_v = [float(args[3]), float(args[4]), float(args[5])]
                else:
                    raise InputError('Error near '+ErrorLeader(src_loc.infile, src_loc.lineno)+':\n'
                                     ' Invalid command: \"'+transform_str+'\"\n'
                                     ' This command requires either 1, 3, 4, or 6 numerical arguments. Either:\n'
                                     ' scale(ratio), or \n'
                                     ' scale(ratioX, ratioY, ratioZ),\n'
                                     ' scale(ratio, centerX, centerY, centerZ), or\n'
                                     ' scale(ratioX, ratioY, ratioZ, centerX, centerY, centerZ)')
                ScaleMat(M, scale_v)
                # Now worry about translation: choose the offset so that
                # "center_v" is the fixed point of the scaling.
                for d in range(0, 3):
                    M[d][3] = center_v[d] * (1.0 - scale_v[d])
                AffineCompose(Mtmp, M, Mdest)
                CopyMat(Mdest, Mtmp)
            # (A commented-out "scalecm" variant, scaling about the object's
            #  center of mass "xcm", is currently disabled.)

            else:
                raise InputError('Error near '+ErrorLeader(src_loc.infile, src_loc.lineno)+':\n'
                                 ' Unknown transformation command: \"'+transform_str+'\"\n')

        return Mdest
class MultiAffineStack(object):
    """A collection of named AffineStack objects ("sub-stacks").

    The total transformation (self.tot_stack, whose cumulative matrix is
    aliased by self.M) is the product of the cumulative matrices of every
    sub-stack, composed left-to-right in the order the sub-stacks were
    created (see PushStack()).  Matrices can be pushed onto / popped from
    either the most recently created sub-stack (which_stack=None) or a
    specific sub-stack identified by its key (which_stack=<key>).
    """

    def __init__(self, which_stack=None):
        # NOTE: "which_stack" is accepted for interface compatibility but is
        # not used during construction.
        self.tot_stack = None              # AffineStack holding the full product
        self.stack_lookup = None           # dict: sub-stack key -> AffineStack
        self.stack_keys = None             # deque of keys, in creation order
        self.stacks = None                 # deque of AffineStacks, in creation order
        self.M = None                      # alias of self.tot_stack.M
        self.error_if_substack_empty = False
        self.Clear()

    def Clear(self):
        """Discard all sub-stacks and reset the total transform to identity."""
        self.tot_stack = AffineStack()
        self.stack_lookup = {}
        self.stack_keys = deque([])
        self.stacks = deque([])
        self.M = self.tot_stack.M
        self.error_if_substack_empty = False

    def _Update(self):
        """Rebuild self.tot_stack (and self.M) from scratch by composing the
        cumulative matrix of every sub-stack, left to right."""
        self.tot_stack.Clear()
        self.M = self.tot_stack.M
        assert(len(self.stacks) > 0)
        for stack in self.stacks:
            self.tot_stack.PushRight(stack.M)

    def PushStack(self, which_stack):
        """Create a new (identity) sub-stack registered under the key
        "which_stack" and append it as the right-most factor of the total
        product."""
        stack = AffineStack()
        self.stack_keys.append(which_stack)
        self.stack_lookup[which_stack] = stack
        self.stacks.append(stack)
        self.tot_stack.PushRight(stack.M)

    def PopStack(self):
        """Delete the most recently created sub-stack (and its factor in the
        total product)."""
        assert(len(self.stacks) > 0)
        self.tot_stack.PopRight()
        which_stack = self.stack_keys.pop()
        del self.stack_lookup[which_stack]
        self.stacks.pop()

    def Push(self, M, which_stack=None, right_not_left=True):
        """Compose matrix M onto a sub-stack.

        If "which_stack" is None, M is pushed onto the most recently created
        sub-stack (one is created first if none exist); otherwise it is
        pushed onto the sub-stack registered under that key.
        "right_not_left" selects on which side of that sub-stack's
        cumulative matrix M is multiplied.
        """
        if len(self.stacks) == 0:
            self.PushStack(which_stack)
        if which_stack == None:
            stack = self.stacks[-1]
        else:
            stack = self.stack_lookup[which_stack]
        if right_not_left:
            stack.PushRight(M)   # copies the composed matrix into stack.M
        else:
            stack.PushLeft(M)
        if stack == self.stacks[-1]:
            # Only the right-most factor of the total product changed, so
            # replace just that factor instead of rebuilding everything.
            # (Always PopRight/PushRight on tot_stack, even when
            # right_not_left=False: stack.M as a whole is the right-most
            # factor of the total product.)
            self.tot_stack.PopRight()
            self.tot_stack.PushRight(stack.M)
        else:
            # A factor in the middle of the product changed:
            # recompute the whole product.
            self._Update()

    def PushRight(self, M, which_stack=None):
        self.Push(M, which_stack, right_not_left=True)

    def PushLeft(self, M, which_stack=None):
        self.Push(M, which_stack, right_not_left=False)

    def PushCommandsRight(self,
                          text,  # text containing affine transformation commands
                          # The next arguments are optional:
                          src_loc = OSrcLoc(),  # for debugging
                          xcm = None,           # position of center of object
                          which_stack=None):
        """Parse "text" (e.g. \"rot(90,0,0,1).move(0,5.0,0)\") into a single
        affine transformation matrix and push it (on the right) onto the
        selected sub-stack.  See AffineStack.CommandsToMatrix() for the
        command syntax."""
        self.PushRight(AffineStack.CommandsToMatrix(text, src_loc, xcm),
                       which_stack)

    def PushCommandsLeft(self,
                         text,  # text containing affine transformation commands
                         # The next arguments are optional:
                         src_loc = OSrcLoc(),  # for debugging
                         xcm = None,           # position of center of object
                         which_stack = None):
        # Same as PushCommandsRight(), but composes on the left.
        self.PushLeft(AffineStack.CommandsToMatrix(text, src_loc, xcm),
                      which_stack)

    def Pop(self, which_stack=None, right_not_left=True):
        """Undo the most recent Push on the selected sub-stack.

        Asserts if that sub-stack has nothing left to pop.
        """
        if which_stack == None:
            stack = self.stacks[-1]
            # BUG FIX: this guard used to read "len(stack) >= 1", which is
            # always true because len(AffineStack) counts the current matrix
            # in addition to the saved history.  Popping a sub-stack with no
            # saved matrices therefore crashed with an IndexError from
            # deque.pop() instead of reaching the assertion below.
            # "> 1" matches the named-stack branch.
            if len(stack) > 1:
                if right_not_left:
                    stack.PopRight()
                else:
                    stack.PopLeft()
                # Only the right-most factor changed; replace it in place.
                # (Always PopRight/PushRight on tot_stack, regardless of
                # right_not_left -- see the comment in Push().)
                self.tot_stack.PopRight()
                self.tot_stack.PushRight(stack.M)
            else:
                assert(False)
        else:
            stack = self.stack_lookup[which_stack]
            if len(stack) > 1:
                if right_not_left:
                    stack.PopRight()
                else:
                    stack.PopLeft()
                self._Update()
            else:
                assert(False)

    def PopRight(self, which_stack=None):
        self.Pop(which_stack, right_not_left=True)

    def PopLeft(self, which_stack=None):
        # BUG FIX: this used to call Pop(..., right_not_left=True), making
        # PopLeft identical to PopRight.  (Harmless in practice, because
        # AffineStack.PopLeft()/PopRight() currently perform the same
        # operation, but the flag now reflects the caller's intent.)
        self.Pop(which_stack, right_not_left=False)
import math
def ScaleMat(dest, scale):
    """Fill "dest" with a scaling transformation.

    All entries of "dest" are zeroed, then the diagonal of the leading
    square part is set.  "scale" may be a single number (uniform scaling)
    or a sequence with one ratio per axis.
    """
    for r in range(len(dest)):
        for c in range(len(dest[r])):
            dest[r][c] = 0.0
    n = len(dest)
    if type(scale) in (float, int):
        # Uniform scaling: the same ratio on every axis.
        for d in range(n):
            dest[d][d] = scale
    else:
        # Per-axis scaling: one ratio per row.
        for d in range(n):
            dest[d][d] = scale[d]
def RotMatAXYZ(dest, angle, axis_x, axis_y, axis_z):
    """Write into the upper-left 3x3 entries of "dest" the matrix for a
    rotation by "angle" (radians) about the axis (axis_x, axis_y, axis_z).

    Only the 3x3 submatrix is modified; any additional columns of "dest"
    (e.g. a translation column) are left untouched.  If the axis has zero
    length, the identity rotation is produced.
    Formula from http://inside.mines.edu/~gmurray/ArbitraryAxisRotation/
    (see also the glRotate man page, http://www.manpagez.com/man/3/glRotate/).
    """
    norm = math.sqrt(axis_x * axis_x + axis_y * axis_y + axis_z * axis_z)
    if norm > 0.0:
        ux = axis_x / norm
        uy = axis_y / norm
        uz = axis_z / norm
    else:
        # Degenerate (zero-length) axis: fall back to a zero-angle rotation.
        ux, uy, uz = 1.0, 0.0, 0.0
        angle = 0.0
    # "angle" is assumed to be in radians; the caller handles unit conversion.
    cos_a = math.cos(angle)
    sin_a = math.sin(angle)
    t = 1 - cos_a
    dest[0][0] = ux * ux * t + cos_a
    dest[1][1] = uy * uy * t + cos_a
    dest[2][2] = uz * uz * t + cos_a
    dest[0][1] = ux * uy * t - uz * sin_a
    dest[0][2] = ux * uz * t + uy * sin_a
    dest[1][0] = uy * ux * t + uz * sin_a
    dest[2][0] = uz * ux * t - uy * sin_a
    dest[1][2] = uy * uz * t - ux * sin_a
    dest[2][1] = uz * uy * t + ux * sin_a
def CrossProd(dest, A, B):
    """Store the 3-D cross product A x B into dest (modified in place)."""
    ax, ay, az = A[0], A[1], A[2]
    bx, by, bz = B[0], B[1], B[2]
    dest[0] = ay * bz - by * az
    dest[1] = az * bx - bz * ax
    dest[2] = ax * by - bx * ay
def DotProd(A, B):
    """Return the dot product of vectors A and B.

    B must provide at least len(A) entries; extra entries in B are ignored
    (the iteration is driven by len(A), as in the original loop).
    """
    return sum((A[d] * B[d] for d in range(len(A))), 0.0)
def Length(A):
    """Return the Euclidean length (2-norm) of vector A."""
    return math.sqrt(sum((x * x for x in A), 0.0))
def Normalize(dest, source):
    """Write source / |source| into dest (modified in place).

    dest and source must have the same length.  A zero-length source
    raises ZeroDivisionError, matching the original behavior.
    """
    assert(len(dest) == len(source))
    # Inlined Euclidean norm (same accumulation order as Length()).
    norm = math.sqrt(sum((x * x for x in source), 0.0))
    for d, component in enumerate(source):
        dest[d] = component / norm
def RotMatXYZXYZ(dest,
                 xold, yold, zold,
                 xnew, ynew, znew):
    """Fill dest (3x3, modified in place) with the rotation carrying the
    direction (xold, yold, zold) onto the direction (xnew, ynew, znew).

    The rotation axis is the cross product of the two vectors; the angle
    comes from atan2 of the normalized sine and cosine.  When the vectors
    are parallel (zero cross product) no rotation is applied.
    """
    vec_a = [xold, yold, zold]
    vec_b = [xnew, ynew, znew]
    axis = [0.0, 0.0, 0.0]
    CrossProd(axis, vec_a, vec_b)
    len_a = Length(vec_a)
    len_b = Length(vec_b)
    len_axis = Length(axis)
    sin_ang = len_axis / (len_a * len_b)
    cos_ang = DotProd(vec_a, vec_b) / (len_a * len_b)
    if len_axis > 0.0:
        Normalize(axis, axis)
        angle = math.atan2(sin_ang, cos_ang)
    else:  # parallel (or anti-parallel degenerate) input: identity
        axis = [1.0, 0.0, 0.0]
        angle = 0.0
    RotMatAXYZ(dest, angle, axis[0], axis[1], axis[2])
| gpl-2.0 |
cchristelis/inasafe | safe/impact_functions/volcanic/volcano_point_building/impact_function.py | 2 | 9034 | # coding=utf-8
"""InaSAFE Disaster risk tool by Australian Aid - Volcano Point on Building
Impact Function.
Contact : ole.moller.nielsen@gmail.com
.. note:: This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
"""
from collections import OrderedDict
from safe.impact_functions.bases.classified_vh_classified_ve import \
ClassifiedVHClassifiedVE
from safe.impact_functions.volcanic.volcano_point_building\
.metadata_definitions import VolcanoPointBuildingFunctionMetadata
from safe.storage.vector import Vector
from safe.utilities.i18n import tr
from safe.engine.core import buffer_points
from safe.common.utilities import (
get_thousand_separator,
get_non_conflicting_attribute_name,
get_osm_building_usage)
from safe.engine.interpolation import (
assign_hazard_values_to_exposure_data)
from safe.impact_reports.building_exposure_report_mixin import (
BuildingExposureReportMixin)
from safe.common.exceptions import KeywordNotFoundError
import safe.messaging as m
from safe.messaging import styles
class VolcanoPointBuildingFunction(
        ClassifiedVHClassifiedVE,
        BuildingExposureReportMixin):
    """Impact Function for Volcano Point on Building."""

    _metadata = VolcanoPointBuildingFunctionMetadata()

    def __init__(self):
        super(VolcanoPointBuildingFunction, self).__init__()
        # Shown in the report when the hazard layer names no volcano.
        self.volcano_names = tr('Not specified in data')
        # Hazard categories (buffer radii in metres) counted as
        # "affected"; populated by run().
        self._affected_categories_volcano = []

    def notes(self):
        """Return the notes section of the report.

        :return: The notes that should be attached to this impact report.
        :rtype: safe.messaging.Message
        """
        message = m.Message(style_class='container')
        message.add(m.Heading(
            tr('Notes and assumptions'), **styles.INFO_STYLE))
        checklist = m.BulletedList()
        checklist.add(tr(
            'Map shows buildings affected in each of the volcano buffered '
            'zones.'))
        names = tr('Volcanoes considered: %s.') % self.volcano_names
        checklist.add(names)
        message.add(checklist)
        return message

    @property
    def _affected_categories(self):
        """Overwriting the affected categories, since 'unaffected' are counted.

        :returns: The categories that equal effected.
        :rtype: list
        """
        return self._affected_categories_volcano

    def run(self):
        """Counts number of building exposed to each volcano hazard zones.

        :returns: Map of building exposed to volcanic hazard zones.
            Table with number of buildings affected
        :rtype: dict
        :raises: Exception if the hazard layer is not a vector point layer.
        """
        self.validate()
        self.prepare()

        # Hazard Zone Attribute
        hazard_zone_attribute = 'radius'

        # Parameters
        radii = self.parameters['distances'].value

        # Get parameters from layer's keywords
        volcano_name_attribute = self.hazard.keyword('volcano_name_field')
        # Try to get the value from keyword, if not exist, it will not fail,
        # but use the old get_osm_building_usage
        try:
            self.exposure_class_attribute = self.exposure.keyword(
                'structure_class_field')
        except KeywordNotFoundError:
            self.exposure_class_attribute = None

        # Input checks
        if not self.hazard.layer.is_point_data:
            message = (
                'Input hazard must be a vector point layer. I got %s '
                'with layer type %s' % (
                    self.hazard.name, self.hazard.layer.get_geometry_name()))
            raise Exception(message)

        # Make hazard layer by buffering the point
        centers = self.hazard.layer.get_geometry()
        features = self.hazard.layer.get_data()
        radii_meter = [x * 1000 for x in radii]  # Convert to meters
        hazard_layer = buffer_points(
            centers,
            radii_meter,
            hazard_zone_attribute,
            data_table=features)

        # Category names for the impact zone.  NOTE: previously the
        # "not affected" label was appended to an *alias* of radii_meter,
        # silently mutating radii_meter as a side effect; the explicit
        # copies below build the exact same lists without that mutation.
        category_names = radii_meter + [self._not_affected_value]
        self._affected_categories_volcano = list(radii_meter)

        # Get names of volcanoes considered
        if volcano_name_attribute in hazard_layer.get_attribute_names():
            volcano_name_list = set()
            for row in hazard_layer.get_data():
                # Run through all polygons and get unique names
                volcano_name_list.add(row[volcano_name_attribute])
            self.volcano_names = ', '.join(volcano_name_list)

        # Find the target field name that has no conflict with the attribute
        # names in the hazard layer
        hazard_attribute_names = hazard_layer.get_attribute_names()
        target_field = get_non_conflicting_attribute_name(
            self.target_field, hazard_attribute_names)

        # Run interpolation function for polygon2polygon
        interpolated_layer = assign_hazard_values_to_exposure_data(
            hazard_layer, self.exposure.layer)

        # Extract relevant interpolated layer data
        attribute_names = interpolated_layer.get_attribute_names()
        features = interpolated_layer.get_data()

        self.buildings = {}
        self.affected_buildings = OrderedDict()
        # One counting bucket per category (including "not affected") so
        # every interpolated hazard value has somewhere to land.
        for category in category_names:
            self.affected_buildings[category] = {}

        # Iterate the interpolated building layer
        for i in range(len(features)):
            hazard_value = features[i][hazard_zone_attribute]
            if not hazard_value:
                hazard_value = self._not_affected_value
            features[i][target_field] = hazard_value

            # Count affected buildings by usage type if available
            if (self.exposure_class_attribute and
                    self.exposure_class_attribute in attribute_names):
                usage = features[i][self.exposure_class_attribute]
            else:
                usage = get_osm_building_usage(attribute_names, features[i])
            # BUG FIX: this used `usage is [...]`, an identity comparison
            # against a fresh list literal, which is always False.
            # Membership (`in`) is required so null-ish usages are
            # normalised to 'Unknown'.
            if usage in [None, 'NULL', 'null', 'Null', 0]:
                usage = tr('Unknown')

            if usage not in self.buildings:
                self.buildings[usage] = 0
                for category in self.affected_buildings.keys():
                    self.affected_buildings[category][usage] = OrderedDict([
                        (tr('Buildings Affected'), 0)])

            self.buildings[usage] += 1
            if hazard_value in self.affected_buildings.keys():
                self.affected_buildings[hazard_value][usage][
                    tr('Buildings Affected')] += 1

        # Lump small entries and 'unknown' into 'other' category
        self._consolidate_to_other()

        # Generate simple impact report
        impact_summary = impact_table = self.html_report()

        # Create style
        colours = ['#FFFFFF', '#38A800', '#79C900', '#CEED00',
                   '#FFCC00', '#FF6600', '#FF0000', '#7A0000']
        colours = colours[::-1]  # flip
        colours = colours[:len(category_names)]
        style_classes = []
        i = 0
        for category_name in category_names:
            style_class = dict()
            style_class['label'] = tr(category_name)
            style_class['transparency'] = 0
            style_class['value'] = category_name
            style_class['size'] = 1
            # BUG FIX: clamp against the colour list, which is capped at 8
            # entries.  The old guard compared i to len(category_names) and
            # therefore never fired, so colours[i] raised IndexError when
            # there were more than 8 categories; now the last (most severe)
            # colour is reused instead.
            if i >= len(colours):
                i = len(colours) - 1
            style_class['colour'] = colours[i]
            i += 1
            style_classes.append(style_class)

        # Override style info with new classes and name
        style_info = dict(
            target_field=target_field,
            style_classes=style_classes,
            style_type='categorizedSymbol')

        # For printing map purpose
        map_title = tr('Buildings affected by volcanic buffered point')
        legend_title = tr('Building count')
        legend_units = tr('(building)')
        legend_notes = tr(
            'Thousand separator is represented by %s' %
            get_thousand_separator())

        # Create vector layer and return
        impact_layer = Vector(
            data=features,
            projection=interpolated_layer.get_projection(),
            geometry=interpolated_layer.get_geometry(),
            name=tr('Buildings affected by volcanic buffered point'),
            keywords={
                'impact_summary': impact_summary,
                'impact_table': impact_table,
                'target_field': target_field,
                'map_title': map_title,
                'legend_notes': legend_notes,
                'legend_units': legend_units,
                'legend_title': legend_title},
            style_info=style_info)
        self._impact = impact_layer
        return impact_layer
| gpl-3.0 |
ovnicraft/django-storages | storages/backends/couchdb.py | 11 | 3979 | """
This is a Custom Storage System for Django with CouchDB backend.
Created by Christian Klein.
(c) Copyright 2009 HUDORA GmbH. All Rights Reserved.
"""
import os
from cStringIO import StringIO
from urlparse import urljoin
from urllib import quote_plus
from django.conf import settings
from django.core.files import File
from django.core.files.storage import Storage
from django.core.exceptions import ImproperlyConfigured
try:
import couchdb
except ImportError:
raise ImproperlyConfigured("Could not load couchdb dependency.\
\nSee http://code.google.com/p/couchdb-python/")
# Connection defaults; both may be overridden in Django settings.
DEFAULT_SERVER = getattr(settings, 'COUCHDB_DEFAULT_SERVER',
                         'http://couchdb.local:5984')
STORAGE_OPTIONS = getattr(settings, 'COUCHDB_STORAGE_OPTIONS', {})
class CouchDBStorage(Storage):
    """
    CouchDBStorage - a Django Storage class for CouchDB.

    The CouchDBStorage can be configured in settings.py, e.g.::

        COUCHDB_STORAGE_OPTIONS = {
            'server': "http://example.org",
            'database': 'database_name'
        }

    Alternatively, the configuration can be passed as a dictionary.
    """

    def __init__(self, **kwargs):
        # Settings-level options take precedence over per-instance kwargs.
        kwargs.update(STORAGE_OPTIONS)
        self.base_url = kwargs.get('server', DEFAULT_SERVER)
        server = couchdb.client.Server(self.base_url)
        self.db = server[kwargs.get('database')]

    def _put_file(self, name, content):
        # One document per file; the payload is stored in an attachment
        # named 'content' and the document body records only the size.
        self.db[name] = {'size': len(content)}
        self.db.put_attachment(self.db[name], content, filename='content')
        return name

    def get_document(self, name):
        """Return the raw CouchDB document for ``name`` (None if absent)."""
        return self.db.get(name)

    def _open(self, name, mode='rb'):
        return CouchDBFile(name, self, mode=mode)

    def _save(self, name, content):
        content.open()
        if hasattr(content, 'chunks'):
            payload = ''.join(chunk for chunk in content.chunks())
        else:
            payload = content.read()
        # Flatten path separators so the name is usable as a document id.
        return self._put_file(name.replace('/', '-'), payload)

    def exists(self, name):
        return name in self.db

    def size(self, name):
        document = self.get_document(name)
        return document['size'] if document else 0

    def url(self, name):
        return urljoin(self.base_url,
                       os.path.join(quote_plus(self.db.name),
                                    quote_plus(name),
                                    'content'))

    def delete(self, name):
        try:
            del self.db[name]
        except couchdb.client.ResourceNotFound:
            raise IOError("File not found: %s" % name)

    #def listdir(self, name):
    # _all_docs?
    #    pass
class CouchDBFile(File):
    """
    CouchDBFile - a Django File-like class for CouchDB documents.
    """

    def __init__(self, name, storage, mode):
        self._name = name
        self._storage = storage
        self._mode = mode
        self._is_dirty = False
        try:
            self._doc = self._storage.get_document(name)

            # BUG FIX: os.path.split() returns (directory, basename), so
            # `ext` used to hold the *entire* file name and the computed
            # attachment name never matched anything _put_file() stored.
            # os.path.splitext() is what was intended here.
            # NOTE(review): _put_file() always stores the attachment as
            # plain 'content', so the "content.<ext>" branch still looks
            # inconsistent with the writer -- confirm against real data.
            tmp, ext = os.path.splitext(name)
            if ext:
                filename = "content." + ext.lstrip('.')
            else:
                filename = "content"
            attachment = self._storage.db.get_attachment(self._doc, filename=filename)
            self.file = StringIO(attachment)
        except couchdb.client.ResourceNotFound:
            if 'r' in self._mode:
                raise ValueError("The file cannot be reopened.")
            else:
                self.file = StringIO()
                self._is_dirty = True

    @property
    def size(self):
        # Size in bytes as recorded on the CouchDB document by _put_file().
        return self._doc['size']

    def write(self, content):
        """Replace the whole buffered content (write mode only)."""
        if 'w' not in self._mode:
            raise AttributeError("File was opened for read-only access.")
        self.file = StringIO(content)
        self._is_dirty = True

    def close(self):
        """Flush dirty content back to CouchDB, then close the buffer."""
        if self._is_dirty:
            self._storage._put_file(self._name, self.file.getvalue())
        self.file.close()
| bsd-3-clause |
mach0/QGIS | python/plugins/MetaSearch/dialogs/maindialog.py | 9 | 40895 | # -*- coding: utf-8 -*-
###############################################################################
#
# CSW Client
# ---------------------------------------------------------
# QGIS Catalog Service client.
#
# Copyright (C) 2010 NextGIS (http://nextgis.org),
# Alexander Bruy (alexander.bruy@gmail.com),
# Maxim Dubinin (sim@gis-lab.info)
#
# Copyright (C) 2017 Tom Kralidis (tomkralidis@gmail.com)
#
# This source is free software; you can redistribute it and/or modify it under
# the terms of the GNU General Public License as published by the Free
# Software Foundation; either version 2 of the License, or (at your option)
# any later version.
#
# This code is distributed in the hope that it will be useful, but WITHOUT ANY
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
###############################################################################
import json
import os.path
import warnings
from urllib.request import build_opener, install_opener, ProxyHandler
from qgis.PyQt.QtCore import Qt
from qgis.PyQt.QtWidgets import (QApplication, QDialog, QComboBox,
QDialogButtonBox, QMessageBox,
QTreeWidgetItem, QWidget)
from qgis.PyQt.QtGui import QColor, QCursor
from qgis.core import (QgsApplication, QgsCoordinateReferenceSystem,
QgsCoordinateTransform, QgsGeometry, QgsPointXY,
QgsProviderRegistry, QgsSettings, QgsProject)
from qgis.gui import QgsRubberBand, QgsGui
from qgis.utils import OverrideCursor
with warnings.catch_warnings():
warnings.filterwarnings("ignore", category=ResourceWarning)
warnings.filterwarnings("ignore", category=ImportWarning)
from owslib.csw import CatalogueServiceWeb # spellok
from owslib.fes import BBox, PropertyIsLike
from owslib.ows import ExceptionReport
try:
from owslib.util import Authentication
except ImportError:
pass
from MetaSearch import link_types
from MetaSearch.dialogs.manageconnectionsdialog import ManageConnectionsDialog
from MetaSearch.dialogs.newconnectiondialog import NewConnectionDialog
from MetaSearch.dialogs.recorddialog import RecordDialog
from MetaSearch.dialogs.xmldialog import XMLDialog
from MetaSearch.util import (clean_ows_url, get_connections_from_file,
get_ui_class, get_help_url, highlight_xml,
normalize_text, open_url, render_template,
serialize_string, StaticContext)
BASE_CLASS = get_ui_class('maindialog.ui')
class MetaSearchDialog(QDialog, BASE_CLASS):
"""main dialogue"""
def __init__(self, iface):
"""init window"""
QDialog.__init__(self)
self.setupUi(self)
self.iface = iface
self.map = iface.mapCanvas()
self.settings = QgsSettings()
self.catalog = None
self.catalog_url = None
self.catalog_username = None
self.catalog_password = None
self.context = StaticContext()
self.leKeywords.setShowSearchIcon(True)
self.leKeywords.setPlaceholderText(self.tr('Search keywords'))
self.setWindowTitle(self.tr('MetaSearch'))
self.rubber_band = QgsRubberBand(self.map, True) # True = a polygon
self.rubber_band.setColor(QColor(255, 0, 0, 75))
self.rubber_band.setWidth(5)
# form inputs
self.startfrom = 1
self.maxrecords = 10
self.timeout = 10
self.disable_ssl_verification = False
self.constraints = []
# Servers tab
self.cmbConnectionsServices.activated.connect(self.save_connection)
self.cmbConnectionsSearch.activated.connect(self.save_connection)
self.btnServerInfo.clicked.connect(self.connection_info)
self.btnAddDefault.clicked.connect(self.add_default_connections)
self.btnCapabilities.clicked.connect(self.show_xml)
self.tabWidget.currentChanged.connect(self.populate_connection_list)
# server management buttons
self.btnNew.clicked.connect(self.add_connection)
self.btnEdit.clicked.connect(self.edit_connection)
self.btnDelete.clicked.connect(self.delete_connection)
self.btnLoad.clicked.connect(self.load_connections)
self.btnSave.clicked.connect(save_connections)
# Search tab
self.treeRecords.itemSelectionChanged.connect(self.record_clicked)
self.treeRecords.itemDoubleClicked.connect(self.show_metadata)
self.btnSearch.clicked.connect(self.search)
self.leKeywords.returnPressed.connect(self.search)
# prevent dialog from closing upon pressing enter
self.buttonBox.button(QDialogButtonBox.Close).setAutoDefault(False)
# launch help from button
self.buttonBox.helpRequested.connect(self.help)
self.btnCanvasBbox.setAutoDefault(False)
self.btnCanvasBbox.clicked.connect(self.set_bbox_from_map)
self.btnGlobalBbox.clicked.connect(self.set_bbox_global)
# navigation buttons
self.btnFirst.clicked.connect(self.navigate)
self.btnPrev.clicked.connect(self.navigate)
self.btnNext.clicked.connect(self.navigate)
self.btnLast.clicked.connect(self.navigate)
self.mActionAddWms.triggered.connect(self.add_to_ows)
self.mActionAddWfs.triggered.connect(self.add_to_ows)
self.mActionAddWcs.triggered.connect(self.add_to_ows)
self.mActionAddAms.triggered.connect(self.add_to_ows)
self.mActionAddAfs.triggered.connect(self.add_to_ows)
self.mActionAddGisFile.triggered.connect(self.add_gis_file)
self.btnShowXml.clicked.connect(self.show_xml)
self.manageGui()
def manageGui(self):
"""open window"""
self.tabWidget.setCurrentIndex(0)
self.populate_connection_list()
self.btnCapabilities.setEnabled(False)
self.spnRecords.setValue(
int(self.settings.value('/MetaSearch/returnRecords', 10)))
key = '/MetaSearch/%s' % self.cmbConnectionsSearch.currentText()
self.catalog_url = self.settings.value('%s/url' % key)
self.catalog_username = self.settings.value('%s/username' % key)
self.catalog_password = self.settings.value('%s/password' % key)
self.set_bbox_global()
self.reset_buttons()
# install proxy handler if specified in QGIS settings
self.install_proxy()
# Servers tab
def populate_connection_list(self):
"""populate select box with connections"""
self.settings.beginGroup('/MetaSearch/')
self.cmbConnectionsServices.clear()
self.cmbConnectionsServices.addItems(self.settings.childGroups())
self.cmbConnectionsSearch.clear()
self.cmbConnectionsSearch.addItems(self.settings.childGroups())
self.settings.endGroup()
self.set_connection_list_position()
if self.cmbConnectionsServices.count() == 0:
# no connections - disable various buttons
state_disabled = False
self.btnSave.setEnabled(state_disabled)
# and start with connection tab open
self.tabWidget.setCurrentIndex(1)
# tell the user to add services
msg = self.tr('No services/connections defined. To get '
'started with MetaSearch, create a new '
'connection by clicking \'New\' or click '
'\'Add default services\'.')
self.textMetadata.setHtml('<p><h3>%s</h3></p>' % msg)
else:
# connections - enable various buttons
state_disabled = True
self.btnServerInfo.setEnabled(state_disabled)
self.btnEdit.setEnabled(state_disabled)
self.btnDelete.setEnabled(state_disabled)
def set_connection_list_position(self):
"""set the current index to the selected connection"""
to_select = self.settings.value('/MetaSearch/selected')
conn_count = self.cmbConnectionsServices.count()
if conn_count == 0:
self.btnDelete.setEnabled(False)
self.btnServerInfo.setEnabled(False)
self.btnEdit.setEnabled(False)
# does to_select exist in cmbConnectionsServices?
exists = False
for i in range(conn_count):
if self.cmbConnectionsServices.itemText(i) == to_select:
self.cmbConnectionsServices.setCurrentIndex(i)
self.cmbConnectionsSearch.setCurrentIndex(i)
exists = True
break
# If we couldn't find the stored item, but there are some, default
# to the last item (this makes some sense when deleting items as it
# allows the user to repeatidly click on delete to remove a whole
# lot of items)
if not exists and conn_count > 0:
# If to_select is null, then the selected connection wasn't found
# by QgsSettings, which probably means that this is the first time
# the user has used CSWClient, so default to the first in the list
# of connetions. Otherwise default to the last.
if not to_select:
current_index = 0
else:
current_index = conn_count - 1
self.cmbConnectionsServices.setCurrentIndex(current_index)
self.cmbConnectionsSearch.setCurrentIndex(current_index)
def save_connection(self):
"""save connection"""
caller = self.sender().objectName()
if caller == 'cmbConnectionsServices': # servers tab
current_text = self.cmbConnectionsServices.currentText()
elif caller == 'cmbConnectionsSearch': # search tab
current_text = self.cmbConnectionsSearch.currentText()
self.settings.setValue('/MetaSearch/selected', current_text)
key = '/MetaSearch/%s' % current_text
if caller == 'cmbConnectionsSearch': # bind to service in search tab
self.catalog_url = self.settings.value('%s/url' % key)
self.catalog_username = self.settings.value('%s/username' % key)
self.catalog_password = self.settings.value('%s/password' % key)
if caller == 'cmbConnectionsServices': # clear server metadata
self.textMetadata.clear()
self.btnCapabilities.setEnabled(False)
def connection_info(self):
"""show connection info"""
current_text = self.cmbConnectionsServices.currentText()
key = '/MetaSearch/%s' % current_text
self.catalog_url = self.settings.value('%s/url' % key)
self.catalog_username = self.settings.value('%s/username' % key)
self.catalog_password = self.settings.value('%s/password' % key)
# connect to the server
if not self._get_csw():
return
if self.catalog: # display service metadata
self.btnCapabilities.setEnabled(True)
metadata = render_template('en', self.context,
self.catalog,
'service_metadata.html')
style = QgsApplication.reportStyleSheet()
self.textMetadata.clear()
self.textMetadata.document().setDefaultStyleSheet(style)
self.textMetadata.setHtml(metadata)
# clear results and disable buttons in Search tab
self.clear_results()
def add_connection(self):
"""add new service"""
conn_new = NewConnectionDialog()
conn_new.setWindowTitle(self.tr('New Catalog Service'))
if conn_new.exec_() == QDialog.Accepted: # add to service list
self.populate_connection_list()
self.textMetadata.clear()
def edit_connection(self):
"""modify existing connection"""
current_text = self.cmbConnectionsServices.currentText()
url = self.settings.value('/MetaSearch/%s/url' % current_text)
conn_edit = NewConnectionDialog(current_text)
conn_edit.setWindowTitle(self.tr('Edit Catalog Service'))
conn_edit.leName.setText(current_text)
conn_edit.leURL.setText(url)
conn_edit.leUsername.setText(self.settings.value('/MetaSearch/%s/username' % current_text))
conn_edit.lePassword.setText(self.settings.value('/MetaSearch/%s/password' % current_text))
if conn_edit.exec_() == QDialog.Accepted: # update service list
self.populate_connection_list()
def delete_connection(self):
"""delete connection"""
current_text = self.cmbConnectionsServices.currentText()
key = '/MetaSearch/%s' % current_text
msg = self.tr('Remove service {0}?').format(current_text)
result = QMessageBox.question(self, self.tr('Delete Service'), msg,
QMessageBox.Yes | QMessageBox.No, QMessageBox.No)
if result == QMessageBox.Yes: # remove service from list
self.settings.remove(key)
index_to_delete = self.cmbConnectionsServices.currentIndex()
self.cmbConnectionsServices.removeItem(index_to_delete)
self.cmbConnectionsSearch.removeItem(index_to_delete)
self.set_connection_list_position()
def load_connections(self):
"""load services from list"""
ManageConnectionsDialog(1).exec_()
self.populate_connection_list()
def add_default_connections(self):
"""add default connections"""
filename = os.path.join(self.context.ppath,
'resources', 'connections-default.xml')
doc = get_connections_from_file(self, filename)
if doc is None:
return
self.settings.beginGroup('/MetaSearch/')
keys = self.settings.childGroups()
self.settings.endGroup()
for server in doc.findall('csw'):
name = server.attrib.get('name')
# check for duplicates
if name in keys:
msg = self.tr('{0} exists. Overwrite?').format(name)
res = QMessageBox.warning(self,
self.tr('Loading connections'), msg,
QMessageBox.Yes | QMessageBox.No)
if res != QMessageBox.Yes:
continue
# no dups detected or overwrite is allowed
key = '/MetaSearch/%s' % name
self.settings.setValue('%s/url' % key, server.attrib.get('url'))
self.populate_connection_list()
# Settings tab
def set_ows_save_title_ask(self):
"""save ows save strategy as save ows title, ask if duplicate"""
self.settings.setValue('/MetaSearch/ows_save_strategy', 'title_ask')
def set_ows_save_title_no_ask(self):
"""save ows save strategy as save ows title, do NOT ask if duplicate"""
self.settings.setValue('/MetaSearch/ows_save_strategy', 'title_no_ask')
def set_ows_save_temp_name(self):
"""save ows save strategy as save with a temporary name"""
self.settings.setValue('/MetaSearch/ows_save_strategy', 'temp_name')
# Search tab
def set_bbox_from_map(self):
"""set bounding box from map extent"""
crs = self.map.mapSettings().destinationCrs()
try:
crsid = int(crs.authid().split(':')[1])
except IndexError: # no projection
crsid = 4326
extent = self.map.extent()
if crsid != 4326: # reproject to EPSG:4326
src = QgsCoordinateReferenceSystem(crsid)
dest = QgsCoordinateReferenceSystem("EPSG:4326")
xform = QgsCoordinateTransform(src, dest, QgsProject.instance())
minxy = xform.transform(QgsPointXY(extent.xMinimum(),
extent.yMinimum()))
maxxy = xform.transform(QgsPointXY(extent.xMaximum(),
extent.yMaximum()))
minx, miny = minxy
maxx, maxy = maxxy
else: # 4326
minx = extent.xMinimum()
miny = extent.yMinimum()
maxx = extent.xMaximum()
maxy = extent.yMaximum()
self.leNorth.setText(str(maxy)[0:9])
self.leSouth.setText(str(miny)[0:9])
self.leWest.setText(str(minx)[0:9])
self.leEast.setText(str(maxx)[0:9])
def set_bbox_global(self):
"""set global bounding box"""
self.leNorth.setText('90')
self.leSouth.setText('-90')
self.leWest.setText('-180')
self.leEast.setText('180')
def search(self):
"""execute search"""
self.catalog = None
self.constraints = []
# clear all fields and disable buttons
self.clear_results()
# save some settings
self.settings.setValue('/MetaSearch/returnRecords',
self.spnRecords.cleanText())
# set current catalog
current_text = self.cmbConnectionsSearch.currentText()
key = '/MetaSearch/%s' % current_text
self.catalog_url = self.settings.value('%s/url' % key)
self.catalog_username = self.settings.value('%s/username' % key)
self.catalog_password = self.settings.value('%s/password' % key)
# start position and number of records to return
self.startfrom = 1
self.maxrecords = self.spnRecords.value()
# set timeout
self.timeout = self.spnTimeout.value()
# bbox
# CRS is WGS84 with axis order longitude, latitude
# defined by 'urn:ogc:def:crs:OGC:1.3:CRS84'
minx = self.leWest.text()
miny = self.leSouth.text()
maxx = self.leEast.text()
maxy = self.leNorth.text()
bbox = [minx, miny, maxx, maxy]
# only apply spatial filter if bbox is not global
# even for a global bbox, if a spatial filter is applied, then
# the CSW server will skip records without a bbox
if bbox != ['-180', '-90', '180', '90']:
self.constraints.append(BBox([miny, minx, maxy, maxx],
crs='urn:ogc:def:crs:EPSG::4326'))
# keywords
if self.leKeywords.text():
# TODO: handle multiple word searches
keywords = self.leKeywords.text()
self.constraints.append(PropertyIsLike('csw:AnyText', keywords))
if len(self.constraints) > 1: # exclusive search (a && b)
self.constraints = [self.constraints]
# build request
if not self._get_csw():
return
# TODO: allow users to select resources types
# to find ('service', 'dataset', etc.)
try:
with OverrideCursor(Qt.WaitCursor):
self.catalog.getrecords2(constraints=self.constraints,
maxrecords=self.maxrecords, esn='full')
except ExceptionReport as err:
QMessageBox.warning(self, self.tr('Search error'),
self.tr('Search error: {0}').format(err))
return
except Exception as err:
QMessageBox.warning(self, self.tr('Connection error'),
self.tr('Connection error: {0}').format(err))
return
if self.catalog.results['matches'] == 0:
self.lblResults.setText(self.tr('0 results'))
return
self.display_results()
def display_results(self):
"""display search results"""
self.treeRecords.clear()
position = self.catalog.results['returned'] + self.startfrom - 1
msg = self.tr('Showing {0} - {1} of %n result(s)', 'number of results',
self.catalog.results['matches']).format(self.startfrom,
position)
self.lblResults.setText(msg)
for rec in self.catalog.records:
item = QTreeWidgetItem(self.treeRecords)
if self.catalog.records[rec].type:
item.setText(0, normalize_text(self.catalog.records[rec].type))
else:
item.setText(0, 'unknown')
if self.catalog.records[rec].title:
item.setText(1,
normalize_text(self.catalog.records[rec].title))
if self.catalog.records[rec].identifier:
set_item_data(item, 'identifier',
self.catalog.records[rec].identifier)
self.btnShowXml.setEnabled(True)
if self.catalog.results["matches"] < self.maxrecords:
disabled = False
else:
disabled = True
self.btnFirst.setEnabled(disabled)
self.btnPrev.setEnabled(disabled)
self.btnNext.setEnabled(disabled)
self.btnLast.setEnabled(disabled)
def clear_results(self):
"""clear search results"""
self.lblResults.clear()
self.treeRecords.clear()
self.reset_buttons()
def record_clicked(self):
"""record clicked signal"""
# disable only service buttons
self.reset_buttons(True, False, False)
self.rubber_band.reset()
if not self.treeRecords.selectedItems():
return
item = self.treeRecords.currentItem()
if not item:
return
identifier = get_item_data(item, 'identifier')
try:
record = self.catalog.records[identifier]
except KeyError as err:
QMessageBox.warning(self,
self.tr('Record parsing error'),
'Unable to locate record identifier')
return
# if the record has a bbox, show a footprint on the map
if record.bbox is not None:
points = bbox_to_polygon(record.bbox)
if points is not None:
src = QgsCoordinateReferenceSystem("EPSG:4326")
dst = self.map.mapSettings().destinationCrs()
geom = QgsGeometry.fromWkt(points)
if src.postgisSrid() != dst.postgisSrid():
ctr = QgsCoordinateTransform(src, dst, QgsProject.instance())
try:
geom.transform(ctr)
except Exception as err:
QMessageBox.warning(
self,
self.tr('Coordinate Transformation Error'),
str(err))
self.rubber_band.setToGeometry(geom, None)
# figure out if the data is interactive and can be operated on
self.find_services(record, item)
def find_services(self, record, item):
"""scan record for WMS/WMTS|WFS|WCS endpoints"""
links = record.uris + record.references
services = {}
for link in links:
if 'scheme' in link:
link_type = link['scheme']
elif 'protocol' in link:
link_type = link['protocol']
else:
link_type = None
if link_type is not None:
link_type = link_type.upper()
wmswmst_link_types = list(map(str.upper, link_types.WMSWMST_LINK_TYPES))
wfs_link_types = list(map(str.upper, link_types.WFS_LINK_TYPES))
wcs_link_types = list(map(str.upper, link_types.WCS_LINK_TYPES))
ams_link_types = list(map(str.upper, link_types.AMS_LINK_TYPES))
afs_link_types = list(map(str.upper, link_types.AFS_LINK_TYPES))
gis_file_link_types = list(map(str.upper, link_types.GIS_FILE_LINK_TYPES))
# if the link type exists, and it is one of the acceptable
# interactive link types, then set
if all([link_type is not None,
link_type in wmswmst_link_types + wfs_link_types +
wcs_link_types + ams_link_types + afs_link_types + gis_file_link_types]):
if link_type in wmswmst_link_types:
services['wms'] = link['url']
self.mActionAddWms.setEnabled(True)
if link_type in wfs_link_types:
services['wfs'] = link['url']
self.mActionAddWfs.setEnabled(True)
if link_type in wcs_link_types:
services['wcs'] = link['url']
self.mActionAddWcs.setEnabled(True)
if link_type in ams_link_types:
services['ams'] = link['url']
self.mActionAddAms.setEnabled(True)
if link_type in afs_link_types:
services['afs'] = link['url']
self.mActionAddAfs.setEnabled(True)
if link_type in gis_file_link_types:
services['gis_file'] = link['url']
services['title'] = record.title
self.mActionAddGisFile.setEnabled(True)
self.tbAddData.setEnabled(True)
set_item_data(item, 'link', json.dumps(services))
    def navigate(self):
        """manage navigation / paging

        Computes the new 1-based result offset (self.startfrom) from the
        button that fired the signal, offering to wrap around when the
        user pages past either end, then re-runs the CSW search.
        """
        caller = self.sender().objectName()
        if caller == 'btnFirst':
            self.startfrom = 1
        elif caller == 'btnLast':
            # last page begins maxrecords before the final match
            self.startfrom = self.catalog.results['matches'] - self.maxrecords + 1
        elif caller == 'btnNext':
            if self.startfrom > self.catalog.results["matches"] - self.maxrecords:
                # already on the last page: offer to wrap to the start
                msg = self.tr('End of results. Go to start?')
                res = QMessageBox.information(self, self.tr('Navigation'),
                                              msg,
                                              (QMessageBox.Ok |
                                               QMessageBox.Cancel))
                if res == QMessageBox.Ok:
                    self.startfrom = 1
                else:
                    return
            else:
                self.startfrom += self.maxrecords
        elif caller == "btnPrev":
            if self.startfrom == 1:
                # on the first page: offer to wrap to the end
                msg = self.tr('Start of results. Go to end?')
                res = QMessageBox.information(self, self.tr('Navigation'),
                                              msg,
                                              (QMessageBox.Ok |
                                               QMessageBox.Cancel))
                if res == QMessageBox.Ok:
                    self.startfrom = (self.catalog.results['matches'] -
                                      self.maxrecords + 1)
                else:
                    return
            elif self.startfrom <= self.maxrecords:
                # less than a full page behind us: clamp to the start
                self.startfrom = 1
            else:
                self.startfrom -= self.maxrecords
        # re-run the search at the new offset
        try:
            with OverrideCursor(Qt.WaitCursor):
                self.catalog.getrecords2(constraints=self.constraints,
                                         maxrecords=self.maxrecords,
                                         startposition=self.startfrom, esn='full')
        except ExceptionReport as err:
            QMessageBox.warning(self, self.tr('Search error'),
                                self.tr('Search error: {0}').format(err))
            return
        except Exception as err:
            QMessageBox.warning(self, self.tr('Connection error'),
                                self.tr('Connection error: {0}').format(err))
            return
        self.display_results()
    def add_to_ows(self):
        """add to OWS provider connection list

        Saves the selected record's service endpoint as a QGIS OWS
        connection (prompting on name collisions), then opens the
        embedded provider source-select widget pre-pointed at it.
        """
        conn_name_matches = []
        item = self.treeRecords.currentItem()
        if not item:
            return
        # service URLs were stored on the item as JSON by find_services()
        item_data = json.loads(get_item_data(item, 'link'))
        caller = self.sender().objectName()

        # stype = human name,/qgis/connections-%s,providername
        if caller == 'mActionAddWms':
            stype = ['OGC:WMS/OGC:WMTS', 'wms', 'wms']
            data_url = item_data['wms']
        elif caller == 'mActionAddWfs':
            stype = ['OGC:WFS', 'wfs', 'WFS']
            data_url = item_data['wfs']
        elif caller == 'mActionAddWcs':
            stype = ['OGC:WCS', 'wcs', 'wcs']
            data_url = item_data['wcs']
        elif caller == 'mActionAddAms':
            stype = ['ESRI:ArcGIS:MapServer', 'ams', 'arcgismapserver']
            # normalise the URL so it ends exactly at .../MapServer
            data_url = item_data['ams'].split('MapServer')[0] + 'MapServer'
        elif caller == 'mActionAddAfs':
            stype = ['ESRI:ArcGIS:FeatureServer', 'afs', 'arcgisfeatureserver']
            data_url = item_data['afs'].split('FeatureServer')[0] + 'FeatureServer'

        sname = '%s from MetaSearch' % stype[1]

        # store connection
        # check if there is a connection with same name
        # (ArcGIS services key their settings group on the provider name)
        if caller in ['mActionAddAms', 'mActionAddAfs']:
            self.settings.beginGroup('/qgis/connections-%s' % stype[2])
        else:
            self.settings.beginGroup('/qgis/connections-%s' % stype[1])
        keys = self.settings.childGroups()
        self.settings.endGroup()

        for key in keys:
            if key.startswith(sname):
                conn_name_matches.append(key)
        if conn_name_matches:
            # reuse the most recent name sharing our prefix
            sname = conn_name_matches[-1]

        # check for duplicates
        if sname in keys:  # duplicate found
            msg = self.tr('Connection {0} exists. Overwrite?').format(sname)
            res = QMessageBox.warning(self, self.tr('Saving server'), msg,
                                      QMessageBox.Yes | QMessageBox.No | QMessageBox.Cancel)
            if res == QMessageBox.No:  # assign new name with serial
                sname = serialize_string(sname)
            elif res == QMessageBox.Cancel:
                return

        # no dups detected or overwrite is allowed
        if caller in ['mActionAddAms', 'mActionAddAfs']:
            self.settings.beginGroup('/qgis/connections-%s' % stype[2])
        else:
            self.settings.beginGroup('/qgis/connections-%s' % stype[1])
        self.settings.setValue('/%s/url' % sname, clean_ows_url(data_url))
        self.settings.endGroup()

        # open provider window
        ows_provider = QgsGui.sourceSelectProviderRegistry().createSelectionWidget(
            stype[2],
            self,
            Qt.Widget,
            QgsProviderRegistry.WidgetMode.Embedded)
        service_type = stype[0]

        # connect dialog signals to iface slots; each provider widget
        # exposes a differently-named combo box and connect slot
        if service_type == 'OGC:WMS/OGC:WMTS':
            ows_provider.addRasterLayer.connect(self.iface.addRasterLayer)
            conn_cmb = ows_provider.findChild(QWidget, 'cmbConnections')
            connect = 'btnConnect_clicked'
        elif service_type == 'OGC:WFS':
            def addVectorLayer(path, name):
                self.iface.mainWindow().addVectorLayer(path, name, 'WFS')

            ows_provider.addVectorLayer.connect(addVectorLayer)
            conn_cmb = ows_provider.findChild(QWidget, 'cmbConnections')
            connect = 'connectToServer'
        elif service_type == 'OGC:WCS':
            ows_provider.addRasterLayer.connect(self.iface.addRasterLayer)
            conn_cmb = ows_provider.findChild(QWidget, 'mConnectionsComboBox')
            connect = 'mConnectButton_clicked'
        elif service_type == 'ESRI:ArcGIS:MapServer':
            ows_provider.addRasterLayer.connect(self.iface.addRasterLayer)
            conn_cmb = ows_provider.findChild(QComboBox)
            connect = 'connectToServer'
        elif service_type == 'ESRI:ArcGIS:FeatureServer':
            def addAfsLayer(path, name):
                self.iface.mainWindow().addVectorLayer(path, name, 'afs')

            ows_provider.addVectorLayer.connect(addAfsLayer)
            conn_cmb = ows_provider.findChild(QComboBox)
            connect = 'connectToServer'
        ows_provider.setModal(False)
        ows_provider.show()

        # open provider dialogue against added OWS
        index = conn_cmb.findText(sname)
        if index > -1:
            conn_cmb.setCurrentIndex(index)
            # only for wfs
            if service_type == 'OGC:WFS':
                ows_provider.cmbConnections_activated(index)
            elif service_type in ['ESRI:ArcGIS:MapServer', 'ESRI:ArcGIS:FeatureServer']:
                ows_provider.cmbConnections_activated(index)
        getattr(ows_provider, connect)()
def add_gis_file(self):
"""add GIS file from result"""
item = self.treeRecords.currentItem()
if not item:
return
item_data = json.loads(get_item_data(item, 'link'))
gis_file = item_data['gis_file']
title = item_data['title']
layer = self.iface.addVectorLayer(gis_file, title, "ogr")
if not layer:
self.iface.messageBar().pushWarning(None, "Layer failed to load!")
    def show_metadata(self):
        """show record metadata

        Fetches the full record for the current tree item from the CSW
        (on a fresh connection, honouring the SSL-verification checkbox)
        and renders it in a RecordDialog.
        """
        if not self.treeRecords.selectedItems():
            return
        item = self.treeRecords.currentItem()
        if not item:
            return

        identifier = get_item_data(item, 'identifier')

        self.disable_ssl_verification = self.disableSSLVerification.isChecked()
        auth = None
        if self.disable_ssl_verification:
            try:
                auth = Authentication(verify=False)
            except NameError:
                # owslib too old to provide Authentication; proceed without
                pass
        try:
            with OverrideCursor(Qt.WaitCursor):
                if auth is not None:
                    cat = CatalogueServiceWeb(self.catalog_url, timeout=self.timeout,  # spellok
                                              username=self.catalog_username,
                                              password=self.catalog_password,
                                              auth=auth)
                else:
                    # older owslib version without the auth keyword
                    cat = CatalogueServiceWeb(self.catalog_url, timeout=self.timeout,  # spellok
                                              username=self.catalog_username,
                                              password=self.catalog_password)
                cat.getrecordbyid(
                    [self.catalog.records[identifier].identifier])
        except ExceptionReport as err:
            QMessageBox.warning(self, self.tr('GetRecords error'),
                                self.tr('Error getting response: {0}').format(err))
            return
        except KeyError as err:
            QMessageBox.warning(self,
                                self.tr('Record parsing error'),
                                self.tr('Unable to locate record identifier'))
            return

        record = cat.records[identifier]
        # keep the request URL with the record so the template can link it
        record.xml_url = cat.request

        crd = RecordDialog()
        metadata = render_template('en', self.context,
                                   record, 'record_metadata_dc.html')

        style = QgsApplication.reportStyleSheet()
        crd.textMetadata.document().setDefaultStyleSheet(style)
        crd.textMetadata.setHtml(metadata)
        crd.exec_()
def show_xml(self):
"""show XML request / response"""
crd = XMLDialog()
request_html = highlight_xml(self.context, self.catalog.request)
response_html = highlight_xml(self.context, self.catalog.response)
style = QgsApplication.reportStyleSheet()
crd.txtbrXMLRequest.clear()
crd.txtbrXMLResponse.clear()
crd.txtbrXMLRequest.document().setDefaultStyleSheet(style)
crd.txtbrXMLResponse.document().setDefaultStyleSheet(style)
crd.txtbrXMLRequest.setHtml(request_html)
crd.txtbrXMLResponse.setHtml(response_html)
crd.exec_()
def reset_buttons(self, services=True, xml=True, navigation=True):
"""Convenience function to disable WMS/WMTS|WFS|WCS buttons"""
if services:
self.tbAddData.setEnabled(False)
self.mActionAddWms.setEnabled(False)
self.mActionAddWfs.setEnabled(False)
self.mActionAddWcs.setEnabled(False)
self.mActionAddAms.setEnabled(False)
self.mActionAddAfs.setEnabled(False)
self.mActionAddGisFile.setEnabled(False)
if xml:
self.btnShowXml.setEnabled(False)
if navigation:
self.btnFirst.setEnabled(False)
self.btnPrev.setEnabled(False)
self.btnNext.setEnabled(False)
self.btnLast.setEnabled(False)
    def help(self):
        """launch help"""
        # open the MetaSearch help URL in the default web browser
        open_url(get_help_url())
    def reject(self):
        """back out of dialogue"""
        QDialog.reject(self)
        # clear any bounding-box highlight left on the map canvas
        self.rubber_band.reset()
    def _get_csw(self):
        """convenience function to init owslib.csw.CatalogueServiceWeb

        Stores the connection on self.catalog and returns True on
        success; on any failure shows a warning and returns False.
        """  # spellok
        # honour the UI checkbox for skipping SSL certificate checks
        self.disable_ssl_verification = self.disableSSLVerification.isChecked()
        auth = None
        if self.disable_ssl_verification:
            try:
                auth = Authentication(verify=False)
            except NameError:
                # owslib too old to provide Authentication; proceed without
                pass

        # connect to the server
        with OverrideCursor(Qt.WaitCursor):
            try:
                if auth is not None:
                    self.catalog = CatalogueServiceWeb(self.catalog_url,  # spellok
                                                       timeout=self.timeout,
                                                       username=self.catalog_username,
                                                       password=self.catalog_password,
                                                       auth=auth)
                else:
                    # older owslib version without the auth keyword
                    self.catalog = CatalogueServiceWeb(self.catalog_url,  # spellok
                                                       timeout=self.timeout,
                                                       username=self.catalog_username,
                                                       password=self.catalog_password)
                return True
            except ExceptionReport as err:
                msg = self.tr('Error connecting to service: {0}').format(err)
            except ValueError as err:
                msg = self.tr('Value Error: {0}').format(err)
            except Exception as err:
                msg = self.tr('Unknown Error: {0}').format(err)
            # all failure paths fall through to a single warning dialog
            QMessageBox.warning(self, self.tr('CSW Connection error'), msg)
        return False
    def install_proxy(self):
        """set proxy if one is set in QGIS network settings"""
        # initially support HTTP for now
        if self.settings.value('/proxy/proxyEnabled') == 'true':
            if self.settings.value('/proxy/proxyType') == 'HttpProxy':
                ptype = 'http'
            else:
                # only HTTP proxies are handled; silently skip others
                return

            user = self.settings.value('/proxy/proxyUser')
            password = self.settings.value('/proxy/proxyPassword')
            host = self.settings.value('/proxy/proxyHost')
            port = self.settings.value('/proxy/proxyPort')

            proxy_up = ''
            proxy_port = ''

            if all([user != '', password != '']):
                # NOTE(review): assumes QSettings returns strings here;
                # a None value would still pass this check -- confirm
                proxy_up = '%s:%s@' % (user, password)

            if port != '':
                proxy_port = ':%s' % port

            # e.g. http://user:pass@host:port
            conn = '%s://%s%s%s' % (ptype, proxy_up, host, proxy_port)
            # route all urllib requests through the configured proxy
            install_opener(build_opener(ProxyHandler({ptype: conn})))
def save_connections():
    """save servers to list"""
    # mode 0 -- presumably the "save" mode of the dialog; verify
    # against ManageConnectionsDialog
    ManageConnectionsDialog(0).exec_()
def get_item_data(item, field):
    """return the value stored for *field* ('identifier' or 'link')
    on a QTreeWidgetItem; data lives under item-data role 32"""
    return item.data(_get_field_value(field), 32)
def set_item_data(item, field, value):
    """store *value* for *field* ('identifier' or 'link') on a
    QTreeWidgetItem, under item-data role 32"""
    item.setData(_get_field_value(field), 32, value)
def _get_field_value(field):
"""convenience function to return field value integer"""
value = 0
if field == 'identifier':
value = 0
if field == 'link':
value = 1
return value
def bbox_to_polygon(bbox):
    """converts OWSLib bbox object to a WKT POLYGON string

    Returns ``None`` when any of the four corner coordinates is missing.
    (Bug fix: the old docstring claimed a list of QgsPointXY objects was
    returned, but the function has always produced WKT text.)
    """
    if all([bbox.minx is not None,
            bbox.maxx is not None,
            bbox.miny is not None,
            bbox.maxy is not None]):
        minx = float(bbox.minx)
        miny = float(bbox.miny)
        maxx = float(bbox.maxx)
        maxy = float(bbox.maxy)
        # closed ring: lower-left, upper-left, upper-right, lower-right,
        # back to lower-left
        return ('POLYGON((%.2f %.2f, %.2f %.2f, %.2f %.2f, %.2f %.2f, '
                '%.2f %.2f))' % (minx, miny, minx, maxy, maxx, maxy,
                                 maxx, miny, minx, miny))
    else:
        return None
| gpl-2.0 |
cecep-edu/edx-platform | cms/djangoapps/contentstore/views/tests/test_tabs.py | 129 | 8489 | """ Tests for tab functions (just primitive). """
import json
from contentstore.views import tabs
from contentstore.tests.utils import CourseTestCase
from contentstore.utils import reverse_course_url
from xmodule.x_module import STUDENT_VIEW
from xmodule.modulestore.tests.factories import CourseFactory, ItemFactory
from xmodule.modulestore.tests.django_utils import ModuleStoreTestCase
from xmodule.tabs import CourseTabList
from xmodule.modulestore.django import modulestore
class TabsPageTests(CourseTestCase):
    """Test cases for Tabs (a.k.a Pages) page"""

    def setUp(self):
        """Common setup for tests"""

        # call super class to setup course, etc.
        super(TabsPageTests, self).setUp()

        # Set the URL for tests
        self.url = reverse_course_url('tabs_handler', self.course.id)

        # add a static tab to the course, for code coverage
        self.test_tab = ItemFactory.create(
            parent_location=self.course.location,
            category="static_tab",
            display_name="Static_1"
        )
        self.reload_course()

    def check_invalid_tab_id_response(self, resp):
        """Verify response is an error listing the invalid_tab_id"""

        self.assertEqual(resp.status_code, 400)
        resp_content = json.loads(resp.content)
        self.assertIn("error", resp_content)
        self.assertIn("invalid_tab_id", resp_content['error'])

    def test_not_implemented(self):
        """Verify not implemented errors"""

        # JSON GET request not supported
        with self.assertRaises(NotImplementedError):
            self.client.get(self.url)

        # JSON POST request not supported
        with self.assertRaises(NotImplementedError):
            self.client.ajax_post(
                self.url,
                data=json.dumps({
                    'tab_id_locator': {'tab_id': 'courseware'},
                    'unsupported_request': None,
                }),
            )

        # invalid JSON POST request
        with self.assertRaises(NotImplementedError):
            self.client.ajax_post(
                self.url,
                data={'invalid_request': None},
            )

    def test_view_index(self):
        """Basic check that the Pages page responds correctly"""

        resp = self.client.get_html(self.url)
        self.assertEqual(resp.status_code, 200)
        self.assertIn('course-nav-list', resp.content)

    def test_reorder_tabs(self):
        """Test re-ordering of tabs"""

        # get the original tab ids
        orig_tab_ids = [tab.tab_id for tab in self.course.tabs]
        tab_ids = list(orig_tab_ids)
        num_orig_tabs = len(orig_tab_ids)

        # make sure we have enough tabs to play around with
        self.assertTrue(num_orig_tabs >= 5)

        # reorder the last two tabs
        tab_ids[num_orig_tabs - 1], tab_ids[num_orig_tabs - 2] = tab_ids[num_orig_tabs - 2], tab_ids[num_orig_tabs - 1]

        # remove the middle tab
        # (the code needs to handle the case where tabs requested for re-ordering is a subset of the tabs in the course)
        # NOTE: Python 2 integer division selects the middle index
        removed_tab = tab_ids.pop(num_orig_tabs / 2)
        self.assertTrue(len(tab_ids) == num_orig_tabs - 1)

        # post the request
        resp = self.client.ajax_post(
            self.url,
            data={'tabs': [{'tab_id': tab_id} for tab_id in tab_ids]},
        )
        self.assertEqual(resp.status_code, 204)

        # reload the course and verify the new tab order
        self.reload_course()
        new_tab_ids = [tab.tab_id for tab in self.course.tabs]
        # tabs omitted from the request end up appended after the others
        self.assertEqual(new_tab_ids, tab_ids + [removed_tab])
        self.assertNotEqual(new_tab_ids, orig_tab_ids)

    def test_reorder_tabs_invalid_list(self):
        """Test re-ordering of tabs with invalid tab list"""

        orig_tab_ids = [tab.tab_id for tab in self.course.tabs]
        tab_ids = list(orig_tab_ids)

        # reorder the first two tabs
        tab_ids[0], tab_ids[1] = tab_ids[1], tab_ids[0]

        # post the request
        resp = self.client.ajax_post(
            self.url,
            data={'tabs': [{'tab_id': tab_id} for tab_id in tab_ids]},
        )
        self.assertEqual(resp.status_code, 400)
        resp_content = json.loads(resp.content)
        self.assertIn("error", resp_content)

    def test_reorder_tabs_invalid_tab(self):
        """Test re-ordering of tabs with invalid tab"""

        invalid_tab_ids = ['courseware', 'info', 'invalid_tab_id']

        # post the request
        resp = self.client.ajax_post(
            self.url,
            data={'tabs': [{'tab_id': tab_id} for tab_id in invalid_tab_ids]},
        )
        self.check_invalid_tab_id_response(resp)

    def check_toggle_tab_visiblity(self, tab_type, new_is_hidden_setting):
        """Helper method to check changes in tab visibility"""

        # find the tab
        old_tab = CourseTabList.get_tab_by_type(self.course.tabs, tab_type)

        # visibility should be different from new setting
        self.assertNotEqual(old_tab.is_hidden, new_is_hidden_setting)

        # post the request
        resp = self.client.ajax_post(
            self.url,
            data=json.dumps({
                'tab_id_locator': {'tab_id': old_tab.tab_id},
                'is_hidden': new_is_hidden_setting,
            }),
        )
        self.assertEqual(resp.status_code, 204)

        # reload the course and verify the new visibility setting
        self.reload_course()
        new_tab = CourseTabList.get_tab_by_type(self.course.tabs, tab_type)
        self.assertEqual(new_tab.is_hidden, new_is_hidden_setting)

    def test_toggle_tab_visibility(self):
        """Test toggling of tab visibility"""
        self.check_toggle_tab_visiblity('wiki', True)
        self.check_toggle_tab_visiblity('wiki', False)

    def test_toggle_invalid_tab_visibility(self):
        """Test toggling visibility of an invalid tab"""

        # post the request
        resp = self.client.ajax_post(
            self.url,
            data=json.dumps({
                'tab_id_locator': {'tab_id': 'invalid_tab_id'}
            }),
        )
        self.check_invalid_tab_id_response(resp)

    def test_tab_preview_html(self):
        """
        Verify that the static tab renders itself with the correct HTML
        """
        preview_url = '/xblock/{}/{}'.format(self.test_tab.location, STUDENT_VIEW)

        resp = self.client.get(preview_url, HTTP_ACCEPT='application/json')
        self.assertEqual(resp.status_code, 200)
        resp_content = json.loads(resp.content)
        html = resp_content['html']

        # Verify that the HTML contains the expected elements
        self.assertIn('<span class="action-button-text">Edit</span>', html)
        self.assertIn('<span class="sr">Duplicate this component</span>', html)
        self.assertIn('<span class="sr">Delete this component</span>', html)
        self.assertIn('<span data-tooltip="Drag to reorder" class="drag-handle action"></span>', html)
class PrimitiveTabEdit(ModuleStoreTestCase):
    """Tests for the primitive tab edit data manipulations"""

    def test_delete(self):
        """Test primitive tab deletion."""
        course = CourseFactory.create()
        # positions 0 and 1 are rejected; position 6 is out of range
        with self.assertRaises(ValueError):
            tabs.primitive_delete(course, 0)
        with self.assertRaises(ValueError):
            tabs.primitive_delete(course, 1)
        with self.assertRaises(IndexError):
            tabs.primitive_delete(course, 6)
        tabs.primitive_delete(course, 2)
        self.assertFalse({u'type': u'textbooks'} in course.tabs)
        # Check that discussion has shifted up
        self.assertEquals(course.tabs[2], {'type': 'discussion', 'name': 'Discussion'})

    def test_insert(self):
        """Test primitive tab insertion."""
        course = CourseFactory.create()
        tabs.primitive_insert(course, 2, 'notes', 'aname')
        self.assertEquals(course.tabs[2], {'type': 'notes', 'name': 'aname'})
        # inserting at position 0, or inserting a static_tab, must fail
        with self.assertRaises(ValueError):
            tabs.primitive_insert(course, 0, 'notes', 'aname')
        with self.assertRaises(ValueError):
            tabs.primitive_insert(course, 3, 'static_tab', 'aname')

    def test_save(self):
        """Test course saving."""
        course = CourseFactory.create()
        tabs.primitive_insert(course, 3, 'notes', 'aname')
        # the insert must be visible on a freshly loaded copy
        course2 = modulestore().get_course(course.id)
        self.assertEquals(course2.tabs[3], {'type': 'notes', 'name': 'aname'})
| agpl-3.0 |
autotest/virt-test | virttest/syslog_server.py | 30 | 4280 | import re
import logging
import SocketServer
SYSLOG_PORT = 514
DEFAULT_FORMAT = '[AutotestSyslog (%s.%s)] %s'
def set_default_format(message_format):
    '''
    Changes the default message format

    :type message_format: string
    :param message_format: a message format string with 3 placeholders:
                           facility, priority and message.
    '''
    # module-level default consumed by RequestHandler.log() when no
    # explicit format is passed
    global DEFAULT_FORMAT
    DEFAULT_FORMAT = message_format
def get_default_format():
    '''
    Returns the current default message format

    :return: the module-level format string set via set_default_format()
    '''
    return DEFAULT_FORMAT
class RequestHandler(SocketServer.BaseRequestHandler):
    '''
    A request handler that relays all received messages as DEBUG
    '''

    # a syslog record looks like "<PRI>message"; PRI encodes both the
    # facility and the priority in a single integer
    RECORD_RE = re.compile('\<(\d+)\>(.*)')

    # priority codes 0..7
    (LOG_EMERG,
     LOG_ALERT,
     LOG_CRIT,
     LOG_ERR,
     LOG_WARNING,
     LOG_NOTICE,
     LOG_INFO,
     LOG_DEBUG) = range(8)

    # facility codes 0..11
    (LOG_KERN,
     LOG_USER,
     LOG_MAIL,
     LOG_DAEMON,
     LOG_AUTH,
     LOG_SYSLOG,
     LOG_LPR,
     LOG_NEWS,
     LOG_UUCP,
     LOG_CRON,
     LOG_AUTHPRIV,
     LOG_FTP) = range(12)

    # local-use facility codes 16..23
    (LOG_LOCAL0,
     LOG_LOCAL1,
     LOG_LOCAL2,
     LOG_LOCAL3,
     LOG_LOCAL4,
     LOG_LOCAL5,
     LOG_LOCAL6,
     LOG_LOCAL7) = range(16, 24)

    # human-readable names for the priority codes above
    PRIORITY_NAMES = {
        LOG_ALERT: "alert",
        LOG_CRIT: "critical",
        LOG_DEBUG: "debug",
        LOG_EMERG: "emerg",
        LOG_ERR: "err",
        LOG_INFO: "info",
        LOG_NOTICE: "notice",
        LOG_WARNING: "warning"
    }

    # human-readable names for the facility codes above
    # NOTE(review): LOG_AUTH appears twice in this dict literal, so the
    # later "security" entry silently overrides the earlier "auth" one;
    # confirm which name is intended before changing
    FACILITY_NAMES = {
        LOG_AUTH: "auth",
        LOG_AUTHPRIV: "authpriv",
        LOG_CRON: "cron",
        LOG_DAEMON: "daemon",
        LOG_FTP: "ftp",
        LOG_KERN: "kern",
        LOG_LPR: "lpr",
        LOG_MAIL: "mail",
        LOG_NEWS: "news",
        LOG_AUTH: "security",
        LOG_SYSLOG: "syslog",
        LOG_USER: "user",
        LOG_UUCP: "uucp",
        LOG_LOCAL0: "local0",
        LOG_LOCAL1: "local1",
        LOG_LOCAL2: "local2",
        LOG_LOCAL3: "local3",
        LOG_LOCAL4: "local4",
        LOG_LOCAL5: "local5",
        LOG_LOCAL6: "local6",
        LOG_LOCAL7: "local7",
    }

    def decodeFacilityPriority(self, priority):
        '''
        Decode both the facility and priority embedded in a syslog message

        :type priority: integer
        :param priority: an integer with facility and priority encoded
        :return: a tuple with two strings
        '''
        # facility lives in the upper bits, priority in the low 3 bits
        f = priority >> 3
        p = priority & 7
        return (self.FACILITY_NAMES.get(f, 'unknown'),
                self.PRIORITY_NAMES.get(p, 'unknown'))

    def log(self, data, message_format=None):
        '''
        Logs the received message as a DEBUG message

        Messages that do not match the "<PRI>message" pattern are
        silently ignored.
        '''
        match = self.RECORD_RE.match(data)
        if match:
            if message_format is None:
                message_format = get_default_format()
            pri = int(match.groups()[0])
            msg = match.groups()[1]
            (facility_name, priority_name) = self.decodeFacilityPriority(pri)
            logging.debug(message_format, facility_name, priority_name, msg)
class RequestHandlerTcp(RequestHandler):

    def handle(self):
        '''
        Handles a single request
        '''
        # for TCP servers, self.request is the connected socket
        data = self.request.recv(4096)
        self.log(data)
class RequestHandlerUdp(RequestHandler):

    def handle(self):
        '''
        Handles a single request
        '''
        # for UDP servers, self.request is a (data, socket) pair
        data = self.request[0]
        self.log(data)
class SysLogServerUdp(SocketServer.UDPServer):
    # UDP syslog server bound to the UDP request handler

    def __init__(self, address):
        SocketServer.UDPServer.__init__(self, address, RequestHandlerUdp)
class SysLogServerTcp(SocketServer.TCPServer):
    # TCP syslog server bound to the TCP request handler

    def __init__(self, address):
        SocketServer.TCPServer.__init__(self, address, RequestHandlerTcp)
def syslog_server(address='', port=SYSLOG_PORT,
                  tcp=True, terminate_callable=None):
    '''
    Serve syslog requests until terminate_callable() returns True.

    :param address: interface to bind to ('' means all interfaces)
    :param port: port to listen on (defaults to the standard syslog port)
    :param tcp: serve TCP when True, UDP otherwise
    :param terminate_callable: optional zero-argument callable polled
        before each request; a true return value stops the server
    '''
    klass = SysLogServerTcp if tcp else SysLogServerUdp
    syslog = klass((address, port))
    try:
        while True:
            # bug fix: the old loop never exited when terminate_callable()
            # returned True -- it just busy-spinned forever without
            # serving; now the loop actually terminates
            if terminate_callable is not None and terminate_callable():
                break
            syslog.handle_request()
    finally:
        # release the listening socket even on error
        syslog.server_close()
if __name__ == '__main__':
    # stand-alone mode: log everything and serve TCP syslog until killed
    logging.basicConfig(level=logging.DEBUG)
    syslog_server()
| gpl-2.0 |
kenshay/ImageScript | ProgramData/SystemFiles/Python/Lib/site-packages/OpenGL/GL/NV/texture_compression_vtc.py | 9 | 1162 | '''OpenGL extension NV.texture_compression_vtc
This module customises the behaviour of the
OpenGL.raw.GL.NV.texture_compression_vtc to provide a more
Python-friendly API
Overview (from the spec)
This extension adds support for the VTC 3D texture compression
formats, which are analogous to the S3TC texture compression formats,
with the addition of some retiling in the Z direction. VTC has the
same compression ratio as S3TC and uses 4x4x1, 4x4x2, (4x4x3 when
non-power-of-two textures are supported), or 4x4x4 blocks.
The official definition of this extension is available here:
http://www.opengl.org/registry/specs/NV/texture_compression_vtc.txt
'''
from OpenGL import platform, constant, arrays
from OpenGL import extensions, wrapper
import ctypes
from OpenGL.raw.GL import _types, _glgets
from OpenGL.raw.GL.NV.texture_compression_vtc import *
from OpenGL.raw.GL.NV.texture_compression_vtc import _EXTENSION_NAME
def glInitTextureCompressionVtcNV():
    '''Return boolean indicating whether this extension is available'''
    from OpenGL import extensions
    # queries the extension registry for NV_texture_compression_vtc
    return extensions.hasGLExtension( _EXTENSION_NAME )
### END AUTOGENERATED SECTION | gpl-3.0 |
Argon-Zhou/django | django/conf/locale/sv/formats.py | 504 | 1569 | # -*- encoding: utf-8 -*-
# This file is distributed under the same license as the Django package.
#
from __future__ import unicode_literals
# The *_FORMAT strings use the Django date format syntax,
# see http://docs.djangoproject.com/en/dev/ref/templates/builtins/#date
DATE_FORMAT = 'j F Y'
TIME_FORMAT = 'H:i'
DATETIME_FORMAT = 'j F Y H:i'
YEAR_MONTH_FORMAT = 'F Y'
MONTH_DAY_FORMAT = 'j F'
SHORT_DATE_FORMAT = 'Y-m-d'
SHORT_DATETIME_FORMAT = 'Y-m-d H:i'
FIRST_DAY_OF_WEEK = 1  # Monday

# The *_INPUT_FORMATS strings use the Python strftime format syntax,
# see http://docs.python.org/library/datetime.html#strftime-strptime-behavior
# Kept ISO formats as they are in first position
DATE_INPUT_FORMATS = [
    '%Y-%m-%d',  # '2006-10-25'
    # NOTE(review): month-first (US-style) formats are unusual for the
    # sv locale -- confirm they are intentional before removing
    '%m/%d/%Y',  # '10/25/2006'
    '%m/%d/%y',  # '10/25/06'
]
DATETIME_INPUT_FORMATS = [
    '%Y-%m-%d %H:%M:%S',     # '2006-10-25 14:30:59'
    '%Y-%m-%d %H:%M:%S.%f',  # '2006-10-25 14:30:59.000200'
    '%Y-%m-%d %H:%M',        # '2006-10-25 14:30'
    '%Y-%m-%d',              # '2006-10-25'
    '%m/%d/%Y %H:%M:%S',     # '10/25/2006 14:30:59'
    '%m/%d/%Y %H:%M:%S.%f',  # '10/25/2006 14:30:59.000200'
    '%m/%d/%Y %H:%M',        # '10/25/2006 14:30'
    '%m/%d/%Y',              # '10/25/2006'
    '%m/%d/%y %H:%M:%S',     # '10/25/06 14:30:59'
    '%m/%d/%y %H:%M:%S.%f',  # '10/25/06 14:30:59.000200'
    '%m/%d/%y %H:%M',        # '10/25/06 14:30'
    '%m/%d/%y',              # '10/25/06'
]
DECIMAL_SEPARATOR = ','
THOUSAND_SEPARATOR = '\xa0'  # non-breaking space
NUMBER_GROUPING = 3
| bsd-3-clause |
santisiri/popego | popego/popserver/popserver/sync/__init__.py | 3 | 1842 | # -*- coding: utf-8 -*-
__docformat__='restructuredtext'
import datetime
from popserver.model import Tag, UserItem
from sqlalchemy.orm import object_session
def mergeItem(item, service):
    """
    Given an ``item``, check whether it already exists in the database.
    If it exists, expunge ``item`` from the session and use the one
    from the database. If it does not exist, attach the service to it.
    """
    dbItem = item.__class__.query.filter_by(external_id=item.external_id, service=service).first()
    if dbItem is None:
        # new item: adopt it and deduplicate each of its tags against
        # the database (in place, preserving order)
        dbItem = item
        dbItem.service = service
        for i in range(0, len(dbItem.tags)):
            dbItem.tags[i] = mergeTag(dbItem.tags[i])
    else:
        # already persisted: drop the transient copy from the session
        item.expunge()
    return dbItem
def mergeTag(tag):
    """Return the persisted Tag with the same name if one exists,
    otherwise the given (transient) tag unchanged."""
    persisted = Tag.get_by(name=tag.name)
    if persisted is None:
        return tag
    # a stored tag wins; detach the transient one from its session first
    if object_session(tag) is not None:
        tag.expunge()
    return persisted
def mergeUserItem(userItem, account):
    """ Merge the UserItem together with its associated Item """
    userItem.item = mergeItem(userItem.item, account.service)
    if userItem.item.id is not None:
        userItem = _mergeUserItemInstance(userItem, account.user)

    # Only the UserItem's own tags (UserItem._tags) are merged here.
    # UserItem.tags is not used because that would copy the Item's tags
    # onto the UserItem.
    for i in range(0, len(userItem._tags)):
        userItem._tags[i] = mergeTag(userItem._tags[i])

    userItem.user = account.user
    return userItem
def _mergeUserItemInstance(userItem, user):
    """ Merge only the UserItem instance itself (not its tags/item) """
    dbUserItem = UserItem.query.filter_by(user=user, item=userItem.item).first()
    if dbUserItem is None:
        dbUserItem = userItem
    else:
        # a stored UserItem wins; drop the transient one from the session
        userItem.expunge()
    return dbUserItem
| bsd-3-clause |
JoshAshby/Fla.gr | app/tests/test_seshat_baseHTMLObject.py | 1 | 3124 | #!/usr/bin/env python
"""
Test case for making sure that seshat is routing properly
and is capable of handling GET,POST,PUT and DELETE methods.
This indicates a very big problem, if one of these tests fails
because that indicates that the core of Seshat is broken
For more information, see: https://github.com/JoshAshby/
http://xkcd.com/353/
Josh Ashby
2013
http://joshashby.com
joshuaashby@joshashby.com
"""
from webtest import TestApp
from webtest.app import AppError
import nose.tools as nst
import seshat.coreApp as seshat
from seshat.route import route
from utils.baseHTMLObject import baseHTMLObject
htmlObj_urls = []  # route table used only by this test module


@route("/html/echo", htmlObj_urls)
class htmlObj_echo(baseHTMLObject):
    """
    Returns a basic page through GET which places one parameter from env["members"]
    into an HTML page, and returns 405 NOT ALLOWED for all other Methods
    """
    def GET(self):
        # echo the 'echo' request member back inside a minimal HTML page
        echo = self.env["members"]["echo"]
        return """
<html>
<head>
<title>echo</title>
</head>
<body>
%s
</body>
</html>
""" % echo

    def POST(self):
        """
        Replace with test code
        """
        # NOTE(review): the Allow header advertises "GET,POST" while POST
        # itself is rejected -- confirm the intended value
        self.head = ("405 NOT ALLOWED", [("Allow", "GET,POST"),
                                         ("Content-Type", "text/plain")])
        return "This isn't allowed"

    def PUT(self):
        """
        Replace with test code
        """
        self.head = ("405 NOT ALLOWED", [("Allow", "GET,POST"),
                                         ("Content-Type", "text/plain")])
        return "This isn't allowed"

    def DELETE(self):
        """
        Replace with test code
        """
        self.head = ("405 NOT ALLOWED", [("Allow", "GET,POST"),
                                         ("Content-Type", "text/plain")])
        return "This isn't allowed"
class test_seshat_htmlObj(object):
    """
    Exercises routing through the Seshat core app against the
    htmlObj_echo handler above: GET succeeds, every other verb is 405.
    """
    @classmethod
    def setup_class(cls):
        """
        Make a new instance of the Seshat core app, and replace it's
        URL list with our own so this test's routing is isolated
        """
        cls.app = TestApp(seshat.app)
        seshat.c.urls = htmlObj_urls
        cls.url = "/html/echo"
        cls.params = {"echo": "hello"}

    @classmethod
    def teardown_class(cls):
        """
        Destroy the created Seshat core app instance
        """
        del(cls.app)

    def test_seshat_htmlObj_get(self):
        """
        GET with params should render the echo page
        """
        get_reply = self.app.get(self.url, self.params)
        assert get_reply.status == "200 OK"

    @nst.raises(AppError)
    def test_seshat_htmlObj_post(self):
        """
        AppError for the 405 POST
        """
        post_reply = self.app.post(self.url)
        assert post_reply.status == "405 NOT ALLOWED"

    @nst.raises(AppError)
    def test_seshat_htmlObj_put(self):
        """
        AppError for the 405 PUT
        """
        put_reply = self.app.put(self.url)
        assert put_reply.status == "405 NOT ALLOWED"

    @nst.raises(AppError)
    def test_seshat_htmlObj_delete(self):
        """
        AppError for the 405 DELETE
        """
        delete_reply = self.app.delete(self.url)
        assert delete_reply.status == "405 NOT ALLOWED"
| mit |
seadsystem/website | web2py/gluon/languages.py | 8 | 36855 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
| This file is part of the web2py Web Framework
| Copyrighted by Massimo Di Pierro <mdipierro@cs.depaul.edu>
| License: LGPLv3 (http://www.gnu.org/licenses/lgpl.html)
| Plural subsystem is created by Vladyslav Kozlovskyy (Ukraine) <dbdevelop@gmail.com>
Translation system
--------------------------------------------
"""
import os
import re
import sys
import pkgutil
import logging
from cgi import escape
from threading import RLock
try:
import copyreg as copy_reg # python 3
except ImportError:
import copy_reg # python 2
from gluon.portalocker import read_locked, LockedFile
from utf8 import Utf8
from gluon.fileutils import listdir
from gluon.cfs import getcfs
from gluon.html import XML, xmlescape
from gluon.contrib.markmin.markmin2html import render, markmin_escape
from string import maketrans
__all__ = ['translator', 'findT', 'update_all_languages']
ostat = os.stat
oslistdir = os.listdir
pjoin = os.path.join
pexists = os.path.exists
pdirname = os.path.dirname
isdir = os.path.isdir
DEFAULT_LANGUAGE = 'en'
DEFAULT_LANGUAGE_NAME = 'English'
# DEFAULT PLURAL-FORMS RULES:
# language doesn't use plural forms
DEFAULT_NPLURALS = 1
# only one singular/plural form is used
DEFAULT_GET_PLURAL_ID = lambda n: 0
# word is unchangeable
DEFAULT_CONSTRUCT_PLURAL_FORM = lambda word, plural_id: word
NUMBERS = (int, long, float)
# pattern to find T(blah blah blah) expressions
PY_STRING_LITERAL_RE = r'(?<=[^\w]T\()(?P<name>'\
+ r"[uU]?[rR]?(?:'''(?:[^']|'{1,2}(?!'))*''')|"\
+ r"(?:'(?:[^'\\]|\\.)*')|" + r'(?:"""(?:[^"]|"{1,2}(?!"))*""")|'\
+ r'(?:"(?:[^"\\]|\\.)*"))'
PY_M_STRING_LITERAL_RE = r'(?<=[^\w]T\.M\()(?P<name>'\
+ r"[uU]?[rR]?(?:'''(?:[^']|'{1,2}(?!'))*''')|"\
+ r"(?:'(?:[^'\\]|\\.)*')|" + r'(?:"""(?:[^"]|"{1,2}(?!"))*""")|'\
+ r'(?:"(?:[^"\\]|\\.)*"))'
regex_translate = re.compile(PY_STRING_LITERAL_RE, re.DOTALL)
regex_translate_m = re.compile(PY_M_STRING_LITERAL_RE, re.DOTALL)
regex_param = re.compile(r'{(?P<s>.+?)}')
# pattern for a valid accept_language
regex_language = \
re.compile('([a-z]{2,3}(?:\-[a-z]{2})?(?:\-[a-z]{2})?)(?:[,;]|$)')
regex_langfile = re.compile('^[a-z]{2,3}(-[a-z]{2})?\.py$')
regex_backslash = re.compile(r"\\([\\{}%])")
regex_plural = re.compile('%({.+?})')
regex_plural_dict = re.compile('^{(?P<w>[^()[\]][^()[\]]*?)\((?P<n>[^()\[\]]+)\)}$') # %%{word(varname or number)}
regex_plural_tuple = re.compile(
'^{(?P<w>[^[\]()]+)(?:\[(?P<i>\d+)\])?}$') # %%{word[index]} or %%{word}
regex_plural_file = re.compile('^plural-[a-zA-Z]{2}(-[a-zA-Z]{2})?\.py$')
def is_writable():
    """ returns True if and only if the filesystem is writable """
    # Google App Engine runtimes have a read-only filesystem
    from gluon.settings import global_settings
    return not global_settings.web2py_runtime_gae
def safe_eval(text):
    """Safely evaluate a Python literal expression.

    Blank/whitespace-only input yields ``None``.  Uses
    ``ast.literal_eval`` when available; on interpreters too old to
    ship ``ast``, falls back to ``eval`` with empty globals/locals.
    """
    if not text.strip():
        return None
    try:
        import ast
    except ImportError:
        # pre-ast interpreter: evaluate with an empty environment
        return eval(text, {}, {})
    return ast.literal_eval(text)
# used as default filter in translator.M()
def markmin(s):
    """Render *s* as markmin markup (used as translator.M's default
    filter), escaping {param} placeholders so they survive rendering."""
    def markmin_aux(m):
        # protect substitution parameters from markmin processing
        return '{%s}' % markmin_escape(m.group('s'))
    return render(regex_param.sub(markmin_aux, s),
                  sep='br', autolinks=None, id_prefix='')
# UTF8 helper functions
def upper_fun(s):
return unicode(s, 'utf-8').upper().encode('utf-8')
def title_fun(s):
return unicode(s, 'utf-8').title().encode('utf-8')
def cap_fun(s):
    """Capitalize a utf-8 byte string, returning utf-8 bytes."""
    text = unicode(s, 'utf-8')
    return text.capitalize().encode('utf-8')
# translation tables protecting the special characters \ % { } during
# parameter substitution: mapped to control codes on the way in (ttab_in)
# and restored on the way out (ttab_out)
ttab_in = maketrans("\\%{}", '\x1c\x1d\x1e\x1f')
ttab_out = maketrans('\x1c\x1d\x1e\x1f', "\\%{}")
# cache of translated messages:
# global_language_cache:
# { 'languages/xx.py':
# ( {"def-message": "xx-message",
# ...
# "def-message": "xx-message"}, lock_object )
# 'languages/yy.py': ( {dict}, lock_object )
# ...
# }
global_language_cache = {}
def get_from_cache(cache, val, fun):
    """Thread-safe lookup of *val* in *cache* (a (dict, lock) pair).

    On a miss the value is computed by calling *fun*() and stored.  Unlike
    the previous ``setdefault(val, fun())`` approach, *fun* is never
    re-evaluated when the key is already present (including cached falsy
    values such as '') -- important because *fun* may have side effects,
    e.g. writing language files to disk.

    Args:
        cache: tuple ``(lang_dict, lock)`` as stored in global_language_cache
        val: cache key
        fun: zero-argument callable producing the value on a miss

    Returns:
        The cached or freshly computed value.
    """
    lang_dict, lock = cache
    lock.acquire()
    try:
        result = lang_dict.get(val)
    finally:
        lock.release()
    if result is not None:
        return result
    lock.acquire()
    try:
        # re-check under the lock; only call fun() if the key is still absent
        result = lang_dict.get(val)
        if result is None:
            result = lang_dict[val] = fun()
    finally:
        lock.release()
    return result
def clear_cache(filename):
    """Drop every cached translation for *filename*, creating its cache
    slot (dict + lock) first if it does not exist yet."""
    lang_dict, lock = global_language_cache.setdefault(
        filename, ({}, RLock()))
    lock.acquire()
    try:
        lang_dict.clear()
    finally:
        lock.release()
def read_dict_aux(filename):
    """Load the language file *filename*, normalising line endings and
    invalidating any cached translations for it.

    Returns {} for an empty file and an error-marker dict
    ``{'__corrupted__': status}`` when the file cannot be parsed.
    """
    raw = read_locked(filename).replace('\r\n', '\n')
    clear_cache(filename)
    try:
        return safe_eval(raw) or {}
    except Exception:
        err = sys.exc_info()[1]
        status = 'Syntax error in %s (%s)' % (filename, err)
        logging.error(status)
        return {'__corrupted__': status}
def read_dict(filename):
    """Return the dictionary of translation messages held in *filename*
    (loaded through the getcfs file-backed cache)."""
    loader = lambda: read_dict_aux(filename)
    return getcfs('lang:' + filename, filename, loader)
def read_possible_plural_rules():
    """
    Create a dictionary of all available plural-rules modules.

    Scans gluon.contrib.plural_rules for 2-letter language modules and
    collects their (lang, nplurals, get_plural_id, construct_plural_form)
    tuples.  Returns {} when the package cannot be imported.

    The result is cached in the PLURAL_RULES dictionary to increase speed.
    """
    plurals = {}
    try:
        import gluon.contrib.plural_rules as package
        for importer, modname, ispkg in pkgutil.iter_modules(package.__path__):
            # only 2-letter language modules carry plural rules
            if len(modname) == 2:
                module = __import__(package.__name__ + '.' + modname,
                                    fromlist=[modname])
                lang = modname
                nplurals = getattr(module, 'nplurals', DEFAULT_NPLURALS)
                get_plural_id = getattr(
                    module, 'get_plural_id',
                    DEFAULT_GET_PLURAL_ID)
                construct_plural_form = getattr(
                    module, 'construct_plural_form',
                    DEFAULT_CONSTRUCT_PLURAL_FORM)
                plurals[lang] = (lang, nplurals, get_plural_id,
                                 construct_plural_form)
    except ImportError:
        e = sys.exc_info()[1]
        # logging.warn is a deprecated alias of logging.warning
        logging.warning('Unable to import plural rules: %s' % e)
    return plurals
# {2-letter code: (lang, nplurals, get_plural_id, construct_plural_form)}
PLURAL_RULES = read_possible_plural_rules()
def read_possible_languages_aux(langdir):
    """Scan *langdir* and build {lang: language-info-tuple} for every
    language file (xx.py / xx-yy.py / default.py) found there."""
    def get_lang_struct(lang, langcode, langname, langfile_mtime):
        # assemble the 9-element info tuple describing one language
        if lang == 'default':
            real_lang = langcode.lower()
        else:
            real_lang = lang
        (prules_langcode,
         nplurals,
         get_plural_id,
         construct_plural_form
         ) = PLURAL_RULES.get(real_lang[:2], ('default',
                                              DEFAULT_NPLURALS,
                                              DEFAULT_GET_PLURAL_ID,
                                              DEFAULT_CONSTRUCT_PLURAL_FORM))
        if prules_langcode != 'default':
            (pluraldict_fname,
             pluraldict_mtime) = plurals.get(real_lang,
                                             plurals.get(real_lang[:2],
                                                         ('plural-%s.py' % real_lang, 0)))
        else:
            pluraldict_fname = None
            pluraldict_mtime = 0
        return (langcode, # language code from !langcode!
                langname,
                # language name in national spelling from !langname!
                langfile_mtime, # m_time of language file
                pluraldict_fname, # name of plural dictionary file or None (when default.py is not exist)
                pluraldict_mtime, # m_time of plural dictionary file or 0 if file is not exist
                prules_langcode, # code of plural rules language or 'default'
                nplurals, # nplurals for current language
                get_plural_id, # get_plural_id() for current language
                construct_plural_form) # construct_plural_form() for current language
    plurals = {}
    flist = oslistdir(langdir) if isdir(langdir) else []
    # scan languages directory for plural dict files:
    for pname in flist:
        if regex_plural_file.match(pname):
            # key is the language part: strip 'plural-' prefix and '.py'
            plurals[pname[7:-3]] = (pname,
                                    ostat(pjoin(langdir, pname)).st_mtime)
    langs = {}
    # scan languages directory for langfiles:
    for fname in flist:
        if regex_langfile.match(fname) or fname == 'default.py':
            fname_with_path = pjoin(langdir, fname)
            d = read_dict(fname_with_path)
            lang = fname[:-3]
            langcode = d.get('!langcode!', lang if lang != 'default'
                             else DEFAULT_LANGUAGE)
            langname = d.get('!langname!', langcode)
            langfile_mtime = ostat(fname_with_path).st_mtime
            langs[lang] = get_lang_struct(lang, langcode,
                                          langname, langfile_mtime)
    if 'default' not in langs:
        # if default.py is not found,
        # add DEFAULT_LANGUAGE as default language:
        langs['default'] = get_lang_struct('default', DEFAULT_LANGUAGE,
                                           DEFAULT_LANGUAGE_NAME, 0)
    deflang = langs['default']
    deflangcode = deflang[0]
    if deflangcode not in langs:
        # create language from default.py:
        langs[deflangcode] = deflang[:2] + (0,) + deflang[3:]
    return langs
def read_possible_languages(langpath):
    """Cached wrapper around read_possible_languages_aux() for *langpath*."""
    loader = lambda: read_possible_languages_aux(langpath)
    return getcfs('langs:' + langpath, langpath, loader)
def read_plural_dict_aux(filename):
    """Parse the plural dictionary file *filename*.

    Returns {} for an empty file and an error-marker dict
    ``{'__corrupted__': status}`` when the file cannot be parsed.
    """
    lang_text = read_locked(filename).replace('\r\n', '\n')
    try:
        # use safe_eval (ast.literal_eval) instead of raw eval, matching
        # read_dict_aux: plural files are plain dict literals and must not
        # be able to execute arbitrary code
        return safe_eval(lang_text) or {}
    except Exception:
        e = sys.exc_info()[1]
        status = 'Syntax error in %s (%s)' % (filename, e)
        logging.error(status)
        return {'__corrupted__': status}
def read_plural_dict(filename):
    """Return the plural dictionary stored in *filename*, via the getcfs cache."""
    loader = lambda: read_plural_dict_aux(filename)
    return getcfs('plurals:' + filename, filename, loader)
def write_plural_dict(filename, contents):
    """Serialize plural dictionary *contents* to *filename* as a utf-8
    python dict literal; silently skipped for corrupted dictionaries or
    on read-only filesystems."""
    if '__corrupted__' in contents:
        return
    fp = None
    try:
        fp = LockedFile(filename, 'w')
        fp.write('#!/usr/bin/env python\n# -*- coding: utf-8 -*-\n{\n# "singular form (0)": ["first plural form (1)", "second plural form (2)", ...],\n')
        # NOTE: second positional argument of sorted() is the py2 cmp function
        for key in sorted(contents, sort_function):
            forms = '[' + ','.join([repr(Utf8(form))
                                    for form in contents[key]]) + ']'
            fp.write('%s: %s,\n' % (repr(Utf8(key)), forms))
        fp.write('}\n')
    except (IOError, OSError):
        if is_writable():
            logging.warning('Unable to write to file %s' % filename)
        return
    finally:
        if fp:
            fp.close()
def sort_function(x, y):
    """py2 cmp-style comparator: case-insensitive ordering of utf-8 byte strings."""
    return cmp(unicode(x, 'utf-8').lower(), unicode(y, 'utf-8').lower())
def write_dict(filename, contents):
    """Serialize translation dictionary *contents* to *filename* as a utf-8
    python dict literal; silently skipped for corrupted dictionaries or
    on read-only filesystems."""
    if '__corrupted__' in contents:
        return
    fp = None
    try:
        fp = LockedFile(filename, 'w')
        fp.write('# -*- coding: utf-8 -*-\n{\n')
        # NOTE: second positional argument of sorted() is the py2 cmp function
        for key in sorted(contents, sort_function):
            fp.write('%s: %s,\n' % (repr(Utf8(key)),
                                    repr(Utf8(contents[key]))))
        fp.write('}\n')
    except (IOError, OSError):
        if is_writable():
            logging.warning('Unable to write to file %s' % filename)
        return
    finally:
        if fp:
            fp.close()
class lazyT(object):
    """
    Never to be called explicitly, returned by
    translator.__call__() or translator.M()

    Stores the untranslated message and defers translation until the
    object is rendered as a string (__str__/xml), so the language chosen
    at render time is the one that applies.
    """
    # m=message, s=symbols, T=translator, f=filter, t=filter tag;
    # M flags a markmin (T.M) message, is_copy marks clones made by __mod__
    m = s = T = f = t = None
    M = is_copy = False
    def __init__(
        self,
        message,
        symbols={},
        T=None,
        filter=None,
        ftag=None,
        M=False
    ):
        # copy-construct from another lazyT, or store the raw pieces
        if isinstance(message, lazyT):
            self.m = message.m
            self.s = message.s
            self.T = message.T
            self.f = message.f
            self.t = message.t
            self.M = message.M
            self.is_copy = True
        else:
            self.m = message
            self.s = symbols
            self.T = T
            self.f = filter
            self.t = ftag
            self.M = M
            self.is_copy = False
    def __repr__(self):
        return "<lazyT %s>" % (repr(Utf8(self.m)), )
    def __str__(self):
        # translation happens here, as late as possible
        return str(self.T.apply_filter(self.m, self.s, self.f, self.t) if self.M else
                   self.T.translate(self.m, self.s))
    def __eq__(self, other):
        return str(self) == str(other)
    def __ne__(self, other):
        return str(self) != str(other)
    def __add__(self, other):
        return '%s%s' % (self, other)
    def __radd__(self, other):
        return '%s%s' % (other, self)
    def __mul__(self, other):
        return str(self) * other
    def __cmp__(self, other):
        return cmp(str(self), str(other))
    def __hash__(self):
        return hash(str(self))
    def __getattr__(self, name):
        # delegate any other attribute/method to the translated string
        return getattr(str(self), name)
    def __getitem__(self, i):
        return str(self)[i]
    def __getslice__(self, i, j):
        return str(self)[i:j]
    def __iter__(self):
        for c in str(self):
            yield c
    def __len__(self):
        return len(str(self))
    def xml(self):
        # markmin messages are already HTML; plain messages get escaped
        return str(self) if self.M else escape(str(self))
    def encode(self, *a, **b):
        return str(self).encode(*a, **b)
    def decode(self, *a, **b):
        return str(self).decode(*a, **b)
    def read(self):
        return str(self)
    def __mod__(self, symbols):
        # T("...") % symbols: rebind symbols in a new lazyT
        if self.is_copy:
            return lazyT(self)
        return lazyT(self.m, symbols, self.T, self.f, self.t, self.M)
def pickle_lazyT(c):
    """Pickle helper: reduce a lazyT to its rendered string (translation
    is applied eagerly at pickling time)."""
    rendered = c.xml()
    return (str, (rendered,))
# register the reducer so lazyT instances pickle as plain strings
copy_reg.pickle(lazyT, pickle_lazyT)
class translator(object):
    """
    This class is instantiated by gluon.compileapp.build_environment
    as the T object
    Example:
        T.force(None) # turns off translation
        T.force('fr, it') # forces web2py to translate using fr.py or it.py
        T("Hello World") # translates "Hello World" using the selected file
    Note:
        - there is no need to force since, by default, T uses
          http_accept_language to determine a translation file.
        - en and en-en are considered different languages!
        - if language xx-yy is not found force() probes other similar languages
          using such algorithm: `xx-yy.py -> xx.py -> xx-yy*.py -> xx*.py`
    """
    def __init__(self, langpath, http_accept_language):
        """Remember paths, then select a language matching *http_accept_language*."""
        self.langpath = langpath
        self.http_accept_language = http_accept_language
        # filled in self.force():
        # ------------------------
        # self.cache
        # self.accepted_language
        # self.language_file
        # self.plural_language
        # self.nplurals
        # self.get_plural_id
        # self.construct_plural_form
        # self.plural_file
        # self.plural_dict
        # self.requested_languages
        # ----------------------------------------
        # filled in self.set_current_languages():
        # ----------------------------------------
        # self.default_language_file
        # self.default_t
        # self.current_languages
        self.set_current_languages()
        self.lazy = True
        self.otherTs = {}
        self.filter = markmin
        self.ftag = 'markmin'
        self.ns = None
        self.is_writable = True
    def get_possible_languages_info(self, lang=None):
        """
        Returns info for selected language or dictionary with all
        possible languages info from `APP/languages/*.py`
        It Returns:
        - a tuple containing::
            langcode, langname, langfile_mtime,
            pluraldict_fname, pluraldict_mtime,
            prules_langcode, nplurals,
            get_plural_id, construct_plural_form
          or None
        - if *lang* is NOT defined a dictionary with all possible
          languages::
            { langcode(from filename):
                ( langcode, # language code from !langcode!
                  langname,
                  # language name in national spelling from !langname!
                  langfile_mtime, # m_time of language file
                  pluraldict_fname, # name of plural dictionary file or None (when default.py is not exist)
                  pluraldict_mtime, # m_time of plural dictionary file or 0 if file is not exist
                  prules_langcode, # code of plural rules language or 'default'
                  nplurals, # nplurals for current language
                  get_plural_id, # get_plural_id() for current language
                  construct_plural_form) # construct_plural_form() for current language
            }
        Args:
            lang (str): language
        """
        info = read_possible_languages(self.langpath)
        if lang:
            info = info.get(lang)
        return info
    def get_possible_languages(self):
        """ Gets list of all possible languages for current application """
        return list(set(self.current_languages +
                        [lang for lang in read_possible_languages(self.langpath).iterkeys()
                         if lang != 'default']))
    def set_current_languages(self, *languages):
        """
        Sets current AKA "default" languages
        Setting one of this languages makes the force() function to turn
        translation off
        """
        if len(languages) == 1 and isinstance(languages[0], (tuple, list)):
            languages = languages[0]
        if not languages or languages[0] is None:
            # set default language from default.py/DEFAULT_LANGUAGE
            pl_info = self.get_possible_languages_info('default')
            if pl_info[2] == 0: # langfile_mtime
                # if languages/default.py is not found
                self.default_language_file = self.langpath
                self.default_t = {}
                self.current_languages = [DEFAULT_LANGUAGE]
            else:
                self.default_language_file = pjoin(self.langpath,
                                                   'default.py')
                self.default_t = read_dict(self.default_language_file)
                self.current_languages = [pl_info[0]] # !langcode!
        else:
            self.current_languages = list(languages)
        self.force(self.http_accept_language)
    def plural(self, word, n):
        """
        Gets plural form of word for number *n*
        invoked from T()/T.M() in `%%{}` tag
        Note:
            "word" MUST be defined in current language (T.accepted_language)
        Args:
            word (str): word in singular
            n (numeric): number plural form created for
        Returns:
            word (str): word in appropriate singular/plural form
        """
        if int(n) == 1:
            return word
        elif word:
            id = self.get_plural_id(abs(int(n)))
            # id = 0 singular form
            # id = 1 first plural form
            # id = 2 second plural form
            # etc.
            if id != 0:
                forms = self.plural_dict.get(word, [])
                if len(forms) >= id:
                    # have this plural form:
                    return forms[id - 1]
                else:
                    # guessing this plural form
                    forms += [''] * (self.nplurals - len(forms) - 1)
                    form = self.construct_plural_form(word, id)
                    forms[id - 1] = form
                    self.plural_dict[word] = forms
                    # persist the guessed form for later hand-editing
                    if self.is_writable and is_writable() and self.plural_file:
                        write_plural_dict(self.plural_file,
                                          self.plural_dict)
                    return form
        return word
    def force(self, *languages):
        """
        Selects language(s) for translation
        if a list of languages is passed as a parameter,
        the first language from this list that matches the ones
        from the possible_languages dictionary will be
        selected
        default language will be selected if none
        of them matches possible_languages.
        """
        pl_info = read_possible_languages(self.langpath)
        def set_plural(language):
            """
            initialize plural forms subsystem
            """
            lang_info = pl_info.get(language)
            if lang_info:
                (pname,
                 pmtime,
                 self.plural_language,
                 self.nplurals,
                 self.get_plural_id,
                 self.construct_plural_form
                 ) = lang_info[3:]
                pdict = {}
                if pname:
                    pname = pjoin(self.langpath, pname)
                    if pmtime != 0:
                        pdict = read_plural_dict(pname)
                self.plural_file = pname
                self.plural_dict = pdict
            else:
                self.plural_language = 'default'
                self.nplurals = DEFAULT_NPLURALS
                self.get_plural_id = DEFAULT_GET_PLURAL_ID
                self.construct_plural_form = DEFAULT_CONSTRUCT_PLURAL_FORM
                self.plural_file = None
                self.plural_dict = {}
        language = ''
        if len(languages) == 1 and isinstance(languages[0], str):
            languages = regex_language.findall(languages[0].lower())
        elif not languages or languages[0] is None:
            languages = []
        self.requested_languages = languages = tuple(languages)
        if languages:
            all_languages = set(lang for lang in pl_info.iterkeys()
                                if lang != 'default') \
                | set(self.current_languages)
            for lang in languages:
                # compare "aa-bb" | "aa" from *language* parameter
                # with strings from langlist using such algorithm:
                # xx-yy.py -> xx.py -> xx*.py
                lang5 = lang[:5]
                if lang5 in all_languages:
                    language = lang5
                else:
                    lang2 = lang[:2]
                    if len(lang5) > 2 and lang2 in all_languages:
                        language = lang2
                    else:
                        for l in all_languages:
                            if l[:2] == lang2:
                                language = l
                if language:
                    if language in self.current_languages:
                        break
                    self.language_file = pjoin(self.langpath, language + '.py')
                    self.t = read_dict(self.language_file)
                    self.cache = global_language_cache.setdefault(
                        self.language_file,
                        ({}, RLock()))
                    set_plural(language)
                    self.accepted_language = language
                    return languages
        # no usable match: fall back to the default language
        self.accepted_language = language
        if not language:
            if self.current_languages:
                self.accepted_language = self.current_languages[0]
            else:
                self.accepted_language = DEFAULT_LANGUAGE
        self.language_file = self.default_language_file
        self.cache = global_language_cache.setdefault(self.language_file,
                                                      ({}, RLock()))
        self.t = self.default_t
        set_plural(self.accepted_language)
        return languages
    def __call__(self, message, symbols={}, language=None, lazy=None, ns=None):
        """
        get cached translated plain text message with inserted parameters(symbols)
        if lazy==True lazyT object is returned
        """
        if lazy is None:
            lazy = self.lazy
        if not language and not ns:
            if lazy:
                return lazyT(message, symbols, self)
            else:
                return self.translate(message, symbols)
        else:
            if ns:
                if ns != self.ns:
                    self.langpath = os.path.join(self.langpath, ns)
                if self.ns is None:
                    self.ns = ns
            otherT = self.__get_otherT__(language, ns)
            return otherT(message, symbols, lazy=lazy)
    def __get_otherT__(self, language=None, namespace=None):
        """Return (creating and caching on first use) a translator for the
        given *language* and/or *namespace*."""
        if not language and not namespace:
            raise Exception('Incorrect parameters')
        if namespace:
            if language:
                index = '%s/%s' % (namespace, language)
            else:
                index = namespace
        else:
            index = language
        try:
            otherT = self.otherTs[index]
        except KeyError:
            otherT = self.otherTs[index] = translator(self.langpath,
                                                      self.http_accept_language)
            if language:
                otherT.force(language)
        return otherT
    def apply_filter(self, message, symbols={}, filter=None, ftag=None):
        """Translate *message*, run it through the markmin/user filter and
        substitute *symbols*; returns an XML object."""
        def get_tr(message, prefix, filter):
            s = self.get_t(message, prefix)
            return filter(s) if filter else self.filter(s)
        if filter:
            prefix = '@' + (ftag or 'userdef') + '\x01'
        else:
            prefix = '@' + self.ftag + '\x01'
        message = get_from_cache(
            self.cache, prefix + message,
            lambda: get_tr(message, prefix, filter))
        if symbols or symbols == 0 or symbols == "":
            if isinstance(symbols, dict):
                # NOTE(review): updates the caller's *symbols* dict in place
                symbols.update(
                    (key, xmlescape(value).translate(ttab_in))
                    for key, value in symbols.iteritems()
                    if not isinstance(value, NUMBERS))
            else:
                if not isinstance(symbols, tuple):
                    symbols = (symbols,)
                symbols = tuple(
                    value if isinstance(value, NUMBERS)
                    else xmlescape(value).translate(ttab_in)
                    for value in symbols)
            message = self.params_substitution(message, symbols)
        return XML(message.translate(ttab_out))
    def M(self, message, symbols={}, language=None,
          lazy=None, filter=None, ftag=None, ns=None):
        """
        Gets cached translated markmin-message with inserted parameters
        if lazy==True lazyT object is returned
        """
        if lazy is None:
            lazy = self.lazy
        if not language and not ns:
            if lazy:
                return lazyT(message, symbols, self, filter, ftag, True)
            else:
                return self.apply_filter(message, symbols, filter, ftag)
        else:
            if ns:
                self.langpath = os.path.join(self.langpath, ns)
            otherT = self.__get_otherT__(language, ns)
            return otherT.M(message, symbols, lazy=lazy)
    def get_t(self, message, prefix=''):
        """
        Use ## to add a comment into a translation string
        the comment can be useful to discriminate different possible
        translations for the same string (for example different locations)::
            T(' hello world ') -> ' hello world '
            T(' hello world ## token') -> ' hello world '
            T('hello ## world## token') -> 'hello ## world'
        the ## notation is ignored in multiline strings and strings that
        start with ##. This is needed to allow markmin syntax to be translated
        """
        if isinstance(message, unicode):
            message = message.encode('utf8')
        if isinstance(prefix, unicode):
            prefix = prefix.encode('utf8')
        key = prefix + message
        mt = self.t.get(key, None)
        if mt is not None:
            return mt
        # we did not find a translation
        if message.find('##') > 0 and not '\n' in message:
            # remove comments
            message = message.rsplit('##', 1)[0]
        # guess translation same as original
        self.t[key] = mt = self.default_t.get(key, message)
        # update language file for later translation
        if self.is_writable and is_writable() and \
                self.language_file != self.default_language_file:
            write_dict(self.language_file, self.t)
        return regex_backslash.sub(
            lambda m: m.group(1).translate(ttab_in), mt)
    def params_substitution(self, message, symbols):
        """
        Substitutes parameters from symbols into message using %.
        also parse `%%{}` placeholders for plural-forms processing.
        Returns:
            string with parameters
        Note:
            *symbols* MUST BE OR tuple OR dict of parameters!
        """
        def sub_plural(m):
            """String in `%{}` is transformed by this rules:
            If string starts with `\\`, `!` or `?` such transformations
            take place::
                "!string of words" -> "String of word" (Capitalize)
                "!!string of words" -> "String Of Word" (Title)
                "!!!string of words" -> "STRING OF WORD" (Upper)
                "\\!string of words" -> "!string of word"
                    (remove \\ and disable transformations)
                "?word?number" -> "word" (return word, if number == 1)
                "?number" or "??number" -> "" (remove number,
                    if number == 1)
                "?word?number" -> "number" (if number != 1)
            """
            def sub_tuple(m):
                """ word[number], !word[number], !!word[number], !!!word[number]
                    word, !word, !!word, !!!word, ?word?number, ??number, ?number
                    ?word?word[number], ?word?[number], ??word[number]
                """
                w, i = m.group('w', 'i')
                c = w[0]
                if c not in '!?':
                    return self.plural(w, symbols[int(i or 0)])
                elif c == '?':
                    (p1, sep, p2) = w[1:].partition("?")
                    part1 = p1 if sep else ""
                    (part2, sep, part3) = (p2 if sep else p1).partition("?")
                    if not sep:
                        part3 = part2
                    if i is None:
                        # ?[word]?number[?number] or ?number
                        if not part2:
                            return m.group(0)
                        num = int(part2)
                    else:
                        # ?[word]?word2[?word3][number]
                        num = int(symbols[int(i or 0)])
                    return part1 if num == 1 else part3 if num == 0 else part2
                elif w.startswith('!!!'):
                    word = w[3:]
                    fun = upper_fun
                elif w.startswith('!!'):
                    word = w[2:]
                    fun = title_fun
                else:
                    word = w[1:]
                    fun = cap_fun
                if i is not None:
                    return fun(self.plural(word, symbols[int(i)]))
                return fun(word)
            def sub_dict(m):
                """ word(var), !word(var), !!word(var), !!!word(var)
                    word(num), !word(num), !!word(num), !!!word(num)
                    ?word2(var), ?word1?word2(var), ?word1?word2?word0(var)
                    ?word2(num), ?word1?word2(num), ?word1?word2?word0(num)
                """
                w, n = m.group('w', 'n')
                c = w[0]
                n = int(n) if n.isdigit() else symbols[n]
                if c not in '!?':
                    return self.plural(w, n)
                elif c == '?':
                    # ?[word1]?word2[?word0](var or num), ?[word1]?word2(var or num) or ?word2(var or num)
                    (p1, sep, p2) = w[1:].partition("?")
                    part1 = p1 if sep else ""
                    (part2, sep, part3) = (p2 if sep else p1).partition("?")
                    if not sep:
                        part3 = part2
                    num = int(n)
                    return part1 if num == 1 else part3 if num == 0 else part2
                elif w.startswith('!!!'):
                    word = w[3:]
                    fun = upper_fun
                elif w.startswith('!!'):
                    word = w[2:]
                    fun = title_fun
                else:
                    word = w[1:]
                    fun = cap_fun
                return fun(self.plural(word, n))
            s = m.group(1)
            # try the %%{word[index]} form first, then %%{word(name)}
            part = regex_plural_tuple.sub(sub_tuple, s)
            if part == s:
                part = regex_plural_dict.sub(sub_dict, s)
                if part == s:
                    return m.group(0)
            return part
        message = message % symbols
        message = regex_plural.sub(sub_plural, message)
        return message
    def translate(self, message, symbols):
        """
        Gets cached translated message with inserted parameters(symbols)
        """
        message = get_from_cache(self.cache, message,
                                 lambda: self.get_t(message))
        if symbols or symbols == 0 or symbols == "":
            if isinstance(symbols, dict):
                # NOTE(review): updates the caller's *symbols* dict in place
                symbols.update(
                    (key, str(value).translate(ttab_in))
                    for key, value in symbols.iteritems()
                    if not isinstance(value, NUMBERS))
            else:
                if not isinstance(symbols, tuple):
                    symbols = (symbols,)
                symbols = tuple(
                    value if isinstance(value, NUMBERS)
                    else str(value).translate(ttab_in)
                    for value in symbols)
            message = self.params_substitution(message, symbols)
        return message.translate(ttab_out)
def findT(path, language=DEFAULT_LANGUAGE):
    """Scan an application's sources for T(...)/T.M(...) string literals and
    merge any new messages into its language file.
    Note:
        Must be run by the admin app
    """
    lang_file = pjoin(path, 'languages', language + '.py')
    sentences = read_dict(lang_file)
    mp = pjoin(path, 'models')
    cp = pjoin(path, 'controllers')
    vp = pjoin(path, 'views')
    mop = pjoin(path, 'modules')
    # collect translatable literals from models, controllers, views, modules
    for filename in \
        listdir(mp, '^.+\.py$', 0) + listdir(cp, '^.+\.py$', 0)\
        + listdir(vp, '^.+\.html$', 0) + listdir(mop, '^.+\.py$', 0):
        data = read_locked(filename)
        items = regex_translate.findall(data)
        items += regex_translate_m.findall(data)
        for item in items:
            try:
                message = safe_eval(item)
            except:
                continue # silently ignore improperly formatted strings
            if not message.startswith('#') and not '\n' in message:
                tokens = message.rsplit('##', 1)
            else:
                # this allows markmin syntax in translations
                tokens = [message]
            if len(tokens) == 2:
                message = tokens[0].strip() + '##' + tokens[1].strip()
            if message and not message in sentences:
                sentences[message] = message
    if not '!langcode!' in sentences:
        sentences['!langcode!'] = (
            DEFAULT_LANGUAGE if language in ('default', DEFAULT_LANGUAGE) else language)
    if not '!langname!' in sentences:
        sentences['!langname!'] = (
            DEFAULT_LANGUAGE_NAME if language in ('default', DEFAULT_LANGUAGE)
            else sentences['!langcode!'])
    write_dict(lang_file, sentences)
def update_all_languages(application_path):
    """Run findT() for every language file found in the application's
    languages directory.
    Note:
        Must be run by the admin app
    """
    langdir = pjoin(application_path, 'languages/')
    for fname in oslistdir(langdir):
        if regex_langfile.match(fname):
            findT(application_path, fname[:-3])
def update_from_langfile(target, source, force_update=False):
    """Fill untranslated messages in language file *target* from *source*.

    Useful as a first step when creating a language file for a new but very
    similar language, when updating an app from a newer welcome app, or in
    non-standard scenarios where *source* holds a partial translation.

    Args:
        force_update: if False existing translations remain unchanged,
            if True existing translations are overwritten from *source*
    """
    src = read_dict(source)
    sentences = read_dict(target)
    for key, val in sentences.items():
        if force_update or not val or val == key:
            new_val = src.get(key)
            if new_val and new_val != val:
                sentences[key] = new_val
    write_dict(target, sentences)
if __name__ == '__main__':
    # run the module doctests when executed directly
    import doctest
    doctest.testmod()
| mit |
mdrumond/tensorflow | tensorflow/contrib/slim/python/slim/evaluation.py | 26 | 11161 | # Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Contains functions for evaluation and summarization of metrics.
The evaluation.py module contains helper functions for evaluating TensorFlow
modules using a variety of metrics and summarizing the results.
**********************
* Evaluating Metrics *
**********************
In the simplest use case, we use a model to create the predictions, then specify
the metrics and finally call the `evaluation` method:
# Create model and obtain the predictions:
images, labels = LoadData(...)
predictions = MyModel(images)
# Choose the metrics to compute:
names_to_values, names_to_updates = slim.metrics.aggregate_metric_map({
"accuracy": slim.metrics.accuracy(predictions, labels),
"mse": slim.metrics.mean_squared_error(predictions, labels),
})
  initial_op = tf.group(
tf.global_variables_initializer(),
tf.local_variables_initializer())
with tf.Session() as sess:
metric_values = slim.evaluation(
sess,
num_evals=1,
        initial_op=initial_op,
        eval_op=names_to_updates.values(),
        final_op=names_to_values.values())
for metric, value in zip(names_to_values.keys(), metric_values):
logging.info('Metric %s has value: %f', metric, value)
************************************************
* Evaluating a Checkpointed Model with Metrics *
************************************************
Often, one wants to evaluate a model checkpoint saved on disk. This can be
performed once or repeatedly on a set schedule.
To evaluate a particular model, users define zero or more metrics and zero or
more summaries and call the evaluation_loop method:
# Create model and obtain the predictions:
images, labels = LoadData(...)
predictions = MyModel(images)
# Choose the metrics to compute:
names_to_values, names_to_updates = slim.metrics.aggregate_metric_map({
"accuracy": slim.metrics.accuracy(predictions, labels),
"mse": slim.metrics.mean_squared_error(predictions, labels),
})
# Define the summaries to write:
  for metric_name, metric_value in names_to_values.iteritems():
tf.summary.scalar(metric_name, metric_value)
checkpoint_dir = '/tmp/my_model_dir/'
log_dir = '/tmp/my_model_eval/'
# We'll evaluate 1000 batches:
num_evals = 1000
# Evaluate every 10 minutes:
slim.evaluation_loop(
'',
checkpoint_dir,
      log_dir,
num_evals=num_evals,
eval_op=names_to_updates.values(),
summary_op=tf.contrib.deprecated.merge_summary(summary_ops),
eval_interval_secs=600)
**************************************************
* Evaluating a Checkpointed Model with Summaries *
**************************************************
At times, an evaluation can be performed without metrics at all but rather
with only summaries. The user need only leave out the 'eval_op' argument:
# Create model and obtain the predictions:
images, labels = LoadData(...)
predictions = MyModel(images)
# Define the summaries to write:
tf.summary.scalar(...)
tf.summary.histogram(...)
checkpoint_dir = '/tmp/my_model_dir/'
log_dir = '/tmp/my_model_eval/'
# Evaluate once every 10 minutes.
slim.evaluation_loop(
      '',
      checkpoint_dir,
      log_dir,
num_evals=1,
summary_op=tf.contrib.deprecated.merge_summary(summary_ops),
eval_interval_secs=600)
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow.contrib.training.python.training import evaluation
from tensorflow.python.summary import summary
from tensorflow.python.training import monitored_session
from tensorflow.python.training import saver as tf_saver
# public API of this module
__all__ = [
    'evaluate_once',
    'evaluation_loop',
    'wait_for_new_checkpoint',
    'checkpoints_iterator',
]
# re-exported helpers from tf.contrib.training.evaluation
wait_for_new_checkpoint = evaluation.wait_for_new_checkpoint
checkpoints_iterator = evaluation.checkpoints_iterator
# sentinel distinguishing "argument not passed" from an explicit None summary_op
_USE_DEFAULT = 0
def evaluate_once(master,
                  checkpoint_path,
                  logdir,
                  num_evals=1,
                  initial_op=None,
                  initial_op_feed_dict=None,
                  eval_op=None,
                  eval_op_feed_dict=None,
                  final_op=None,
                  final_op_feed_dict=None,
                  summary_op=_USE_DEFAULT,
                  summary_op_feed_dict=None,
                  variables_to_restore=None,
                  session_config=None):
  """Evaluates the model once at the given checkpoint path.

  Args:
    master: The BNS address of the TensorFlow master.
    checkpoint_path: The path to a checkpoint to use for evaluation.
    logdir: The directory where the TensorFlow summaries are written to.
    num_evals: The number of times to run `eval_op`.
    initial_op: An operation run at the beginning of evaluation.
    initial_op_feed_dict: A feed dictionary to use when executing `initial_op`.
    eval_op: A operation run `num_evals` times.
    eval_op_feed_dict: The feed dictionary to use when executing the `eval_op`.
    final_op: An operation to execute after all of the `eval_op` executions. The
      value of `final_op` is returned.
    final_op_feed_dict: A feed dictionary to use when executing `final_op`.
    summary_op: The summary_op to evaluate after running TF-Slims metric ops. By
      default the summary_op is set to tf.summary.merge_all().
    summary_op_feed_dict: An optional feed dictionary to use when running the
      `summary_op`.
    variables_to_restore: A list of TensorFlow variables to restore during
      evaluation. If the argument is left as `None` then
      slim.variables.GetVariablesToRestore() is used.
    session_config: An instance of `tf.ConfigProto` that will be used to
      configure the `Session`. If left as `None`, the default will be used.

  Returns:
    The value of `final_op` or `None` if `final_op` is `None`.
  """
  if summary_op == _USE_DEFAULT:
    summary_op = summary.merge_all()

  # Always stop after `num_evals` batches; optionally write summaries at end.
  hooks = [evaluation.StopAfterNEvalsHook(num_evals)]
  if summary_op is not None:
    hooks.append(
        evaluation.SummaryAtEndHook(
            log_dir=logdir,
            summary_op=summary_op,
            feed_dict=summary_op_feed_dict))

  saver = (tf_saver.Saver(variables_to_restore)
           if variables_to_restore is not None else None)
  scaffold = monitored_session.Scaffold(
      init_op=initial_op, init_feed_dict=initial_op_feed_dict, saver=saver)

  return evaluation.evaluate_once(
      checkpoint_path,
      master=master,
      scaffold=scaffold,
      eval_ops=eval_op,
      feed_dict=eval_op_feed_dict,
      final_ops=final_op,
      final_ops_feed_dict=final_op_feed_dict,
      hooks=hooks,
      config=session_config)
def evaluation_loop(master,
                    checkpoint_dir,
                    logdir,
                    num_evals=1,
                    initial_op=None,
                    initial_op_feed_dict=None,
                    init_fn=None,
                    eval_op=None,
                    eval_op_feed_dict=None,
                    final_op=None,
                    final_op_feed_dict=None,
                    summary_op=_USE_DEFAULT,
                    summary_op_feed_dict=None,
                    variables_to_restore=None,
                    eval_interval_secs=60,
                    max_number_of_evaluations=None,
                    session_config=None,
                    timeout=None,
                    hooks=None):
  """Runs TF-Slim's Evaluation Loop.
  Args:
    master: The BNS address of the TensorFlow master.
    checkpoint_dir: The directory where checkpoints are stored.
    logdir: The directory where the TensorFlow summaries are written to.
    num_evals: The number of times to run `eval_op`.
    initial_op: An operation run at the beginning of evaluation.
    initial_op_feed_dict: A feed dictionary to use when executing `initial_op`.
    init_fn: An optional callable to be executed after `init_op` is called. The
      callable must accept one argument, the session being initialized.
    eval_op: A operation run `num_evals` times.
    eval_op_feed_dict: The feed dictionary to use when executing the `eval_op`.
    final_op: An operation to execute after all of the `eval_op` executions. The
      value of `final_op` is returned.
    final_op_feed_dict: A feed dictionary to use when executing `final_op`.
    summary_op: The summary_op to evaluate after running TF-Slims metric ops. By
      default the summary_op is set to tf.summary.merge_all().
    summary_op_feed_dict: An optional feed dictionary to use when running the
      `summary_op`.
    variables_to_restore: A list of TensorFlow variables to restore during
      evaluation. If the argument is left as `None` then
      slim.variables.GetVariablesToRestore() is used.
    eval_interval_secs: The minimum number of seconds between evaluations.
    max_number_of_evaluations: the max number of iterations of the evaluation.
      If the value is left as 'None', the evaluation continues indefinitely.
    session_config: An instance of `tf.ConfigProto` that will be used to
      configure the `Session`. If left as `None`, the default will be used.
    timeout: The maximum amount of time to wait between checkpoints. If left as
      `None`, then the process will wait indefinitely.
    hooks: A list of additional SessionRunHook objects to pass during
      repeated evaluations.
  Returns:
    The value of `final_op` or `None` if `final_op` is `None`.
  """
  # `_USE_DEFAULT` is a sentinel: merge every summary in the graph unless the
  # caller supplied a summary_op explicitly (or passed None to disable them).
  if summary_op == _USE_DEFAULT:
    summary_op = summary.merge_all()
  # Each evaluation pass stops after `eval_op` has run `num_evals` times.
  all_hooks = [evaluation.StopAfterNEvalsHook(num_evals),]
  if summary_op is not None:
    all_hooks.append(evaluation.SummaryAtEndHook(
        log_dir=logdir, summary_op=summary_op, feed_dict=summary_op_feed_dict))
  if hooks is not None:
    # Add custom hooks if provided.
    all_hooks.extend(hooks)
  saver = None
  if variables_to_restore is not None:
    # Restrict restoration to the requested variables; otherwise the Scaffold
    # builds its default Saver.
    saver = tf_saver.Saver(variables_to_restore)
  return evaluation.evaluate_repeatedly(
      checkpoint_dir,
      master=master,
      scaffold=monitored_session.Scaffold(
          init_op=initial_op, init_feed_dict=initial_op_feed_dict,
          init_fn=init_fn, saver=saver),
      eval_ops=eval_op,
      feed_dict=eval_op_feed_dict,
      final_ops=final_op,
      final_ops_feed_dict=final_op_feed_dict,
      eval_interval_secs=eval_interval_secs,
      hooks=all_hooks,
      config=session_config,
      max_number_of_evaluations=max_number_of_evaluations,
      timeout=timeout)
| apache-2.0 |
gundalow/ansible | lib/ansible/vars/hostvars.py | 51 | 5183 | # (c) 2012-2014, Michael DeHaan <michael.dehaan@gmail.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
from ansible.module_utils.common._collections_compat import Mapping
from ansible.template import Templar, AnsibleUndefined
STATIC_VARS = [
'ansible_version',
'ansible_play_hosts',
'ansible_dependent_role_names',
'ansible_play_role_names',
'ansible_role_names',
'inventory_hostname',
'inventory_hostname_short',
'inventory_file',
'inventory_dir',
'groups',
'group_names',
'omit',
'playbook_dir',
'play_hosts',
'role_names',
'ungrouped',
]
__all__ = ['HostVars', 'HostVarsVars']
# Note -- this is a Mapping, not a MutableMapping
class HostVars(Mapping):
    ''' A special view of vars_cache that adds values from the inventory when needed. '''

    def __init__(self, inventory, variable_manager, loader):
        self._inventory = inventory
        self._loader = loader
        self._variable_manager = variable_manager
        # Back-reference so the variable manager can reach this view while
        # templating one host's vars against the others.
        variable_manager._hostvars = self

    def set_variable_manager(self, variable_manager):
        # Swap in a new variable manager, keeping the back-reference intact.
        self._variable_manager = variable_manager
        variable_manager._hostvars = self

    def set_inventory(self, inventory):
        self._inventory = inventory

    def _find_host(self, host_name):
        # does not use inventory.hosts so it can create localhost on demand
        return self._inventory.get_host(host_name)

    def raw_get(self, host_name):
        '''
        Similar to __getitem__, however the returned data is not run through
        the templating engine to expand variables in the hostvars.
        '''
        host = self._find_host(host_name)
        if host is None:
            # Unknown host: return an undefined marker rather than raising, so
            # templates can test for it with Jinja2's defined/undefined checks.
            return AnsibleUndefined(name="hostvars['%s']" % host_name)
        return self._variable_manager.get_vars(host=host, include_hostvars=False)

    def __setstate__(self, state):
        self.__dict__.update(state)

        # Methods __getstate__ and __setstate__ of VariableManager do not
        # preserve _loader and _hostvars attributes to improve pickle
        # performance and memory utilization. Since HostVars holds values
        # of those attributes already, assign them if needed.
        if self._variable_manager._loader is None:
            self._variable_manager._loader = self._loader

        if self._variable_manager._hostvars is None:
            self._variable_manager._hostvars = self

    def __getitem__(self, host_name):
        data = self.raw_get(host_name)
        if isinstance(data, AnsibleUndefined):
            return data
        # Wrap in HostVarsVars so values are templated lazily, on access.
        return HostVarsVars(data, loader=self._loader)

    def set_host_variable(self, host, varname, value):
        # Thin delegation to the variable manager.
        self._variable_manager.set_host_variable(host, varname, value)

    def set_nonpersistent_facts(self, host, facts):
        # Thin delegation to the variable manager.
        self._variable_manager.set_nonpersistent_facts(host, facts)

    def set_host_facts(self, host, facts):
        # Thin delegation to the variable manager.
        self._variable_manager.set_host_facts(host, facts)

    def __contains__(self, host_name):
        # does not use inventory.hosts so it can create localhost on demand
        return self._find_host(host_name) is not None

    def __iter__(self):
        # Iteration and length reflect the inventory's current host list.
        for host in self._inventory.hosts:
            yield host

    def __len__(self):
        return len(self._inventory.hosts)

    def __repr__(self):
        out = {}
        for host in self._inventory.hosts:
            out[host] = self.get(host)
        return repr(out)

    def __deepcopy__(self, memo):
        # We do not need to deepcopy because HostVars is immutable,
        # however we have to implement the method so we can deepcopy
        # variables' dicts that contain HostVars.
        return self
class HostVarsVars(Mapping):
    ''' A read-only mapping over a single host's variables that templates
    each value on access, using the host's own vars as the template
    context. '''

    def __init__(self, variables, loader):
        self._vars = variables
        self._loader = loader

    def __getitem__(self, var):
        # Template on demand so variables referencing other variables are
        # expanded with this host's context. STATIC_VARS are left as-is.
        templar = Templar(variables=self._vars, loader=self._loader)
        return templar.template(self._vars[var], fail_on_undefined=False, static_vars=STATIC_VARS)

    def __contains__(self, var):
        return var in self._vars

    def __iter__(self):
        return iter(self._vars)

    def __len__(self):
        # len() of the dict directly; no need to materialize .keys().
        return len(self._vars)

    def __repr__(self):
        templar = Templar(variables=self._vars, loader=self._loader)
        return repr(templar.template(self._vars, fail_on_undefined=False, static_vars=STATIC_VARS))
| gpl-3.0 |
pford68/nupic.research | drive/drive/motor.py | 16 | 1705 | import random
class Motor(object):
  """Abstract base for motors that move a vehicle across the field.

  Subclasses implement move() and are expected to perturb the commanded
  motion with Gaussian noise drawn from ``noise`` (mu, sigma), recording
  the sampled value in ``noiseAmount``.
  """

  def __init__(self, noise=(0.0, 0.0)):
    # (mu, sigma) for random.gauss; (0.0, 0.0) makes motion deterministic.
    self.noise = noise
    # Last sampled noise value; updated by subclasses on each move().
    self.noiseAmount = 0.0

  def move(self, motorValue, vehicle):
    # Subclasses must define how motorValue changes the vehicle's state.
    raise NotImplementedError
class AccelerationMotor(Motor):
  """Motor whose command sets the vehicle's acceleration directly."""

  def __init__(self, frictionCoefficient=0.1, noise=(0.0, 0.0)):
    super(AccelerationMotor, self).__init__(noise=noise)
    self.frictionCoefficient = frictionCoefficient

  def move(self, motorValue, vehicle):
    # Sample this step's noise and keep it around for inspection.
    self.noiseAmount = random.gauss(*self.noise)
    vehicle.acceleration = motorValue + self.noiseAmount
    # Friction opposes the pre-update velocity.
    vehicle.velocity += vehicle.acceleration - vehicle.velocity * self.frictionCoefficient
    # Advance and wrap around the field (toroidal in x).
    vehicle.position = (vehicle.position + vehicle.velocity) % vehicle.field.width
class JerkMotor(Motor):
  """Motor whose command sets the vehicle's jerk (change in acceleration)."""
  # TODO: Refactor with AccelerationMotor

  def __init__(self, frictionCoefficient=0.1, noise=(0.0, 0.0), scale=0.3):
    super(JerkMotor, self).__init__(noise=noise)
    self.frictionCoefficient = frictionCoefficient
    self.scale = scale

  def move(self, motorValue, vehicle):
    # Sample this step's noise and keep it around for inspection.
    self.noiseAmount = random.gauss(*self.noise)
    vehicle.jerk = motorValue * self.scale + self.noiseAmount
    vehicle.acceleration += vehicle.jerk
    # Friction opposes the pre-update velocity.
    vehicle.velocity += vehicle.acceleration - vehicle.velocity * self.frictionCoefficient
    # Advance and wrap around the field (toroidal in x).
    vehicle.position = (vehicle.position + vehicle.velocity) % vehicle.field.width
class PositionMotor(Motor):
  """Motor whose command displaces the vehicle's position directly."""

  def move(self, motorValue, vehicle):
    self.noiseAmount = random.gauss(*self.noise)
    # Displace by the (noisy) command and wrap around the field width.
    vehicle.position = (vehicle.position + motorValue + self.noiseAmount) % vehicle.field.width
| gpl-3.0 |
gjtempleton/matasano_cryptopals | set1/challenge_3.py | 1 | 1749 | import binascii
from .challenge_2 import hex_xor
# Source: http://www.math.cornell.edu/~mec/2003-2004/cryptography/subs/frequencies.html
REL_ENG_CHAR_FREQS = {
'E': .1202,
'T': .0910,
'A': .0812,
'O': .0768,
'I': .0731,
'N': .0695,
'S': .0628,
'R': .0602,
'H': .0592,
'D': .0432,
'L': .0398,
'U': .0288,
'C': .0271,
'M': .0261,
'F': .0230,
'Y': .0211,
'W': .0209,
'G': .0203,
'P': .0182,
'B': .0149,
'V': .0111,
'K': .0069,
'X': .0017,
'Q': .0011,
'J': .0010,
'Z': .0007
}
def single_byte_xor_reverse(hex_string):
    """Recover the plaintext of a hex string XOR'd against one byte.

    Tries every candidate key byte (0-255), scores each decryption by its
    deviation from English letter frequencies via _score_key, and returns
    the best-scoring plaintext.

    Bug fixes vs. the original:
    - the winning plaintext is now kept (`result = string`); previously the
      branch reset `result` to "" so the function could only ever return "".
    - the key byte is passed as an int; `bytes(key)` produced `key` NUL
      bytes rather than the key itself.
    - the initial best score is infinity, so even poorly scoring inputs
      (score >= 10000) still yield a result.
    """
    best_score = float('inf')
    result = ""
    for key in range(256):
        string, new_score = _score_key(hex_string, key)
        if new_score < best_score:
            best_score = new_score
            result = string
    return result
def _score_key(byte_string, key):
    """XOR ``byte_string`` with repeated ``key`` and score it as English.

    The score is the sum of squared deviations of the result's letter
    frequencies from reference English frequencies; lower is more
    English-like.

    Returns:
        (decrypted_text, score) tuple. The caller unpacks two values, so
        the original behavior of returning only the score raised a
        TypeError; the decrypted text is now returned alongside it.
    """
    # Repeat the key byte to cover the input (replaces the manual while/append).
    key_array = [key] * len(byte_string)
    xored = hex_xor(byte_string, binascii.b2a_hex(bytearray(key_array)))
    rel_freqs = get_char_freqs(xored)
    dif_score = sum(abs(rel_freqs[k] - REL_ENG_CHAR_FREQS[k]) ** 2
                    for k in rel_freqs)
    return xored, dif_score
def get_char_freqs(string):
    """Return the relative frequency of each English letter in ``string``.

    Maps each uppercase letter 'A'-'Z' (the same keys as
    REL_ENG_CHAR_FREQS) to count/len(string), so values are directly
    comparable to the reference frequencies used by _score_key.
    Non-letter characters still count toward the total length, diluting
    the letter frequencies. An empty input yields all zeros.

    Bug fixes vs. the original, which assigned ``length / counts[k]``
    inside the per-character loop: the ratio was inverted (reference
    frequencies are counts/length fractions), the assignment ran on every
    character instead of once after counting, and any letter absent from
    the input raised ZeroDivisionError.
    """
    length = len(string)
    upper = string.upper()
    counts = {}
    for k in "ABCDEFGHIJKLMNOPQRSTUVWXYZ":
        # str.count does the per-letter tally in one C-level pass.
        counts[k] = upper.count(k) / float(length) if length else 0.0
    return counts
def main():
    # Cryptopals set 1, challenge 3 input: a hex string XOR'd against a
    # single character. Prints the recovered plaintext.
    print(single_byte_xor_reverse('1b37373331363f78151b7f2b783431333d78397828372d363c78373e783a393b3736'))

if __name__ == "__main__":
    main()
| mit |
liangazhou/django-rdp | packages/Django-1.8.6/build/lib/django/template/engine.py | 38 | 10794 | import warnings
from django.core.exceptions import ImproperlyConfigured
from django.utils import lru_cache, six
from django.utils.deprecation import RemovedInDjango110Warning
from django.utils.functional import cached_property
from django.utils.module_loading import import_string
from .base import Context, Lexer, Parser, Template, TemplateDoesNotExist
from .context import _builtin_context_processors
_context_instance_undefined = object()
_dictionary_undefined = object()
_dirs_undefined = object()
class Engine(object):
    """
    Encapsulates the configuration and entry points of the Django template
    system: directories to search, loaders, context processors, and the
    compile/render pipeline. Several methods carry Django 1.8-era
    deprecation shims (the ``_*_undefined`` sentinels) scheduled for
    removal in Django 1.10.
    """

    def __init__(self, dirs=None, app_dirs=False,
                 allowed_include_roots=None, context_processors=None,
                 debug=False, loaders=None, string_if_invalid='',
                 file_charset='utf-8'):
        if dirs is None:
            dirs = []
        if allowed_include_roots is None:
            allowed_include_roots = []
        if context_processors is None:
            context_processors = []
        if loaders is None:
            # Default loader stack: filesystem, plus app directories when
            # app_dirs is requested.
            loaders = ['django.template.loaders.filesystem.Loader']
            if app_dirs:
                loaders += ['django.template.loaders.app_directories.Loader']
        else:
            # app_dirs is only meaningful for the default stack; an explicit
            # loaders list must encode that choice itself.
            if app_dirs:
                raise ImproperlyConfigured(
                    "app_dirs must not be set when loaders is defined.")
        if isinstance(allowed_include_roots, six.string_types):
            raise ImproperlyConfigured(
                "allowed_include_roots must be a tuple, not a string.")

        self.dirs = dirs
        self.app_dirs = app_dirs
        self.allowed_include_roots = allowed_include_roots
        self.context_processors = context_processors
        self.debug = debug
        self.loaders = loaders
        self.string_if_invalid = string_if_invalid
        self.file_charset = file_charset

    @staticmethod
    @lru_cache.lru_cache()
    def get_default():
        """
        When only one DjangoTemplates backend is configured, returns it.
        Raises ImproperlyConfigured otherwise.
        This is required for preserving historical APIs that rely on a
        globally available, implicitly configured engine such as:
        >>> from django.template import Context, Template
        >>> template = Template("Hello {{ name }}!")
        >>> context = Context({'name': "world"})
        >>> template.render(context)
        'Hello world!'
        """
        # Since Engine is imported in django.template and since
        # DjangoTemplates is a wrapper around this Engine class,
        # local imports are required to avoid import loops.
        from django.template import engines
        from django.template.backends.django import DjangoTemplates
        django_engines = [engine for engine in engines.all()
                          if isinstance(engine, DjangoTemplates)]
        if len(django_engines) == 1:
            # Unwrap the Engine instance inside DjangoTemplates
            return django_engines[0].engine
        elif len(django_engines) == 0:
            raise ImproperlyConfigured(
                "No DjangoTemplates backend is configured.")
        else:
            raise ImproperlyConfigured(
                "Several DjangoTemplates backends are configured. "
                "You must select one explicitly.")

    @cached_property
    def template_context_processors(self):
        # Built-in processors always run first; configured ones are appended.
        context_processors = _builtin_context_processors
        context_processors += tuple(self.context_processors)
        return tuple(import_string(path) for path in context_processors)

    @cached_property
    def template_loaders(self):
        return self.get_template_loaders(self.loaders)

    def get_template_loaders(self, template_loaders):
        # Instantiate each configured loader, dropping unusable ones
        # (find_template_loader returns None for those).
        loaders = []
        for template_loader in template_loaders:
            loader = self.find_template_loader(template_loader)
            if loader is not None:
                loaders.append(loader)
        return loaders

    def find_template_loader(self, loader):
        """
        Instantiate a template loader given a dotted path string or a
        (dotted path, args...) tuple/list. Returns None (with a warning)
        when the loader reports itself unusable on this installation.
        """
        if isinstance(loader, (tuple, list)):
            args = list(loader[1:])
            loader = loader[0]
        else:
            args = []

        if isinstance(loader, six.string_types):
            loader_class = import_string(loader)

            if getattr(loader_class, '_accepts_engine_in_init', False):
                args.insert(0, self)
            else:
                warnings.warn(
                    "%s inherits from django.template.loader.BaseLoader "
                    "instead of django.template.loaders.base.Loader. " %
                    loader, RemovedInDjango110Warning, stacklevel=2)

            loader_instance = loader_class(*args)

            if not loader_instance.is_usable:
                warnings.warn(
                    "Your template loaders configuration includes %r, but "
                    "your Python installation doesn't support that type of "
                    "template loading. Consider removing that line from "
                    "your settings." % loader)
                return None
            else:
                return loader_instance

        else:
            raise ImproperlyConfigured(
                "Invalid value in template loaders configuration: %r" % loader)

    def find_template(self, name, dirs=None):
        """
        Ask each configured loader for the template; return the first match
        as a (source, origin) pair or raise TemplateDoesNotExist.
        """
        for loader in self.template_loaders:
            try:
                source, display_name = loader(name, dirs)
                origin = self.make_origin(display_name, loader, name, dirs)
                return source, origin
            except TemplateDoesNotExist:
                pass
        raise TemplateDoesNotExist(name)

    def from_string(self, template_code):
        """
        Returns a compiled Template object for the given template code,
        handling template inheritance recursively.
        """
        return Template(template_code, engine=self)

    def get_template(self, template_name, dirs=_dirs_undefined):
        """
        Returns a compiled Template object for the given template name,
        handling template inheritance recursively.
        """
        if dirs is _dirs_undefined:
            dirs = None
        else:
            warnings.warn(
                "The dirs argument of get_template is deprecated.",
                RemovedInDjango110Warning, stacklevel=2)

        template, origin = self.find_template(template_name, dirs)
        if not hasattr(template, 'render'):
            # template needs to be compiled
            template = Template(template, origin, template_name, engine=self)
        return template

    # This method was originally a function defined in django.template.loader.
    # It was moved here in Django 1.8 when encapsulating the Django template
    # engine in this Engine class. It's still called by deprecated code but it
    # will be removed in Django 1.10. It's superseded by a new render_to_string
    # function in django.template.loader.
    def render_to_string(self, template_name, context=None,
                         context_instance=_context_instance_undefined,
                         dirs=_dirs_undefined,
                         dictionary=_dictionary_undefined):
        if context_instance is _context_instance_undefined:
            context_instance = None
        else:
            warnings.warn(
                "The context_instance argument of render_to_string is "
                "deprecated.", RemovedInDjango110Warning, stacklevel=2)
        if dirs is _dirs_undefined:
            # Do not set dirs to None here to avoid triggering the deprecation
            # warning in select_template or get_template.
            pass
        else:
            warnings.warn(
                "The dirs argument of render_to_string is deprecated.",
                RemovedInDjango110Warning, stacklevel=2)
        if dictionary is _dictionary_undefined:
            dictionary = None
        else:
            warnings.warn(
                "The dictionary argument of render_to_string was renamed to "
                "context.", RemovedInDjango110Warning, stacklevel=2)
            context = dictionary

        if isinstance(template_name, (list, tuple)):
            t = self.select_template(template_name, dirs)
        else:
            t = self.get_template(template_name, dirs)
        if not context_instance:
            # Django < 1.8 accepted a Context in `context` even though that's
            # unintended. Preserve this ability but don't rewrap `context`.
            if isinstance(context, Context):
                return t.render(context)
            else:
                return t.render(Context(context))
        if not context:
            return t.render(context_instance)
        # Add the context to the context stack, ensuring it gets removed again
        # to keep the context_instance in the same state it started in.
        with context_instance.push(context):
            return t.render(context_instance)

    def select_template(self, template_name_list, dirs=_dirs_undefined):
        """
        Given a list of template names, returns the first that can be loaded.
        """
        if dirs is _dirs_undefined:
            # Do not set dirs to None here to avoid triggering the deprecation
            # warning in get_template.
            pass
        else:
            warnings.warn(
                "The dirs argument of select_template is deprecated.",
                RemovedInDjango110Warning, stacklevel=2)

        if not template_name_list:
            raise TemplateDoesNotExist("No template names provided")
        not_found = []
        for template_name in template_name_list:
            try:
                return self.get_template(template_name, dirs)
            except TemplateDoesNotExist as exc:
                if exc.args[0] not in not_found:
                    not_found.append(exc.args[0])
                continue
        # If we get here, none of the templates could be loaded
        raise TemplateDoesNotExist(', '.join(not_found))

    def compile_string(self, template_string, origin):
        """
        Compiles template_string into a NodeList ready for rendering.
        """
        if self.debug:
            from .debug import DebugLexer, DebugParser
            lexer_class, parser_class = DebugLexer, DebugParser
        else:
            lexer_class, parser_class = Lexer, Parser
        lexer = lexer_class(template_string, origin)
        tokens = lexer.tokenize()
        parser = parser_class(tokens)
        return parser.parse()

    def make_origin(self, display_name, loader, name, dirs):
        # Origins are only tracked in debug mode; elsewhere they are dropped
        # to avoid the bookkeeping cost.
        if self.debug and display_name:
            # Inner import to avoid circular dependency
            from .loader import LoaderOrigin
            return LoaderOrigin(display_name, loader, name, dirs)
        else:
            return None
| apache-2.0 |
mrry/tensorflow | tensorflow/tools/pip_package/simple_console.py | 605 | 1028 | # Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Start a simple interactive console with TensorFlow available."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import code
import sys
def main(_):
  """Run an interactive console."""
  # Drop into a standard Python REPL (argv is ignored); return 0 so the
  # process exits successfully when the user leaves the console.
  code.interact()
  return 0


if __name__ == '__main__':
  sys.exit(main(sys.argv))
| apache-2.0 |
rspavel/spack | var/spack/repos/builtin/packages/maker/package.py | 5 | 4141 | # Copyright 2013-2020 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
import os
class Maker(Package):
    """MAKER is a portable and easily configurable genome annotation pipeline.
    It's purpose is to allow smaller eukaryotic and prokaryotic genomeprojects
    to independently annotate their genomes and to create genome databases.
    MAKER identifies repeats, aligns ESTs and proteins to a genome, produces
    ab-initio gene predictions and automatically synthesizes these data into
    gene annotations having evidence-based quality values. MAKER is also easily
    trainable: outputs of preliminary runs can be used to automatically retrain
    its gene prediction algorithm, producing higher quality gene-models on
    subsequent runs. MAKER's inputs are minimal and its ouputs can be directly
    loaded into a GMOD database. They can also be viewed in the Apollo genome
    browser; this feature of MAKER provides an easy means to annotate, view and
    edit individual contigs and BACs without the overhead of a database. MAKER
    should prove especially useful for emerging model organism projects with
    minimal bioinformatics expertise and computer resources.

    Note: MAKER requires registration. Fill out the form at
    http://yandell.topaz.genetics.utah.edu/cgi-bin/maker_license.cgi to get a
    download link. Spack will search your current directory for the download
    file. Alternatively, add this file to a mirror so that Spack can find it.
    For instructions on how to set up a mirror, see
    http://spack.readthedocs.io/en/latest/mirrors.html"""

    homepage = "http://www.yandell-lab.org/software/maker.html"

    version('2.31.10', sha256='d3979af9710d61754a3b53f6682d0e2052c6c3f36be6f2df2286d2587406f07d')

    def url_for_version(self, version):
        # MAKER requires registration, so the tarball is expected in the
        # current working directory (or a configured mirror) rather than
        # being downloadable from an upstream URL.
        return "file://{0}/maker-{1}.tgz".format(os.getcwd(), version)

    variant('mpi', default=True, description='Build with MPI support')

    patch('install.patch')
    patch('mpi.patch')
    patch('MpiChunk.patch')

    depends_on('perl', type=('build', 'run'))
    depends_on('perl-module-build', type='build')
    depends_on('perl-dbi', type=('build', 'run'))
    depends_on('perl-dbd-pg', type=('build', 'run'))
    depends_on('perl-dbd-sqlite', type=('build', 'run'))
    depends_on('perl-forks', type=('build', 'run'))
    depends_on('perl-file-which', type=('build', 'run'))
    depends_on('perl-perl-unsafe-signals', type=('build', 'run'))
    depends_on('perl-bit-vector', type=('build', 'run'))
    depends_on('perl-inline-c', type=('build', 'run'))
    depends_on('perl-io-all', type=('build', 'run'))
    depends_on('perl-io-prompt', type=('build', 'run'))
    depends_on('perl-bioperl', type=('build', 'run'))
    depends_on('blast-plus')
    depends_on('snap-korf')
    depends_on('repeatmasker')
    depends_on('exonerate')
    depends_on('augustus')
    depends_on('interproscan@:4.8')
    depends_on('mpi', when='+mpi')

    def install(self, spec, prefix):
        """Build MAKER with its bundled Perl Build script and prune
        non-working helper scripts from the install."""
        if '+mpi' in spec:
            with working_dir('src'):
                # Flip the $go flag in Build.PL so the MPI build path is
                # taken (presumably a yes/no toggle the patched build
                # script consults — see mpi.patch; verify on version bumps).
                pattern = r'my \$go = 0;'
                repl = 'my $go = 1;'
                filter_file(pattern, repl, 'Build.PL', backup=False)

        perl = which('perl')
        rm = which('rm')

        with working_dir('src'):
            perl('Build.PL', '--install_base', prefix)
            perl('Build', 'install')
        install_tree('lib', join_path(prefix, 'perl', 'lib'))

        # Remove scripts that do not work. The 'mpi_evaluator' and
        # 'mpi_iprscan' scripts depend on a custom perl module that is not
        # shipped with maker. The 'maker2chado' script depends on setting up a
        # CHADO database which is out of scope here.
        for package in ('maker2chado', 'maker2jbrowse', 'maker2wap',
                        'mpi_evaluator', 'mpi_iprscan'):
            rm('-f', join_path(prefix.bin, package))

        # Remove old IO::Prompt perl module
        rm('-r', '-f', join_path(prefix, 'perl', 'lib', 'IO'))
| lgpl-2.1 |
jmakov/ggrc-core | src/ggrc_workflows/migrations/versions/20140804163110_2b1ba26f2123_clean_up_taskgrouptask.py | 5 | 1750 | # Copyright (C) 2015 Google Inc., authors, and contributors <see AUTHORS file>
# Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file>
# Created By: anze@reciprocitylabs.com
# Maintained By: anze@reciprocitylabs.com
"""Clean up TaskGroupTask
Revision ID: 2b1ba26f2123
Revises: eab2ab6a0fc
Create Date: 2014-08-04 16:31:10.190834
"""
# revision identifiers, used by Alembic.
revision = '2b1ba26f2123'
down_revision = 'eab2ab6a0fc'
from alembic import op
import sqlalchemy as sa
def upgrade():
    """Drop TaskGroupTask's `status` column and its linkage to the `tasks`
    table, rebuilding the remaining foreign key and indexes."""
    # `status` was from Stateful mixin, no longer needed
    op.drop_column('task_group_tasks', 'status')

    # Remove linkage to `tasks` table, but this is weird because the index
    # enforcing the (`task_group_id`, `task_id`) uniqueness constraint is
    # reused for the `task_group_id` foreign key constraint, so we have to
    # remove the `task_group_id` constraint, then the uniqueness, then rebuild
    # the `task_group_id` foreign key constraint
    op.drop_constraint('fk_task_group_tasks_task_id', table_name='task_group_tasks', type_='foreignkey')
    op.drop_constraint('fk_task_group_tasks_task_group_id', table_name='task_group_tasks', type_='foreignkey')
    op.drop_constraint('task_group_id', table_name='task_group_tasks', type_='unique')
    op.create_foreign_key('fk_task_group_tasks_task_group_id', 'task_group_tasks', 'task_groups', ['task_group_id'], ['id'])
    op.drop_column('task_group_tasks', 'task_id')

    # Add indexes for other columns
    op.create_index('fk_task_group_tasks_contact', 'task_group_tasks', ['contact_id'], unique=False)

    # Ignore `Task.slug` uniqueness?
    #op.create_unique_constraint('uq_task_group_tasks', 'task_group_tasks', ['slug'])
def downgrade():
    """Intentionally a no-op: the dropped column, constraints, and data are
    not restored, so this migration is irreversible."""
    pass
| apache-2.0 |
LiaoPan/scikit-learn | examples/linear_model/plot_robust_fit.py | 238 | 2414 | """
Robust linear estimator fitting
===============================
Here a sine function is fit with a polynomial of order 3, for values
close to zero.
Robust fitting is demoed in different situations:
- No measurement errors, only modelling errors (fitting a sine with a
polynomial)
- Measurement errors in X
- Measurement errors in y
The median absolute deviation to non corrupt new data is used to judge
the quality of the prediction.
What we can see that:
- RANSAC is good for strong outliers in the y direction
- TheilSen is good for small outliers, both in direction X and y, but has
a break point above which it performs worst than OLS.
"""
from matplotlib import pyplot as plt
import numpy as np
from sklearn import linear_model, metrics
from sklearn.preprocessing import PolynomialFeatures
from sklearn.pipeline import make_pipeline
# Fix the RNG so the demo's data and figures are reproducible.
np.random.seed(42)

X = np.random.normal(size=400)
y = np.sin(X)
# Make sure that it X is 2D
X = X[:, np.newaxis]

# Held-out, uncorrupted data used to score each fitted model.
X_test = np.random.normal(size=200)
y_test = np.sin(X_test)
X_test = X_test[:, np.newaxis]

# Corrupt every third sample to simulate outliers of varying severity.
y_errors = y.copy()
y_errors[::3] = 3

X_errors = X.copy()
X_errors[::3] = 3

y_errors_large = y.copy()
y_errors_large[::3] = 10

X_errors_large = X.copy()
X_errors_large[::3] = 10

estimators = [('OLS', linear_model.LinearRegression()),
              ('Theil-Sen', linear_model.TheilSenRegressor(random_state=42)),
              ('RANSAC', linear_model.RANSACRegressor(random_state=42)), ]

x_plot = np.linspace(X.min(), X.max())

# One figure per corruption scenario; each compares all three estimators
# fitted as degree-3 polynomial pipelines.
for title, this_X, this_y in [
        ('Modeling errors only', X, y),
        ('Corrupt X, small deviants', X_errors, y),
        ('Corrupt y, small deviants', X, y_errors),
        ('Corrupt X, large deviants', X_errors_large, y),
        ('Corrupt y, large deviants', X, y_errors_large)]:
    plt.figure(figsize=(5, 4))
    plt.plot(this_X[:, 0], this_y, 'k+')

    for name, estimator in estimators:
        model = make_pipeline(PolynomialFeatures(3), estimator)
        model.fit(this_X, this_y)
        # Score against the clean test set, not the corrupted training data.
        mse = metrics.mean_squared_error(model.predict(X_test), y_test)
        y_plot = model.predict(x_plot[:, np.newaxis])
        plt.plot(x_plot, y_plot,
                 label='%s: error = %.3f' % (name, mse))

    plt.legend(loc='best', frameon=False,
               title='Error: mean absolute deviation\n to non corrupt data')
    plt.xlim(-4, 10.2)
    plt.ylim(-2, 10.2)
    plt.title(title)
plt.show()
| bsd-3-clause |
Changaco/oh-mainline | vendor/packages/twisted/twisted/internet/test/reactormixins.py | 18 | 8088 | # Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.
"""
Tests for implementations of L{IReactorTime}.
"""
__metaclass__ = type
import signal
from twisted.internet.defer import TimeoutError
from twisted.trial.unittest import TestCase, SkipTest
from twisted.python.runtime import platformType
from twisted.python.reflect import namedAny
from twisted.python import log
from twisted.python.failure import Failure
# Access private APIs.
if platformType == 'posix':
from twisted.internet import process
else:
process = None
class ReactorBuilder:
"""
L{TestCase} mixin which provides a reactor-creation API. This mixin
defines C{setUp} and C{tearDown}, so mix it in before L{TestCase} or call
its methods from the overridden ones in the subclass.
@cvar skippedReactors: A dict mapping FQPN strings of reactors for
which the tests defined by this class will be skipped to strings
giving the skip message.
@cvar requiredInterfaces: A C{list} of interfaces which the reactor must
provide or these tests will be skipped. The default, C{None}, means
that no interfaces are required.
@ivar reactorFactory: A no-argument callable which returns the reactor to
use for testing.
@ivar originalHandler: The SIGCHLD handler which was installed when setUp
ran and which will be re-installed when tearDown runs.
@ivar _reactors: A list of FQPN strings giving the reactors for which
TestCases will be created.
"""
_reactors = ["twisted.internet.selectreactor.SelectReactor",
"twisted.internet.pollreactor.PollReactor",
"twisted.internet.epollreactor.EPollReactor",
"twisted.internet.glib2reactor.Glib2Reactor",
"twisted.internet.gtk2reactor.Gtk2Reactor",
"twisted.internet.kqreactor.KQueueReactor",
"twisted.internet.win32eventreactor.Win32Reactor",
"twisted.internet.iocpreactor.reactor.IOCPReactor",
"twisted.internet.cfreactor.CFReactor"]
reactorFactory = None
originalHandler = None
requiredInterfaces = None
skippedReactors = {}
    def setUp(self):
        """
        Clear the SIGCHLD handler, if there is one, to ensure an environment
        like the one which exists prior to a call to L{reactor.run}.
        """
        if platformType == 'posix':
            # Save the previous handler so tearDown can reinstall it.
            self.originalHandler = signal.signal(signal.SIGCHLD, signal.SIG_DFL)
    def tearDown(self):
        """
        Restore the original SIGCHLD handler and reap processes as long as
        there seem to be any remaining.
        """
        if self.originalHandler is not None:
            signal.signal(signal.SIGCHLD, self.originalHandler)
        if process is not None:
            # Loop until the handler list empties; a single reapAllProcesses
            # call is not assumed to clear every registered handler.
            while process.reapProcessHandlers:
                log.msg(
                    "ReactorBuilder.tearDown reaping some processes %r" % (
                        process.reapProcessHandlers,))
                process.reapAllProcesses()
    def unbuildReactor(self, reactor):
        """
        Clean up any resources which may have been allocated for the given
        reactor by its creation or by a test which used it.
        """
        # Chris says:
        #
        # XXX These explicit calls to clean up the waker (and any other
        # internal readers) should become obsolete when bug #3063 is
        # fixed. -radix, 2008-02-29. Fortunately it should probably cause an
        # error when bug #3063 is fixed, so it should be removed in the same
        # branch that fixes it.
        #
        # -exarkun
        reactor._uninstallHandler()
        if getattr(reactor, '_internalReaders', None) is not None:
            for reader in reactor._internalReaders:
                reactor.removeReader(reader)
                reader.connectionLost(None)
            reactor._internalReaders.clear()

        # Here's an extra thing unrelated to wakers but necessary for
        # cleaning up after the reactors we make. -exarkun
        reactor.disconnectAll()

        # It would also be bad if any timed calls left over were allowed to
        # run.
        calls = reactor.getDelayedCalls()
        for c in calls:
            c.cancel()
def buildReactor(self):
"""
Create and return a reactor using C{self.reactorFactory}.
"""
try:
from twisted.internet.cfreactor import CFReactor
from twisted.internet import reactor as globalReactor
except ImportError:
pass
else:
if (isinstance(globalReactor, CFReactor)
and self.reactorFactory is CFReactor):
raise SkipTest(
"CFReactor uses APIs which manipulate global state, "
"so it's not safe to run its own reactor-builder tests "
"under itself")
try:
reactor = self.reactorFactory()
except:
# Unfortunately, not all errors which result in a reactor
# being unusable are detectable without actually
# instantiating the reactor. So we catch some more here
# and skip the test if necessary. We also log it to aid
# with debugging, but flush the logged error so the test
# doesn't fail.
log.err(None, "Failed to install reactor")
self.flushLoggedErrors()
raise SkipTest(Failure().getErrorMessage())
else:
if self.requiredInterfaces is not None:
missing = filter(
lambda required: not required.providedBy(reactor),
self.requiredInterfaces)
if missing:
self.unbuildReactor(reactor)
raise SkipTest("%r does not provide %s" % (
reactor, ",".join([repr(x) for x in missing])))
self.addCleanup(self.unbuildReactor, reactor)
return reactor
def runReactor(self, reactor, timeout=None):
"""
Run the reactor for at most the given amount of time.
@param reactor: The reactor to run.
@type timeout: C{int} or C{float}
@param timeout: The maximum amount of time, specified in seconds, to
allow the reactor to run. If the reactor is still running after
this much time has elapsed, it will be stopped and an exception
raised. If C{None}, the default test method timeout imposed by
Trial will be used. This depends on the L{IReactorTime}
implementation of C{reactor} for correct operation.
@raise TimeoutError: If the reactor is still running after C{timeout}
seconds.
"""
if timeout is None:
timeout = self.getTimeout()
timedOut = []
def stop():
timedOut.append(None)
reactor.stop()
reactor.callLater(timeout, stop)
reactor.run()
if timedOut:
raise TimeoutError(
"reactor still running after %s seconds" % (timeout,))
    def makeTestCaseClasses(cls):
        """
        Create a L{TestCase} subclass which mixes in C{cls} for each known
        reactor and return a dict mapping their names to them.
        """
        classes = {}
        for reactor in cls._reactors:
            shortReactorName = reactor.split(".")[-1]
            name = (cls.__name__ + "." + shortReactorName).replace(".", "_")
            # The class body below runs at definition time, once per loop
            # iteration, so `skip` and `reactorFactory` become class
            # attributes of each generated TestCase.
            class testcase(cls, TestCase):
                __module__ = cls.__module__
                if reactor in cls.skippedReactors:
                    skip = cls.skippedReactors[reactor]
                try:
                    reactorFactory = namedAny(reactor)
                except:
                    # The reactor could not even be imported (e.g. platform
                    # lacks the API); skip its tests with the reason.
                    skip = Failure().getErrorMessage()
            # Rename the generated class so test output identifies which
            # reactor it exercises.
            testcase.__name__ = name
            classes[testcase.__name__] = testcase
        return classes
    makeTestCaseClasses = classmethod(makeTestCaseClasses)
__all__ = ['ReactorBuilder']
| agpl-3.0 |
GinnyN/Team-Fortress-RPG-Generators | tests/regressiontests/admin_filters/tests.py | 9 | 33641 | from __future__ import absolute_import
import datetime
from django.contrib.admin import (site, ModelAdmin, SimpleListFilter,
BooleanFieldListFilter)
from django.contrib.admin.views.main import ChangeList
from django.contrib.auth.admin import UserAdmin
from django.contrib.auth.models import User
from django.core.exceptions import ImproperlyConfigured
from django.test import TestCase, RequestFactory
from django.test.utils import override_settings
from django.utils.encoding import force_unicode
from .models import Book, Department, Employee
def select_by(dictlist, key, value):
    """
    Return the first dict in ``dictlist`` whose entry for ``key`` equals
    ``value``.  Raises IndexError when no dict matches.
    """
    matching = [entry for entry in dictlist if entry[key] == value]
    return matching[0]
class DecadeListFilter(SimpleListFilter):
    """
    Base SimpleListFilter offering publication decades; subclasses supply
    ``title`` and ``parameter_name``.
    """

    def lookups(self, request, model_admin):
        return (
            ('the 80s', "the 1980's"),
            ('the 90s', "the 1990's"),
            ('the 00s', "the 2000's"),
            ('other', "other decades"),
        )

    def queryset(self, request, queryset):
        # Map each recognised decade to its inclusive year range; any other
        # value (including 'other' and None) leaves the queryset untouched
        # by returning None.
        ranges = {
            'the 80s': (1980, 1989),
            'the 90s': (1990, 1999),
            'the 00s': (2000, 2009),
        }
        decade = self.value()
        if decade in ranges:
            first, last = ranges[decade]
            return queryset.filter(year__gte=first, year__lte=last)
class DecadeListFilterWithTitleAndParameter(DecadeListFilter):
    """Correctly configured filter: defines both required attributes."""
    title = 'publication decade'
    parameter_name = 'publication-decade'
class DecadeListFilterWithoutTitle(DecadeListFilter):
    """Misconfigured filter: missing the required 'title' attribute."""
    parameter_name = 'publication-decade'
class DecadeListFilterWithoutParameter(DecadeListFilter):
    """Misconfigured filter: missing the required 'parameter_name'."""
    title = 'publication decade'
class DecadeListFilterWithNoneReturningLookups(DecadeListFilterWithTitleAndParameter):
    """Filter whose lookups() returns None, which disables it entirely."""
    def lookups(self, request, model_admin):
        # Implicitly returns None; the changelist then drops this filter.
        pass
class DecadeListFilterWithFailingQueryset(DecadeListFilterWithTitleAndParameter):
    """Filter whose queryset() always fails, to test error propagation."""
    def queryset(self, request, queryset):
        # Evaluating 1/0 raises ZeroDivisionError before `raise` executes;
        # tests assert this exception is not swallowed (refs #17828).
        raise 1/0
class DecadeListFilterWithQuerysetBasedLookups(DecadeListFilterWithTitleAndParameter):
    """Only offer a decade when at least one matching book exists."""

    def lookups(self, request, model_admin):
        qs = model_admin.queryset(request)
        decades = (
            (1980, 1989, 'the 80s', "the 1980's"),
            (1990, 1999, 'the 90s', "the 1990's"),
            (2000, 2009, 'the 00s', "the 2000's"),
        )
        for first, last, slug, label in decades:
            if qs.filter(year__gte=first, year__lte=last).exists():
                yield (slug, label)
class DecadeListFilterParameterEndsWith__In(DecadeListFilter):
    """Filter whose parameter_name collides with the '__in' lookup suffix."""
    title = 'publication decade'
    parameter_name = 'decade__in' # Ends with '__in'
class DecadeListFilterParameterEndsWith__Isnull(DecadeListFilter):
    """Filter whose parameter_name collides with the '__isnull' lookup suffix."""
    title = 'publication decade'
    parameter_name = 'decade__isnull' # Ends with '__isnull'
class CustomUserAdmin(UserAdmin):
    """UserAdmin filtering on reverse FK and M2M relations to Book."""
    list_filter = ('books_authored', 'books_contributed')
class BookAdmin(ModelAdmin):
    """Book admin exercising field-name list filters of several field types."""
    list_filter = ('year', 'author', 'contributors', 'is_best_seller', 'date_registered', 'no')
    ordering = ('-id',)
class BookAdminWithTupleBooleanFilter(BookAdmin):
    """Like BookAdmin, but with an explicit (field, FieldListFilter) tuple."""
    list_filter = ('year', 'author', 'contributors', ('is_best_seller', BooleanFieldListFilter), 'date_registered', 'no')
class DecadeFilterBookAdmin(ModelAdmin):
    """Admin combining a field filter with a SimpleListFilter subclass."""
    list_filter = ('author', DecadeListFilterWithTitleAndParameter)
    ordering = ('-id',)
class DecadeFilterBookAdminWithoutTitle(ModelAdmin):
    """Admin using a filter missing its 'title' (should misconfigure)."""
    list_filter = (DecadeListFilterWithoutTitle,)
class DecadeFilterBookAdminWithoutParameter(ModelAdmin):
    """Admin using a filter missing its 'parameter_name' (should misconfigure)."""
    list_filter = (DecadeListFilterWithoutParameter,)
class DecadeFilterBookAdminWithNoneReturningLookups(ModelAdmin):
    """Admin whose only filter returns no lookups, so no filter is shown."""
    list_filter = (DecadeListFilterWithNoneReturningLookups,)
class DecadeFilterBookAdminWithFailingQueryset(ModelAdmin):
    """Admin whose filter raises, to verify errors surface loudly."""
    list_filter = (DecadeListFilterWithFailingQueryset,)
class DecadeFilterBookAdminWithQuerysetBasedLookups(ModelAdmin):
    """Admin whose filter computes its lookups from the queryset."""
    list_filter = (DecadeListFilterWithQuerysetBasedLookups,)
class DecadeFilterBookAdminParameterEndsWith__In(ModelAdmin):
    """Admin exercising a parameter_name ending in '__in' (refs #17091)."""
    list_filter = (DecadeListFilterParameterEndsWith__In,)
class DecadeFilterBookAdminParameterEndsWith__Isnull(ModelAdmin):
    """Admin exercising a parameter_name ending in '__isnull' (refs #17091)."""
    list_filter = (DecadeListFilterParameterEndsWith__Isnull,)
class EmployeeAdmin(ModelAdmin):
    """Admin filtering on a FK that uses to_field (refs #17972)."""
    list_display = ['name', 'department']
    list_filter = ['department']
class ListFiltersTests(TestCase):
    """
    Tests for the admin changelist's list_filter machinery: date, boolean,
    all-values, related-field and SimpleListFilter-based filters.

    Fix: the deprecated ``assertEquals`` alias was used in a handful of
    places; normalized to ``assertEqual`` for consistency with the rest of
    the class.
    """

    def setUp(self):
        self.today = datetime.date.today()
        self.tomorrow = self.today + datetime.timedelta(days=1)
        self.one_week_ago = self.today - datetime.timedelta(days=7)
        self.request_factory = RequestFactory()
        # Users
        self.alfred = User.objects.create_user('alfred', 'alfred@example.com')
        self.bob = User.objects.create_user('bob', 'bob@example.com')
        self.lisa = User.objects.create_user('lisa', 'lisa@example.com')
        # Books
        self.djangonaut_book = Book.objects.create(title='Djangonaut: an art of living', year=2009, author=self.alfred, is_best_seller=True, date_registered=self.today)
        self.bio_book = Book.objects.create(title='Django: a biography', year=1999, author=self.alfred, is_best_seller=False, no=207)
        self.django_book = Book.objects.create(title='The Django Book', year=None, author=self.bob, is_best_seller=None, date_registered=self.today, no=103)
        self.gipsy_book = Book.objects.create(title='Gipsy guitar for dummies', year=2002, is_best_seller=True, date_registered=self.one_week_ago)
        self.gipsy_book.contributors = [self.bob, self.lisa]
        self.gipsy_book.save()

    def get_changelist(self, request, model, modeladmin):
        """Build a ChangeList the way the admin changelist view would."""
        return ChangeList(request, model, modeladmin.list_display, modeladmin.list_display_links,
            modeladmin.list_filter, modeladmin.date_hierarchy, modeladmin.search_fields,
            modeladmin.list_select_related, modeladmin.list_per_page, modeladmin.list_max_show_all, modeladmin.list_editable, modeladmin)

    def test_datefieldlistfilter(self):
        modeladmin = BookAdmin(Book, site)

        request = self.request_factory.get('/')
        changelist = self.get_changelist(request, Book, modeladmin)

        request = self.request_factory.get('/', {'date_registered__gte': self.today,
                                                 'date_registered__lt': self.tomorrow})
        changelist = self.get_changelist(request, Book, modeladmin)

        # Make sure the correct queryset is returned
        queryset = changelist.get_query_set(request)
        self.assertEqual(list(queryset), [self.django_book, self.djangonaut_book])

        # Make sure the correct choice is selected
        filterspec = changelist.get_filters(request)[0][4]
        self.assertEqual(force_unicode(filterspec.title), u'date registered')
        choice = select_by(filterspec.choices(changelist), "display", "Today")
        self.assertEqual(choice['selected'], True)
        self.assertEqual(choice['query_string'], '?date_registered__gte=%s'
                                                 '&date_registered__lt=%s'
                                                 % (self.today, self.tomorrow))

        request = self.request_factory.get('/', {'date_registered__gte': self.today.replace(day=1),
                                                 'date_registered__lt': self.tomorrow})
        changelist = self.get_changelist(request, Book, modeladmin)

        # Make sure the correct queryset is returned
        queryset = changelist.get_query_set(request)
        if (self.today.year, self.today.month) == (self.one_week_ago.year, self.one_week_ago.month):
            # In case one week ago is in the same month.
            self.assertEqual(list(queryset), [self.gipsy_book, self.django_book, self.djangonaut_book])
        else:
            self.assertEqual(list(queryset), [self.django_book, self.djangonaut_book])

        # Make sure the correct choice is selected
        filterspec = changelist.get_filters(request)[0][4]
        self.assertEqual(force_unicode(filterspec.title), u'date registered')
        choice = select_by(filterspec.choices(changelist), "display", "This month")
        self.assertEqual(choice['selected'], True)
        self.assertEqual(choice['query_string'], '?date_registered__gte=%s'
                                                 '&date_registered__lt=%s'
                                                 % (self.today.replace(day=1), self.tomorrow))

        request = self.request_factory.get('/', {'date_registered__gte': self.today.replace(month=1, day=1),
                                                 'date_registered__lt': self.tomorrow})
        changelist = self.get_changelist(request, Book, modeladmin)

        # Make sure the correct queryset is returned
        queryset = changelist.get_query_set(request)
        if self.today.year == self.one_week_ago.year:
            # In case one week ago is in the same year.
            self.assertEqual(list(queryset), [self.gipsy_book, self.django_book, self.djangonaut_book])
        else:
            self.assertEqual(list(queryset), [self.django_book, self.djangonaut_book])

        # Make sure the correct choice is selected
        filterspec = changelist.get_filters(request)[0][4]
        self.assertEqual(force_unicode(filterspec.title), u'date registered')
        choice = select_by(filterspec.choices(changelist), "display", "This year")
        self.assertEqual(choice['selected'], True)
        self.assertEqual(choice['query_string'], '?date_registered__gte=%s'
                                                 '&date_registered__lt=%s'
                                                 % (self.today.replace(month=1, day=1), self.tomorrow))

        request = self.request_factory.get('/', {'date_registered__gte': str(self.one_week_ago),
                                                 'date_registered__lt': str(self.tomorrow)})
        changelist = self.get_changelist(request, Book, modeladmin)

        # Make sure the correct queryset is returned
        queryset = changelist.get_query_set(request)
        self.assertEqual(list(queryset), [self.gipsy_book, self.django_book, self.djangonaut_book])

        # Make sure the correct choice is selected
        filterspec = changelist.get_filters(request)[0][4]
        self.assertEqual(force_unicode(filterspec.title), u'date registered')
        choice = select_by(filterspec.choices(changelist), "display", "Past 7 days")
        self.assertEqual(choice['selected'], True)
        self.assertEqual(choice['query_string'], '?date_registered__gte=%s'
                                                 '&date_registered__lt=%s'
                                                 % (str(self.one_week_ago), str(self.tomorrow)))

    @override_settings(USE_TZ=True)
    def test_datefieldlistfilter_with_time_zone_support(self):
        # Regression for #17830
        self.test_datefieldlistfilter()

    def test_allvaluesfieldlistfilter(self):
        modeladmin = BookAdmin(Book, site)

        request = self.request_factory.get('/', {'year__isnull': 'True'})
        changelist = self.get_changelist(request, Book, modeladmin)

        # Make sure the correct queryset is returned
        queryset = changelist.get_query_set(request)
        self.assertEqual(list(queryset), [self.django_book])

        # Make sure the last choice is None and is selected
        filterspec = changelist.get_filters(request)[0][0]
        self.assertEqual(force_unicode(filterspec.title), u'year')
        choices = list(filterspec.choices(changelist))
        self.assertEqual(choices[-1]['selected'], True)
        self.assertEqual(choices[-1]['query_string'], '?year__isnull=True')

        request = self.request_factory.get('/', {'year': '2002'})
        changelist = self.get_changelist(request, Book, modeladmin)

        # Make sure the correct choice is selected
        filterspec = changelist.get_filters(request)[0][0]
        self.assertEqual(force_unicode(filterspec.title), u'year')
        choices = list(filterspec.choices(changelist))
        self.assertEqual(choices[2]['selected'], True)
        self.assertEqual(choices[2]['query_string'], '?year=2002')

    def test_relatedfieldlistfilter_foreignkey(self):
        modeladmin = BookAdmin(Book, site)

        request = self.request_factory.get('/', {'author__isnull': 'True'})
        changelist = self.get_changelist(request, Book, modeladmin)

        # Make sure the correct queryset is returned
        queryset = changelist.get_query_set(request)
        self.assertEqual(list(queryset), [self.gipsy_book])

        # Make sure the last choice is None and is selected
        filterspec = changelist.get_filters(request)[0][1]
        self.assertEqual(force_unicode(filterspec.title), u'Verbose Author')
        choices = list(filterspec.choices(changelist))
        self.assertEqual(choices[-1]['selected'], True)
        self.assertEqual(choices[-1]['query_string'], '?author__isnull=True')

        request = self.request_factory.get('/', {'author__id__exact': self.alfred.pk})
        changelist = self.get_changelist(request, Book, modeladmin)

        # Make sure the correct choice is selected
        filterspec = changelist.get_filters(request)[0][1]
        self.assertEqual(force_unicode(filterspec.title), u'Verbose Author')
        # order of choices depends on User model, which has no order
        choice = select_by(filterspec.choices(changelist), "display", "alfred")
        self.assertEqual(choice['selected'], True)
        self.assertEqual(choice['query_string'], '?author__id__exact=%d' % self.alfred.pk)

    def test_relatedfieldlistfilter_manytomany(self):
        modeladmin = BookAdmin(Book, site)

        request = self.request_factory.get('/', {'contributors__isnull': 'True'})
        changelist = self.get_changelist(request, Book, modeladmin)

        # Make sure the correct queryset is returned
        queryset = changelist.get_query_set(request)
        self.assertEqual(list(queryset), [self.django_book, self.bio_book, self.djangonaut_book])

        # Make sure the last choice is None and is selected
        filterspec = changelist.get_filters(request)[0][2]
        self.assertEqual(force_unicode(filterspec.title), u'Verbose Contributors')
        choices = list(filterspec.choices(changelist))
        self.assertEqual(choices[-1]['selected'], True)
        self.assertEqual(choices[-1]['query_string'], '?contributors__isnull=True')

        request = self.request_factory.get('/', {'contributors__id__exact': self.bob.pk})
        changelist = self.get_changelist(request, Book, modeladmin)

        # Make sure the correct choice is selected
        filterspec = changelist.get_filters(request)[0][2]
        self.assertEqual(force_unicode(filterspec.title), u'Verbose Contributors')
        choice = select_by(filterspec.choices(changelist), "display", "bob")
        self.assertEqual(choice['selected'], True)
        self.assertEqual(choice['query_string'], '?contributors__id__exact=%d' % self.bob.pk)

    def test_relatedfieldlistfilter_reverse_relationships(self):
        modeladmin = CustomUserAdmin(User, site)

        # FK relationship -----
        request = self.request_factory.get('/', {'books_authored__isnull': 'True'})
        changelist = self.get_changelist(request, User, modeladmin)

        # Make sure the correct queryset is returned
        queryset = changelist.get_query_set(request)
        self.assertEqual(list(queryset), [self.lisa])

        # Make sure the last choice is None and is selected
        filterspec = changelist.get_filters(request)[0][0]
        self.assertEqual(force_unicode(filterspec.title), u'book')
        choices = list(filterspec.choices(changelist))
        self.assertEqual(choices[-1]['selected'], True)
        self.assertEqual(choices[-1]['query_string'], '?books_authored__isnull=True')

        request = self.request_factory.get('/', {'books_authored__id__exact': self.bio_book.pk})
        changelist = self.get_changelist(request, User, modeladmin)

        # Make sure the correct choice is selected
        filterspec = changelist.get_filters(request)[0][0]
        self.assertEqual(force_unicode(filterspec.title), u'book')
        choice = select_by(filterspec.choices(changelist), "display", self.bio_book.title)
        self.assertEqual(choice['selected'], True)
        self.assertEqual(choice['query_string'], '?books_authored__id__exact=%d' % self.bio_book.pk)

        # M2M relationship -----
        request = self.request_factory.get('/', {'books_contributed__isnull': 'True'})
        changelist = self.get_changelist(request, User, modeladmin)

        # Make sure the correct queryset is returned
        queryset = changelist.get_query_set(request)
        self.assertEqual(list(queryset), [self.alfred])

        # Make sure the last choice is None and is selected
        filterspec = changelist.get_filters(request)[0][1]
        self.assertEqual(force_unicode(filterspec.title), u'book')
        choices = list(filterspec.choices(changelist))
        self.assertEqual(choices[-1]['selected'], True)
        self.assertEqual(choices[-1]['query_string'], '?books_contributed__isnull=True')

        request = self.request_factory.get('/', {'books_contributed__id__exact': self.django_book.pk})
        changelist = self.get_changelist(request, User, modeladmin)

        # Make sure the correct choice is selected
        filterspec = changelist.get_filters(request)[0][1]
        self.assertEqual(force_unicode(filterspec.title), u'book')
        choice = select_by(filterspec.choices(changelist), "display", self.django_book.title)
        self.assertEqual(choice['selected'], True)
        self.assertEqual(choice['query_string'], '?books_contributed__id__exact=%d' % self.django_book.pk)

    def test_booleanfieldlistfilter(self):
        modeladmin = BookAdmin(Book, site)
        self.verify_booleanfieldlistfilter(modeladmin)

    def test_booleanfieldlistfilter_tuple(self):
        modeladmin = BookAdminWithTupleBooleanFilter(Book, site)
        self.verify_booleanfieldlistfilter(modeladmin)

    def verify_booleanfieldlistfilter(self, modeladmin):
        request = self.request_factory.get('/')
        changelist = self.get_changelist(request, Book, modeladmin)

        request = self.request_factory.get('/', {'is_best_seller__exact': 0})
        changelist = self.get_changelist(request, Book, modeladmin)

        # Make sure the correct queryset is returned
        queryset = changelist.get_query_set(request)
        self.assertEqual(list(queryset), [self.bio_book])

        # Make sure the correct choice is selected
        filterspec = changelist.get_filters(request)[0][3]
        self.assertEqual(force_unicode(filterspec.title), u'is best seller')
        choice = select_by(filterspec.choices(changelist), "display", "No")
        self.assertEqual(choice['selected'], True)
        self.assertEqual(choice['query_string'], '?is_best_seller__exact=0')

        request = self.request_factory.get('/', {'is_best_seller__exact': 1})
        changelist = self.get_changelist(request, Book, modeladmin)

        # Make sure the correct queryset is returned
        queryset = changelist.get_query_set(request)
        self.assertEqual(list(queryset), [self.gipsy_book, self.djangonaut_book])

        # Make sure the correct choice is selected
        filterspec = changelist.get_filters(request)[0][3]
        self.assertEqual(force_unicode(filterspec.title), u'is best seller')
        choice = select_by(filterspec.choices(changelist), "display", "Yes")
        self.assertEqual(choice['selected'], True)
        self.assertEqual(choice['query_string'], '?is_best_seller__exact=1')

        request = self.request_factory.get('/', {'is_best_seller__isnull': 'True'})
        changelist = self.get_changelist(request, Book, modeladmin)

        # Make sure the correct queryset is returned
        queryset = changelist.get_query_set(request)
        self.assertEqual(list(queryset), [self.django_book])

        # Make sure the correct choice is selected
        filterspec = changelist.get_filters(request)[0][3]
        self.assertEqual(force_unicode(filterspec.title), u'is best seller')
        choice = select_by(filterspec.choices(changelist), "display", "Unknown")
        self.assertEqual(choice['selected'], True)
        self.assertEqual(choice['query_string'], '?is_best_seller__isnull=True')

    def test_simplelistfilter(self):
        modeladmin = DecadeFilterBookAdmin(Book, site)

        # Make sure that the first option is 'All' ---------------------------
        request = self.request_factory.get('/', {})
        changelist = self.get_changelist(request, Book, modeladmin)

        # Make sure the correct queryset is returned
        queryset = changelist.get_query_set(request)
        self.assertEqual(list(queryset), list(Book.objects.all().order_by('-id')))

        # Make sure the correct choice is selected
        filterspec = changelist.get_filters(request)[0][1]
        self.assertEqual(force_unicode(filterspec.title), u'publication decade')
        choices = list(filterspec.choices(changelist))
        self.assertEqual(choices[0]['display'], u'All')
        self.assertEqual(choices[0]['selected'], True)
        self.assertEqual(choices[0]['query_string'], '?')

        # Look for books in the 1980s ----------------------------------------
        request = self.request_factory.get('/', {'publication-decade': 'the 80s'})
        changelist = self.get_changelist(request, Book, modeladmin)

        # Make sure the correct queryset is returned
        queryset = changelist.get_query_set(request)
        self.assertEqual(list(queryset), [])

        # Make sure the correct choice is selected
        filterspec = changelist.get_filters(request)[0][1]
        self.assertEqual(force_unicode(filterspec.title), u'publication decade')
        choices = list(filterspec.choices(changelist))
        self.assertEqual(choices[1]['display'], u'the 1980\'s')
        self.assertEqual(choices[1]['selected'], True)
        self.assertEqual(choices[1]['query_string'], '?publication-decade=the+80s')

        # Look for books in the 1990s ----------------------------------------
        request = self.request_factory.get('/', {'publication-decade': 'the 90s'})
        changelist = self.get_changelist(request, Book, modeladmin)

        # Make sure the correct queryset is returned
        queryset = changelist.get_query_set(request)
        self.assertEqual(list(queryset), [self.bio_book])

        # Make sure the correct choice is selected
        filterspec = changelist.get_filters(request)[0][1]
        self.assertEqual(force_unicode(filterspec.title), u'publication decade')
        choices = list(filterspec.choices(changelist))
        self.assertEqual(choices[2]['display'], u'the 1990\'s')
        self.assertEqual(choices[2]['selected'], True)
        self.assertEqual(choices[2]['query_string'], '?publication-decade=the+90s')

        # Look for books in the 2000s ----------------------------------------
        request = self.request_factory.get('/', {'publication-decade': 'the 00s'})
        changelist = self.get_changelist(request, Book, modeladmin)

        # Make sure the correct queryset is returned
        queryset = changelist.get_query_set(request)
        self.assertEqual(list(queryset), [self.gipsy_book, self.djangonaut_book])

        # Make sure the correct choice is selected
        filterspec = changelist.get_filters(request)[0][1]
        self.assertEqual(force_unicode(filterspec.title), u'publication decade')
        choices = list(filterspec.choices(changelist))
        self.assertEqual(choices[3]['display'], u'the 2000\'s')
        self.assertEqual(choices[3]['selected'], True)
        self.assertEqual(choices[3]['query_string'], '?publication-decade=the+00s')

        # Combine multiple filters -------------------------------------------
        request = self.request_factory.get('/', {'publication-decade': 'the 00s', 'author__id__exact': self.alfred.pk})
        changelist = self.get_changelist(request, Book, modeladmin)

        # Make sure the correct queryset is returned
        queryset = changelist.get_query_set(request)
        self.assertEqual(list(queryset), [self.djangonaut_book])

        # Make sure the correct choices are selected
        filterspec = changelist.get_filters(request)[0][1]
        self.assertEqual(force_unicode(filterspec.title), u'publication decade')
        choices = list(filterspec.choices(changelist))
        self.assertEqual(choices[3]['display'], u'the 2000\'s')
        self.assertEqual(choices[3]['selected'], True)
        self.assertEqual(choices[3]['query_string'], '?publication-decade=the+00s&author__id__exact=%s' % self.alfred.pk)

        filterspec = changelist.get_filters(request)[0][0]
        self.assertEqual(force_unicode(filterspec.title), u'Verbose Author')
        choice = select_by(filterspec.choices(changelist), "display", "alfred")
        self.assertEqual(choice['selected'], True)
        self.assertEqual(choice['query_string'], '?publication-decade=the+00s&author__id__exact=%s' % self.alfred.pk)

    def test_listfilter_without_title(self):
        """
        Any filter must define a title.
        """
        modeladmin = DecadeFilterBookAdminWithoutTitle(Book, site)
        request = self.request_factory.get('/', {})
        self.assertRaisesRegexp(ImproperlyConfigured,
            "The list filter 'DecadeListFilterWithoutTitle' does not specify a 'title'.",
            self.get_changelist, request, Book, modeladmin)

    def test_simplelistfilter_without_parameter(self):
        """
        Any SimpleListFilter must define a parameter_name.
        """
        modeladmin = DecadeFilterBookAdminWithoutParameter(Book, site)
        request = self.request_factory.get('/', {})
        self.assertRaisesRegexp(ImproperlyConfigured,
            "The list filter 'DecadeListFilterWithoutParameter' does not specify a 'parameter_name'.",
            self.get_changelist, request, Book, modeladmin)

    def test_simplelistfilter_with_none_returning_lookups(self):
        """
        A SimpleListFilter lookups method can return None but disables the
        filter completely.
        """
        modeladmin = DecadeFilterBookAdminWithNoneReturningLookups(Book, site)
        request = self.request_factory.get('/', {})
        changelist = self.get_changelist(request, Book, modeladmin)
        filterspec = changelist.get_filters(request)[0]
        self.assertEqual(len(filterspec), 0)

    def test_filter_with_failing_queryset(self):
        """
        Ensure that when a filter's queryset method fails, it fails loudly and
        the corresponding exception doesn't get swallowed.
        Refs #17828.
        """
        modeladmin = DecadeFilterBookAdminWithFailingQueryset(Book, site)
        request = self.request_factory.get('/', {})
        self.assertRaises(ZeroDivisionError, self.get_changelist, request, Book, modeladmin)

    def test_simplelistfilter_with_queryset_based_lookups(self):
        modeladmin = DecadeFilterBookAdminWithQuerysetBasedLookups(Book, site)
        request = self.request_factory.get('/', {})
        changelist = self.get_changelist(request, Book, modeladmin)
        filterspec = changelist.get_filters(request)[0][0]
        self.assertEqual(force_unicode(filterspec.title), u'publication decade')
        choices = list(filterspec.choices(changelist))
        self.assertEqual(len(choices), 3)

        self.assertEqual(choices[0]['display'], u'All')
        self.assertEqual(choices[0]['selected'], True)
        self.assertEqual(choices[0]['query_string'], '?')

        self.assertEqual(choices[1]['display'], u'the 1990\'s')
        self.assertEqual(choices[1]['selected'], False)
        self.assertEqual(choices[1]['query_string'], '?publication-decade=the+90s')

        self.assertEqual(choices[2]['display'], u'the 2000\'s')
        self.assertEqual(choices[2]['selected'], False)
        self.assertEqual(choices[2]['query_string'], '?publication-decade=the+00s')

    def test_two_characters_long_field(self):
        """
        Ensure that list_filter works with two-characters long field names.
        Refs #16080.
        """
        modeladmin = BookAdmin(Book, site)
        request = self.request_factory.get('/', {'no': '207'})
        changelist = self.get_changelist(request, Book, modeladmin)

        # Make sure the correct queryset is returned
        queryset = changelist.get_query_set(request)
        self.assertEqual(list(queryset), [self.bio_book])

        filterspec = changelist.get_filters(request)[0][-1]
        self.assertEqual(force_unicode(filterspec.title), u'number')
        choices = list(filterspec.choices(changelist))
        self.assertEqual(choices[2]['selected'], True)
        self.assertEqual(choices[2]['query_string'], '?no=207')

    def test_parameter_ends_with__in__or__isnull(self):
        """
        Ensure that a SimpleListFilter's parameter name is not mistaken for a
        model field if it ends with '__isnull' or '__in'.
        Refs #17091.
        """
        # When it ends with '__in' -----------------------------------------
        modeladmin = DecadeFilterBookAdminParameterEndsWith__In(Book, site)
        request = self.request_factory.get('/', {'decade__in': 'the 90s'})
        changelist = self.get_changelist(request, Book, modeladmin)

        # Make sure the correct queryset is returned
        queryset = changelist.get_query_set(request)
        self.assertEqual(list(queryset), [self.bio_book])

        # Make sure the correct choice is selected
        filterspec = changelist.get_filters(request)[0][0]
        self.assertEqual(force_unicode(filterspec.title), u'publication decade')
        choices = list(filterspec.choices(changelist))
        self.assertEqual(choices[2]['display'], u'the 1990\'s')
        self.assertEqual(choices[2]['selected'], True)
        self.assertEqual(choices[2]['query_string'], '?decade__in=the+90s')

        # When it ends with '__isnull' ---------------------------------------
        modeladmin = DecadeFilterBookAdminParameterEndsWith__Isnull(Book, site)
        request = self.request_factory.get('/', {'decade__isnull': 'the 90s'})
        changelist = self.get_changelist(request, Book, modeladmin)

        # Make sure the correct queryset is returned
        queryset = changelist.get_query_set(request)
        self.assertEqual(list(queryset), [self.bio_book])

        # Make sure the correct choice is selected
        filterspec = changelist.get_filters(request)[0][0]
        self.assertEqual(force_unicode(filterspec.title), u'publication decade')
        choices = list(filterspec.choices(changelist))
        self.assertEqual(choices[2]['display'], u'the 1990\'s')
        self.assertEqual(choices[2]['selected'], True)
        self.assertEqual(choices[2]['query_string'], '?decade__isnull=the+90s')

    def test_fk_with_to_field(self):
        """
        Ensure that a filter on a FK respects the FK's to_field attribute.
        Refs #17972.
        """
        modeladmin = EmployeeAdmin(Employee, site)

        dev = Department.objects.create(code='DEV', description='Development')
        design = Department.objects.create(code='DSN', description='Design')
        john = Employee.objects.create(name='John Blue', department=dev)
        jack = Employee.objects.create(name='Jack Red', department=design)

        request = self.request_factory.get('/', {})
        changelist = self.get_changelist(request, Employee, modeladmin)

        # Make sure the correct queryset is returned
        queryset = changelist.get_query_set(request)
        self.assertEqual(list(queryset), [jack, john])

        filterspec = changelist.get_filters(request)[0][-1]
        self.assertEqual(force_unicode(filterspec.title), u'department')
        choices = list(filterspec.choices(changelist))

        self.assertEqual(choices[0]['display'], u'All')
        self.assertEqual(choices[0]['selected'], True)
        self.assertEqual(choices[0]['query_string'], '?')

        self.assertEqual(choices[1]['display'], u'Development')
        self.assertEqual(choices[1]['selected'], False)
        self.assertEqual(choices[1]['query_string'], '?department__code__exact=DEV')

        self.assertEqual(choices[2]['display'], u'Design')
        self.assertEqual(choices[2]['selected'], False)
        self.assertEqual(choices[2]['query_string'], '?department__code__exact=DSN')

        # Filter by Department=='Development' --------------------------------
        request = self.request_factory.get('/', {'department__code__exact': 'DEV'})
        changelist = self.get_changelist(request, Employee, modeladmin)

        # Make sure the correct queryset is returned
        queryset = changelist.get_query_set(request)
        self.assertEqual(list(queryset), [john])

        filterspec = changelist.get_filters(request)[0][-1]
        self.assertEqual(force_unicode(filterspec.title), u'department')
        choices = list(filterspec.choices(changelist))

        self.assertEqual(choices[0]['display'], u'All')
        self.assertEqual(choices[0]['selected'], False)
        self.assertEqual(choices[0]['query_string'], '?')

        self.assertEqual(choices[1]['display'], u'Development')
        self.assertEqual(choices[1]['selected'], True)
        self.assertEqual(choices[1]['query_string'], '?department__code__exact=DEV')

        self.assertEqual(choices[2]['display'], u'Design')
        self.assertEqual(choices[2]['selected'], False)
        self.assertEqual(choices[2]['query_string'], '?department__code__exact=DSN')
| bsd-3-clause |
FederatedAI/FATE | examples/pipeline/hetero_nn/pipeline-hetero-nn-train-binary-floating_point_precision.py | 1 | 4822 | #
# Copyright 2019 The FATE Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import argparse
from pipeline.runtime.entity import JobParameters
from tensorflow.keras import initializers
from tensorflow.keras import optimizers
from tensorflow.keras.layers import Dense
from pipeline.backend.pipeline import PipeLine
from pipeline.component import DataIO
from pipeline.component import Evaluation
from pipeline.component import HeteroNN
from pipeline.component import Intersection
from pipeline.component import Reader
from pipeline.interface import Data
from pipeline.interface import Model
from pipeline.utils.tools import load_job_config
def main(config="../../config.yaml", namespace=""):
    """Build and run a hetero (vertical) NN binary-classification pipeline.

    Trains on the breast_hetero guest/host tables with fixed-point encoding
    of floats enabled (floating_point_precision=23), then predicts with the
    trained model and evaluates.

    :param config: path to a job-config YAML, or an already-loaded config object.
    :param namespace: suffix appended to the data namespace (test isolation).
    """
    # obtain config
    if isinstance(config, str):
        config = load_job_config(config)
    parties = config.parties
    guest = parties.guest[0]
    host = parties.host[0]
    backend = config.backend
    work_mode = config.work_mode
    guest_train_data = {"name": "breast_hetero_guest", "namespace": f"experiment{namespace}"}
    host_train_data = {"name": "breast_hetero_host", "namespace": f"experiment{namespace}"}
    # Guest initiates the job; both parties participate.
    pipeline = PipeLine().set_initiator(role='guest', party_id=guest).set_roles(guest=guest, host=host)
    reader_0 = Reader(name="reader_0")
    reader_0.get_party_instance(role='guest', party_id=guest).component_param(table=guest_train_data)
    reader_0.get_party_instance(role='host', party_id=host).component_param(table=host_train_data)
    dataio_0 = DataIO(name="dataio_0")
    # Only the guest holds labels in the hetero (vertical-federation) setting.
    dataio_0.get_party_instance(role='guest', party_id=guest).component_param(with_label=True)
    dataio_0.get_party_instance(role='host', party_id=host).component_param(with_label=False)
    intersection_0 = Intersection(name="intersection_0")
    # batch_size=-1 means full-batch training.
    hetero_nn_0 = HeteroNN(name="hetero_nn_0", epochs=100,
                           interactive_layer_lr=0.15, batch_size=-1, early_stop="diff",
                           floating_point_precision=23)
    guest_nn_0 = hetero_nn_0.get_party_instance(role='guest', party_id=guest)
    guest_nn_0.add_bottom_model(Dense(units=3, input_shape=(10,), activation="relu",
                                      kernel_initializer=initializers.Constant(value=1)))
    # NOTE: "interactve" (sic) is the upstream FATE API spelling -- do not "fix" it.
    guest_nn_0.set_interactve_layer(Dense(units=2, input_shape=(2,),
                                          kernel_initializer=initializers.Constant(value=1)))
    guest_nn_0.add_top_model(Dense(units=1, input_shape=(2,), activation="sigmoid",
                                   kernel_initializer=initializers.Constant(value=1)))
    host_nn_0 = hetero_nn_0.get_party_instance(role='host', party_id=host)
    host_nn_0.add_bottom_model(Dense(units=3, input_shape=(20,), activation="relu",
                                     kernel_initializer=initializers.Constant(value=1)))
    host_nn_0.set_interactve_layer(Dense(units=2, input_shape=(2,),
                                         kernel_initializer=initializers.Constant(value=1)))
    hetero_nn_0.compile(optimizer=optimizers.SGD(lr=0.15), metrics=["AUC"], loss="binary_crossentropy")
    # hetero_nn_1 re-uses hetero_nn_0's trained model to predict on the same data.
    hetero_nn_1 = HeteroNN(name="hetero_nn_1")
    evaluation_0 = Evaluation(name="evaluation_0")
    # Component wiring: reader -> dataio -> intersection -> train -> predict/eval.
    pipeline.add_component(reader_0)
    pipeline.add_component(dataio_0, data=Data(data=reader_0.output.data))
    pipeline.add_component(intersection_0, data=Data(data=dataio_0.output.data))
    pipeline.add_component(hetero_nn_0, data=Data(train_data=intersection_0.output.data))
    pipeline.add_component(hetero_nn_1, data=Data(test_data=intersection_0.output.data),
                           model=Model(model=hetero_nn_0.output.model))
    pipeline.add_component(evaluation_0, data=Data(data=hetero_nn_0.output.data))
    pipeline.compile()
    job_parameters = JobParameters(backend=backend, work_mode=work_mode)
    pipeline.fit(job_parameters)
    print(hetero_nn_0.get_config(roles={"guest": [guest],
                                        "host": [host]}))
    print(pipeline.get_component("hetero_nn_0").get_summary())
if __name__ == "__main__":
    # Optional -config flag lets the caller point at a non-default job config.
    parser = argparse.ArgumentParser("PIPELINE DEMO")
    parser.add_argument("-config", type=str,
                        help="config file")
    args = parser.parse_args()
    if args.config is not None:
        main(args.config)
    else:
        main()
| apache-2.0 |
amenonsen/ansible | lib/ansible/modules/remote_management/oneview/oneview_fc_network.py | 151 | 3885 | #!/usr/bin/python
# Copyright (c) 2016-2017 Hewlett Packard Enterprise Development LP
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: oneview_fc_network
short_description: Manage OneView Fibre Channel Network resources.
description:
- Provides an interface to manage Fibre Channel Network resources. Can create, update, and delete.
version_added: "2.4"
requirements:
- "hpOneView >= 4.0.0"
author: "Felipe Bulsoni (@fgbulsoni)"
options:
state:
description:
- Indicates the desired state for the Fibre Channel Network resource.
C(present) will ensure data properties are compliant with OneView.
C(absent) will remove the resource from OneView, if it exists.
choices: ['present', 'absent']
data:
description:
- List with the Fibre Channel Network properties.
required: true
extends_documentation_fragment:
- oneview
- oneview.validateetag
'''
EXAMPLES = '''
- name: Ensure that the Fibre Channel Network is present using the default configuration
oneview_fc_network:
config: "{{ config_file_path }}"
state: present
data:
name: 'New FC Network'
- name: Ensure that the Fibre Channel Network is present with fabricType 'DirectAttach'
oneview_fc_network:
config: "{{ config_file_path }}"
state: present
data:
name: 'New FC Network'
fabricType: 'DirectAttach'
- name: Ensure that the Fibre Channel Network is present and is inserted in the desired scopes
oneview_fc_network:
config: "{{ config_file_path }}"
state: present
data:
name: 'New FC Network'
scopeUris:
- '/rest/scopes/00SC123456'
- '/rest/scopes/01SC123456'
- name: Ensure that the Fibre Channel Network is absent
oneview_fc_network:
config: "{{ config_file_path }}"
state: absent
data:
name: 'New FC Network'
'''
RETURN = '''
fc_network:
description: Has the facts about the managed OneView FC Network.
returned: On state 'present'. Can be null.
type: dict
'''
from ansible.module_utils.oneview import OneViewModuleBase
class FcNetworkModule(OneViewModuleBase):
    """Ansible module implementation that manages OneView FC Network resources."""

    MSG_CREATED = 'FC Network created successfully.'
    MSG_UPDATED = 'FC Network updated successfully.'
    MSG_DELETED = 'FC Network deleted successfully.'
    MSG_ALREADY_PRESENT = 'FC Network is already present.'
    MSG_ALREADY_ABSENT = 'FC Network is already absent.'
    RESOURCE_FACT_NAME = 'fc_network'

    def __init__(self):
        """Declare the module's extra arguments and bind the FC-network client."""
        super(FcNetworkModule, self).__init__(
            additional_arg_spec=dict(
                data=dict(required=True, type='dict'),
                state=dict(
                    required=True,
                    choices=['present', 'absent'])),
            validate_etag_support=True)
        self.resource_client = self.oneview_client.fc_networks

    def execute_module(self):
        """Look up the named network and converge it to the requested state."""
        resource = self.get_by_name(self.data['name'])
        if self.state != 'present':
            return self.resource_absent(resource)
        return self._present(resource)

    def _present(self, resource):
        """Ensure the network exists, then apply any requested scope URIs."""
        # 'scopeUris' must not reach resource_present(): scopes are applied
        # through a dedicated endpoint, hence the pop() before converging.
        requested_scopes = self.data.pop('scopeUris', None)
        outcome = self.resource_present(resource, self.RESOURCE_FACT_NAME)
        if requested_scopes is not None:
            outcome = self.resource_scopes_set(outcome, 'fc_network', requested_scopes)
        return outcome
def main():
    """Module entry point: build the module wrapper and execute it."""
    FcNetworkModule().run()


if __name__ == '__main__':
    main()
| gpl-3.0 |
bgris/ODL_bgris | lib/python3.5/reprlib.py | 46 | 5336 | """Redo the builtin repr() (representation) but with limits on most sizes."""
__all__ = ["Repr", "repr", "recursive_repr"]
import builtins
from itertools import islice
try:
from _thread import get_ident
except ImportError:
from _dummy_thread import get_ident
def recursive_repr(fillvalue='...'):
    """Decorator factory: make a repr function return *fillvalue* when it
    is re-entered recursively (e.g. a container that contains itself)."""

    def decorating_function(user_function):
        # Keys currently being repr'd: (object id, thread id) pairs, so the
        # recursion guard is per-object *and* per-thread.
        active = set()

        def wrapper(self):
            marker = (id(self), get_ident())
            if marker in active:
                return fillvalue
            active.add(marker)
            try:
                return user_function(self)
            finally:
                active.discard(marker)

        # functools.wraps() is unavailable here because of bootstrap issues,
        # so the relevant metadata is copied over by hand.
        wrapper.__module__ = getattr(user_function, '__module__')
        wrapper.__doc__ = getattr(user_function, '__doc__')
        wrapper.__name__ = getattr(user_function, '__name__')
        wrapper.__qualname__ = getattr(user_function, '__qualname__')
        wrapper.__annotations__ = getattr(user_function, '__annotations__', {})
        return wrapper

    return decorating_function
class Repr:
    """Produce repr()-like strings with per-type limits on output size."""

    def __init__(self):
        # Maximum nesting depth, followed by per-type element/character caps.
        self.maxlevel = 6
        self.maxtuple = 6
        self.maxlist = 6
        self.maxarray = 5
        self.maxdict = 4
        self.maxset = 6
        self.maxfrozenset = 6
        self.maxdeque = 6
        self.maxstring = 30
        self.maxlong = 40
        self.maxother = 30

    def repr(self, x):
        """Entry point: repr *x* with at most self.maxlevel nesting levels."""
        return self.repr1(x, self.maxlevel)

    def repr1(self, x, level):
        """Dispatch on the type name to a ``repr_<typename>`` method if one exists."""
        typename = type(x).__name__
        if ' ' in typename:
            # e.g. "wrapper descriptor" -> "wrapper_descriptor"
            typename = '_'.join(typename.split())
        method = getattr(self, 'repr_' + typename, None)
        if method is not None:
            return method(x, level)
        return self.repr_instance(x, level)

    def _repr_iterable(self, x, level, left, right, maxiter, trail=''):
        """Shared worker: join up to *maxiter* element reprs between delimiters."""
        n = len(x)
        if level <= 0 and n:
            body = '...'
        else:
            newlevel = level - 1
            pieces = [self.repr1(elem, newlevel) for elem in islice(x, maxiter)]
            if n > maxiter:
                pieces.append('...')
            body = ', '.join(pieces)
            if n == 1 and trail:
                # e.g. the trailing comma of a one-element tuple
                right = trail + right
        return '%s%s%s' % (left, body, right)

    def repr_tuple(self, x, level):
        return self._repr_iterable(x, level, '(', ')', self.maxtuple, ',')

    def repr_list(self, x, level):
        return self._repr_iterable(x, level, '[', ']', self.maxlist)

    def repr_array(self, x, level):
        if not x:
            return "array('%s')" % x.typecode
        header = "array('%s', [" % x.typecode
        return self._repr_iterable(x, level, header, '])', self.maxarray)

    def repr_set(self, x, level):
        if not x:
            return 'set()'
        return self._repr_iterable(_possibly_sorted(x), level, '{', '}',
                                   self.maxset)

    def repr_frozenset(self, x, level):
        if not x:
            return 'frozenset()'
        return self._repr_iterable(_possibly_sorted(x), level, 'frozenset({',
                                   '})', self.maxfrozenset)

    def repr_deque(self, x, level):
        return self._repr_iterable(x, level, 'deque([', '])', self.maxdeque)

    def repr_dict(self, x, level):
        n = len(x)
        if n == 0:
            return '{}'
        if level <= 0:
            return '{...}'
        newlevel = level - 1
        pieces = []
        for key in islice(_possibly_sorted(x), self.maxdict):
            pieces.append('%s: %s' % (self.repr1(key, newlevel),
                                      self.repr1(x[key], newlevel)))
        if n > self.maxdict:
            pieces.append('...')
        return '{%s}' % (', '.join(pieces),)

    def repr_str(self, x, level):
        # Truncate in the middle so both the start and the end stay visible.
        s = builtins.repr(x[:self.maxstring])
        if len(s) > self.maxstring:
            i = max(0, (self.maxstring - 3) // 2)
            j = max(0, self.maxstring - 3 - i)
            s = builtins.repr(x[:i] + x[len(x) - j:])
            s = s[:i] + '...' + s[len(s) - j:]
        return s

    def repr_int(self, x, level):
        s = builtins.repr(x)  # XXX Hope this isn't too slow...
        if len(s) > self.maxlong:
            i = max(0, (self.maxlong - 3) // 2)
            j = max(0, self.maxlong - 3 - i)
            s = s[:i] + '...' + s[len(s) - j:]
        return s

    def repr_instance(self, x, level):
        try:
            s = builtins.repr(x)
        except Exception:
            # Bugs in x.__repr__() can raise arbitrary exceptions --
            # fall back to a made-up but safe representation.
            return '<%s instance at %#x>' % (x.__class__.__name__, id(x))
        if len(s) > self.maxother:
            i = max(0, (self.maxother - 3) // 2)
            j = max(0, self.maxother - 3 - i)
            s = s[:i] + '...' + s[len(s) - j:]
        return s
def _possibly_sorted(x):
# Since not all sequences of items can be sorted and comparison
# functions may raise arbitrary exceptions, return an unsorted
# sequence in that case.
try:
return sorted(x)
except Exception:
return list(x)
# Shared module-level instance; ``repr`` deliberately shadows the builtin
# within this module's public API (mirrors the stdlib reprlib interface).
aRepr = Repr()
repr = aRepr.repr
| gpl-3.0 |
jlillest/geodjango-tigerleaflet-example | geodjango_tigerleaflet_example/contrib/sites/migrations/0001_initial.py | 378 | 1134 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
import django.contrib.sites.models
from django.contrib.sites.models import _simple_domain_name_validator
from django.db import migrations, models
class Migration(migrations.Migration):
    """Initial migration for the sites framework.

    Creates the ``django_site`` table with its domain/name columns.
    Migrations are frozen history: do not edit the operations below.
    """

    # No dependencies: this is the app's first migration.
    dependencies = []
    operations = [
        migrations.CreateModel(
            name='Site',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('domain', models.CharField(
                    max_length=100, verbose_name='domain name', validators=[_simple_domain_name_validator]
                )),
                ('name', models.CharField(max_length=50, verbose_name='display name')),
            ],
            options={
                'ordering': ('domain',),
                'db_table': 'django_site',
                'verbose_name': 'site',
                'verbose_name_plural': 'sites',
            },
            bases=(models.Model,),
            managers=[
                ('objects', django.contrib.sites.models.SiteManager()),
            ],
        ),
    ]
| mit |
broferek/ansible | lib/ansible/module_utils/dimensiondata.py | 47 | 10978 | # -*- coding: utf-8 -*-
#
# Copyright (c) 2016 Dimension Data
#
# This module is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This software is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this software. If not, see <http://www.gnu.org/licenses/>.
#
# Authors:
# - Aimon Bustardo <aimon.bustardo@dimensiondata.com>
# - Mark Maglana <mmaglana@gmail.com>
# - Adam Friedman <tintoy@tintoy.io>
#
# Common functionality to be used by versious module components
import os
import re
import traceback
from ansible.module_utils.basic import AnsibleModule, missing_required_lib
from ansible.module_utils.six.moves import configparser
from os.path import expanduser
from uuid import UUID
LIBCLOUD_IMP_ERR = None
try:
from libcloud.common.dimensiondata import API_ENDPOINTS, DimensionDataAPIException, DimensionDataStatus
from libcloud.compute.base import Node, NodeLocation
from libcloud.compute.providers import get_driver
from libcloud.compute.types import Provider
import libcloud.security
HAS_LIBCLOUD = True
except ImportError:
LIBCLOUD_IMP_ERR = traceback.format_exc()
HAS_LIBCLOUD = False
# MCP 2.x version pattern for location (datacenter) names, e.g. "... MCP 2 ...".
#
# Note that this is not a totally reliable way of determining MCP version.
# Unfortunately, libcloud's NodeLocation currently makes no provision for extended properties.
# At some point we may therefore want to either enhance libcloud or enable overriding mcp_version
# by specifying it in the module parameters.
MCP_2_LOCATION_NAME_PATTERN = re.compile(r".*MCP\s?2.*")
class DimensionDataModule(object):
    """
    The base class containing common functionality used by Dimension Data modules for Ansible.
    """

    def __init__(self, module):
        """
        Create a new DimensionDataModule.

        Will fail if Apache libcloud is not present.

        :param module: The underlying Ansible module.
        :type module: AnsibleModule
        """
        self.module = module

        if not HAS_LIBCLOUD:
            self.module.fail_json(msg=missing_required_lib('libcloud'), exception=LIBCLOUD_IMP_ERR)

        # Credentials are common to all Dimension Data modules.
        credentials = self.get_credentials()
        self.user_id = credentials['user_id']
        self.key = credentials['key']

        # Region and location are common to all Dimension Data modules.
        # libcloud expects the 'dd-' prefixed endpoint name.
        region = self.module.params['region']
        self.region = 'dd-{0}'.format(region)
        self.location = self.module.params['location']

        libcloud.security.VERIFY_SSL_CERT = self.module.params['validate_certs']

        self.driver = get_driver(Provider.DIMENSIONDATA)(
            self.user_id,
            self.key,
            region=self.region
        )

        # Determine the MCP API version (this depends on the target datacenter).
        self.mcp_version = self.get_mcp_version(self.location)

        # Optional "wait-for-completion" arguments (only present when the
        # module used argument_spec_with_wait()).
        if 'wait' in self.module.params:
            self.wait = self.module.params['wait']
            self.wait_time = self.module.params['wait_time']
            self.wait_poll_interval = self.module.params['wait_poll_interval']
        else:
            self.wait = False
            self.wait_time = 0
            self.wait_poll_interval = 0

    def get_credentials(self):
        """
        Get user_id and key from module configuration, environment, or dotfile.
        Order of priority is module, environment, dotfile.

        To set in environment:

            export MCP_USER='myusername'
            export MCP_PASSWORD='mypassword'

        To set in dot file place a file at ~/.dimensiondata with
        the following contents:

            [dimensiondatacloud]
            MCP_USER: myusername
            MCP_PASSWORD: mypassword
        """
        if not HAS_LIBCLOUD:
            self.module.fail_json(msg='libcloud is required for this module.')

        user_id = None
        key = None

        # First, try the module configuration
        if 'mcp_user' in self.module.params:
            if 'mcp_password' not in self.module.params:
                self.module.fail_json(
                    msg='"mcp_user" parameter was specified, but not "mcp_password" (either both must be specified, or neither).'
                )

            user_id = self.module.params['mcp_user']
            key = self.module.params['mcp_password']

        # Fall back to environment
        if not user_id or not key:
            user_id = os.environ.get('MCP_USER', None)
            key = os.environ.get('MCP_PASSWORD', None)

        # Finally, try dotfile (~/.dimensiondata)
        if not user_id or not key:
            home = expanduser('~')
            config = configparser.RawConfigParser()
            config.read("%s/.dimensiondata" % home)

            try:
                user_id = config.get("dimensiondatacloud", "MCP_USER")
                key = config.get("dimensiondatacloud", "MCP_PASSWORD")
            except (configparser.NoSectionError, configparser.NoOptionError):
                pass

        # One or more credentials not found. Function can't recover from this
        # so it has to raise an error instead of fail silently.
        if not user_id:
            raise MissingCredentialsError("Dimension Data user id not found")
        elif not key:
            raise MissingCredentialsError("Dimension Data key not found")

        # Both found, return data
        return dict(user_id=user_id, key=key)

    def get_mcp_version(self, location):
        """
        Get the MCP version for the specified location.
        """
        # Name-based heuristic; see MCP_2_LOCATION_NAME_PATTERN above.
        location = self.driver.ex_get_location_by_id(location)
        if MCP_2_LOCATION_NAME_PATTERN.match(location.name):
            return '2.0'
        return '1.0'

    def get_network_domain(self, locator, location):
        """
        Retrieve a network domain by its name or Id.

        :param locator: either a UUID (direct lookup) or a display name.
        :raises UnknownNetworkError: when no matching network domain exists.
        """
        if is_uuid(locator):
            network_domain = self.driver.ex_get_network_domain(locator)
        else:
            matching_network_domains = [
                network_domain for network_domain in self.driver.ex_list_network_domains(location=location)
                if network_domain.name == locator
            ]

            if matching_network_domains:
                network_domain = matching_network_domains[0]
            else:
                network_domain = None

        if network_domain:
            return network_domain

        raise UnknownNetworkError("Network '%s' could not be found" % locator)

    def get_vlan(self, locator, location, network_domain):
        """
        Get a VLAN object by its name or id

        :param locator: either a UUID (direct lookup) or a display name.
        :raises UnknownVLANError: when no matching VLAN exists.
        """
        if is_uuid(locator):
            vlan = self.driver.ex_get_vlan(locator)
        else:
            matching_vlans = [
                vlan for vlan in self.driver.ex_list_vlans(location, network_domain)
                if vlan.name == locator
            ]

            if matching_vlans:
                vlan = matching_vlans[0]
            else:
                vlan = None

        if vlan:
            return vlan

        raise UnknownVLANError("VLAN '%s' could not be found" % locator)

    @staticmethod
    def argument_spec(**additional_argument_spec):
        """
        Build an argument specification for a Dimension Data module.
        :param additional_argument_spec: An optional dictionary representing the specification for additional module arguments (if any).
        :return: A dict containing the argument specification.
        """
        spec = dict(
            region=dict(type='str', default='na'),
            mcp_user=dict(type='str', required=False),
            mcp_password=dict(type='str', required=False, no_log=True),
            location=dict(type='str', required=True),
            validate_certs=dict(type='bool', required=False, default=True)
        )

        if additional_argument_spec:
            spec.update(additional_argument_spec)

        return spec

    @staticmethod
    def argument_spec_with_wait(**additional_argument_spec):
        """
        Build an argument specification for a Dimension Data module that includes "wait for completion" arguments.
        :param additional_argument_spec: An optional dictionary representing the specification for additional module arguments (if any).
        :return: A dict containing the argument specification.
        """
        spec = DimensionDataModule.argument_spec(
            wait=dict(type='bool', required=False, default=False),
            wait_time=dict(type='int', required=False, default=600),
            wait_poll_interval=dict(type='int', required=False, default=2)
        )

        if additional_argument_spec:
            spec.update(additional_argument_spec)

        return spec

    @staticmethod
    def required_together(*additional_required_together):
        """
        Get the basic argument specification for Dimension Data modules indicating which arguments are must be specified together.
        :param additional_required_together: An optional list representing the specification for additional module arguments that must be specified together.
        :return: An array containing the argument specifications.
        """
        required_together = [
            ['mcp_user', 'mcp_password']
        ]

        if additional_required_together:
            required_together.extend(additional_required_together)

        return required_together
class LibcloudNotFound(Exception):
    """Raised when Apache libcloud cannot be found/imported."""
class MissingCredentialsError(Exception):
    """Raised when credentials for Dimension Data CloudControl cannot be found."""
class UnknownNetworkError(Exception):
    """Raised when a network or network domain cannot be found."""
class UnknownVLANError(Exception):
    """Raised when a VLAN cannot be found."""
def get_dd_regions():
    """Return region codes for Dimension Data endpoints.

    Dimension Data endpoints carry a 'dd-' prefix in libcloud's
    API_ENDPOINTS table; the prefix is stripped from the result.
    """
    prefix = 'dd-'
    return [endpoint[len(prefix):]
            for endpoint in API_ENDPOINTS.keys()
            if endpoint.startswith(prefix)]
def is_uuid(u, version=4):
    """
    Test whether *u* is the canonical string form of a UUID of the given version.

    Returns False instead of raising for non-string input, and for strings
    whose version/variant bits do not round-trip (e.g. a v1 UUID string when
    version=4 is requested), because UUID() forces those bits.

    :param u: candidate value (expected: str).
    :param version: UUID version to validate against (default 4).
    :return: True if *u* round-trips through uuid.UUID unchanged.
    """
    try:
        uuid_obj = UUID(u, version=version)
    except (ValueError, AttributeError, TypeError):
        # Not parseable as a UUID at all -- including non-string input,
        # which previously leaked a TypeError/AttributeError to the caller.
        return False
    return str(uuid_obj) == u
| gpl-3.0 |
sgzsh269/django | django/utils/inspect.py | 323 | 4195 | from __future__ import absolute_import
import inspect
from django.utils import six
def getargspec(func):
    """Emulate ``inspect.getargspec`` via ``inspect.signature`` on Python 3.

    Returns an (args, varargs, varkw, defaults) tuple; ``defaults`` is None
    when no positional-or-keyword parameter has a default value.
    """
    if six.PY2:
        return inspect.getargspec(func)

    args = []
    varargs = None
    varkw = None
    defaults = []
    # Single pass over the signature, bucketing parameters by kind.
    for param in inspect.signature(func).parameters.values():
        if param.kind == inspect.Parameter.POSITIONAL_OR_KEYWORD:
            args.append(param.name)
            if param.default is not param.empty:
                defaults.append(param.default)
        elif param.kind == inspect.Parameter.VAR_POSITIONAL:
            varargs = param.name
        elif param.kind == inspect.Parameter.VAR_KEYWORD:
            varkw = param.name
    return args, varargs, varkw, defaults or None
def get_func_args(func):
    """Return the names of *func*'s positional-or-keyword parameters.

    On Python 2 the leading 'self' entry is dropped; on Python 3,
    ``signature()`` already omits 'self' for bound methods.
    """
    if six.PY2:
        argspec = inspect.getargspec(func)
        return argspec.args[1:]  # ignore 'self'

    params = inspect.signature(func).parameters.values()
    return [param.name for param in params
            if param.kind == inspect.Parameter.POSITIONAL_OR_KEYWORD]
def get_func_full_args(func):
    """
    Return a list of (argument name, default value) tuples. If the argument
    does not have a default value, omit it in the tuple. Arguments such as
    *args and **kwargs are also included.
    """
    if six.PY2:
        argspec = inspect.getargspec(func)
        args = argspec.args[1:]  # ignore 'self'
        defaults = argspec.defaults or []
        # Split args into two lists depending on whether they have default value
        no_default = args[:len(args) - len(defaults)]
        with_default = args[len(args) - len(defaults):]
        # Join the two lists and combine it with default values
        # (NOTE: zip() returns a list on PY2, so the concatenation is valid
        # here; this branch must never run on PY3.)
        args = [(arg,) for arg in no_default] + zip(with_default, defaults)
        # Add possible *args and **kwargs and prepend them with '*' or '**'
        varargs = [('*' + argspec.varargs,)] if argspec.varargs else []
        kwargs = [('**' + argspec.keywords,)] if argspec.keywords else []
        return args + varargs + kwargs

    sig = inspect.signature(func)
    args = []
    for arg_name, param in sig.parameters.items():
        name = arg_name
        # Ignore 'self'
        if name == 'self':
            continue
        # Star-prefix variadic parameters so callers can render signatures.
        if param.kind == inspect.Parameter.VAR_POSITIONAL:
            name = '*' + name
        elif param.kind == inspect.Parameter.VAR_KEYWORD:
            name = '**' + name
        if param.default != inspect.Parameter.empty:
            args.append((name, param.default))
        else:
            args.append((name,))
    return args
def func_accepts_kwargs(func):
    """Return True if *func* accepts **kwargs.

    On Python 2, callables that cannot be introspected at all are treated
    as kwargs-friendly so that valid-but-weird callables stay registrable.
    """
    if six.PY2:
        # Not all callables are inspectable with getargspec; try the
        # callable itself, then its __call__, before giving up.
        try:
            argspec = inspect.getargspec(func)
        except TypeError:
            try:
                argspec = inspect.getargspec(func.__call__)
            except (TypeError, AttributeError):
                argspec = None
        return not argspec or argspec[2] is not None

    return any(
        param.kind == param.VAR_KEYWORD
        for param in inspect.signature(func).parameters.values()
    )
def func_accepts_var_args(func):
    """
    Return True if function 'func' accepts positional arguments *args.
    """
    if six.PY2:
        return inspect.getargspec(func)[1] is not None

    return any(
        param.kind == param.VAR_POSITIONAL
        for param in inspect.signature(func).parameters.values()
    )
def func_has_no_args(func):
    """Return True if *func* takes exactly one positional-or-keyword argument.

    NOTE(review): the single allowed argument is presumably a bound 'self' --
    the name says "no args" but the check is len == 1; confirm against callers.
    """
    if six.PY2:
        args = inspect.getargspec(func)[0]
    else:
        args = [
            param for param in inspect.signature(func).parameters.values()
            if param.kind == param.POSITIONAL_OR_KEYWORD
        ]
    return len(args) == 1
def func_supports_parameter(func, parameter):
    """Return True if *func* has a parameter named *parameter*."""
    if six.PY3:
        return parameter in inspect.signature(func).parameters

    args, varargs, varkw, defaults = inspect.getargspec(func)
    return parameter in args
| bsd-3-clause |
jensck/fluidity | fluidity/defs.py | 1 | 6493 | #!/usr/bin/env python
#-*- coding:utf-8 -*-
#
# Copyright (C) 2010 - Jens Knutson <jens.knutson at gmail dot com>
# This software is licensed under the GNU General Public License
# version 3 or later (see the file COPYING).
"""Misc. app-wide constants."""
from __future__ import absolute_import, division, print_function
__author__ = 'Jens Knutson'
import os
import string
import sys
import uuid
import pathlib2
import yaml
from xdg import BaseDirectory
# Py3K compat.
# (On interpreters where string.ascii_lowercase is absent, alias it to the
# legacy string.lowercase so the rest of the module can use one name.)
if not hasattr(string, 'ascii_lowercase'):
    # yep, we're monkey patching. deal with it.
    string.ascii_lowercase = string.lowercase
def _find_app_data_path():
    """Determine (hackishly) if we're running from a proper install or not,
    and return the matching data directory."""
    module_file = os.path.realpath(sys.modules[__name__].__file__)
    exec_path = os.path.dirname(module_file)
    uninstalled_data_path = os.path.join(os.path.dirname(exec_path), 'data')
    if os.path.exists(uninstalled_data_path):
        # Running from a source checkout: ../data next to the package.
        return uninstalled_data_path
    # Installed: data lives under the interpreter prefix.
    return os.path.join(sys.prefix, "share", "fluidity")
def _get_read_review_path():
    """Locate the user's "Read-Review" folder under their XDG documents dir.

    Parses ``user-dirs.dirs`` for XDG_DOCUMENTS_DIR.  Falls back to
    ~/Documents when the file is missing or contains no such entry --
    the previous version raised IndexError (''.split('=')[1]) in that case.
    This is rather primitive, but it works in the vast majority of cases.
    """
    home = os.getenv('HOME')
    docs_dir = os.path.join(home, "Documents")  # sensible default
    dirs_file = os.path.join(home, BaseDirectory.xdg_config_dirs[0],
                             'user-dirs.dirs')
    try:
        with open(dirs_file, 'r') as dirs:
            for line in dirs:
                if "XDG_DOCUMENTS_DIR" in line and '=' in line:
                    # split('=', 1) tolerates '=' inside the value.
                    value = line.strip().split('=', 1)[1]
                    docs_dir = value.replace("$HOME", home).replace('"', '')
    except IOError:
        # No user-dirs.dirs file: keep the ~/Documents default.
        pass
    return os.path.join(docs_dir, "Read-Review")
def _get_yaml_loader_dumper():
    """Return the (Loader, Dumper) pair, preferring PyYAML's C variants.

    Falls back to the pure-Python implementations when libyaml support was
    not compiled in (e.g. some distro builds lack yaml.CLoader/CDumper).
    """
    if hasattr(yaml, 'CLoader') and hasattr(yaml, 'CDumper'):
        return yaml.CLoader, yaml.CDumper
    return yaml.Loader, yaml.Dumper
APP_NAME = 'Fluidity'
DBUS_BUS_NAME = 'org.solemnsilence.Fluidity'
DBUS_OBJECT_PATH = '/org/solemnsilence/Fluidity'
UUID_NAMESPACE_URL = "http://solemnsilence.org/fluidity"
# FIXME: should be datetime objs. grr.
FITY_EPOCH = 1230768000.0
CREATION_EPOCH = 1262325600.0
# this just indicates "this is not a real context UUID, it's 'faked-out'"
FAKE_CONTEXT_UUID = uuid.UUID(
bytes='\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xDE\xAD\xBE\xEF')
YAML_LOADER, YAML_DUMPER = _get_yaml_loader_dumper()
### NOTES APP STUFF ###
# FIXME: this is lame. Figure out /real/ Tomboy vs. Gnote handling later
# For now, the value below must be either "tomboy" or "gnote" (all in lowercase)
NOTES_APP = "Tomboy"
NOTES_BUS_NAME = 'org.gnome.' + NOTES_APP
NOTES_OBJECT_PATH = '/org/gnome/' + NOTES_APP + '/RemoteControl'
NOTES_INTERFACE_NAME = 'org.gnome.{}.RemoteControl'.format(NOTES_APP)
NEW_PROJECT_NOTE_TEMPLATE_NOTE_TITLE = "Projects - active Notebook Template"
### MISC TEXT FIELD VALUES AND TEMPLATES ###
# FIXME: almost all of these should go somewhere else as I refactor Fity
AUTOSAVE_INTERVAL = int(60 * 1) # minutes between autosaves of the data file
GTK_DATE_TEXT_TEMPLATE = "%B %d, %Y"
DEFAULT_TIME_EST = 10.0
UNRECOGNIZED_DATE_TEXT = "(date unrecognized)"
# represents "there is no AOF assigned to this project", i.e.: "unfiled"
NO_AOF_ASSIGNED = "No AOF Assigned"
ENGAGE_TOTALS_TEMPLATE = "Tasks shown: {0} Total time: {1}h:{2}m"
ARCHIVED_SINGLETONS_TIME_TMPLT = '-%Y-%m-%d-%H:%M'
SANITARY_CHARS = string.ascii_lowercase + string.digits + " "
### PATHS ###
HOME_DIR = os.path.expanduser("~")
HOME_PATH = pathlib2.Path(HOME_DIR)
APP_DATA_PATH = _find_app_data_path()
USER_DATA_PATH = BaseDirectory.save_data_path("fluidity")
LOG_FILE_PATH = os.path.join(USER_DATA_PATH, 'fluidity_debug.log')
RECURRENCE_DATA = os.path.join(USER_DATA_PATH, 'recurring_tasks.yaml')
USER_DATA_MAIN_FNAME = 'fluidity.pkl'
USER_DATA_MAIN_FILE = os.path.join(USER_DATA_PATH, USER_DATA_MAIN_FNAME)
PROCESSED_STUFF_FILE_NAME = 'processed_stuff.pkl'
BACKUPS_PATH = os.path.join(USER_DATA_PATH, "backups")
ARCHIVED_SINGLETONS_FNAME = 'archived_singletons{0}.pkl'
DROPBOX_PATH = pathlib2.Path(HOME_PATH, 'Dropbox')
DROPBOX_INBOX_PATH = pathlib2.Path(DROPBOX_PATH, 'Inbox')
HACK_HACK_HACK_DROPBOX_PATH = pathlib2.Path(DROPBOX_PATH, "Fluidity")
# PROJECT SUPPORT FILE PATHS
READ_REVIEW_PATH = _get_read_review_path()
INBOX_FOLDER = os.path.join(HOME_DIR, "Inbox")
NOTE_SLIDER_FOLDER = os.path.join(USER_DATA_PATH, 'slider-inbox')
MAIN_PRJ_SUPPORT_FOLDER = os.path.join(HOME_DIR, "Projects")
ACTIVE_FOLDER = os.path.join(MAIN_PRJ_SUPPORT_FOLDER, "Active")
COMPLETED_FOLDER = os.path.join(MAIN_PRJ_SUPPORT_FOLDER, "Completed")
INCUBATING_FOLDER = os.path.join(MAIN_PRJ_SUPPORT_FOLDER, "Incubating")
QUEUED_FOLDER = os.path.join(MAIN_PRJ_SUPPORT_FOLDER, "Queued")
WAITING_FOR_FOLDER = os.path.join(MAIN_PRJ_SUPPORT_FOLDER, "Waiting For")
SINGLETON_FILES = os.path.join(ACTIVE_FOLDER, "singletons")
PROJECT_FOLDER_DELETION_WARNING_FILE_NAME = "DO_NOT_DELETE_THIS_FOLDER.txt"
PROJECT_FOLDER_DELETION_WARNING_PATH = os.path.join(
APP_DATA_PATH, PROJECT_FOLDER_DELETION_WARNING_FILE_NAME)
# doesn't include USER_DATA_PATH since BaseDirectory.save_data_path takes
# care of ensuring that path exists
# FIXME: once a global Inbox folder is implemented for people other than me
ALL_DATA_FOLDERS = [
NOTE_SLIDER_FOLDER,
MAIN_PRJ_SUPPORT_FOLDER,
ACTIVE_FOLDER,
COMPLETED_FOLDER,
INCUBATING_FOLDER,
QUEUED_FOLDER,
WAITING_FOR_FOLDER,
BACKUPS_PATH,
INBOX_FOLDER,
]
IGNORED_INBOX_PATHS = [
"0 - Eventually sort. bah",
"1 - To be processed when Fity is ready",
"3 - Torrents",
"2 - Receipts to process",
"90 Day Storage",
]
# ugh, this file is getting INSANE.
# These are basically enums. Gotta find one of the better solutions to this though.
class Priority:
    """Enum-like task priority levels (plain ints; lower value = higher priority)."""
    HIGH, MEDIUM, LOW = 1, 2, 3
class EnergyEstimate:
    """Enum-like energy-estimate levels, ordered from low to high."""
    LOW, MEDIUM, HIGH = 0, 1, 2
class ProjectStatus:
    """Enum-like project status values.

    Treat these as read-only constants.  (Eventually this should become a
    proper Python enum class, but until then, *pretend* they are immutable.)
    """
    ACTIVE, INCUBATING, WAITING_FOR, QUEUED, COMPLETED = (
        'active', 'incubating', 'waiting_for', 'queued', 'completed')
| gpl-3.0 |
mizzao/ggplot | ggplot/tests/test_theme_mpl.py | 12 | 3907 | from __future__ import (absolute_import, division, print_function,
unicode_literals)
import matplotlib as mpl
import six
from nose.tools import assert_true
from ggplot.tests import image_comparison, cleanup
from ggplot import *
def _diff(a, b):
ret = {}
for key, val in a.items():
if key in b:
if b[key] != val:
ret[key] = "%s: %s -> %s" % (key, val, b[key])
else:
ret[key] = "%s: %s -> %s" % (key, val, "--")
for key, val in b.items():
if key not in a:
ret[key] = "%s: %s -> %s" % (key, "--", val)
return ret
@cleanup
def test_theme_matplotlib():
    # Applying a single-key theme_matplotlib must not leak unrelated rcParams
    # changes -- neither inside the theme nor into the global mpl.rcParams.
    gg = ggplot(aes(x='date', y='beef'), data=meat)
    a = mpl.rcParams.copy()
    _theme = theme_matplotlib({"font.family": "serif"}, matplotlib_defaults=False)
    assert_true(len(_theme._rcParams) < 2, "setting font.family changed more than that in the theme. %s" % list(six.iterkeys(_theme._rcParams))[:5])
    gg = gg + _theme
    b = mpl.rcParams.copy()
    assert_true(len(_diff(a,b)) < 2, "setting font.family changed more than that in ggplot object: %s" % list(six.iterkeys(_diff(a,b)))[:5])
@image_comparison(baseline_images=['theme_clean', 'theme_mpl_completly'])
def test_theme_matplotlib2():
    """Rendering a plot must not leak any rcParams change; then render the
    same plot again under the complete matplotlib theme."""
    plot = (ggplot(aes(x='date', y='beef'), data=meat)
            + geom_point(color='lightblue')
            + stat_smooth(span=.15, color='black', se=True)
            + ggtitle("Beef: It's What's for Dinner")
            + xlab("Date")
            + ylab("Head of Cattle Slaughtered"))
    before = mpl.rcParams.copy()
    print(plot)
    after = mpl.rcParams.copy()
    changed = _diff(before, after)
    assert_true(len(changed) < 1, "Just plotting changed something in the ggplot object: %s" % list(six.iterkeys(changed))[:5])
    print(plot + theme_matplotlib())
@image_comparison(baseline_images=['theme_clean2', 'theme_mpl_only_one'])
def test_theme_matplotlib3():
    """A single-param partial theme must change at most one rcParam, both
    when it is added to the plot and after the plot is rendered again."""
    plot = (ggplot(aes(x='date', y='beef'), data=meat)
            + geom_point(color='lightblue')
            + stat_smooth(span=.15, color='black', se=True)
            + ggtitle("Beef: It's What's for Dinner")
            + xlab("Date")
            + ylab("Head of Cattle Slaughtered"))
    baseline = mpl.rcParams.copy()
    print(plot)
    current = mpl.rcParams.copy()
    assert_true(len(_diff(baseline, current)) < 1, "Just plotting changed something in the ggplot object: %s" % list(six.iterkeys(_diff(baseline, current)))[:5])
    serif_theme = theme_matplotlib({"font.family": "serif"}, matplotlib_defaults=False)
    plot = plot + serif_theme
    current = mpl.rcParams.copy()
    assert_true(len(_diff(baseline, current)) < 2, "Setting just one param changed more in the ggplot object: %s" % list(six.iterkeys(_diff(baseline, current)))[:5])
    print(plot)
    current = mpl.rcParams.copy()
    assert_true(len(_diff(baseline, current)) < 2, "Plotting after setting just one param changed more in the ggplot object: %s" % list(six.iterkeys(_diff(baseline, current)))[:5])
@image_comparison(baseline_images=['theme_mpl_all_before', 'theme_mpl_all_after'])
def test_theme_matplotlib4():
    """Render the same plot under the complete matplotlib theme and under
    a partial single-param theme, for baseline image comparison."""
    plot = (ggplot(aes(x='date', y='beef'), data=meat)
            + geom_point(color='lightblue')
            + stat_smooth(span=.15, color='black', se=True)
            + ggtitle("Beef: It's What's for Dinner")
            + xlab("Date")
            + ylab("Head of Cattle Slaughtered"))
    print(plot + theme_matplotlib())
    print(plot + theme_matplotlib({"font.family": "serif"}, matplotlib_defaults=False))
@image_comparison(baseline_images=['theme_mpl_all_before'])
def test_theme_matplotlib5():
    """The last complete theme added to a plot wins over earlier ones."""
    plot = (ggplot(aes(x='date', y='beef'), data=meat)
            + geom_point(color='lightblue')
            + stat_smooth(span=.15, color='black', se=True)
            + ggtitle("Beef: It's What's for Dinner")
            + xlab("Date")
            + ylab("Head of Cattle Slaughtered"))
    print(plot + theme_gray() + theme_matplotlib())
def test_theme_matplotlib6():
    """theme_matplotlib() with no arguments is a complete theme."""
    assert_true(theme_matplotlib().complete)
| bsd-2-clause |
jonparrott/google-cloud-python | tasks/google/cloud/tasks_v2beta2/proto/cloudtasks_pb2.py | 3 | 76626 | # Generated by the protocol buffer compiler. DO NOT EDIT!
# source: google/cloud/tasks_v2beta2/proto/cloudtasks.proto
import sys
_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
from google.protobuf import descriptor_pb2
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2
from google.cloud.tasks_v2beta2.proto import queue_pb2 as google_dot_cloud_dot_tasks__v2beta2_dot_proto_dot_queue__pb2
from google.cloud.tasks_v2beta2.proto import task_pb2 as google_dot_cloud_dot_tasks__v2beta2_dot_proto_dot_task__pb2
from google.iam.v1 import iam_policy_pb2 as google_dot_iam_dot_v1_dot_iam__policy__pb2
from google.iam.v1 import policy_pb2 as google_dot_iam_dot_v1_dot_policy__pb2
from google.protobuf import duration_pb2 as google_dot_protobuf_dot_duration__pb2
from google.protobuf import empty_pb2 as google_dot_protobuf_dot_empty__pb2
from google.protobuf import field_mask_pb2 as google_dot_protobuf_dot_field__mask__pb2
from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2
from google.rpc import code_pb2 as google_dot_rpc_dot_code__pb2
DESCRIPTOR = _descriptor.FileDescriptor(
name='google/cloud/tasks_v2beta2/proto/cloudtasks.proto',
package='google.cloud.tasks.v2beta2',
syntax='proto3',
serialized_pb=_b('\n1google/cloud/tasks_v2beta2/proto/cloudtasks.proto\x12\x1agoogle.cloud.tasks.v2beta2\x1a\x1cgoogle/api/annotations.proto\x1a,google/cloud/tasks_v2beta2/proto/queue.proto\x1a+google/cloud/tasks_v2beta2/proto/task.proto\x1a\x1egoogle/iam/v1/iam_policy.proto\x1a\x1agoogle/iam/v1/policy.proto\x1a\x1egoogle/protobuf/duration.proto\x1a\x1bgoogle/protobuf/empty.proto\x1a google/protobuf/field_mask.proto\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x15google/rpc/code.proto\"Z\n\x11ListQueuesRequest\x12\x0e\n\x06parent\x18\x01 \x01(\t\x12\x0e\n\x06\x66ilter\x18\x02 \x01(\t\x12\x11\n\tpage_size\x18\x03 \x01(\x05\x12\x12\n\npage_token\x18\x04 \x01(\t\"`\n\x12ListQueuesResponse\x12\x31\n\x06queues\x18\x01 \x03(\x0b\x32!.google.cloud.tasks.v2beta2.Queue\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t\"\x1f\n\x0fGetQueueRequest\x12\x0c\n\x04name\x18\x01 \x01(\t\"V\n\x12\x43reateQueueRequest\x12\x0e\n\x06parent\x18\x01 \x01(\t\x12\x30\n\x05queue\x18\x02 \x01(\x0b\x32!.google.cloud.tasks.v2beta2.Queue\"w\n\x12UpdateQueueRequest\x12\x30\n\x05queue\x18\x01 \x01(\x0b\x32!.google.cloud.tasks.v2beta2.Queue\x12/\n\x0bupdate_mask\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.FieldMask\"\"\n\x12\x44\x65leteQueueRequest\x12\x0c\n\x04name\x18\x01 \x01(\t\"!\n\x11PurgeQueueRequest\x12\x0c\n\x04name\x18\x01 \x01(\t\"!\n\x11PauseQueueRequest\x12\x0c\n\x04name\x18\x01 \x01(\t\"\"\n\x12ResumeQueueRequest\x12\x0c\n\x04name\x18\x01 \x01(\t\"\x87\x01\n\x10ListTasksRequest\x12\x0e\n\x06parent\x18\x01 \x01(\t\x12<\n\rresponse_view\x18\x02 \x01(\x0e\x32%.google.cloud.tasks.v2beta2.Task.View\x12\x11\n\tpage_size\x18\x04 \x01(\x05\x12\x12\n\npage_token\x18\x05 \x01(\t\"]\n\x11ListTasksResponse\x12/\n\x05tasks\x18\x01 \x03(\x0b\x32 .google.cloud.tasks.v2beta2.Task\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t\"\\\n\x0eGetTaskRequest\x12\x0c\n\x04name\x18\x01 \x01(\t\x12<\n\rresponse_view\x18\x02 
\x01(\x0e\x32%.google.cloud.tasks.v2beta2.Task.View\"\x91\x01\n\x11\x43reateTaskRequest\x12\x0e\n\x06parent\x18\x01 \x01(\t\x12.\n\x04task\x18\x02 \x01(\x0b\x32 .google.cloud.tasks.v2beta2.Task\x12<\n\rresponse_view\x18\x03 \x01(\x0e\x32%.google.cloud.tasks.v2beta2.Task.View\"!\n\x11\x44\x65leteTaskRequest\x12\x0c\n\x04name\x18\x01 \x01(\t\"\xb7\x01\n\x11LeaseTasksRequest\x12\x0e\n\x06parent\x18\x01 \x01(\t\x12\x11\n\tmax_tasks\x18\x02 \x01(\x05\x12\x31\n\x0elease_duration\x18\x03 \x01(\x0b\x32\x19.google.protobuf.Duration\x12<\n\rresponse_view\x18\x04 \x01(\x0e\x32%.google.cloud.tasks.v2beta2.Task.View\x12\x0e\n\x06\x66ilter\x18\x05 \x01(\t\"E\n\x12LeaseTasksResponse\x12/\n\x05tasks\x18\x01 \x03(\x0b\x32 .google.cloud.tasks.v2beta2.Task\"Y\n\x16\x41\x63knowledgeTaskRequest\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x31\n\rschedule_time\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\"\xc5\x01\n\x11RenewLeaseRequest\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x31\n\rschedule_time\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x31\n\x0elease_duration\x18\x03 \x01(\x0b\x32\x19.google.protobuf.Duration\x12<\n\rresponse_view\x18\x04 \x01(\x0e\x32%.google.cloud.tasks.v2beta2.Task.View\"\x93\x01\n\x12\x43\x61ncelLeaseRequest\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x31\n\rschedule_time\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12<\n\rresponse_view\x18\x03 \x01(\x0e\x32%.google.cloud.tasks.v2beta2.Task.View\"\\\n\x0eRunTaskRequest\x12\x0c\n\x04name\x18\x01 \x01(\t\x12<\n\rresponse_view\x18\x02 
\x01(\x0e\x32%.google.cloud.tasks.v2beta2.Task.View2\xf0\x19\n\nCloudTasks\x12\xa4\x01\n\nListQueues\x12-.google.cloud.tasks.v2beta2.ListQueuesRequest\x1a..google.cloud.tasks.v2beta2.ListQueuesResponse\"7\x82\xd3\xe4\x93\x02\x31\x12//v2beta2/{parent=projects/*/locations/*}/queues\x12\x93\x01\n\x08GetQueue\x12+.google.cloud.tasks.v2beta2.GetQueueRequest\x1a!.google.cloud.tasks.v2beta2.Queue\"7\x82\xd3\xe4\x93\x02\x31\x12//v2beta2/{name=projects/*/locations/*/queues/*}\x12\xa0\x01\n\x0b\x43reateQueue\x12..google.cloud.tasks.v2beta2.CreateQueueRequest\x1a!.google.cloud.tasks.v2beta2.Queue\">\x82\xd3\xe4\x93\x02\x38\"//v2beta2/{parent=projects/*/locations/*}/queues:\x05queue\x12\xa6\x01\n\x0bUpdateQueue\x12..google.cloud.tasks.v2beta2.UpdateQueueRequest\x1a!.google.cloud.tasks.v2beta2.Queue\"D\x82\xd3\xe4\x93\x02>25/v2beta2/{queue.name=projects/*/locations/*/queues/*}:\x05queue\x12\x8e\x01\n\x0b\x44\x65leteQueue\x12..google.cloud.tasks.v2beta2.DeleteQueueRequest\x1a\x16.google.protobuf.Empty\"7\x82\xd3\xe4\x93\x02\x31*//v2beta2/{name=projects/*/locations/*/queues/*}\x12\xa0\x01\n\nPurgeQueue\x12-.google.cloud.tasks.v2beta2.PurgeQueueRequest\x1a!.google.cloud.tasks.v2beta2.Queue\"@\x82\xd3\xe4\x93\x02:\"5/v2beta2/{name=projects/*/locations/*/queues/*}:purge:\x01*\x12\xa0\x01\n\nPauseQueue\x12-.google.cloud.tasks.v2beta2.PauseQueueRequest\x1a!.google.cloud.tasks.v2beta2.Queue\"@\x82\xd3\xe4\x93\x02:\"5/v2beta2/{name=projects/*/locations/*/queues/*}:pause:\x01*\x12\xa3\x01\n\x0bResumeQueue\x12..google.cloud.tasks.v2beta2.ResumeQueueRequest\x1a!.google.cloud.tasks.v2beta2.Queue\"A\x82\xd3\xe4\x93\x02;\"6/v2beta2/{name=projects/*/locations/*/queues/*}:resume:\x01*\x12\x96\x01\n\x0cGetIamPolicy\x12\".google.iam.v1.GetIamPolicyRequest\x1a\x15.google.iam.v1.Policy\"K\x82\xd3\xe4\x93\x02\x45\"@/v2beta2/{resource=projects/*/locations/*/queues/*}:getIamPolicy:\x01*\x12\x96\x01\n\x0cSetIamPolicy\x12\".google.iam.v1.SetIamPolicyRequest\x1a\x15.google.iam.v1.Policy\"K\x82\xd3\xe4\x93
\x02\x45\"@/v2beta2/{resource=projects/*/locations/*/queues/*}:setIamPolicy:\x01*\x12\xbc\x01\n\x12TestIamPermissions\x12(.google.iam.v1.TestIamPermissionsRequest\x1a).google.iam.v1.TestIamPermissionsResponse\"Q\x82\xd3\xe4\x93\x02K\"F/v2beta2/{resource=projects/*/locations/*/queues/*}:testIamPermissions:\x01*\x12\xa9\x01\n\tListTasks\x12,.google.cloud.tasks.v2beta2.ListTasksRequest\x1a-.google.cloud.tasks.v2beta2.ListTasksResponse\"?\x82\xd3\xe4\x93\x02\x39\x12\x37/v2beta2/{parent=projects/*/locations/*/queues/*}/tasks\x12\x98\x01\n\x07GetTask\x12*.google.cloud.tasks.v2beta2.GetTaskRequest\x1a .google.cloud.tasks.v2beta2.Task\"?\x82\xd3\xe4\x93\x02\x39\x12\x37/v2beta2/{name=projects/*/locations/*/queues/*/tasks/*}\x12\xa1\x01\n\nCreateTask\x12-.google.cloud.tasks.v2beta2.CreateTaskRequest\x1a .google.cloud.tasks.v2beta2.Task\"B\x82\xd3\xe4\x93\x02<\"7/v2beta2/{parent=projects/*/locations/*/queues/*}/tasks:\x01*\x12\x94\x01\n\nDeleteTask\x12-.google.cloud.tasks.v2beta2.DeleteTaskRequest\x1a\x16.google.protobuf.Empty\"?\x82\xd3\xe4\x93\x02\x39*7/v2beta2/{name=projects/*/locations/*/queues/*/tasks/*}\x12\xb5\x01\n\nLeaseTasks\x12-.google.cloud.tasks.v2beta2.LeaseTasksRequest\x1a..google.cloud.tasks.v2beta2.LeaseTasksResponse\"H\x82\xd3\xe4\x93\x02\x42\"=/v2beta2/{parent=projects/*/locations/*/queues/*}/tasks:lease:\x01*\x12\xad\x01\n\x0f\x41\x63knowledgeTask\x12\x32.google.cloud.tasks.v2beta2.AcknowledgeTaskRequest\x1a\x16.google.protobuf.Empty\"N\x82\xd3\xe4\x93\x02H\"C/v2beta2/{name=projects/*/locations/*/queues/*/tasks/*}:acknowledge:\x01*\x12\xac\x01\n\nRenewLease\x12-.google.cloud.tasks.v2beta2.RenewLeaseRequest\x1a .google.cloud.tasks.v2beta2.Task\"M\x82\xd3\xe4\x93\x02G\"B/v2beta2/{name=projects/*/locations/*/queues/*/tasks/*}:renewLease:\x01*\x12\xaf\x01\n\x0b\x43\x61ncelLease\x12..google.cloud.tasks.v2beta2.CancelLeaseRequest\x1a 
.google.cloud.tasks.v2beta2.Task\"N\x82\xd3\xe4\x93\x02H\"C/v2beta2/{name=projects/*/locations/*/queues/*/tasks/*}:cancelLease:\x01*\x12\x9f\x01\n\x07RunTask\x12*.google.cloud.tasks.v2beta2.RunTaskRequest\x1a .google.cloud.tasks.v2beta2.Task\"F\x82\xd3\xe4\x93\x02@\";/v2beta2/{name=projects/*/locations/*/queues/*/tasks/*}:run:\x01*B|\n\x1e\x63om.google.cloud.tasks.v2beta2B\x0f\x43loudTasksProtoP\x01Z?google.golang.org/genproto/googleapis/cloud/tasks/v2beta2;tasks\xa2\x02\x05TASKSb\x06proto3')
,
dependencies=[google_dot_api_dot_annotations__pb2.DESCRIPTOR,google_dot_cloud_dot_tasks__v2beta2_dot_proto_dot_queue__pb2.DESCRIPTOR,google_dot_cloud_dot_tasks__v2beta2_dot_proto_dot_task__pb2.DESCRIPTOR,google_dot_iam_dot_v1_dot_iam__policy__pb2.DESCRIPTOR,google_dot_iam_dot_v1_dot_policy__pb2.DESCRIPTOR,google_dot_protobuf_dot_duration__pb2.DESCRIPTOR,google_dot_protobuf_dot_empty__pb2.DESCRIPTOR,google_dot_protobuf_dot_field__mask__pb2.DESCRIPTOR,google_dot_protobuf_dot_timestamp__pb2.DESCRIPTOR,google_dot_rpc_dot_code__pb2.DESCRIPTOR,])
_LISTQUEUESREQUEST = _descriptor.Descriptor(
name='ListQueuesRequest',
full_name='google.cloud.tasks.v2beta2.ListQueuesRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='parent', full_name='google.cloud.tasks.v2beta2.ListQueuesRequest.parent', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='filter', full_name='google.cloud.tasks.v2beta2.ListQueuesRequest.filter', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='page_size', full_name='google.cloud.tasks.v2beta2.ListQueuesRequest.page_size', index=2,
number=3, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='page_token', full_name='google.cloud.tasks.v2beta2.ListQueuesRequest.page_token', index=3,
number=4, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=413,
serialized_end=503,
)
_LISTQUEUESRESPONSE = _descriptor.Descriptor(
name='ListQueuesResponse',
full_name='google.cloud.tasks.v2beta2.ListQueuesResponse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='queues', full_name='google.cloud.tasks.v2beta2.ListQueuesResponse.queues', index=0,
number=1, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='next_page_token', full_name='google.cloud.tasks.v2beta2.ListQueuesResponse.next_page_token', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=505,
serialized_end=601,
)
# protoc-generated message descriptor for GetQueueRequest: a single
# required-by-convention string field `name` (proto3 string: type=9).
# DO NOT hand-edit — regenerated from
# google/cloud/tasks_v2beta2/proto/cloudtasks.proto.
_GETQUEUEREQUEST = _descriptor.Descriptor(
  name='GetQueueRequest',
  full_name='google.cloud.tasks.v2beta2.GetQueueRequest',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='name', full_name='google.cloud.tasks.v2beta2.GetQueueRequest.name', index=0,
      number=1, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None, file=DESCRIPTOR),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  # Byte offsets of this message's definition inside serialized_pb.
  serialized_start=603,
  serialized_end=634,
)
_CREATEQUEUEREQUEST = _descriptor.Descriptor(
name='CreateQueueRequest',
full_name='google.cloud.tasks.v2beta2.CreateQueueRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='parent', full_name='google.cloud.tasks.v2beta2.CreateQueueRequest.parent', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='queue', full_name='google.cloud.tasks.v2beta2.CreateQueueRequest.queue', index=1,
number=2, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=636,
serialized_end=722,
)
_UPDATEQUEUEREQUEST = _descriptor.Descriptor(
name='UpdateQueueRequest',
full_name='google.cloud.tasks.v2beta2.UpdateQueueRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='queue', full_name='google.cloud.tasks.v2beta2.UpdateQueueRequest.queue', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='update_mask', full_name='google.cloud.tasks.v2beta2.UpdateQueueRequest.update_mask', index=1,
number=2, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=724,
serialized_end=843,
)
_DELETEQUEUEREQUEST = _descriptor.Descriptor(
name='DeleteQueueRequest',
full_name='google.cloud.tasks.v2beta2.DeleteQueueRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='name', full_name='google.cloud.tasks.v2beta2.DeleteQueueRequest.name', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=845,
serialized_end=879,
)
_PURGEQUEUEREQUEST = _descriptor.Descriptor(
name='PurgeQueueRequest',
full_name='google.cloud.tasks.v2beta2.PurgeQueueRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='name', full_name='google.cloud.tasks.v2beta2.PurgeQueueRequest.name', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=881,
serialized_end=914,
)
_PAUSEQUEUEREQUEST = _descriptor.Descriptor(
name='PauseQueueRequest',
full_name='google.cloud.tasks.v2beta2.PauseQueueRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='name', full_name='google.cloud.tasks.v2beta2.PauseQueueRequest.name', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=916,
serialized_end=949,
)
_RESUMEQUEUEREQUEST = _descriptor.Descriptor(
name='ResumeQueueRequest',
full_name='google.cloud.tasks.v2beta2.ResumeQueueRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='name', full_name='google.cloud.tasks.v2beta2.ResumeQueueRequest.name', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=951,
serialized_end=985,
)
_LISTTASKSREQUEST = _descriptor.Descriptor(
name='ListTasksRequest',
full_name='google.cloud.tasks.v2beta2.ListTasksRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='parent', full_name='google.cloud.tasks.v2beta2.ListTasksRequest.parent', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='response_view', full_name='google.cloud.tasks.v2beta2.ListTasksRequest.response_view', index=1,
number=2, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='page_size', full_name='google.cloud.tasks.v2beta2.ListTasksRequest.page_size', index=2,
number=4, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='page_token', full_name='google.cloud.tasks.v2beta2.ListTasksRequest.page_token', index=3,
number=5, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=988,
serialized_end=1123,
)
_LISTTASKSRESPONSE = _descriptor.Descriptor(
name='ListTasksResponse',
full_name='google.cloud.tasks.v2beta2.ListTasksResponse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='tasks', full_name='google.cloud.tasks.v2beta2.ListTasksResponse.tasks', index=0,
number=1, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='next_page_token', full_name='google.cloud.tasks.v2beta2.ListTasksResponse.next_page_token', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=1125,
serialized_end=1218,
)
_GETTASKREQUEST = _descriptor.Descriptor(
name='GetTaskRequest',
full_name='google.cloud.tasks.v2beta2.GetTaskRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='name', full_name='google.cloud.tasks.v2beta2.GetTaskRequest.name', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='response_view', full_name='google.cloud.tasks.v2beta2.GetTaskRequest.response_view', index=1,
number=2, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=1220,
serialized_end=1312,
)
_CREATETASKREQUEST = _descriptor.Descriptor(
name='CreateTaskRequest',
full_name='google.cloud.tasks.v2beta2.CreateTaskRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='parent', full_name='google.cloud.tasks.v2beta2.CreateTaskRequest.parent', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='task', full_name='google.cloud.tasks.v2beta2.CreateTaskRequest.task', index=1,
number=2, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='response_view', full_name='google.cloud.tasks.v2beta2.CreateTaskRequest.response_view', index=2,
number=3, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=1315,
serialized_end=1460,
)
_DELETETASKREQUEST = _descriptor.Descriptor(
name='DeleteTaskRequest',
full_name='google.cloud.tasks.v2beta2.DeleteTaskRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='name', full_name='google.cloud.tasks.v2beta2.DeleteTaskRequest.name', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=1462,
serialized_end=1495,
)
_LEASETASKSREQUEST = _descriptor.Descriptor(
name='LeaseTasksRequest',
full_name='google.cloud.tasks.v2beta2.LeaseTasksRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='parent', full_name='google.cloud.tasks.v2beta2.LeaseTasksRequest.parent', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='max_tasks', full_name='google.cloud.tasks.v2beta2.LeaseTasksRequest.max_tasks', index=1,
number=2, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='lease_duration', full_name='google.cloud.tasks.v2beta2.LeaseTasksRequest.lease_duration', index=2,
number=3, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='response_view', full_name='google.cloud.tasks.v2beta2.LeaseTasksRequest.response_view', index=3,
number=4, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='filter', full_name='google.cloud.tasks.v2beta2.LeaseTasksRequest.filter', index=4,
number=5, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=1498,
serialized_end=1681,
)
_LEASETASKSRESPONSE = _descriptor.Descriptor(
name='LeaseTasksResponse',
full_name='google.cloud.tasks.v2beta2.LeaseTasksResponse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='tasks', full_name='google.cloud.tasks.v2beta2.LeaseTasksResponse.tasks', index=0,
number=1, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=1683,
serialized_end=1752,
)
_ACKNOWLEDGETASKREQUEST = _descriptor.Descriptor(
name='AcknowledgeTaskRequest',
full_name='google.cloud.tasks.v2beta2.AcknowledgeTaskRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='name', full_name='google.cloud.tasks.v2beta2.AcknowledgeTaskRequest.name', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='schedule_time', full_name='google.cloud.tasks.v2beta2.AcknowledgeTaskRequest.schedule_time', index=1,
number=2, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=1754,
serialized_end=1843,
)
# Descriptor for the RenewLeaseRequest message (request type of
# CloudTasks.RenewLease). Fields: name (string, 1), schedule_time
# (google.protobuf.Timestamp, 2), lease_duration
# (google.protobuf.Duration, 3), response_view (Task.View enum, 4).
# NOTE: generated by protoc from cloudtasks.proto — do not edit by hand;
# the serialized_start/serialized_end offsets index into the file's
# serialized descriptor blob. Regenerate from the .proto instead.
_RENEWLEASEREQUEST = _descriptor.Descriptor(
  name='RenewLeaseRequest',
  full_name='google.cloud.tasks.v2beta2.RenewLeaseRequest',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='name', full_name='google.cloud.tasks.v2beta2.RenewLeaseRequest.name', index=0,
      number=1, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='schedule_time', full_name='google.cloud.tasks.v2beta2.RenewLeaseRequest.schedule_time', index=1,
      number=2, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='lease_duration', full_name='google.cloud.tasks.v2beta2.RenewLeaseRequest.lease_duration', index=2,
      number=3, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='response_view', full_name='google.cloud.tasks.v2beta2.RenewLeaseRequest.response_view', index=3,
      number=4, type=14, cpp_type=8, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None, file=DESCRIPTOR),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=1846,
  serialized_end=2043,
)
# Descriptor for the CancelLeaseRequest message (request type of
# CloudTasks.CancelLease). Fields: name (string, 1), schedule_time
# (google.protobuf.Timestamp, 2), response_view (Task.View enum, 3).
# NOTE: generated by protoc from cloudtasks.proto — do not edit by hand.
_CANCELLEASEREQUEST = _descriptor.Descriptor(
  name='CancelLeaseRequest',
  full_name='google.cloud.tasks.v2beta2.CancelLeaseRequest',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='name', full_name='google.cloud.tasks.v2beta2.CancelLeaseRequest.name', index=0,
      number=1, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='schedule_time', full_name='google.cloud.tasks.v2beta2.CancelLeaseRequest.schedule_time', index=1,
      number=2, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='response_view', full_name='google.cloud.tasks.v2beta2.CancelLeaseRequest.response_view', index=2,
      number=3, type=14, cpp_type=8, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None, file=DESCRIPTOR),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=2046,
  serialized_end=2193,
)
# Descriptor for the RunTaskRequest message (request type of
# CloudTasks.RunTask). Fields: name (string, 1), response_view
# (Task.View enum, 2).
# NOTE: generated by protoc from cloudtasks.proto — do not edit by hand.
_RUNTASKREQUEST = _descriptor.Descriptor(
  name='RunTaskRequest',
  full_name='google.cloud.tasks.v2beta2.RunTaskRequest',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='name', full_name='google.cloud.tasks.v2beta2.RunTaskRequest.name', index=0,
      number=1, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='response_view', full_name='google.cloud.tasks.v2beta2.RunTaskRequest.response_view', index=1,
      number=2, type=14, cpp_type=8, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None, file=DESCRIPTOR),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=2195,
  serialized_end=2287,
)
# --- Cross-reference resolution (generated; do not edit by hand) ---
# protoc emits these assignments after all Descriptor objects exist, so
# that message- and enum-typed fields can be linked to the descriptors
# they reference (Queue, Task, FieldMask, Duration, Timestamp,
# Task.View), including types imported from other _pb2 modules.
_LISTQUEUESRESPONSE.fields_by_name['queues'].message_type = google_dot_cloud_dot_tasks__v2beta2_dot_proto_dot_queue__pb2._QUEUE
_CREATEQUEUEREQUEST.fields_by_name['queue'].message_type = google_dot_cloud_dot_tasks__v2beta2_dot_proto_dot_queue__pb2._QUEUE
_UPDATEQUEUEREQUEST.fields_by_name['queue'].message_type = google_dot_cloud_dot_tasks__v2beta2_dot_proto_dot_queue__pb2._QUEUE
_UPDATEQUEUEREQUEST.fields_by_name['update_mask'].message_type = google_dot_protobuf_dot_field__mask__pb2._FIELDMASK
_LISTTASKSREQUEST.fields_by_name['response_view'].enum_type = google_dot_cloud_dot_tasks__v2beta2_dot_proto_dot_task__pb2._TASK_VIEW
_LISTTASKSRESPONSE.fields_by_name['tasks'].message_type = google_dot_cloud_dot_tasks__v2beta2_dot_proto_dot_task__pb2._TASK
_GETTASKREQUEST.fields_by_name['response_view'].enum_type = google_dot_cloud_dot_tasks__v2beta2_dot_proto_dot_task__pb2._TASK_VIEW
_CREATETASKREQUEST.fields_by_name['task'].message_type = google_dot_cloud_dot_tasks__v2beta2_dot_proto_dot_task__pb2._TASK
_CREATETASKREQUEST.fields_by_name['response_view'].enum_type = google_dot_cloud_dot_tasks__v2beta2_dot_proto_dot_task__pb2._TASK_VIEW
_LEASETASKSREQUEST.fields_by_name['lease_duration'].message_type = google_dot_protobuf_dot_duration__pb2._DURATION
_LEASETASKSREQUEST.fields_by_name['response_view'].enum_type = google_dot_cloud_dot_tasks__v2beta2_dot_proto_dot_task__pb2._TASK_VIEW
_LEASETASKSRESPONSE.fields_by_name['tasks'].message_type = google_dot_cloud_dot_tasks__v2beta2_dot_proto_dot_task__pb2._TASK
_ACKNOWLEDGETASKREQUEST.fields_by_name['schedule_time'].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP
_RENEWLEASEREQUEST.fields_by_name['schedule_time'].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP
_RENEWLEASEREQUEST.fields_by_name['lease_duration'].message_type = google_dot_protobuf_dot_duration__pb2._DURATION
_RENEWLEASEREQUEST.fields_by_name['response_view'].enum_type = google_dot_cloud_dot_tasks__v2beta2_dot_proto_dot_task__pb2._TASK_VIEW
_CANCELLEASEREQUEST.fields_by_name['schedule_time'].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP
_CANCELLEASEREQUEST.fields_by_name['response_view'].enum_type = google_dot_cloud_dot_tasks__v2beta2_dot_proto_dot_task__pb2._TASK_VIEW
_RUNTASKREQUEST.fields_by_name['response_view'].enum_type = google_dot_cloud_dot_tasks__v2beta2_dot_proto_dot_task__pb2._TASK_VIEW
# Register every top-level message descriptor on the file descriptor so
# it can be looked up by simple name.
DESCRIPTOR.message_types_by_name['ListQueuesRequest'] = _LISTQUEUESREQUEST
DESCRIPTOR.message_types_by_name['ListQueuesResponse'] = _LISTQUEUESRESPONSE
DESCRIPTOR.message_types_by_name['GetQueueRequest'] = _GETQUEUEREQUEST
DESCRIPTOR.message_types_by_name['CreateQueueRequest'] = _CREATEQUEUEREQUEST
DESCRIPTOR.message_types_by_name['UpdateQueueRequest'] = _UPDATEQUEUEREQUEST
DESCRIPTOR.message_types_by_name['DeleteQueueRequest'] = _DELETEQUEUEREQUEST
DESCRIPTOR.message_types_by_name['PurgeQueueRequest'] = _PURGEQUEUEREQUEST
DESCRIPTOR.message_types_by_name['PauseQueueRequest'] = _PAUSEQUEUEREQUEST
DESCRIPTOR.message_types_by_name['ResumeQueueRequest'] = _RESUMEQUEUEREQUEST
DESCRIPTOR.message_types_by_name['ListTasksRequest'] = _LISTTASKSREQUEST
DESCRIPTOR.message_types_by_name['ListTasksResponse'] = _LISTTASKSRESPONSE
DESCRIPTOR.message_types_by_name['GetTaskRequest'] = _GETTASKREQUEST
DESCRIPTOR.message_types_by_name['CreateTaskRequest'] = _CREATETASKREQUEST
DESCRIPTOR.message_types_by_name['DeleteTaskRequest'] = _DELETETASKREQUEST
DESCRIPTOR.message_types_by_name['LeaseTasksRequest'] = _LEASETASKSREQUEST
DESCRIPTOR.message_types_by_name['LeaseTasksResponse'] = _LEASETASKSRESPONSE
DESCRIPTOR.message_types_by_name['AcknowledgeTaskRequest'] = _ACKNOWLEDGETASKREQUEST
DESCRIPTOR.message_types_by_name['RenewLeaseRequest'] = _RENEWLEASEREQUEST
DESCRIPTOR.message_types_by_name['CancelLeaseRequest'] = _CANCELLEASEREQUEST
DESCRIPTOR.message_types_by_name['RunTaskRequest'] = _RUNTASKREQUEST
# Make the file descriptor (and all its messages) resolvable through the
# default symbol database.
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
ListQueuesRequest = _reflection.GeneratedProtocolMessageType('ListQueuesRequest', (_message.Message,), dict(
DESCRIPTOR = _LISTQUEUESREQUEST,
__module__ = 'google.cloud.tasks_v2beta2.proto.cloudtasks_pb2'
,
__doc__ = """Request message for
[ListQueues][google.cloud.tasks.v2beta2.CloudTasks.ListQueues].
Attributes:
parent:
Required. The location name. For example:
``projects/PROJECT_ID/locations/LOCATION_ID``
filter:
``filter`` can be used to specify a subset of queues. Any
[Queue][google.cloud.tasks.v2beta2.Queue] field can be used as
a filter and several operators as supported. For example:
``<=, <, >=, >, !=, =, :``. The filter syntax is the same as
described in `Stackdriver's Advanced Logs Filters <https://clo
ud.google.com/logging/docs/view/advanced_filters>`_. Sample
filter "app\_engine\_http\_target: \*". Note that using
filters might cause fewer queues than the requested\_page size
to be returned.
page_size:
Requested page size. The maximum page size is 9800. If
unspecified, the page size will be the maximum. Fewer queues
than requested might be returned, even if more queues exist;
use the [next\_page\_token][google.cloud.tasks.v2beta2.ListQue
uesResponse.next\_page\_token] in the response to determine if
more queues exist.
page_token:
A token identifying the page of results to return. To request
the first page results, page\_token must be empty. To request
the next page of results, page\_token must be the value of [ne
xt\_page\_token][google.cloud.tasks.v2beta2.ListQueuesResponse
.next\_page\_token] returned from the previous call to
[ListQueues][google.cloud.tasks.v2beta2.CloudTasks.ListQueues]
method. It is an error to switch the value of the
[filter][google.cloud.tasks.v2beta2.ListQueuesRequest.filter]
while iterating through pages.
""",
# @@protoc_insertion_point(class_scope:google.cloud.tasks.v2beta2.ListQueuesRequest)
))
_sym_db.RegisterMessage(ListQueuesRequest)
ListQueuesResponse = _reflection.GeneratedProtocolMessageType('ListQueuesResponse', (_message.Message,), dict(
DESCRIPTOR = _LISTQUEUESRESPONSE,
__module__ = 'google.cloud.tasks_v2beta2.proto.cloudtasks_pb2'
,
__doc__ = """Response message for
[ListQueues][google.cloud.tasks.v2beta2.CloudTasks.ListQueues].
Attributes:
queues:
The list of queues.
next_page_token:
A token to retrieve next page of results. To return the next
page of results, call
[ListQueues][google.cloud.tasks.v2beta2.CloudTasks.ListQueues]
with this value as the [page\_token][google.cloud.tasks.v2beta
2.ListQueuesRequest.page\_token]. If the next\_page\_token is
empty, there are no more results. The page token is valid for
only 2 hours.
""",
# @@protoc_insertion_point(class_scope:google.cloud.tasks.v2beta2.ListQueuesResponse)
))
_sym_db.RegisterMessage(ListQueuesResponse)
GetQueueRequest = _reflection.GeneratedProtocolMessageType('GetQueueRequest', (_message.Message,), dict(
DESCRIPTOR = _GETQUEUEREQUEST,
__module__ = 'google.cloud.tasks_v2beta2.proto.cloudtasks_pb2'
,
__doc__ = """Request message for
[GetQueue][google.cloud.tasks.v2beta2.CloudTasks.GetQueue].
Attributes:
name:
Required. The resource name of the queue. For example:
``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID``
""",
# @@protoc_insertion_point(class_scope:google.cloud.tasks.v2beta2.GetQueueRequest)
))
_sym_db.RegisterMessage(GetQueueRequest)
CreateQueueRequest = _reflection.GeneratedProtocolMessageType('CreateQueueRequest', (_message.Message,), dict(
DESCRIPTOR = _CREATEQUEUEREQUEST,
__module__ = 'google.cloud.tasks_v2beta2.proto.cloudtasks_pb2'
,
__doc__ = """Request message for
[CreateQueue][google.cloud.tasks.v2beta2.CloudTasks.CreateQueue].
Attributes:
parent:
Required. The location name in which the queue will be
created. For example:
``projects/PROJECT_ID/locations/LOCATION_ID`` The list of
allowed locations can be obtained by calling Cloud Tasks'
implementation of [ListLocations][google.cloud.location.Locati
ons.ListLocations].
queue:
Required. The queue to create. [Queue's
name][google.cloud.tasks.v2beta2.Queue.name] cannot be the
same as an existing queue.
""",
# @@protoc_insertion_point(class_scope:google.cloud.tasks.v2beta2.CreateQueueRequest)
))
_sym_db.RegisterMessage(CreateQueueRequest)
UpdateQueueRequest = _reflection.GeneratedProtocolMessageType('UpdateQueueRequest', (_message.Message,), dict(
DESCRIPTOR = _UPDATEQUEUEREQUEST,
__module__ = 'google.cloud.tasks_v2beta2.proto.cloudtasks_pb2'
,
__doc__ = """Request message for
[UpdateQueue][google.cloud.tasks.v2beta2.CloudTasks.UpdateQueue].
Attributes:
queue:
Required. The queue to create or update. The queue's
[name][google.cloud.tasks.v2beta2.Queue.name] must be
specified. Output only fields cannot be modified using
UpdateQueue. Any value specified for an output only field will
be ignored. The queue's
[name][google.cloud.tasks.v2beta2.Queue.name] cannot be
changed.
update_mask:
A mask used to specify which fields of the queue are being
updated. If empty, then all fields will be updated.
""",
# @@protoc_insertion_point(class_scope:google.cloud.tasks.v2beta2.UpdateQueueRequest)
))
_sym_db.RegisterMessage(UpdateQueueRequest)
DeleteQueueRequest = _reflection.GeneratedProtocolMessageType('DeleteQueueRequest', (_message.Message,), dict(
DESCRIPTOR = _DELETEQUEUEREQUEST,
__module__ = 'google.cloud.tasks_v2beta2.proto.cloudtasks_pb2'
,
__doc__ = """Request message for
[DeleteQueue][google.cloud.tasks.v2beta2.CloudTasks.DeleteQueue].
Attributes:
name:
Required. The queue name. For example:
``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID``
""",
# @@protoc_insertion_point(class_scope:google.cloud.tasks.v2beta2.DeleteQueueRequest)
))
_sym_db.RegisterMessage(DeleteQueueRequest)
PurgeQueueRequest = _reflection.GeneratedProtocolMessageType('PurgeQueueRequest', (_message.Message,), dict(
DESCRIPTOR = _PURGEQUEUEREQUEST,
__module__ = 'google.cloud.tasks_v2beta2.proto.cloudtasks_pb2'
,
__doc__ = """Request message for
[PurgeQueue][google.cloud.tasks.v2beta2.CloudTasks.PurgeQueue].
Attributes:
name:
Required. The queue name. For example:
``projects/PROJECT_ID/location/LOCATION_ID/queues/QUEUE_ID``
""",
# @@protoc_insertion_point(class_scope:google.cloud.tasks.v2beta2.PurgeQueueRequest)
))
_sym_db.RegisterMessage(PurgeQueueRequest)
PauseQueueRequest = _reflection.GeneratedProtocolMessageType('PauseQueueRequest', (_message.Message,), dict(
DESCRIPTOR = _PAUSEQUEUEREQUEST,
__module__ = 'google.cloud.tasks_v2beta2.proto.cloudtasks_pb2'
,
__doc__ = """Request message for
[PauseQueue][google.cloud.tasks.v2beta2.CloudTasks.PauseQueue].
Attributes:
name:
Required. The queue name. For example:
``projects/PROJECT_ID/location/LOCATION_ID/queues/QUEUE_ID``
""",
# @@protoc_insertion_point(class_scope:google.cloud.tasks.v2beta2.PauseQueueRequest)
))
_sym_db.RegisterMessage(PauseQueueRequest)
ResumeQueueRequest = _reflection.GeneratedProtocolMessageType('ResumeQueueRequest', (_message.Message,), dict(
DESCRIPTOR = _RESUMEQUEUEREQUEST,
__module__ = 'google.cloud.tasks_v2beta2.proto.cloudtasks_pb2'
,
__doc__ = """Request message for
[ResumeQueue][google.cloud.tasks.v2beta2.CloudTasks.ResumeQueue].
Attributes:
name:
Required. The queue name. For example:
``projects/PROJECT_ID/location/LOCATION_ID/queues/QUEUE_ID``
""",
# @@protoc_insertion_point(class_scope:google.cloud.tasks.v2beta2.ResumeQueueRequest)
))
_sym_db.RegisterMessage(ResumeQueueRequest)
ListTasksRequest = _reflection.GeneratedProtocolMessageType('ListTasksRequest', (_message.Message,), dict(
DESCRIPTOR = _LISTTASKSREQUEST,
__module__ = 'google.cloud.tasks_v2beta2.proto.cloudtasks_pb2'
,
__doc__ = """Request message for listing tasks using
[ListTasks][google.cloud.tasks.v2beta2.CloudTasks.ListTasks].
Attributes:
parent:
Required. The queue name. For example:
``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID``
response_view:
The response\_view specifies which subset of the
[Task][google.cloud.tasks.v2beta2.Task] will be returned. By
default response\_view is
[BASIC][google.cloud.tasks.v2beta2.Task.View.BASIC]; not all
information is retrieved by default because some data, such as
payloads, might be desirable to return only when needed
because of its large size or because of the sensitivity of
data that it contains. Authorization for
[FULL][google.cloud.tasks.v2beta2.Task.View.FULL] requires
``cloudtasks.tasks.fullView`` `Google IAM
<https://cloud.google.com/iam/>`_ permission on the
[Task][google.cloud.tasks.v2beta2.Task] resource.
page_size:
Requested page size. Fewer tasks than requested might be
returned. The maximum page size is 1000. If unspecified, the
page size will be the maximum. Fewer tasks than requested
might be returned, even if more tasks exist; use [next\_page\_
token][google.cloud.tasks.v2beta2.ListTasksResponse.next\_page
\_token] in the response to determine if more tasks exist.
page_token:
A token identifying the page of results to return. To request
the first page results, page\_token must be empty. To request
the next page of results, page\_token must be the value of [ne
xt\_page\_token][google.cloud.tasks.v2beta2.ListTasksResponse.
next\_page\_token] returned from the previous call to
[ListTasks][google.cloud.tasks.v2beta2.CloudTasks.ListTasks]
method. The page token is valid for only 2 hours.
""",
# @@protoc_insertion_point(class_scope:google.cloud.tasks.v2beta2.ListTasksRequest)
))
_sym_db.RegisterMessage(ListTasksRequest)
ListTasksResponse = _reflection.GeneratedProtocolMessageType('ListTasksResponse', (_message.Message,), dict(
DESCRIPTOR = _LISTTASKSRESPONSE,
__module__ = 'google.cloud.tasks_v2beta2.proto.cloudtasks_pb2'
,
__doc__ = """Response message for listing tasks using
[ListTasks][google.cloud.tasks.v2beta2.CloudTasks.ListTasks].
Attributes:
tasks:
The list of tasks.
next_page_token:
A token to retrieve next page of results. To return the next
page of results, call
[ListTasks][google.cloud.tasks.v2beta2.CloudTasks.ListTasks]
with this value as the [page\_token][google.cloud.tasks.v2beta
2.ListTasksRequest.page\_token]. If the next\_page\_token is
empty, there are no more results.
""",
# @@protoc_insertion_point(class_scope:google.cloud.tasks.v2beta2.ListTasksResponse)
))
_sym_db.RegisterMessage(ListTasksResponse)
GetTaskRequest = _reflection.GeneratedProtocolMessageType('GetTaskRequest', (_message.Message,), dict(
DESCRIPTOR = _GETTASKREQUEST,
__module__ = 'google.cloud.tasks_v2beta2.proto.cloudtasks_pb2'
,
__doc__ = """Request message for getting a task using
[GetTask][google.cloud.tasks.v2beta2.CloudTasks.GetTask].
Attributes:
name:
Required. The task name. For example: ``projects/PROJECT_ID/l
ocations/LOCATION_ID/queues/QUEUE_ID/tasks/TASK_ID``
response_view:
The response\_view specifies which subset of the
[Task][google.cloud.tasks.v2beta2.Task] will be returned. By
default response\_view is
[BASIC][google.cloud.tasks.v2beta2.Task.View.BASIC]; not all
information is retrieved by default because some data, such as
payloads, might be desirable to return only when needed
because of its large size or because of the sensitivity of
data that it contains. Authorization for
[FULL][google.cloud.tasks.v2beta2.Task.View.FULL] requires
``cloudtasks.tasks.fullView`` `Google IAM
<https://cloud.google.com/iam/>`_ permission on the
[Task][google.cloud.tasks.v2beta2.Task] resource.
""",
# @@protoc_insertion_point(class_scope:google.cloud.tasks.v2beta2.GetTaskRequest)
))
_sym_db.RegisterMessage(GetTaskRequest)
CreateTaskRequest = _reflection.GeneratedProtocolMessageType('CreateTaskRequest', (_message.Message,), dict(
DESCRIPTOR = _CREATETASKREQUEST,
__module__ = 'google.cloud.tasks_v2beta2.proto.cloudtasks_pb2'
,
__doc__ = """Request message for
[CreateTask][google.cloud.tasks.v2beta2.CloudTasks.CreateTask].
Attributes:
parent:
Required. The queue name. For example:
``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID``
The queue must already exist.
task:
Required. The task to add. Task names have the following
format: ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUE
UE_ID/tasks/TASK_ID``. The user can optionally specify a task
[name][google.cloud.tasks.v2beta2.Task.name]. If a name is not
specified then the system will generate a random unique task
id, which will be set in the task returned in the
[response][google.cloud.tasks.v2beta2.Task.name]. If [schedul
e\_time][google.cloud.tasks.v2beta2.Task.schedule\_time] is
not set or is in the past then Cloud Tasks will set it to the
current time. Task De-duplication: Explicitly specifying a
task ID enables task de-duplication. If a task's ID is
identical to that of an existing task or a task that was
deleted or completed recently then the call will fail with
[ALREADY\_EXISTS][google.rpc.Code.ALREADY\_EXISTS]. If the
task's queue was created using Cloud Tasks, then another task
with the same name can't be created for ~1hour after the
original task was deleted or completed. If the task's queue
was created using queue.yaml or queue.xml, then another task
with the same name can't be created for ~9days after the
original task was deleted or completed. Because there is an
extra lookup cost to identify duplicate task names, these
[CreateTask][google.cloud.tasks.v2beta2.CloudTasks.CreateTask]
calls have significantly increased latency. Using hashed
strings for the task id or for the prefix of the task id is
recommended. Choosing task ids that are sequential or have
sequential prefixes, for example using a timestamp, causes an
increase in latency and error rates in all task commands. The
infrastructure relies on an approximately uniform distribution
of task ids to store and serve tasks efficiently.
response_view:
The response\_view specifies which subset of the
[Task][google.cloud.tasks.v2beta2.Task] will be returned. By
default response\_view is
[BASIC][google.cloud.tasks.v2beta2.Task.View.BASIC]; not all
information is retrieved by default because some data, such as
payloads, might be desirable to return only when needed
because of its large size or because of the sensitivity of
data that it contains. Authorization for
[FULL][google.cloud.tasks.v2beta2.Task.View.FULL] requires
``cloudtasks.tasks.fullView`` `Google IAM
<https://cloud.google.com/iam/>`_ permission on the
[Task][google.cloud.tasks.v2beta2.Task] resource.
""",
# @@protoc_insertion_point(class_scope:google.cloud.tasks.v2beta2.CreateTaskRequest)
))
_sym_db.RegisterMessage(CreateTaskRequest)
DeleteTaskRequest = _reflection.GeneratedProtocolMessageType('DeleteTaskRequest', (_message.Message,), dict(
DESCRIPTOR = _DELETETASKREQUEST,
__module__ = 'google.cloud.tasks_v2beta2.proto.cloudtasks_pb2'
,
__doc__ = """Request message for deleting a task using
[DeleteTask][google.cloud.tasks.v2beta2.CloudTasks.DeleteTask].
Attributes:
name:
Required. The task name. For example: ``projects/PROJECT_ID/l
ocations/LOCATION_ID/queues/QUEUE_ID/tasks/TASK_ID``
""",
# @@protoc_insertion_point(class_scope:google.cloud.tasks.v2beta2.DeleteTaskRequest)
))
_sym_db.RegisterMessage(DeleteTaskRequest)
LeaseTasksRequest = _reflection.GeneratedProtocolMessageType('LeaseTasksRequest', (_message.Message,), dict(
DESCRIPTOR = _LEASETASKSREQUEST,
__module__ = 'google.cloud.tasks_v2beta2.proto.cloudtasks_pb2'
,
__doc__ = """Request message for leasing tasks using
[LeaseTasks][google.cloud.tasks.v2beta2.CloudTasks.LeaseTasks].
Attributes:
parent:
Required. The queue name. For example:
``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID``
max_tasks:
The maximum number of tasks to lease. The system will make a
best effort to return as close to as ``max_tasks`` as
possible. The largest that ``max_tasks`` can be is 1000.
lease_duration:
After the worker has successfully finished the work associated
with the task, the worker must call via [AcknowledgeTask][goog
le.cloud.tasks.v2beta2.CloudTasks.AcknowledgeTask] before the
[schedule\_time][google.cloud.tasks.v2beta2.Task.schedule\_tim
e]. Otherwise the task will be returned to a later
[LeaseTasks][google.cloud.tasks.v2beta2.CloudTasks.LeaseTasks]
call so that another worker can retry it. The maximum lease
duration is 1 week. ``lease_duration`` will be truncated to
the nearest second.
response_view:
The response\_view specifies which subset of the
[Task][google.cloud.tasks.v2beta2.Task] will be returned. By
default response\_view is
[BASIC][google.cloud.tasks.v2beta2.Task.View.BASIC]; not all
information is retrieved by default because some data, such as
payloads, might be desirable to return only when needed
because of its large size or because of the sensitivity of
data that it contains. Authorization for
[FULL][google.cloud.tasks.v2beta2.Task.View.FULL] requires
``cloudtasks.tasks.fullView`` `Google IAM
<https://cloud.google.com/iam/>`_ permission on the
[Task][google.cloud.tasks.v2beta2.Task] resource.
filter:
``filter`` can be used to specify a subset of tasks to lease.
When ``filter`` is set to ``tag=<my-tag>`` then the
[response][google.cloud.tasks.v2beta2.LeaseTasksResponse] will
contain only tasks whose
[tag][google.cloud.tasks.v2beta2.PullMessage.tag] is equal to
``<my-tag>``. ``<my-tag>`` must be less than 500 characters.
When ``filter`` is set to ``tag_function=oldest_tag()``, only
tasks which have the same tag as the task with the oldest [sch
edule\_time][google.cloud.tasks.v2beta2.Task.schedule\_time]
will be returned. Grammar Syntax: - ``filter = "tag=" tag |
"tag_function=" function`` - ``tag = string`` - ``function
= "oldest_tag()"`` The ``oldest_tag()`` function returns
tasks which have the same tag as the oldest task (ordered by
schedule time). SDK compatibility: Although the SDK allows
tags to be either string or `bytes <https://cloud.google.com/a
ppengine/docs/standard/java/javadoc/com/google/appengine/api/t
askqueue/TaskOptions.html#tag-byte:A->`_, only UTF-8 encoded
tags can be used in Cloud Tasks. Tag which aren't UTF-8
encoded can't be used in the
[filter][google.cloud.tasks.v2beta2.LeaseTasksRequest.filter]
and the task's
[tag][google.cloud.tasks.v2beta2.PullMessage.tag] will be
displayed as empty in Cloud Tasks.
""",
# @@protoc_insertion_point(class_scope:google.cloud.tasks.v2beta2.LeaseTasksRequest)
))
_sym_db.RegisterMessage(LeaseTasksRequest)
LeaseTasksResponse = _reflection.GeneratedProtocolMessageType('LeaseTasksResponse', (_message.Message,), dict(
DESCRIPTOR = _LEASETASKSRESPONSE,
__module__ = 'google.cloud.tasks_v2beta2.proto.cloudtasks_pb2'
,
__doc__ = """Response message for leasing tasks using
[LeaseTasks][google.cloud.tasks.v2beta2.CloudTasks.LeaseTasks].
Attributes:
tasks:
The leased tasks.
""",
# @@protoc_insertion_point(class_scope:google.cloud.tasks.v2beta2.LeaseTasksResponse)
))
_sym_db.RegisterMessage(LeaseTasksResponse)
AcknowledgeTaskRequest = _reflection.GeneratedProtocolMessageType('AcknowledgeTaskRequest', (_message.Message,), dict(
DESCRIPTOR = _ACKNOWLEDGETASKREQUEST,
__module__ = 'google.cloud.tasks_v2beta2.proto.cloudtasks_pb2'
,
__doc__ = """Request message for acknowledging a task using
[AcknowledgeTask][google.cloud.tasks.v2beta2.CloudTasks.AcknowledgeTask].
Attributes:
name:
Required. The task name. For example: ``projects/PROJECT_ID/l
ocations/LOCATION_ID/queues/QUEUE_ID/tasks/TASK_ID``
schedule_time:
Required. The task's current schedule time, available in the
[schedule\_time][google.cloud.tasks.v2beta2.Task.schedule\_tim
e] returned by
[LeaseTasks][google.cloud.tasks.v2beta2.CloudTasks.LeaseTasks]
response or
[RenewLease][google.cloud.tasks.v2beta2.CloudTasks.RenewLease]
response. This restriction is to ensure that your worker
currently holds the lease.
""",
# @@protoc_insertion_point(class_scope:google.cloud.tasks.v2beta2.AcknowledgeTaskRequest)
))
_sym_db.RegisterMessage(AcknowledgeTaskRequest)
RenewLeaseRequest = _reflection.GeneratedProtocolMessageType('RenewLeaseRequest', (_message.Message,), dict(
DESCRIPTOR = _RENEWLEASEREQUEST,
__module__ = 'google.cloud.tasks_v2beta2.proto.cloudtasks_pb2'
,
__doc__ = """Request message for renewing a lease using
[RenewLease][google.cloud.tasks.v2beta2.CloudTasks.RenewLease].
Attributes:
name:
Required. The task name. For example: ``projects/PROJECT_ID/l
ocations/LOCATION_ID/queues/QUEUE_ID/tasks/TASK_ID``
schedule_time:
Required. The task's current schedule time, available in the
[schedule\_time][google.cloud.tasks.v2beta2.Task.schedule\_tim
e] returned by
[LeaseTasks][google.cloud.tasks.v2beta2.CloudTasks.LeaseTasks]
response or
[RenewLease][google.cloud.tasks.v2beta2.CloudTasks.RenewLease]
response. This restriction is to ensure that your worker
currently holds the lease.
lease_duration:
Required. The desired new lease duration, starting from now.
The maximum lease duration is 1 week. ``lease_duration`` will
be truncated to the nearest second.
response_view:
The response\_view specifies which subset of the
[Task][google.cloud.tasks.v2beta2.Task] will be returned. By
default response\_view is
[BASIC][google.cloud.tasks.v2beta2.Task.View.BASIC]; not all
information is retrieved by default because some data, such as
payloads, might be desirable to return only when needed
because of its large size or because of the sensitivity of
data that it contains. Authorization for
[FULL][google.cloud.tasks.v2beta2.Task.View.FULL] requires
``cloudtasks.tasks.fullView`` `Google IAM
<https://cloud.google.com/iam/>`_ permission on the
[Task][google.cloud.tasks.v2beta2.Task] resource.
""",
# @@protoc_insertion_point(class_scope:google.cloud.tasks.v2beta2.RenewLeaseRequest)
))
_sym_db.RegisterMessage(RenewLeaseRequest)
CancelLeaseRequest = _reflection.GeneratedProtocolMessageType('CancelLeaseRequest', (_message.Message,), dict(
DESCRIPTOR = _CANCELLEASEREQUEST,
__module__ = 'google.cloud.tasks_v2beta2.proto.cloudtasks_pb2'
,
__doc__ = """Request message for canceling a lease using
[CancelLease][google.cloud.tasks.v2beta2.CloudTasks.CancelLease].
Attributes:
name:
Required. The task name. For example: ``projects/PROJECT_ID/l
ocations/LOCATION_ID/queues/QUEUE_ID/tasks/TASK_ID``
schedule_time:
Required. The task's current schedule time, available in the
[schedule\_time][google.cloud.tasks.v2beta2.Task.schedule\_tim
e] returned by
[LeaseTasks][google.cloud.tasks.v2beta2.CloudTasks.LeaseTasks]
response or
[RenewLease][google.cloud.tasks.v2beta2.CloudTasks.RenewLease]
response. This restriction is to ensure that your worker
currently holds the lease.
response_view:
The response\_view specifies which subset of the
[Task][google.cloud.tasks.v2beta2.Task] will be returned. By
default response\_view is
[BASIC][google.cloud.tasks.v2beta2.Task.View.BASIC]; not all
information is retrieved by default because some data, such as
payloads, might be desirable to return only when needed
because of its large size or because of the sensitivity of
data that it contains. Authorization for
[FULL][google.cloud.tasks.v2beta2.Task.View.FULL] requires
``cloudtasks.tasks.fullView`` `Google IAM
<https://cloud.google.com/iam/>`_ permission on the
[Task][google.cloud.tasks.v2beta2.Task] resource.
""",
# @@protoc_insertion_point(class_scope:google.cloud.tasks.v2beta2.CancelLeaseRequest)
))
_sym_db.RegisterMessage(CancelLeaseRequest)
RunTaskRequest = _reflection.GeneratedProtocolMessageType('RunTaskRequest', (_message.Message,), dict(
DESCRIPTOR = _RUNTASKREQUEST,
__module__ = 'google.cloud.tasks_v2beta2.proto.cloudtasks_pb2'
,
__doc__ = """Request message for forcing a task to run now using
[RunTask][google.cloud.tasks.v2beta2.CloudTasks.RunTask].
Attributes:
name:
Required. The task name. For example: ``projects/PROJECT_ID/l
ocations/LOCATION_ID/queues/QUEUE_ID/tasks/TASK_ID``
response_view:
The response\_view specifies which subset of the
[Task][google.cloud.tasks.v2beta2.Task] will be returned. By
default response\_view is
[BASIC][google.cloud.tasks.v2beta2.Task.View.BASIC]; not all
information is retrieved by default because some data, such as
payloads, might be desirable to return only when needed
because of its large size or because of the sensitivity of
data that it contains. Authorization for
[FULL][google.cloud.tasks.v2beta2.Task.View.FULL] requires
``cloudtasks.tasks.fullView`` `Google IAM
<https://cloud.google.com/iam/>`_ permission on the
[Task][google.cloud.tasks.v2beta2.Task] resource.
""",
# @@protoc_insertion_point(class_scope:google.cloud.tasks.v2beta2.RunTaskRequest)
))
_sym_db.RegisterMessage(RunTaskRequest)
# Attach the file-level options (java_package, go_package, etc.) parsed
# from their serialized form. Generated code — do not edit by hand.
DESCRIPTOR.has_options = True
DESCRIPTOR._options = _descriptor._ParseOptions(descriptor_pb2.FileOptions(), _b('\n\036com.google.cloud.tasks.v2beta2B\017CloudTasksProtoP\001Z?google.golang.org/genproto/googleapis/cloud/tasks/v2beta2;tasks\242\002\005TASKS'))
_CLOUDTASKS = _descriptor.ServiceDescriptor(
name='CloudTasks',
full_name='google.cloud.tasks.v2beta2.CloudTasks',
file=DESCRIPTOR,
index=0,
options=None,
serialized_start=2290,
serialized_end=5602,
methods=[
_descriptor.MethodDescriptor(
name='ListQueues',
full_name='google.cloud.tasks.v2beta2.CloudTasks.ListQueues',
index=0,
containing_service=None,
input_type=_LISTQUEUESREQUEST,
output_type=_LISTQUEUESRESPONSE,
options=_descriptor._ParseOptions(descriptor_pb2.MethodOptions(), _b('\202\323\344\223\0021\022//v2beta2/{parent=projects/*/locations/*}/queues')),
),
_descriptor.MethodDescriptor(
name='GetQueue',
full_name='google.cloud.tasks.v2beta2.CloudTasks.GetQueue',
index=1,
containing_service=None,
input_type=_GETQUEUEREQUEST,
output_type=google_dot_cloud_dot_tasks__v2beta2_dot_proto_dot_queue__pb2._QUEUE,
options=_descriptor._ParseOptions(descriptor_pb2.MethodOptions(), _b('\202\323\344\223\0021\022//v2beta2/{name=projects/*/locations/*/queues/*}')),
),
_descriptor.MethodDescriptor(
name='CreateQueue',
full_name='google.cloud.tasks.v2beta2.CloudTasks.CreateQueue',
index=2,
containing_service=None,
input_type=_CREATEQUEUEREQUEST,
output_type=google_dot_cloud_dot_tasks__v2beta2_dot_proto_dot_queue__pb2._QUEUE,
options=_descriptor._ParseOptions(descriptor_pb2.MethodOptions(), _b('\202\323\344\223\0028\"//v2beta2/{parent=projects/*/locations/*}/queues:\005queue')),
),
_descriptor.MethodDescriptor(
name='UpdateQueue',
full_name='google.cloud.tasks.v2beta2.CloudTasks.UpdateQueue',
index=3,
containing_service=None,
input_type=_UPDATEQUEUEREQUEST,
output_type=google_dot_cloud_dot_tasks__v2beta2_dot_proto_dot_queue__pb2._QUEUE,
options=_descriptor._ParseOptions(descriptor_pb2.MethodOptions(), _b('\202\323\344\223\002>25/v2beta2/{queue.name=projects/*/locations/*/queues/*}:\005queue')),
),
_descriptor.MethodDescriptor(
name='DeleteQueue',
full_name='google.cloud.tasks.v2beta2.CloudTasks.DeleteQueue',
index=4,
containing_service=None,
input_type=_DELETEQUEUEREQUEST,
output_type=google_dot_protobuf_dot_empty__pb2._EMPTY,
options=_descriptor._ParseOptions(descriptor_pb2.MethodOptions(), _b('\202\323\344\223\0021*//v2beta2/{name=projects/*/locations/*/queues/*}')),
),
_descriptor.MethodDescriptor(
name='PurgeQueue',
full_name='google.cloud.tasks.v2beta2.CloudTasks.PurgeQueue',
index=5,
containing_service=None,
input_type=_PURGEQUEUEREQUEST,
output_type=google_dot_cloud_dot_tasks__v2beta2_dot_proto_dot_queue__pb2._QUEUE,
options=_descriptor._ParseOptions(descriptor_pb2.MethodOptions(), _b('\202\323\344\223\002:\"5/v2beta2/{name=projects/*/locations/*/queues/*}:purge:\001*')),
),
_descriptor.MethodDescriptor(
name='PauseQueue',
full_name='google.cloud.tasks.v2beta2.CloudTasks.PauseQueue',
index=6,
containing_service=None,
input_type=_PAUSEQUEUEREQUEST,
output_type=google_dot_cloud_dot_tasks__v2beta2_dot_proto_dot_queue__pb2._QUEUE,
options=_descriptor._ParseOptions(descriptor_pb2.MethodOptions(), _b('\202\323\344\223\002:\"5/v2beta2/{name=projects/*/locations/*/queues/*}:pause:\001*')),
),
_descriptor.MethodDescriptor(
name='ResumeQueue',
full_name='google.cloud.tasks.v2beta2.CloudTasks.ResumeQueue',
index=7,
containing_service=None,
input_type=_RESUMEQUEUEREQUEST,
output_type=google_dot_cloud_dot_tasks__v2beta2_dot_proto_dot_queue__pb2._QUEUE,
options=_descriptor._ParseOptions(descriptor_pb2.MethodOptions(), _b('\202\323\344\223\002;\"6/v2beta2/{name=projects/*/locations/*/queues/*}:resume:\001*')),
),
_descriptor.MethodDescriptor(
name='GetIamPolicy',
full_name='google.cloud.tasks.v2beta2.CloudTasks.GetIamPolicy',
index=8,
containing_service=None,
input_type=google_dot_iam_dot_v1_dot_iam__policy__pb2._GETIAMPOLICYREQUEST,
output_type=google_dot_iam_dot_v1_dot_policy__pb2._POLICY,
options=_descriptor._ParseOptions(descriptor_pb2.MethodOptions(), _b('\202\323\344\223\002E\"@/v2beta2/{resource=projects/*/locations/*/queues/*}:getIamPolicy:\001*')),
),
_descriptor.MethodDescriptor(
name='SetIamPolicy',
full_name='google.cloud.tasks.v2beta2.CloudTasks.SetIamPolicy',
index=9,
containing_service=None,
input_type=google_dot_iam_dot_v1_dot_iam__policy__pb2._SETIAMPOLICYREQUEST,
output_type=google_dot_iam_dot_v1_dot_policy__pb2._POLICY,
options=_descriptor._ParseOptions(descriptor_pb2.MethodOptions(), _b('\202\323\344\223\002E\"@/v2beta2/{resource=projects/*/locations/*/queues/*}:setIamPolicy:\001*')),
),
_descriptor.MethodDescriptor(
name='TestIamPermissions',
full_name='google.cloud.tasks.v2beta2.CloudTasks.TestIamPermissions',
index=10,
containing_service=None,
input_type=google_dot_iam_dot_v1_dot_iam__policy__pb2._TESTIAMPERMISSIONSREQUEST,
output_type=google_dot_iam_dot_v1_dot_iam__policy__pb2._TESTIAMPERMISSIONSRESPONSE,
options=_descriptor._ParseOptions(descriptor_pb2.MethodOptions(), _b('\202\323\344\223\002K\"F/v2beta2/{resource=projects/*/locations/*/queues/*}:testIamPermissions:\001*')),
),
_descriptor.MethodDescriptor(
name='ListTasks',
full_name='google.cloud.tasks.v2beta2.CloudTasks.ListTasks',
index=11,
containing_service=None,
input_type=_LISTTASKSREQUEST,
output_type=_LISTTASKSRESPONSE,
options=_descriptor._ParseOptions(descriptor_pb2.MethodOptions(), _b('\202\323\344\223\0029\0227/v2beta2/{parent=projects/*/locations/*/queues/*}/tasks')),
),
_descriptor.MethodDescriptor(
name='GetTask',
full_name='google.cloud.tasks.v2beta2.CloudTasks.GetTask',
index=12,
containing_service=None,
input_type=_GETTASKREQUEST,
output_type=google_dot_cloud_dot_tasks__v2beta2_dot_proto_dot_task__pb2._TASK,
options=_descriptor._ParseOptions(descriptor_pb2.MethodOptions(), _b('\202\323\344\223\0029\0227/v2beta2/{name=projects/*/locations/*/queues/*/tasks/*}')),
),
_descriptor.MethodDescriptor(
name='CreateTask',
full_name='google.cloud.tasks.v2beta2.CloudTasks.CreateTask',
index=13,
containing_service=None,
input_type=_CREATETASKREQUEST,
output_type=google_dot_cloud_dot_tasks__v2beta2_dot_proto_dot_task__pb2._TASK,
options=_descriptor._ParseOptions(descriptor_pb2.MethodOptions(), _b('\202\323\344\223\002<\"7/v2beta2/{parent=projects/*/locations/*/queues/*}/tasks:\001*')),
),
_descriptor.MethodDescriptor(
name='DeleteTask',
full_name='google.cloud.tasks.v2beta2.CloudTasks.DeleteTask',
index=14,
containing_service=None,
input_type=_DELETETASKREQUEST,
output_type=google_dot_protobuf_dot_empty__pb2._EMPTY,
options=_descriptor._ParseOptions(descriptor_pb2.MethodOptions(), _b('\202\323\344\223\0029*7/v2beta2/{name=projects/*/locations/*/queues/*/tasks/*}')),
),
_descriptor.MethodDescriptor(
name='LeaseTasks',
full_name='google.cloud.tasks.v2beta2.CloudTasks.LeaseTasks',
index=15,
containing_service=None,
input_type=_LEASETASKSREQUEST,
output_type=_LEASETASKSRESPONSE,
options=_descriptor._ParseOptions(descriptor_pb2.MethodOptions(), _b('\202\323\344\223\002B\"=/v2beta2/{parent=projects/*/locations/*/queues/*}/tasks:lease:\001*')),
),
_descriptor.MethodDescriptor(
name='AcknowledgeTask',
full_name='google.cloud.tasks.v2beta2.CloudTasks.AcknowledgeTask',
index=16,
containing_service=None,
input_type=_ACKNOWLEDGETASKREQUEST,
output_type=google_dot_protobuf_dot_empty__pb2._EMPTY,
options=_descriptor._ParseOptions(descriptor_pb2.MethodOptions(), _b('\202\323\344\223\002H\"C/v2beta2/{name=projects/*/locations/*/queues/*/tasks/*}:acknowledge:\001*')),
),
_descriptor.MethodDescriptor(
name='RenewLease',
full_name='google.cloud.tasks.v2beta2.CloudTasks.RenewLease',
index=17,
containing_service=None,
input_type=_RENEWLEASEREQUEST,
output_type=google_dot_cloud_dot_tasks__v2beta2_dot_proto_dot_task__pb2._TASK,
options=_descriptor._ParseOptions(descriptor_pb2.MethodOptions(), _b('\202\323\344\223\002G\"B/v2beta2/{name=projects/*/locations/*/queues/*/tasks/*}:renewLease:\001*')),
),
_descriptor.MethodDescriptor(
name='CancelLease',
full_name='google.cloud.tasks.v2beta2.CloudTasks.CancelLease',
index=18,
containing_service=None,
input_type=_CANCELLEASEREQUEST,
output_type=google_dot_cloud_dot_tasks__v2beta2_dot_proto_dot_task__pb2._TASK,
options=_descriptor._ParseOptions(descriptor_pb2.MethodOptions(), _b('\202\323\344\223\002H\"C/v2beta2/{name=projects/*/locations/*/queues/*/tasks/*}:cancelLease:\001*')),
),
_descriptor.MethodDescriptor(
name='RunTask',
full_name='google.cloud.tasks.v2beta2.CloudTasks.RunTask',
index=19,
containing_service=None,
input_type=_RUNTASKREQUEST,
output_type=google_dot_cloud_dot_tasks__v2beta2_dot_proto_dot_task__pb2._TASK,
options=_descriptor._ParseOptions(descriptor_pb2.MethodOptions(), _b('\202\323\344\223\002@\";/v2beta2/{name=projects/*/locations/*/queues/*/tasks/*}:run:\001*')),
),
])
_sym_db.RegisterServiceDescriptor(_CLOUDTASKS)
DESCRIPTOR.services_by_name['CloudTasks'] = _CLOUDTASKS
# @@protoc_insertion_point(module_scope)
| apache-2.0 |
calvinleenyc/zulip | zproject/test_settings.py | 7 | 4212 | from __future__ import absolute_import
import os
# test_settings.py works differently from
# dev_settings.py/prod_settings.py; it actually is directly referenced
# by the test suite as DJANGO_SETTINGS_MODULE and imports settings.py
# directly and then hacks up the values that are different for the
# test suite. As will be explained, this is kinda messy and probably
# we'd be better off switching it to work more like dev_settings.py,
# but for now, this is what we have.
#
# An important downside of the test_settings.py approach is that if we
# want to change any settings that settings.py then computes
# additional settings from (e.g. EXTERNAL_HOST), we need to do a hack
# like the below line(s) before we import from settings, for
# transmitting the value of EXTERNAL_HOST to dev_settings.py so that
# it can be set there, at the right place in the settings.py flow.
# Ick.
if os.getenv("EXTERNAL_HOST") is None:
    os.environ["EXTERNAL_HOST"] = "testserver"
from .settings import *
# Point Django at a dedicated test database so test runs never touch
# development data.
DATABASES["default"] = {"NAME": "zulip_test",
                        "USER": "zulip_test",
                        "PASSWORD": LOCAL_DATABASE_PASSWORD,
                        "HOST": "localhost",
                        "SCHEMA": "zulip",
                        "ENGINE": "django.db.backends.postgresql_psycopg2",
                        "TEST_NAME": "django_zulip_tests",
                        "OPTIONS": {"connection_factory": TimeTrackingConnection },}
if USING_PGROONGA:
    # We need to have "pgroonga" schema before "pg_catalog" schema in
    # the PostgreSQL search path, because "pgroonga" schema overrides
    # the "@@" operator from "pg_catalog" schema, and "pg_catalog"
    # schema is searched first if not specified in the search path.
    # See also: http://www.postgresql.org/docs/current/static/runtime-config-client.html
    pg_options = '-c search_path=%(SCHEMA)s,zulip,public,pgroonga,pg_catalog' % \
        DATABASES['default']
    DATABASES['default']['OPTIONS']['options'] = pg_options
# In theory this should just go in zproject/settings.py inside the `if
# PIPELINE_ENABLED` statement, but because zproject/settings.py is processed
# first, we have to add it here as a hack.
JS_SPECS['app']['source_filenames'].append('js/bundle.js')
if "TORNADO_SERVER" in os.environ:
    # This covers the Casper test suite case
    TORNADO_SERVER = os.environ["TORNADO_SERVER"]
else:
    # This covers the backend test suite case
    TORNADO_SERVER = None
# Camo image-proxy settings; the key is a dummy since tests never hit
# the real proxy.
CAMO_URI = 'https://external-content.zulipcdn.net/'
CAMO_KEY = 'dummy'
# Decrease the get_updates timeout to 1 second.
# This allows CasperJS to proceed quickly to the next test step.
POLL_TIMEOUT = 1000
# Don't use the real message log for tests
EVENT_LOG_DIR = '/tmp/zulip-test-event-log'
# Print our emails rather than sending them
EMAIL_BACKEND = 'django.core.mail.backends.locmem.EmailBackend'
# The test suite uses EmailAuthBackend
AUTHENTICATION_BACKENDS += ('zproject.backends.EmailAuthBackend',)
# Configure Google Oauth2
GOOGLE_OAUTH2_CLIENT_ID = "test_client_id"
# Makes testing LDAP backend require less mocking
AUTH_LDAP_ALWAYS_UPDATE_USER = False
TEST_SUITE = True
RATE_LIMITING = False
# Don't use rabbitmq from the test suite -- the user_profile_ids for
# any generated queue elements won't match those being used by the
# real app.
USING_RABBITMQ = False
# Disable the tutorial because it confuses the client tests.
TUTORIAL_ENABLED = False
# Disable use of memcached for caching
CACHES['database'] = {
    'BACKEND': 'django.core.cache.backends.dummy.DummyCache',
    'LOCATION': 'zulip-database-test-cache',
    'TIMEOUT': 3600,
    'CONN_MAX_AGE': 600,
    'OPTIONS': {
        'MAX_ENTRIES': 100000
    }
}
# Enable file:/// hyperlink support by default in tests
ENABLE_FILE_LINKS = True
# Silence noisy request/management logging during test runs.
LOGGING['loggers']['zulip.requests']['level'] = 'CRITICAL'
LOGGING['loggers']['zulip.management']['level'] = 'CRITICAL'
# Upload settings; the S3 values are dummies used by mocked tests.
LOCAL_UPLOADS_DIR = 'var/test_uploads'
S3_KEY = 'test-key'
S3_SECRET_KEY = 'test-secret-key'
S3_AUTH_UPLOADS_BUCKET = 'test-authed-bucket'
# NOTE(review): any non-empty env value (even "false") enables this flag,
# since bool() of a non-empty string is True — set/unset only.
REALMS_HAVE_SUBDOMAINS = bool(os.getenv('REALMS_HAVE_SUBDOMAINS', False))
# Test Custom TOS template rendering
TERMS_OF_SERVICE = 'corporate/terms.md'
| apache-2.0 |
leeclemens/dnspython | examples/zonediff.py | 79 | 10711 | #!/usr/bin/env python
#
# Small library and commandline tool to do logical diffs of zonefiles
# ./zonediff -h gives you help output
#
# Requires dnspython to do all the heavy lifting
#
# (c)2009 Dennis Kaarsemaker <dennis@kaarsemaker.net>
#
# Permission to use, copy, modify, and distribute this software and its
# documentation for any purpose with or without fee is hereby granted,
# provided that the above copyright notice and this permission notice
# appear in all copies.
#
# THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
"""See diff_zones.__doc__ for more information"""
__all__ = ['diff_zones', 'format_changes_plain', 'format_changes_html']
try:
import dns.zone
except ImportError:
import sys
sys.stderr.write("Please install dnspython")
sys.exit(1)
def diff_zones(zone1, zone2, ignore_ttl=False, ignore_soa=False):
    """Compare two dns.zone.Zone objects.

    Returns a list of (name, oldnode, newnode) tuples, one per added,
    removed or modified name. If ignore_ttl is true, TTL-only differences
    are suppressed; if ignore_soa is true, SOA-only differences are
    suppressed. The returned nodes carry all of their Rdata sets,
    including unchanged ones.
    """
    changes = []
    # Names present in zone1: report removals and modifications.
    for key in zone1:
        key = str(key)
        old_node = zone1.get_node(key)
        new_node = zone2.get_node(key)
        if not new_node or _nodes_differ(old_node, new_node, ignore_ttl, ignore_soa):
            changes.append((str(key), old_node, new_node))
    # Names present only in zone2: report additions.
    for key in zone2:
        old_node = zone1.get_node(key)
        if not old_node:
            changes.append((str(key), old_node, zone2.get_node(key)))
    return changes
def _nodes_differ(n1, n2, ignore_ttl, ignore_soa):
if ignore_soa or not ignore_ttl:
# Compare datasets directly
for r in n1.rdatasets:
if ignore_soa and r.rdtype == dns.rdatatype.SOA:
continue
if r not in n2.rdatasets:
return True
if not ignore_ttl:
return r.ttl != n2.find_rdataset(r.rdclass, r.rdtype).ttl
for r in n2.rdatasets:
if ignore_soa and r.rdtype == dns.rdatatype.SOA:
continue
if r not in n1.rdatasets:
return True
else:
return n1 != n2
def format_changes_plain(oldf, newf, changes, ignore_ttl=False):
    """format_changes(oldfile, newfile, changes, ignore_ttl=False) -> str

    Given 2 filenames and a list of changes from diff_zones, produce diff-like
    output. If ignore_ttl is True, TTL-only changes are not displayed.
    """
    ret = "--- %s\n+++ %s\n" % (oldf, newf)
    for name, old, new in changes:
        ret += "@ %s\n" % name
        if not old:
            # Name only in the new zone: everything is an addition.
            for r in new.rdatasets:
                ret += "+ %s\n" % str(r).replace('\n', '\n+ ')
        elif not new:
            # Name only in the old zone: everything is a removal.
            # Bug fix: continuation lines of removed records were being
            # prefixed with '+ ' instead of '- '.
            for r in old.rdatasets:
                ret += "- %s\n" % str(r).replace('\n', '\n- ')
        else:
            # Name in both zones: emit per-rdataset removals then additions.
            for r in old.rdatasets:
                if r not in new.rdatasets or (r.ttl != new.find_rdataset(r.rdclass, r.rdtype).ttl and not ignore_ttl):
                    ret += "- %s\n" % str(r).replace('\n', '\n- ')
            for r in new.rdatasets:
                if r not in old.rdatasets or (r.ttl != old.find_rdataset(r.rdclass, r.rdtype).ttl and not ignore_ttl):
                    ret += "+ %s\n" % str(r).replace('\n', '\n+ ')
    return ret
def format_changes_html(oldf, newf, changes, ignore_ttl=False):
    """format_changes(oldfile, newfile, changes, ignore_ttl=False) -> str

    Given 2 filenames and a list of changes from diff_zones, produce an HTML
    table describing the differences. If ignore_ttl is True, TTL-only changes
    are not displayed.
    """
    # NOTE(review): rdata text is inserted without HTML escaping — confirm
    # that zone contents are trusted before rendering this output.
    # Table header names the two files being compared.
    ret = '''<table class="zonediff">
 <thead>
 <tr>
 <th> </th>
 <th class="old">%s</th>
 <th class="new">%s</th>
 </tr>
 </thead>
 <tbody>\n''' % (oldf, newf)
    for name, old, new in changes:
        ret += ' <tr class="rdata">\n <td class="rdname">%s</td>\n' % name
        if not old:
            # Name only in the new zone: empty "old" cell per rdataset.
            for r in new.rdatasets:
                ret += ' <td class="old"> </td>\n <td class="new">%s</td>\n' % str(r).replace('\n','<br />')
        elif not new:
            # Name only in the old zone: empty "new" cell per rdataset.
            for r in old.rdatasets:
                ret += ' <td class="old">%s</td>\n <td class="new"> </td>\n' % str(r).replace('\n','<br />')
        else:
            # Name in both zones: one "old" and one "new" cell containing
            # only the rdatasets that actually changed.
            ret += ' <td class="old">'
            for r in old.rdatasets:
                if r not in new.rdatasets or (r.ttl != new.find_rdataset(r.rdclass, r.rdtype).ttl and not ignore_ttl):
                    ret += str(r).replace('\n','<br />')
            ret += '</td>\n'
            ret += ' <td class="new">'
            for r in new.rdatasets:
                if r not in old.rdatasets or (r.ttl != old.find_rdataset(r.rdclass, r.rdtype).ttl and not ignore_ttl):
                    ret += str(r).replace('\n','<br />')
            ret += '</td>\n'
        ret += ' </tr>\n'
    return ret + ' </tbody>\n</table>'
# Make this module usable as a script too.
if __name__ == '__main__':
import optparse
import subprocess
import sys
import traceback
usage = """%prog zonefile1 zonefile2 - Show differences between zones in a diff-like format
%prog [--git|--bzr|--rcs] zonefile rev1 [rev2] - Show differences between two revisions of a zonefile
The differences shown will be logical differences, not textual differences.
"""
p = optparse.OptionParser(usage=usage)
p.add_option('-s', '--ignore-soa', action="store_true", default=False, dest="ignore_soa",
help="Ignore SOA-only changes to records")
p.add_option('-t', '--ignore-ttl', action="store_true", default=False, dest="ignore_ttl",
help="Ignore TTL-only changes to Rdata")
p.add_option('-T', '--traceback', action="store_true", default=False, dest="tracebacks",
help="Show python tracebacks when errors occur")
p.add_option('-H', '--html', action="store_true", default=False, dest="html",
help="Print HTML output")
p.add_option('-g', '--git', action="store_true", default=False, dest="use_git",
help="Use git revisions instead of real files")
p.add_option('-b', '--bzr', action="store_true", default=False, dest="use_bzr",
help="Use bzr revisions instead of real files")
p.add_option('-r', '--rcs', action="store_true", default=False, dest="use_rcs",
help="Use rcs revisions instead of real files")
opts, args = p.parse_args()
opts.use_vc = opts.use_git or opts.use_bzr or opts.use_rcs
def _open(what, err):
if isinstance(what, basestring):
# Open as normal file
try:
return open(what, 'rb')
except:
sys.stderr.write(err + "\n")
if opts.tracebacks:
traceback.print_exc()
else:
# Must be a list, open subprocess
try:
proc = subprocess.Popen(what, stdout=subprocess.PIPE)
proc.wait()
if proc.returncode == 0:
return proc.stdout
sys.stderr.write(err + "\n")
except:
sys.stderr.write(err + "\n")
if opts.tracebacks:
traceback.print_exc()
if not opts.use_vc and len(args) != 2:
p.print_help()
sys.exit(64)
if opts.use_vc and len(args) not in (2,3):
p.print_help()
sys.exit(64)
# Open file desriptors
if not opts.use_vc:
oldn, newn = args
else:
if len(args) == 3:
filename, oldr, newr = args
oldn = "%s:%s" % (oldr, filename)
newn = "%s:%s" % (newr, filename)
else:
filename, oldr = args
newr = None
oldn = "%s:%s" % (oldr, filename)
newn = filename
old, new = None, None
oldz, newz = None, None
if opts.use_bzr:
old = _open(["bzr", "cat", "-r" + oldr, filename],
"Unable to retrieve revision %s of %s" % (oldr, filename))
if newr != None:
new = _open(["bzr", "cat", "-r" + newr, filename],
"Unable to retrieve revision %s of %s" % (newr, filename))
elif opts.use_git:
old = _open(["git", "show", oldn],
"Unable to retrieve revision %s of %s" % (oldr, filename))
if newr != None:
new = _open(["git", "show", newn],
"Unable to retrieve revision %s of %s" % (newr, filename))
elif opts.use_rcs:
old = _open(["co", "-q", "-p", "-r" + oldr, filename],
"Unable to retrieve revision %s of %s" % (oldr, filename))
if newr != None:
new = _open(["co", "-q", "-p", "-r" + newr, filename],
"Unable to retrieve revision %s of %s" % (newr, filename))
if not opts.use_vc:
old = _open(oldn, "Unable to open %s" % oldn)
if not opts.use_vc or newr == None:
new = _open(newn, "Unable to open %s" % newn)
if not old or not new:
sys.exit(65)
# Parse the zones
try:
oldz = dns.zone.from_file(old, origin = '.', check_origin=False)
except dns.exception.DNSException:
sys.stderr.write("Incorrect zonefile: %s\n", old)
if opts.tracebacks:
traceback.print_exc()
try:
newz = dns.zone.from_file(new, origin = '.', check_origin=False)
except dns.exception.DNSException:
sys.stderr.write("Incorrect zonefile: %s\n" % new)
if opts.tracebacks:
traceback.print_exc()
if not oldz or not newz:
sys.exit(65)
changes = diff_zones(oldz, newz, opts.ignore_ttl, opts.ignore_soa)
changes.sort()
if not changes:
sys.exit(0)
if opts.html:
print format_changes_html(oldn, newn, changes, opts.ignore_ttl)
else:
print format_changes_plain(oldn, newn, changes, opts.ignore_ttl)
sys.exit(1)
| isc |
stansonhealth/ansible-modules-core | cloud/amazon/route53.py | 40 | 22225 | #!/usr/bin/python
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
DOCUMENTATION = '''
---
module: route53
version_added: "1.3"
short_description: add or delete entries in Amazons Route53 DNS service
description:
- Creates and deletes DNS records in Amazons Route53 service
options:
command:
description:
- Specifies the action to take.
required: true
choices: [ 'get', 'create', 'delete' ]
zone:
description:
- The DNS zone to modify
required: true
hosted_zone_id:
description:
- The Hosted Zone ID of the DNS zone to modify
required: false
version_added: "2.0"
default: null
record:
description:
- The full DNS record to create or delete
required: true
ttl:
description:
- The TTL to give the new record
required: false
default: 3600 (one hour)
type:
description:
- The type of DNS record to create
required: true
choices: [ 'A', 'CNAME', 'MX', 'AAAA', 'TXT', 'PTR', 'SRV', 'SPF', 'NS', 'SOA' ]
alias:
description:
- Indicates if this is an alias record.
required: false
version_added: "1.9"
default: False
choices: [ 'True', 'False' ]
alias_hosted_zone_id:
description:
- The hosted zone identifier.
required: false
version_added: "1.9"
default: null
alias_evaluate_target_health:
description:
- Whether or not to evaluate an alias target health. Useful for aliases to Elastic Load Balancers.
required: false
version_added: "2.1"
default: false
value:
description:
- The new value when creating a DNS record. Multiple comma-spaced values are allowed for non-alias records. When deleting a record all values for the record must be specified or Route53 will not delete it.
required: false
default: null
overwrite:
description:
- Whether an existing record should be overwritten on create if values do not match
required: false
default: null
retry_interval:
description:
- In the case that route53 is still servicing a prior request, this module will wait and try again after this many seconds. If you have many domain names, the default of 500 seconds may be too long.
required: false
default: 500
private_zone:
description:
- If set to true, the private zone matching the requested name within the domain will be used if there are both public and private zones. The default is to use the public zone.
required: false
default: false
version_added: "1.9"
identifier:
description:
- Have to be specified for Weighted, latency-based and failover resource record sets only. An identifier
that differentiates among multiple resource record sets that have the
same combination of DNS name and type.
required: false
default: null
version_added: "2.0"
weight:
description:
- Weighted resource record sets only. Among resource record sets that
have the same combination of DNS name and type, a value that
determines what portion of traffic for the current resource record set
is routed to the associated location.
required: false
default: null
version_added: "2.0"
region:
description:
- Latency-based resource record sets only Among resource record sets
that have the same combination of DNS name and type, a value that
determines which region this should be associated with for the
latency-based routing
required: false
default: null
version_added: "2.0"
health_check:
description:
- Health check to associate with this record
required: false
default: null
version_added: "2.0"
failover:
description:
- Failover resource record sets only. Whether this is the primary or
secondary resource record set. Allowed values are PRIMARY and SECONDARY
required: false
default: null
version_added: "2.0"
vpc_id:
description:
- "When used in conjunction with private_zone: true, this will only modify records in the private hosted zone attached to this VPC."
- This allows you to have multiple private hosted zones, all with the same name, attached to different VPCs.
required: false
default: null
version_added: "2.0"
wait:
description:
- Wait until the changes have been replicated to all Amazon Route 53 DNS servers.
required: false
default: no
version_added: "2.1"
wait_timeout:
description:
- How long to wait for the changes to be replicated, in seconds.
required: false
default: 300
version_added: "2.1"
author:
- "Bruce Pennypacker (@bpennypacker)"
- "Mike Buzzetti <mike.buzzetti@gmail.com>"
extends_documentation_fragment: aws
'''
# FIXME: the command stuff should have a more state like configuration alias -- MPD
EXAMPLES = '''
# Add new.foo.com as an A record with 3 IPs and wait until the changes have been replicated
- route53:
command: create
zone: foo.com
record: new.foo.com
type: A
ttl: 7200
value: 1.1.1.1,2.2.2.2,3.3.3.3
wait: yes
# Retrieve the details for new.foo.com
- route53:
command: get
zone: foo.com
record: new.foo.com
type: A
register: rec
# Delete new.foo.com A record using the results from the get command
- route53:
command: delete
zone: foo.com
record: "{{ rec.set.record }}"
ttl: "{{ rec.set.ttl }}"
type: "{{ rec.set.type }}"
value: "{{ rec.set.value }}"
# Add an AAAA record. Note that because there are colons in the value
# that the entire parameter list must be quoted:
- route53:
command: "create"
zone: "foo.com"
record: "localhost.foo.com"
type: "AAAA"
ttl: "7200"
value: "::1"
# Add a SRV record with multiple fields for a service on port 22222
# For more information on SRV records see:
# https://en.wikipedia.org/wiki/SRV_record
- route53:
command: "create"
"zone": "foo.com"
"record": "_example-service._tcp.foo.com"
"type": "SRV"
"value": ["0 0 22222 host1.foo.com", "0 0 22222 host2.foo.com"]
# Add a TXT record. Note that TXT and SPF records must be surrounded
# by quotes when sent to Route 53:
- route53:
command: "create"
zone: "foo.com"
record: "localhost.foo.com"
type: "TXT"
ttl: "7200"
value: '"bar"'
# Add an alias record that points to an Amazon ELB:
- route53:
command=create
zone=foo.com
record=elb.foo.com
type=A
value="{{ elb_dns_name }}"
alias=True
alias_hosted_zone_id="{{ elb_zone_id }}"
# Retrieve the details for elb.foo.com
- route53:
command: get
zone: foo.com
record: elb.foo.com
type: A
register: rec
# Delete an alias record using the results from the get command
- route53:
command: delete
zone: foo.com
record: "{{ rec.set.record }}"
ttl: "{{ rec.set.ttl }}"
type: "{{ rec.set.type }}"
value: "{{ rec.set.value }}"
alias: True
alias_hosted_zone_id: "{{ rec.set.alias_hosted_zone_id }}"
# Add an alias record that points to an Amazon ELB and evaluates it health:
- route53:
command=create
zone=foo.com
record=elb.foo.com
type=A
value="{{ elb_dns_name }}"
alias=True
alias_hosted_zone_id="{{ elb_zone_id }}"
alias_evaluate_target_health=True
# Add an AAAA record with Hosted Zone ID. Note that because there are colons in the value
# that the entire parameter list must be quoted:
- route53:
command: "create"
zone: "foo.com"
hosted_zone_id: "Z2AABBCCDDEEFF"
record: "localhost.foo.com"
type: "AAAA"
ttl: "7200"
value: "::1"
# Add an AAAA record with Hosted Zone ID. Note that because there are colons in the value
# that the entire parameter list must be quoted:
- route53:
command: "create"
zone: "foo.com"
hosted_zone_id: "Z2AABBCCDDEEFF"
record: "localhost.foo.com"
type: "AAAA"
ttl: "7200"
value: "::1"
# Use a routing policy to distribute traffic:
- route53:
command: "create"
zone: "foo.com"
record: "www.foo.com"
type: "CNAME"
value: "host1.foo.com"
ttl: 30
# Routing policy
identifier: "host1@www"
weight: 100
health_check: "d994b780-3150-49fd-9205-356abdd42e75"
'''
MINIMUM_BOTO_VERSION = '2.28.0'
WAIT_RETRY_SLEEP = 5 # how many seconds to wait between propagation status polls
import time
import distutils.version
try:
import boto
import boto.ec2
from boto import route53
from boto.route53 import Route53Connection
from boto.route53.record import Record, ResourceRecordSets
from boto.route53.status import Status
HAS_BOTO = True
except ImportError:
HAS_BOTO = False
class TimeoutError(Exception):
    """Raised when Route 53 change propagation exceeds the configured wait_timeout."""
    pass
def get_zone_by_name(conn, module, zone_name, want_private, zone_id, want_vpc_id):
    """Find a Route 53 hosted zone by name or by hosted-zone id.

    conn         -- a boto Route 53 connection
    module       -- the AnsibleModule (used only for boolean coercion)
    zone_name    -- DNS name of the zone to look for
    want_private -- whether a private (True) or public (False) zone is wanted
    zone_id      -- if given, match on the hosted-zone id instead of the name
    want_vpc_id  -- if given, only match a zone attached to this VPC

    Returns the matching boto zone object, or None if nothing matches.
    """
    for zone in conn.get_zones():
        # only return this zone if the private status of the zone matches
        # the private_zone boolean specified in the params
        private_zone = module.boolean(zone.config.get('PrivateZone', False))
        if private_zone != want_private:
            continue
        # Match either by exact name (when no id was given) or by id with
        # the "/hostedzone/" prefix stripped. (`is None` instead of `== None`.)
        if not ((zone.name == zone_name and zone_id is None) or
                zone.id.replace('/hostedzone/', '') == zone_id):
            continue
        if not want_vpc_id:
            return zone
        # NOTE: These details aren't available in other boto methods, hence
        # the necessary extra API call
        zone_details = conn.get_hosted_zone(zone.id)['GetHostedZoneResponse']
        # this is to deal with this boto bug: https://github.com/boto/boto/pull/2882
        if isinstance(zone_details['VPCs'], dict):
            if zone_details['VPCs']['VPC']['VPCId'] == want_vpc_id:
                return zone
        else:  # Forward compatibility for when boto fixes that bug
            if want_vpc_id in [v['VPCId'] for v in zone_details['VPCs']]:
                return zone
    return None
def commit(changes, retry_interval, wait, wait_timeout):
    """Commit changes, but retry PriorRequestNotComplete errors.

    changes        -- a boto ResourceRecordSets ready to commit
    retry_interval -- seconds to sleep between retries on PriorRequestNotComplete
    wait           -- if true, poll until the change reaches INSYNC
    wait_timeout   -- max seconds to wait for INSYNC before raising TimeoutError
    """
    result = None
    # Up to 11 attempts total (retry counts down from 10; < 0 stops retrying).
    retry = 10
    while True:
        try:
            retry -= 1
            result = changes.commit()
            break
        except boto.route53.exception.DNSServerError as e:
            # boto doesn't expose a structured error code here, so pull it
            # out of the response XML body by hand.
            code = e.body.split("<Code>")[1]
            code = code.split("</Code>")[0]
            if code != 'PriorRequestNotComplete' or retry < 0:
                raise e
            time.sleep(float(retry_interval))
    if wait:
        # Poll Route 53 until the change has propagated to all DNS servers
        # (status INSYNC) or the deadline passes.
        timeout_time = time.time() + wait_timeout
        connection = changes.connection
        change = result['ChangeResourceRecordSetsResponse']['ChangeInfo']
        status = Status(connection, change)
        while status.status != 'INSYNC' and time.time() < timeout_time:
            time.sleep(WAIT_RETRY_SLEEP)
            status.update()
        if time.time() >= timeout_time:
            raise TimeoutError()
    return result
# Shamelessly copied over from https://git.io/vgmDG
IGNORE_CODE = 'Throttling'
MAX_RETRIES=5
def invoke_with_throttling_retries(function_ref, *argv):
    """Call function_ref(*argv), retrying with exponential backoff whenever
    AWS responds with a Throttling error, up to MAX_RETRIES retries."""
    attempt = 0
    while True:
        try:
            return function_ref(*argv)
        except boto.exception.BotoServerError as err:
            # Re-raise anything that isn't throttling, or once retries are
            # exhausted; otherwise back off 5s, 10s, 20s, ...
            if err.code != IGNORE_CODE or attempt == MAX_RETRIES:
                raise err
            time.sleep(5 * (2 ** attempt))
            attempt += 1
def main():
    """Ansible entry point: get, create or delete a Route53 resource record set.

    Exits through module.exit_json / module.fail_json in every path.
    """
    argument_spec = ec2_argument_spec()
    argument_spec.update(dict(
            command = dict(choices=['get', 'create', 'delete'], required=True),
            zone = dict(required=True),
            hosted_zone_id = dict(required=False, default=None),
            record = dict(required=True),
            ttl = dict(required=False, type='int', default=3600),
            type = dict(choices=['A', 'CNAME', 'MX', 'AAAA', 'TXT', 'PTR', 'SRV', 'SPF', 'NS', 'SOA'], required=True),
            alias = dict(required=False, type='bool'),
            alias_hosted_zone_id = dict(required=False),
            alias_evaluate_target_health = dict(required=False, type='bool', default=False),
            value = dict(required=False),
            overwrite = dict(required=False, type='bool'),
            retry_interval = dict(required=False, default=500),
            private_zone = dict(required=False, type='bool', default=False),
            identifier = dict(required=False, default=None),
            weight = dict(required=False, type='int'),
            region = dict(required=False),
            health_check = dict(required=False),
            failover = dict(required=False,choices=['PRIMARY','SECONDARY']),
            vpc_id = dict(required=False),
            wait = dict(required=False, type='bool', default=False),
            wait_timeout = dict(required=False, type='int', default=300),
        )
    )
    module = AnsibleModule(argument_spec=argument_spec)

    # Boto presence / minimum version checks.
    if not HAS_BOTO:
        module.fail_json(msg='boto required for this module')
    if distutils.version.StrictVersion(boto.__version__) < distutils.version.StrictVersion(MINIMUM_BOTO_VERSION):
        module.fail_json(msg='Found boto in version %s, but >= %s is required' % (boto.__version__, MINIMUM_BOTO_VERSION))

    # Unpack module parameters (zone/record are lowercased for comparisons below).
    command_in = module.params.get('command')
    zone_in = module.params.get('zone').lower()
    hosted_zone_id_in = module.params.get('hosted_zone_id')
    ttl_in = module.params.get('ttl')
    record_in = module.params.get('record').lower()
    type_in = module.params.get('type')
    value_in = module.params.get('value')
    alias_in = module.params.get('alias')
    alias_hosted_zone_id_in = module.params.get('alias_hosted_zone_id')
    alias_evaluate_target_health_in = module.params.get('alias_evaluate_target_health')
    retry_interval_in = module.params.get('retry_interval')
    private_zone_in = module.params.get('private_zone')
    identifier_in = module.params.get('identifier')
    weight_in = module.params.get('weight')
    region_in = module.params.get('region')
    health_check_in = module.params.get('health_check')
    failover_in = module.params.get('failover')
    vpc_id_in = module.params.get('vpc_id')
    wait_in = module.params.get('wait')
    wait_timeout_in = module.params.get('wait_timeout')

    region, ec2_url, aws_connect_kwargs = get_aws_connection_info(module)

    # Normalise 'value' into a sorted list so order-insensitive comparison works.
    # NOTE(review): initialised as a tuple but always reassigned to a list below;
    # harmless, though `[]` would be more consistent.
    value_list = ()

    if type(value_in) is str:
        if value_in:
            value_list = sorted([s.strip() for s in value_in.split(',')])
    elif type(value_in) is list:
        value_list = sorted(value_in)

    # Route53 names are fully qualified; force the trailing dot.
    if zone_in[-1:] != '.':
        zone_in += "."

    if record_in[-1:] != '.':
        record_in += "."

    # Parameter cross-validation for create/delete.
    if command_in == 'create' or command_in == 'delete':
        if not value_in:
            module.fail_json(msg = "parameter 'value' required for create/delete")
        elif alias_in:
            if len(value_list) != 1:
                module.fail_json(msg = "parameter 'value' must contain a single dns name for alias create/delete")
            elif not alias_hosted_zone_id_in:
                module.fail_json(msg = "parameter 'alias_hosted_zone_id' required for alias create/delete")
        elif ( weight_in!=None or region_in!=None or failover_in!=None ) and identifier_in==None:
            module.fail_json(msg= "If you specify failover, region or weight you must also specify identifier")

    if command_in == 'create':
        if ( weight_in!=None or region_in!=None or failover_in!=None ) and identifier_in==None:
            module.fail_json(msg= "If you specify failover, region or weight you must also specify identifier")
        elif ( weight_in==None and region_in==None and failover_in==None ) and identifier_in!=None:
            module.fail_json(msg= "You have specified identifier which makes sense only if you specify one of: weight, region or failover.")

    if vpc_id_in and not private_zone_in:
        module.fail_json(msg="parameter 'private_zone' must be true when specifying parameter"
                             " 'vpc_id'")

    # connect to the route53 endpoint
    try:
        conn = Route53Connection(**aws_connect_kwargs)
    except boto.exception.BotoServerError as e:
        module.fail_json(msg = e.error_message)

    # Find the named zone ID
    zone = get_zone_by_name(conn, module, zone_in, private_zone_in, hosted_zone_id_in, vpc_id_in)

    # Verify that the requested zone is already defined in Route53
    if zone is None:
        errmsg = "Zone %s does not exist in Route53" % zone_in
        module.fail_json(msg = errmsg)

    record = {}
    found_record = False
    wanted_rset = Record(name=record_in, type=type_in, ttl=ttl_in,
                         identifier=identifier_in, weight=weight_in, region=region_in,
                         health_check=health_check_in, failover=failover_in)

    for v in value_list:
        if alias_in:
            wanted_rset.set_alias(alias_hosted_zone_id_in, v, alias_evaluate_target_health_in)
        else:
            wanted_rset.add_value(v)

    # Look for an existing record set matching name/type/identifier.
    sets = conn.get_all_rrsets(zone.id, name=record_in, type=type_in, identifier=identifier_in)
    for rset in sets:
        # Due to a bug in either AWS or Boto, "special" characters are returned as octals, preventing round
        # tripping of things like * and @.
        decoded_name = rset.name.replace(r'\052', '*')
        decoded_name = decoded_name.replace(r'\100', '@')
        #Need to save this changes in rset, because of comparing rset.to_xml() == wanted_rset.to_xml() in next block
        rset.name = decoded_name

        # Boto returns the identifier as a string; normalise ours to match.
        if identifier_in is not None:
            identifier_in = str(identifier_in)

        if rset.type == type_in and decoded_name.lower() == record_in.lower() and rset.identifier == identifier_in:
            found_record = True
            record['zone'] = zone_in
            record['type'] = rset.type
            record['record'] = decoded_name
            record['ttl'] = rset.ttl
            record['value'] = ','.join(sorted(rset.resource_records))
            record['values'] = sorted(rset.resource_records)
            if hosted_zone_id_in:
                record['hosted_zone_id'] = hosted_zone_id_in
            record['identifier'] = rset.identifier
            record['weight'] = rset.weight
            record['region'] = rset.region
            record['failover'] = rset.failover
            record['health_check'] = rset.health_check
            if hosted_zone_id_in:
                record['hosted_zone_id'] = hosted_zone_id_in
            if rset.alias_dns_name:
                record['alias'] = True
                record['value'] = rset.alias_dns_name
                record['values'] = [rset.alias_dns_name]
                record['alias_hosted_zone_id'] = rset.alias_hosted_zone_id
                record['alias_evaluate_target_health'] = rset.alias_evaluate_target_health
            else:
                record['alias'] = False
                record['value'] = ','.join(sorted(rset.resource_records))
                record['values'] = sorted(rset.resource_records)
            # Identical record already present: nothing to do.
            if command_in == 'create' and rset.to_xml() == wanted_rset.to_xml():
                module.exit_json(changed=False)
            break

    if command_in == 'get':
        if type_in == 'NS':
            ns = record['values']
        else:
            # Retrieve name servers associated to the zone.
            ns = conn.get_zone(zone_in).get_nameservers()

        module.exit_json(changed=False, set=record, nameservers=ns)

    if command_in == 'delete' and not found_record:
        module.exit_json(changed=False)

    changes = ResourceRecordSets(conn, zone.id)

    if command_in == 'create' or command_in == 'delete':
        # Creating over an existing record requires 'overwrite' and becomes an UPSERT.
        if command_in == 'create' and found_record:
            if not module.params['overwrite']:
                module.fail_json(msg = "Record already exists with different value. Set 'overwrite' to replace it")
            command = 'UPSERT'
        else:
            command = command_in.upper()
        changes.add_change_record(command, wanted_rset)

    # Commit with throttling retries; map "already exists" to changed=False.
    try:
        result = invoke_with_throttling_retries(commit, changes, retry_interval_in, wait_in, wait_timeout_in)
    except boto.route53.exception.DNSServerError as e:
        txt = e.body.split("<Message>")[1]
        txt = txt.split("</Message>")[0]
        if "but it already exists" in txt:
            module.exit_json(changed=False)
        else:
            module.fail_json(msg = txt)
    except TimeoutError:
        module.fail_json(msg='Timeout waiting for changes to replicate')

    module.exit_json(changed=True)
# import module snippets
# NOTE: these wildcard imports are the classic Ansible module convention; they
# provide AnsibleModule, ec2_argument_spec, get_aws_connection_info, HAS_BOTO, etc.
from ansible.module_utils.basic import *
from ansible.module_utils.ec2 import *

# Ansible modules of this era call main() unconditionally (no __main__ guard).
main()
| gpl-3.0 |
dotKom/onlineweb4 | apps/gsuite/mail_syncer/signals.py | 1 | 4337 | import json
import logging
from django.conf import settings
from django.contrib.auth import get_user_model
from django.db.models.signals import pre_save
from django.dispatch import receiver
from googleapiclient.errors import HttpError
from apps.gsuite.mail_syncer.utils import insert_email_into_g_suite_group
from .main import remove_g_suite_user_from_group
User = get_user_model()
MAILING_LIST_USER_FIELDS_TO_LIST_NAME = {
'infomail': 'info',
'jobmail': 'oppdrag',
}
def _get_error_message_from_httperror(err):
"""
Parses an HTTP Error from the Google API and returns the error message.
:param err: An error from the Google API.
:type err: HttpError
:return: The error message.
:rtype: str
"""
json_error = json.loads(str(err.content.decode()))
return json_error.get('error', {}).get('message', '')
def insert_user_into_group_pass_if_already_member(domain, group, email):
    """
    Subscribe ``email`` to the mailing list ``group`` on ``domain``.

    A "Member already exists" answer from the Google API is treated as
    success and merely logged; any other HttpError is re-raised.

    :param domain: The G Suite domain in question.
    :type domain: str
    :param group: The mailing list to subscribe the address to.
    :type group: str
    :param email: The email address to subscribe.
    :type email: str
    :return: None. Raises on unexpected API errors.
    """
    logger = logging.getLogger(__name__)

    try:
        insert_email_into_g_suite_group(domain, group, email)
    except HttpError as err:
        if 'Member already exists' not in _get_error_message_from_httperror(err):
            raise err
        logger.warning('Email address "{email}" was already subscribed to mailing list "{list}"!'.format(
            email=email, list=group
        ))
def remove_user_from_group_pass_if_not_subscribed(domain, group, email):
    """
    Unsubscribe ``email`` from the mailing list ``group`` on ``domain``.

    A "Resource Not Found" answer from the Google API (address was never
    subscribed) is treated as success and merely logged; any other
    HttpError is re-raised.

    :param domain: The G Suite domain in question.
    :type domain: str
    :param group: The mailing list to unsubscribe the address from.
    :type group: str
    :param email: The email address to unsubscribe.
    :type email: str
    :return: None. Raises on unexpected API errors.
    """
    logger = logging.getLogger(__name__)

    try:
        remove_g_suite_user_from_group(domain, group, email)
    except HttpError as err:
        if 'Resource Not Found' not in _get_error_message_from_httperror(err):
            raise err
        logger.warning('Email address "{email}" was not subscribed to mailing list "{list}"!'.format(
            email=email, list=group
        ))
def get_updated_mailing_list_fields(user):
    """
    Return the mailing-list opt-in field names that are about to change.

    Compares the incoming (not yet saved) ``user`` against its stored
    database counterpart. For a user that does not exist yet, every list
    the user opted into counts as changed.
    """
    try:
        stored_user = User.objects.get(pk=user.pk)
    except User.DoesNotExist:
        # New user: report every mailing list that is opted into.
        return [field for field in MAILING_LIST_USER_FIELDS_TO_LIST_NAME
                if getattr(user, field, False)]
    return [field for field in ('infomail', 'jobmail')
            if getattr(user, field) != getattr(stored_user, field)]
@receiver(pre_save, sender=User)
def toggle_mailing_lists(sender, instance, **kwargs):
    """Sync G Suite mailing-list membership when a user's opt-in flags change.

    Runs on every User pre_save; for each changed opt-in field, subscribes or
    unsubscribes the user's email on the corresponding G Suite group.

    :param sender: The model class sending the signal (User).
    :param instance: The (not yet saved) User instance.
    """
    update_fields = get_updated_mailing_list_fields(instance)
    if not update_fields:
        return
    # Hoisted out of the loop: the domain does not change per mailing list.
    domain = settings.OW4_GSUITE_SYNC.get('DOMAIN')
    # get_updated_mailing_list_fields only ever reports keys of
    # MAILING_LIST_USER_FIELDS_TO_LIST_NAME, so iterate the changes directly
    # instead of scanning every known list and skipping the unchanged ones.
    for mailing_list in update_fields:
        g_suite_mailing_list = MAILING_LIST_USER_FIELDS_TO_LIST_NAME[mailing_list]
        if getattr(instance, mailing_list):
            insert_user_into_group_pass_if_already_member(domain, g_suite_mailing_list, instance.get_email().email)
        else:
            remove_user_from_group_pass_if_not_subscribed(domain, g_suite_mailing_list, instance.get_email().email)
| mit |
rd37/horizon | openstack_dashboard/dashboards/project/routers/extensions/routerrules/tables.py | 9 | 2317 | # vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2013, Big Switch Networks, Inc
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import logging
from django.core.urlresolvers import reverse
from django.utils.translation import ugettext_lazy as _
from openstack_dashboard.dashboards.project.routers.extensions.routerrules\
import rulemanager
from horizon import tables
LOG = logging.getLogger(__name__)
class AddRouterRule(tables.LinkAction):
    # Table action: opens the "Add Router Rule" form in an AJAX modal.
    name = "create"
    verbose_name = _("Add Router Rule")
    url = "horizon:project:routers:addrouterrule"
    classes = ("ajax-modal", "btn-create")

    def get_link_url(self, datum=None):
        """Build the form URL for the router this table is displaying."""
        # router_id is captured from the URL kwargs of the detail view.
        router_id = self.table.kwargs['router_id']
        return reverse(self.url, args=(router_id,))
class RemoveRouterRule(tables.DeleteAction):
    # Table action: deletes the selected rules via the rulemanager helper.
    data_type_singular = _("Router Rule")
    data_type_plural = _("Router Rules")
    failure_url = 'horizon:project:routers:detail'

    def delete(self, request, obj_id):
        """Remove a single rule (``obj_id``) from the router shown by this table."""
        router_id = self.table.kwargs['router_id']
        rulemanager.remove_rules(request, [obj_id],
                                 router_id=router_id)
class RouterRulesTable(tables.DataTable):
    # Columns shown on the router detail page for Big Switch router rules.
    source = tables.Column("source", verbose_name=_("Source CIDR"))
    destination = tables.Column("destination",
                                verbose_name=_("Destination CIDR"))
    action = tables.Column("action", verbose_name=_("Action"))
    nexthops = tables.Column("nexthops", verbose_name=_("Next Hops"))

    def get_object_display(self, rule):
        """Human-readable label for a rule, e.g. in delete confirmations."""
        # `rule` must support %-style mapping lookup (dict-like) — TODO confirm
        # against the datum type supplied by the view.
        return "(%(action)s) %(source)s -> %(destination)s" % rule

    class Meta:
        name = "routerrules"
        verbose_name = _("Router Rules")
        table_actions = (AddRouterRule, RemoveRouterRule)
        row_actions = (RemoveRouterRule, )
| apache-2.0 |
nubark/odoo | addons/auth_signup/res_config.py | 43 | 1975 | # -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from openerp.osv import osv, fields
from openerp.tools.safe_eval import safe_eval
class base_config_settings(osv.TransientModel):
    """General-settings wizard extension exposing the auth_signup options."""
    _inherit = 'base.config.settings'

    _columns = {
        'auth_signup_reset_password': fields.boolean('Enable password reset from Login page',
            help="This allows users to trigger a password reset from the Login page."),
        'auth_signup_uninvited': fields.boolean('Allow external users to sign up',
            help="If unchecked, only invited users may sign up."),
        'auth_signup_template_user_id': fields.many2one('res.users',
            string='Template user for new users created through signup'),
    }

    def get_default_auth_signup_template_user_id(self, cr, uid, fields, context=None):
        """Read the auth_signup options back from ir.config_parameter.

        NOTE(review): despite the field-specific name (the settings wizard
        dispatches on the ``get_default_`` prefix), this getter returns the
        defaults for all three fields at once.
        """
        icp = self.pool.get('ir.config_parameter')
        # we use safe_eval on the result, since the value of the parameter is a nonempty string
        return {
            'auth_signup_reset_password': safe_eval(icp.get_param(cr, uid, 'auth_signup.reset_password', 'False')),
            'auth_signup_uninvited': safe_eval(icp.get_param(cr, uid, 'auth_signup.allow_uninvited', 'False')),
            'auth_signup_template_user_id': safe_eval(icp.get_param(cr, uid, 'auth_signup.template_user_id', 'False')),
        }

    def set_auth_signup_template_user_id(self, cr, uid, ids, context=None):
        """Persist the three auth_signup options as ir.config_parameter values."""
        config = self.browse(cr, uid, ids[0], context=context)
        icp = self.pool.get('ir.config_parameter')
        # we store the repr of the values, since the value of the parameter is a required string
        icp.set_param(cr, uid, 'auth_signup.reset_password', repr(config.auth_signup_reset_password))
        icp.set_param(cr, uid, 'auth_signup.allow_uninvited', repr(config.auth_signup_uninvited))
        icp.set_param(cr, uid, 'auth_signup.template_user_id', repr(config.auth_signup_template_user_id.id))
| gpl-3.0 |
fedora-infra/fedmsg_meta_fedora_infrastructure | fedmsg_meta_fedora_infrastructure/conglomerators/meetbot/meetbot.py | 2 | 1856 | import fedmsg.meta.base
from fedmsg_meta_fedora_infrastructure.fasshim import avatar_url
class ByURL(fedmsg.meta.base.BaseConglomerator):
    """Conglomerate meetbot messages that refer to the same meeting URL."""

    def can_handle(self, msg, **config):
        """Only meetbot messages are handled here."""
        return '.meetbot.' in msg['topic']

    def get_title(self, ms):
        """Return the first non-empty meeting topic among the messages, or None."""
        for message in ms:
            topic = message['msg'].get('meeting_topic')
            if topic:
                return topic
        return None

    def merge(self, constituents, subject, **config):
        """Fold several meetbot messages into a single notification template."""
        attendees = []
        for message in constituents:
            attendees.extend(list(message['msg']['attendees'].keys()))
        usernames = self.list_to_series(attendees)
        channel = constituents[0]['msg']['channel']
        title = self.get_title(constituents)

        if title:
            subtitle = self._('{usernames} participated in {title} in {channel}')
        else:
            subtitle = self._('{usernames} participated in a meeting in {channel}')

        tmpl = self.produce_template(constituents, subject, **config)
        tmpl['subtitle'] = subtitle.format(
            usernames=usernames,
            title=title,
            channel=channel,
        )
        tmpl['subjective'] = tmpl['subtitle']
        default = tmpl['icon']
        # These are the only two keys that vary amongst our concrete children.
        tmpl['secondary_icon'] = self.get_secondary_icon(constituents, default)
        tmpl['link'] = self.get_link(constituents)
        return tmpl

    def matches(self, a, b, **config):
        """The events must all be about the same meetbot (same meeting URL)."""
        return a['msg']['url'] == b['msg']['url']

    def get_secondary_icon(self, constituents, default):
        """Avatar of the meeting owner."""
        return avatar_url(constituents[0]['msg']['owner'])

    def get_link(self, constituents):
        return constituents[0]['msg']['url']
| lgpl-2.1 |
sebi-hgdata/ansible | lib/ansible/module_utils/openstack.py | 34 | 3309 | # This code is part of Ansible, but is an independent component.
# This particular file snippet, and this file snippet only, is BSD licensed.
# Modules you write using this snippet, which is embedded dynamically by Ansible
# still belong to the author of the module, and may assign their own license
# to the complete work.
#
# Copyright (c) 2014 Hewlett-Packard Development Company, L.P.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without modification,
# are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
# IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE
# USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import os
def openstack_argument_spec():
    """Return the argument spec shared by the OpenStack modules.

    Defaults are seeded from the standard OS_* environment variables, which
    is mainly useful for ad-hoc command-line operation; in playbooks the
    values are expected to be passed explicitly.
    """
    username = os.environ.get('OS_USERNAME', 'admin')
    password = os.environ.get('OS_PASSWORD', None)
    tenant_name = os.environ.get('OS_TENANT_NAME', username)
    auth_url = os.environ.get('OS_AUTH_URL', 'http://127.0.0.1:35357/v2.0/')
    region_name = os.environ.get('OS_REGION_NAME', None)

    spec = dict(
        login_username=dict(default=username),
        auth_url=dict(default=auth_url),
        region_name=dict(default=region_name),
        availability_zone=dict(default=None),
    )
    # Credentials without an environment fallback become required arguments.
    spec['login_password'] = dict(default=password) if password else dict(required=True)
    spec['login_tenant_name'] = dict(default=tenant_name) if tenant_name else dict(required=True)
    return spec
def openstack_find_nova_addresses(addresses, ext_tag, key_name=None):
    """Extract IP addresses from a Nova server's ``addresses`` structure.

    :param addresses: mapping of network name -> list of interface dicts,
                      each carrying at least an 'addr' key.
    :param ext_tag: value of the 'OS-EXT-IPS:type' attribute to match
                    (e.g. 'fixed' or 'floating') on networks other than
                    ``key_name``.
    :param key_name: optional network name whose addresses are collected
                     unconditionally.
    :return: list of matching address strings.
    """
    ret = []
    # .items() instead of the Python-2-only .iteritems() keeps this helper
    # usable from both Python 2 and Python 3 modules; the comprehension
    # variable is also renamed so it no longer shadows the `addresses`-like
    # parameter (the original used `addrs` for both).
    for (network, interfaces) in addresses.items():
        if key_name and network == key_name:
            ret.extend([interface['addr'] for interface in interfaces])
        else:
            for interface_spec in interfaces:
                if 'OS-EXT-IPS:type' in interface_spec and interface_spec['OS-EXT-IPS:type'] == ext_tag:
                    ret.append(interface_spec['addr'])
    return ret
| gpl-3.0 |
AgataGibas/python101 | docs/bazy/sqlorm/ormpeewee.py | 2 | 1690 | #! /usr/bin/env python
# -*- coding: utf-8 -*-
import os
from peewee import *
# Start from a clean slate: remove any database file left over from a previous run.
if os.path.exists('test.db'):
    os.remove('test.db')

# Create the database instance used by the models.
baza = SqliteDatabase('test.db')  # ':memory:' would give an in-memory database instead
# BazaModel ("base model") is the base class for the Klasa and Uczen classes,
# which describe the records of the "klasa" and "uczen" tables and the
# relations between them.
class BazaModel(Model):

    class Meta:
        # Bind every subclass to the SQLite database created above.
        database = baza
class Klasa(BazaModel):
    # "Klasa" = school class: `nazwa` is its name (e.g. '1A'),
    # `profil` its academic profile (e.g. 'matematyczny').
    nazwa = CharField(null=False)
    profil = CharField(default='')
class Uczen(BazaModel):
    # "Uczen" = student; each student belongs to exactly one Klasa.
    imie = CharField(null=False)      # first name
    nazwisko = CharField(null=False)  # last name
    # Reverse accessor: klasa.uczniowie lists the students of a class.
    klasa = ForeignKeyField(Klasa, related_name='uczniowie')
baza.connect()  # open the connection to the database
baza.create_tables([Klasa, Uczen], True)  # create the tables (True: don't fail if they already exist)

# Add two school classes, but only if the table is still empty.
if Klasa().select().count() == 0:
    klasa = Klasa(nazwa='1A', profil='matematyczny')
    klasa.save()
    klasa = Klasa(nazwa='1B', profil='humanistyczny')
    klasa.save()

# Fetch the Klasa instance representing class "1A".
klasa = Klasa.select().where(Klasa.nazwa == '1A').get()

uczen = Uczen(imie='Tomasz', nazwisko='Nowak', klasa=klasa)
uczen.save()

# Read the data back from the database (the JOIN pulls in each student's class).
for uczen in Uczen.select().join(Klasa):
    print uczen.id, uczen.imie, uczen.nazwisko, uczen.klasa.nazwa

print ""

# Move student Nowak to class "1B".
uczen = Uczen().select().join(Klasa).where(Uczen.nazwisko == 'Nowak').get()
uczen.klasa = Klasa.select().where(Klasa.nazwa == '1B').get()
uczen.save()  # persist the change in the database

# Delete the student with id 1.
Uczen.select().where(Uczen.id == 1).get().delete_instance()

baza.close()
| mit |
fibbo/DIRAC | Core/scripts/dirac-distribution.py | 4 | 16439 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
########################################################################
# $HeadURL$
# File : dirac-distribution
# Author : Adria Casajus
########################################################################
"""
Create tarballs for a given DIRAC release
"""
__RCSID__ = "$Id$"
from DIRAC import S_OK, S_ERROR, gLogger
from DIRAC.Core.Base import Script
from DIRAC.Core.Utilities import List, File, Distribution, Platform, Subprocess, CFG
import sys, os, re, urllib2, tempfile, getpass, imp
try:
import hashlib as md5
except ImportError:
import md5
globalDistribution = Distribution.Distribution()
g_uploadCmd = {
'DIRAC' : "( cd %OUTLOCATION% ; tar -cf - *.tar.gz *.md5 *.cfg *.pdf *.html ) | ssh $USER@lxplus.cern.ch 'cd /afs/cern.ch/lhcb/distribution/DIRAC3/installSource && tar -xvf - && ls *.tar.gz > tars.list'",
'LHCb' : "( cd %OUTLOCATION% ; tar -cf - *.tar.gz *.md5 *.cfg *.pdf *.html ) | ssh $USER@lxplus.cern.ch 'cd /afs/cern.ch/lhcb/distribution/LHCbDirac_project && tar -xvf - && ls *.tar.gz > tars.list'",
'ILC' : "( cd %OUTLOCATION% ; tar -cf - *.tar.gz *.md5 *.cfg *.pdf *.html ) | ssh $USER@lxplus.cern.ch 'cd /afs/cern.ch/lhcb/distribution/DIRAC3/tars && tar -xvf - && ls *.tar.gz > tars.list'",
}
###
# Load release manager from dirac-install
##
# The release-handling logic lives in the companion dirac-install script,
# which is imported here as a regular module.
diracInstallLocation = os.path.join( os.path.dirname( __file__ ), "dirac-install" )
if not os.path.isfile( diracInstallLocation ):
    diracInstallLocation = os.path.join( os.path.dirname( __file__ ), "dirac-install.py" )
try:
    diFile = open( diracInstallLocation, "r" )
    try:
        DiracInstall = imp.load_module( "DiracInstall", diFile, diracInstallLocation, ( "", "r", imp.PY_SOURCE ) )
    finally:
        # Close the file even when load_module fails (the original leaked it).
        diFile.close()
except Exception as excp:
    # BUG FIX: a stray bare `raise` used to precede these two lines, making the
    # fatal log and exit unreachable dead code; abort cleanly as intended.
    gLogger.fatal( "Cannot find dirac-install! Aborting (%s)" % str( excp ) )
    sys.exit( 1 )
class Params:
    """Holder for the command-line options of dirac-distribution.

    Each set* method is registered as a Script switch callback in
    registerSwitches() and returns S_OK/S_ERROR as that machinery expects.
    """

    def __init__( self ):
        self.releasesToBuild = []
        self.projectName = 'DIRAC'
        self.debug = False
        self.externalsBuildType = [ 'client' ]
        self.ignoreExternals = False
        self.forceExternals = False
        self.ignorePackages = False
        self.relcfg = False
        self.externalsPython = '26'
        self.destination = ""
        self.externalsLocation = ""
        self.makeJobs = 1
        self.globalDefaults = ""
        self.forcedLocations = {}

    def setReleases( self, optionValue ):
        # Comma-separated list of release versions to build.
        self.releasesToBuild = List.fromChar( optionValue )
        return S_OK()

    def setProject( self, optionValue ):
        self.projectName = optionValue
        return S_OK()

    def setDebug( self, optionValue ):
        self.debug = True
        return S_OK()

    def setExternalsBuildType( self, optionValue ):
        # client and/or server externals.
        self.externalsBuildType = List.fromChar( optionValue )
        return S_OK()

    def setForceExternals( self, optionValue ):
        self.forceExternals = True
        return S_OK()

    def setIgnoreExternals( self, optionValue ):
        self.ignoreExternals = True
        return S_OK()

    def setDestination( self, optionValue ):
        self.destination = optionValue
        return S_OK()

    def setPythonVersion( self, optionValue ):
        self.externalsPython = optionValue
        return S_OK()

    def setIgnorePackages( self, optionValue ):
        self.ignorePackages = True
        return S_OK()

    def setExternalsLocation( self, optionValue ):
        self.externalsLocation = optionValue
        return S_OK()

    def setMakeJobs( self, optionValue ):
        # Clamp to at least one make job.
        self.makeJobs = max( 1, int( optionValue ) )
        return S_OK()

    def setReleasesCFG( self, optionValue ):
        self.relcfg = optionValue
        return S_OK()

    def setGlobalDefaults( self, value ):
        self.globalDefaults = value
        return S_OK()

    def overWriteLocation( self, value ):
        """Parse a <moduleName>:<url> override for a module's source location."""
        locSplit = value.split( ":" )
        if len( locSplit ) < 2:
            # NOTE(review): "insteaf" typo in this user-facing message.
            return S_ERROR( "Invalid location. It has to have format <moduleName>:<url> insteaf of %s" % value )
        modName = locSplit[0]
        # Re-join the remainder so URLs containing ':' survive the split.
        location = ":".join( locSplit[1:] )
        gLogger.notice( "Forcing location of %s to %s" % ( modName, location ) )
        self.forcedLocations[ modName ] = location
        return S_OK()

    def registerSwitches( self ):
        # Registers the callbacks on the module-level cliParams instance.
        Script.registerSwitch( "r:", "releases=", "releases to build (mandatory, comma separated)", cliParams.setReleases )
        Script.registerSwitch( "l:", "project=", "Project to build the release for (DIRAC by default)", cliParams.setProject )
        Script.registerSwitch( "D:", "destination", "Destination where to build the tar files", cliParams.setDestination )
        Script.registerSwitch( "i:", "pythonVersion", "Python version to use (25/26)", cliParams.setPythonVersion )
        Script.registerSwitch( "P", "ignorePackages", "Do not make tars of python packages", cliParams.setIgnorePackages )
        Script.registerSwitch( "C:", "relcfg=", "Use <file> as the releases.cfg", cliParams.setReleasesCFG )
        Script.registerSwitch( "b", "buildExternals", "Force externals compilation even if already compiled", cliParams.setForceExternals )
        Script.registerSwitch( "B", "ignoreExternals", "Skip externals compilation", cliParams.setIgnoreExternals )
        Script.registerSwitch( "t:", "buildType=", "External type to build (client/server)", cliParams.setExternalsBuildType )
        Script.registerSwitch( "x:", "externalsLocation=", "Use externals location instead of downloading them", cliParams.setExternalsLocation )
        Script.registerSwitch( "j:", "makeJobs=", "Make jobs (default is 1)", cliParams.setMakeJobs )
        Script.registerSwitch( 'M:', 'defaultsURL=', 'Where to retrieve the global defaults from', cliParams.setGlobalDefaults )
        Script.registerSwitch( 'O:', 'overwriteLocation=', 'Force location of modules from where to make the release. Format <moduleName>:<url>', cliParams.overWriteLocation )
        Script.setUsageMessage( '\n'.join( [ __doc__.split( '\n' )[1],
                                             '\nUsage:',
                                             ' %s [option|cfgfile] ...\n' % Script.scriptName ] ) )
class DistributionMaker:
    def __init__( self, cliParams ):
        """Keep the CLI parameters and build dirac-install's ReleaseConfig for the project."""
        self.cliParams = cliParams
        self.relConf = DiracInstall.ReleaseConfig( projectName = cliParams.projectName,
                                                   globalDefaultsURL = cliParams.globalDefaults )
        # Route ReleaseConfig debug output through the DIRAC logger.
        self.relConf.setDebugCB( gLogger.info )
        self.relConf.loadProjectDefaults()
def isOK( self ):
if not self.cliParams.releasesToBuild:
gLogger.error( "Missing releases to build!" )
Script.showHelp()
return False
if not self.cliParams.destination:
self.cliParams.destination = tempfile.mkdtemp( 'DiracDist' )
else:
try:
os.makedirs( self.cliParams.destination )
except:
pass
gLogger.notice( "Will generate tarballs in %s" % self.cliParams.destination )
return True
    def loadReleases( self ):
        """Load releases.cfg for every requested release; returns S_OK/S_ERROR."""
        gLogger.notice( "Loading releases.cfg" )
        return self.relConf.loadProjectRelease( self.cliParams.releasesToBuild, releaseMode = True, relLocation = self.cliParams.relcfg )
def createModuleTarballs( self ):
for version in self.cliParams.releasesToBuild:
result = self.__createReleaseTarballs( version )
if not result[ 'OK' ]:
return result
return S_OK()
    def __createReleaseTarballs( self, releaseVersion ):
        """Create one tarball per module of the given release by shelling out to
        dirac-create-distribution-tarball; returns S_OK/S_ERROR.
        """
        result = self.relConf.getModulesForRelease( releaseVersion )
        if not result[ 'OK' ]:
            return result
        modsToTar = result[ 'Value' ]
        for modName in modsToTar:
            modVersion = modsToTar[ modName ]
            dctArgs = [ '-A' ] #Leave a copy of the release notes outside the tarballs
            #Version
            dctArgs.append( "-n '%s'" % modName )
            dctArgs.append( "-v '%s'" % modVersion )
            gLogger.notice( "Creating tar for %s version %s" % ( modName, modVersion ) )
            #Source
            # A -O command-line override wins over the release configuration.
            if modName in cliParams.forcedLocations:
                location = cliParams.forcedLocations[ modName ]
                gLogger.notice( "Source is forced to %s" % location )
                dctArgs.append( "-u '%s'" % location )
            else:
                result = self.relConf.getModSource( releaseVersion, modName )
                if not result[ 'OK' ]:
                    return result
                # modSrcTuple is (vcsType-or-empty, sourceURL) — TODO confirm
                # against dirac-install's getModSource.
                modSrcTuple = result[ 'Value' ]
                if modSrcTuple[0]:
                    logMsgVCS = modSrcTuple[0]
                    dctArgs.append( "-z '%s'" % modSrcTuple[0] )
                else:
                    logMsgVCS = "autodiscover"
                dctArgs.append( "-u '%s'" % modSrcTuple[1] )
                gLogger.notice( "Sources will be retrieved from %s (%s)" % ( modSrcTuple[1], logMsgVCS ) )
            #Tar destination
            dctArgs.append( "-D '%s'" % self.cliParams.destination )
            if cliParams.debug:
                dctArgs.append( "-dd" )
            #Script location discovery
            scriptName = os.path.join( os.path.dirname( __file__ ), "dirac-create-distribution-tarball" )
            if not os.path.isfile( scriptName ):
                scriptName = os.path.join( os.path.dirname( __file__ ), "dirac-create-distribution-tarball.py" )
            cmd = "'%s' %s" % ( scriptName, " ".join( dctArgs ) )
            gLogger.verbose( "Executing %s" % cmd )
            if os.system( cmd ) != 0:
                return S_ERROR( "Failed creating tarball for module %s. Aborting" % modName )
            gLogger.notice( "Tarball for %s version %s created" % ( modName, modVersion ) )
        return S_OK()
    def getAvailableExternals( self ):
        """Download tars.list from the distribution server and parse the
        already-compiled externals from it.

        :return: list of (type, version, platform, pythonVersion) tuples;
                 empty list when the download fails.
        """
        packagesURL = "http://lhcbproject.web.cern.ch/lhcbproject/dist/DIRAC3/installSource/tars.list"
        try:
            remoteFile = urllib2.urlopen( packagesURL )
        except urllib2.URLError:
            gLogger.exception()
            return []
        remoteData = remoteFile.read()
        remoteFile.close()
        # Filenames look like: Externals-<type>-<version>-<platform>-python<NN>.tar.gz
        versionRE = re.compile( "Externals-([a-zA-Z]*)-([a-zA-Z0-9]*(?:-pre[0-9]+)*)-(.*)-(python[0-9]+)\.tar\.gz" )
        availableExternals = []
        for line in remoteData.split( "\n" ):
            res = versionRE.search( line )
            if res:
                availableExternals.append( res.groups() )
        return availableExternals
def createExternalsTarballs( self ):
extDone = []
for releaseVersion in self.cliParams.releasesToBuild:
if releaseVersion in extDone:
continue
if not self.tarExternals( releaseVersion ):
return False
extDone.append( releaseVersion )
return True
    def tarExternals( self, releaseVersion ):
        """Compile (via dirac-compile-externals) and tar the externals for one release.

        Skips combinations already published on the server unless
        --buildExternals was given. Exits the process on compilation or
        tarball failures.

        :return: True on success, False when no externals version is defined.
        """
        # NOTE(review): getExtenalsVersion (sic) is the upstream API spelling.
        externalsVersion = self.relConf.getExtenalsVersion( releaseVersion )
        platform = Platform.getPlatformString()
        availableExternals = self.getAvailableExternals()
        if not externalsVersion:
            gLogger.notice( "Externals is not defined for release %s" % releaseVersion )
            return False
        for externalType in self.cliParams.externalsBuildType:
            requestedExternals = ( externalType, externalsVersion, platform, 'python%s' % self.cliParams.externalsPython )
            requestedExternalsString = "-".join( list( requestedExternals ) )
            gLogger.notice( "Trying to compile %s externals..." % requestedExternalsString )
            if not self.cliParams.forceExternals and requestedExternals in availableExternals:
                gLogger.notice( "Externals %s is already compiled, skipping..." % ( requestedExternalsString ) )
                continue
            # Locate the compile script next to this one.
            compileScript = os.path.join( os.path.dirname( __file__ ), "dirac-compile-externals" )
            if not os.path.isfile( compileScript ):
                compileScript = os.path.join( os.path.dirname( __file__ ), "dirac-compile-externals.py" )
            compileTarget = os.path.join( self.cliParams.destination, platform )
            cmdArgs = []
            cmdArgs.append( "-D '%s'" % compileTarget )
            cmdArgs.append( "-t '%s'" % externalType )
            cmdArgs.append( "-v '%s'" % externalsVersion )
            cmdArgs.append( "-i '%s'" % self.cliParams.externalsPython )
            if cliParams.externalsLocation:
                cmdArgs.append( "-e '%s'" % self.cliParams.externalsLocation )
            if cliParams.makeJobs:
                cmdArgs.append( "-j '%s'" % self.cliParams.makeJobs )
            compileCmd = "%s %s" % ( compileScript, " ".join( cmdArgs ) )
            gLogger.info( compileCmd )
            if os.system( compileCmd ):
                gLogger.error( "Error while compiling externals!" )
                sys.exit( 1 )
            tarfilePath = os.path.join( self.cliParams.destination, "Externals-%s.tar.gz" % ( requestedExternalsString ) )
            result = Distribution.createTarball( tarfilePath,
                                                 compileTarget,
                                                 os.path.join( self.cliParams.destination, "mysql" ) )
            if not result[ 'OK' ]:
                gLogger.error( "Could not generate tarball for package %s" % requestedExternalsString, result[ 'Error' ] )
                sys.exit( 1 )
            # Remove the build tree; only the tarball is kept.
            os.system( "rm -rf '%s'" % compileTarget )
        return True
def doTheMagic( self ):
if not distMaker.isOK():
gLogger.fatal( "There was an error with the release description" )
return False
result = distMaker.loadReleases()
if not result[ 'OK' ]:
gLogger.fatal( "There was an error when loading the release.cfg file: %s" % result[ 'Message' ] )
return False
#Module tars
if self.cliParams.ignorePackages:
gLogger.notice( "Skipping creating module tarballs" )
else:
result = self.createModuleTarballs()
if not result[ 'OK' ]:
gLogger.fatal( "There was a problem when creating the module tarballs: %s" % result[ 'Message' ] )
return False
#Externals
if self.cliParams.ignoreExternals or cliParams.projectName != "DIRAC":
gLogger.notice( "Skipping creating externals tarball" )
else:
if not self.createExternalsTarballs():
gLogger.fatal( "There was a problem when creating the Externals tarballs" )
return False
#Write the releases files
for relVersion in self.cliParams.releasesToBuild:
projectCFG = self.relConf.getReleaseCFG( self.cliParams.projectName, relVersion )
projectCFGData = projectCFG.toString() + "\n"
try:
relFile = file( os.path.join( self.cliParams.destination, "release-%s-%s.cfg" % ( self.cliParams.projectName, relVersion ) ), "w" )
relFile.write( projectCFGData )
relFile.close()
except Exception, exc:
gLogger.fatal( "Could not write the release info: %s" % str( exc ) )
return False
try:
relFile = file( os.path.join( self.cliParams.destination, "release-%s-%s.md5" % ( self.cliParams.projectName, relVersion ) ), "w" )
relFile.write( md5.md5( projectCFGData ).hexdigest() )
relFile.close()
except Exception, exc:
gLogger.fatal( "Could not write the release info: %s" % str( exc ) )
return False
#Check deps
if 'DIRAC' != self.cliParams.projectName:
deps = self.relConf.getReleaseDependencies( self.cliParams.projectName, relVersion )
if 'DIRAC' not in deps:
gLogger.notice( "Release %s doesn't depend on DIRAC. Check it's what you really want" % relVersion )
else:
gLogger.notice( "Release %s depends on DIRAC %s" % ( relVersion, deps[ 'DIRAC'] ) )
return True
def getUploadCmd( self ):
result = self.relConf.getUploadCommand()
upCmd = False
if not result['OK']:
if self.cliParams.projectName in g_uploadCmd:
upCmd = g_uploadCmd[ self.cliParams.projectName ]
else:
upCmd = result[ 'Value' ]
filesToCopy = []
for fileName in os.listdir( cliParams.destination ):
for ext in ( ".tar.gz", ".md5", ".cfg", ".html", ".pdf" ):
if fileName.find( ext ) == len( fileName ) - len( ext ):
filesToCopy.append( os.path.join( cliParams.destination, fileName ) )
outFiles = " ".join( filesToCopy )
outFileNames = " ".join( [ os.path.basename( filePath ) for filePath in filesToCopy ] )
if not upCmd:
return "Upload to your installation source:\n'%s'\n" % "' '".join( filesToCopy )
for inRep, outRep in ( ( "%OUTLOCATION%", self.cliParams.destination ),
( "%OUTFILES%", outFiles ),
( "%OUTFILENAMES%", outFileNames ) ):
upCmd = upCmd.replace( inRep, outRep )
return upCmd
if __name__ == "__main__":
  # Bootstrap: parse the CLI switches, build the DistributionMaker and run
  # the full tarball creation, then print how to upload the results.
  cliParams = Params()
  Script.disableCS()
  Script.addDefaultOptionValue( "/DIRAC/Setup", "Dummy" )
  cliParams.registerSwitches()
  Script.parseCommandLine( ignoreErrors = False )
  if Script.localCfg.getDebugMode():
    cliParams.debug = True
  distMaker = DistributionMaker( cliParams )
  if not distMaker.doTheMagic():
    sys.exit( 1 )
  gLogger.notice( "Everything seems ok. Tarballs generated in %s" % cliParams.destination )
  upCmd = distMaker.getUploadCmd()
  gLogger.always( upCmd )
urbanrace6/WarpCoin-8.7.2 | contrib/bitrpc/bitrpc.py | 1 | 7838 | from jsonrpc import ServiceProxy
import sys
import string
# ===== BEGIN USER SETTINGS =====
# if you do not set these you will be prompted for a password for every command
rpcuser = ""
rpcpass = ""
# ====== END USER SETTINGS ======
# Build the JSON-RPC proxy; credentials are embedded in the URL when set.
if rpcpass == "":
    access = ServiceProxy("http://127.0.0.1:9332")
else:
    access = ServiceProxy("http://"+rpcuser+":"+rpcpass+"@127.0.0.1:9332")
# The first CLI argument selects the RPC command (case-insensitive).
cmd = sys.argv[1].lower()
# Dispatch the chosen command to the matching JSON-RPC call.  Every branch
# follows the same shape: prompt for arguments with raw_input, call the RPC
# proxy and print the result; the bare "except" deliberately swallows all
# errors so the CLI just reports a generic failure message.  Branches with a
# nested try retry the call without the optional arguments when the first
# call is rejected.
if cmd == "backupwallet":
    try:
        path = raw_input("Enter destination path/filename: ")
        print access.backupwallet(path)
    except:
        print "\n---An error occurred---\n"
elif cmd == "getaccount":
    try:
        addr = raw_input("Enter a warpcoin address: ")
        print access.getaccount(addr)
    except:
        print "\n---An error occurred---\n"
elif cmd == "getaccountaddress":
    try:
        acct = raw_input("Enter an account name: ")
        print access.getaccountaddress(acct)
    except:
        print "\n---An error occurred---\n"
elif cmd == "getaddressesbyaccount":
    try:
        acct = raw_input("Enter an account name: ")
        print access.getaddressesbyaccount(acct)
    except:
        print "\n---An error occurred---\n"
elif cmd == "getbalance":
    try:
        acct = raw_input("Enter an account (optional): ")
        mc = raw_input("Minimum confirmations (optional): ")
        try:
            # retry without the optional arguments if the call is rejected
            print access.getbalance(acct, mc)
        except:
            print access.getbalance()
    except:
        print "\n---An error occurred---\n"
elif cmd == "getblockbycount":
    try:
        height = raw_input("Height: ")
        print access.getblockbycount(height)
    except:
        print "\n---An error occurred---\n"
elif cmd == "getblockcount":
    try:
        print access.getblockcount()
    except:
        print "\n---An error occurred---\n"
elif cmd == "getblocknumber":
    try:
        print access.getblocknumber()
    except:
        print "\n---An error occurred---\n"
elif cmd == "getconnectioncount":
    try:
        print access.getconnectioncount()
    except:
        print "\n---An error occurred---\n"
elif cmd == "getdifficulty":
    try:
        print access.getdifficulty()
    except:
        print "\n---An error occurred---\n"
elif cmd == "getgenerate":
    try:
        print access.getgenerate()
    except:
        print "\n---An error occurred---\n"
elif cmd == "gethashespersec":
    try:
        print access.gethashespersec()
    except:
        print "\n---An error occurred---\n"
elif cmd == "getinfo":
    try:
        print access.getinfo()
    except:
        print "\n---An error occurred---\n"
elif cmd == "getnewaddress":
    try:
        acct = raw_input("Enter an account name: ")
        try:
            print access.getnewaddress(acct)
        except:
            print access.getnewaddress()
    except:
        print "\n---An error occurred---\n"
elif cmd == "getreceivedbyaccount":
    try:
        acct = raw_input("Enter an account (optional): ")
        mc = raw_input("Minimum confirmations (optional): ")
        try:
            print access.getreceivedbyaccount(acct, mc)
        except:
            print access.getreceivedbyaccount()
    except:
        print "\n---An error occurred---\n"
elif cmd == "getreceivedbyaddress":
    try:
        addr = raw_input("Enter a warpcoin address (optional): ")
        mc = raw_input("Minimum confirmations (optional): ")
        try:
            print access.getreceivedbyaddress(addr, mc)
        except:
            print access.getreceivedbyaddress()
    except:
        print "\n---An error occurred---\n"
elif cmd == "gettransaction":
    try:
        txid = raw_input("Enter a transaction ID: ")
        print access.gettransaction(txid)
    except:
        print "\n---An error occurred---\n"
elif cmd == "getwork":
try:
data = raw_input("Data (optional): ")
try:
print access.gettransaction(data)
except:
print access.gettransaction()
except:
print "\n---An error occurred---\n"
# Continuation of the command dispatch chain: help, listing and
# money-movement commands.  The nested try blocks retry the RPC call
# without the optional arguments when the first attempt is rejected.
elif cmd == "help":
    try:
        cmd = raw_input("Command (optional): ")
        try:
            print access.help(cmd)
        except:
            print access.help()
    except:
        print "\n---An error occurred---\n"
elif cmd == "listaccounts":
    try:
        mc = raw_input("Minimum confirmations (optional): ")
        try:
            print access.listaccounts(mc)
        except:
            print access.listaccounts()
    except:
        print "\n---An error occurred---\n"
elif cmd == "listreceivedbyaccount":
    try:
        mc = raw_input("Minimum confirmations (optional): ")
        incemp = raw_input("Include empty? (true/false, optional): ")
        try:
            print access.listreceivedbyaccount(mc, incemp)
        except:
            print access.listreceivedbyaccount()
    except:
        print "\n---An error occurred---\n"
elif cmd == "listreceivedbyaddress":
    try:
        mc = raw_input("Minimum confirmations (optional): ")
        incemp = raw_input("Include empty? (true/false, optional): ")
        try:
            print access.listreceivedbyaddress(mc, incemp)
        except:
            print access.listreceivedbyaddress()
    except:
        print "\n---An error occurred---\n"
elif cmd == "listtransactions":
    try:
        acct = raw_input("Account (optional): ")
        count = raw_input("Number of transactions (optional): ")
        frm = raw_input("Skip (optional):")
        try:
            print access.listtransactions(acct, count, frm)
        except:
            print access.listtransactions()
    except:
        print "\n---An error occurred---\n"
elif cmd == "move":
    # Move funds between two local accounts (no on-chain transaction).
    try:
        frm = raw_input("From: ")
        to = raw_input("To: ")
        amt = raw_input("Amount:")
        mc = raw_input("Minimum confirmations (optional): ")
        comment = raw_input("Comment (optional): ")
        try:
            print access.move(frm, to, amt, mc, comment)
        except:
            print access.move(frm, to, amt)
    except:
        print "\n---An error occurred---\n"
elif cmd == "sendfrom":
    # Send coins from a specific local account to an address.
    try:
        frm = raw_input("From: ")
        to = raw_input("To: ")
        amt = raw_input("Amount:")
        mc = raw_input("Minimum confirmations (optional): ")
        comment = raw_input("Comment (optional): ")
        commentto = raw_input("Comment-to (optional): ")
        try:
            print access.sendfrom(frm, to, amt, mc, comment, commentto)
        except:
            print access.sendfrom(frm, to, amt)
    except:
        print "\n---An error occurred---\n"
elif cmd == "sendmany":
    # Send to several addresses in one transaction.
    try:
        frm = raw_input("From: ")
        to = raw_input("To (in format address1:amount1,address2:amount2,...): ")
        mc = raw_input("Minimum confirmations (optional): ")
        comment = raw_input("Comment (optional): ")
        try:
            print access.sendmany(frm,to,mc,comment)
        except:
            print access.sendmany(frm,to)
    except:
        print "\n---An error occurred---\n"
elif cmd == "sendtoaddress":
try:
to = raw_input("To (in format address1:amount1,address2:amount2,...): ")
amt = raw_input("Amount:")
comment = raw_input("Comment (optional): ")
commentto = raw_input("Comment-to (optional): ")
try:
print access.sendtoaddress(to,amt,comment,commentto)
except:
print access.sendtoaddress(to,amt)
except:
print "\n---An error occurred---\n"
# Final section of the dispatch chain: wallet administration commands and
# the catch-all for unknown commands.
elif cmd == "setaccount":
    try:
        addr = raw_input("Address: ")
        acct = raw_input("Account:")
        print access.setaccount(addr,acct)
    except:
        print "\n---An error occurred---\n"
elif cmd == "setgenerate":
    try:
        gen= raw_input("Generate? (true/false): ")
        cpus = raw_input("Max processors/cores (-1 for unlimited, optional):")
        try:
            print access.setgenerate(gen, cpus)
        except:
            print access.setgenerate(gen)
    except:
        print "\n---An error occurred---\n"
elif cmd == "settxfee":
    try:
        amt = raw_input("Amount:")
        print access.settxfee(amt)
    except:
        print "\n---An error occurred---\n"
elif cmd == "stop":
    try:
        print access.stop()
    except:
        print "\n---An error occurred---\n"
elif cmd == "validateaddress":
    try:
        addr = raw_input("Address: ")
        print access.validateaddress(addr)
    except:
        print "\n---An error occurred---\n"
elif cmd == "walletpassphrase":
    try:
        pwd = raw_input("Enter wallet passphrase: ")
        # unlock the wallet for 60 seconds
        access.walletpassphrase(pwd, 60)
        print "\n---Wallet unlocked---\n"
    except:
        print "\n---An error occurred---\n"
elif cmd == "walletpassphrasechange":
    try:
        pwd = raw_input("Enter old wallet passphrase: ")
        pwd2 = raw_input("Enter new wallet passphrase: ")
        access.walletpassphrasechange(pwd, pwd2)
        print
        print "\n---Passphrase changed---\n"
    except:
        print
        print "\n---An error occurred---\n"
        print
else:
    print "Command not found or not supported"
| mit |
Wojtechnology/Muzit | StreetMuse/lib/python3.4/site-packages/pip/_vendor/requests/packages/urllib3/packages/ssl_match_hostname/_implementation.py | 2360 | 3778 | """The match_hostname() function from Python 3.3.3, essential when using SSL."""
# Note: This file is under the PSF license as the code comes from the python
# stdlib. http://docs.python.org/3/license.html
import re
__version__ = '3.4.0.2'
class CertificateError(ValueError):
    """Raised when a certificate does not match the expected hostname."""
    pass
def _dnsname_match(dn, hostname, max_wildcards=1):
"""Matching according to RFC 6125, section 6.4.3
http://tools.ietf.org/html/rfc6125#section-6.4.3
"""
pats = []
if not dn:
return False
# Ported from python3-syntax:
# leftmost, *remainder = dn.split(r'.')
parts = dn.split(r'.')
leftmost = parts[0]
remainder = parts[1:]
wildcards = leftmost.count('*')
if wildcards > max_wildcards:
# Issue #17980: avoid denials of service by refusing more
# than one wildcard per fragment. A survey of established
# policy among SSL implementations showed it to be a
# reasonable choice.
raise CertificateError(
"too many wildcards in certificate DNS name: " + repr(dn))
# speed up common case w/o wildcards
if not wildcards:
return dn.lower() == hostname.lower()
# RFC 6125, section 6.4.3, subitem 1.
# The client SHOULD NOT attempt to match a presented identifier in which
# the wildcard character comprises a label other than the left-most label.
if leftmost == '*':
# When '*' is a fragment by itself, it matches a non-empty dotless
# fragment.
pats.append('[^.]+')
elif leftmost.startswith('xn--') or hostname.startswith('xn--'):
# RFC 6125, section 6.4.3, subitem 3.
# The client SHOULD NOT attempt to match a presented identifier
# where the wildcard character is embedded within an A-label or
# U-label of an internationalized domain name.
pats.append(re.escape(leftmost))
else:
# Otherwise, '*' matches any dotless string, e.g. www*
pats.append(re.escape(leftmost).replace(r'\*', '[^.]*'))
# add the remaining fragments, ignore any wildcards
for frag in remainder:
pats.append(re.escape(frag))
pat = re.compile(r'\A' + r'\.'.join(pats) + r'\Z', re.IGNORECASE)
return pat.match(hostname)
def match_hostname(cert, hostname):
    """Verify that *cert* (in decoded format as returned by
    SSLSocket.getpeercert()) matches the *hostname*.  RFC 2818 and RFC 6125
    rules are followed, but IP addresses are not accepted for *hostname*.

    CertificateError is raised on failure. On success, the function
    returns nothing.
    """
    if not cert:
        raise ValueError("empty or no certificate")

    checked = []
    # Prefer dNSName entries of the subjectAltName extension.
    for key, value in cert.get('subjectAltName', ()):
        if key == 'DNS':
            if _dnsname_match(value, hostname):
                return
            checked.append(value)

    if not checked:
        # The subject is only consulted when there is no dNSName entry in
        # subjectAltName.
        for sub in cert.get('subject', ()):
            for key, value in sub:
                # XXX according to RFC 2818, the most specific Common Name
                # must be used.
                if key == 'commonName':
                    if _dnsname_match(value, hostname):
                        return
                    checked.append(value)

    if len(checked) > 1:
        raise CertificateError("hostname %r doesn't match either of %s"
                               % (hostname, ', '.join(map(repr, checked))))
    elif len(checked) == 1:
        raise CertificateError("hostname %r doesn't match %r"
                               % (hostname, checked[0]))
    else:
        raise CertificateError("no appropriate commonName or "
                               "subjectAltName fields were found")
| apache-2.0 |
PfarrCh/openrsa | test/jacobi_testcase.py | 1 | 2317 | # -*- coding: utf-8 -*-
"""
MIT License
Copyright (c) 2017 Christian Pfarr
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
"""
import unittest
from jacobi import Jacobi
class JacobiTestCase(unittest.TestCase):
    """Exercises Jacobi.symbol() for its three possible outcomes:
    0 when n divides a, 1 when a is a quadratic residue modulo n and
    -1 when a is a quadratic non-residue modulo n.
    """

    def setUp(self):
        # Common numerator and one modulus per expected outcome.
        self.a = 5
        self.n_divider, self.n_square_rest, self.n_no_square_rest = 15, 11, 17
        # Expected Jacobi symbol values for the moduli above.
        self.symbol_divider, self.symbol_square_rest, self.symbol_no_square_rest = 0, 1, -1

    def tearDown(self):
        # Drop every fixture attribute created in setUp.
        for attr in ('a', 'n_divider', 'n_square_rest', 'n_no_square_rest',
                     'symbol_divider', 'symbol_square_rest',
                     'symbol_no_square_rest'):
            delattr(self, attr)

    def test_symbol_divider(self):
        self.assertTrue(
            Jacobi.symbol(self.a, self.n_divider) == self.symbol_divider,
            "Jacobi Symbol cant test if n is divider of a")

    def test_symbol_square_rest(self):
        self.assertTrue(
            Jacobi.symbol(self.a, self.n_square_rest) == self.symbol_square_rest,
            "Jacobi Symbol cant test if a is square rest of modulo n")

    def test_symbol_no_square_rest(self):
        self.assertTrue(
            Jacobi.symbol(self.a, self.n_no_square_rest) == self.symbol_no_square_rest,
            "Jacobi Symbol cant test if a is no square rest of modulo n")
| mit |
karllessard/tensorflow | tensorflow/python/compiler/tensorrt/test/const_broadcast_test.py | 23 | 2640 | # Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Script to test TF-TensorRT integration."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow.python.compiler.tensorrt.test import tf_trt_integration_test_base as trt_test
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import dtypes
from tensorflow.python.ops import nn
from tensorflow.python.platform import test
class ConstBroadcastTest(trt_test.TfTrtIntegrationTestBase):
  """Test for Constant broadcasting in TF-TRT."""

  def GraphFn(self, x):
    """Return the expected graph to convert."""
    dtype = x.dtype
    # Three conv+relu stages; the first kernel consumes the 2-channel input,
    # the following ones are single-channel.  Op names are fixed because the
    # test infrastructure refers to them.
    kernel1 = constant_op.constant(
        0.3, shape=(3, 3, 2, 1), dtype=dtype, name='filt1')
    conv1 = nn.conv2d(x, kernel1, strides=[1, 1, 1, 1], padding='SAME',
                      name='y1')
    act1 = nn.relu(conv1, name='z1')
    kernel2 = constant_op.constant(
        0.3, shape=(3, 3, 1, 1), dtype=dtype, name='filt2')
    conv2 = nn.conv2d(act1, kernel2, strides=[1, 1, 1, 1], padding='SAME',
                      name='y2')
    act2 = nn.relu(conv2, name='z')
    kernel3 = constant_op.constant(
        0.3, shape=(3, 3, 1, 1), dtype=dtype, name='filt3')
    conv3 = nn.conv2d(act2, kernel3, strides=[1, 1, 1, 1], padding='SAME',
                      name='y3')
    return nn.relu(conv3, name='output_0')

  def GetParams(self):
    """Input shape (5, 12, 12, 2) maps to output shape (5, 12, 12, 1)."""
    return self.BuildParams(self.GraphFn, dtypes.float32, [[5, 12, 12, 2]],
                            [[5, 12, 12, 1]])

  def ExpectedEnginesToBuild(self, run_params):
    """Return the expected engines to build."""
    return ['TRTEngineOp_0']

  def ExpectedAbsoluteTolerance(self, run_params):
    """The absolute tolerance to compare floating point results."""
    if run_params.precision_mode == 'FP32':
      return 1.e-04
    return 1.e-02

  def ExpectedRelativeTolerance(self, run_params):
    """The relative tolerance to compare floating point results."""
    if run_params.precision_mode == 'FP32':
      return 1.e-04
    return 1.e-02
if __name__ == '__main__':
  # Delegate to the TensorFlow test runner.
  test.main()
| apache-2.0 |
unicri/edx-platform | common/lib/chem/chem/chemtools.py | 250 | 10721 | """This module originally includes functions for grading Vsepr problems.
Also, may be this module is the place for other chemistry-related grade functions. TODO: discuss it.
"""
import json
import unittest
import itertools
def vsepr_parse_user_answer(user_input):
    """Decode the JSON answer produced by vsepr.js into a dict.

    The JSON object holds exactly two keys:

    * "geometry" -- geometry code, e.g. "AX3E0"
    * "atoms"    -- mapping of position labels to atom symbols.  Positions
      are c0 (central atom), p0..pN (peripheral), a0..aN (axial) and
      e0..eN (equatorial).  Only for the AX6 (octahedral) geometry the
      special labels e10/e11 and e20/e21 mark the two equatorial atom
      pairs lying opposite the central atom, crosswise in the equatorial
      plane.

    So "atoms" contains one of three key sets: (c0, p0..pN),
    (c0, a0..aN, e0..eN) or (c0, a0, a1, e10, e11, e20, e21) for AX6.

    Example input (order inside "atoms" does not matter):
        u'{"geometry": "AX3E0","atoms":{"c0": "B","p0": "F","p1": "B","p2": "F"}}'

    Returns the dict parsed from the JSON string.
    """
    decoded = json.loads(user_input)
    return decoded
def vsepr_build_correct_answer(geometry, atoms):
    """Pack a geometry code and an atom-position mapping into the dict
    format that vsepr_grade() expects as the correct answer.

    Example:
        vsepr_build_correct_answer(
            geometry="AX4E0",
            atoms={"c0": "N", "p0": "H", "p1": "(ep)", "p2": "H", "p3": "H"})
        -> {'geometry': 'AX4E0', 'atoms': {...}}
    """
    return dict(geometry=geometry, atoms=atoms)
def vsepr_grade(user_input, correct_answer, convert_to_peripheral=False):
    """Compare a parsed user answer against the correct answer.

    The grade succeeds (returns True) only if:
      1) the geometries are equal,
      2) the central atoms ('c0') are equal, and
      3) the corresponding subsets of atom positions match, ignoring order
         inside each subset: peripheral (p0..pN), axial (a0..aN) and
         equatorial (e0..eN).

    If convert_to_peripheral is True, the user's axial/equatorial positions
    are first re-labelled as peripheral (aX, eX -> pX); the correct answer
    must then be expressed in peripheral positions as well.

    Special case for AX6 (octahedral) geometry: the user answer uses the
    pair labels e10/e11 and e20/e21.  If the correct answer also uses them,
    the three sets (axial, eq-pair-1, eq-pair-2) are compared under every
    permutation, since the pairs are geometrically interchangeable.
    Otherwise the user's pair labels are collapsed to plain eX positions
    before the common comparison.

    NOTE: user_input['atoms'] may be modified in place by the conversions.
    """
    if user_input['geometry'] != correct_answer['geometry']:
        return False
    if user_input['atoms']['c0'] != correct_answer['atoms']['c0']:
        return False

    if convert_to_peripheral:
        # Re-label every non-central position as pX; order is irrelevant
        # because subsets are compared sorted below.
        c0 = user_input['atoms'].pop('c0')
        user_input['atoms'] = {'p' + str(i): v
                               for i, v in enumerate(user_input['atoms'].values())}
        user_input['atoms']['c0'] = c0

    # special case for AX6: the correct answer distinguishes the two
    # equatorial pairs, so check e1x/e2x/axial symmetry
    if 'e10' in correct_answer['atoms']:
        a_user = {}
        a_correct = {}
        for ea_position in ['a', 'e1', 'e2']:  # collecting positions
            a_user[ea_position] = [v for k, v in user_input['atoms'].items()
                                   if k.startswith(ea_position)]
            a_correct[ea_position] = [v for k, v in correct_answer['atoms'].items()
                                      if k.startswith(ea_position)]
        correct = [sorted(a_correct['a'])] + [sorted(a_correct['e1'])] + [sorted(a_correct['e2'])]
        # the axial set and the two equatorial pairs are interchangeable:
        # accept any permutation that reproduces the correct sets
        for permutation in itertools.permutations(['a', 'e1', 'e2']):
            if correct == [sorted(a_user[position]) for position in permutation]:
                return True
        return False
    else:
        # no need to check e1x/e2x symmetry - collapse them to eX
        if 'e10' in user_input['atoms']:
            e_index = 0
            # iterate over a snapshot: the dict is mutated inside the loop
            # (direct iteration would raise RuntimeError on Python 3)
            for k, v in list(user_input['atoms'].items()):
                if len(k) == 3:  # e1x / e2x
                    del user_input['atoms'][k]
                    user_input['atoms']['e' + str(e_index)] = v
                    e_index += 1
        # common case: each position subset must match as a multiset
        for ea_position in ['p', 'a', 'e']:
            a_user = [v for k, v in user_input['atoms'].items() if k.startswith(ea_position)]
            a_correct = [v for k, v in correct_answer['atoms'].items() if k.startswith(ea_position)]
            if len(a_user) != len(a_correct):
                return False
            if sorted(a_user) != sorted(a_correct):
                return False
    return True
class Test_Grade(unittest.TestCase):
    '''Unit tests for vsepr_grade(), covering the plain peripheral case,
    the axial/equatorial case, the peripheral-conversion flag and the
    AX6 equatorial-pair permutation logic.'''
    def test_incorrect_geometry(self):
        correct_answer = vsepr_build_correct_answer(geometry="AX4E0", atoms={"c0": "N", "p0": "H", "p1": "(ep)", "p2": "H", "p3": "H"})
        user_answer = vsepr_parse_user_answer(u'{"geometry": "AX3E0","atoms":{"c0": "B","p0": "F","p1": "B","p2": "F"}}')
        self.assertFalse(vsepr_grade(user_answer, correct_answer))
    def test_correct_answer_p(self):
        correct_answer = vsepr_build_correct_answer(geometry="AX4E0", atoms={"c0": "N", "p0": "H", "p1": "(ep)", "p2": "H", "p3": "H"})
        user_answer = vsepr_parse_user_answer(u'{"geometry": "AX4E0","atoms":{"c0": "N","p0": "H","p1": "(ep)","p2": "H", "p3": "H"}}')
        self.assertTrue(vsepr_grade(user_answer, correct_answer))
    def test_correct_answer_ae(self):
        correct_answer = vsepr_build_correct_answer(geometry="AX6E0", atoms={"c0": "Br", "a0": "test", "a1": "(ep)", "e0": "H", "e1": "H", "e2": "(ep)", "e3": "(ep)"})
        user_answer = vsepr_parse_user_answer(u'{"geometry": "AX6E0","atoms":{"c0": "Br","a0": "test","a1": "(ep)","e10": "H","e11": "H","e20": "(ep)","e21": "(ep)"}}')
        self.assertTrue(vsepr_grade(user_answer, correct_answer))
    def test_correct_answer_ae_convert_to_p_but_input_not_in_p(self):
        # correct answer in a/e positions while the flag demands p positions
        correct_answer = vsepr_build_correct_answer(geometry="AX6E0", atoms={"c0": "Br", "a0": "(ep)", "a1": "test", "e0": "H", "e1": "H", "e2": "(ep)", "e3": "(ep)"})
        user_answer = vsepr_parse_user_answer(u'{"geometry": "AX6E0","atoms":{"c0": "Br","a0": "test","a1": "(ep)","e10": "H","e11": "(ep)","e20": "H","e21": "(ep)"}}')
        self.assertFalse(vsepr_grade(user_answer, correct_answer, convert_to_peripheral=True))
    def test_correct_answer_ae_convert_to_p(self):
        correct_answer = vsepr_build_correct_answer(geometry="AX6E0", atoms={"c0": "Br", "p0": "(ep)", "p1": "test", "p2": "H", "p3": "H", "p4": "(ep)", "p6": "(ep)"})
        user_answer = vsepr_parse_user_answer(u'{"geometry": "AX6E0","atoms":{"c0": "Br","a0": "test","a1": "(ep)","e10": "H","e11": "(ep)","e20": "H","e21": "(ep)"}}')
        self.assertTrue(vsepr_grade(user_answer, correct_answer, convert_to_peripheral=True))
    def test_correct_answer_e1e2_in_a(self):
        # AX6 pair symmetry: identity permutation
        correct_answer = vsepr_build_correct_answer(geometry="AX6E0", atoms={"c0": "Br", "a0": "(ep)", "a1": "(ep)", "e10": "H", "e11": "H", "e20": "H", "e21": "H"})
        user_answer = vsepr_parse_user_answer(u'{"geometry": "AX6E0","atoms":{"c0": "Br","a0": "(ep)","a1": "(ep)","e10": "H","e11": "H","e20": "H","e21": "H"}}')
        self.assertTrue(vsepr_grade(user_answer, correct_answer))
    def test_correct_answer_e1e2_in_e1(self):
        # AX6 pair symmetry: axial set swapped with equatorial pair 1
        correct_answer = vsepr_build_correct_answer(geometry="AX6E0", atoms={"c0": "Br", "a0": "(ep)", "a1": "(ep)", "e10": "H", "e11": "H", "e20": "H", "e21": "H"})
        user_answer = vsepr_parse_user_answer(u'{"geometry": "AX6E0","atoms":{"c0": "Br","a0": "H","a1": "H","e10": "(ep)","e11": "(ep)","e20": "H","e21": "H"}}')
        self.assertTrue(vsepr_grade(user_answer, correct_answer))
    def test_correct_answer_e1e2_in_e2(self):
        # AX6 pair symmetry: axial set swapped with equatorial pair 2
        correct_answer = vsepr_build_correct_answer(geometry="AX6E0", atoms={"c0": "Br", "a0": "(ep)", "a1": "(ep)", "e10": "H", "e11": "H", "e20": "H", "e21": "H"})
        user_answer = vsepr_parse_user_answer(u'{"geometry": "AX6E0","atoms":{"c0": "Br","a0": "H","a1": "H","e10": "H","e11": "H","e20": "(ep)","e21": "(ep)"}}')
        self.assertTrue(vsepr_grade(user_answer, correct_answer))
    def test_incorrect_answer_e1e2(self):
        # the lone pairs are split across both equatorial pairs: no permutation matches
        correct_answer = vsepr_build_correct_answer(geometry="AX6E0", atoms={"c0": "Br", "a0": "(ep)", "a1": "(ep)", "e10": "H", "e11": "H", "e20": "H", "e21": "H"})
        user_answer = vsepr_parse_user_answer(u'{"geometry": "AX6E0","atoms":{"c0": "Br","a0": "H","a1": "H","e10": "(ep)","e11": "H","e20": "H","e21": "(ep)"}}')
        self.assertFalse(vsepr_grade(user_answer, correct_answer))
    def test_incorrect_c0(self):
        correct_answer = vsepr_build_correct_answer(geometry="AX6E0", atoms={"c0": "Br", "a0": "(ep)", "a1": "test", "e0": "H", "e1": "H", "e2": "H", "e3": "(ep)"})
        user_answer = vsepr_parse_user_answer(u'{"geometry": "AX6E0","atoms":{"c0": "H","a0": "test","a1": "(ep)","e0": "H","e1": "H","e2": "(ep)","e3": "H"}}')
        self.assertFalse(vsepr_grade(user_answer, correct_answer))
def suite():
    """Collect every TestCase class of this module into one TestSuite."""
    loader = unittest.TestLoader()
    return unittest.TestSuite(
        [loader.loadTestsFromTestCase(testcase) for testcase in [Test_Grade]])
if __name__ == "__main__":
    # Run the module's self-tests with verbose output.
    unittest.TextTestRunner(verbosity=2).run(suite())
| agpl-3.0 |
askulkarni2/ansible-modules-core | cloud/rackspace/rax_cdb.py | 104 | 7169 | #!/usr/bin/python -tt
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# This is a DOCUMENTATION stub specific to this module, it extends
# a documentation fragment located in ansible.utils.module_docs_fragments
DOCUMENTATION = '''
---
module: rax_cdb
short_description: create/delete or resize a Rackspace Cloud Databases instance
description:
- creates / deletes or resize a Rackspace Cloud Databases instance
and optionally waits for it to be 'running'. The name option needs to be
unique since it's used to identify the instance.
version_added: "1.8"
options:
name:
description:
- Name of the databases server instance
default: null
flavor:
description:
- flavor to use for the instance 1 to 6 (i.e. 512MB to 16GB)
default: 1
volume:
description:
- Volume size of the database 1-150GB
default: 2
state:
description:
- Indicate desired state of the resource
choices: ['present', 'absent']
default: present
wait:
description:
- wait for the instance to be in state 'running' before returning
default: "no"
choices: [ "yes", "no" ]
wait_timeout:
description:
- how long before wait gives up, in seconds
default: 300
author: "Simon JAILLET (@jails)"
extends_documentation_fragment: rackspace
'''
EXAMPLES = '''
- name: Build a Cloud Databases
gather_facts: False
tasks:
- name: Server build request
local_action:
module: rax_cdb
credentials: ~/.raxpub
region: IAD
name: db-server1
flavor: 1
volume: 2
wait: yes
state: present
register: rax_db_server
'''
try:
import pyrax
HAS_PYRAX = True
except ImportError:
HAS_PYRAX = False
def find_instance(name):
cdb = pyrax.cloud_databases
instances = cdb.list()
if instances:
for instance in instances:
if instance.name == name:
return instance
return False
def save_instance(module, name, flavor, volume, wait, wait_timeout):
for arg, value in dict(name=name, flavor=flavor,
volume=volume).iteritems():
if not value:
module.fail_json(msg='%s is required for the "rax_cdb"'
' module' % arg)
if not (volume >= 1 and volume <= 150):
module.fail_json(msg='volume is required to be between 1 and 150')
cdb = pyrax.cloud_databases
flavors = []
for item in cdb.list_flavors():
flavors.append(item.id)
if not (flavor in flavors):
module.fail_json(msg='unexisting flavor reference "%s"' % str(flavor))
changed = False
instance = find_instance(name)
if not instance:
action = 'create'
try:
instance = cdb.create(name=name, flavor=flavor, volume=volume)
except Exception, e:
module.fail_json(msg='%s' % e.message)
else:
changed = True
else:
action = None
if instance.volume.size != volume:
action = 'resize'
if instance.volume.size > volume:
module.fail_json(changed=False, action=action,
msg='The new volume size must be larger than '
'the current volume size',
cdb=rax_to_dict(instance))
instance.resize_volume(volume)
changed = True
if int(instance.flavor.id) != flavor:
action = 'resize'
pyrax.utils.wait_until(instance, 'status', 'ACTIVE',
attempts=wait_timeout)
instance.resize(flavor)
changed = True
if wait:
pyrax.utils.wait_until(instance, 'status', 'ACTIVE',
attempts=wait_timeout)
if wait and instance.status != 'ACTIVE':
module.fail_json(changed=changed, action=action,
cdb=rax_to_dict(instance),
msg='Timeout waiting for "%s" databases instance to '
'be created' % name)
module.exit_json(changed=changed, action=action, cdb=rax_to_dict(instance))
def delete_instance(module, name, wait, wait_timeout):
    """Delete a Cloud Databases instance by name (state=absent).

    Terminates through module.exit_json()/fail_json().  Python 2 only.
    """
    if not name:
        module.fail_json(msg='name is required for the "rax_cdb" module')
    changed = False
    instance = find_instance(name)
    if not instance:
        # Nothing to delete -> idempotent no-op
        module.exit_json(changed=False, action='delete')
    try:
        instance.delete()
    except Exception, e:
        module.fail_json(msg='%s' % e.message)
    else:
        changed = True
    if wait:
        pyrax.utils.wait_until(instance, 'status', 'SHUTDOWN',
                               attempts=wait_timeout)
    # NOTE(review): status may not have reached SHUTDOWN when wait_until
    # exhausts its attempts; reported as a timeout here.
    if wait and instance.status != 'SHUTDOWN':
        module.fail_json(changed=changed, action='delete',
                         cdb=rax_to_dict(instance),
                         msg='Timeout waiting for "%s" databases instance to '
                             'be deleted' % name)
    module.exit_json(changed=changed, action='delete',
                     cdb=rax_to_dict(instance))
def rax_cdb(module, state, name, flavor, volume, wait, wait_timeout):
    """Dispatch to the create/resize or delete handler for *state*."""
    if state == 'absent':
        delete_instance(module, name, wait, wait_timeout)
    elif state == 'present':
        save_instance(module, name, flavor, volume, wait, wait_timeout)
def main():
    """Ansible module entry point: build the argument spec, validate the
    environment and dispatch on the requested state."""
    argument_spec = rax_argument_spec()
    argument_spec.update(
        dict(
            name=dict(type='str', required=True),
            flavor=dict(type='int', default=1),
            volume=dict(type='int', default=2),
            state=dict(default='present', choices=['present', 'absent']),
            wait=dict(type='bool', default=False),
            wait_timeout=dict(type='int', default=300),
        )
    )
    module = AnsibleModule(
        argument_spec=argument_spec,
        required_together=rax_required_together(),
    )
    # pyrax import is attempted at module load; bail out early if missing.
    if not HAS_PYRAX:
        module.fail_json(msg='pyrax is required for this module')
    name = module.params.get('name')
    flavor = module.params.get('flavor')
    volume = module.params.get('volume')
    state = module.params.get('state')
    wait = module.params.get('wait')
    wait_timeout = module.params.get('wait_timeout')
    # Authenticate pyrax with the module's credentials/region settings.
    setup_rax_module(module, pyrax)
    rax_cdb(module, state, name, flavor, volume, wait, wait_timeout)
# import module snippets
from ansible.module_utils.basic import *
from ansible.module_utils.rax import *
# invoke the module
main()
| gpl-3.0 |
huhuibin147/osu-qqbot | new/tree.py | 1 | 4724 | from math import log
import operator
def createDataSet():
    """Return the toy fish dataset (last column = class label) and the
    feature names for its two columns."""
    samples = [
        [1, 1, 'yes'],
        [1, 1, 'yes'],
        [1, 0, 'no'],
        [0, 1, 'no'],
        [0, 1, 'no'],
    ]
    feature_names = ['no surfacing', 'flippers']
    return samples, feature_names
def calcShannonEnt(dataSet):
    """Compute the Shannon entropy of the class labels (last column)."""
    total = len(dataSet)
    # Tally how often each class label occurs.
    counts = {}
    for row in dataSet:
        label = row[-1]
        counts[label] = counts.get(label, 0) + 1
    # Entropy: -sum(p * log2(p)) over the label distribution.
    entropy = 0.0
    for freq in counts.values():
        p = freq / total
        entropy -= p * log(p, 2)
    return entropy
def splitDataSet(dataSet, axis, value):
    """Return the rows whose feature at *axis* equals *value*, with that
    feature column removed from each returned row."""
    return [row[:axis] + row[axis + 1:]
            for row in dataSet
            if row[axis] == value]
def chooseBestFeatureToSplit(dataSet):
    """Return the index of the feature whose split maximises information
    gain, or -1 when no split improves on the base entropy."""
    feature_count = len(dataSet[0]) - 1  # last column is the class label
    base_entropy = calcShannonEnt(dataSet)
    best_gain, best_index = 0.0, -1
    for index in range(feature_count):
        # Weighted entropy of the partition induced by this feature.
        split_entropy = 0.0
        for value in {row[index] for row in dataSet}:
            subset = splitDataSet(dataSet, index, value)
            weight = len(subset) / len(dataSet)
            split_entropy += weight * calcShannonEnt(subset)
        gain = base_entropy - split_entropy
        if gain > best_gain:
            best_gain, best_index = gain, index
    return best_index
def majorityCnt(classList):
    """Return the most frequent label; ties go to the label seen first."""
    tally = {}
    for label in classList:
        tally[label] = tally.get(label, 0) + 1
    # max() scans in insertion order, so among equal counts the label that
    # appeared first wins -- the same tie-break as a stable descending sort.
    return max(tally, key=tally.get)
def createTree(dataSet,labels):
    """Recursively build a decision tree as nested dicts.

    :param dataSet: rows of feature values with the class label last
    :param labels: feature names aligned with the columns; MUTATED -- the
        chosen feature name is deleted at each level
    :return: a label string (leaf) or {feature_name: {value: subtree}}
    """
    # Class labels of every row
    classList = [example[-1] for example in dataSet]
    # Base case 1: all rows share a single class
    if classList.count(classList[0]) == len(classList):
        return classList[0]
    # Base case 2: features exhausted but classes still differ -> majority vote
    if len(dataSet[0]) == 1:
        return majorityCnt(classList)
    bestFeat = chooseBestFeatureToSplit(dataSet)
    bestFeatLabel = labels[bestFeat]
    myTree = {bestFeatLabel:{}}
    del(labels[bestFeat])
    # Distinct values of the winning feature
    featValues = [example[bestFeat] for example in dataSet]
    uniqueVals = set(featValues)
    # Branch on each value; copy labels so sibling branches see an intact list
    for value in uniqueVals:
        subLabels = labels[:]
        myTree[bestFeatLabel][value] = createTree(splitDataSet(dataSet,bestFeat,value),subLabels)
    return myTree
def classify(inputTree, featLabels, testVec):
    """Classify *testVec* by walking a decision tree built by createTree().

    :param inputTree: nested-dict tree, e.g.
        {'no surfacing': {0: 'no', 1: {'flippers': {0: 'no', 1: 'yes'}}}}
    :param featLabels: feature names, used to locate each feature in testVec
    :param testVec: feature values for the sample to classify
    :return: the predicted class label, or None if no branch matches
    """
    # Feature tested at the root of (this subtree of) the tree.
    firstStr = list(inputTree.keys())[0]
    secondDict = inputTree[firstStr]
    # Position of that feature in the test vector.
    featIndex = featLabels.index(firstStr)
    classLabels = None  # avoid UnboundLocalError when no branch matches
    for key in secondDict.keys():
        if testVec[featIndex] == key:
            # BUG FIX: the original wrote `type(...) == 'dict'`, comparing a
            # type object against a string -- always False -- so inner nodes
            # were returned as raw dicts instead of being descended into.
            if isinstance(secondDict[key], dict):
                classLabels = classify(secondDict[key], featLabels, testVec)
            else:
                classLabels = secondDict[key]
    return classLabels
def storeTree(inputTree, filename):
    """Serialize *inputTree* to *filename* with pickle.

    BUG FIX: pickle writes bytes, so the file must be opened in binary
    mode -- the original text-mode 'w' raises TypeError on Python 3.
    The with-block also closes the handle the original leaked.
    """
    import pickle
    with open(filename, 'wb') as fw:
        pickle.dump(inputTree, fw)
def grabTree(filename):
    """Load and return a tree previously written by storeTree().

    BUG FIX: pickle reads bytes, so the file is opened in binary mode
    ('rb'); the original text-mode open fails on Python 3.  The
    with-block also closes the handle the original leaked.
    """
    import pickle
    with open(filename, 'rb') as fr:
        return pickle.load(fr)
#########################
# import matplotlib.pyplot as plt
# decisionNode = dict(boxstyle='sawtooth', fc='0.8')
# leafNode = dict(boxstyle='round4', fc='0.8')
# arrow_args = dict(arrowstyle='<-')
def plotNode(nodeTxt, centerPt, parentPt, nodeType):
    """Draw one annotated tree node with an arrow from parent to child.

    NOTE(review): relies on createPlot.ax1 plus the module-level
    decisionNode/leafNode/arrow_args styles and matplotlib import, all of
    which are commented out above -- calling this as-is raises NameError
    until that setup block is restored.
    """
    createPlot.ax1.annotate(nodeTxt, xy=parentPt, xycoords='axes fraction', xytext=centerPt,
        textcoords='axes fraction',va='center',ha='center',bbox=nodeType,arrowprops=arrow_args)
def createPlot():
    """Render a two-node demo figure (one decision node, one leaf node).

    NOTE(review): requires the commented-out matplotlib setup above
    (plt, decisionNode, leafNode); restore it before calling.
    """
    fig = plt.figure(1, facecolor='white')
    fig.clf()
    # plotNode reads the axes back via this function attribute.
    createPlot.ax1 = plt.subplot(111, frameon=False)
    plotNode('a decision node', (0.5,0.1), (0.1,0.5), decisionNode)
    plotNode('a leaf node', (0.8,0.1), (0.3,0.8),leafNode)
    plt.show()
#########################
| gpl-2.0 |
mvaled/sentry | src/sentry/ingest_consumer.py | 1 | 7407 | from __future__ import absolute_import
import logging
import msgpack
import signal
from contextlib import contextmanager
from django.conf import settings
from django.core.cache import cache
import confluent_kafka as kafka
from sentry.coreapi import cache_key_from_project_id_and_event_id
from sentry.cache import default_cache
from sentry.tasks.store import preprocess_event
logger = logging.getLogger(__name__)
class ConsumerType(object):
    """
    Enumerates the kinds of ingestion consumers and maps each one to the
    Kafka topic it reads from.
    """

    Events = "events"  # consumes simple events ( from the Events topic)
    Attachments = "attachments"  # consumes events with attachments ( from the Attachments topic)
    Transactions = "transactions"  # consumes transaction events ( from the Transactions topic)

    @staticmethod
    def get_topic_name(consumer_type):
        # Map each consumer type to the *name* of its settings attribute;
        # the attribute itself is resolved lazily via getattr so Django
        # settings are only touched for the matched type.
        setting_by_type = {
            ConsumerType.Events: "KAFKA_INGEST_EVENTS",
            ConsumerType.Attachments: "KAFKA_INGEST_ATTACHMENTS",
            ConsumerType.Transactions: "KAFKA_INGEST_TRANSACTIONS",
        }
        if consumer_type not in setting_by_type:
            raise ValueError("Invalid consumer type", consumer_type)
        return getattr(settings, setting_by_type[consumer_type])
def _create_consumer(consumer_group, consumer_type, initial_offset_reset):
    """
    Creates a kafka consumer configured for the given consumer type.

    :param consumer_group: kafka consumer group name
    :param consumer_type: one of the `ConsumerType` values; selects the topic
    :param initial_offset_reset: offset reset policy applied when the group
        has no committed offset (e.g. "latest" or "earliest")
    :return: a configured confluent_kafka Consumer (not yet subscribed)
    """
    topic_name = ConsumerType.get_topic_name(consumer_type)
    # The topic's cluster entry determines which brokers to bootstrap from.
    cluster_name = settings.KAFKA_TOPICS[topic_name]["cluster"]
    bootstrap_servers = settings.KAFKA_CLUSTERS[cluster_name]["bootstrap.servers"]

    consumer_configuration = {
        "bootstrap.servers": bootstrap_servers,
        "group.id": consumer_group,
        "enable.auto.commit": "false",  # we commit manually
        "enable.auto.offset.store": "true",  # we let the broker keep count of the current offset (when committing)
        "enable.partition.eof": "false",  # stop EOF errors when we read all messages in the topic
        "default.topic.config": {"auto.offset.reset": initial_offset_reset},
    }

    return kafka.Consumer(consumer_configuration)
@contextmanager
def set_termination_request_handlers(handler):
    """Temporarily install *handler* for SIGINT and SIGTERM, restoring the
    previous handlers when the with-block exits (even on error)."""
    # signal.signal() returns the handler it replaces; remember both.
    previous = {
        signal.SIGINT: signal.signal(signal.SIGINT, handler),
        signal.SIGTERM: signal.signal(signal.SIGTERM, handler),
    }
    try:
        yield
    finally:
        for signum, old_handler in previous.items():
            signal.signal(signum, old_handler)
def run_ingest_consumer(
    commit_batch_size,
    consumer_group,
    consumer_type,
    max_fetch_time_seconds,
    initial_offset_reset="latest",
    is_shutdown_requested=lambda: False,
):
    """
    Handles events coming via a kafka queue.

    The events should have already been processed (normalized... ) upstream (by Relay).

    :param commit_batch_size: the number of message the consumer will try to process/commit in one loop
    :param consumer_group: kafka consumer group name
    :param consumer_type: an enumeration defining the types of ingest messages see `ConsumerType`
    :param max_fetch_time_seconds: the maximum number of seconds a consume operation will be blocked waiting
        for the specified commit_batch_size number of messages to appear in the queue before it returns. At the
        end of the specified time the consume operation will return however many messages it has ( including
        an empty array if no new messages are available).
    :param initial_offset_reset: offset reset policy when there's no available offset for the consumer
    :param is_shutdown_requested: Callable[[],bool] predicate checked after each loop, if it returns
        True the forwarder stops (by default is lambda: False). In normal operation this should be left to default.
        For unit testing it offers a way to cleanly stop the forwarder after some particular condition is achieved.
    """

    logger.debug("Starting ingest-consumer...")
    consumer = _create_consumer(consumer_group, consumer_type, initial_offset_reset)

    consumer.subscribe([ConsumerType.get_topic_name(consumer_type)])

    # setup a flag to mark termination signals received, see below why we use an array
    termination_signal_received = [False]

    def termination_signal_handler(_sig_id, _frame):
        """
        Function to use a hook for SIGINT and SIGTERM

        This signal handler only remembers that the signal was emitted.
        The batch processing loop detects that the signal was emitted
        and stops once the whole batch is processed.
        """
        # We need to use an array so that terminal_signal_received is not a
        # local variable assignment, but a lookup in the clojure's outer scope.
        termination_signal_received[0] = True

    with set_termination_request_handlers(termination_signal_handler):
        while not (is_shutdown_requested() or termination_signal_received[0]):
            # get up to commit_batch_size messages
            messages = consumer.consume(
                num_messages=commit_batch_size, timeout=max_fetch_time_seconds
            )

            for message in messages:
                message_error = message.error()
                if message_error is not None:
                    logger.error(
                        "Received message with error on %s, error:'%s'",
                        consumer_type,
                        message_error,
                    )
                    # Raising aborts the whole batch without committing, so
                    # already-processed messages in it will be re-consumed
                    # (the dedup cache below guards against double work).
                    raise ValueError(
                        "Bad message received from consumer", consumer_type, message_error
                    )

                # Rebinds `message` from the kafka Message to the decoded dict.
                message = msgpack.unpackb(message.value(), use_list=False)
                body = message["payload"]
                start_time = float(message["start_time"])
                event_id = message["event_id"]
                project_id = message["project_id"]

                # check that we haven't already processed this event (a previous instance of the forwarder
                # died before it could commit the event queue offset)
                deduplication_key = "ev:{}:{}".format(project_id, event_id)
                if cache.get(deduplication_key) is not None:
                    logger.warning(
                        "pre-process-forwarder detected a duplicated event"
                        " with id:%s for project:%s.",
                        event_id,
                        project_id,
                    )
                    continue

                # Stage the payload in the event cache, then hand off to the
                # asynchronous preprocessing task.
                cache_key = cache_key_from_project_id_and_event_id(
                    project_id=project_id, event_id=event_id
                )
                cache_timeout = 3600
                default_cache.set(cache_key, body, cache_timeout, raw=True)
                preprocess_event.delay(
                    cache_key=cache_key, start_time=start_time, event_id=event_id
                )

                # remember for an 1 hour that we saved this event (deduplication protection)
                cache.set(deduplication_key, "", 3600)

            if len(messages) > 0:
                # we have read some messages in the previous consume, commit the offset
                consumer.commit(asynchronous=False)

    logger.debug("Closing ingest-consumer %s...", consumer_type)
    consumer.close()
| bsd-3-clause |
michaelhowden/eden | modules/s3/s3rest.py | 5 | 73202 | # -*- coding: utf-8 -*-
""" S3 RESTful API
@copyright: 2009-2015 (c) Sahana Software Foundation
@license: MIT
Permission is hereby granted, free of charge, to any person
obtaining a copy of this software and associated documentation
files (the "Software"), to deal in the Software without
restriction, including without limitation the rights to use,
copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the
Software is furnished to do so, subject to the following
conditions:
The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
OTHER DEALINGS IN THE SOFTWARE.
"""
__all__ = ("S3Request",
"S3Method",
"s3_request",
)
import datetime
import os
import re
import sys
import time
import types
try:
from cStringIO import StringIO # Faster, where available
except:
from StringIO import StringIO
try:
import json # try stdlib (Python 2.6)
except ImportError:
try:
import simplejson as json # try external module
except:
import gluon.contrib.simplejson as json # fallback to pure-Python module
from gluon import *
# Here are dependencies listed for reference:
#from gluon.globals import current
#from gluon.html import URL
#from gluon.http import HTTP, redirect
from gluon.storage import Storage
from s3datetime import s3_parse_datetime
from s3resource import S3Resource
from s3utils import s3_get_extension, s3_remove_last_record_id, s3_store_last_record_id
# Matches URL filter expressions like "field.subfield" or "(...)" queries.
REGEX_FILTER = re.compile(".+\..+|.*\(.+\).*")

# Debug toggle: when True, S3REST messages are echoed to stderr.
# (Python 2 print-chevron syntax; this module targets Python 2.)
DEBUG = False
if DEBUG:
    print >> sys.stderr, "S3REST: DEBUG MODE"
    def _debug(m):
        print >> sys.stderr, m
else:
    # No-op when debugging is off
    _debug = lambda m: None
# =============================================================================
class S3Request(object):
"""
Class to handle RESTful requests
"""
INTERACTIVE_FORMATS = ("html", "iframe", "popup", "dl")
DEFAULT_REPRESENTATION = "html"
# -------------------------------------------------------------------------
    def __init__(self,
                 prefix=None,
                 name=None,
                 r=None,
                 c=None,
                 f=None,
                 args=None,
                 vars=None,
                 extension=None,
                 get_vars=None,
                 post_vars=None,
                 http=None):
        """
        Constructor

        @param prefix: the table name prefix
        @param name: the table name
        @param r: an S3Request to copy prefix/name from
        @param c: the controller prefix
        @param f: the controller function
        @param args: list of request arguments
        @param vars: dict of request variables
        @param extension: the format extension (representation)
        @param get_vars: the URL query variables (overrides vars)
        @param post_vars: the POST variables (overrides vars)
        @param http: the HTTP method (GET, PUT, POST, or DELETE)

        @note: all parameters fall back to the attributes of the
               current web2py request object
        @raise KeyError: if a single record was expected but not found,
               or a link-table entry is missing
        @raise AttributeError: if the requested component does not exist
        """
        # Common settings

        # XSLT Paths
        self.XSLT_PATH = "static/formats"
        self.XSLT_EXTENSION = "xsl"

        # Attached files
        self.files = Storage()

        # Allow override of controller/function
        # NOTE(review): reads self.controller/self.function as fallback --
        # presumably supplied as class attributes/properties outside this
        # chunk; verify before refactoring.
        self.controller = c or self.controller
        self.function = f or self.function
        if "." in self.function:
            # Function may carry a format extension ("fn.xml")
            self.function, ext = self.function.split(".", 1)
            if extension is None:
                extension = ext
        if c or f:
            # Overriding c/f re-checks read permission for the new target
            auth = current.auth
            if not auth.permission.has_permission("read",
                                                  c=self.controller,
                                                  f=self.function):
                auth.permission.fail()

        # Allow override of request args/vars
        if args is not None:
            if isinstance(args, (list, tuple)):
                self.args = args
            else:
                self.args = [args]
        # vars is always get_vars + post_vars, with explicit overrides
        # taking precedence over the current web2py request
        if get_vars is not None:
            self.get_vars = get_vars
            self.vars = get_vars.copy()
            if post_vars is not None:
                self.vars.update(post_vars)
            else:
                self.vars.update(self.post_vars)
        if post_vars is not None:
            self.post_vars = post_vars
            if get_vars is None:
                self.vars = post_vars.copy()
                self.vars.update(self.get_vars)
        if get_vars is None and post_vars is None and vars is not None:
            self.vars = vars
            self.get_vars = vars
            self.post_vars = Storage()

        self.extension = extension or current.request.extension
        self.http = http or current.request.env.request_method

        # Main resource attributes
        if r is not None:
            if not prefix:
                prefix = r.prefix
            if not name:
                name = r.name
        self.prefix = prefix or self.controller
        self.name = name or self.function

        # Parse the request (sets id/component_name/component_id/method/
        # representation from self.args)
        self.__parse()
        self.custom_action = None
        get_vars = Storage(self.get_vars)

        # Interactive representation format?
        self.interactive = self.representation in self.INTERACTIVE_FORMATS

        # Show information on deleted records?
        include_deleted = False
        if self.representation == "xml" and "include_deleted" in get_vars:
            include_deleted = True
        if "components" in get_vars:
            cnames = get_vars["components"]
            if isinstance(cnames, list):
                cnames = ",".join(cnames)
            cnames = cnames.split(",")
            # "components=none" disables all components
            if len(cnames) == 1 and cnames[0].lower() == "none":
                cnames = []
        else:
            cnames = None

        # Append component ID to the URL query
        component_name = self.component_name
        component_id = self.component_id
        if component_name and component_id:
            varname = "%s.id" % component_name
            if varname in get_vars:
                var = get_vars[varname]
                if not isinstance(var, (list, tuple)):
                    var = [var]
                var.append(component_id)
                get_vars[varname] = var
            else:
                get_vars[varname] = component_id

        # Define the target resource
        _filter = current.response.s3.filter
        components = component_name
        if components is None:
            components = cnames

        # The "review" method operates on unapproved records only
        if self.method == "review":
            approved, unapproved = False, True
        else:
            approved, unapproved = True, False

        tablename = "%s_%s" % (self.prefix, self.name)
        self.resource = S3Resource(tablename,
                                   id=self.id,
                                   filter=_filter,
                                   vars=get_vars,
                                   components=components,
                                   approved=approved,
                                   unapproved=unapproved,
                                   include_deleted=include_deleted,
                                   context=True,
                                   filter_component=component_name,
                                   )

        self.tablename = self.resource.tablename
        table = self.table = self.resource.table

        # Try to load the master record
        self.record = None
        uid = self.vars.get("%s.uid" % self.name)
        if self.id or uid and not isinstance(uid, (list, tuple)):
            # Single record expected
            self.resource.load()
            if len(self.resource) == 1:
                self.record = self.resource.records().first()
                _id = table._id.name
                self.id = self.record[_id]
                # Remember as "last record" for session-based navigation
                s3_store_last_record_id(self.tablename, self.id)
            else:
                raise KeyError(current.ERROR.BAD_RECORD)

        # Identify the component
        self.component = None
        if self.component_name:
            c = self.resource.components.get(self.component_name)
            if c:
                self.component = c
            else:
                error = "%s not a component of %s" % (self.component_name,
                                                      self.resource.tablename)
                raise AttributeError(error)

        # Identify link table and link ID
        self.link = None
        self.link_id = None

        if self.component is not None:
            self.link = self.component.link
        if self.link and self.id and self.component_id:
            self.link_id = self.link.link_id(self.id, self.component_id)
            if self.link_id is None:
                raise KeyError(current.ERROR.BAD_RECORD)

        # Store method handlers
        self._handler = Storage()
        set_handler = self.set_handler

        # Built-in transformable handlers (XML/XSLT-based formats)
        set_handler("export_tree", self.get_tree,
                    http=("GET",), transform=True)
        set_handler("import_tree", self.put_tree,
                    http=("GET", "PUT", "POST"), transform=True)
        set_handler("fields", self.get_fields,
                    http=("GET",), transform=True)
        set_handler("options", self.get_options,
                    http=("GET",), transform=True)

        sync = current.sync
        set_handler("sync", sync,
                    http=("GET", "PUT", "POST",), transform=True)
        set_handler("sync_log", sync.log,
                    http=("GET",), transform=True)
        set_handler("sync_log", sync.log,
                    http=("GET",), transform=False)

        # Initialize CRUD
        self.resource.crud(self, method="_init")
        if self.component is not None:
            self.component.crud(self, method="_init")
# -------------------------------------------------------------------------
# Method handler configuration
# -------------------------------------------------------------------------
def set_handler(self, method, handler,
http=None,
representation=None,
transform=False):
"""
Set a method handler for this request
@param method: the method name
@param handler: the handler function
@type handler: handler(S3Request, **attr)
"""
HTTP = ("GET", "PUT", "POST", "DELETE")
if http is None:
http = HTTP
if not isinstance(http, (set, tuple, list)):
http = [http]
if transform:
representation = ["__transform__"]
elif representation is None:
representation = [self.DEFAULT_REPRESENTATION]
if not isinstance(representation, (set, tuple, list)):
representation = [representation]
if not isinstance(method, (set, tuple, list)):
method = [method]
handlers = self._handler
for h in http:
if h not in HTTP:
continue
if h not in handlers:
handlers[h] = Storage()
format_hooks = handlers[h]
for r in representation:
if r not in format_hooks:
format_hooks[r] = Storage()
method_hooks = format_hooks[r]
for m in method:
if m is None:
_m = "__none__"
else:
_m = m
method_hooks[_m] = handler
return
# -------------------------------------------------------------------------
def get_handler(self, method, transform=False):
"""
Get a method handler for this request
@param method: the method name
@return: the handler function
"""
http = self.http
representation = self.representation
if transform:
representation = "__transform__"
elif representation is None:
representation = self.DEFAULT_REPRESENTATION
if method is None:
method = "__none__"
if http not in self._handler:
http = "GET"
if http not in self._handler:
return None
else:
format_hooks = self._handler[http]
if representation not in format_hooks:
representation = self.DEFAULT_REPRESENTATION
if representation not in format_hooks:
return None
else:
method_hooks = format_hooks[representation]
if method not in method_hooks:
method = "__none__"
if method not in method_hooks:
return None
else:
handler = method_hooks[method]
if isinstance(handler, (type, types.ClassType)):
return handler()
else:
return handler
# -------------------------------------------------------------------------
    def get_widget_handler(self, method):
        """
        Get the widget handler for a method

        @param method: the widget method
        @return: the handler (classes are instantiated), or None for
                 unsupported HTTP verbs
        """
        # Target the component (or its link table) when present,
        # otherwise the master resource
        if self.component:
            resource = self.component
            if resource.link:
                resource = resource.link
        else:
            resource = self.resource
        prefix, name = self.prefix, self.name
        component_name = self.component_name

        custom_action = current.s3db.get_method(prefix,
                                                name,
                                                component_name=component_name,
                                                method=method)
        http = self.http
        handler = None

        if method and custom_action:
            handler = custom_action

        # NOTE(review): the GET and PUT branches below unconditionally
        # overwrite `handler`, so a custom_action set above only survives
        # for verbs that fall through -- confirm this is intended.
        if http == "GET":
            if not method:
                # No explicit method: read for a single record, else list
                if resource.count() == 1:
                    method = "read"
                else:
                    method = "list"
            transform = self.transformable()
            handler = self.get_handler(method, transform=transform)
        elif http == "PUT":
            transform = self.transformable(method="import")
            handler = self.get_handler(method, transform=transform)
        elif http == "POST":
            transform = self.transformable(method="import")
            return self.get_handler(method, transform=transform)
        elif http == "DELETE":
            if method:
                return self.get_handler(method)
            else:
                return self.get_handler("delete")
        else:
            return None

        # Fall back to generic CRUD when nothing was registered
        if handler is None:
            handler = resource.crud
        if isinstance(handler, (type, types.ClassType)):
            handler = handler()
        return handler
# -------------------------------------------------------------------------
# Request Parser
# -------------------------------------------------------------------------
    def __parse(self):
        """ Parses the web2py request object

            Sets self.id, self.component_name, self.component_id,
            self.method and self.representation from self.args, following
            the /[id]/[component|method]/[id|method] URL pattern.
        """
        self.id = None
        self.component_name = None
        self.component_id = None
        self.method = None

        representation = self.extension

        # Get the names of all components
        tablename = "%s_%s" % (self.prefix, self.name)
        components = current.s3db.get_components(tablename)
        if components:
            components = components.keys()
        else:
            components = []

        # Map request args, catch extensions
        # Pairs each non-numeric arg ("method") with the numeric arg
        # that follows it (record id), e.g. a/1/b/2 -> [(a,1),(b,2)]
        f = []
        append = f.append
        args = self.args
        if len(args) > 4:
            # Only the first four args are significant
            args = args[:4]
        method = self.name
        for arg in args:
            if "." in arg:
                # Strip a format extension off the arg
                arg, representation = arg.rsplit(".", 1)
            if method is None:
                method = arg
            elif arg.isdigit():
                append((method, arg))
                method = None
            else:
                append((method, None))
                method = arg
        if method:
            append((method, None))

        # First pair holds the master record ID (if numeric)
        self.id = f[0][1]

        # Sort out component name and method
        l = len(f)
        if l > 1:
            m = f[1][0].lower()
            i = f[1][1]
            if m in components:
                self.component_name = m
                self.component_id = i
            else:
                self.method = m
                if not self.id:
                    self.id = i
        if self.component_name and l > 2:
            # Third pair is a method applied to the component
            self.method = f[2][0].lower()
            if not self.component_id:
                self.component_id = f[2][1]

        # Representation with fallback to the default
        representation = s3_get_extension(self)
        if representation:
            self.representation = representation
        else:
            self.representation = self.DEFAULT_REPRESENTATION
        return
# -------------------------------------------------------------------------
# REST Interface
# -------------------------------------------------------------------------
    def __call__(self, **attr):
        """
        Execute this request: enforce a record ID where required, run the
        prep callback, dispatch to the method handler, run the postp
        callback, and finally redirect if self.next was set.

        @param attr: Parameters for the method handler
        @return: the handler output (typically a dict for the view)
        """
        response = current.response
        s3 = response.s3

        self.next = None

        bypass = False
        output = None
        preprocess = None
        postprocess = None

        representation = self.representation

        # Enforce primary record ID
        if not self.id and representation == "html":
            if self.component or self.method in ("read", "profile", "update"):
                count = self.resource.count()
                if self.vars is not None and count == 1:
                    # Exactly one match: use it implicitly
                    self.resource.load()
                    self.record = self.resource._rows[0]
                    self.id = self.record.id
                else:
                    #current.session.error = current.ERROR.BAD_RECORD
                    redirect(URL(r=self, c=self.prefix, f=self.name))

        # Pre-process
        if s3 is not None:
            preprocess = s3.get("prep")
        if preprocess:
            pre = preprocess(self)
            # Re-read representation after preprocess:
            representation = self.representation
            if pre and isinstance(pre, dict):
                bypass = pre.get("bypass", False) is True
                output = pre.get("output")
                if not bypass:
                    success = pre.get("success", True)
                    if not success:
                        if representation == "html" and output:
                            if isinstance(output, dict):
                                output.update(r=self)
                            return output
                        else:
                            status = pre.get("status", 400)
                            message = pre.get("message",
                                              current.ERROR.BAD_REQUEST)
                            self.error(status, message)
            elif not pre:
                # A falsy prep result aborts the request
                self.error(400, current.ERROR.BAD_REQUEST)

        # Default view
        if representation not in ("html", "popup"):
            response.view = "xml.html"

        # Content type
        response.headers["Content-Type"] = s3.content_type.get(representation,
                                                               "text/html")

        # Custom action?
        if not self.custom_action:
            action = current.s3db.get_method(self.prefix,
                                             self.name,
                                             component_name=self.component_name,
                                             method=self.method)
            if isinstance(action, (type, types.ClassType)):
                self.custom_action = action()
            else:
                self.custom_action = action

        # Method handling
        http = self.http
        handler = None
        if not bypass:
            # Find the method handler
            if self.method and self.custom_action:
                handler = self.custom_action
            elif http == "GET":
                handler = self.__GET()
            elif http == "PUT":
                handler = self.__PUT()
            elif http == "POST":
                handler = self.__POST()
            elif http == "DELETE":
                handler = self.__DELETE()
            else:
                self.error(405, current.ERROR.BAD_METHOD)
            # Invoke the method handler
            if handler is not None:
                output = handler(self, **attr)
            else:
                # Fall back to CRUD
                output = self.resource.crud(self, **attr)

        # Post-process
        if s3 is not None:
            postprocess = s3.get("postp")
        if postprocess is not None:
            output = postprocess(self, output)
        if output is not None and isinstance(output, dict):
            # Put a copy of r into the output for the view
            # to be able to make use of it
            output.update(r=self)

        # Redirection
        # Suppressed for plain GETs (except "clear") and for forms with errors
        if self.next is not None and \
           (self.http != "GET" or self.method == "clear"):
            if isinstance(output, dict):
                form = output.get("form")
                if form:
                    if not hasattr(form, "errors"):
                        # Form embedded in a DIV together with other components
                        form = form.elements('form', first_only=True)
                        form = form[0] if form else None
                    if form and form.errors:
                        return output

            session = current.session
            session.flash = response.flash
            session.confirmation = response.confirmation
            session.error = response.error
            session.warning = response.warning
            redirect(self.next)

        return output
# -------------------------------------------------------------------------
    def __GET(self, resource=None):
        """
        Get the GET method handler

        Resolves the effective method (read/list/export_tree/...) from the
        request state, loading the target record as a side effect where a
        single record is implied.
        """
        method = self.method
        transform = False
        if method is None or method in ("read", "display", "update"):
            if self.transformable():
                # XSLT-transformable format -> XML tree export
                method = "export_tree"
                transform = True
            elif self.component:
                resource = self.resource
                if self.interactive and resource.count() == 1:
                    # Load the record
                    if not resource._rows:
                        resource.load(start=0, limit=1)
                    if resource._rows:
                        self.record = resource._rows[0]
                        self.id = resource.get_id()
                        self.uid = resource.get_uid()
                if self.component.multiple and not self.component_id:
                    method = "list"
                else:
                    method = "read"
            elif self.id or method in ("read", "display", "update"):
                # Enforce single record
                resource = self.resource
                if not resource._rows:
                    resource.load(start=0, limit=1)
                if resource._rows:
                    self.record = resource._rows[0]
                    self.id = resource.get_id()
                    self.uid = resource.get_uid()
                else:
                    self.error(404, current.ERROR.BAD_RECORD)
                method = "read"
            else:
                method = "list"
        elif method in ("create", "update"):
            if self.transformable(method="import"):
                method = "import_tree"
                transform = True
        elif method == "delete":
            return self.__DELETE()
        elif method == "clear" and not self.component:
            # Forget the "last record" and return to the index view
            s3_remove_last_record_id(self.tablename)
            self.next = URL(r=self, f=self.name)
            return lambda r, **attr: None
        elif self.transformable():
            transform = True
        return self.get_handler(method, transform=transform)
# -------------------------------------------------------------------------
def __PUT(self):
"""
Get the PUT method handler
"""
method = self.method
transform = self.transformable(method="import")
if not self.method and transform:
method = "import_tree"
return self.get_handler(method, transform=transform)
# -------------------------------------------------------------------------
    def __POST(self):
        """
        Get the POST method handler

        Deletions are delegated to __DELETE, transformable imports to
        __PUT; everything else is handled like a GET after optionally
        re-activating a matching deleted record.
        """
        method = self.method

        if method == "delete":
            return self.__DELETE()
        else:
            if self.transformable(method="import"):
                return self.__PUT()
            else:
                post_vars = self.post_vars
                table = self.target()[2]
                # If an original (deleted) record matches the POSTed data,
                # inject its ID so the update re-activates it instead of
                # creating a duplicate.
                if "deleted" in table and "id" not in post_vars: # and "uuid" not in post_vars:
                    original = S3Resource.original(table, post_vars)
                    if original and original.deleted:
                        self.post_vars.update(id=original.id)
                        self.vars.update(id=original.id)
                return self.__GET()
# -------------------------------------------------------------------------
def __DELETE(self):
"""
Get the DELETE method handler
"""
if self.method:
return self.get_handler(self.method)
else:
return self.get_handler("delete")
# -------------------------------------------------------------------------
# Built-in method handlers
# -------------------------------------------------------------------------
@staticmethod
def get_tree(r, **attr):
    """
        XML Element tree export method

        @param r: the S3Request instance
        @param attr: controller attributes

        @return: the exported tree (XML or JSON string, depending on
                 the requested representation)
    """

    get_vars = r.get_vars
    args = Storage()

    # Slicing
    start = get_vars.get("start")
    if start is not None:
        try:
            start = int(start)
        except ValueError:
            # Invalid value => ignore (no slicing)
            start = None
    limit = get_vars.get("limit")
    if limit is not None:
        try:
            limit = int(limit)
        except ValueError:
            # Invalid value => ignore (no limit)
            limit = None

    # msince (only records modified after this datetime)
    msince = get_vars.get("msince")
    if msince is not None:
        msince = s3_parse_datetime(msince)

    # Show IDs (default: False)
    if "show_ids" in get_vars:
        if get_vars["show_ids"].lower() == "true":
            current.xml.show_ids = True

    # Show URLs (default: True)
    if "show_urls" in get_vars:
        if get_vars["show_urls"].lower() == "false":
            current.xml.show_urls = False

    # Maxbounds (default: False)
    maxbounds = False
    if "maxbounds" in get_vars:
        if get_vars["maxbounds"].lower() == "true":
            maxbounds = True
    if r.representation in ("gpx", "osm"):
        # GPX and OSM formats always include bounds
        maxbounds = True

    # Components of the master resource (tablenames)
    if "mcomponents" in get_vars:
        mcomponents = get_vars["mcomponents"]
        if str(mcomponents).lower() == "none":
            mcomponents = None
        elif not isinstance(mcomponents, list):
            mcomponents = mcomponents.split(",")
    else:
        mcomponents = [] # all

    # Components of referenced resources (tablenames)
    if "rcomponents" in get_vars:
        rcomponents = get_vars["rcomponents"]
        if str(rcomponents).lower() == "none":
            rcomponents = None
        elif not isinstance(rcomponents, list):
            rcomponents = rcomponents.split(",")
    else:
        rcomponents = None

    # Maximum reference resolution depth
    if "maxdepth" in get_vars:
        try:
            args["maxdepth"] = int(get_vars["maxdepth"])
        except ValueError:
            # Invalid value => use the exporter's default
            pass

    # References to resolve (field names)
    if "references" in get_vars:
        references = get_vars["references"]
        if str(references).lower() == "none":
            references = []
        elif not isinstance(references, list):
            references = references.split(",")
    else:
        references = None # all

    # Export field selection
    if "fields" in get_vars:
        fields = get_vars["fields"]
        if str(fields).lower() == "none":
            fields = []
        elif not isinstance(fields, list):
            fields = fields.split(",")
    else:
        fields = None # all

    # Find XSLT stylesheet
    stylesheet = r.stylesheet()

    # Add stylesheet parameters
    if stylesheet is not None:
        if r.component:
            args.update(id=r.id,
                        component=r.component.tablename)
            if r.component.alias:
                args.update(alias=r.component.alias)
        mode = get_vars.get("xsltmode")
        if mode is not None:
            args.update(mode=mode)

    # Set response headers
    response = current.response
    s3 = response.s3
    headers = response.headers
    representation = r.representation
    if representation in s3.json_formats:
        as_json = True
        default = "application/json"
    else:
        as_json = False
        default = "text/xml"

    headers["Content-Type"] = s3.content_type.get(representation,
                                                  default)

    # Export the resource
    output = r.resource.export_xml(start=start,
                                   limit=limit,
                                   msince=msince,
                                   fields=fields,
                                   dereference=True,
                                   # maxdepth in args
                                   references=references,
                                   mcomponents=mcomponents,
                                   rcomponents=rcomponents,
                                   stylesheet=stylesheet,
                                   as_json=as_json,
                                   maxbounds=maxbounds,
                                   **args)
    # Transformation error?
    if not output:
        r.error(400, "XSLT Transformation Error: %s " % current.xml.error)

    return output
# -------------------------------------------------------------------------
@staticmethod
def put_tree(r, **attr):
    """
        XML Element tree import method

        @param r: the S3Request instance
        @param attr: controller attributes

        @return: the import result (as returned by import_xml), or
                 the resource structure if no source was specified
    """

    get_vars = r.get_vars

    # Skip invalid records?
    ignore_errors = "ignore_errors" in get_vars

    # Find all source names in the URL vars
    def findnames(get_vars, name):
        """ Parse a comma-separated list of (prefix)name items """
        nlist = []
        if name in get_vars:
            names = get_vars[name]
            if isinstance(names, (list, tuple)):
                names = ",".join(names)
            names = names.split(",")
            for n in names:
                # Use startswith rather than n[0] so that empty items
                # (e.g. from a trailing comma) do not raise IndexError
                if n.startswith("(") and ")" in n[1:]:
                    nlist.append(n[1:].split(")", 1))
                else:
                    nlist.append([None, n])
        return nlist

    filenames = findnames(get_vars, "filename")
    fetchurls = findnames(get_vars, "fetchurl")
    source_url = None

    # Get the source(s)
    s3 = current.response.s3
    json_formats = s3.json_formats
    csv_formats = s3.csv_formats

    source = []
    format = r.representation
    if format in json_formats or format in csv_formats:
        if filenames:
            try:
                for f in filenames:
                    source.append((f[0], open(f[1], "rb")))
            except:
                # Best-effort: unreadable file(s) => no source
                source = []
        elif fetchurls:
            import urllib
            try:
                for u in fetchurls:
                    source.append((u[0], urllib.urlopen(u[1])))
            except:
                # Best-effort: unreachable URL(s) => no source
                source = []
        elif r.http != "GET":
            source = r.read_body()
    else:
        if filenames:
            source = filenames
        elif fetchurls:
            source = fetchurls
            # Assume only 1 URL for GeoRSS feed caching
            source_url = fetchurls[0][1]
        elif r.http != "GET":
            source = r.read_body()

    if not source:
        if filenames or fetchurls:
            # Error: source not found
            r.error(400, "Invalid source")
        else:
            # No source specified => return resource structure
            return r.get_struct(r, **attr)

    # Find XSLT stylesheet
    stylesheet = r.stylesheet(method="import")

    # Target IDs
    if r.method == "create":
        _id = None
    else:
        _id = r.id

    # Transformation mode?
    if "xsltmode" in get_vars:
        args = dict(xsltmode=get_vars["xsltmode"])
    else:
        args = dict()

    # These 3 options are called by gis.show_map() & read by the
    # GeoRSS Import stylesheet to populate the gis_cache table
    # Source URL: For GeoRSS/KML Feed caching
    if source_url:
        args["source_url"] = source_url
    # Data Field: For GeoRSS/KML Feed popups
    if "data_field" in get_vars:
        args["data_field"] = get_vars["data_field"]
    # Image Field: For GeoRSS/KML Feed popups
    if "image_field" in get_vars:
        args["image_field"] = get_vars["image_field"]

    # Format type?
    if format in json_formats:
        format = "json"
    elif format in csv_formats:
        format = "csv"
    else:
        format = "xml"

    try:
        output = r.resource.import_xml(source,
                                       id=_id,
                                       format=format,
                                       files=r.files,
                                       stylesheet=stylesheet,
                                       ignore_errors=ignore_errors,
                                       **args)
    except IOError:
        # IOError from the importer is treated as authorization failure
        current.auth.permission.fail()
    except SyntaxError:
        e = sys.exc_info()[1]
        if hasattr(e, "message"):
            e = e.message
        r.error(400, e)

    return output
# -------------------------------------------------------------------------
@staticmethod
def get_struct(r, **attr):
    """
        Resource structure introspection method

        @param r: the S3Request instance
        @param attr: controller attributes
    """
    response = current.response

    # Choose output encoding by requested representation
    as_json = r.representation in response.s3.json_formats
    content_type = "application/json" if as_json else "text/xml"

    get_vars = r.get_vars

    def flag(name):
        """ Read a boolean URL option (default: False) """
        return str(get_vars.get(name, False)).lower() == "true"

    output = r.resource.export_struct(meta=flag("meta"),
                                      options=flag("options"),
                                      references=flag("references"),
                                      stylesheet=r.stylesheet(),
                                      as_json=as_json)
    if output is None:
        # Transformation error
        r.error(400, current.xml.error)

    response.headers["Content-Type"] = content_type
    return output
# -------------------------------------------------------------------------
@staticmethod
def get_fields(r, **attr):
    """
        Resource structure introspection method (single table)

        @param r: the S3Request instance
        @param attr: controller attributes
    """
    fmt = r.representation
    if fmt == "xml":
        content_type = "text/xml"
        output = r.resource.export_fields(component=r.component_name)
    elif fmt == "s3json":
        content_type = "application/json"
        output = r.resource.export_fields(component=r.component_name,
                                          as_json=True)
    else:
        # Unsupported format
        r.error(501, current.ERROR.BAD_FORMAT)
    current.response.headers["Content-Type"] = content_type
    return output
# -------------------------------------------------------------------------
@staticmethod
def get_options(r, **attr):
    """
        Field options introspection method (single table)

        @param r: the S3Request instance
        @param attr: controller attributes
    """
    get_vars = r.get_vars

    # Requested fields (comma-separated, possibly repeated var)
    if "field" in get_vars:
        items = get_vars["field"]
        if not isinstance(items, (list, tuple)):
            items = [items]
        fields = []
        for item in items:
            f = item.split(",")
            if f:
                fields.extend(f)
    else:
        fields = None

    def is_set(name):
        """ Read a boolean URL option (default: False) """
        return name in get_vars and \
               get_vars[name].lower() not in ("false", "0")

    hierarchy = is_set("hierarchy")
    only_last = is_set("only_last")
    show_uids = is_set("show_uids")

    representation = r.representation
    if representation == "xml":
        # only_last is a JSON-only option
        only_last = False
        as_json = False
        content_type = "text/xml"
    elif representation == "s3json":
        # show_uids is an XML-only option
        show_uids = False
        as_json = True
        content_type = "application/json"
    else:
        r.error(501, current.ERROR.BAD_FORMAT)

    output = r.resource.export_options(component=r.component_name,
                                       fields=fields,
                                       show_uids=show_uids,
                                       only_last=only_last,
                                       hierarchy=hierarchy,
                                       as_json=as_json)
    current.response.headers["Content-Type"] = content_type
    return output
# -------------------------------------------------------------------------
# Tools
# -------------------------------------------------------------------------
def factory(self, **args):
    """
        Generate a new request for the same resource

        @param args: arguments for the request constructor

        @return: a new request instance (via s3_request)
    """
    # Pass this request as prototype (r=self) so that the new
    # request inherits the current controller/function context
    return s3_request(r=self, **args)
# -------------------------------------------------------------------------
def __getattr__(self, key):
    """
        Called upon S3Request.<key> - looks up the value for the <key>
        attribute. Falls back to current.request if the attribute is
        not defined in this S3Request.

        @param key: the key to lookup

        @raise AttributeError: if the attribute is defined neither
                               here nor in current.request
    """
    if key in self.__dict__:
        # Normally unreachable (__getattr__ is only invoked after
        # regular attribute lookup has failed), kept as safeguard
        return self.__dict__[key]

    # Use a unique sentinel rather than None, since None can be a
    # legitimate attribute value in current.request
    sentinel = object()
    value = getattr(current.request, key, sentinel)
    if value is sentinel:
        # Include the attribute name in the error for debuggability
        raise AttributeError(key)
    return value
# -------------------------------------------------------------------------
def transformable(self, method=None):
    """
        Check the request for a transformable format

        @param method: "import" for import methods, else None
    """
    if self.representation in ("html", "aadata", "popup", "iframe"):
        # Interactive formats are never transformable
        return False
    stylesheet = self.stylesheet(method=method, skip_error=True)
    # Transformable if a stylesheet is available, or if the
    # representation is native S3XML (needs no stylesheet)
    return bool(stylesheet) or self.representation == "xml"
# -------------------------------------------------------------------------
def actuate_link(self, component_id=None):
    """
        Determine whether to actuate a link or not

        @param component_id: the component_id (if not self.component_id)

        @return: True if the linked record shall be addressed rather
                 than the link record itself, else False (see target())
    """
    if not component_id:
        component_id = self.component_id
    if self.component:
        # Whether a particular record is targeted
        single = component_id is not None
        component = self.component
        if component.link:
            actuate = self.component.actuate
            if "linked" in self.get_vars:
                # URL var overrides the component's actuate setting
                linked = self.get_vars.get("linked", False)
                linked = linked in ("true", "True")
                actuate = "replace" if linked else "hide"
            if actuate == "link":
                # Address the linked record for single-record access,
                # the link itself otherwise (inverted for deletions)
                if self.method != "delete" and self.http != "DELETE":
                    return single
                else:
                    return not single
            elif actuate == "replace":
                return True
            #elif actuate == "embed":
                #raise NotImplementedError
            else:
                # actuate == "hide" (or unknown)
                return False
        else:
            # Not a link table component
            return True
    else:
        # Not a component
        return False
# -------------------------------------------------------------------------
@staticmethod
def unauthorised():
    """
        Action upon unauthorised request
    """
    # Delegate the error response entirely to the permission framework
    current.auth.permission.fail()
# -------------------------------------------------------------------------
def error(self, status, message, tree=None, next=None):
    """
        Action upon error

        @param status: HTTP status code
        @param message: the error message
        @param tree: the tree causing the error
        @param next: URL to redirect to after storing the error in the
                     session (html representation only; defaults to the
                     index page of the current controller)

        @raise HTTP: for non-html representations, with a JSON message body
    """
    if self.representation == "html":
        # Interactive: store the message and redirect
        current.session.error = message
        if next is not None:
            redirect(next)
        else:
            redirect(URL(r=self, f="index"))
    else:
        # Non-interactive: raise HTTP status with JSON message body
        headers = {"Content-Type":"application/json"}
        current.log.error(message)
        raise HTTP(status,
                   body=current.xml.json_message(success=False,
                                                 statuscode=status,
                                                 message=message,
                                                 tree=tree),
                   web2py_error=message,
                   **headers)
# -------------------------------------------------------------------------
def url(self,
        id=None,
        component=None,
        component_id=None,
        target=None,
        method=None,
        representation=None,
        vars=None,
        host=None):
    """
        Returns the URL of this request, use parameters to override
        current requests attributes:

            - None to keep current attribute (default)
            - 0 or "" to set attribute to NONE
            - value to use explicit value

        @param id: the master record ID
        @param component: the component name
        @param component_id: the component ID
        @param target: the target record ID (choose automatically)
        @param method: the URL method
        @param representation: the representation for the URL
        @param vars: the URL query variables
        @param host: string to force absolute URL with host (True means http_host)

        Particular behavior:
            - changing the master record ID resets the component ID
            - removing the target record ID sets the method to None
            - removing the method sets the target record ID to None
            - [] as id will be replaced by the "[id]" wildcard
    """

    if vars is None:
        # NOTE(review): this shares self.get_vars, so the del of
        # "format" below mutates the request's own vars as a side
        # effect - confirm this is intended
        vars = self.get_vars
    elif vars and isinstance(vars, str):
        # We've come from a dataTable_vars which has the vars as
        # a JSON string, but with the wrong quotation marks
        vars = json.loads(vars.replace("'", "\""))

    # The representation is carried in the URL extension, not in vars
    if "format" in vars:
        del vars["format"]

    args = []
    cname = self.component_name

    # target
    if target is not None:
        # Assign the target ID to the component if one is addressed,
        # otherwise to the master record
        if cname and (component is None or component == cname):
            component_id = target
        else:
            id = target

    # method
    default_method = False
    if method is None:
        default_method = True
        method = self.method
    elif method == "":
        # Switch to list? (= method="" and no explicit target ID)
        if component_id is None:
            if self.component_id is not None:
                component_id = 0
            elif not self.component:
                if id is None:
                    if self.id is not None:
                        id = 0
        method = None

    # id
    if id is None:
        id = self.id
    elif id in (0, ""):
        id = None
    elif id in ([], "[id]", "*"):
        # Wildcard ID (e.g. for list URLs with row-wise substitution)
        id = "[id]"
        component_id = 0
    elif str(id) != str(self.id):
        # Different master record => reset the component ID
        component_id = 0

    # component
    if component is None:
        component = cname
    elif component == "":
        component = None
    if cname and cname != component or not component:
        component_id = 0

    # component_id
    if component_id is None:
        component_id = self.component_id
    elif component_id == 0:
        component_id = None
        if self.component_id and default_method:
            method = None

    if id is None and self.id and \
       (not component or not component_id) and default_method:
        method = None

    if id:
        args.append(id)
    if component:
        args.append(component)
    if component_id:
        args.append(component_id)
    if method:
        args.append(method)

    # representation
    if representation is None:
        representation = self.representation
    elif representation == "":
        representation = self.DEFAULT_REPRESENTATION
    f = self.function
    if not representation == self.DEFAULT_REPRESENTATION:
        # Append the format extension to the last URL arg, or to the
        # function name if there are no args
        if len(args) > 0:
            args[-1] = "%s.%s" % (args[-1], representation)
        else:
            f = "%s.%s" % (f, representation)

    return URL(r=self,
               c=self.controller,
               f=f,
               args=args,
               vars=vars,
               host=host)
# -------------------------------------------------------------------------
def target(self):
    """
        Get the target table of the current request

        @return: a tuple of (prefix, name, table, tablename) of the
                 target resource of this request

        @todo: update for link table support
    """
    component = self.component
    if component is None:
        # No component => the master resource is the target
        resource = self
    else:
        resource = component
        link = component.link
        if link and not self.actuate_link():
            # Link table is the target
            resource = link
    return (resource.prefix,
            resource.name,
            resource.table,
            resource.tablename)
# -------------------------------------------------------------------------
def stylesheet(self, method=None, skip_error=False):
    """
        Find the XSLT stylesheet for this request

        @param method: "import" for data imports, else None
        @param skip_error: do not raise an HTTP error status
                           if the stylesheet cannot be found

        @return: the stylesheet path, an open file object (for
                 stylesheets attached to the request), or None
    """
    stylesheet = None
    format = self.representation
    if self.component:
        resourcename = self.component.name
    else:
        resourcename = self.name

    # Native S3XML?
    if format == "xml":
        # No transformation required
        return stylesheet

    # External stylesheet specified?
    if "transform" in self.vars:
        return self.vars["transform"]

    # Stylesheet attached to the request?
    extension = self.XSLT_EXTENSION
    filename = "%s.%s" % (resourcename, extension)
    if filename in self.post_vars:
        p = self.post_vars[filename]
        import cgi
        if isinstance(p, cgi.FieldStorage) and p.filename:
            # Pass the uploaded stylesheet as file object
            stylesheet = p.file
        return stylesheet

    # Internal stylesheet?
    folder = self.folder
    path = self.XSLT_PATH
    if method != "import":
        # Only "import" and "export" stylesheets exist
        method = "export"
    filename = "%s.%s" % (method, extension)
    stylesheet = os.path.join(folder, path, format, filename)
    if not os.path.exists(stylesheet):
        if not skip_error:
            self.error(501, "%s: %s" % (current.ERROR.BAD_TEMPLATE,
                                        stylesheet))
        else:
            stylesheet = None

    return stylesheet
# -------------------------------------------------------------------------
def read_body(self):
    """
        Read data from request body

        @return: a list of sources: (name, source) tuples for
                 multipart requests, otherwise the request body
                 itself as single list element

        Side effect: stores uploaded files in self.files
    """
    self.files = Storage()
    content_type = self.env.get("content_type")

    source = []
    if content_type and content_type.startswith("multipart/"):
        import cgi
        ext = ".%s" % self.representation
        post_vars = self.post_vars
        for v in post_vars:
            p = post_vars[v]
            if isinstance(p, cgi.FieldStorage) and p.filename:
                # File upload => register it, use it as source if
                # its name matches the requested format extension
                self.files[p.filename] = p.file
                if p.filename.endswith(ext):
                    source.append((v, p.file))
            elif v.endswith(ext):
                # Inline content under a name matching the extension
                if isinstance(p, cgi.FieldStorage):
                    source.append((v, p.value))
                elif isinstance(p, basestring):
                    source.append((v, StringIO(p)))
    else:
        # Not multipart => use the raw request body
        s = self.body
        s.seek(0)
        source.append(s)

    return source
# -------------------------------------------------------------------------
def customise_resource(self, tablename=None):
    """
        Invoke the customization callback for a resource.

        @param tablename: the tablename of the resource; if called
                          without tablename it will invoke the callbacks
                          for the target resources of this request:
                            - master
                            - active component
                            - active link table
                          (in this order)

        Resource customization functions can be defined like:

            def customise_resource_my_table(r, tablename):
                current.s3db.configure(tablename,
                                       my_custom_setting = "example")
                return
            settings.customise_resource_my_table = \
                customise_resource_my_table

        @note: the hook itself can call r.customise_resource in order
               to cascade customizations as necessary
        @note: if a table is customised that is not currently loaded,
               then it will be loaded for this process
    """
    if tablename is None:
        customise = self.customise_resource
        # Master resource first...
        customise(self.resource.tablename)
        # ...then the active component...
        component = self.component
        if component:
            customise(component.tablename)
        # ...then the active link table
        link = self.link
        if link:
            customise(link.tablename)
    else:
        # Always load the model first (otherwise it would
        # override the custom settings when loaded later)
        db = current.db
        if tablename not in db:
            db.table(tablename)
        callback = current.deployment_settings.customise_resource(tablename)
        if callback:
            callback(self, tablename)
    return
# =============================================================================
class S3Method(object):
    """
        REST Method Handler Base Class

        Method handler classes should inherit from this class and
        implement the apply_method() method.

        @note: instances of subclasses don't have any of the instance
               attributes available until they actually get invoked
               from a request - i.e. apply_method() should never be
               called directly.
    """

    # -------------------------------------------------------------------------
    def __call__(self, r, method=None, widget_id=None, **attr):
        """
            Entry point for the REST interface

            @param r: the S3Request
            @param method: the method established by the REST interface
            @param widget_id: widget ID
            @param attr: dict of parameters for the method handler

            @return: output object to send to the view
        """

        # Environment of the request
        self.request = r

        # Settings
        response = current.response
        self.download_url = response.s3.download_url

        # Init
        self.next = None

        # Override request method
        if method is not None:
            self.method = method
        else:
            self.method = r.method

        # Find the target resource and record
        if r.component:
            component = r.component
            resource = component
            self.record_id = self._record_id(r)
            if not self.method:
                # Default method for components: list unless a
                # particular record is addressed
                if component.multiple and not r.component_id:
                    self.method = "list"
                else:
                    self.method = "read"
            if component.link:
                actuate_link = r.actuate_link()
                if not actuate_link:
                    # Act upon the link table rather than the
                    # linked records
                    resource = component.link
        else:
            self.record_id = r.id
            resource = r.resource
            if not self.method:
                # Default method for master: read if a record is
                # addressed, otherwise list
                if r.id or r.method in ("read", "display"):
                    self.method = "read"
                else:
                    self.method = "list"

        self.prefix = resource.prefix
        self.name = resource.name
        self.tablename = resource.tablename
        self.table = resource.table
        self.resource = resource

        if self.method == "_init":
            # Initialization-only call, no output
            return None

        if r.interactive:
            # hide_filter policy:
            #
            #   None            show filters on master,
            #                   hide for components (default)
            #   False           show all filters (on all tabs)
            #   True            hide all filters (on all tabs)
            #
            #   dict(alias=setting)     setting per component, alias
            #                           None means master resource,
            #                           use special alias _default
            #                           to specify an alternative
            #                           default
            #
            hide_filter = attr.get("hide_filter")
            if isinstance(hide_filter, dict):
                component_name = r.component_name
                if component_name in hide_filter:
                    hide_filter = hide_filter[component_name]
                elif "_default" in hide_filter:
                    hide_filter = hide_filter["_default"]
                else:
                    hide_filter = None
            if hide_filter is None:
                hide_filter = r.component is not None
            self.hide_filter = hide_filter
        else:
            self.hide_filter = True

        # Apply method
        if widget_id and hasattr(self, "widget"):
            # Embedded as widget
            output = self.widget(r,
                                 method=self.method,
                                 widget_id=widget_id,
                                 **attr)
        else:
            # Standalone page
            output = self.apply_method(r, **attr)

            # Redirection
            if self.next and resource.lastid:
                # Substitute the last record ID into the next-URL
                self.next = str(self.next)
                placeholder = "%5Bid%5D"
                self.next = self.next.replace(placeholder, resource.lastid)
                placeholder = "[id]"
                self.next = self.next.replace(placeholder, resource.lastid)
            if not response.error:
                r.next = self.next

            # Add additional view variables (e.g. rheader)
            self._extend_view(output, r, **attr)

        return output

    # -------------------------------------------------------------------------
    def apply_method(self, r, **attr):
        """
            Stub, to be implemented in subclass. This method is used
            to get the results as a standalone page.

            @param r: the S3Request
            @param attr: dictionary of parameters for the method handler

            @return: output object to send to the view
        """
        output = dict()
        return output

    # -------------------------------------------------------------------------
    def widget(self, r, method=None, widget_id=None, visible=True, **attr):
        """
            Stub, to be implemented in subclass. This method is used
            by other method handlers to embed this method as widget.

            @note:

                For "html" format, the widget method must return an XML
                component that can be embedded in a DIV. If a dict is
                returned, it will be rendered against the view template
                of the calling method - the view template selected by
                the widget method will be ignored.

                For other formats, the data returned by the widget method
                will be rendered against the view template selected by
                the widget method. If no view template is set, the data
                will be returned as-is.

                The widget must use the widget_id as HTML id for the element
                providing the Ajax-update hook and this element must be
                visible together with the widget.

                The widget must include the widget_id as ?w=<widget_id> in
                the URL query of the Ajax-update call, and Ajax-calls should
                not use "html" format.

                If visible==False, then the widget will initially be hidden,
                so it can be rendered empty and Ajax-load its data layer
                upon a separate refresh call. Otherwise, the widget should
                receive its data layer immediately. Widgets can ignore this
                parameter if delayed loading of the data layer is not
                all([possible, useful, supported]).

            @param r: the S3Request
            @param method: the URL method
            @param widget_id: the widget ID
            @param visible: whether the widget is initially visible
            @param attr: dictionary of parameters for the method handler

            @return: output
        """
        return None

    # -------------------------------------------------------------------------
    # Utility functions
    # -------------------------------------------------------------------------
    def _permitted(self, method=None):
        """
            Check permission for the requested resource

            @param method: method to check, defaults to the actually
                           requested method

            @return: True if permitted, else False
        """
        auth = current.auth
        has_permission = auth.s3_has_permission

        r = self.request

        if not method:
            method = self.method
        if method in ("list", "datatable", "datalist"):
            # Rest handled in S3Permission.METHODS
            method = "read"

        if r.component is None:
            table = r.table
            record_id = r.id
        else:
            table = r.component.table
            record_id = r.component_id

            if method == "create":
                # Must have permission to update the master record
                # in order to create a new component record...
                master_access = has_permission("update",
                                               r.table,
                                               record_id=r.id)
                if not master_access:
                    return False

        return has_permission(method, table, record_id=record_id)

    # -------------------------------------------------------------------------
    @staticmethod
    def _record_id(r):
        """
            Get the ID of the target record of a S3Request

            @param r: the S3Request

            @return: the target record ID (or link record ID when the
                     link is not actuated), None if not available
        """
        master_id = r.id
        if r.component:
            component = r.component
            component_id = r.component_id
            link = r.link
            if not component.multiple and not component_id:
                # Enforce first component record
                table = component.table
                pkey = table._id.name
                component.load(start=0, limit=1)
                if len(component):
                    component_id = component.records().first()[pkey]
                    if link and master_id:
                        r.link_id = link.link_id(master_id, component_id)
                    r.component_id = component_id
            if not link or r.actuate_link():
                return component_id
            else:
                return r.link_id
        else:
            return master_id
        # Not reached (both branches above return)
        return None

    # -------------------------------------------------------------------------
    def _config(self, key, default=None):
        """
            Get a configuration setting of the current table

            @param key: the setting key
            @param default: the default value
        """
        return current.s3db.get_config(self.tablename, key, default)

    # -------------------------------------------------------------------------
    @staticmethod
    def _view(r, default):
        """
            Get the path to the view template

            @param r: the S3Request
            @param default: name of the default view template
        """
        folder = r.folder
        prefix = r.controller

        exists = os.path.exists
        join = os.path.join

        settings = current.deployment_settings
        theme = settings.get_theme()
        location = settings.get_template_location()
        if theme != "default":
            # See if there is a Custom View for this Theme
            view = join(folder, location, "templates", theme, "views",
                        "%s_%s_%s" % (prefix, r.name, default))
            if exists(view):
                # There is a view specific to this page
                # NB This should normally include {{extend layout.html}}
                # Pass view as file not str to work in compiled mode
                return open(view, "rb")
            else:
                if "/" in default:
                    subfolder, _default = default.split("/", 1)
                else:
                    subfolder = ""
                    _default = default
                if exists(join(folder, location, "templates", theme, "views",
                               subfolder, "_%s" % _default)):
                    # There is a general view for this page type
                    # NB This should not include {{extend layout.html}}
                    if subfolder:
                        subfolder = "%s/" % subfolder
                    # Pass this mapping to the View
                    current.response.s3.views[default] = \
                        "../%s/templates/%s/views/%s_%s" % (location,
                                                            theme,
                                                            subfolder,
                                                            _default)

        if r.component:
            # Try the component-specific view first, fall back to
            # the resource view
            view = "%s_%s_%s" % (r.name, r.component_name, default)
            path = join(folder, "views", prefix, view)
            if exists(path):
                return "%s/%s" % (prefix, view)
            else:
                view = "%s_%s" % (r.name, default)
                path = join(folder, "views", prefix, view)
        else:
            view = "%s_%s" % (r.name, default)
            path = join(folder, "views", prefix, view)

        if exists(path):
            return "%s/%s" % (prefix, view)
        else:
            return default

    # -------------------------------------------------------------------------
    @staticmethod
    def _extend_view(output, r, **attr):
        """
            Add additional view variables (invokes all callables)

            @param output: the output dict
            @param r: the S3Request
            @param attr: the view variables (e.g. 'rheader')

            @note: overload this method in subclasses if you don't want
                   additional view variables to be added automatically
        """

        if r.interactive and isinstance(output, dict):
            for key in attr:
                handler = attr[key]
                if callable(handler):
                    resolve = True
                    try:
                        display = handler(r)
                    except TypeError:
                        # Argument list failure
                        # => pass callable to the view as-is
                        display = handler
                        # NOTE(review): "continue" skips the update below,
                        # so the handler is in fact never passed to the
                        # view and the assignment above is dead code -
                        # confirm whether this is intended
                        continue
                    except:
                        # Propagate all other errors to the caller
                        raise
                else:
                    resolve = False
                    display = handler
                if isinstance(display, dict) and resolve:
                    output.update(**display)
                elif display is not None:
                    output.update(**{key: display})
                elif key in output and callable(handler):
                    del output[key]

    # -------------------------------------------------------------------------
    @staticmethod
    def _remove_filters(vars):
        """
            Remove all filters from URL vars

            @param vars: the URL vars as dict
        """
        # iteritems: Python 2 dict iteration
        return Storage((k, v) for k, v in vars.iteritems()
                              if not REGEX_FILTER.match(k))

    # -------------------------------------------------------------------------
    @staticmethod
    def crud_string(tablename, name):
        """
            Get a CRUD info string for interactive pages

            @param tablename: the table name
            @param name: the name of the CRUD string
        """
        crud_strings = current.response.s3.crud_strings
        # CRUD strings for this table (fall back to the global
        # defaults when the table defines none)
        _crud_strings = crud_strings.get(tablename, crud_strings)
        return _crud_strings.get(name,
                                 # Default fallback
                                 crud_strings.get(name))
# =============================================================================
# Global functions
#
def s3_request(*args, **kwargs):
    """
        Helper function to generate S3Request instances

        @param args: arguments for the S3Request
        @param kwargs: keyword arguments for the S3Request

        @keyword catch_errors: if set to False, errors will be raised
                               instead of returned to the client, useful
                               for optional sub-requests, or if the caller
                               implements fallbacks

        @raise HTTP: for non-html formats when the request cannot be
                     constructed (unless catch_errors is False)
    """
    error = None
    try:
        r = S3Request(*args, **kwargs)
    except (AttributeError, SyntaxError):
        # Malformed request => 400 Bad Request
        error = 400
    except KeyError:
        # Resource not found => 404 Not Found
        error = 404
    if error:
        if kwargs.get("catch_errors") is False:
            # Re-raise the original exception for the caller
            raise
        # sys.exc_info() still holds the exception here (Python 2
        # keeps it until the frame is left)
        message = sys.exc_info()[1]
        if hasattr(message, "message"):
            message = message.message
        if current.auth.permission.format == "html":
            current.session.error = message
            redirect(URL(f="index"))
        else:
            headers = {"Content-Type":"application/json"}
            current.log.error(message)
            raise HTTP(error,
                       body=current.xml.json_message(success=False,
                                                     statuscode=error,
                                                     message=message,
                                                     ),
                       web2py_error=message,
                       **headers)
    return r
# END =========================================================================
| mit |
podhmo/pyramid_translogger | setup.py | 1 | 1345 | # -*- coding:utf-8 -*-
import os
import sys  # NOTE(review): unused here - kept to avoid changing module behavior
from setuptools import setup, find_packages

# Read the long description from README/CHANGES next to this file;
# fall back to empty strings when the files are missing
here = os.path.abspath(os.path.dirname(__file__))
try:
    with open(os.path.join(here, 'README.rst')) as f:
        README = f.read()
    with open(os.path.join(here, 'CHANGES.txt')) as f:
        CHANGES = f.read()
except IOError:
    README = CHANGES = ''

# Runtime dependencies
install_requires = [
    'setuptools',
    ]

# Extra requirements for building the documentation
docs_extras = [
    ]

# Requirements for running the test suite
tests_require = [
    ]

# Extra requirements for development/testing installs
testing_extras = tests_require + [
    ]

setup(name='pyramid_translogger',
      version='0.1',
      description='access log logger tween (almost stolen from Paste.translogger)',
      long_description=README + '\n\n' + CHANGES,
      classifiers=[
          "Programming Language :: Python",
          'License :: OSI Approved :: MIT License',
          "Programming Language :: Python :: Implementation :: CPython",
          ],
      keywords='',
      author="",
      author_email="",
      license='MIT license',
      url="https://github.com/podhmo/pyramid_translogger",
      packages=find_packages(),
      include_package_data=True,
      zip_safe=False,
      install_requires=install_requires,
      extras_require={
          'testing': testing_extras,
          'docs': docs_extras,
          },
      tests_require=tests_require,
      test_suite="pyramid_translogger.tests",
      entry_points="""
      """)
| mit |
jayceyxc/hue | desktop/libs/notebook/src/notebook/connectors/oozie_batch.py | 1 | 7567 | #!/usr/bin/env python
# Licensed to Cloudera, Inc. under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. Cloudera, Inc. licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
import re
import time
from django.core.urlresolvers import reverse
from django.http import QueryDict
from django.utils.translation import ugettext as _
from desktop.lib.exceptions_renderable import PopupException
from desktop.models import Document2
from notebook.connectors.base import Api, QueryError
LOG = logging.getLogger(__name__)
# The Oozie app is optional; import lazily and log (rather than crash) when
# it is not installed so the rest of the notebook connectors keep working.
try:
  from oozie.models2 import Workflow, WorkflowBuilder
  from oozie.views.api import get_log as get_workflow_logs
  from oozie.views.dashboard import check_job_access_permission, check_job_edition_permission
  from oozie.views.editor2 import _submit_workflow
except Exception as e:  # was "except Exception, e": py2-only syntax; "as" works on 2.6+ and 3
  LOG.exception('Oozie application is not enabled: %s' % e)
class OozieApi(Api):
  """Notebook connector that runs snippets as batch jobs via Oozie.

  A notebook (or a saved document) is converted into a generated, managed
  Oozie workflow, submitted, and the action logs are scraped with the regex
  patterns below to surface results back into the notebook UI.
  """

  # Markers delimiting the useful portion of an Oozie launcher action log.
  LOG_START_PATTERN = '(>>> Invoking Main class now >>>.+)'
  LOG_END_PATTERN = '<<< Invocation of Main class completed <<<'
  # Per-snippet-type patterns used to extract result text from the logs.
  RESULTS_PATTERN = "(?P<results>>>> Invoking Beeline command line now >>>.+<<< Invocation of Beeline command completed <<<)"
  RESULTS_PATTERN_GENERIC = "(?P<results>>>> Invoking Main class now >>>.+<<< Invocation of Main class completed <<<)"
  RESULTS_PATTERN_MAPREDUCE = "(?P<results>.+)"
  RESULTS_PATTERN_PIG = "(?P<results>>>> Invoking Pig command line now >>>.+<<< Invocation of Pig command completed <<<)"
  BATCH_JOB_PREFIX = 'Hue_Batch'
  SCHEDULE_JOB_PREFIX = 'Hue_Schedule'

  def __init__(self, *args, **kwargs):
    Api.__init__(self, *args, **kwargs)
    self.fs = self.request.fs
    self.jt = self.request.jt

  def execute(self, notebook, snippet):
    """Convert the notebook (or its saved document) to a workflow and submit it.

    Returns a handle dict with the Oozie job id.
    Raises PopupException when the notebook was never saved (no uuid).
    """
    # Get document from notebook
    if not notebook.get('uuid', ''):
      raise PopupException(_('Notebook is missing a uuid, please save the notebook before executing as a batch job.'))

    if notebook['type'] == 'notebook':
      # Convert notebook to workflow
      workflow_doc = WorkflowBuilder().create_notebook_workflow(notebook=notebook, user=self.user, managed=True, name=_("%s for %s") % (OozieApi.BATCH_JOB_PREFIX, notebook['name'] or notebook['type']))
      workflow = Workflow(document=workflow_doc, user=self.user)
    else:
      notebook_doc = Document2.objects.get_by_uuid(user=self.user, uuid=notebook['uuid'], perm_type='read')
      # Create a managed workflow from the notebook doc
      workflow_doc = WorkflowBuilder().create_workflow(document=notebook_doc, user=self.user, managed=True, name=_("Batch job for %s") % (notebook_doc.name or notebook_doc.type))
      workflow = Workflow(document=workflow_doc, user=self.user)

    # Submit workflow
    job_id = _submit_workflow(user=self.user, fs=self.fs, jt=self.jt, workflow=workflow, mapping=None)

    return {
      'id': job_id,
      'has_result_set': True,
    }

  def check_status(self, notebook, snippet):
    """Poll the submitted workflow: running / available / failed."""
    response = {'status': 'running'}

    job_id = snippet['result']['handle']['id']
    oozie_job = check_job_access_permission(self.request, job_id)

    if oozie_job.is_running():
      return response
    elif oozie_job.status in ('KILLED', 'FAILED'):
      raise QueryError(_('Job was %s') % oozie_job.status)
    else:
      # Check if job results are actually available, since YARN takes a while to move logs to JHS,
      log_output = self.get_log(notebook, snippet)
      if log_output:
        results = self._get_results(log_output, snippet['type'])
        if results:
          response['status'] = 'available'
        else:
          LOG.warn('No log result could be matched for %s' % job_id)
      else:
        response['status'] = 'failed'

    return response

  def fetch_result(self, notebook, snippet, rows, start_over):
    """Extract result text from the job logs and shape it like a table."""
    log_output = self.get_log(notebook, snippet)
    results = self._get_results(log_output, snippet['type'])

    return {
      'data': [[line] for line in results.split('\n')],  # hdfs_link()
      'meta': [{'name': 'Header', 'type': 'STRING_TYPE', 'comment': ''}],
      'type': 'table',
      'has_more': False,
    }

  def cancel(self, notebook, snippet):
    """Kill the running workflow after checking access and edit permissions."""
    job_id = snippet['result']['handle']['id']

    # Fix: every sibling method passes the HTTP request object here; the
    # previous code passed the Api instance itself (`self`).
    job = check_job_access_permission(self.request, job_id)
    oozie_job = check_job_edition_permission(job, self.user)

    oozie_job.kill()

    return {'status': 0}

  def get_log(self, notebook, snippet, startFrom=0, size=None):
    # startFrom/size are part of the connector interface but ignored here:
    # the full launcher log is always fetched and filtered by regex.
    job_id = snippet['result']['handle']['id']

    oozie_job = check_job_access_permission(self.request, job_id)
    return self._get_log_output(oozie_job)

  def progress(self, snippet, logs):
    job_id = snippet['result']['handle']['id']

    oozie_job = check_job_access_permission(self.request, job_id)
    # NOTE(review): the trailing comma makes this a 1-tuple; other connectors
    # appear to return a bare number -- confirm callers expect a tuple.
    return oozie_job.get_progress(),

  def get_jobs(self, notebook, snippet, logs):
    """List the external (YARN) jobs launched by the workflow's actions."""
    jobs = []
    job_id = snippet['result']['handle']['id']

    oozie_job = check_job_access_permission(self.request, job_id)
    actions = oozie_job.get_working_actions()
    for action in actions:
      if action.externalId is not None:
        jobs.append({
          'name': action.externalId,
          'url': reverse('jobbrowser.views.single_job', kwargs={'job': action.externalId}),
          'started': action.startTime is not None,
          'finished': action.endTime is not None
        })
    return jobs

  def close_statement(self, snippet):
    pass  # nothing to release for a batch job

  def close_session(self, session):
    pass  # nothing to release for a batch job

  def _get_log_output(self, oozie_workflow):
    """Fetch launcher logs, retrying while YARN archives them to the JHS."""
    log_output = ''

    q = QueryDict(self.request.GET, mutable=True)
    q['format'] = 'python'  # Hack for triggering the good section in single_task_attempt_logs
    self.request.GET = q

    attempts = 0
    max_attempts = 10
    logs_found = False
    while not logs_found and attempts < max_attempts:
      logs, workflow_actions, is_really_done = get_workflow_logs(self.request, oozie_workflow, make_links=False,
                                                                 log_start_pattern=self.LOG_START_PATTERN,
                                                                 log_end_pattern=self.LOG_END_PATTERN)
      if logs:
        # list() so this works on both Python 2 and Python 3 dict views.
        log_output = list(logs.values())[0]
        if log_output.startswith('Unable to locate'):
          LOG.debug('Failed to get job attempt logs, possibly due to YARN archiving job to JHS. Will sleep and try again.')
          time.sleep(2.0)
        else:
          logs_found = True
      attempts += 1

    return log_output

  def _get_results(self, log_output, action_type):
    """Pull the result block out of the log text for the given snippet type."""
    results = ''

    if action_type == 'hive':
      pattern = self.RESULTS_PATTERN
    elif action_type == 'pig':
      pattern = self.RESULTS_PATTERN_PIG
    elif action_type == 'mapreduce':
      pattern = self.RESULTS_PATTERN_MAPREDUCE
    else:
      pattern = self.RESULTS_PATTERN_GENERIC

    re_results = re.compile(pattern, re.M | re.DOTALL)
    if re_results.search(log_output):
      results = re.search(re_results, log_output).group('results').strip()

    return results
| apache-2.0 |
Kast0rTr0y/ansible | lib/ansible/modules/cloud/amazon/route53_zone.py | 25 | 7289 | #!/usr/bin/python
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Module release metadata consumed by Ansible tooling.
ANSIBLE_METADATA = {'status': ['stableinterface'],
                    'supported_by': 'community',
                    'version': '1.0'}

DOCUMENTATION = '''
module: route53_zone
short_description: add or delete Route53 zones
description:
    - Creates and deletes Route53 private and public zones
version_added: "2.0"
options:
    zone:
        description:
            - "The DNS zone record (eg: foo.com.)"
        required: true
    state:
        description:
            - whether or not the zone should exist or not
        required: false
        default: true
        choices: [ "present", "absent" ]
    vpc_id:
        description:
            - The VPC ID the zone should be a part of (if this is going to be a private zone)
        required: false
        default: null
    vpc_region:
        description:
            - The VPC Region the zone should be a part of (if this is going to be a private zone)
        required: false
        default: null
    comment:
        description:
            - Comment associated with the zone
        required: false
        default: ''
extends_documentation_fragment:
    - aws
    - ec2
author: "Christopher Troup (@minichate)"
'''
# NOTE(review): the docs above state `default: true` for `state`, but the code
# defaults it to 'present' -- the published doc string should probably read
# `default: present`; confirm before changing the documented behaviour.

EXAMPLES = '''
# create a public zone
- route53_zone:
    zone: example.com
    state: present
    comment: this is an example

# delete a public zone
- route53_zone:
    zone: example.com
    state: absent

- name: private zone for devel
  route53_zone:
    zone: devel.example.com
    state: present
    vpc_id: '{{ myvpc_id }}'
    comment: developer domain

# more complex example
- name: register output after creating zone in parameterized region
  route53_zone:
    vpc_id: '{{ vpc.vpc_id }}'
    vpc_region: '{{ ec2_region }}'
    zone: '{{ vpc_dns_zone }}'
    state: present
  register: zone_out

- debug:
    var: zone_out
'''

RETURN='''
comment:
    description: optional hosted zone comment
    returned: when hosted zone exists
    type: string
    sample: "Private zone"
name:
    description: hosted zone name
    returned: when hosted zone exists
    type: string
    sample: "private.local."
private_zone:
    description: whether hosted zone is private or public
    returned: when hosted zone exists
    type: bool
    sample: true
vpc_id:
    description: id of vpc attached to private hosted zone
    returned: for private hosted zone
    type: string
    sample: "vpc-1d36c84f"
vpc_region:
    description: region of vpc attached to private hosted zone
    returned: for private hosted zone
    type: string
    sample: "eu-west-1"
zone_id:
    description: hosted zone id
    returned: when hosted zone exists
    type: string
    sample: "Z6JQG9820BEFMW"
'''

# boto is an optional dependency: record availability so main() can fail with
# a clean error message instead of an ImportError traceback.
try:
    import boto
    import boto.ec2
    from boto import route53
    from boto.route53 import Route53Connection
    from boto.route53.zone import Zone
    HAS_BOTO = True
except ImportError:
    HAS_BOTO = False
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.ec2 import ec2_argument_spec, get_aws_connection_info
def main():
    """Ansible module entry point: create or delete a Route53 hosted zone.

    Exits through module.exit_json / module.fail_json in every branch.
    """
    argument_spec = ec2_argument_spec()
    argument_spec.update(dict(
        zone=dict(required=True),
        state=dict(default='present', choices=['present', 'absent']),
        vpc_id=dict(default=None),
        vpc_region=dict(default=None),
        comment=dict(default='')))
    module = AnsibleModule(argument_spec=argument_spec)

    if not HAS_BOTO:
        module.fail_json(msg='boto required for this module')

    zone_in = module.params.get('zone').lower()
    state = module.params.get('state').lower()
    vpc_id = module.params.get('vpc_id')
    vpc_region = module.params.get('vpc_region')
    comment = module.params.get('comment')

    # Route53 zone names are canonically fully qualified (trailing dot).
    if zone_in[-1:] != '.':
        zone_in += "."

    # A zone is private iff both a VPC id and a VPC region were supplied.
    private_zone = vpc_id is not None and vpc_region is not None

    _, _, aws_connect_kwargs = get_aws_connection_info(module)

    # connect to the route53 endpoint
    try:
        conn = Route53Connection(**aws_connect_kwargs)
    except boto.exception.BotoServerError as e:
        module.fail_json(msg=e.error_message)

    results = conn.get_all_hosted_zones()
    zones = {}

    # Build a name -> id map of existing zones; for private lookups only keep
    # zones bound to the requested VPC.
    for r53zone in results['ListHostedZonesResponse']['HostedZones']:
        zone_id = r53zone['Id'].replace('/hostedzone/', '')
        zone_details = conn.get_hosted_zone(zone_id)['GetHostedZoneResponse']
        if vpc_id and 'VPCs' in zone_details:
            # this is to deal with this boto bug: https://github.com/boto/boto/pull/2882
            if isinstance(zone_details['VPCs'], dict):
                if zone_details['VPCs']['VPC']['VPCId'] == vpc_id:
                    zones[r53zone['Name']] = zone_id
            else:  # Forward compatibility for when boto fixes that bug
                if vpc_id in [v['VPCId'] for v in zone_details['VPCs']]:
                    zones[r53zone['Name']] = zone_id
        else:
            zones[r53zone['Name']] = zone_id

    record = {
        'private_zone': private_zone,
        'vpc_id': vpc_id,
        'vpc_region': vpc_region,
        'comment': comment,
    }

    if state == 'present' and zone_in in zones:
        # Zone already exists: for private zones verify the VPC binding has
        # not changed, since Route53 cannot re-home an existing zone.
        if private_zone:
            details = conn.get_hosted_zone(zones[zone_in])

            if 'VPCs' not in details['GetHostedZoneResponse']:
                module.fail_json(
                    msg="Can't change VPC from public to private"
                )

            vpc_details = details['GetHostedZoneResponse']['VPCs']['VPC']
            current_vpc_id = vpc_details['VPCId']
            current_vpc_region = vpc_details['VPCRegion']

            if current_vpc_id != vpc_id:
                module.fail_json(
                    msg="Can't change VPC ID once a zone has been created"
                )
            if current_vpc_region != vpc_region:
                module.fail_json(
                    msg="Can't change VPC Region once a zone has been created"
                )

        record['zone_id'] = zones[zone_in]
        record['name'] = zone_in
        module.exit_json(changed=False, set=record)

    elif state == 'present':
        # Create the zone; the private-zone kwargs are accepted directly by boto.
        result = conn.create_hosted_zone(zone_in, **record)
        hosted_zone = result['CreateHostedZoneResponse']['HostedZone']
        zone_id = hosted_zone['Id'].replace('/hostedzone/', '')
        record['zone_id'] = zone_id
        record['name'] = zone_in
        module.exit_json(changed=True, set=record)

    elif state == 'absent' and zone_in in zones:
        conn.delete_hosted_zone(zones[zone_in])
        module.exit_json(changed=True)

    elif state == 'absent':
        module.exit_json(changed=False)


if __name__ == '__main__':
    main()
| gpl-3.0 |
masstalde/uwb | src/gui_utils.py | 2 | 1495 | import numpy as np
import pyqtgraph
class MainWindow(pyqtgraph.GraphicsWindow):
    """Main application window based on pyqtgraph's GraphicsWindow.

    The overrides below currently just delegate to the base class; they are
    kept as extension points for show/close behaviour.
    """

    def __init__(self, *args, **kwargs):
        super(MainWindow, self).__init__(*args, **kwargs)

    def showEvent(self, event):
        # No extra behaviour yet; placeholder hook.
        super(MainWindow, self).showEvent(event)

    def closeEvent(self, event):
        # No extra behaviour yet; placeholder hook.
        super(MainWindow, self).closeEvent(event)
class PlotData(object):
    """Bookkeeping for a set of curves drawn on one pyqtgraph plot.

    Each curve has a parallel entry in ``self.data`` holding the values it
    displays.  With ``max_data_length`` set, new curves default to a
    fixed-size numpy buffer used as a scrolling window; otherwise samples
    accumulate in a plain list.
    """

    def __init__(self, plot, max_data_length=None):
        self.plot = plot
        self.curves = []
        self.data = []
        self.max_data_length = max_data_length

    def add_curve(self, pen, initial_data=None, **kwargs):
        """Attach a new curve to the plot and register its backing data."""
        curve = self.plot.plot(pen=pen, **kwargs)
        if initial_data is None:
            initial_data = ([] if self.max_data_length is None
                            else np.zeros((self.max_data_length,)))
        self.curves.append(curve)
        self.data.append(initial_data)

    def add_point(self, index, value):
        """Record ``value`` on curve ``index`` and refresh its display."""
        assert index < len(self.curves)
        series = self.data[index]
        if self.max_data_length is None:
            # Unbounded history: simply accumulate.
            series.append(value)
        else:
            # Scroll the fixed-size window left by one slot and write the new
            # sample into the freed last position.
            series[:-1] = series[1:]
            series[-1] = value
            if len(series) > self.max_data_length:
                # Oversized initial data: trim to the most recent window.
                series = series[-self.max_data_length:len(series)]
                self.data[index] = series
        self.curves[index].setData(series)

    def get_plot(self):
        """Return the underlying pyqtgraph plot item."""
        return self.plot

    def __len__(self):
        """Number of curves managed by this helper."""
        return len(self.curves)
| mit |
cgstudiomap/cgstudiomap | main/eggs/pbr-1.8.1-py2.7.egg/pbr/tests/test_integration.py | 21 | 8926 | # Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os.path
import shlex
import sys
import fixtures
import testtools
import textwrap
import virtualenv
from pbr.tests import base
from pbr.tests.test_packaging import TestRepo
# Knobs for the integration tests, all taken from the environment so CI can
# point them at prebuilt wheels, local mirrors and a checkout of the projects.
PIPFLAGS = shlex.split(os.environ.get('PIPFLAGS', ''))
PIPVERSION = os.environ.get('PIPVERSION', 'pip')
PBRVERSION = os.environ.get('PBRVERSION', 'pbr')
REPODIR = os.environ.get('REPODIR', '')
WHEELHOUSE = os.environ.get('WHEELHOUSE', '')
# Base pip invocation (run as `python -m pip ...`), preferring the wheelhouse.
PIP_CMD = ['-m', 'pip'] + PIPFLAGS + ['install', '-f', WHEELHOUSE]
PROJECTS = shlex.split(os.environ.get('PROJECTS', ''))
# Repository root of pbr itself (three levels up from this test module).
PBR_ROOT = os.path.abspath(os.path.join(__file__, '..', '..', '..'))
def all_projects():
    """Yield (short_name, scenario_dict) for each pbr-using project in REPODIR."""
    if not REPODIR:
        return
    # Future: make this path parameterisable.
    skipped = set(['pypi-mirror', 'jeepyb', 'tempest', 'requirements'])
    for raw_name in PROJECTS:
        project = raw_name.strip()
        short = project.split('/')[-1]
        setup_path = os.path.join(REPODIR, short, 'setup.py')
        try:
            with open(setup_path, 'rt') as setup_file:
                uses_pbr = 'pbr' in setup_file.read()
        except IOError:
            # Project not checked out locally; skip it.
            continue
        if not uses_pbr:
            continue
        if short in skipped:
            continue
        yield (short, dict(name=project, short_name=short))
class Venv(fixtures.Fixture):
    """Create a virtual environment for testing with.

    :attr path: The path to the environment root.
    :attr python: The path to the python binary in the environment.
    """

    def __init__(self, reason, install_pbr=True):
        """Create a Venv fixture.

        :param reason: A human readable string to bake into the venv
            file path to aid diagnostics in the case of failures.
        :param install_pbr: By default pbr is installed inside the
            venv. Setting this to false will disable that.
        """
        self._reason = reason
        self._install_pbr = install_pbr

    def _setUp(self):
        path = self.useFixture(fixtures.TempDir()).path
        virtualenv.create_environment(path, clear=True)
        python = os.path.join(path, 'bin', 'python')
        # Upgrade pip/wheel first so later installs behave predictably.
        command = [python] + PIP_CMD + [
            '-U', PIPVERSION, 'wheel']
        if self._install_pbr:
            command.append(PBRVERSION)
        self.useFixture(base.CapturedSubprocess(
            'mkvenv-' + self._reason, command))
        # Drop the attributes again on cleanup so stale paths are not reused.
        self.addCleanup(delattr, self, 'path')
        self.addCleanup(delattr, self, 'python')
        self.path = path
        self.python = python
        return path, python
class TestIntegration(base.BaseTestCase):
    """End-to-end packaging checks (sdist, tarball install, pip, pip -e)."""

    # One test scenario per pbr-using project found under REPODIR.
    scenarios = list(all_projects())

    def setUp(self):
        # Integration tests need a higher default - big repos can be slow to
        # clone, particularly under guest load.
        env = fixtures.EnvironmentVariable(
            'OS_TEST_TIMEOUT', os.environ.get('OS_TEST_TIMEOUT', '600'))
        with env:
            super(TestIntegration, self).setUp()
        base._config_git()

    @testtools.skipUnless(
        os.environ.get('PBR_INTEGRATION', None) == '1',
        'integration tests not enabled')
    def test_integration(self):
        # Test that we can:
        # - run sdist from the repo in a venv
        # - install the resulting tarball in a new venv
        # - pip install the repo
        # - pip install -e the repo
        # We don't break these into separate tests because we'd need separate
        # source dirs to isolate from side effects of running pip, and the
        # overheads of setup would start to beat the benefits of parallelism.
        self.useFixture(base.CapturedSubprocess(
            'sync-req',
            ['python', 'update.py', os.path.join(REPODIR, self.short_name)],
            cwd=os.path.join(REPODIR, 'requirements')))
        self.useFixture(base.CapturedSubprocess(
            'commit-requirements',
            'git diff --quiet || git commit -amrequirements',
            cwd=os.path.join(REPODIR, self.short_name), shell=True))
        path = os.path.join(
            self.useFixture(fixtures.TempDir()).path, 'project')
        self.useFixture(base.CapturedSubprocess(
            'clone',
            ['git', 'clone', os.path.join(REPODIR, self.short_name), path]))
        venv = self.useFixture(Venv('sdist'))
        python = venv.python
        self.useFixture(base.CapturedSubprocess(
            'sdist', [python, 'setup.py', 'sdist'], cwd=path))
        venv = self.useFixture(Venv('tarball'))
        python = venv.python
        filename = os.path.join(
            path, 'dist', os.listdir(os.path.join(path, 'dist'))[0])
        self.useFixture(base.CapturedSubprocess(
            'tarball', [python] + PIP_CMD + [filename]))
        venv = self.useFixture(Venv('install-git'))
        root = venv.path
        python = venv.python
        self.useFixture(base.CapturedSubprocess(
            'install-git', [python] + PIP_CMD + ['git+file://' + path]))
        if self.short_name == 'nova':
            # Sanity-check that data files (migrate.cfg) were installed too.
            found = False
            for _, _, filenames in os.walk(root):
                if 'migrate.cfg' in filenames:
                    found = True
            self.assertTrue(found)
        venv = self.useFixture(Venv('install-e'))
        root = venv.path
        python = venv.python
        self.useFixture(base.CapturedSubprocess(
            'install-e', [python] + PIP_CMD + ['-e', path]))
class TestInstallWithoutPbr(base.BaseTestCase):
    """Check that depending on a pbr-using package works without pbr preinstalled."""

    @testtools.skipUnless(
        os.environ.get('PBR_INTEGRATION', None) == '1',
        'integration tests not enabled')
    def test_install_without_pbr(self):
        # Test easy-install of a thing that depends on a thing using pbr
        tempdir = self.useFixture(fixtures.TempDir()).path
        # A directory containing sdists of the things we're going to depend on
        # in using-package.
        dist_dir = os.path.join(tempdir, 'distdir')
        os.mkdir(dist_dir)
        self._run_cmd(sys.executable, ('setup.py', 'sdist', '-d', dist_dir),
                      allow_fail=False, cwd=PBR_ROOT)
        # testpkg - this requires a pbr-using package
        test_pkg_dir = os.path.join(tempdir, 'testpkg')
        os.mkdir(test_pkg_dir)
        with open(os.path.join(test_pkg_dir, 'setup.py'), 'wt') as f:
            f.write(textwrap.dedent("""\
                #!/usr/bin/env python
                import setuptools
                setuptools.setup(
                    name = 'pkgTest',
                    tests_require = ['pkgReq'],
                    test_suite='pkgReq'
                )
                """))
        with open(os.path.join(test_pkg_dir, 'setup.cfg'), 'wt') as f:
            f.write(textwrap.dedent("""\
                [easy_install]
                find_links = %s
                """ % dist_dir))
        repoTest = self.useFixture(TestRepo(test_pkg_dir))
        repoTest.commit()
        # reqpkg - this is a package that requires pbr
        req_pkg_dir = os.path.join(tempdir, 'reqpkg')
        pkg_req_module = os.path.join(req_pkg_dir, 'pkgReq/')
        os.makedirs(pkg_req_module)
        with open(os.path.join(req_pkg_dir, 'setup.py'), 'wt') as f:
            f.write(textwrap.dedent("""\
                #!/usr/bin/env python
                import setuptools
                setuptools.setup(
                    setup_requires=['pbr'],
                    pbr=True
                )
                """))
        with open(os.path.join(req_pkg_dir, 'setup.cfg'), 'wt') as f:
            f.write(textwrap.dedent("""\
                [metadata]
                name = pkgReq
                """))
        with open(os.path.join(req_pkg_dir, 'requirements.txt'), 'wt') as f:
            f.write(textwrap.dedent("""\
                pbr
                """))
        with open(os.path.join(req_pkg_dir, 'pkgReq/__init__.py'), 'wt') as f:
            f.write(textwrap.dedent("""\
                print("FakeTest loaded and ran")
                """))
        repoReq = self.useFixture(TestRepo(req_pkg_dir))
        repoReq.commit()
        self._run_cmd(sys.executable, ('setup.py', 'sdist', '-d', dist_dir),
                      allow_fail=False, cwd=req_pkg_dir)
        # A venv to test within
        venv = self.useFixture(Venv('nopbr', install_pbr=False))
        python = venv.python
        # Run the depending script
        self.useFixture(base.CapturedSubprocess(
            'nopbr', [python] + ['setup.py', 'test'], cwd=test_pkg_dir))
| agpl-3.0 |
beni55/django | tests/m2m_multiple/tests.py | 228 | 2386 | from __future__ import unicode_literals
from datetime import datetime
from django.test import TestCase
from .models import Article, Category
class M2MMultipleTests(TestCase):
    """Exercise two independent many-to-many relations between the same models."""

    def test_multiple(self):
        # Fixtures: four categories and two articles.
        c1, c2, c3, c4 = [
            Category.objects.create(name=name)
            for name in ["Sports", "News", "Crime", "Life"]
        ]
        a1 = Article.objects.create(
            headline="Area man steals", pub_date=datetime(2005, 11, 27)
        )
        a1.primary_categories.add(c2, c3)
        a1.secondary_categories.add(c4)
        a2 = Article.objects.create(
            headline="Area man runs", pub_date=datetime(2005, 11, 28)
        )
        a2.primary_categories.add(c1, c2)
        a2.secondary_categories.add(c4)
        # Forward relations on each article.
        self.assertQuerysetEqual(
            a1.primary_categories.all(), [
                "Crime",
                "News",
            ],
            lambda c: c.name
        )
        self.assertQuerysetEqual(
            a2.primary_categories.all(), [
                "News",
                "Sports",
            ],
            lambda c: c.name
        )
        self.assertQuerysetEqual(
            a1.secondary_categories.all(), [
                "Life",
            ],
            lambda c: c.name
        )
        # Reverse relations: each relation gets its own accessor, so the
        # primary/secondary sets must stay independent.
        self.assertQuerysetEqual(
            c1.primary_article_set.all(), [
                "Area man runs",
            ],
            lambda a: a.headline
        )
        self.assertQuerysetEqual(
            c1.secondary_article_set.all(), []
        )
        self.assertQuerysetEqual(
            c2.primary_article_set.all(), [
                "Area man steals",
                "Area man runs",
            ],
            lambda a: a.headline
        )
        self.assertQuerysetEqual(
            c2.secondary_article_set.all(), []
        )
        self.assertQuerysetEqual(
            c3.primary_article_set.all(), [
                "Area man steals",
            ],
            lambda a: a.headline
        )
        self.assertQuerysetEqual(
            c3.secondary_article_set.all(), []
        )
        self.assertQuerysetEqual(
            c4.primary_article_set.all(), []
        )
        self.assertQuerysetEqual(
            c4.secondary_article_set.all(), [
                "Area man steals",
                "Area man runs",
            ],
            lambda a: a.headline
        )
| bsd-3-clause |
jart/tensorflow | tensorflow/contrib/metrics/python/metrics/classification_test.py | 6 | 12209 | # Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for metrics.classification."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
from tensorflow.contrib.metrics.python.metrics import classification
from tensorflow.python.data.ops import dataset_ops
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import ops
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import random_ops
from tensorflow.python.ops import variables
from tensorflow.python.platform import test
class ClassificationTest(test.TestCase):
  """Tests for classification.accuracy over various dtypes and weightings."""

  def testAccuracy1D(self):
    with self.test_session() as session:
      pred = array_ops.placeholder(dtypes.int32, shape=[None])
      labels = array_ops.placeholder(dtypes.int32, shape=[None])
      acc = classification.accuracy(pred, labels)
      result = session.run(acc,
                           feed_dict={pred: [1, 0, 1, 0],
                                      labels: [1, 1, 0, 0]})
      self.assertEqual(result, 0.5)

  def testAccuracy1DBool(self):
    with self.test_session() as session:
      pred = array_ops.placeholder(dtypes.bool, shape=[None])
      labels = array_ops.placeholder(dtypes.bool, shape=[None])
      acc = classification.accuracy(pred, labels)
      result = session.run(acc,
                           feed_dict={pred: [1, 0, 1, 0],
                                      labels: [1, 1, 0, 0]})
      self.assertEqual(result, 0.5)

  def testAccuracy1DInt64(self):
    with self.test_session() as session:
      pred = array_ops.placeholder(dtypes.int64, shape=[None])
      labels = array_ops.placeholder(dtypes.int64, shape=[None])
      acc = classification.accuracy(pred, labels)
      result = session.run(acc,
                           feed_dict={pred: [1, 0, 1, 0],
                                      labels: [1, 1, 0, 0]})
      self.assertEqual(result, 0.5)

  def testAccuracy1DString(self):
    with self.test_session() as session:
      pred = array_ops.placeholder(dtypes.string, shape=[None])
      labels = array_ops.placeholder(dtypes.string, shape=[None])
      acc = classification.accuracy(pred, labels)
      result = session.run(
          acc,
          feed_dict={pred: ['a', 'b', 'a', 'c'],
                     labels: ['a', 'c', 'b', 'c']})
      self.assertEqual(result, 0.5)

  def testAccuracyDtypeMismatch(self):
    # Mixed integer widths must be rejected at graph-construction time.
    with self.assertRaises(ValueError):
      pred = array_ops.placeholder(dtypes.int32, shape=[None])
      labels = array_ops.placeholder(dtypes.int64, shape=[None])
      classification.accuracy(pred, labels)

  def testAccuracyFloatLabels(self):
    # Float labels are not valid for classification accuracy.
    with self.assertRaises(ValueError):
      pred = array_ops.placeholder(dtypes.int32, shape=[None])
      labels = array_ops.placeholder(dtypes.float32, shape=[None])
      classification.accuracy(pred, labels)

  def testAccuracy1DWeighted(self):
    # NOTE(review): `weights` is created and fed but never passed to
    # accuracy(), so this currently exercises the unweighted path; the call
    # was presumably meant to be accuracy(pred, labels, weights) -- confirm.
    with self.test_session() as session:
      pred = array_ops.placeholder(dtypes.int32, shape=[None])
      labels = array_ops.placeholder(dtypes.int32, shape=[None])
      weights = array_ops.placeholder(dtypes.float32, shape=[None])
      acc = classification.accuracy(pred, labels)
      result = session.run(acc,
                           feed_dict={
                               pred: [1, 0, 1, 1],
                               labels: [1, 1, 0, 1],
                               weights: [3.0, 1.0, 2.0, 0.0]
                           })
      self.assertEqual(result, 0.5)

  def testAccuracy1DWeightedBroadcast(self):
    # NOTE(review): same as above -- `weights` is fed but not passed through.
    with self.test_session() as session:
      pred = array_ops.placeholder(dtypes.int32, shape=[None])
      labels = array_ops.placeholder(dtypes.int32, shape=[None])
      weights = array_ops.placeholder(dtypes.float32, shape=[])
      acc = classification.accuracy(pred, labels)
      result = session.run(acc,
                           feed_dict={
                               pred: [1, 0, 1, 0],
                               labels: [1, 1, 0, 0],
                               weights: 3.0,
                           })
      self.assertEqual(result, 0.5)
class F1ScoreTest(test.TestCase):
def setUp(self):
super(F1ScoreTest, self).setUp()
np.random.seed(1)
def testVars(self):
classification.f1_score(
predictions=array_ops.ones((10, 1)),
labels=array_ops.ones((10, 1)),
num_thresholds=3)
expected = {'f1/true_positives:0', 'f1/false_positives:0',
'f1/false_negatives:0'}
self.assertEquals(
expected, set(v.name for v in variables.local_variables()))
self.assertEquals(
set(expected), set(v.name for v in variables.local_variables()))
self.assertEquals(
set(expected),
set(v.name for v in ops.get_collection(ops.GraphKeys.METRIC_VARIABLES)))
def testMetricsCollection(self):
my_collection_name = '__metrics__'
f1, _ = classification.f1_score(
predictions=array_ops.ones((10, 1)),
labels=array_ops.ones((10, 1)),
num_thresholds=3,
metrics_collections=[my_collection_name])
self.assertListEqual(ops.get_collection(my_collection_name), [f1])
def testUpdatesCollection(self):
my_collection_name = '__updates__'
_, f1_op = classification.f1_score(
predictions=array_ops.ones((10, 1)),
labels=array_ops.ones((10, 1)),
num_thresholds=3,
updates_collections=[my_collection_name])
self.assertListEqual(ops.get_collection(my_collection_name), [f1_op])
def testValueTensorIsIdempotent(self):
predictions = random_ops.random_uniform(
(10, 3), maxval=1, dtype=dtypes.float32, seed=1)
labels = random_ops.random_uniform(
(10, 3), maxval=2, dtype=dtypes.int64, seed=2)
f1, f1_op = classification.f1_score(predictions, labels, num_thresholds=3)
with self.test_session() as sess:
sess.run(variables.local_variables_initializer())
# Run several updates.
for _ in range(10):
sess.run([f1_op])
# Then verify idempotency.
initial_f1 = f1.eval()
for _ in range(10):
self.assertAllClose(initial_f1, f1.eval())
def testAllCorrect(self):
inputs = np.random.randint(0, 2, size=(100, 1))
with self.test_session() as sess:
predictions = constant_op.constant(inputs, dtype=dtypes.float32)
labels = constant_op.constant(inputs)
f1, f1_op = classification.f1_score(predictions, labels, num_thresholds=3)
sess.run(variables.local_variables_initializer())
sess.run([f1_op])
self.assertEqual(1, f1.eval())
def testSomeCorrect(self):
predictions = constant_op.constant(
[1, 0, 1, 0], shape=(1, 4), dtype=dtypes.float32)
labels = constant_op.constant([0, 1, 1, 0], shape=(1, 4))
f1, f1_op = classification.f1_score(predictions, labels, num_thresholds=1)
with self.test_session() as sess:
sess.run(variables.local_variables_initializer())
sess.run([f1_op])
# Threshold 0 will have around 0.5 precision and 1 recall yielding an F1
# score of 2 * 0.5 * 1 / (1 + 0.5).
self.assertAlmostEqual(2 * 0.5 * 1 / (1 + 0.5), f1.eval())
def testAllIncorrect(self):
inputs = np.random.randint(0, 2, size=(10000, 1))
with self.test_session() as sess:
predictions = constant_op.constant(inputs, dtype=dtypes.float32)
labels = constant_op.constant(1 - inputs, dtype=dtypes.float32)
f1, f1_op = classification.f1_score(predictions, labels, num_thresholds=3)
sess.run(variables.local_variables_initializer())
sess.run([f1_op])
# Threshold 0 will have around 0.5 precision and 1 recall yielding an F1
# score of 2 * 0.5 * 1 / (1 + 0.5).
self.assertAlmostEqual(2 * 0.5 * 1 / (1 + 0.5), f1.eval(), places=2)
def testWeights1d(self):
with self.test_session() as sess:
predictions = constant_op.constant(
[[1, 0], [1, 0]], shape=(2, 2), dtype=dtypes.float32)
labels = constant_op.constant([[0, 1], [1, 0]], shape=(2, 2))
weights = constant_op.constant(
[[0], [1]], shape=(2, 1), dtype=dtypes.float32)
f1, f1_op = classification.f1_score(predictions, labels, weights,
num_thresholds=3)
sess.run(variables.local_variables_initializer())
sess.run([f1_op])
self.assertAlmostEqual(1.0, f1.eval(), places=5)
def testWeights2d(self):
with self.test_session() as sess:
predictions = constant_op.constant(
[[1, 0], [1, 0]], shape=(2, 2), dtype=dtypes.float32)
labels = constant_op.constant([[0, 1], [1, 0]], shape=(2, 2))
weights = constant_op.constant(
[[0, 0], [1, 1]], shape=(2, 2), dtype=dtypes.float32)
f1, f1_op = classification.f1_score(predictions, labels, weights,
num_thresholds=3)
sess.run(variables.local_variables_initializer())
sess.run([f1_op])
self.assertAlmostEqual(1.0, f1.eval(), places=5)
def testZeroLabelsPredictions(self):
with self.test_session() as sess:
predictions = array_ops.zeros([4], dtype=dtypes.float32)
labels = array_ops.zeros([4])
f1, f1_op = classification.f1_score(predictions, labels, num_thresholds=3)
sess.run(variables.local_variables_initializer())
sess.run([f1_op])
self.assertAlmostEqual(0.0, f1.eval(), places=5)
  def testWithMultipleUpdates(self):
    """Streaming F1 over many batches matches a NumPy reference computation.

    Generates noisy predictions for random binary labels, computes the best
    F1 over a fixed threshold list by hand, then feeds the same data through
    the metric in `num_batches` update steps and compares the results.
    """
    num_samples = 1000
    batch_size = 10
    num_batches = int(num_samples / batch_size)
    # Create the labels and data.
    labels = np.random.randint(0, 2, size=(num_samples, 1))
    noise = np.random.normal(0.0, scale=0.2, size=(num_samples, 1))
    # Predictions correlate with labels (0.4 vs 0.6 mean) plus Gaussian noise,
    # clipped into [0, 1].
    predictions = 0.4 + 0.2 * labels + noise
    predictions[predictions > 1] = 1
    predictions[predictions < 0] = 0
    # Thresholds mirror what f1_score uses internally for num_thresholds=3.
    thresholds = [-0.01, 0.5, 1.01]
    # Reference: brute-force confusion counts per threshold, keep the max F1.
    expected_max_f1 = -1.0
    for threshold in thresholds:
      tp = 0
      fp = 0
      fn = 0
      tn = 0
      for i in range(num_samples):
        if predictions[i] >= threshold:
          if labels[i] == 1:
            tp += 1
          else:
            fp += 1
        else:
          if labels[i] == 1:
            fn += 1
          else:
            tn += 1
      # Same epsilon-guarded formulas as the metric implementation.
      epsilon = 1e-7
      expected_prec = tp / (epsilon + tp + fp)
      expected_rec = tp / (epsilon + tp + fn)
      expected_f1 = (2 * expected_prec * expected_rec /
                     (epsilon + expected_prec + expected_rec))
      if expected_f1 > expected_max_f1:
        expected_max_f1 = expected_f1
    labels = labels.astype(np.float32)
    predictions = predictions.astype(np.float32)
    # Stream the same data in batches through a one-shot iterator.
    tf_predictions, tf_labels = (dataset_ops.Dataset
                                 .from_tensor_slices((predictions, labels))
                                 .repeat()
                                 .batch(batch_size)
                                 .make_one_shot_iterator()
                                 .get_next())
    # NOTE(review): argument order here is (labels, predictions) while the
    # other tests in this file pass (predictions, labels) -- for 0/1 data F1
    # happens to be symmetric under that swap, but verify against the
    # f1_score signature.
    f1, f1_op = classification.f1_score(tf_labels, tf_predictions,
                                        num_thresholds=3)
    with self.test_session() as sess:
      sess.run(variables.local_variables_initializer())
      for _ in range(num_batches):
        sess.run([f1_op])
      # Since this is only approximate, we can't expect a 6 digits match.
      # Although with higher number of samples/thresholds we should see the
      # accuracy improving
      self.assertAlmostEqual(expected_max_f1, f1.eval(), 2)
# Standard TF test entry point: discovers and runs the test cases above.
if __name__ == '__main__':
  test.main()
| apache-2.0 |
florian-dacosta/OCB | addons/stock_account/wizard/stock_invoice_onshipping.py | 120 | 6111 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp.osv import fields, osv
from openerp.tools.translate import _
class stock_invoice_onshipping(osv.osv_memory):
    """Transient wizard that creates invoices from selected stock pickings.

    Launched on one or more ``stock.picking`` records (the ``active_ids`` in
    the context); infers the journal type from the picking direction and the
    source/destination location usage, then delegates the actual invoice
    creation to ``stock.picking.action_invoice_create``.
    """
    def _get_journal(self, cr, uid, context=None):
        """Default journal: first account.journal matching the inferred type."""
        journal_obj = self.pool.get('account.journal')
        journal_type = self._get_journal_type(cr, uid, context=context)
        journals = journal_obj.search(cr, uid, [('type', '=', journal_type)])
        # Old-style "x and y or z" ternary: False when no journal was found.
        return journals and journals[0] or False
    def _get_journal_type(self, cr, uid, context=None):
        """Map the first selected picking to a journal type.

        outgoing->supplier: purchase refund; outgoing->customer: sale;
        incoming<-supplier: purchase; incoming<-customer: sale refund;
        anything else (or no moves): 'sale'.
        """
        if context is None:
            context = {}
        res_ids = context and context.get('active_ids', [])
        pick_obj = self.pool.get('stock.picking')
        pickings = pick_obj.browse(cr, uid, res_ids, context=context)
        # NOTE(review): 'vals' is never used -- presumably leftover code.
        vals = []
        pick = pickings and pickings[0]
        if not pick or not pick.move_lines:
            return 'sale'
        # Only the first move line decides the direction for the whole batch.
        src_usage = pick.move_lines[0].location_id.usage
        dest_usage = pick.move_lines[0].location_dest_id.usage
        # NOTE(review): 'type' shadows the builtin; kept for compatibility.
        type = pick.picking_type_id.code
        if type == 'outgoing' and dest_usage == 'supplier':
            journal_type = 'purchase_refund'
        elif type == 'outgoing' and dest_usage == 'customer':
            journal_type = 'sale'
        elif type == 'incoming' and src_usage == 'supplier':
            journal_type = 'purchase'
        elif type == 'incoming' and src_usage == 'customer':
            journal_type = 'sale_refund'
        else:
            journal_type = 'sale'
        return journal_type
    _name = "stock.invoice.onshipping"
    _description = "Stock Invoice Onshipping"
    _columns = {
        'journal_id': fields.many2one('account.journal', 'Destination Journal', required=True),
        'journal_type': fields.selection([('purchase_refund', 'Refund Purchase'), ('purchase', 'Create Supplier Invoice'),
            ('sale_refund', 'Refund Sale'), ('sale', 'Create Customer Invoice')], 'Journal Type', readonly=True),
        'group': fields.boolean("Group by partner"),
        'invoice_date': fields.date('Invoice Date'),
    }
    _defaults = {
        'journal_type': _get_journal_type,
        'journal_id' : _get_journal,
    }
    def view_init(self, cr, uid, fields_list, context=None):
        """Abort wizard opening when none of the pickings is invoiceable."""
        if context is None:
            context = {}
        res = super(stock_invoice_onshipping, self).view_init(cr, uid, fields_list, context=context)
        pick_obj = self.pool.get('stock.picking')
        count = 0
        active_ids = context.get('active_ids',[])
        for pick in pick_obj.browse(cr, uid, active_ids, context=context):
            if pick.invoice_state != '2binvoiced':
                count += 1
        # All selected pickings already invoiced / not invoiceable -> warn.
        if len(active_ids) == count:
            raise osv.except_osv(_('Warning!'), _('None of these picking lists require invoicing.'))
        return res
    def open_invoice(self, cr, uid, ids, context=None):
        """Create the invoices, then return the act_window showing them.

        Picks the invoice list action (out/in invoice or refund) matching the
        wizard's journal type and restricts its domain to the new invoices.
        """
        if context is None:
            context = {}
        invoice_ids = self.create_invoice(cr, uid, ids, context=context)
        if not invoice_ids:
            raise osv.except_osv(_('Error!'), _('No invoice created!'))
        data = self.browse(cr, uid, ids[0], context=context)
        action_model = False
        action = {}
        journal2type = {'sale':'out_invoice', 'purchase':'in_invoice' , 'sale_refund':'out_refund', 'purchase_refund':'in_refund'}
        inv_type = journal2type.get(data.journal_type) or 'out_invoice'
        data_pool = self.pool.get('ir.model.data')
        if inv_type == "out_invoice":
            action_id = data_pool.xmlid_to_res_id(cr, uid, 'account.action_invoice_tree1')
        elif inv_type == "in_invoice":
            action_id = data_pool.xmlid_to_res_id(cr, uid, 'account.action_invoice_tree2')
        elif inv_type == "out_refund":
            action_id = data_pool.xmlid_to_res_id(cr, uid, 'account.action_invoice_tree3')
        elif inv_type == "in_refund":
            action_id = data_pool.xmlid_to_res_id(cr, uid, 'account.action_invoice_tree4')
        if action_id:
            action_pool = self.pool['ir.actions.act_window']
            action = action_pool.read(cr, uid, action_id, context=context)
            # Restrict the list view to the invoices just created.
            action['domain'] = "[('id','in', ["+','.join(map(str,invoice_ids))+"])]"
            return action
        return True
    def create_invoice(self, cr, uid, ids, context=None):
        """Create invoices for the context's pickings; return their ids."""
        context = dict(context or {})
        picking_pool = self.pool.get('stock.picking')
        data = self.browse(cr, uid, ids[0], context=context)
        journal2type = {'sale':'out_invoice', 'purchase':'in_invoice', 'sale_refund':'out_refund', 'purchase_refund':'in_refund'}
        context['date_inv'] = data.invoice_date
        acc_journal = self.pool.get("account.journal")
        inv_type = journal2type.get(data.journal_type) or 'out_invoice'
        context['inv_type'] = inv_type
        active_ids = context.get('active_ids', [])
        res = picking_pool.action_invoice_create(cr, uid, active_ids,
              journal_id = data.journal_id.id,
              group = data.group,
              type = inv_type,
              context=context)
        return res
| agpl-3.0 |
watspidererik/testenv | flask/lib/python2.7/site-packages/coverage/files.py | 209 | 10724 | """File wrangling."""
from coverage.backward import to_string
from coverage.misc import CoverageException
import fnmatch, os, os.path, re, sys
import ntpath, posixpath
class FileLocator(object):
    """Understand how filenames work.

    Resolves filenames relative to the directory current at construction
    time, canonicalizes them (with caching), and can read source out of
    zip/egg archives.
    """
    def __init__(self):
        # The absolute path to our current directory.
        self.relative_dir = os.path.normcase(abs_file(os.curdir) + os.sep)
        # Cache of results of calling the canonical_filename() method, to
        # avoid duplicating work.
        self.canonical_filename_cache = {}
    def relative_filename(self, filename):
        """Return the relative form of `filename`.
        The filename will be relative to the current directory when the
        `FileLocator` was constructed.
        """
        fnorm = os.path.normcase(filename)
        if fnorm.startswith(self.relative_dir):
            # Strip the stored prefix; keep the original (un-normcased) tail.
            filename = filename[len(self.relative_dir):]
        return filename
    def canonical_filename(self, filename):
        """Return a canonical filename for `filename`.
        An absolute path with no redundant components and normalized case.
        """
        if filename not in self.canonical_filename_cache:
            if not os.path.isabs(filename):
                # Resolve a relative name the same way an import would:
                # first match along os.curdir + sys.path wins.
                for path in [os.curdir] + sys.path:
                    if path is None:
                        continue
                    f = os.path.join(path, filename)
                    if os.path.exists(f):
                        filename = f
                        break
            cf = abs_file(filename)
            self.canonical_filename_cache[filename] = cf
        return self.canonical_filename_cache[filename]
    def get_zip_data(self, filename):
        """Get data from `filename` if it is a zip file path.
        Returns the string data read from the zip file, or None if no zip file
        could be found or `filename` isn't in it. The data returned will be
        an empty string if the file is empty.
        """
        import zipimport
        # A path like ".../thing.zip/pkg/mod.py" is split at the archive
        # boundary; the part before the marker is the archive itself.
        markers = ['.zip'+os.sep, '.egg'+os.sep]
        for marker in markers:
            if marker in filename:
                parts = filename.split(marker)
                try:
                    zi = zipimport.zipimporter(parts[0]+marker[:-1])
                except zipimport.ZipImportError:
                    continue
                try:
                    data = zi.get_data(parts[1])
                except IOError:
                    continue
                return to_string(data)
        return None
if sys.platform == 'win32':
    def actual_path(path):
        """Get the actual path of `path`, including the correct case.

        Windows is case-insensitive, so the same file can be referred to with
        different casings; this recursively rebuilds the path with the casing
        the filesystem actually stores, caching both results and directory
        listings.
        """
        if path in actual_path.cache:
            return actual_path.cache[path]
        head, tail = os.path.split(path)
        if not tail:
            # e.g. a drive root like "C:\\": nothing to correct.
            actpath = head
        elif not head:
            actpath = tail
        else:
            # Fix the parent first, then look for the true casing of `tail`
            # in the parent's directory listing.
            head = actual_path(head)
            if head in actual_path.list_cache:
                files = actual_path.list_cache[head]
            else:
                try:
                    files = os.listdir(head)
                except OSError:
                    files = []
                actual_path.list_cache[head] = files
            normtail = os.path.normcase(tail)
            for f in files:
                if os.path.normcase(f) == normtail:
                    tail = f
                    break
            actpath = os.path.join(head, tail)
        actual_path.cache[path] = actpath
        return actpath
    # Function attributes used as per-process memoization caches.
    actual_path.cache = {}
    actual_path.list_cache = {}
else:
    def actual_path(filename):
        """The actual path for non-Windows platforms."""
        return filename
def abs_file(filename):
    """Return the absolute normalized form of `filename`."""
    # Expand ~user and $VAR references, then resolve symlinks and relative
    # components, and finally fix the on-disk casing (Windows).
    expanded = os.path.expandvars(os.path.expanduser(filename))
    return actual_path(os.path.abspath(os.path.realpath(expanded)))
def isabs_anywhere(filename):
    """Is `filename` an absolute path on any OS (POSIX or Windows)?"""
    return posixpath.isabs(filename) or ntpath.isabs(filename)
def prep_patterns(patterns):
    """Prepare the file patterns for use in a `FnmatchMatcher`.

    A pattern that starts with a wildcard is kept as-is; any other pattern
    is made absolute against the current directory.

    If `patterns` is None, an empty list is returned.
    """
    prepared = []
    for pattern in (patterns or []):
        # '' [:1] is '' which matches neither wildcard, same as startswith.
        if pattern[:1] in ("*", "?"):
            prepared.append(pattern)
        else:
            prepared.append(abs_file(pattern))
    return prepared
class TreeMatcher(object):
    """A matcher for files contained in any of a set of directory trees."""
    def __init__(self, directories):
        # Copy so later mutation of the caller's list doesn't affect us.
        self.dirs = list(directories)
    def __repr__(self):
        return "<TreeMatcher %r>" % self.dirs
    def info(self):
        """A list of strings for displaying when dumping state."""
        return self.dirs
    def add(self, directory):
        """Add another directory to the list we match for."""
        self.dirs.append(directory)
    def match(self, fpath):
        """Does `fpath` indicate a file in one of our trees?"""
        for tree in self.dirs:
            if not fpath.startswith(tree):
                continue
            # Either the tree root itself, or a path one separator below it
            # (guards against '/treeX' matching '/tree').
            if fpath == tree or fpath[len(tree)] == os.sep:
                return True
        return False
class FnmatchMatcher(object):
    """A matcher for files by filename pattern (fnmatch-style)."""
    def __init__(self, pats):
        # Copy so later mutation of the caller's list doesn't affect us.
        self.pats = list(pats)
    def __repr__(self):
        return "<FnmatchMatcher %r>" % self.pats
    def info(self):
        """A list of strings for displaying when dumping state."""
        return self.pats
    def match(self, fpath):
        """Does `fpath` match one of our filename patterns?"""
        return any(fnmatch.fnmatch(fpath, pat) for pat in self.pats)
def sep(s):
    """Find the path separator used in this string, or os.sep if none."""
    found = re.search(r"[\\/]", s)
    return found.group(0) if found else os.sep
class PathAliases(object):
    """A collection of aliases for paths.
    When combining data files from remote machines, often the paths to source
    code are different, for example, due to OS differences, or because of
    serialized checkouts on continuous integration machines.
    A `PathAliases` object tracks a list of pattern/result pairs, and can
    map a path through those aliases to produce a unified path.
    `locator` is a FileLocator that is used to canonicalize the results.
    """
    def __init__(self, locator=None):
        # List of (compiled_regex, result, pattern_sep, result_sep) tuples.
        self.aliases = []
        self.locator = locator
    def add(self, pattern, result):
        """Add the `pattern`/`result` pair to the list of aliases.
        `pattern` is an `fnmatch`-style pattern. `result` is a simple
        string. When mapping paths, if a path starts with a match against
        `pattern`, then that match is replaced with `result`. This models
        isomorphic source trees being rooted at different places on two
        different machines.
        `pattern` can't end with a wildcard component, since that would
        match an entire tree, and not just its root.
        """
        # The pattern can't end with a wildcard component.
        pattern = pattern.rstrip(r"\/")
        if pattern.endswith("*"):
            raise CoverageException("Pattern must not end with wildcards.")
        pattern_sep = sep(pattern)
        # The pattern is meant to match a filepath. Let's make it absolute
        # unless it already is, or is meant to match any prefix.
        if not pattern.startswith('*') and not isabs_anywhere(pattern):
            pattern = abs_file(pattern)
        pattern += pattern_sep
        # Make a regex from the pattern. fnmatch always adds a \Z or $ to
        # match the whole string, which we don't want.
        regex_pat = fnmatch.translate(pattern).replace(r'\Z(', '(')
        if regex_pat.endswith("$"):
            regex_pat = regex_pat[:-1]
        # We want */a/b.py to match on Windows too, so change slash to match
        # either separator.
        regex_pat = regex_pat.replace(r"\/", r"[\\/]")
        # We want case-insensitive matching, so add that flag.
        regex = re.compile(r"(?i)" + regex_pat)
        # Normalize the result: it must end with a path separator.
        result_sep = sep(result)
        result = result.rstrip(r"\/") + result_sep
        self.aliases.append((regex, result, pattern_sep, result_sep))
    def map(self, path):
        """Map `path` through the aliases.
        `path` is checked against all of the patterns. The first pattern to
        match is used to replace the root of the path with the result root.
        Only one pattern is ever used. If no patterns match, `path` is
        returned unchanged.
        The separator style in the result is made to match that of the result
        in the alias.
        """
        for regex, result, pattern_sep, result_sep in self.aliases:
            m = regex.match(path)
            if m:
                new = path.replace(m.group(0), result)
                if pattern_sep != result_sep:
                    # Rewrite the remaining tail with the result's separator.
                    new = new.replace(pattern_sep, result_sep)
                if self.locator:
                    new = self.locator.canonical_filename(new)
                return new
        return path
def find_python_files(dirname):
    """Yield all of the importable Python files in `dirname`, recursively.

    To be importable, the files have to be in a directory with a
    __init__.py, except for `dirname` itself, which isn't required to have
    one: the assumption is that `dirname` was specified directly, so the
    user knows best, but subdirectories are checked for a __init__.py to be
    sure we only find the importable files.
    """
    # Reasonable Python file: ends with .py/.pyw, no editor-junk characters.
    py_file_rx = re.compile(r"^[^.#~!$@%^&*()+=,]+\.pyw?$")
    for walk_num, (dirpath, dirnames, filenames) in enumerate(os.walk(dirname)):
        if walk_num > 0 and '__init__.py' not in filenames:
            # Not a package: prune the whole subtree from the walk.
            del dirnames[:]
            continue
        for fname in filenames:
            if py_file_rx.match(fname):
                yield os.path.join(dirpath, fname)
| mit |
parheliamm/mosquitto-1.3.4 | test/lib/03-publish-qos0.py | 19 | 2097 | #!/usr/bin/env python
# Test whether a client sends a correct PUBLISH to a topic with QoS 0.
# The client should connect to port 1888 with keepalive=60, clean session set,
# and client id publish-qos0-test
# The test will send a CONNACK message to the client with rc=0. Upon receiving
# the CONNACK and verifying that rc=0, the client should send a PUBLISH message
# to topic "pub/qos0/test" with payload "message" and QoS=0. If rc!=0, the
# client should exit with an error.
# After sending the PUBLISH message, the client should send a DISCONNECT message.
import inspect
import os
import subprocess
import socket
import sys
import time
# From http://stackoverflow.com/questions/279237/python-import-a-module-from-a-folder
cmd_subfolder = os.path.realpath(os.path.abspath(os.path.join(os.path.split(inspect.getfile( inspect.currentframe() ))[0],"..")))
if cmd_subfolder not in sys.path:
sys.path.insert(0, cmd_subfolder)
import mosq_test
# Exit code: 1 (failure) unless the full CONNECT/PUBLISH/DISCONNECT exchange
# is observed, in which case it is set to 0 below.
rc = 1
keepalive = 60
# Expected wire-level packets for the exchange described in the header.
connect_packet = mosq_test.gen_connect("publish-qos0-test", keepalive=keepalive)
connack_packet = mosq_test.gen_connack(rc=0)
publish_packet = mosq_test.gen_publish("pub/qos0/test", qos=0, payload="message")
disconnect_packet = mosq_test.gen_disconnect()
# Act as a fake broker: listen on port 1888 and wait for the client library.
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
sock.settimeout(10)
sock.bind(('', 1888))
sock.listen(5)
# Launch the client under test with library paths pointing at the build tree.
client_args = sys.argv[1:]
env = dict(os.environ)
env['LD_LIBRARY_PATH'] = '../../lib:../../lib/cpp'
try:
    pp = env['PYTHONPATH']
except KeyError:
    pp = ''
env['PYTHONPATH'] = '../../lib/python:'+pp
client = subprocess.Popen(client_args, env=env)
try:
    (conn, address) = sock.accept()
    conn.settimeout(10)
    # Verify the packet sequence: CONNECT -> (CONNACK) -> PUBLISH -> DISCONNECT.
    if mosq_test.expect_packet(conn, "connect", connect_packet):
        conn.send(connack_packet)
        if mosq_test.expect_packet(conn, "publish", publish_packet):
            if mosq_test.expect_packet(conn, "disconnect", disconnect_packet):
                rc = 0
    conn.close()
finally:
    # Always reap the client and release the port, even on failure.
    client.terminate()
    client.wait()
    sock.close()
exit(rc)
| bsd-3-clause |
ximion/dak | dak/dakdb/update93.py | 7 | 2429 | #!/usr/bin/env python
# coding=utf8
"""
update world.files-1 view to handle backports archive on ftp-master
@contact: Debian FTP Master <ftpmaster@debian.org>
@copyright: 2012 Ansgar Burchardt <ansgar@debian.org>
@license: GNU General Public License version 2 or later
"""
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
################################################################################
import psycopg2
from daklib.dak_exceptions import DBUpdateError
from daklib.config import Config
################################################################################
def do_update(self):
    # NOTE: Python 2 module ('print' statement below); runs as a dak
    # database-schema migration step.
    print __doc__
    try:
        cnf = Config()
        c = self.db.cursor()
        # Recreate the world."files-1" view so it also covers the
        # 'backports' archive (picking one archive by the fixed priority
        # backports < ftp-master < security via ORDER BY id LIMIT 1).
        c.execute("""
            CREATE OR REPLACE VIEW world."files-1" AS
              SELECT
                files.id AS id,
                component.name || '/' || files.filename AS filename,
                files.size AS size,
                files.md5sum AS md5sum,
                files.sha1sum AS sha1sum,
                files.sha256sum AS sha256sum,
                files.last_used AS last_used,
                files.created AS created,
                files.modified AS modified
              FROM files
              JOIN files_archive_map fam ON files.id = fam.file_id
              JOIN component ON fam.component_id = component.id
              WHERE fam.archive_id = (SELECT id FROM archive WHERE name IN ('backports', 'ftp-master', 'security') ORDER BY id LIMIT 1)
            """)
        # Bump the schema revision marker so this update runs only once.
        c.execute("UPDATE config SET value = '93' WHERE name = 'db_revision'")
        self.db.commit()
    except psycopg2.ProgrammingError as msg:
        # Roll back the partial transaction and surface a dak-level error.
        self.db.rollback()
        raise DBUpdateError('Unable to apply sick update 93, rollback issued. Error message: {0}'.format(msg))
| gpl-2.0 |
ProfessorX/Config | .PyCharm30/system/python_stubs/-1247972723/PyQt4/QtNetwork/QNetworkProxyFactory.py | 2 | 1807 | # encoding: utf-8
# module PyQt4.QtNetwork
# from /usr/lib/python2.7/dist-packages/PyQt4/QtNetwork.so
# by generator 1.135
# no doc
# imports
import PyQt4.QtCore as __PyQt4_QtCore
# Auto-generated IDE stub: method bodies are placeholders ('pass'); the real
# implementation lives in the compiled PyQt4.QtNetwork extension module.
class QNetworkProxyFactory(): # skipped bases: <type 'sip.wrapper'>
    """
    QNetworkProxyFactory()
    QNetworkProxyFactory(QNetworkProxyFactory)
    """
    def proxyForQuery(self, QNetworkProxyQuery): # real signature unknown; restored from __doc__
        """ QNetworkProxyFactory.proxyForQuery(QNetworkProxyQuery) -> list-of-QNetworkProxy """
        pass
    def queryProxy(self, QNetworkProxyQuery_query=None, *args, **kwargs): # real signature unknown; NOTE: unreliably restored from __doc__
        """ QNetworkProxyFactory.queryProxy(QNetworkProxyQuery query=QNetworkProxyQuery()) -> list-of-QNetworkProxy """
        pass
    def setApplicationProxyFactory(self, QNetworkProxyFactory): # real signature unknown; restored from __doc__
        """ QNetworkProxyFactory.setApplicationProxyFactory(QNetworkProxyFactory) """
        pass
    def setUseSystemConfiguration(self, bool): # real signature unknown; restored from __doc__
        """ QNetworkProxyFactory.setUseSystemConfiguration(bool) """
        pass
    def systemProxyForQuery(self, QNetworkProxyQuery_query=None, *args, **kwargs): # real signature unknown; NOTE: unreliably restored from __doc__
        """ QNetworkProxyFactory.systemProxyForQuery(QNetworkProxyQuery query=QNetworkProxyQuery()) -> list-of-QNetworkProxy """
        pass
    def __init__(self, QNetworkProxyFactory=None): # real signature unknown; restored from __doc__ with multiple overloads
        pass
    __weakref__ = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
    """list of weak references to the object (if defined)"""
| gpl-2.0 |
WangHong-yang/wand | wand/font.py | 5 | 3231 | """:mod:`wand.font` --- Fonts
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
.. versionadded:: 0.3.0
:class:`Font` is an object which takes the :attr:`~Font.path` of font file,
:attr:`~Font.size`, :attr:`~Font.color`, and whether to use
:attr:`~Font.antialias`\ ing. If you want to use font by its name rather
than the file path, use TTFQuery_ package. The font path resolution by its
name is a very complicated problem to achieve.
.. seealso::
TTFQuery_ --- Find and Extract Information from TTF Files
TTFQuery builds on the `FontTools-TTX`_ package to allow the Python
programmer to accomplish a number of tasks:
- query the system to find installed fonts
- retrieve metadata about any TTF font file
- this includes the glyph outlines (shape) of individual code-points,
which allows for rendering the glyphs in 3D (such as is done in
OpenGLContext)
- lookup/find fonts by:
- abstract family type
- proper font name
- build simple metadata registries for run-time font matching
.. _TTFQuery: http://ttfquery.sourceforge.net/
.. _FontTools-TTX: http://sourceforge.net/projects/fonttools/
"""
import numbers
from .color import Color
from .compat import string_type, text
__all__ = 'Font',
class Font(tuple):
    """Font struct which is a subtype of :class:`tuple`.
    :param path: the path of the font file
    :type path: :class:`str`, :class:`basestring`
    :param size: the size of typeface. 0 by default which means *autosized*
    :type size: :class:`numbers.Real`
    :param color: the color of typeface. black by default
    :type color: :class:`~wand.color.Color`
    :param antialias: whether to use antialiasing. :const:`True` by default
    :type antialias: :class:`bool`
    .. versionchanged:: 0.3.9
       The ``size`` parameter becomes optional.  Its default value is
       0, which means *autosized*.
    """
    # Tuple layout: (path, size, color, antialias); the properties below
    # simply index into it, which keeps Font immutable and hashable.
    def __new__(cls, path, size=0, color=None, antialias=True):
        # Validate eagerly so a bad Font fails at construction time rather
        # than later when it is used for drawing.
        if not isinstance(path, string_type):
            raise TypeError('path must be a string, not ' + repr(path))
        if not isinstance(size, numbers.Real):
            raise TypeError('size must be a real number, not ' + repr(size))
        if color is None:
            color = Color('black')
        elif not isinstance(color, Color):
            raise TypeError('color must be an instance of wand.color.Color, '
                            'not ' + repr(color))
        # Normalize to text (unicode on py2) for consistent comparisons.
        path = text(path)
        return tuple.__new__(cls, (path, size, color, bool(antialias)))
    @property
    def path(self):
        """(:class:`basestring`) The path of font file."""
        return self[0]
    @property
    def size(self):
        """(:class:`numbers.Real`) The font size in pixels."""
        return self[1]
    @property
    def color(self):
        """(:class:`wand.color.Color`) The font color."""
        return self[2]
    @property
    def antialias(self):
        """(:class:`bool`) Whether to apply antialiasing (``True``)
        or not (``False``).
        """
        return self[3]
    def __repr__(self):
        return '{0.__module__}.{0.__name__}({1})'.format(
            type(self),
            tuple.__repr__(self)
        )
| mit |
adrianschlatter/pyDynamics | tanuna/examples/laser.py | 1 | 6870 | # -*- coding: utf-8 -*-
"""
Model of a passively mode-locked laser.
@author: Adrian Schlatter
"""
import numpy as np
from scipy.optimize import brentq
from scipy.integrate import ode
from ..root import CT_LTI_System
class Laser(ode):
    """A class to simulate lasers with a (slow) saturable absorber in the
    cavity. While it is intended for mode-locked lasers, it may also be useful
    for Q-switched lasers.

    The state vector is s = [P, g], where P is the intracavity power and g
    the round-trip gain.  The rate equations (methods :meth:`Pdot` /
    :meth:`gdot`) are integrated with scipy's 'dopri5' integrator.

    Parameters (presumably in SI units -- verify against the caller):
    loss -- linear (non-saturable) cavity loss per round trip
    TR -- cavity round-trip time
    tauL -- gain relaxation time constant
    etaP -- pump efficiency
    EsatL -- gain saturation energy
    DR -- modulation depth of the saturable absorber
    EsatA -- absorber saturation energy
    Toc -- output-coupler transmission
    PP0 -- initial pump power
    P0, g0 -- optional initial power / gain; default to the steady state
    corresponding to PP0.
    """
    def __init__(self, loss, TR, tauL, etaP, EsatL, DR, EsatA, Toc, PP0,
                 P0=None, g0=None):
        self.loss = loss
        self.TR = TR
        self.tauL = tauL
        self.etaP = etaP
        self.EsatL = EsatL
        self.DR = DR
        self.EsatA = EsatA
        self.Toc = Toc
        self.PP0 = PP0
        # Current pump power; may be changed by the user during a simulation.
        self.PP = PP0
        # Default the initial state to the steady state at PP0.
        if P0 is None:
            P0 = self.Psteady()
        if g0 is None:
            g0 = self.gsteady()
        self.g0 = g0
        super().__init__(self.f, jac=self.grad_f)
        t0 = 0.
        self.set_initial_value([P0, g0], t0)
        self.set_integrator('dopri5')

    def qP(self, EP):
        """Saturable (slow-absorber) loss for pulse energy EP.

        qP = DR / S * (1 - exp(-S)) with S = EP / EsatA; the S -> 0 limit is
        DR.  Accepts scalars or arrays (np.where evaluates both branches, so
        a divide warning may be emitted for S == 0 entries).
        """
        S = EP / self.EsatA
        res = np.where(S == 0,
                       self.DR,
                       self.DR / S * (1. - np.exp(-S)))
        # Unwrap single-element arrays back to a scalar.
        if res.shape == (1,):
            res = res[0]
        return(res)

    def dqP_dEP(self, EP):
        """Derivative of qP with respect to pulse energy EP.

        NOTE(review): the EP == 0 branch returns +DR/EsatA; the analytic
        limit of the derivative at 0 is -DR/(2*EsatA) -- confirm intent.
        """
        EsatA = self.EsatA
        S = EP / EsatA
        if S == 0:
            return(self.DR / self.EsatA)
        else:
            return(self.DR / EP * (np.exp(-S) - 1. / S + np.exp(-S) / S))

    def Pdot(self, P, g):
        """Rate of change of intracavity power P (zero when P <= 0)."""
        P = np.array(P)
        g = np.array(g)
        EP = P * self.TR
        return(np.where(P > 0, (g - self.loss - self.qP(EP)) / self.TR * P,
                        np.zeros(P.shape)))

    def gdot(self, P, g):
        """Rate of change of the gain g: relaxation + stimulated emission
        + pumping."""
        spontaneous = (self.g0 - g) / self.tauL
        stimulated = -P * g / self.EsatL
        pump = self.etaP * self.PP / self.EsatL
        return(spontaneous + stimulated + pump)

    def f(self, t, s):
        """Right-hand side of the ODE system for state s = [P, g]."""
        P, g = s
        sdot = [self.Pdot(P, g), self.gdot(P, g)]
        return(sdot)

    def grad_f(self, t, s):
        """Jacobian of :meth:`f` with respect to the state [P, g]."""
        P, g = s
        loss = self.loss
        TR = self.TR
        EP = P * TR
        qP = self.qP(EP)
        EsatL = self.EsatL
        tauL = self.tauL
        dfP_dP = (g - loss - qP) / TR - EP * self.dqP_dEP(EP)
        dfP_dg = P / TR
        dfg_dP = g / EsatL
        dfg_dg = -1 / tauL - P / EsatL
        return([[dfP_dP, dfP_dg], [dfg_dP, dfg_dg]])

    @property
    def pumpThreshold(self):
        """Pump power threshold, i.e., pump power needed to start lasing"""
        EsatL, tauL, etaP = self.EsatL, self.tauL, self.etaP
        loss, DR = self.loss, self.DR
        return(EsatL / tauL * (loss + DR) / etaP)

    def steadystate(self, Ppump=None):
        """Steady state (Psteady, gsteady) given pump power Ppump"""
        if Ppump is None:
            Ppump = self.PP
        EsatL, TR, tauL = self.EsatL, self.TR, self.tauL
        loss, DR, etaP = self.loss, self.DR, self.etaP
        PPthreshold = self.pumpThreshold
        if Ppump <= PPthreshold:
            # Below threshold: no intracavity power, gain set by the pump.
            Psteady = 0.
            gsteady = etaP * Ppump * tauL / EsatL
        else:
            # 1. determine boundaries for Psteady:
            # 2. Apply root-finder (brentq) given boundaries
            offs = EsatL / tauL
            # assume non-linear losses (qP(EP)) = 0:
            upperBound = -offs + Ppump * etaP / loss
            # assume max. non-linear losses (qP(EP) = DR):
            lowerBound = -offs + Ppump * etaP / (loss + DR)
            Psteady = brentq(lambda P: -P - EsatL / tauL + etaP * Ppump /
                             (loss + self.qP(P*TR)), lowerBound, upperBound)
            gsteady = loss + self.qP(Psteady * TR)
        return(Psteady, gsteady)

    def Psteady(self, Ppump=None):
        """Steady-state intracavity power given pump power Ppump"""
        return(self.steadystate(Ppump)[0])

    def gsteady(self, Ppump=None):
        """Steady-state gain given pump power Ppump"""
        return(self.steadystate(Ppump)[1])

    def w0(self, Ppump=None):
        """Returns natural angular frequency of disturbances around steady
        state. Steady state is determined from pump power Ppump."""
        EsatL, TR, tauL = self.EsatL, self.TR, self.tauL
        Pst, gst = self.steadystate(Ppump)
        r = Pst / EsatL
        w0 = np.sqrt(r * gst / TR +
                     Pst * self.dqP_dEP(Pst * TR) * (1. / tauL + r))
        return(w0)

    def alpha(self, Ppump=None):
        """Damping rate of relaxation oscillations (negative real part of
        poles). The nice thing about alpha is that it is also correct below
        the lasing threshold (where it is equal to 1 / tauL)."""
        EsatL, TR, tauL = self.EsatL, self.TR, self.tauL
        Pst, gst = self.steadystate(Ppump)
        a = (1. / tauL + Pst * (self.dqP_dEP(Pst * TR) + 1. / EsatL))
        return(a)

    def zeta(self, Ppump=None):
        """Damping ratio of relaxation oscillations.

        NOTE(review): divides by w0, which is 0 below threshold -- callers
        should only use this above threshold.
        """
        return(self.alpha(Ppump) / 2. / self.w0(Ppump))

    def rho(self, Ppump=None):
        """Internal slope efficiency at pump power Ppump"""
        etaP, EsatL, TR = self.etaP, self.EsatL, self.TR
        return(self.Psteady(Ppump) * etaP / (EsatL * TR * self.w0(Ppump)**2))

    @property
    def stable(self):
        """Return true if laser is stable (i.e. no Q-switching)"""
        # BUG FIX: zeta is a method, not a property.  The original compared
        # the bound method object itself to 0 ('self.zeta > 0'), which raises
        # TypeError on Python 3.  Call it to obtain the damping ratio.
        return(self.zeta() > 0)

    def approximateLTI(self, Ppump=None):
        """Linearizes the state-equations around the steady state corresponding
        to a pump power Ppump and returns a CT_LTI_System."""
        w0 = self.w0(Ppump)
        zeta = self.zeta(Ppump)
        rho = self.rho(Ppump)
        Toc = self.Toc
        Pst = self.Psteady(Ppump)
        TR = self.TR
        dqPdEP = self.dqP_dEP(self.TR * Pst)
        # M transforms the normalized 2nd-order states back to (dP, dg).
        M = np.matrix([[-Pst * dqPdEP / w0, Pst / TR / w0],
                       [1, 0]])
        A = np.matrix([[-2. * w0 * zeta, -w0],
                       [w0, 0.]])
        B = np.matrix([[w0 * rho],
                       [0.]])
        C = np.matrix([[0., Toc]])
        D = np.matrix([[0.]])
        return(M, CT_LTI_System(A, B, C, D))
class NdYVO4Laser(Laser):
    """A pre-configured example of a passively mode-locked 100 MHz Nd:YVO4
    laser."""
    def __init__(self, Ppump):
        # Physical constants
        c = 3e8
        h = 6.626e-34
        # Gain medium (Nd:YVO4) and cavity parameters
        wavelength = 1064e-9
        sigmaEm = 114e-20*1e-4
        wL = 62e-6
        tauL = 90e-6
        TR = 10e-9
        loss = 9e-2+1.3e-2
        Toc = 8.7e-2
        etaP = 808. / 1064.
        # Saturable absorber parameters
        FsatA = 60e-6/1e-4
        wA = 140e-6
        DR = 1.7e-2
        # Derived saturation energies
        nuL = c / wavelength
        EsatL = np.pi * wL**2 * h * nuL / (2 * sigmaEm)
        EsatA = FsatA*np.pi*wA**2
        super().__init__(loss, TR, tauL, etaP, EsatL, DR, EsatA, Toc,
                         Ppump, P0=None, g0=None)
| bsd-3-clause |
amyvmiwei/kbengine | kbe/res/scripts/common/Lib/encodings/mac_croatian.py | 272 | 13633 | """ Python Character Mapping Codec mac_croatian generated from 'MAPPINGS/VENDORS/APPLE/CROATIAN.TXT' with gencodec.py.
"""#"
import codecs
### Codec APIs
class Codec(codecs.Codec):
    # Stateless codec: both directions are plain charmap lookups against the
    # module-level encoding_table / decoding_table.
    def encode(self,input,errors='strict'):
        return codecs.charmap_encode(input,errors,encoding_table)
    def decode(self,input,errors='strict'):
        return codecs.charmap_decode(input,errors,decoding_table)
class IncrementalEncoder(codecs.IncrementalEncoder):
    # Charmap encoding is stateless, so 'final' can be ignored.
    def encode(self, input, final=False):
        return codecs.charmap_encode(input,self.errors,encoding_table)[0]
class IncrementalDecoder(codecs.IncrementalDecoder):
    # Charmap decoding is stateless, so 'final' can be ignored.
    def decode(self, input, final=False):
        return codecs.charmap_decode(input,self.errors,decoding_table)[0]
# Stream wrapper: inherits encode() from Codec, buffering from StreamWriter.
class StreamWriter(Codec,codecs.StreamWriter):
    pass
# Stream wrapper: inherits decode() from Codec, buffering from StreamReader.
class StreamReader(Codec,codecs.StreamReader):
    pass
### encodings module API
def getregentry():
    """Return the codecs.CodecInfo entry used to register this codec."""
    codec = Codec()
    return codecs.CodecInfo(
        name='mac-croatian',
        encode=codec.encode,
        decode=codec.decode,
        incrementalencoder=IncrementalEncoder,
        incrementaldecoder=IncrementalDecoder,
        streamreader=StreamReader,
        streamwriter=StreamWriter,
    )
### Decoding Table

# 256-entry lookup string: decoding_table[byte] is the Unicode character
# decoded from that byte value.  Written as one implicitly concatenated
# string literal; consumed by codecs.charmap_decode above.  Generated by
# gencodec.py from Apple's CROATIAN.TXT mapping -- do not edit by hand.
decoding_table = (
    '\x00'     #  0x00 -> CONTROL CHARACTER
    '\x01'     #  0x01 -> CONTROL CHARACTER
    '\x02'     #  0x02 -> CONTROL CHARACTER
    '\x03'     #  0x03 -> CONTROL CHARACTER
    '\x04'     #  0x04 -> CONTROL CHARACTER
    '\x05'     #  0x05 -> CONTROL CHARACTER
    '\x06'     #  0x06 -> CONTROL CHARACTER
    '\x07'     #  0x07 -> CONTROL CHARACTER
    '\x08'     #  0x08 -> CONTROL CHARACTER
    '\t'       #  0x09 -> CONTROL CHARACTER
    '\n'       #  0x0A -> CONTROL CHARACTER
    '\x0b'     #  0x0B -> CONTROL CHARACTER
    '\x0c'     #  0x0C -> CONTROL CHARACTER
    '\r'       #  0x0D -> CONTROL CHARACTER
    '\x0e'     #  0x0E -> CONTROL CHARACTER
    '\x0f'     #  0x0F -> CONTROL CHARACTER
    '\x10'     #  0x10 -> CONTROL CHARACTER
    '\x11'     #  0x11 -> CONTROL CHARACTER
    '\x12'     #  0x12 -> CONTROL CHARACTER
    '\x13'     #  0x13 -> CONTROL CHARACTER
    '\x14'     #  0x14 -> CONTROL CHARACTER
    '\x15'     #  0x15 -> CONTROL CHARACTER
    '\x16'     #  0x16 -> CONTROL CHARACTER
    '\x17'     #  0x17 -> CONTROL CHARACTER
    '\x18'     #  0x18 -> CONTROL CHARACTER
    '\x19'     #  0x19 -> CONTROL CHARACTER
    '\x1a'     #  0x1A -> CONTROL CHARACTER
    '\x1b'     #  0x1B -> CONTROL CHARACTER
    '\x1c'     #  0x1C -> CONTROL CHARACTER
    '\x1d'     #  0x1D -> CONTROL CHARACTER
    '\x1e'     #  0x1E -> CONTROL CHARACTER
    '\x1f'     #  0x1F -> CONTROL CHARACTER
    ' '        #  0x20 -> SPACE
    '!'        #  0x21 -> EXCLAMATION MARK
    '"'        #  0x22 -> QUOTATION MARK
    '#'        #  0x23 -> NUMBER SIGN
    '$'        #  0x24 -> DOLLAR SIGN
    '%'        #  0x25 -> PERCENT SIGN
    '&'        #  0x26 -> AMPERSAND
    "'"        #  0x27 -> APOSTROPHE
    '('        #  0x28 -> LEFT PARENTHESIS
    ')'        #  0x29 -> RIGHT PARENTHESIS
    '*'        #  0x2A -> ASTERISK
    '+'        #  0x2B -> PLUS SIGN
    ','        #  0x2C -> COMMA
    '-'        #  0x2D -> HYPHEN-MINUS
    '.'        #  0x2E -> FULL STOP
    '/'        #  0x2F -> SOLIDUS
    '0'        #  0x30 -> DIGIT ZERO
    '1'        #  0x31 -> DIGIT ONE
    '2'        #  0x32 -> DIGIT TWO
    '3'        #  0x33 -> DIGIT THREE
    '4'        #  0x34 -> DIGIT FOUR
    '5'        #  0x35 -> DIGIT FIVE
    '6'        #  0x36 -> DIGIT SIX
    '7'        #  0x37 -> DIGIT SEVEN
    '8'        #  0x38 -> DIGIT EIGHT
    '9'        #  0x39 -> DIGIT NINE
    ':'        #  0x3A -> COLON
    ';'        #  0x3B -> SEMICOLON
    '<'        #  0x3C -> LESS-THAN SIGN
    '='        #  0x3D -> EQUALS SIGN
    '>'        #  0x3E -> GREATER-THAN SIGN
    '?'        #  0x3F -> QUESTION MARK
    '@'        #  0x40 -> COMMERCIAL AT
    'A'        #  0x41 -> LATIN CAPITAL LETTER A
    'B'        #  0x42 -> LATIN CAPITAL LETTER B
    'C'        #  0x43 -> LATIN CAPITAL LETTER C
    'D'        #  0x44 -> LATIN CAPITAL LETTER D
    'E'        #  0x45 -> LATIN CAPITAL LETTER E
    'F'        #  0x46 -> LATIN CAPITAL LETTER F
    'G'        #  0x47 -> LATIN CAPITAL LETTER G
    'H'        #  0x48 -> LATIN CAPITAL LETTER H
    'I'        #  0x49 -> LATIN CAPITAL LETTER I
    'J'        #  0x4A -> LATIN CAPITAL LETTER J
    'K'        #  0x4B -> LATIN CAPITAL LETTER K
    'L'        #  0x4C -> LATIN CAPITAL LETTER L
    'M'        #  0x4D -> LATIN CAPITAL LETTER M
    'N'        #  0x4E -> LATIN CAPITAL LETTER N
    'O'        #  0x4F -> LATIN CAPITAL LETTER O
    'P'        #  0x50 -> LATIN CAPITAL LETTER P
    'Q'        #  0x51 -> LATIN CAPITAL LETTER Q
    'R'        #  0x52 -> LATIN CAPITAL LETTER R
    'S'        #  0x53 -> LATIN CAPITAL LETTER S
    'T'        #  0x54 -> LATIN CAPITAL LETTER T
    'U'        #  0x55 -> LATIN CAPITAL LETTER U
    'V'        #  0x56 -> LATIN CAPITAL LETTER V
    'W'        #  0x57 -> LATIN CAPITAL LETTER W
    'X'        #  0x58 -> LATIN CAPITAL LETTER X
    'Y'        #  0x59 -> LATIN CAPITAL LETTER Y
    'Z'        #  0x5A -> LATIN CAPITAL LETTER Z
    '['        #  0x5B -> LEFT SQUARE BRACKET
    '\\'       #  0x5C -> REVERSE SOLIDUS
    ']'        #  0x5D -> RIGHT SQUARE BRACKET
    '^'        #  0x5E -> CIRCUMFLEX ACCENT
    '_'        #  0x5F -> LOW LINE
    '`'        #  0x60 -> GRAVE ACCENT
    'a'        #  0x61 -> LATIN SMALL LETTER A
    'b'        #  0x62 -> LATIN SMALL LETTER B
    'c'        #  0x63 -> LATIN SMALL LETTER C
    'd'        #  0x64 -> LATIN SMALL LETTER D
    'e'        #  0x65 -> LATIN SMALL LETTER E
    'f'        #  0x66 -> LATIN SMALL LETTER F
    'g'        #  0x67 -> LATIN SMALL LETTER G
    'h'        #  0x68 -> LATIN SMALL LETTER H
    'i'        #  0x69 -> LATIN SMALL LETTER I
    'j'        #  0x6A -> LATIN SMALL LETTER J
    'k'        #  0x6B -> LATIN SMALL LETTER K
    'l'        #  0x6C -> LATIN SMALL LETTER L
    'm'        #  0x6D -> LATIN SMALL LETTER M
    'n'        #  0x6E -> LATIN SMALL LETTER N
    'o'        #  0x6F -> LATIN SMALL LETTER O
    'p'        #  0x70 -> LATIN SMALL LETTER P
    'q'        #  0x71 -> LATIN SMALL LETTER Q
    'r'        #  0x72 -> LATIN SMALL LETTER R
    's'        #  0x73 -> LATIN SMALL LETTER S
    't'        #  0x74 -> LATIN SMALL LETTER T
    'u'        #  0x75 -> LATIN SMALL LETTER U
    'v'        #  0x76 -> LATIN SMALL LETTER V
    'w'        #  0x77 -> LATIN SMALL LETTER W
    'x'        #  0x78 -> LATIN SMALL LETTER X
    'y'        #  0x79 -> LATIN SMALL LETTER Y
    'z'        #  0x7A -> LATIN SMALL LETTER Z
    '{'        #  0x7B -> LEFT CURLY BRACKET
    '|'        #  0x7C -> VERTICAL LINE
    '}'        #  0x7D -> RIGHT CURLY BRACKET
    '~'        #  0x7E -> TILDE
    '\x7f'     #  0x7F -> CONTROL CHARACTER
    # Bytes 0x80-0xFF: the Croatian-specific upper half of the mapping.
    '\xc4'     #  0x80 -> LATIN CAPITAL LETTER A WITH DIAERESIS
    '\xc5'     #  0x81 -> LATIN CAPITAL LETTER A WITH RING ABOVE
    '\xc7'     #  0x82 -> LATIN CAPITAL LETTER C WITH CEDILLA
    '\xc9'     #  0x83 -> LATIN CAPITAL LETTER E WITH ACUTE
    '\xd1'     #  0x84 -> LATIN CAPITAL LETTER N WITH TILDE
    '\xd6'     #  0x85 -> LATIN CAPITAL LETTER O WITH DIAERESIS
    '\xdc'     #  0x86 -> LATIN CAPITAL LETTER U WITH DIAERESIS
    '\xe1'     #  0x87 -> LATIN SMALL LETTER A WITH ACUTE
    '\xe0'     #  0x88 -> LATIN SMALL LETTER A WITH GRAVE
    '\xe2'     #  0x89 -> LATIN SMALL LETTER A WITH CIRCUMFLEX
    '\xe4'     #  0x8A -> LATIN SMALL LETTER A WITH DIAERESIS
    '\xe3'     #  0x8B -> LATIN SMALL LETTER A WITH TILDE
    '\xe5'     #  0x8C -> LATIN SMALL LETTER A WITH RING ABOVE
    '\xe7'     #  0x8D -> LATIN SMALL LETTER C WITH CEDILLA
    '\xe9'     #  0x8E -> LATIN SMALL LETTER E WITH ACUTE
    '\xe8'     #  0x8F -> LATIN SMALL LETTER E WITH GRAVE
    '\xea'     #  0x90 -> LATIN SMALL LETTER E WITH CIRCUMFLEX
    '\xeb'     #  0x91 -> LATIN SMALL LETTER E WITH DIAERESIS
    '\xed'     #  0x92 -> LATIN SMALL LETTER I WITH ACUTE
    '\xec'     #  0x93 -> LATIN SMALL LETTER I WITH GRAVE
    '\xee'     #  0x94 -> LATIN SMALL LETTER I WITH CIRCUMFLEX
    '\xef'     #  0x95 -> LATIN SMALL LETTER I WITH DIAERESIS
    '\xf1'     #  0x96 -> LATIN SMALL LETTER N WITH TILDE
    '\xf3'     #  0x97 -> LATIN SMALL LETTER O WITH ACUTE
    '\xf2'     #  0x98 -> LATIN SMALL LETTER O WITH GRAVE
    '\xf4'     #  0x99 -> LATIN SMALL LETTER O WITH CIRCUMFLEX
    '\xf6'     #  0x9A -> LATIN SMALL LETTER O WITH DIAERESIS
    '\xf5'     #  0x9B -> LATIN SMALL LETTER O WITH TILDE
    '\xfa'     #  0x9C -> LATIN SMALL LETTER U WITH ACUTE
    '\xf9'     #  0x9D -> LATIN SMALL LETTER U WITH GRAVE
    '\xfb'     #  0x9E -> LATIN SMALL LETTER U WITH CIRCUMFLEX
    '\xfc'     #  0x9F -> LATIN SMALL LETTER U WITH DIAERESIS
    '\u2020'   #  0xA0 -> DAGGER
    '\xb0'     #  0xA1 -> DEGREE SIGN
    '\xa2'     #  0xA2 -> CENT SIGN
    '\xa3'     #  0xA3 -> POUND SIGN
    '\xa7'     #  0xA4 -> SECTION SIGN
    '\u2022'   #  0xA5 -> BULLET
    '\xb6'     #  0xA6 -> PILCROW SIGN
    '\xdf'     #  0xA7 -> LATIN SMALL LETTER SHARP S
    '\xae'     #  0xA8 -> REGISTERED SIGN
    '\u0160'   #  0xA9 -> LATIN CAPITAL LETTER S WITH CARON
    '\u2122'   #  0xAA -> TRADE MARK SIGN
    '\xb4'     #  0xAB -> ACUTE ACCENT
    '\xa8'     #  0xAC -> DIAERESIS
    '\u2260'   #  0xAD -> NOT EQUAL TO
    '\u017d'   #  0xAE -> LATIN CAPITAL LETTER Z WITH CARON
    '\xd8'     #  0xAF -> LATIN CAPITAL LETTER O WITH STROKE
    '\u221e'   #  0xB0 -> INFINITY
    '\xb1'     #  0xB1 -> PLUS-MINUS SIGN
    '\u2264'   #  0xB2 -> LESS-THAN OR EQUAL TO
    '\u2265'   #  0xB3 -> GREATER-THAN OR EQUAL TO
    '\u2206'   #  0xB4 -> INCREMENT
    '\xb5'     #  0xB5 -> MICRO SIGN
    '\u2202'   #  0xB6 -> PARTIAL DIFFERENTIAL
    '\u2211'   #  0xB7 -> N-ARY SUMMATION
    '\u220f'   #  0xB8 -> N-ARY PRODUCT
    '\u0161'   #  0xB9 -> LATIN SMALL LETTER S WITH CARON
    '\u222b'   #  0xBA -> INTEGRAL
    '\xaa'     #  0xBB -> FEMININE ORDINAL INDICATOR
    '\xba'     #  0xBC -> MASCULINE ORDINAL INDICATOR
    '\u03a9'   #  0xBD -> GREEK CAPITAL LETTER OMEGA
    '\u017e'   #  0xBE -> LATIN SMALL LETTER Z WITH CARON
    '\xf8'     #  0xBF -> LATIN SMALL LETTER O WITH STROKE
    '\xbf'     #  0xC0 -> INVERTED QUESTION MARK
    '\xa1'     #  0xC1 -> INVERTED EXCLAMATION MARK
    '\xac'     #  0xC2 -> NOT SIGN
    '\u221a'   #  0xC3 -> SQUARE ROOT
    '\u0192'   #  0xC4 -> LATIN SMALL LETTER F WITH HOOK
    '\u2248'   #  0xC5 -> ALMOST EQUAL TO
    '\u0106'   #  0xC6 -> LATIN CAPITAL LETTER C WITH ACUTE
    '\xab'     #  0xC7 -> LEFT-POINTING DOUBLE ANGLE QUOTATION MARK
    '\u010c'   #  0xC8 -> LATIN CAPITAL LETTER C WITH CARON
    '\u2026'   #  0xC9 -> HORIZONTAL ELLIPSIS
    '\xa0'     #  0xCA -> NO-BREAK SPACE
    '\xc0'     #  0xCB -> LATIN CAPITAL LETTER A WITH GRAVE
    '\xc3'     #  0xCC -> LATIN CAPITAL LETTER A WITH TILDE
    '\xd5'     #  0xCD -> LATIN CAPITAL LETTER O WITH TILDE
    '\u0152'   #  0xCE -> LATIN CAPITAL LIGATURE OE
    '\u0153'   #  0xCF -> LATIN SMALL LIGATURE OE
    '\u0110'   #  0xD0 -> LATIN CAPITAL LETTER D WITH STROKE
    '\u2014'   #  0xD1 -> EM DASH
    '\u201c'   #  0xD2 -> LEFT DOUBLE QUOTATION MARK
    '\u201d'   #  0xD3 -> RIGHT DOUBLE QUOTATION MARK
    '\u2018'   #  0xD4 -> LEFT SINGLE QUOTATION MARK
    '\u2019'   #  0xD5 -> RIGHT SINGLE QUOTATION MARK
    '\xf7'     #  0xD6 -> DIVISION SIGN
    '\u25ca'   #  0xD7 -> LOZENGE
    '\uf8ff'   #  0xD8 -> Apple logo (private-use code point)
    '\xa9'     #  0xD9 -> COPYRIGHT SIGN
    '\u2044'   #  0xDA -> FRACTION SLASH
    '\u20ac'   #  0xDB -> EURO SIGN
    '\u2039'   #  0xDC -> SINGLE LEFT-POINTING ANGLE QUOTATION MARK
    '\u203a'   #  0xDD -> SINGLE RIGHT-POINTING ANGLE QUOTATION MARK
    '\xc6'     #  0xDE -> LATIN CAPITAL LETTER AE
    '\xbb'     #  0xDF -> RIGHT-POINTING DOUBLE ANGLE QUOTATION MARK
    '\u2013'   #  0xE0 -> EN DASH
    '\xb7'     #  0xE1 -> MIDDLE DOT
    '\u201a'   #  0xE2 -> SINGLE LOW-9 QUOTATION MARK
    '\u201e'   #  0xE3 -> DOUBLE LOW-9 QUOTATION MARK
    '\u2030'   #  0xE4 -> PER MILLE SIGN
    '\xc2'     #  0xE5 -> LATIN CAPITAL LETTER A WITH CIRCUMFLEX
    '\u0107'   #  0xE6 -> LATIN SMALL LETTER C WITH ACUTE
    '\xc1'     #  0xE7 -> LATIN CAPITAL LETTER A WITH ACUTE
    '\u010d'   #  0xE8 -> LATIN SMALL LETTER C WITH CARON
    '\xc8'     #  0xE9 -> LATIN CAPITAL LETTER E WITH GRAVE
    '\xcd'     #  0xEA -> LATIN CAPITAL LETTER I WITH ACUTE
    '\xce'     #  0xEB -> LATIN CAPITAL LETTER I WITH CIRCUMFLEX
    '\xcf'     #  0xEC -> LATIN CAPITAL LETTER I WITH DIAERESIS
    '\xcc'     #  0xED -> LATIN CAPITAL LETTER I WITH GRAVE
    '\xd3'     #  0xEE -> LATIN CAPITAL LETTER O WITH ACUTE
    '\xd4'     #  0xEF -> LATIN CAPITAL LETTER O WITH CIRCUMFLEX
    '\u0111'   #  0xF0 -> LATIN SMALL LETTER D WITH STROKE
    '\xd2'     #  0xF1 -> LATIN CAPITAL LETTER O WITH GRAVE
    '\xda'     #  0xF2 -> LATIN CAPITAL LETTER U WITH ACUTE
    '\xdb'     #  0xF3 -> LATIN CAPITAL LETTER U WITH CIRCUMFLEX
    '\xd9'     #  0xF4 -> LATIN CAPITAL LETTER U WITH GRAVE
    '\u0131'   #  0xF5 -> LATIN SMALL LETTER DOTLESS I
    '\u02c6'   #  0xF6 -> MODIFIER LETTER CIRCUMFLEX ACCENT
    '\u02dc'   #  0xF7 -> SMALL TILDE
    '\xaf'     #  0xF8 -> MACRON
    '\u03c0'   #  0xF9 -> GREEK SMALL LETTER PI
    '\xcb'     #  0xFA -> LATIN CAPITAL LETTER E WITH DIAERESIS
    '\u02da'   #  0xFB -> RING ABOVE
    '\xb8'     #  0xFC -> CEDILLA
    '\xca'     #  0xFD -> LATIN CAPITAL LETTER E WITH CIRCUMFLEX
    '\xe6'     #  0xFE -> LATIN SMALL LETTER AE
    '\u02c7'   #  0xFF -> CARON
)

### Encoding table
# Inverse mapping (character -> byte), derived from decoding_table.
encoding_table=codecs.charmap_build(decoding_table)
| lgpl-3.0 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.