repo_name stringlengths 5 100 | path stringlengths 4 375 | copies stringclasses 991 values | size stringlengths 4 7 | content stringlengths 666 1M | license stringclasses 15 values |
|---|---|---|---|---|---|
jdahlin/pygobject | tests/test_overrides.py | 1 | 79982 | # -*- Mode: Python; py-indent-offset: 4 -*-
# vim: tabstop=4 shiftwidth=4 expandtab
import unittest
import sys
sys.path.insert(0, "../")
from compathelper import _long, _unicode, _bytes
from gi.repository import GLib
from gi.repository import GObject
from gi.repository import Gdk
from gi.repository import Gtk
from gi.repository import Gio
from gi.repository import Pango
from gi.repository import GdkPixbuf
import gi.overrides as overrides
import gi.types
class TestRegistry(unittest.TestCase):
    """Tests for the gi.overrides.override() registration helper."""

    def test_non_gi(self):
        """override() must reject classes that are not GI-generated."""
        # A plain Python class carries no GI type information.
        class MyClass:
            pass
        try:
            overrides.override(MyClass)
            self.fail('unexpected success of overriding non-GI class')
        except TypeError as e:
            # The error message should name the offending class.
            self.assertTrue('Can not override a type MyClass' in str(e))
class TestGLib(unittest.TestCase):
def test_gvariant_create(self):
    """Exercise GLib.Variant construction across all supported type strings.

    Covers simple values, boxed variants, tuples, dictionaries, arrays,
    their nested forms, and combinations of containers.
    """
    # simple values
    variant = GLib.Variant('i', 42)
    self.assertTrue(isinstance(variant, GLib.Variant))
    self.assertEqual(variant.get_int32(), 42)

    variant = GLib.Variant('s', '')
    self.assertTrue(isinstance(variant, GLib.Variant))
    self.assertEqual(variant.get_string(), '')

    variant = GLib.Variant('s', 'hello')
    self.assertTrue(isinstance(variant, GLib.Variant))
    self.assertEqual(variant.get_string(), 'hello')

    # boxed variant
    variant = GLib.Variant('v', GLib.Variant('i', 42))
    self.assertTrue(isinstance(variant, GLib.Variant))
    self.assertTrue(isinstance(variant.get_variant(), GLib.Variant))
    self.assertEqual(variant.get_type_string(), 'v')
    self.assertEqual(variant.get_variant().get_type_string(), 'i')
    self.assertEqual(variant.get_variant().get_int32(), 42)

    # doubly-boxed variant: each 'v' level wraps exactly one more variant
    variant = GLib.Variant('v', GLib.Variant('v', GLib.Variant('i', 42)))
    self.assertEqual(variant.get_type_string(), 'v')
    self.assertEqual(variant.get_variant().get_type_string(), 'v')
    self.assertEqual(variant.get_variant().get_variant().get_type_string(), 'i')
    self.assertEqual(variant.get_variant().get_variant().get_int32(), 42)

    # tuples
    variant = GLib.Variant('()', ())
    self.assertEqual(variant.get_type_string(), '()')
    self.assertEqual(variant.n_children(), 0)

    variant = GLib.Variant('(i)', (3,))
    self.assertEqual(variant.get_type_string(), '(i)')
    self.assertTrue(isinstance(variant, GLib.Variant))
    self.assertEqual(variant.n_children(), 1)
    self.assertTrue(isinstance(variant.get_child_value(0), GLib.Variant))
    self.assertEqual(variant.get_child_value(0).get_int32(), 3)

    variant = GLib.Variant('(ss)', ('mec', 'mac'))
    self.assertEqual(variant.get_type_string(), '(ss)')
    self.assertTrue(isinstance(variant, GLib.Variant))
    self.assertTrue(isinstance(variant.get_child_value(0), GLib.Variant))
    self.assertTrue(isinstance(variant.get_child_value(1), GLib.Variant))
    self.assertEqual(variant.get_child_value(0).get_string(), 'mec')
    self.assertEqual(variant.get_child_value(1).get_string(), 'mac')

    # nested tuples
    variant = GLib.Variant('((si)(ub))', (('hello', -1), (42, True)))
    self.assertEqual(variant.get_type_string(), '((si)(ub))')
    # 'u' unpacks as long on Python 2, hence _long() in the expectation
    self.assertEqual(variant.unpack(), (('hello', -1), (_long(42), True)))

    # dictionaries
    variant = GLib.Variant('a{si}', {})
    self.assertTrue(isinstance(variant, GLib.Variant))
    self.assertEqual(variant.get_type_string(), 'a{si}')
    self.assertEqual(variant.n_children(), 0)

    variant = GLib.Variant('a{si}', {'': 1, 'key1': 2, 'key2': 3})
    self.assertEqual(variant.get_type_string(), 'a{si}')
    self.assertTrue(isinstance(variant, GLib.Variant))
    self.assertTrue(isinstance(variant.get_child_value(0), GLib.Variant))
    self.assertTrue(isinstance(variant.get_child_value(1), GLib.Variant))
    self.assertTrue(isinstance(variant.get_child_value(2), GLib.Variant))
    self.assertEqual(variant.unpack(), {'': 1, 'key1': 2, 'key2': 3})

    # nested dictionaries
    variant = GLib.Variant('a{sa{si}}', {})
    self.assertTrue(isinstance(variant, GLib.Variant))
    self.assertEqual(variant.get_type_string(), 'a{sa{si}}')
    self.assertEqual(variant.n_children(), 0)

    d = {'': {'': 1, 'keyn1': 2},
         'key1': {'key11': 11, 'key12': 12}}
    variant = GLib.Variant('a{sa{si}}', d)
    self.assertEqual(variant.get_type_string(), 'a{sa{si}}')
    self.assertTrue(isinstance(variant, GLib.Variant))
    self.assertEqual(variant.unpack(), d)

    # arrays
    variant = GLib.Variant('ai', [])
    self.assertEqual(variant.get_type_string(), 'ai')
    self.assertEqual(variant.n_children(), 0)

    variant = GLib.Variant('ai', [1, 2])
    self.assertEqual(variant.get_type_string(), 'ai')
    self.assertTrue(isinstance(variant, GLib.Variant))
    self.assertTrue(isinstance(variant.get_child_value(0), GLib.Variant))
    self.assertTrue(isinstance(variant.get_child_value(1), GLib.Variant))
    self.assertEqual(variant.get_child_value(0).get_int32(), 1)
    self.assertEqual(variant.get_child_value(1).get_int32(), 2)

    variant = GLib.Variant('as', [])
    self.assertEqual(variant.get_type_string(), 'as')
    self.assertEqual(variant.n_children(), 0)

    variant = GLib.Variant('as', [''])
    self.assertEqual(variant.get_type_string(), 'as')
    self.assertTrue(isinstance(variant, GLib.Variant))
    self.assertTrue(isinstance(variant.get_child_value(0), GLib.Variant))
    self.assertEqual(variant.get_child_value(0).get_string(), '')

    variant = GLib.Variant('as', ['hello', 'world'])
    self.assertEqual(variant.get_type_string(), 'as')
    self.assertTrue(isinstance(variant, GLib.Variant))
    self.assertTrue(isinstance(variant.get_child_value(0), GLib.Variant))
    self.assertTrue(isinstance(variant.get_child_value(1), GLib.Variant))
    self.assertEqual(variant.get_child_value(0).get_string(), 'hello')
    self.assertEqual(variant.get_child_value(1).get_string(), 'world')

    # nested arrays
    variant = GLib.Variant('aai', [])
    self.assertEqual(variant.get_type_string(), 'aai')
    self.assertEqual(variant.n_children(), 0)

    variant = GLib.Variant('aai', [[]])
    self.assertEqual(variant.get_type_string(), 'aai')
    self.assertEqual(variant.n_children(), 1)
    self.assertEqual(variant.get_child_value(0).n_children(), 0)

    variant = GLib.Variant('aai', [[1, 2], [3, 4, 5]])
    self.assertEqual(variant.get_type_string(), 'aai')
    self.assertEqual(variant.unpack(), [[1, 2], [3, 4, 5]])

    #
    # complex types
    #
    variant = GLib.Variant('(as)', ([],))
    self.assertEqual(variant.get_type_string(), '(as)')
    self.assertEqual(variant.n_children(), 1)
    self.assertEqual(variant.get_child_value(0).n_children(), 0)

    variant = GLib.Variant('(as)', ([''],))
    self.assertEqual(variant.get_type_string(), '(as)')
    self.assertEqual(variant.n_children(), 1)
    self.assertEqual(variant.get_child_value(0).n_children(), 1)
    self.assertEqual(variant.get_child_value(0).get_child_value(0).get_string(), '')

    variant = GLib.Variant('(as)', (['hello'],))
    self.assertEqual(variant.get_type_string(), '(as)')
    self.assertEqual(variant.n_children(), 1)
    self.assertEqual(variant.get_child_value(0).n_children(), 1)
    self.assertEqual(variant.get_child_value(0).get_child_value(0).get_string(), 'hello')

    obj = {'a1': (1, True), 'a2': (2, False)}
    variant = GLib.Variant('a{s(ib)}', obj)
    self.assertEqual(variant.get_type_string(), 'a{s(ib)}')
    self.assertEqual(variant.unpack(), obj)

    # boxed values inside containers unpack to their inner Python values
    obj = {'a1': (1, GLib.Variant('b', True)), 'a2': (2, GLib.Variant('y', 255))}
    variant = GLib.Variant('a{s(iv)}', obj)
    self.assertEqual(variant.get_type_string(), 'a{s(iv)}')
    self.assertEqual(variant.unpack(), {'a1': (1, True), 'a2': (2, 255)})

    obj = (1, {'a': {'a1': True, 'a2': False},
               'b': {'b1': False},
               'c': {}
               },
           'foo')
    variant = GLib.Variant('(ia{sa{sb}}s)', obj)
    self.assertEqual(variant.get_type_string(), '(ia{sa{sb}}s)')
    self.assertEqual(variant.unpack(), obj)

    obj = {"frequency": GLib.Variant('t', 738000000),
           "hierarchy": GLib.Variant('i', 0),
           "bandwidth": GLib.Variant('x', 8),
           "code-rate-hp": GLib.Variant('d', 2.0 / 3.0),
           "constellation": GLib.Variant('s', "QAM16"),
           "guard-interval": GLib.Variant('u', 4)}
    variant = GLib.Variant('a{sv}', obj)
    self.assertEqual(variant.get_type_string(), 'a{sv}')
    self.assertEqual(variant.unpack(), {"frequency": 738000000,
                                        "hierarchy": 0,
                                        "bandwidth": 8,
                                        "code-rate-hp": 2.0 / 3.0,
                                        "constellation": "QAM16",
                                        "guard-interval": 4})
def test_gvariant_create_errors(self):
    """GLib.Variant construction must fail cleanly on malformed arguments."""
    bad_calls = [
        # excess arguments
        ('i', (42, 3)),
        ('(i)', ((42, 3),)),
        # not enough arguments
        ('(ii)', ((42,),)),
        # data type mismatch
        ('i', ('hello',)),
        ('s', (42,)),
        ('(ss)', ('mec', 'mac')),
    ]
    for fmt, args in bad_calls:
        self.assertRaises(TypeError, GLib.Variant, fmt, *args)
    # unimplemented data type
    self.assertRaises(NotImplementedError, GLib.Variant, 'Q', 1)
def test_gvariant_unpack(self):
    """unpack() must convert GVariants back to native Python values.

    Covers simple values, boxed variants (including nested boxing),
    tuples, arrays and dictionaries.
    """
    # simple values
    res = GLib.Variant.new_int32(-42).unpack()
    self.assertEqual(res, -42)

    res = GLib.Variant.new_uint64(34359738368).unpack()
    self.assertEqual(res, 34359738368)

    res = GLib.Variant.new_boolean(True).unpack()
    self.assertEqual(res, True)

    res = GLib.Variant.new_object_path('/foo/Bar').unpack()
    self.assertEqual(res, '/foo/Bar')

    # variant
    res = GLib.Variant('v', GLib.Variant.new_int32(-42)).unpack()
    self.assertEqual(res, -42)

    # BUG FIX: previously the unpack result of the doubly-boxed variant
    # was discarded and the preceding assertion (-42) merely repeated,
    # so recursive unpacking of nested 'v' variants was never verified.
    res = GLib.Variant('v', GLib.Variant('v', GLib.Variant('i', 42))).unpack()
    self.assertEqual(res, 42)

    # tuple
    res = GLib.Variant.new_tuple(GLib.Variant.new_int32(-1),
                                 GLib.Variant.new_string('hello')).unpack()
    self.assertEqual(res, (-1, 'hello'))

    # array
    vb = GLib.VariantBuilder.new(gi._gi.variant_type_from_string('ai'))
    vb.add_value(GLib.Variant.new_int32(-1))
    vb.add_value(GLib.Variant.new_int32(3))
    res = vb.end().unpack()
    self.assertEqual(res, [-1, 3])

    # dictionary
    res = GLib.Variant('a{si}', {'key1': 1, 'key2': 2}).unpack()
    self.assertEqual(res, {'key1': 1, 'key2': 2})
def test_gvariant_iteration(self):
    """GVariant containers must support len(), indexing and iteration."""
    # array index access
    vb = GLib.VariantBuilder.new(gi._gi.variant_type_from_string('ai'))
    vb.add_value(GLib.Variant.new_int32(-1))
    vb.add_value(GLib.Variant.new_int32(3))
    v = vb.end()

    self.assertEqual(len(v), 2)
    self.assertEqual(v[0], -1)
    self.assertEqual(v[1], 3)
    # negative indices wrap around like Python sequences
    self.assertEqual(v[-1], 3)
    self.assertEqual(v[-2], -1)
    self.assertRaises(IndexError, v.__getitem__, 2)
    self.assertRaises(IndexError, v.__getitem__, -3)
    self.assertRaises(ValueError, v.__getitem__, 'a')

    # array iteration
    self.assertEqual([x for x in v], [-1, 3])
    self.assertEqual(list(v), [-1, 3])

    # tuple index access
    v = GLib.Variant.new_tuple(GLib.Variant.new_int32(-1),
                               GLib.Variant.new_string('hello'))
    self.assertEqual(len(v), 2)
    self.assertEqual(v[0], -1)
    self.assertEqual(v[1], 'hello')
    self.assertEqual(v[-1], 'hello')
    self.assertEqual(v[-2], -1)
    self.assertRaises(IndexError, v.__getitem__, 2)
    self.assertRaises(IndexError, v.__getitem__, -3)
    self.assertRaises(ValueError, v.__getitem__, 'a')

    # tuple iteration
    self.assertEqual([x for x in v], [-1, 'hello'])
    self.assertEqual(tuple(v), (-1, 'hello'))

    # dictionary index access
    vsi = GLib.Variant('a{si}', {'key1': 1, 'key2': 2})
    vis = GLib.Variant('a{is}', {1: 'val1', 5: 'val2'})

    self.assertEqual(len(vsi), 2)
    self.assertEqual(vsi['key1'], 1)
    self.assertEqual(vsi['key2'], 2)
    self.assertRaises(KeyError, vsi.__getitem__, 'unknown')

    self.assertEqual(len(vis), 2)
    self.assertEqual(vis[1], 'val1')
    self.assertEqual(vis[5], 'val2')
    # BUG FIX: this previously probed vsi (the string-keyed dict) with an
    # int key, exercising a key-type mismatch instead of a missing key;
    # the int-keyed dictionary vis is the one that should miss on 3.
    self.assertRaises(KeyError, vis.__getitem__, 3)

    # dictionary iteration
    self.assertEqual(set(vsi.keys()), set(['key1', 'key2']))
    self.assertEqual(set(vis.keys()), set([1, 5]))

    # string index access
    v = GLib.Variant('s', 'hello')
    self.assertEqual(len(v), 5)
    self.assertEqual(v[0], 'h')
    self.assertEqual(v[4], 'o')
    self.assertEqual(v[-1], 'o')
    self.assertEqual(v[-5], 'h')
    self.assertRaises(IndexError, v.__getitem__, 5)
    self.assertRaises(IndexError, v.__getitem__, -6)

    # string iteration
    self.assertEqual([x for x in v], ['h', 'e', 'l', 'l', 'o'])
def test_variant_split_signature(self):
    """split_signature() must break a signature into its top-level types."""
    self.assertEqual(GLib.Variant.split_signature('()'), [])
    self.assertEqual(GLib.Variant.split_signature('s'), ['s'])
    self.assertEqual(GLib.Variant.split_signature('as'), ['as'])
    self.assertEqual(GLib.Variant.split_signature('(s)'), ['s'])
    self.assertEqual(GLib.Variant.split_signature('(iso)'), ['i', 's', 'o'])
    # nested containers must stay intact as single elements
    self.assertEqual(GLib.Variant.split_signature('(s(ss)i(ii))'),
                     ['s', '(ss)', 'i', '(ii)'])
    self.assertEqual(GLib.Variant.split_signature('(as)'), ['as'])
    self.assertEqual(GLib.Variant.split_signature('(s(ss)iaiaasa(ii))'),
                     ['s', '(ss)', 'i', 'ai', 'aas', 'a(ii)'])
    self.assertEqual(GLib.Variant.split_signature('(a{iv}(ii)((ss)a{s(ss)}))'),
                     ['a{iv}', '(ii)', '((ss)a{s(ss)})'])
def test_variant_hash(self):
    """Equal GVariants must hash equally so hash containers find them."""
    v1 = GLib.Variant('s', 'somestring')
    v2 = GLib.Variant('s', 'somestring')
    v3 = GLib.Variant('s', 'somestring2')

    # v2 is a distinct object but compares equal to v1, so containers
    # keyed by hash and built from v1/v3 must report it as a member.
    as_set = set([v1, v3])
    self.assertTrue(v2 in as_set)
    as_frozenset = frozenset([v1, v3])
    self.assertTrue(v2 in as_frozenset)
    as_dict = {v1: '1', v3: '2'}
    self.assertTrue(v2 in as_dict)
def test_variant_compare(self):
    """GVariant equality must follow both type string and value."""
    # Check if identical GVariant are equal
    def assert_equal(vtype, value):
        self.assertEqual(GLib.Variant(vtype, value), GLib.Variant(vtype, value))

    def assert_not_equal(vtype1, value1, vtype2, value2):
        self.assertNotEqual(GLib.Variant(vtype1, value1), GLib.Variant(vtype2, value2))

    # all numeric type codes: compare by value, never across types
    numbers = ['y', 'n', 'q', 'i', 'u', 'x', 't', 'h', 'd']
    for num in numbers:
        assert_equal(num, 42)
        assert_not_equal(num, 42, num, 41)
        assert_not_equal(num, 42, 's', '42')

    assert_equal('s', 'something')
    assert_not_equal('s', 'something', 's', 'somethingelse')
    assert_not_equal('s', 'something', 'i', 1234)

    # 'g' is a type-signature string
    assert_equal('g', 'dustybinqhogx')
    assert_not_equal('g', 'dustybinqhogx', 'g', 'dustybin')
    assert_not_equal('g', 'dustybinqhogx', 'i', 1234)

    # 'o' is a D-Bus object path
    assert_equal('o', '/dev/null')
    assert_not_equal('o', '/dev/null', 'o', '/dev/zero')
    assert_not_equal('o', '/dev/null', 'i', 1234)

    assert_equal('(s)', ('strtuple',))
    assert_not_equal('(s)', ('strtuple',), '(s)', ('strtuple2',))

    assert_equal('a{si}', {'str': 42})
    assert_not_equal('a{si}', {'str': 42}, 'a{si}', {'str': 43})

    # boxed variants compare by their inner value
    assert_equal('v', GLib.Variant('i', 42))
    assert_not_equal('v', GLib.Variant('i', 42), 'v', GLib.Variant('i', 43))
def test_variant_bool(self):
    """bool(GVariant) must match bool() of the unpacked Python value."""
    # Check if the GVariant bool matches the unpacked Pythonic bool
    def assert_equals_bool(vtype, value):
        self.assertEqual(bool(GLib.Variant(vtype, value)), bool(value))

    # simple values
    assert_equals_bool('b', True)
    assert_equals_bool('b', False)

    numbers = ['y', 'n', 'q', 'i', 'u', 'x', 't', 'h', 'd']
    for number in numbers:
        assert_equals_bool(number, 0)
        assert_equals_bool(number, 1)

    assert_equals_bool('s', '')
    assert_equals_bool('g', '')
    assert_equals_bool('s', 'something')
    assert_equals_bool('o', '/dev/null')
    assert_equals_bool('g', 'dustybinqhogx')

    # arrays: truthiness follows emptiness, not element values
    assert_equals_bool('ab', [True])
    assert_equals_bool('ab', [False])
    for number in numbers:
        assert_equals_bool('a' + number, [])
        assert_equals_bool('a' + number, [0])
    assert_equals_bool('as', [])
    assert_equals_bool('as', [''])
    assert_equals_bool('ao', [])
    assert_equals_bool('ao', ['/'])
    assert_equals_bool('ag', [])
    assert_equals_bool('ag', [''])
    assert_equals_bool('aai', [[]])

    # tuples
    assert_equals_bool('()', ())
    for number in numbers:
        assert_equals_bool('(' + number + ')', (0,))
    assert_equals_bool('(s)', ('',))
    assert_equals_bool('(o)', ('/',))
    assert_equals_bool('(g)', ('',))
    assert_equals_bool('(())', ((),))

    # dictionaries
    assert_equals_bool('a{si}', {})
    assert_equals_bool('a{si}', {'': 0})

    # complex types, always True
    assert_equals_bool('(as)', ([],))
    assert_equals_bool('a{s(i)}', {'': (0,)})

    # variant types, recursive unpacking
    assert_equals_bool('v', GLib.Variant('i', 0))
    assert_equals_bool('v', GLib.Variant('i', 1))
class TestPango(unittest.TestCase):
    """Tests for the Pango overrides (FontDescription and Layout)."""

    def test_default_font_description(self):
        # a default-constructed description carries the NORMAL variant
        desc = Pango.FontDescription()
        self.assertEqual(desc.get_variant(), Pango.Variant.NORMAL)

    def test_font_description(self):
        # the string constructor parses the family name
        desc = Pango.FontDescription('monospace')
        self.assertEqual(desc.get_family(), 'monospace')
        self.assertEqual(desc.get_variant(), Pango.Variant.NORMAL)

    def test_layout(self):
        # Layout requires a context argument; bare construction must fail
        self.assertRaises(TypeError, Pango.Layout)
        context = Pango.Context()
        layout = Pango.Layout(context)
        self.assertEqual(layout.get_context(), context)
        # set_markup stores the plain text with markup stripped
        layout.set_markup("Foobar")
        self.assertEqual(layout.get_text(), "Foobar")
class TestGdk(unittest.TestCase):
    """Tests for the Gdk overrides (windows, colors, events, cursors)."""

    def test_constructor(self):
        # Gdk.Window override takes (parent, attributes, attributes_mask)
        attribute = Gdk.WindowAttr()
        attribute.window_type = Gdk.WindowType.CHILD
        attributes_mask = Gdk.WindowAttributesType.X | \
            Gdk.WindowAttributesType.Y
        window = Gdk.Window(None, attribute, attributes_mask)
        self.assertEqual(window.get_window_type(), Gdk.WindowType.CHILD)

    def test_color(self):
        color = Gdk.Color(100, 200, 300)
        self.assertEqual(color.red, 100)
        self.assertEqual(color.green, 200)
        self.assertEqual(color.blue, 300)
        # equality compares component values, not object identity
        self.assertEqual(color, Gdk.Color(100, 200, 300))
        self.assertNotEqual(color, Gdk.Color(1, 2, 3))

    def test_color_floats(self):
        # from_floats/to_floats convert between 0..65535 ints and 0..1 floats
        self.assertEqual(Gdk.Color(13107, 21845, 65535),
                         Gdk.Color.from_floats(0.2, 1.0 / 3.0, 1.0))
        self.assertEqual(Gdk.Color(13107, 21845, 65535).to_floats(),
                         (0.2, 1.0 / 3.0, 1.0))
        # RGBA <-> Color conversion drops/forces the alpha channel
        self.assertEqual(Gdk.RGBA(0.2, 1.0 / 3.0, 1.0, 0.5).to_color(),
                         Gdk.Color.from_floats(0.2, 1.0 / 3.0, 1.0))
        self.assertEqual(Gdk.RGBA.from_color(Gdk.Color(13107, 21845, 65535)),
                         Gdk.RGBA(0.2, 1.0 / 3.0, 1.0, 1.0))

    def test_rgba(self):
        self.assertEqual(Gdk.RGBA, overrides.Gdk.RGBA)
        rgba = Gdk.RGBA(0.1, 0.2, 0.3, 0.4)
        self.assertEqual(rgba, Gdk.RGBA(0.1, 0.2, 0.3, 0.4))
        self.assertNotEqual(rgba, Gdk.RGBA(0.0, 0.2, 0.3, 0.4))
        self.assertEqual(rgba.red, 0.1)
        self.assertEqual(rgba.green, 0.2)
        self.assertEqual(rgba.blue, 0.3)
        self.assertEqual(rgba.alpha, 0.4)
        rgba.green = 0.9
        self.assertEqual(rgba.green, 0.9)
        # Iterator/tuple conversion
        self.assertEqual(tuple(Gdk.RGBA(0.1, 0.2, 0.3, 0.4)),
                         (0.1, 0.2, 0.3, 0.4))

    def test_event(self):
        event = Gdk.Event.new(Gdk.EventType.CONFIGURE)
        self.assertEqual(event.type, Gdk.EventType.CONFIGURE)
        self.assertEqual(event.send_event, 0)

        # union members of the matching event struct are writable
        event = Gdk.Event.new(Gdk.EventType.DRAG_MOTION)
        event.x_root, event.y_root = 0, 5
        self.assertEqual(event.x_root, 0)
        self.assertEqual(event.y_root, 5)

        event = Gdk.Event()
        event.type = Gdk.EventType.SCROLL
        # unknown attributes must raise rather than be silently invented
        self.assertRaises(AttributeError, lambda: getattr(event, 'foo_bar'))

    def test_event_structures(self):
        def button_press_cb(button, event):
            # the callback receives the concrete EventButton struct
            self.assertTrue(isinstance(event, Gdk.EventButton))
            self.assertTrue(event.type == Gdk.EventType.BUTTON_PRESS)
            self.assertEqual(event.send_event, 0)
            self.assertEqual(event.get_state(), Gdk.ModifierType.CONTROL_MASK)
            self.assertEqual(event.get_root_coords(), (2, 5))

            event.time = 12345
            self.assertEqual(event.get_time(), 12345)

        w = Gtk.Window()
        b = Gtk.Button()
        b.connect('button-press-event', button_press_cb)
        w.add(b)
        w.show_all()
        # synthesize a button press so button_press_cb actually runs
        Gdk.test_simulate_button(b.get_window(),
                                 2, 5,
                                 0,
                                 Gdk.ModifierType.CONTROL_MASK,
                                 Gdk.EventType.BUTTON_PRESS)

    def test_cursor(self):
        self.assertEqual(Gdk.Cursor, overrides.Gdk.Cursor)
        c = Gdk.Cursor(Gdk.CursorType.WATCH)
        self.assertNotEqual(c, None)
        c = Gdk.Cursor(cursor_type=Gdk.CursorType.WATCH)
        self.assertNotEqual(c, None)

        display_manager = Gdk.DisplayManager.get()
        display = display_manager.get_default_display()
        test_pixbuf = GdkPixbuf.Pixbuf.new(GdkPixbuf.Colorspace.RGB,
                                           False,
                                           8,
                                           5,
                                           10)
        # pixbuf-based constructor with keyword hotspot coordinates
        c = Gdk.Cursor(display,
                       test_pixbuf,
                       y=0, x=0)
        self.assertNotEqual(c, None)
        # ambiguous positional arguments must be rejected
        self.assertRaises(ValueError, Gdk.Cursor, 1, 2, 3)
class TestGtk(unittest.TestCase):
def test_container(self):
    """Gtk.Container must support len(), membership and iteration."""
    box = Gtk.Box()
    self.assertTrue(isinstance(box, Gtk.Box))
    self.assertTrue(isinstance(box, Gtk.Container))
    self.assertTrue(isinstance(box, Gtk.Widget))
    # an empty container is still truthy (no __len__-based falsiness)
    self.assertTrue(box)
    label = Gtk.Label()
    label2 = Gtk.Label()
    box.add(label)
    box.add(label2)
    self.assertTrue(label in box)
    self.assertTrue(label2 in box)
    self.assertEqual(len(box), 2)
    self.assertTrue(box)
    # iteration yields children in insertion order
    l = [x for x in box]
    self.assertEqual(l, [label, label2])
def test_actions(self):
    """Gtk.Action/RadioAction overrides must provide convenience constructors."""
    self.assertEqual(Gtk.Action, overrides.Gtk.Action)
    # the name argument is mandatory
    self.assertRaises(TypeError, Gtk.Action)
    action = Gtk.Action("test", "Test", "Test Action", Gtk.STOCK_COPY)
    self.assertEqual(action.get_name(), "test")
    self.assertEqual(action.get_label(), "Test")
    self.assertEqual(action.get_tooltip(), "Test Action")
    self.assertEqual(action.get_stock_id(), Gtk.STOCK_COPY)

    self.assertEqual(Gtk.RadioAction, overrides.Gtk.RadioAction)
    self.assertRaises(TypeError, Gtk.RadioAction)
    # the trailing argument is the radio value
    action = Gtk.RadioAction("test", "Test", "Test Action", Gtk.STOCK_COPY, 1)
    self.assertEqual(action.get_name(), "test")
    self.assertEqual(action.get_label(), "Test")
    self.assertEqual(action.get_tooltip(), "Test Action")
    self.assertEqual(action.get_stock_id(), Gtk.STOCK_COPY)
    self.assertEqual(action.get_current_value(), 1)
def test_actiongroup(self):
    """ActionGroup add_* helpers must create the right action types and
    pass user data through to the callbacks."""
    self.assertEqual(Gtk.ActionGroup, overrides.Gtk.ActionGroup)
    # the name keyword is mandatory
    self.assertRaises(TypeError, Gtk.ActionGroup)

    action_group = Gtk.ActionGroup(name='TestActionGroup')
    callback_data = "callback data"

    def test_action_callback_data(action, user_data):
        self.assertEqual(user_data, callback_data)

    def test_radio_action_callback_data(action, current, user_data):
        self.assertEqual(user_data, callback_data)

    action_group.add_actions([
        ('test-action1', None, 'Test Action 1',
         None, None, test_action_callback_data),
        ('test-action2', Gtk.STOCK_COPY, 'Test Action 2',
         None, None, test_action_callback_data)], callback_data)
    action_group.add_toggle_actions([
        ('test-toggle-action1', None, 'Test Toggle Action 1',
         None, None, test_action_callback_data, False),
        ('test-toggle-action2', Gtk.STOCK_COPY, 'Test Toggle Action 2',
         None, None, test_action_callback_data, True)], callback_data)
    action_group.add_radio_actions([
        ('test-radio-action1', None, 'Test Radio Action 1'),
        ('test-radio-action2', Gtk.STOCK_COPY, 'Test Radio Action 2')], 1,
        test_radio_action_callback_data,
        callback_data)

    expected_results = [('test-action1', Gtk.Action),
                        ('test-action2', Gtk.Action),
                        ('test-toggle-action1', Gtk.ToggleAction),
                        ('test-toggle-action2', Gtk.ToggleAction),
                        ('test-radio-action1', Gtk.RadioAction),
                        ('test-radio-action2', Gtk.RadioAction)]

    for action in action_group.list_actions():
        a = (action.get_name(), type(action))
        self.assertTrue(a in expected_results)
        # remove so duplicates would be detected
        expected_results.remove(a)
        # activating fires the callbacks defined above
        action.activate()
def test_uimanager(self):
    """UIManager must build widgets from XML and stack action groups."""
    self.assertEqual(Gtk.UIManager, overrides.Gtk.UIManager)
    ui = Gtk.UIManager()
    ui.add_ui_from_string(
"""
<ui>
<menubar name="menubar1"></menubar>
</ui>
"""
)
    menubar = ui.get_widget("/menubar1")
    self.assertEqual(type(menubar), Gtk.MenuBar)

    ag = Gtk.ActionGroup(name="ag1")
    ui.insert_action_group(ag)
    ag2 = Gtk.ActionGroup(name="ag2")
    ui.insert_action_group(ag2)
    groups = ui.get_action_groups()
    # later insertions appear at the end of the group list
    self.assertEqual(ag, groups[-2])
    self.assertEqual(ag2, groups[-1])
def test_builder(self):
    """Gtk.Builder: signal auto-connection, 'after' handlers and
    selective object loading via add_objects_from_string()."""
    self.assertEqual(Gtk.Builder, overrides.Gtk.Builder)

    class SignalTest(GObject.GObject):
        # minimal GObject type exposing one custom signal for the XML below
        __gtype_name__ = "GIOverrideSignalTest"
        __gsignals__ = {
            "test-signal": (GObject.SignalFlags.RUN_FIRST,
                            None,
                            []),
        }

    class SignalCheck:
        # records which handlers ran; after_sentinel tracks ordering of
        # the after="yes" handler relative to the normal one
        def __init__(self):
            self.sentinel = 0
            self.after_sentinel = 0

        def on_signal_1(self, *args):
            self.sentinel += 1
            self.after_sentinel += 1

        def on_signal_3(self, *args):
            self.sentinel += 3

        def on_signal_after(self, *args):
            # only counts when the normal handler already ran
            if self.after_sentinel == 1:
                self.after_sentinel += 1

    signal_checker = SignalCheck()
    builder = Gtk.Builder()

    # add object1 to the builder
    builder.add_from_string(
"""
<interface>
<object class="GIOverrideSignalTest" id="object1">
<signal name="test-signal" after="yes" handler="on_signal_after" />
<signal name="test-signal" handler="on_signal_1" />
</object>
</interface>
""")
    # only add object3 to the builder
    builder.add_objects_from_string(
"""
<interface>
<object class="GIOverrideSignalTest" id="object2">
<signal name="test-signal" handler="on_signal_2" />
</object>
<object class="GIOverrideSignalTest" id="object3">
<signal name="test-signal" handler="on_signal_3" />
</object>
<object class="GIOverrideSignalTest" id="object4">
<signal name="test-signal" handler="on_signal_4" />
</object>
</interface>
""",
        ['object3'])

    # hook up signals
    builder.connect_signals(signal_checker)

    # call their notify signals and check sentinel
    objects = builder.get_objects()
    # only object1 and object3 were loaded
    self.assertEqual(len(objects), 2)
    for obj in objects:
        obj.emit('test-signal')

    # object1 contributes 1, object3 contributes 3
    self.assertEqual(signal_checker.sentinel, 4)
    self.assertEqual(signal_checker.after_sentinel, 2)
def test_dialogs(self):
    """Exercise the convenience constructors of every dialog override."""
    self.assertEqual(Gtk.Dialog, overrides.Gtk.Dialog)
    self.assertEqual(Gtk.AboutDialog, overrides.Gtk.AboutDialog)
    self.assertEqual(Gtk.MessageDialog, overrides.Gtk.MessageDialog)
    self.assertEqual(Gtk.ColorSelectionDialog, overrides.Gtk.ColorSelectionDialog)
    self.assertEqual(Gtk.FileChooserDialog, overrides.Gtk.FileChooserDialog)
    self.assertEqual(Gtk.FontSelectionDialog, overrides.Gtk.FontSelectionDialog)
    self.assertEqual(Gtk.RecentChooserDialog, overrides.Gtk.RecentChooserDialog)

    # Gtk.Dialog
    dialog = Gtk.Dialog(title='Foo',
                        flags=Gtk.DialogFlags.MODAL,
                        buttons=('test-button1', 1))
    self.assertTrue(isinstance(dialog, Gtk.Dialog))
    self.assertTrue(isinstance(dialog, Gtk.Window))

    dialog.add_buttons('test-button2', 2, Gtk.STOCK_CLOSE, Gtk.ResponseType.CLOSE)

    self.assertEqual('Foo', dialog.get_title())
    self.assertTrue(dialog.get_modal())
    button = dialog.get_widget_for_response(1)
    self.assertEqual('test-button1', button.get_label())
    button = dialog.get_widget_for_response(2)
    self.assertEqual('test-button2', button.get_label())
    button = dialog.get_widget_for_response(Gtk.ResponseType.CLOSE)
    self.assertEqual(Gtk.STOCK_CLOSE, button.get_label())

    # Gtk.AboutDialog
    dialog = Gtk.AboutDialog()
    self.assertTrue(isinstance(dialog, Gtk.Dialog))
    self.assertTrue(isinstance(dialog, Gtk.Window))

    # Gtk.MessageDialog
    dialog = Gtk.MessageDialog(title='message dialog test',
                               flags=Gtk.DialogFlags.MODAL,
                               buttons=Gtk.ButtonsType.OK,
                               message_format='dude!')
    self.assertTrue(isinstance(dialog, Gtk.Dialog))
    self.assertTrue(isinstance(dialog, Gtk.Window))

    self.assertEqual('message dialog test', dialog.get_title())
    self.assertTrue(dialog.get_modal())
    text = dialog.get_property('text')
    self.assertEqual('dude!', text)

    dialog.format_secondary_text('2nd text')
    self.assertEqual(dialog.get_property('secondary-text'), '2nd text')
    self.assertFalse(dialog.get_property('secondary-use-markup'))

    dialog.format_secondary_markup('2nd markup')
    self.assertEqual(dialog.get_property('secondary-text'), '2nd markup')
    self.assertTrue(dialog.get_property('secondary-use-markup'))

    # Gtk.ColorSelectionDialog
    dialog = Gtk.ColorSelectionDialog("color selection dialog test")
    self.assertTrue(isinstance(dialog, Gtk.Dialog))
    self.assertTrue(isinstance(dialog, Gtk.Window))
    self.assertEqual('color selection dialog test', dialog.get_title())

    # Gtk.FileChooserDialog
    dialog = Gtk.FileChooserDialog(title='file chooser dialog test',
                                   buttons=('test-button1', 1),
                                   action=Gtk.FileChooserAction.SAVE)
    self.assertTrue(isinstance(dialog, Gtk.Dialog))
    self.assertTrue(isinstance(dialog, Gtk.Window))

    dialog.add_buttons('test-button2', 2, Gtk.STOCK_CLOSE, Gtk.ResponseType.CLOSE)

    self.assertEqual('file chooser dialog test', dialog.get_title())
    button = dialog.get_widget_for_response(1)
    self.assertEqual('test-button1', button.get_label())
    button = dialog.get_widget_for_response(2)
    self.assertEqual('test-button2', button.get_label())
    button = dialog.get_widget_for_response(Gtk.ResponseType.CLOSE)
    self.assertEqual(Gtk.STOCK_CLOSE, button.get_label())
    action = dialog.get_property('action')
    self.assertEqual(Gtk.FileChooserAction.SAVE, action)

    # Gtk.FontSelectionDialog
    # BUG FIX: this section previously constructed a second
    # Gtk.ColorSelectionDialog, so the FontSelectionDialog override
    # (asserted above) was never actually exercised.
    dialog = Gtk.FontSelectionDialog("font selection dialog test")
    self.assertTrue(isinstance(dialog, Gtk.Dialog))
    self.assertTrue(isinstance(dialog, Gtk.Window))
    self.assertEqual('font selection dialog test', dialog.get_title())

    # Gtk.RecentChooserDialog
    test_manager = Gtk.RecentManager()
    dialog = Gtk.RecentChooserDialog(title='recent chooser dialog test',
                                     buttons=('test-button1', 1),
                                     manager=test_manager)
    self.assertTrue(isinstance(dialog, Gtk.Dialog))
    self.assertTrue(isinstance(dialog, Gtk.Window))

    dialog.add_buttons('test-button2', 2, Gtk.STOCK_CLOSE, Gtk.ResponseType.CLOSE)

    self.assertEqual('recent chooser dialog test', dialog.get_title())
    button = dialog.get_widget_for_response(1)
    self.assertEqual('test-button1', button.get_label())
    button = dialog.get_widget_for_response(2)
    self.assertEqual('test-button2', button.get_label())
    button = dialog.get_widget_for_response(Gtk.ResponseType.CLOSE)
    self.assertEqual(Gtk.STOCK_CLOSE, button.get_label())
class TestClass(GObject.GObject):
    """GObject subclass stored in tree/list store columns by the tests."""
    __gtype_name__ = "GIOverrideTreeAPITest"

    def __init__(self, tester, int_value, string_value):
        # tester is the running TestCase, used for assertions in check()
        super(TestGtk.TestClass, self).__init__()
        self.tester = tester
        self.int_value = int_value
        self.string_value = string_value

    def check(self, int_value, string_value):
        # verify the stored values survived the model round-trip unchanged
        self.tester.assertEqual(int_value, self.int_value)
        self.tester.assertEqual(string_value, self.string_value)
def test_tree_store(self):
    """Round-trip every supported column type through a Gtk.TreeStore,
    using append with data, set() with flat args, a dict, and parallel
    sequences, then walk the tree and verify each stored value."""
    self.assertEqual(Gtk.TreeStore, overrides.Gtk.TreeStore)
    self.assertEqual(Gtk.ListStore, overrides.Gtk.ListStore)
    self.assertEqual(Gtk.TreeModel, overrides.Gtk.TreeModel)
    self.assertEqual(Gtk.TreeViewColumn, overrides.Gtk.TreeViewColumn)

    class TestPyObject(object):
        pass

    test_pyobj = TestPyObject()
    test_pydict = {1: 1, "2": 2, "3": "3"}
    test_pylist = [1, "2", "3"]

    # one column per storage type under test
    tree_store = Gtk.TreeStore(int,
                               'gchararray',
                               TestGtk.TestClass,
                               GObject.TYPE_PYOBJECT,
                               object,
                               object,
                               object,
                               bool,
                               bool,
                               GObject.TYPE_UINT,
                               GObject.TYPE_ULONG,
                               GObject.TYPE_INT64,
                               GObject.TYPE_UINT64,
                               GObject.TYPE_UCHAR,
                               GObject.TYPE_CHAR)

    parent = None
    for i in range(97):
        label = 'this is child #%d' % i
        testobj = TestGtk.TestClass(self, i, label)
        # each new row becomes the parent of the next, forming a chain
        parent = tree_store.append(parent, (i,
                                            label,
                                            testobj,
                                            testobj,
                                            test_pyobj,
                                            test_pydict,
                                            test_pylist,
                                            i % 2,
                                            bool(i % 2),
                                            i,
                                            GObject.G_MAXULONG,
                                            GObject.G_MININT64,
                                            0xffffffffffffffff,
                                            254,
                                            _bytes('a')
                                            ))
    # test set
    parent = tree_store.append(parent)
    i = 97
    label = 'this is child #%d' % i
    testobj = TestGtk.TestClass(self, i, label)
    # set() with interleaved column, value arguments
    tree_store.set(parent, 0, i,
                   2, testobj,
                   1, label,
                   3, testobj,
                   4, test_pyobj,
                   5, test_pydict,
                   6, test_pylist,
                   7, i % 2,
                   8, bool(i % 2),
                   9, i,
                   10, GObject.G_MAXULONG,
                   11, GObject.G_MININT64,
                   12, 0xffffffffffffffff,
                   13, 254,
                   14, _bytes('a'))

    parent = tree_store.append(parent)
    i = 98
    label = 'this is child #%d' % i
    testobj = TestGtk.TestClass(self, i, label)
    # set() also accepts a {column: value} mapping
    tree_store.set(parent, {0: i,
                            2: testobj,
                            1: label,
                            3: testobj,
                            4: test_pyobj,
                            5: test_pydict,
                            6: test_pylist,
                            7: i % 2,
                            8: bool(i % 2),
                            9: i,
                            10: GObject.G_MAXULONG,
                            11: GObject.G_MININT64,
                            12: 0xffffffffffffffff,
                            13: 254,
                            14: _bytes('a')})

    parent = tree_store.append(parent)
    i = 99
    label = 'this is child #%d' % i
    testobj = TestGtk.TestClass(self, i, label)
    # set() also accepts parallel column and value sequences
    tree_store.set(parent, (0, 2, 1, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14),
                   (i,
                    testobj,
                    label,
                    testobj,
                    test_pyobj,
                    test_pydict,
                    test_pylist,
                    i % 2,
                    bool(i % 2),
                    i,
                    GObject.G_MAXULONG,
                    GObject.G_MININT64,
                    0xffffffffffffffff,
                    254,
                    _bytes('a')))

    # len gets the number of children in the root node
    # since we kept appending to the previous node
    # there should only be one child of the root
    self.assertEqual(len(tree_store), 1)

    # walk the tree to see if the values were stored correctly
    parent = None
    i = 0
    treeiter = tree_store.iter_children(parent)
    while treeiter:
        i = tree_store.get_value(treeiter, 0)
        s = tree_store.get_value(treeiter, 1)
        obj = tree_store.get_value(treeiter, 2)
        obj.check(i, s)
        obj2 = tree_store.get_value(treeiter, 3)
        self.assertEqual(obj, obj2)

        pyobj = tree_store.get_value(treeiter, 4)
        self.assertEqual(pyobj, test_pyobj)
        pydict = tree_store.get_value(treeiter, 5)
        self.assertEqual(pydict, test_pydict)
        pylist = tree_store.get_value(treeiter, 6)
        self.assertEqual(pylist, test_pylist)

        # both bool columns were fed equivalent truth values
        bool_1 = tree_store.get_value(treeiter, 7)
        bool_2 = tree_store.get_value(treeiter, 8)
        self.assertEqual(bool_1, bool_2)
        self.assertTrue(isinstance(bool_1, bool))
        self.assertTrue(isinstance(bool_2, bool))

        uint_ = tree_store.get_value(treeiter, 9)
        self.assertEqual(uint_, i)
        ulong_ = tree_store.get_value(treeiter, 10)
        self.assertEqual(ulong_, GObject.G_MAXULONG)
        int64_ = tree_store.get_value(treeiter, 11)
        self.assertEqual(int64_, GObject.G_MININT64)
        uint64_ = tree_store.get_value(treeiter, 12)
        self.assertEqual(uint64_, 0xffffffffffffffff)
        uchar_ = tree_store.get_value(treeiter, 13)
        self.assertEqual(ord(uchar_), 254)
        char_ = tree_store.get_value(treeiter, 14)
        self.assertEqual(char_, 'a')

        # descend one level for the next loop iteration
        parent = treeiter
        treeiter = tree_store.iter_children(parent)

    # the deepest row must be the one written via parallel sequences
    self.assertEqual(i, 99)
def test_tree_store_signals(self):
tree_store = Gtk.TreeStore(int, bool)
def on_row_inserted(tree_store, tree_path, tree_iter, signal_list):
signal_list.append('row-inserted')
def on_row_changed(tree_store, tree_path, tree_iter, signal_list):
signal_list.append('row-changed')
signals = []
tree_store.connect('row-inserted', on_row_inserted, signals)
tree_store.connect('row-changed', on_row_changed, signals)
# adding rows with and without data should only call one signal
tree_store.append(None, (0, False))
self.assertEqual(signals, ['row-inserted'])
signals.pop()
tree_store.append(None)
self.assertEqual(signals, ['row-inserted'])
signals.pop()
tree_store.prepend(None, (0, False))
self.assertEqual(signals, ['row-inserted'])
signals.pop()
tree_store.prepend(None)
self.assertEqual(signals, ['row-inserted'])
signals.pop()
tree_store.insert(None, 1, (0, False))
self.assertEqual(signals, ['row-inserted'])
signals.pop()
tree_store.insert(None, 1)
self.assertEqual(signals, ['row-inserted'])
    def test_list_store(self):
        """Exercise the Gtk.ListStore overrides: row creation via append,
        prepend and the insert* family (full rows, partial rows with None,
        keyword/dict/column-list forms of set()), then walk the whole model
        verifying every stored value round-trips correctly."""
        class TestPyObject(object):
            pass

        test_pyobj = TestPyObject()
        test_pydict = {1: 1, "2": 2, "3": "3"}
        test_pylist = [1, "2", "3"]

        list_store = Gtk.ListStore(int, str, 'GIOverrideTreeAPITest', object, object, object, bool, bool)
        for i in range(1, 93):
            label = 'this is row #%d' % i
            testobj = TestGtk.TestClass(self, i, label)
            list_store.append((i,
                               label,
                               testobj,
                               test_pyobj,
                               test_pydict,
                               test_pylist,
                               i % 2,
                               bool(i % 2)))

        # append() with no data returns an iter usable with set_value()
        i = 93
        label = _unicode('this is row #93')
        treeiter = list_store.append()
        list_store.set_value(treeiter, 0, i)
        list_store.set_value(treeiter, 1, label)
        list_store.set_value(treeiter, 2, TestGtk.TestClass(self, i, label))
        list_store.set_value(treeiter, 3, test_pyobj)
        list_store.set_value(treeiter, 4, test_pydict)
        list_store.set_value(treeiter, 5, test_pylist)
        list_store.set_value(treeiter, 6, 1)
        list_store.set_value(treeiter, 7, True)

        # test prepend
        label = 'this is row #0'
        list_store.prepend((0,
                            label,
                            TestGtk.TestClass(self, 0, label),
                            test_pyobj,
                            test_pydict,
                            test_pylist,
                            0,
                            False))

        # test automatic unicode->str conversion
        i = 94
        label = _unicode('this is row #94')
        treeiter = list_store.append((i,
                                      label,
                                      TestGtk.TestClass(self, i, label),
                                      test_pyobj,
                                      test_pydict,
                                      test_pylist,
                                      0,
                                      False))

        # add sorted items out of order to test insert* apis
        # also test sending in None to not set a column
        i = 97
        label = 'this is row #97'
        treeiter = list_store.append((None,
                                      None,
                                      None,
                                      test_pyobj,
                                      None,
                                      test_pylist,
                                      1,
                                      None))

        # fill in the columns skipped with None above
        list_store.set_value(treeiter, 0, i)
        list_store.set_value(treeiter, 1, label)
        list_store.set_value(treeiter, 2, TestGtk.TestClass(self, i, label))
        list_store.set_value(treeiter, 4, test_pydict)
        list_store.set_value(treeiter, 7, True)

        # this should append (position 9999 is past the end of the store)
        i = 99
        label = 'this is row #99'
        list_store.insert(9999, (i,
                                 label,
                                 TestGtk.TestClass(self, i, label),
                                 test_pyobj,
                                 test_pydict,
                                 test_pylist,
                                 1,
                                 True))

        i = 96
        label = 'this is row #96'
        list_store.insert_before(treeiter, (i,
                                            label,
                                            TestGtk.TestClass(self, i, label),
                                            test_pyobj,
                                            test_pydict,
                                            test_pylist,
                                            0,
                                            False))

        i = 98
        label = 'this is row #98'
        list_store.insert_after(treeiter, (i,
                                           label,
                                           TestGtk.TestClass(self, i, label),
                                           test_pyobj,
                                           test_pydict,
                                           test_pylist,
                                           0,
                                           False))

        i = 95
        label = 'this is row #95'
        list_store.insert(95, (i,
                               label,
                               TestGtk.TestClass(self, i, label),
                               test_pyobj,
                               test_pydict,
                               test_pylist,
                               1,
                               True))

        # set() with interleaved column/value positional arguments
        i = 100
        label = 'this is row #100'
        treeiter = list_store.append()
        list_store.set(treeiter, 1, label,
                       0, i,
                       2, TestGtk.TestClass(self, i, label),
                       3, test_pyobj,
                       4, test_pydict,
                       5, test_pylist,
                       6, 0,
                       7, False)

        # set() with a {column: value} dict
        i = 101
        label = 'this is row #101'
        treeiter = list_store.append()
        list_store.set(treeiter, {1: label,
                                  0: i,
                                  2: TestGtk.TestClass(self, i, label),
                                  3: test_pyobj,
                                  4: test_pydict,
                                  5: test_pylist,
                                  6: 1,
                                  7: True})

        # set() with parallel column and value sequences
        i = 102
        label = 'this is row #102'
        treeiter = list_store.append()
        list_store.set(treeiter, (1, 0, 2, 3, 4, 5, 6, 7),
                       (label,
                        i,
                        TestGtk.TestClass(self, i, label),
                        test_pyobj,
                        test_pydict,
                        test_pylist,
                        0,
                        False))

        self.assertEqual(len(list_store), 103)

        # walk the list to see if the values were stored correctly
        i = 0
        treeiter = list_store.get_iter_first()

        counter = 0
        while treeiter:
            i = list_store.get_value(treeiter, 0)
            self.assertEqual(i, counter)
            s = list_store.get_value(treeiter, 1)
            obj = list_store.get_value(treeiter, 2)
            obj.check(i, s)

            pyobj = list_store.get_value(treeiter, 3)
            self.assertEqual(pyobj, test_pyobj)
            pydict = list_store.get_value(treeiter, 4)
            self.assertEqual(pydict, test_pydict)
            pylist = list_store.get_value(treeiter, 5)
            self.assertEqual(pylist, test_pylist)

            # column 6 was set with an int but must come back as bool
            bool_1 = list_store.get_value(treeiter, 6)
            bool_2 = list_store.get_value(treeiter, 7)
            self.assertEqual(bool_1, bool_2)
            self.assertTrue(isinstance(bool_1, bool))
            self.assertTrue(isinstance(bool_2, bool))

            treeiter = list_store.iter_next(treeiter)

            counter += 1

        self.assertEqual(i, 102)
def test_list_store_sort(self):
def comp1(model, row1, row2, user_data):
v1 = model[row1][1]
v2 = model[row2][1]
# make "m" smaller than anything else
if v1.startswith('m') and not v2.startswith('m'):
return -1
if v2.startswith('m') and not v1.startswith('m'):
return 1
return (v1 > v2) - (v1 < v2)
list_store = Gtk.ListStore(int, str)
list_store.set_sort_func(2, comp1, None)
list_store.append((1, 'apples'))
list_store.append((3, 'oranges'))
list_store.append((2, 'mango'))
# not sorted yet, should be original order
self.assertEqual([list(i) for i in list_store],
[[1, 'apples'], [3, 'oranges'], [2, 'mango']])
# sort with our custom function
list_store.set_sort_column_id(2, Gtk.SortType.ASCENDING)
self.assertEqual([list(i) for i in list_store],
[[2, 'mango'], [1, 'apples'], [3, 'oranges']])
list_store.set_sort_column_id(2, Gtk.SortType.DESCENDING)
self.assertEqual([list(i) for i in list_store],
[[3, 'oranges'], [1, 'apples'], [2, 'mango']])
def test_list_store_signals(self):
list_store = Gtk.ListStore(int, bool)
def on_row_inserted(list_store, tree_path, tree_iter, signal_list):
signal_list.append('row-inserted')
def on_row_changed(list_store, tree_path, tree_iter, signal_list):
signal_list.append('row-changed')
signals = []
list_store.connect('row-inserted', on_row_inserted, signals)
list_store.connect('row-changed', on_row_changed, signals)
# adding rows with and without data should only call one signal
list_store.append((0, False))
self.assertEqual(signals, ['row-inserted'])
signals.pop()
list_store.append()
self.assertEqual(signals, ['row-inserted'])
signals.pop()
list_store.prepend((0, False))
self.assertEqual(signals, ['row-inserted'])
signals.pop()
list_store.prepend()
self.assertEqual(signals, ['row-inserted'])
signals.pop()
list_store.insert(1, (0, False))
self.assertEqual(signals, ['row-inserted'])
signals.pop()
list_store.insert(1)
self.assertEqual(signals, ['row-inserted'])
    def test_tree_path(self):
        """TreePath construction from int, string and tuple, plus the
        overridden rich comparisons (including the None special cases)."""
        p1 = Gtk.TreePath()
        p2 = Gtk.TreePath.new_first()
        self.assertEqual(p1, p2)
        self.assertEqual(str(p1), '0')
        p1 = Gtk.TreePath(2)
        p2 = Gtk.TreePath.new_from_string('2')
        self.assertEqual(p1, p2)
        self.assertEqual(str(p1), '2')
        p1 = Gtk.TreePath('1:2:3')
        p2 = Gtk.TreePath.new_from_string('1:2:3')
        self.assertEqual(p1, p2)
        self.assertEqual(str(p1), '1:2:3')
        p1 = Gtk.TreePath((1, 2, 3))
        p2 = Gtk.TreePath.new_from_string('1:2:3')
        self.assertEqual(p1, p2)
        self.assertEqual(str(p1), '1:2:3')
        # The '!= None' / '> None' comparisons below are deliberate: the
        # override defines ordering against None (a real path is always
        # greater), so identity-style 'is not None' would not test it.
        self.assertTrue(p1 != None)
        self.assertFalse(p1 == None)
        self.assertTrue(p1 > None)
        self.assertTrue(p1 >= None)
        self.assertFalse(p1 < None)
        self.assertFalse(p1 <= None)
        # A TreePath is iterable, yielding its index components.
        self.assertEqual(tuple(p1), (1, 2, 3))
def test_tree_model(self):
tree_store = Gtk.TreeStore(int, str)
self.assertTrue(tree_store)
self.assertEqual(len(tree_store), 0)
self.assertEqual(tree_store.get_iter_first(), None)
def get_by_index(row, col=None):
if col:
return tree_store[row][col]
else:
return tree_store[row]
self.assertRaises(TypeError, get_by_index, None)
self.assertRaises(TypeError, get_by_index, "")
self.assertRaises(TypeError, get_by_index, ())
self.assertRaises(IndexError, get_by_index, "0")
self.assertRaises(IndexError, get_by_index, 0)
self.assertRaises(IndexError, get_by_index, (0,))
self.assertRaises(ValueError, tree_store.get_iter, "0")
self.assertRaises(ValueError, tree_store.get_iter, 0)
self.assertRaises(ValueError, tree_store.get_iter, (0,))
self.assertRaises(ValueError, tree_store.get_iter_from_string, "0")
for row in tree_store:
self.fail("Should not be reached")
class DerivedIntType(int):
pass
class DerivedStrType(str):
pass
for i in range(100):
label = 'this is row #%d' % i
parent = tree_store.append(None, (DerivedIntType(i), DerivedStrType(label),))
self.assertNotEqual(parent, None)
for j in range(20):
label = 'this is child #%d of node #%d' % (j, i)
child = tree_store.append(parent, (j, label,))
self.assertNotEqual(child, None)
self.assertTrue(tree_store)
self.assertEqual(len(tree_store), 100)
self.assertEqual(tree_store.iter_previous(tree_store.get_iter(0)), None)
for i, row in enumerate(tree_store):
self.assertEqual(row.model, tree_store)
self.assertEqual(row.parent, None)
self.assertEqual(tree_store[i].path, row.path)
self.assertEqual(tree_store[str(i)].path, row.path)
self.assertEqual(tree_store[(i,)].path, row.path)
self.assertEqual(tree_store[i][0], i)
self.assertEqual(tree_store[i][1], "this is row #%d" % i)
aiter = tree_store.get_iter(i)
self.assertEqual(tree_store.get_path(aiter), row.path)
aiter = tree_store.get_iter(str(i))
self.assertEqual(tree_store.get_path(aiter), row.path)
aiter = tree_store.get_iter((i,))
self.assertEqual(tree_store.get_path(aiter), row.path)
self.assertEqual(tree_store.iter_parent(aiter), row.parent)
next = tree_store.iter_next(aiter)
if i < len(tree_store) - 1:
self.assertEqual(tree_store.get_path(next), row.next.path)
self.assertEqual(tree_store.get_path(tree_store.iter_previous(next)),
tree_store.get_path(aiter))
else:
self.assertEqual(next, None)
self.assertEqual(tree_store.iter_n_children(row.iter), 20)
child = tree_store.iter_children(row.iter)
for j, childrow in enumerate(row.iterchildren()):
child_path = tree_store.get_path(child)
self.assertEqual(childrow.path, child_path)
self.assertEqual(childrow.parent.path, row.path)
self.assertEqual(childrow.path, tree_store[child].path)
self.assertEqual(childrow.path, tree_store[child_path].path)
self.assertEqual(childrow[0], tree_store[child][0])
self.assertEqual(childrow[0], j)
self.assertEqual(childrow[1], tree_store[child][1])
self.assertEqual(childrow[1], 'this is child #%d of node #%d' % (j, i))
self.assertRaises(IndexError, get_by_index, child, 2)
tree_store[child][1] = 'this was child #%d of node #%d' % (j, i)
self.assertEqual(childrow[1], 'this was child #%d of node #%d' % (j, i))
nth_child = tree_store.iter_nth_child(row.iter, j)
self.assertEqual(childrow.path, tree_store.get_path(nth_child))
childrow2 = tree_store["%d:%d" % (i, j)]
self.assertEqual(childrow.path, childrow2.path)
childrow2 = tree_store[(i, j,)]
self.assertEqual(childrow.path, childrow2.path)
child = tree_store.iter_next(child)
if j < 19:
self.assertEqual(childrow.next.path, tree_store.get_path(child))
else:
self.assertEqual(child, childrow.next)
self.assertEqual(child, None)
self.assertEqual(j, 19)
self.assertEqual(i, 99)
# negative indices
for i in range(-1, -100, -1):
i_real = i + 100
self.assertEqual(tree_store[i][0], i_real)
row = tree_store[i]
for j in range(-1, -20, -1):
j_real = j + 20
path = (i_real, j_real,)
self.assertEqual(tree_store[path][-2], j_real)
label = 'this was child #%d of node #%d' % (j_real, i_real)
self.assertEqual(tree_store[path][-1], label)
new_label = 'this still is child #%d of node #%d' % (j_real, i_real)
tree_store[path][-1] = new_label
self.assertEqual(tree_store[path][-1], new_label)
self.assertRaises(IndexError, get_by_index, path, -3)
self.assertRaises(IndexError, get_by_index, -101)
last_row = tree_store[99]
self.assertNotEqual(last_row, None)
for i, childrow in enumerate(last_row.iterchildren()):
if i < 19:
self.assertTrue(tree_store.remove(childrow.iter))
else:
self.assertFalse(tree_store.remove(childrow.iter))
self.assertEqual(i, 19)
self.assertEqual(tree_store.iter_n_children(last_row.iter), 0)
for childrow in last_row.iterchildren():
self.fail("Should not be reached")
aiter = tree_store.get_iter(10)
self.assertRaises(TypeError, tree_store.get, aiter, 1, 'a')
self.assertRaises(ValueError, tree_store.get, aiter, 1, -1)
self.assertRaises(ValueError, tree_store.get, aiter, 1, 100)
self.assertEqual(tree_store.get(aiter, 0, 1), (10, 'this is row #10'))
# check __delitem__
self.assertEqual(len(tree_store), 100)
aiter = tree_store.get_iter(10)
del tree_store[aiter]
self.assertEqual(len(tree_store), 99)
self.assertRaises(TypeError, tree_store.__delitem__, None)
self.assertRaises(IndexError, tree_store.__delitem__, -101)
self.assertRaises(IndexError, tree_store.__delitem__, 101)
def test_tree_model_edit(self):
model = Gtk.ListStore(int, str, float)
model.append([1, "one", -0.1])
model.append([2, "two", -0.2])
def set_row(value):
model[1] = value
self.assertRaises(TypeError, set_row, 3)
self.assertRaises(TypeError, set_row, "three")
self.assertRaises(ValueError, set_row, [])
self.assertRaises(ValueError, set_row, [3, "three"])
model[0] = (3, "three", -0.3)
    def test_tree_row_slice(self):
        """TreeModelRow supports read and write slicing, including steps;
        slice writes must reject length mismatches and wrong types."""
        model = Gtk.ListStore(int, str, float)
        model.append([1, "one", -0.1])

        # read slices
        self.assertEqual([1, "one", -0.1], model[0][:])
        self.assertEqual([1, "one"], model[0][:2])
        self.assertEqual(["one", -0.1], model[0][1:])
        self.assertEqual(["one"], model[0][1:-1])
        self.assertEqual([1], model[0][:-2])
        self.assertEqual([], model[0][5:])
        self.assertEqual([1, -0.1], model[0][0:3:2])

        # write slices (including a stepped slice)
        model[0][:] = (2, "two", -0.2)
        self.assertEqual([2, "two", -0.2], model[0][:])

        model[0][:2] = (3, "three")
        self.assertEqual([3, "three", -0.2], model[0][:])

        model[0][1:] = ("four", -0.4)
        self.assertEqual([3, "four", -0.4], model[0][:])

        model[0][1:-1] = ("five",)
        self.assertEqual([3, "five", -0.4], model[0][:])

        model[0][0:3:2] = (6, -0.6)
        self.assertEqual([6, "five", -0.6], model[0][:])

        # an empty target slice cannot absorb values
        def set_row1():
            model[0][5:] = ("doesn't", "matter",)
        self.assertRaises(ValueError, set_row1)

        # value count must match the slice width
        def set_row2():
            model[0][:1] = (0, "zero", 0)
        self.assertRaises(ValueError, set_row2)

        # values must match the column types
        def set_row3():
            model[0][:2] = ("0", 0)
        self.assertRaises(ValueError, set_row3)
    def test_tree_view(self):
        """set_cursor/get_cell_area must accept both TreePath objects and
        their string representations on an unrealized view."""
        store = Gtk.ListStore(int, str)
        store.append((0, "foo"))
        store.append((1, "bar"))
        view = Gtk.TreeView()

        # FIXME: We can't easily call get_cursor() to make sure this works as
        # expected as we need to realize and focus the column; the following
        # will raise a Gtk-CRITICAL which we ignore for now
        old_mask = GLib.log_set_always_fatal(
            GLib.LogLevelFlags.LEVEL_WARNING | GLib.LogLevelFlags.LEVEL_ERROR)
        view.set_cursor(store[1].path)
        view.set_cursor(str(store[1].path))

        view.get_cell_area(store[1].path)
        view.get_cell_area(str(store[1].path))
        # restore the previous fatal-log mask
        GLib.log_set_always_fatal(old_mask)
def test_tree_view_column(self):
cell = Gtk.CellRendererText()
Gtk.TreeViewColumn(title='This is just a test',
cell_renderer=cell,
text=0,
style=2)
def test_tree_selection(self):
store = Gtk.ListStore(int, str)
for i in range(10):
store.append((i, "foo"))
view = Gtk.TreeView()
view.set_model(store)
firstpath = store.get_path(store.get_iter_first())
sel = view.get_selection()
sel.select_path(firstpath)
(m, s) = sel.get_selected()
self.assertEqual(m, store)
self.assertEqual(store.get_path(s), firstpath)
sel.select_path(0)
(m, s) = sel.get_selected()
self.assertEqual(m, store)
self.assertEqual(store.get_path(s), firstpath)
sel.select_path("0:0")
(m, s) = sel.get_selected()
self.assertEqual(m, store)
self.assertEqual(store.get_path(s), firstpath)
sel.select_path((0, 0))
(m, s) = sel.get_selected()
self.assertEqual(m, store)
self.assertEqual(store.get_path(s), firstpath)
    def test_text_buffer(self):
        """Exercise the TextBuffer overrides: tag creation via keywords,
        marks, text get/set, cursor insertion, selection and the
        insert_with_tags / insert_with_tags_by_name variants."""
        self.assertEqual(Gtk.TextBuffer, overrides.Gtk.TextBuffer)
        buffer = Gtk.TextBuffer()
        # create_tag accepts arbitrary tag properties as keywords
        tag = buffer.create_tag('title', font='Sans 18')

        self.assertEqual(tag.props.name, 'title')
        self.assertEqual(tag.props.font, 'Sans 18')

        (start, end) = buffer.get_bounds()

        mark = buffer.create_mark(None, start)
        self.assertFalse(mark.get_left_gravity())

        buffer.set_text('Hello Jane Hello Bob')
        (start, end) = buffer.get_bounds()
        text = buffer.get_text(start, end, False)
        self.assertEqual(text, 'Hello Jane Hello Bob')

        buffer.set_text('')
        (start, end) = buffer.get_bounds()
        text = buffer.get_text(start, end, False)
        self.assertEqual(text, '')

        # rebuild 'Hello Jane Hello Bob' through cursor positioning
        buffer.insert(end, 'HelloHello')
        buffer.insert(end, ' Bob')

        cursor_iter = end.copy()
        cursor_iter.backward_chars(9)
        buffer.place_cursor(cursor_iter)
        buffer.insert_at_cursor(' Jane ')

        (start, end) = buffer.get_bounds()
        text = buffer.get_text(start, end, False)

        self.assertEqual(text, 'Hello Jane Hello Bob')

        # no selection yet -> empty tuple, not None
        sel = buffer.get_selection_bounds()
        self.assertEqual(sel, ())
        buffer.select_range(start, end)
        sel = buffer.get_selection_bounds()
        self.assertTrue(sel[0].equal(start))
        self.assertTrue(sel[1].equal(end))

        buffer.set_text('')
        buffer.insert_with_tags(buffer.get_start_iter(), 'HelloHello', tag)
        (start, end) = buffer.get_bounds()
        self.assertTrue(start.begins_tag(tag))
        self.assertTrue(start.has_tag(tag))

        buffer.set_text('')
        buffer.insert_with_tags_by_name(buffer.get_start_iter(), 'HelloHello', 'title')
        (start, end) = buffer.get_bounds()
        self.assertTrue(start.begins_tag(tag))
        self.assertTrue(start.has_tag(tag))

        # unknown tag names must be rejected, not silently ignored
        self.assertRaises(ValueError, buffer.insert_with_tags_by_name,
                          buffer.get_start_iter(), 'HelloHello', 'unknowntag')
    def test_text_iter(self):
        """TextIter tag queries: begins_tag/ends_tag/toggles_tag must track
        an applied tag's boundaries as the iter moves."""
        self.assertEqual(Gtk.TextIter, overrides.Gtk.TextIter)
        buffer = Gtk.TextBuffer()
        buffer.set_text('Hello Jane Hello Bob')
        tag = buffer.create_tag('title', font='Sans 18')
        (start, end) = buffer.get_bounds()
        start.forward_chars(10)
        buffer.apply_tag(tag, start, end)
        # at the tag boundaries (no tag argument -> any tag)
        self.assertTrue(start.begins_tag())
        self.assertTrue(end.ends_tag())
        self.assertTrue(start.toggles_tag())
        self.assertTrue(end.toggles_tag())
        # one char before the tagged range: no toggles there
        start.backward_chars(1)
        self.assertFalse(start.begins_tag())
        self.assertFalse(start.ends_tag())
        self.assertFalse(start.toggles_tag())
    def test_buttons(self):
        """Button overrides: default construction, stock-id construction
        (both the 'stock' kwarg and explicit use_stock), and LinkButton."""
        self.assertEqual(Gtk.Button, overrides.Gtk.Button)

        # test Gtk.Button
        button = Gtk.Button()
        self.assertTrue(isinstance(button, Gtk.Button))
        self.assertTrue(isinstance(button, Gtk.Container))
        self.assertTrue(isinstance(button, Gtk.Widget))
        # the stock kwarg implies use_stock and use_underline
        button = Gtk.Button(stock=Gtk.STOCK_CLOSE)
        self.assertEqual(Gtk.STOCK_CLOSE, button.get_label())
        self.assertTrue(button.get_use_stock())
        self.assertTrue(button.get_use_underline())

        # test Gtk.Button use_stock
        button = Gtk.Button(label=Gtk.STOCK_CLOSE, use_stock=True, use_underline=True)
        self.assertEqual(Gtk.STOCK_CLOSE, button.get_label())
        self.assertTrue(button.get_use_stock())
        self.assertTrue(button.get_use_underline())

        # test Gtk.LinkButton: the uri argument is mandatory
        self.assertRaises(TypeError, Gtk.LinkButton)
        button = Gtk.LinkButton('http://www.Gtk.org', 'Gtk')
        self.assertTrue(isinstance(button, Gtk.Button))
        self.assertTrue(isinstance(button, Gtk.Container))
        self.assertTrue(isinstance(button, Gtk.Widget))
        self.assertEqual('http://www.Gtk.org', button.get_uri())
        self.assertEqual('Gtk', button.get_label())
    def test_inheritance(self):
        """Every Gtk class whose ancestry (parents or interfaces) names an
        overridden type must be a subclass of that override."""
        for name in overrides.Gtk.__all__:
            over = getattr(overrides.Gtk, name)
            for element in dir(Gtk):
                try:
                    klass = getattr(Gtk, element)
                    # only GI-backed classes carry __info__
                    info = klass.__info__
                except (NotImplementedError, AttributeError):
                    continue

                # Get all parent classes and interfaces klass inherits from
                if isinstance(info, gi.types.ObjectInfo):
                    classes = list(info.get_interfaces())
                    parent = info.get_parent()
                    # walk up to (but excluding) GObject.Object
                    while parent.get_name() != "Object":
                        classes.append(parent)
                        parent = parent.get_parent()
                    # only ancestors from the Gtk namespace are relevant
                    classes = [kl for kl in classes if kl.get_namespace() == "Gtk"]
                else:
                    continue

                for kl in classes:
                    if kl.get_name() == name:
                        self.assertTrue(issubclass(klass, over,),
                                        "%r does not inherit from override %r" % (klass, over,))
def test_editable(self):
self.assertEqual(Gtk.Editable, overrides.Gtk.Editable)
# need to use Gtk.Entry because Editable is an interface
entry = Gtk.Entry()
pos = entry.insert_text('HeWorld', 0)
self.assertEqual(pos, 7)
pos = entry.insert_text('llo ', 2)
self.assertEqual(pos, 6)
text = entry.get_chars(0, 11)
self.assertEqual('Hello World', text)
def test_label(self):
label = Gtk.Label(label='Hello')
self.assertTrue(isinstance(label, Gtk.Widget))
self.assertEqual(label.get_text(), 'Hello')
def adjustment_check(self, adjustment, value=0.0, lower=0.0, upper=0.0,
step_increment=0.0, page_increment=0.0, page_size=0.0):
self.assertEqual(adjustment.get_value(), value)
self.assertEqual(adjustment.get_lower(), lower)
self.assertEqual(adjustment.get_upper(), upper)
self.assertEqual(adjustment.get_step_increment(), step_increment)
self.assertEqual(adjustment.get_page_increment(), page_increment)
self.assertEqual(adjustment.get_page_size(), page_size)
def test_adjustment(self):
adjustment = Gtk.Adjustment(1, 0, 6, 4, 5, 3)
self.adjustment_check(adjustment, 1, 0, 6, 4, 5, 3)
adjustment = Gtk.Adjustment(1, 0, 6, 4, 5)
self.adjustment_check(adjustment, 1, 0, 6, 4, 5)
adjustment = Gtk.Adjustment(1, 0, 6, 4)
self.adjustment_check(adjustment, 1, 0, 6, 4)
adjustment = Gtk.Adjustment(1, 0, 6)
self.adjustment_check(adjustment, 1, 0, 6)
adjustment = Gtk.Adjustment()
self.adjustment_check(adjustment)
adjustment = Gtk.Adjustment(value=1, lower=0, upper=6,
step_increment=4, page_increment=5, page_size=3)
self.adjustment_check(adjustment, 1, 0, 6, 4, 5, 3)
    def test_table(self):
        """Table construction: default, positional (columns, rows,
        homogeneous), PyGTK keyword names, and real property names."""
        table = Gtk.Table()
        self.assertTrue(isinstance(table, Gtk.Table))
        self.assertTrue(isinstance(table, Gtk.Container))
        self.assertTrue(isinstance(table, Gtk.Widget))
        self.assertEqual(table.get_size(), (1, 1))
        self.assertEqual(table.get_homogeneous(), False)

        table = Gtk.Table(2, 3)
        self.assertEqual(table.get_size(), (2, 3))
        self.assertEqual(table.get_homogeneous(), False)

        table = Gtk.Table(2, 3, True)
        self.assertEqual(table.get_size(), (2, 3))
        self.assertEqual(table.get_homogeneous(), True)

        # Test PyGTK interface
        table = Gtk.Table(rows=3, columns=2)
        self.assertEqual(table.get_size(), (3, 2))
        # Test using the actual property names
        table = Gtk.Table(n_rows=2, n_columns=3, homogeneous=True)
        self.assertEqual(table.get_size(), (2, 3))
        self.assertEqual(table.get_homogeneous(), True)

        # attach() with default packing places the child
        label = Gtk.Label(label='Hello')
        self.assertTrue(isinstance(label, Gtk.Widget))
        table.attach(label, 0, 1, 0, 1)
        self.assertEqual(label, table.get_children()[0])
def test_scrolledwindow(self):
sw = Gtk.ScrolledWindow()
self.assertTrue(isinstance(sw, Gtk.ScrolledWindow))
self.assertTrue(isinstance(sw, Gtk.Container))
self.assertTrue(isinstance(sw, Gtk.Widget))
sb = sw.get_hscrollbar()
self.assertEqual(sw.get_hadjustment(), sb.get_adjustment())
sb = sw.get_vscrollbar()
self.assertEqual(sw.get_vadjustment(), sb.get_adjustment())
    def test_widget_drag_methods(self):
        """Smoke-test the drag-and-drop override methods: existence and
        argument counts only, no functional DnD behaviour."""
        widget = Gtk.Button()

        # here we are not checking functionality, only that the methods exist
        # and except the right number of arguments
        widget.drag_check_threshold(0, 0, 0, 0)

        # drag_dest_ methods
        widget.drag_dest_set(Gtk.DestDefaults.DROP, None, Gdk.DragAction.COPY)
        widget.drag_dest_add_image_targets()
        widget.drag_dest_add_text_targets()
        widget.drag_dest_add_uri_targets()
        widget.drag_dest_get_track_motion()
        widget.drag_dest_set_track_motion(True)
        widget.drag_dest_get_target_list()
        widget.drag_dest_set_target_list(Gtk.TargetList.new([Gtk.TargetEntry.new('test', 0, 0)]))
        widget.drag_dest_unset()

        widget.drag_highlight()
        widget.drag_unhighlight()

        # drag_source_ methods
        widget.drag_source_set(Gdk.ModifierType.BUTTON1_MASK, None, Gdk.DragAction.MOVE)
        widget.drag_source_add_image_targets()
        widget.drag_source_add_text_targets()
        widget.drag_source_add_uri_targets()
        widget.drag_source_set_icon_name("")
        widget.drag_source_set_icon_pixbuf(GdkPixbuf.Pixbuf())
        widget.drag_source_set_icon_stock("")
        widget.drag_source_get_target_list()
        widget.drag_source_set_target_list(Gtk.TargetList.new([Gtk.TargetEntry.new('test', 0, 0)]))
        widget.drag_source_unset()

        # these methods cannot be called because they require a valid drag on
        # a real GdkWindow. So we only check that they exist and are callable.
        self.assertTrue(hasattr(widget.drag_dest_set_proxy, '__call__'))
        self.assertTrue(hasattr(widget.drag_get_data, '__call__'))
def test_scrollbar(self):
# PyGTK compat
adjustment = Gtk.Adjustment()
hscrollbar = Gtk.HScrollbar()
vscrollbar = Gtk.VScrollbar()
self.assertNotEqual(hscrollbar.props.adjustment, adjustment)
self.assertNotEqual(vscrollbar.props.adjustment, adjustment)
hscrollbar = Gtk.HScrollbar(adjustment)
vscrollbar = Gtk.VScrollbar(adjustment)
self.assertEqual(hscrollbar.props.adjustment, adjustment)
self.assertEqual(vscrollbar.props.adjustment, adjustment)
def test_iconview(self):
# PyGTK compat
iconview = Gtk.IconView()
self.assertEqual(iconview.props.model, None)
model = Gtk.ListStore(str)
iconview = Gtk.IconView(model)
self.assertEqual(iconview.props.model, model)
def test_toolbutton(self):
# PyGTK compat
button = Gtk.ToolButton()
self.assertEqual(button.props.stock_id, None)
button = Gtk.ToolButton('gtk-new')
self.assertEqual(button.props.stock_id, 'gtk-new')
icon = Gtk.Image.new_from_stock(Gtk.STOCK_OPEN, Gtk.IconSize.SMALL_TOOLBAR)
button = Gtk.ToolButton(label='mylabel', icon_widget=icon)
self.assertEqual(button.props.label, 'mylabel')
self.assertEqual(button.props.icon_widget, icon)
def test_iconset(self):
# PyGTK compat
Gtk.IconSet()
pixbuf = GdkPixbuf.Pixbuf()
Gtk.IconSet(pixbuf)
def test_viewport(self):
# PyGTK compat
vadjustment = Gtk.Adjustment()
hadjustment = Gtk.Adjustment()
viewport = Gtk.Viewport(hadjustment=hadjustment,
vadjustment=vadjustment)
self.assertEqual(viewport.props.vadjustment, vadjustment)
self.assertEqual(viewport.props.hadjustment, hadjustment)
class TestGio(unittest.TestCase):
    """Tests for the Gio overrides: FileEnumerator iteration and the
    GSettings native/dictionary interfaces.

    NOTE(review): relies on the 'org.gnome.test', 'org.gnome.nopathtest'
    and 'org.gnome.empty' GSettings schemas being installed by the test
    harness — confirm against the build setup.
    """
    def setUp(self):
        self.settings = Gio.Settings('org.gnome.test')
        # we change the values in the tests, so set them to predictable start
        # value
        self.settings.reset('test-string')
        self.settings.reset('test-array')

    def test_file_enumerator(self):
        """Iterating a FileEnumerator must visit the same entries as
        repeated next_file() calls."""
        self.assertEqual(Gio.FileEnumerator, overrides.Gio.FileEnumerator)
        f = Gio.file_new_for_path("./")

        iter_info = []
        for info in f.enumerate_children("standard::*", 0, None):
            iter_info.append(info.get_name())

        next_info = []
        enumerator = f.enumerate_children("standard::*", 0, None)
        while True:
            info = enumerator.next_file(None)
            if info is None:
                break
            next_info.append(info.get_name())

        self.assertEqual(iter_info, next_info)

    def test_gsettings_native(self):
        """The non-overridden GSettings API: typed getters/setters and
        GVariant round-trips."""
        self.assertTrue('test-array' in self.settings.list_keys())

        # get various types
        v = self.settings.get_value('test-boolean')
        self.assertEqual(v.get_boolean(), True)
        self.assertEqual(self.settings.get_boolean('test-boolean'), True)
        v = self.settings.get_value('test-string')
        self.assertEqual(v.get_string(), 'Hello')
        self.assertEqual(self.settings.get_string('test-string'), 'Hello')
        v = self.settings.get_value('test-array')
        self.assertEqual(v.unpack(), [1, 2])
        v = self.settings.get_value('test-tuple')
        self.assertEqual(v.unpack(), (1, 2))

        # set a value
        self.settings.set_string('test-string', 'World')
        self.assertEqual(self.settings.get_string('test-string'), 'World')

        self.settings.set_value('test-string', GLib.Variant('s', 'Goodbye'))
        self.assertEqual(self.settings.get_string('test-string'), 'Goodbye')

    def test_gsettings_constructor(self):
        """Settings path defaults to the schema's path but can be
        overridden with the 'path' keyword."""
        # default constructor uses path from schema
        self.assertEqual(self.settings.get_property('path'), '/tests/')

        # optional constructor arguments
        with_path = Gio.Settings('org.gnome.nopathtest', path='/mypath/')
        self.assertEqual(with_path.get_property('path'), '/mypath/')
        self.assertEqual(with_path['np-int'], 42)

    def test_gsettings_override(self):
        """The dictionary-style GSettings override: len/contains/getitem/
        setitem semantics and error types."""
        # dictionary interface
        self.assertEqual(len(self.settings), 4)
        self.assertTrue('test-array' in self.settings)
        self.assertTrue('test-array' in self.settings.keys())
        self.assertFalse('nonexisting' in self.settings)
        self.assertFalse(4 in self.settings)
        self.assertEqual(bool(self.settings), True)

        # get various types
        self.assertEqual(self.settings['test-boolean'], True)
        self.assertEqual(self.settings['test-string'], 'Hello')
        self.assertEqual(self.settings['test-array'], [1, 2])
        self.assertEqual(self.settings['test-tuple'], (1, 2))

        self.assertRaises(KeyError, self.settings.__getitem__, 'unknown')
        self.assertRaises(KeyError, self.settings.__getitem__, 2)

        # set a value
        self.settings['test-string'] = 'Goodbye'
        self.assertEqual(self.settings['test-string'], 'Goodbye')
        self.settings['test-array'] = [3, 4, 5]
        self.assertEqual(self.settings['test-array'], [3, 4, 5])

        # wrong value type -> TypeError, unknown key -> KeyError
        self.assertRaises(TypeError, self.settings.__setitem__, 'test-string', 1)
        self.assertRaises(KeyError, self.settings.__setitem__, 'unknown', 'moo')

    def test_gsettings_empty(self):
        """An empty schema still behaves as a (truthy) empty mapping."""
        empty = Gio.Settings('org.gnome.empty', path='/tests/')
        self.assertEqual(len(empty), 0)
        self.assertEqual(bool(empty), True)
        self.assertEqual(empty.keys(), [])
| lgpl-2.1 |
Ant-OS/android_packages_apps_OTAUpdates | jni/boost_1_57_0/libs/mpi/test/python/scan_test.py | 64 | 1125 | # Copyright (C) 2006 Douglas Gregor <doug.gregor -at- gmail.com>.
# Use, modification and distribution is subject to the Boost Software
# License, Version 1.0. (See accompanying file LICENSE_1_0.txt or copy at
# http://www.boost.org/LICENSE_1_0.txt)
# Test scan() collective.
import boost.parallel.mpi as mpi
from generators import *
# NOTE: this file is Python 2 ('print' statement syntax); keep it so.
def scan_test(comm, generator, kind, op, op_kind):
    """Verify mpi.scan: each rank's result must equal the fold of *op*
    over generator(0) .. generator(rank), i.e. an inclusive prefix
    reduction across the communicator."""
    if comm.rank == 0:
        print ("Prefix reduction to %s of %s..." % (op_kind, kind)),
    my_value = generator(comm.rank)
    result = mpi.scan(comm, my_value, op)
    # Recompute the expected inclusive prefix locally on every rank.
    expected_result = generator(0);
    for p in range(1, comm.rank+1):
        expected_result = op(expected_result, generator(p))
    assert result == expected_result
    if comm.rank == 0:
        print "OK."
    return
# Run the prefix-scan check over several value types and operations.
scan_test(mpi.world, int_generator, "integers", lambda x,y:x + y, "sum")
scan_test(mpi.world, int_generator, "integers", lambda x,y:x * y, "product")
scan_test(mpi.world, string_generator, "strings", lambda x,y:x + y, "concatenation")
scan_test(mpi.world, string_list_generator, "list of strings", lambda x,y:x + y, "concatenation")
| apache-2.0 |
myerpengine/odoo | openerp/addons/base/res/res_company.py | 37 | 21361 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2009 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import os
import re
import openerp
from openerp import SUPERUSER_ID, tools
from openerp.osv import fields, osv
from openerp.tools.translate import _
from openerp.tools.safe_eval import safe_eval as eval
from openerp.tools import image_resize_image
class multi_company_default(osv.osv):
    """
    Manage multi company default value.

    Each record is a rule: when a user connected to `company_id` creates a
    record of model `object_id` and `expression` evaluates truthy, the
    record is stored for `company_dest_id`. Rules are evaluated in
    `sequence` order per company.
    """
    _name = 'multi_company.default'
    _description = 'Default multi company'
    _order = 'company_id,sequence,id'

    _columns = {
        'sequence': fields.integer('Sequence'),
        'name': fields.char('Name', size=256, required=True, help='Name it to easily find a record'),
        'company_id': fields.many2one('res.company', 'Main Company', required=True,
            help='Company where the user is connected'),
        'company_dest_id': fields.many2one('res.company', 'Default Company', required=True,
            help='Company to store the current record'),
        'object_id': fields.many2one('ir.model', 'Object', required=True,
            help='Object affected by this rule'),
        'expression': fields.char('Expression', size=256, required=True,
            help='Expression, must be True to match\nuse context.get or user (browse)'),
        'field_id': fields.many2one('ir.model.fields', 'Field', help='Select field property'),
    }

    _defaults = {
        'expression': 'True',
        'sequence': 100,
    }

    def copy(self, cr, uid, id, default=None, context=None):
        """
        Add (copy) in the name when duplicate record
        """
        if not context:
            context = {}
        if not default:
            default = {}
        company = self.browse(cr, uid, id, context=context)
        # work on a copy so the caller's default dict is not mutated
        default = default.copy()
        default['name'] = company.name + _(' (copy)')
        return super(multi_company_default, self).copy(cr, uid, id, default, context=context)

# legacy OpenERP-style model registration by instantiation
multi_company_default()
class res_company(osv.osv):
    """Company records.

    A company delegates most of its identity data (name, logo, address,
    phone, email, website, VAT, ...) to an associated ``res.partner``
    record (``partner_id``) through related/function fields.  The class
    also stores the RML report header/footer templates used for printed
    documents and hosts the multi-company default lookup helper.
    """
    _name = "res.company"
    _description = 'Companies'
    _order = 'name'

    def _get_address_data(self, cr, uid, ids, field_names, arg, context=None):
        """ Read the 'address' functional fields. """
        result = {}
        part_obj = self.pool.get('res.partner')
        for company in self.browse(cr, uid, ids, context=context):
            result[company.id] = {}.fromkeys(field_names, False)
            if company.partner_id:
                # Read as superuser so the company address is available even
                # when the current user cannot read the partner directly.
                address_data = part_obj.address_get(cr, openerp.SUPERUSER_ID, [company.partner_id.id], adr_pref=['default'])
                if address_data['default']:
                    address = part_obj.read(cr, openerp.SUPERUSER_ID, address_data['default'], field_names, context=context)
                    for field in field_names:
                        result[company.id][field] = address[field] or False
        return result

    def _set_address_data(self, cr, uid, company_id, name, value, arg, context=None):
        """ Write the 'address' functional fields. """
        company = self.browse(cr, uid, company_id, context=context)
        if company.partner_id:
            part_obj = self.pool.get('res.partner')
            address_data = part_obj.address_get(cr, uid, [company.partner_id.id], adr_pref=['default'])
            address = address_data['default']
            if address:
                part_obj.write(cr, uid, [address], {name: value or False}, context=context)
            else:
                # No default address yet: create one attached to the partner.
                part_obj.create(cr, uid, {name: value or False, 'parent_id': company.partner_id.id}, context=context)
        return True

    def _get_logo_web(self, cr, uid, ids, _field_name, _args, context=None):
        """Resized copy of the partner logo used by the web client."""
        result = dict.fromkeys(ids, False)
        for record in self.browse(cr, uid, ids, context=context):
            # 180px wide; None lets image_resize_image keep the aspect ratio.
            size = (180, None)
            result[record.id] = image_resize_image(record.partner_id.image, size)
        return result

    def _get_companies_from_partner(self, cr, uid, ids, context=None):
        # store= trigger: companies whose logo_web must be recomputed when
        # the underlying partner image changes.
        return self.pool['res.company'].search(cr, uid, [('partner_id', 'in', ids)], context=context)

    # Most identity fields below are related/function fields delegating to
    # the linked partner record.
    _columns = {
        'name': fields.related('partner_id', 'name', string='Company Name', size=128, required=True, store=True, type='char'),
        'parent_id': fields.many2one('res.company', 'Parent Company', select=True),
        'child_ids': fields.one2many('res.company', 'parent_id', 'Child Companies'),
        'partner_id': fields.many2one('res.partner', 'Partner', required=True),
        'rml_header': fields.text('RML Header', required=True),
        'rml_header1': fields.char('Company Tagline', size=200, help="Appears by default on the top right corner of your printed documents (report header)."),
        'rml_header2': fields.text('RML Internal Header', required=True),
        'rml_header3': fields.text('RML Internal Header for Landscape Reports', required=True),
        'rml_footer': fields.text('Report Footer', help="Footer text displayed at the bottom of all reports."),
        'rml_footer_readonly': fields.related('rml_footer', type='text', string='Report Footer', readonly=True),
        'custom_footer': fields.boolean('Custom Footer', help="Check this to define the report footer manually. Otherwise it will be filled in automatically."),
        'font': fields.many2one('res.font', string="Font", domain=[('mode', 'in', ('Normal', 'Regular', 'all', 'Book'))],
            help="Set the font into the report header, it will be used as default font in the RML reports of the user company"),
        'logo': fields.related('partner_id', 'image', string="Logo", type="binary"),
        'logo_web': fields.function(_get_logo_web, string="Logo Web", type="binary", store={
            'res.company': (lambda s, c, u, i, x: i, ['partner_id'], 10),
            'res.partner': (_get_companies_from_partner, ['image'], 10),
        }),
        'currency_id': fields.many2one('res.currency', 'Currency', required=True),
        'currency_ids': fields.one2many('res.currency', 'company_id', 'Currency'),
        'user_ids': fields.many2many('res.users', 'res_company_users_rel', 'cid', 'user_id', 'Accepted Users'),
        'account_no':fields.char('Account No.', size=64),
        'street': fields.function(_get_address_data, fnct_inv=_set_address_data, size=128, type='char', string="Street", multi='address'),
        'street2': fields.function(_get_address_data, fnct_inv=_set_address_data, size=128, type='char', string="Street2", multi='address'),
        'zip': fields.function(_get_address_data, fnct_inv=_set_address_data, size=24, type='char', string="Zip", multi='address'),
        'city': fields.function(_get_address_data, fnct_inv=_set_address_data, size=24, type='char', string="City", multi='address'),
        'state_id': fields.function(_get_address_data, fnct_inv=_set_address_data, type='many2one', relation='res.country.state', string="Fed. State", multi='address'),
        'bank_ids': fields.one2many('res.partner.bank','company_id', 'Bank Accounts', help='Bank accounts related to this company'),
        'country_id': fields.function(_get_address_data, fnct_inv=_set_address_data, type='many2one', relation='res.country', string="Country", multi='address'),
        'email': fields.related('partner_id', 'email', size=64, type='char', string="Email", store=True),
        'phone': fields.related('partner_id', 'phone', size=64, type='char', string="Phone", store=True),
        'fax': fields.function(_get_address_data, fnct_inv=_set_address_data, size=64, type='char', string="Fax", multi='address'),
        'website': fields.related('partner_id', 'website', string="Website", type="char", size=64),
        'vat': fields.related('partner_id', 'vat', string="Tax ID", type="char", size=32),
        'company_registry': fields.char('Company Registry', size=64),
        'rml_paper_format': fields.selection([('a4', 'A4'), ('us_letter', 'US Letter')], "Paper Format", required=True, oldname='paper_format'),
    }

    _sql_constraints = [
        ('name_uniq', 'unique (name)', 'The company name must be unique !')
    ]

    def onchange_footer(self, cr, uid, ids, custom_footer, phone, fax, email, website, vat, company_registry, bank_ids, context=None):
        """Recompute the automatic report footer from contact details and
        bank accounts; does nothing when a custom footer is requested."""
        if custom_footer:
            return {}
        # first line (notice that missing elements are filtered out before the join)
        res = ' | '.join(filter(bool, [
            phone and '%s: %s' % (_('Phone'), phone),
            fax and '%s: %s' % (_('Fax'), fax),
            email and '%s: %s' % (_('Email'), email),
            website and '%s: %s' % (_('Website'), website),
            vat and '%s: %s' % (_('TIN'), vat),
            company_registry and '%s: %s' % (_('Reg'), company_registry),
        ]))
        # second line: bank accounts
        res_partner_bank = self.pool.get('res.partner.bank')
        account_data = self.resolve_2many_commands(cr, uid, 'bank_ids', bank_ids, context=context)
        account_names = res_partner_bank._prepare_name_get(cr, uid, account_data, context=context)
        if account_names:
            title = _('Bank Accounts') if len(account_names) > 1 else _('Bank Account')
            res += '\n%s: %s' % (title, ', '.join(name for id, name in account_names))
        return {'value': {'rml_footer': res, 'rml_footer_readonly': res}}

    def onchange_state(self, cr, uid, ids, state_id, context=None):
        """Align the country with the newly selected federal state."""
        if state_id:
            return {'value':{'country_id': self.pool.get('res.country.state').browse(cr, uid, state_id, context).country_id.id }}
        return {}

    def onchange_font_name(self, cr, uid, ids, font, rml_header, rml_header2, rml_header3, context=None):
        """ To change default header style of all <para> and drawstring. """
        def _change_header(header,font):
            """ Replace default fontname use in header and setfont tag """
            default_para = re.sub('fontName.?=.?".*"', 'fontName="%s"'% font, header)
            return re.sub('(<setFont.?name.?=.?)(".*?")(.)', '\g<1>"%s"\g<3>'% font, default_para)
        if not font:
            return True
        fontname = self.pool.get('res.font').browse(cr, uid, font, context=context).name
        return {'value':{
            'rml_header': _change_header(rml_header, fontname),
            'rml_header2':_change_header(rml_header2, fontname),
            'rml_header3':_change_header(rml_header3, fontname)
        }}

    def on_change_country(self, cr, uid, ids, country_id, context=None):
        """Restrict the state domain to the chosen country and propose the
        country's currency (fallback: the reference currency, rate == 1)."""
        res = {'domain': {'state_id': []}}
        currency_id = self._get_euro(cr, uid, context=context)
        if country_id:
            currency_id = self.pool.get('res.country').browse(cr, uid, country_id, context=context).currency_id.id
            res['domain'] = {'state_id': [('country_id','=',country_id)]}
        res['value'] = {'currency_id': currency_id}
        return res

    def name_search(self, cr, uid, name='', args=None, operator='ilike', context=None, limit=100):
        if context is None:
            context = {}
        if context.pop('user_preference', None):
            # We browse as superuser. Otherwise, the user would be able to
            # select only the currently visible companies (according to rules,
            # which are probably to allow to see the child companies) even if
            # she belongs to some other companies.
            user = self.pool.get('res.users').browse(cr, SUPERUSER_ID, uid, context=context)
            cmp_ids = list(set([user.company_id.id] + [cmp.id for cmp in user.company_ids]))
            uid = SUPERUSER_ID
            args = (args or []) + [('id', 'in', cmp_ids)]
        return super(res_company, self).name_search(cr, uid, name=name, args=args, operator=operator, context=context, limit=limit)

    def _company_default_get(self, cr, uid, object=False, field=False, context=None):
        """
        Check if the object for this company have a default value
        """
        if not context:
            context = {}
        proxy = self.pool.get('multi_company.default')
        args = [
            ('object_id.model', '=', object),
            ('field_id', '=', field),
        ]
        ids = proxy.search(cr, uid, args, context=context)
        user = self.pool.get('res.users').browse(cr, SUPERUSER_ID, uid, context=context)
        for rule in proxy.browse(cr, uid, ids, context):
            # 'eval' here is openerp's safe_eval (aliased at import time),
            # so rule expressions cannot run arbitrary code.
            if eval(rule.expression, {'context': context, 'user': user}):
                return rule.company_dest_id.id
        return user.company_id.id

    # Results are cached (ormcache); the cache is cleared via cache_restart()
    # whenever a company is created or written.
    @tools.ormcache()
    def _get_company_children(self, cr, uid=None, company=None):
        if not company:
            return []
        ids = self.search(cr, uid, [('parent_id','child_of',[company])])
        return ids

    def _get_partner_hierarchy(self, cr, uid, company_id, context=None):
        """Partner ids of the whole company tree, starting from the root."""
        if company_id:
            parent_id = self.browse(cr, uid, company_id)['parent_id']
            if parent_id:
                # Climb to the topmost company first.
                return self._get_partner_hierarchy(cr, uid, parent_id.id, context)
            else:
                return self._get_partner_descendance(cr, uid, company_id, [], context)
        return []

    def _get_partner_descendance(self, cr, uid, company_id, descendance, context=None):
        """Accumulate the partner ids of company_id and all its children."""
        descendance.append(self.browse(cr, uid, company_id).partner_id.id)
        for child_id in self._get_company_children(cr, uid, company_id):
            if child_id != company_id:
                descendance = self._get_partner_descendance(cr, uid, child_id, descendance)
        return descendance

    #
    # This function restart the cache on the _get_company_children method
    #
    def cache_restart(self, cr):
        self._get_company_children.clear_cache(self)

    def create(self, cr, uid, vals, context=None):
        # When a partner is supplied (or no name to build one from), fall
        # straight through to the standard create.
        if not vals.get('name', False) or vals.get('partner_id', False):
            self.cache_restart(cr)
            return super(res_company, self).create(cr, uid, vals, context=context)
        # Otherwise auto-create the backing partner, then link it back to
        # the freshly created company.
        obj_partner = self.pool.get('res.partner')
        partner_id = obj_partner.create(cr, uid, {'name': vals['name'], 'is_company':True, 'image': vals.get('logo', False)}, context=context)
        vals.update({'partner_id': partner_id})
        self.cache_restart(cr)
        company_id = super(res_company, self).create(cr, uid, vals, context=context)
        obj_partner.write(cr, uid, [partner_id], {'company_id': company_id}, context=context)
        return company_id

    def write(self, cr, uid, ids, values, context=None):
        # Parent/child links may change: drop the children cache first.
        self.cache_restart(cr)
        return super(res_company, self).write(cr, uid, ids, values, context=context)

    def _get_euro(self, cr, uid, context=None):
        # Default currency: the one whose rate is exactly 1 (the reference
        # currency -- named "euro" for historical reasons only).
        rate_obj = self.pool.get('res.currency.rate')
        rate_id = rate_obj.search(cr, uid, [('rate', '=', 1)], context=context)
        return rate_id and rate_obj.browse(cr, uid, rate_id[0], context=context).currency_id.id or False

    def _get_logo(self, cr, uid, ids):
        # Default logo: the bundled placeholder image, base64-encoded
        # (Python 2 str.encode('base64')).
        return open(os.path.join( tools.config['root_path'], 'addons', 'base', 'res', 'res_company_logo.png'), 'rb') .read().encode('base64')

    def _get_font(self, cr, uid, ids):
        font_obj = self.pool.get('res.font')
        res = font_obj.search(cr, uid, [('family', '=', 'Helvetica'), ('mode', '=', 'all')], limit=1)
        return res and res[0] or False

    # RML page template shared by rml_header2 (portrait) and rml_header3
    # (landscape); the %s slots are filled below with frame size, the
    # timestamp/title positions and the header rule coordinates.
    _header = """
<header>
<pageTemplate>
    <frame id="first" x1="28.0" y1="28.0" width="%s" height="%s"/>
    <stylesheet>
        <!-- Set here the default font to use for all <para> tags -->
        <paraStyle name='Normal' fontName="DejaVuSans"/>
    </stylesheet>
    <pageGraphics>
        <fill color="black"/>
        <stroke color="black"/>
        <setFont name="DejaVuSans" size="8"/>
        <drawString x="%s" y="%s"> [[ formatLang(time.strftime("%%Y-%%m-%%d"), date=True) ]] [[ time.strftime("%%H:%%M") ]]</drawString>
        <setFont name="DejaVuSans-Bold" size="10"/>
        <drawCentredString x="%s" y="%s">[[ company.partner_id.name ]]</drawCentredString>
        <stroke color="#000000"/>
        <lines>%s</lines>
        <!-- Set here the default font to use for all <drawString> tags -->
        <!-- don't forget to change the 2 other occurence of <setFont> above if needed -->
        <setFont name="DejaVuSans" size="8"/>
    </pageGraphics>
</pageTemplate>
</header>"""

    _header2 = _header % (539, 772, "1.0cm", "28.3cm", "11.1cm", "28.3cm", "1.0cm 28.1cm 20.1cm 28.1cm")
    _header3 = _header % (786, 525, 25, 555, 440, 555, "25 550 818 550")

    def _get_header(self,cr,uid,ids):
        """Default rml_header: the corporate header file if it can be read,
        otherwise the built-in A4 template."""
        try :
            header_file = tools.file_open(os.path.join('base', 'report', 'corporate_rml_header.rml'))
            try:
                return header_file.read()
            finally:
                header_file.close()
        except:
            # NOTE(review): bare except deliberately makes this best-effort;
            # any failure falls back to the built-in A4 header.
            return self._header_a4

    # Main report header template; specialised into A4/letter variants
    # below by substituting the frame height and the various y positions.
    _header_main = """
<header>
    <pageTemplate>
        <frame id="first" x1="1.3cm" y1="3.0cm" height="%s" width="19.0cm"/>
        <stylesheet>
            <!-- Set here the default font to use for all <para> tags -->
            <paraStyle name='Normal' fontName="DejaVuSans"/>
            <paraStyle name="main_footer" fontSize="8.0" alignment="CENTER"/>
            <paraStyle name="main_header" fontSize="8.0" leading="10" alignment="LEFT" spaceBefore="0.0" spaceAfter="0.0"/>
        </stylesheet>
        <pageGraphics>
            <!-- Set here the default font to use for all <drawString> tags -->
            <setFont name="DejaVuSans" size="8"/>
            <!-- You Logo - Change X,Y,Width and Height -->
            <image x="1.3cm" y="%s" height="40.0" >[[ company.logo or removeParentNode('image') ]]</image>
            <fill color="black"/>
            <stroke color="black"/>
            <!-- page header -->
            <lines>1.3cm %s 20cm %s</lines>
            <drawRightString x="20cm" y="%s">[[ company.rml_header1 ]]</drawRightString>
            <drawString x="1.3cm" y="%s">[[ company.partner_id.name ]]</drawString>
            <place x="1.3cm" y="%s" height="1.8cm" width="15.0cm">
                <para style="main_header">[[ display_address(company.partner_id) or '' ]]</para>
            </place>
            <drawString x="1.3cm" y="%s">Phone:</drawString>
            <drawRightString x="7cm" y="%s">[[ company.partner_id.phone or '' ]]</drawRightString>
            <drawString x="1.3cm" y="%s">Mail:</drawString>
            <drawRightString x="7cm" y="%s">[[ company.partner_id.email or '' ]]</drawRightString>
            <lines>1.3cm %s 7cm %s</lines>
            <!-- left margin -->
            <rotate degrees="90"/>
            <fill color="grey"/>
            <drawString x="2.65cm" y="-0.4cm">generated by OpenERP.com</drawString>
            <fill color="black"/>
            <rotate degrees="-90"/>
            <!--page bottom-->
            <lines>1.2cm 2.65cm 19.9cm 2.65cm</lines>
            <place x="1.3cm" y="0cm" height="2.55cm" width="19.0cm">
                <para style="main_footer">[[ company.rml_footer ]]</para>
                <para style="main_footer">Contact : [[ user.name ]] - Page: <pageNumber/></para>
            </place>
        </pageGraphics>
    </pageTemplate>
</header>"""

    _header_a4 = _header_main % ('21.7cm', '27.7cm', '27.7cm', '27.7cm', '27.8cm', '27.3cm', '25.3cm', '25.0cm', '25.0cm', '24.6cm', '24.6cm', '24.5cm', '24.5cm')
    _header_letter = _header_main % ('20cm', '26.0cm', '26.0cm', '26.0cm', '26.1cm', '25.6cm', '23.6cm', '23.3cm', '23.3cm', '22.9cm', '22.9cm', '22.8cm', '22.8cm')

    def onchange_rml_paper_format(self, cr, uid, ids, rml_paper_format, context=None):
        """Swap the stored header template when the paper format changes."""
        if rml_paper_format == 'us_letter':
            return {'value': {'rml_header': self._header_letter}}
        return {'value': {'rml_header': self._header_a4}}

    def act_discover_fonts(self, cr, uid, ids, context=None):
        """Trigger a rescan of the fonts installed on the server."""
        return self.pool.get("res.font").font_scan(cr, uid, context=context)

    _defaults = {
        'currency_id': _get_euro,
        'rml_paper_format': 'a4',
        'rml_header':_get_header,
        'rml_header2': _header2,
        'rml_header3': _header3,
        'logo':_get_logo,
        'font':_get_font,
    }

    _constraints = [
        (osv.osv._check_recursion, 'Error! You can not create recursive companies.', ['parent_id'])
    ]
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
MortalViews/incubator-airflow | airflow/operators/mysql_operator.py | 16 | 2061 | # -*- coding: utf-8 -*-
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from airflow.hooks.mysql_hook import MySqlHook
from airflow.models import BaseOperator
from airflow.utils.decorators import apply_defaults
class MySqlOperator(BaseOperator):
    """Run SQL code against a specific MySQL database.

    :param sql: the sql code to be executed; a str (single statement), a
        list of str (several statements), or a reference to a template
        file (recognized by a name ending in '.sql')
    :param mysql_conn_id: reference to a specific mysql database
    :type mysql_conn_id: string
    :param parameters: parameters to bind into the statement(s)
    :param autocommit: whether each statement is committed automatically
    :type autocommit: bool
    :param database: name of database which overwrite defined one in connection
    :type database: string
    """

    template_fields = ('sql',)
    template_ext = ('.sql',)
    ui_color = '#ededed'

    @apply_defaults
    def __init__(
            self, sql, mysql_conn_id='mysql_default', parameters=None,
            autocommit=False, database=None, *args, **kwargs):
        super(MySqlOperator, self).__init__(*args, **kwargs)
        # Only record the settings here; the hook itself is built lazily
        # inside execute() so no connection is made at DAG-parse time.
        self.mysql_conn_id = mysql_conn_id
        self.sql = sql
        self.autocommit = autocommit
        self.parameters = parameters
        self.database = database

    def execute(self, context):
        self.log.info('Executing: %s', self.sql)
        # `schema` overrides the database named in the stored connection.
        hook = MySqlHook(mysql_conn_id=self.mysql_conn_id, schema=self.database)
        hook.run(self.sql, autocommit=self.autocommit, parameters=self.parameters)
| apache-2.0 |
jsgreenwell/teaching-python | classnotes/Knapsack/make_change.alpha.py | 1 | 1842 | from collections import OrderedDict
import pprint
'''Basic change-making program using base-100 currency (American dollars and cents); a full
knapsack version will be created in a later example. This example will also be expanded, in
steps, until it becomes a simple OO-based (i.e. we'll add classes) POS system with a database tie-in.'''
def get_change(diff_owed):
    '''Calculate the change due, greedily, in American denominations.

    Returns a dict mapping each denomination value (float dollars: 20.0
    down to .01) to the count of that denomination needed.

    Fix: the original looped with float subtraction (diff_owed - den),
    and the accumulated binary rounding error made small change wrong --
    e.g. 0.03 of change came back as only two pennies because
    0.03 - 0.01 - 0.01 < 0.01 in float arithmetic.  Converting once to
    integer cents with round() makes the greedy count exact.
    '''
    # Counts start at zero for every supported denomination.
    denominations = {20.0: 0, 10.0: 0, 5.0: 0, 1.0: 0, .25: 0, .1: 0, .05: 0, .01: 0}
    # One rounded conversion to whole cents; all arithmetic is exact after this.
    cents_left = int(round(diff_owed * 100))
    # Greedy method: largest denomination first.
    for den in sorted(denominations, reverse=True):
        den_cents = int(round(den * 100))
        denominations[den], cents_left = divmod(cents_left, den_cents)
    return denominations
def main():
    """Prompt for an item's cost and the customer's payment, then print
    the change due (Python 2: raw_input and print statements).

    NOTE(review): float() on free-form input raises ValueError on
    non-numeric entry -- presumably acceptable for this teaching example.
    """
    item_cost = 0.0
    customer_payment = 0.0
    cost_pay_diff = 0.0
    item_cost = float(raw_input("Enter cost of item: "))
    customer_payment = float(raw_input("Enter customer payment: "))
    cost_pay_diff = customer_payment - item_cost
    #will change to try statement when loops or classes added
    if cost_pay_diff == 0:
        print "No change due"
    elif cost_pay_diff > 0:
        #A better display will be created once full program is working
        #Using translate function to show bills and coins needed
        print "Your change is "
        pprint.pprint(get_change(cost_pay_diff))
    else:
        print "Problem with payment: either too little or other problem"

main()
| mit |
tbeadle/django | tests/template_tests/syntax_tests/test_firstof.py | 177 | 3215 | from django.template import TemplateSyntaxError
from django.test import SimpleTestCase
from ..utils import setup
class FirstOfTagTests(SimpleTestCase):
    """Tests for the {% firstof %} template tag."""

    @setup({'firstof01': '{% firstof a b c %}'})
    def test_firstof01(self):
        # Every candidate is falsy -> empty output.
        rendered = self.engine.render_to_string('firstof01', {'a': 0, 'c': 0, 'b': 0})
        self.assertEqual(rendered, '')

    @setup({'firstof02': '{% firstof a b c %}'})
    def test_firstof02(self):
        # First candidate truthy.
        rendered = self.engine.render_to_string('firstof02', {'a': 1, 'c': 0, 'b': 0})
        self.assertEqual(rendered, '1')

    @setup({'firstof03': '{% firstof a b c %}'})
    def test_firstof03(self):
        # Second candidate truthy.
        rendered = self.engine.render_to_string('firstof03', {'a': 0, 'c': 0, 'b': 2})
        self.assertEqual(rendered, '2')

    @setup({'firstof04': '{% firstof a b c %}'})
    def test_firstof04(self):
        # Third candidate truthy.
        rendered = self.engine.render_to_string('firstof04', {'a': 0, 'c': 3, 'b': 0})
        self.assertEqual(rendered, '3')

    @setup({'firstof05': '{% firstof a b c %}'})
    def test_firstof05(self):
        # All truthy -> the first one wins.
        rendered = self.engine.render_to_string('firstof05', {'a': 1, 'c': 3, 'b': 2})
        self.assertEqual(rendered, '1')

    @setup({'firstof06': '{% firstof a b c %}'})
    def test_firstof06(self):
        # Missing variables count as falsy.
        rendered = self.engine.render_to_string('firstof06', {'c': 3, 'b': 0})
        self.assertEqual(rendered, '3')

    @setup({'firstof07': '{% firstof a b "c" %}'})
    def test_firstof07(self):
        # A string literal can act as the fallback.
        rendered = self.engine.render_to_string('firstof07', {'a': 0})
        self.assertEqual(rendered, 'c')

    @setup({'firstof08': '{% firstof a b "c and d" %}'})
    def test_firstof08(self):
        # Fallback literals may contain spaces.
        rendered = self.engine.render_to_string('firstof08', {'a': 0, 'b': 0})
        self.assertEqual(rendered, 'c and d')

    @setup({'firstof09': '{% firstof %}'})
    def test_firstof09(self):
        # No arguments at all is a syntax error.
        with self.assertRaises(TemplateSyntaxError):
            self.engine.get_template('firstof09')

    @setup({'firstof10': '{% firstof a %}'})
    def test_firstof10(self):
        # Output is autoescaped by default.
        rendered = self.engine.render_to_string('firstof10', {'a': '<'})
        self.assertEqual(rendered, '&lt;')

    @setup({'firstof11': '{% firstof a b %}'})
    def test_firstof11(self):
        rendered = self.engine.render_to_string('firstof11', {'a': '<', 'b': '>'})
        self.assertEqual(rendered, '&lt;')

    @setup({'firstof12': '{% firstof a b %}'})
    def test_firstof12(self):
        rendered = self.engine.render_to_string('firstof12', {'a': '', 'b': '>'})
        self.assertEqual(rendered, '&gt;')

    @setup({'firstof13': '{% autoescape off %}{% firstof a %}{% endautoescape %}'})
    def test_firstof13(self):
        # Escaping is suppressed inside {% autoescape off %}.
        rendered = self.engine.render_to_string('firstof13', {'a': '<'})
        self.assertEqual(rendered, '<')

    @setup({'firstof14': '{% firstof a|safe b %}'})
    def test_firstof14(self):
        # ...and for values explicitly marked safe.
        rendered = self.engine.render_to_string('firstof14', {'a': '<'})
        self.assertEqual(rendered, '<')

    @setup({'firstof15': '{% firstof a b c as myvar %}'})
    def test_firstof15(self):
        # With 'as', the result is stored in the context instead of output.
        ctx = {'a': 0, 'b': 2, 'c': 3}
        rendered = self.engine.render_to_string('firstof15', ctx)
        self.assertEqual(ctx['myvar'], '2')
        self.assertEqual(rendered, '')
| bsd-3-clause |
rhattersley/iris | lib/iris/tests/unit/analysis/test_RMS.py | 17 | 3959 | # (C) British Crown Copyright 2013 - 2015, Met Office
#
# This file is part of Iris.
#
# Iris is free software: you can redistribute it and/or modify it under
# the terms of the GNU Lesser General Public License as published by the
# Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Iris is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with Iris. If not, see <http://www.gnu.org/licenses/>.
"""Unit tests for the :data:`iris.analysis.RMS` aggregator."""
from __future__ import (absolute_import, division, print_function)
from six.moves import (filter, input, map, range, zip) # noqa
# Import iris.tests first so that some things can be initialised before
# importing anything else.
import iris.tests as tests
import numpy as np
import numpy.ma as ma
from iris.analysis import RMS
class Test_aggregate(tests.IrisTest):
    """Behaviour of RMS.aggregate over plain, weighted and masked arrays."""

    def test_1d(self):
        # 1-dimensional input
        values = np.array([5, 2, 6, 4], dtype=np.float64)
        self.assertAlmostEqual(RMS.aggregate(values, 0), 4.5)

    def test_2d(self):
        # 2-dimensional input, aggregated along the trailing axis.
        values = np.array([[5, 2, 6, 4], [12, 4, 10, 8]], dtype=np.float64)
        expected = np.array([4.5, 9.0], dtype=np.float64)
        self.assertArrayAlmostEqual(RMS.aggregate(values, 1), expected)

    def test_1d_weighted(self):
        # 1-dimensional input with weights
        values = np.array([4, 7, 10, 8], dtype=np.float64)
        weights = np.array([1, 4, 3, 2], dtype=np.float64)
        self.assertAlmostEqual(RMS.aggregate(values, 0, weights=weights), 8.0)

    def test_2d_weighted(self):
        # 2-dimensional input with weights
        values = np.array([[4, 7, 10, 8], [14, 16, 20, 8]], dtype=np.float64)
        weights = np.array([[1, 4, 3, 2], [2, 1, 1.5, 0.5]], dtype=np.float64)
        expected = np.array([8.0, 16.0], dtype=np.float64)
        self.assertArrayAlmostEqual(RMS.aggregate(values, 1, weights=weights),
                                    expected)

    def test_unit_weighted(self):
        # Unit weights must behave exactly like the unweighted case.
        values = np.array([5, 2, 6, 4], dtype=np.float64)
        unit = np.ones_like(values)
        self.assertAlmostEqual(RMS.aggregate(values, 0, weights=unit), 4.5)

    def test_masked(self):
        # Masked entries are completely ignored.
        values = ma.array([5, 10, 2, 11, 6, 4],
                          mask=[False, True, False, True, False, False],
                          dtype=np.float64)
        self.assertAlmostEqual(RMS.aggregate(values, 0), 4.5)

    def test_masked_weighted(self):
        # Weights combine correctly with masked arrays.
        values = ma.array([4, 7, 18, 10, 11, 8],
                          mask=[False, False, True, False, True, False],
                          dtype=np.float64)
        weights = np.array([1, 4, 5, 3, 8, 2], dtype=np.float64)
        self.assertAlmostEqual(RMS.aggregate(values, 0, weights=weights), 8.0)
class Test_name(tests.IrisTest):
    def test(self):
        # The aggregator advertises its CF-style cell method name.
        expected = 'root_mean_square'
        self.assertEqual(RMS.name(), expected)
class Test_aggregate_shape(tests.IrisTest):
    def test(self):
        # RMS collapses its dimension completely, so the aggregate shape is
        # scalar regardless of any keyword arguments passed through.
        self.assertTupleEqual(RMS.aggregate_shape(), ())
        self.assertTupleEqual(
            RMS.aggregate_shape(tom='jerry', calvin='hobbes'), ())
# Allow this test module to be run directly.
if __name__ == "__main__":
    tests.main()
| lgpl-3.0 |
goodwinnk/intellij-community | plugins/hg4idea/testData/bin/mercurial/help.py | 91 | 18018 | # help.py - help data for mercurial
#
# Copyright 2006 Matt Mackall <mpm@selenic.com>
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
from i18n import gettext, _
import itertools, sys, os, error
import extensions, revset, fileset, templatekw, templatefilters, filemerge
import encoding, util, minirst
import cmdutil
def listexts(header, exts, indent=1):
    '''Return a list of reST lines describing the given extensions.

    ``exts`` maps extension name to its one-line description.  The result
    is empty when there are no extensions; otherwise it starts with the
    header followed by one definition-list item per extension, sorted by
    name.  Uses items() rather than the Python-2-only iteritems():
    identical output (sorted() materializes the pairs anyway) and keeps
    this helper portable to Python 3.
    '''
    rst = []
    if exts:
        rst.append('\n%s\n\n' % header)
        for name, desc in sorted(exts.items()):
            rst.append('%s:%s: %s\n' % (' ' * indent, name, desc))
    return rst
def extshelp():
    """Assemble the text of the 'extensions' help topic.

    Starts from help/extensions.txt and appends the lists of the
    currently enabled and disabled extensions.
    """
    rst = loaddoc('extensions')().splitlines(True)
    rst += listexts(_('enabled extensions:'), extensions.enabled())
    rst += listexts(_('disabled extensions:'), extensions.disabled())
    return ''.join(rst)
def optrst(options, verbose):
    """Render a command's option table as reST text.

    Deprecated options are hidden unless ``verbose``; an option whose
    default is a list is marked repeatable with '[+]'.
    """
    rows = []
    multioccur = False
    for option in options:
        # Entries are 4-tuples, or 5-tuples carrying a custom value label.
        if len(option) == 5:
            shortopt, longopt, default, desc, optlabel = option
        else:
            shortopt, longopt, default, desc = option
            optlabel = _("VALUE") # default label
        if _("DEPRECATED") in desc and not verbose:
            continue
        so = '-' + shortopt if shortopt else ''
        lo = '--' + longopt
        if default:
            desc += _(" (default: %s)") % default
        if isinstance(default, list):
            lo += " %s [+]" % optlabel
            multioccur = True
        elif (default is not None) and not isinstance(default, bool):
            lo += " %s" % optlabel
        rows.append((so, lo, desc))
    rst = minirst.maketable(rows, 1)
    if multioccur:
        rst.append(_("\n[+] marked option can be specified multiple times\n"))
    return ''.join(rst)
def indicateomitted(rst, omitted, notomitted=None):
    """Append reST container blocks flagging that help text was abridged.

    Mutates ``rst`` in place; the 'notomitted' block is only added when a
    truthy ``notomitted`` message is supplied.
    """
    rst.append('\n\n.. container:: omitted\n\n %s\n\n' % omitted)
    if not notomitted:
        return
    rst.append('\n\n.. container:: notomitted\n\n %s\n\n' % notomitted)
def topicmatch(kw):
    """Return help topics matching kw.

    Returns {'section': [(name, summary), ...], ...} where section is
    one of topics, commands, extensions, or extensioncommands.
    """
    kw = encoding.lower(kw)
    def lowercontains(container):
        return kw in encoding.lower(container) # translated in helptable
    results = {'topics': [],
               'commands': [],
               'extensions': [],
               'extensioncommands': [],
               }
    # A topic matches when any alias, its header, or its full text
    # contains the keyword.
    for names, header, doc in helptable:
        if (sum(map(lowercontains, names))
            or lowercontains(header)
            or lowercontains(doc())):
            results['topics'].append((names[0], header))
    import commands # avoid cycle
    for cmd, entry in commands.table.iteritems():
        if cmd.startswith('debug'):
            continue
        if len(entry) == 3:
            summary = entry[2]
        else:
            summary = ''
        # translate docs *before* searching there
        docs = _(getattr(entry[0], '__doc__', None)) or ''
        if kw in cmd or lowercontains(summary) or lowercontains(docs):
            # Report the first docstring line as the command summary.
            doclines = docs.splitlines()
            if doclines:
                summary = doclines[0]
            # Strip alias separators and the '^' "important command" marker.
            cmdname = cmd.split('|')[0].lstrip('^')
            results['commands'].append((cmdname, summary))
    for name, docs in itertools.chain(
        extensions.enabled().iteritems(),
        extensions.disabled().iteritems()):
        # extensions.load ignores the UI argument
        mod = extensions.load(None, name, '')
        if lowercontains(name) or lowercontains(docs):
            # extension docs are already translated
            results['extensions'].append((name, docs.splitlines()[0]))
        # Also search the commands contributed by the extension itself.
        for cmd, entry in getattr(mod, 'cmdtable', {}).iteritems():
            if kw in cmd or (len(entry) > 2 and lowercontains(entry[2])):
                cmdname = cmd.split('|')[0].lstrip('^')
                if entry[0].__doc__:
                    cmddoc = gettext(entry[0].__doc__).splitlines()[0]
                else:
                    cmddoc = _('(no help text available)')
                results['extensioncommands'].append((cmdname, cmddoc))
    return results
def loaddoc(topic):
    """Return a delayed loader for help/topic.txt."""
    def loader():
        # Frozen (standalone) builds ship the help directory next to the
        # executable rather than next to this module.
        anchor = sys.executable if util.mainfrozen() else __file__
        base = os.path.dirname(anchor)
        docdir = os.path.join(base, '.', 'help')
        if not os.path.isdir(docdir):
            docdir = os.path.join(base, '..', 'help')
        path = os.path.join(docdir, topic + ".txt")
        doc = gettext(util.readfile(path))
        # Give registered hooks a chance to rewrite the topic text.
        for rewriter in helphooks.get(topic, []):
            doc = rewriter(topic, doc)
        return doc
    return loader
# Built-in help topics: (aliases, localized header, callable returning the
# topic text).  Sorted so `hg help` lists them deterministically.
helptable = sorted([
    (["config", "hgrc"], _("Configuration Files"), loaddoc('config')),
    (["dates"], _("Date Formats"), loaddoc('dates')),
    (["patterns"], _("File Name Patterns"), loaddoc('patterns')),
    (['environment', 'env'], _('Environment Variables'),
     loaddoc('environment')),
    (['revisions', 'revs'], _('Specifying Single Revisions'),
     loaddoc('revisions')),
    (['multirevs', 'mrevs'], _('Specifying Multiple Revisions'),
     loaddoc('multirevs')),
    (['revsets', 'revset'], _("Specifying Revision Sets"), loaddoc('revsets')),
    (['filesets', 'fileset'], _("Specifying File Sets"), loaddoc('filesets')),
    (['diffs'], _('Diff Formats'), loaddoc('diffs')),
    (['merge-tools', 'mergetools'], _('Merge Tools'), loaddoc('merge-tools')),
    (['templating', 'templates', 'template', 'style'], _('Template Usage'),
     loaddoc('templates')),
    (['urls'], _('URL Paths'), loaddoc('urls')),
    (["extensions"], _("Using Additional Features"), extshelp),
    (["subrepos", "subrepo"], _("Subrepositories"), loaddoc('subrepos')),
    (["hgweb"], _("Configuring hgweb"), loaddoc('hgweb')),
    (["glossary"], _("Glossary"), loaddoc('glossary')),
    (["hgignore", "ignore"], _("Syntax for Mercurial Ignore Files"),
     loaddoc('hgignore')),
    (["phases"], _("Working with Phases"), loaddoc('phases')),
])
# Map topics to lists of callable taking the current topic help and
# returning the updated version
helphooks = {}

def addtopichook(topic, rewriter):
    """Register ``rewriter`` to post-process the help text of ``topic``."""
    hooks = helphooks.setdefault(topic, [])
    hooks.append(rewriter)
def makeitemsdoc(topic, doc, marker, items):
    """Build a documentation block from the docstrings of ``items`` (a
    name-to-function mapping) and substitute it for ``marker`` in ``doc``.
    """
    blocks = []
    for name in sorted(items):
        raw = (items[name].__doc__ or '').rstrip()
        if not raw:
            continue
        lines = gettext(raw).splitlines()
        entry = [lines[0]]
        for line in lines[1:]:
            stripped = line.strip()
            # Everything from the first embedded doctest onwards is dropped.
            if stripped.startswith('>>>'):
                break
            entry.append(' ' + stripped)
        blocks.append('\n'.join(entry))
    return doc.replace(marker, '\n\n'.join(blocks))
def addtopicsymbols(topic, marker, symbols):
    """Arrange for *marker* in *topic* help to be expanded from *symbols*."""
    def rewrite(topic, doc):
        return makeitemsdoc(topic, doc, marker, symbols)
    addtopichook(topic, rewrite)
# Wire up marker expansion for the topics that embed generated symbol tables.
addtopicsymbols('filesets', '.. predicatesmarker', fileset.symbols)
addtopicsymbols('merge-tools', '.. internaltoolsmarker', filemerge.internals)
addtopicsymbols('revsets', '.. predicatesmarker', revset.symbols)
addtopicsymbols('templates', '.. keywordsmarker', templatekw.dockeywords)
addtopicsymbols('templates', '.. filtersmarker', templatefilters.filters)
def help_(ui, name, unknowncmd=False, full=True, **opts):
    '''
    Generate the help for 'name' as unformatted restructured text. If
    'name' is None, describe the commands available.

    Dispatches to one of the nested helpers (command, topic, extension,
    disabled-extension command) depending on 'name' and the flags in
    'opts' ('keyword', 'extension', 'command').
    '''

    import commands # avoid cycle

    def helpcmd(name):
        # Render help for a single command (synopsis, aliases, body, options).
        try:
            aliases, entry = cmdutil.findcmd(name, commands.table,
                                             strict=unknowncmd)
        except error.AmbiguousCommand, inst:
            # py3k fix: except vars can't be used outside the scope of the
            # except block, nor can be used inside a lambda. python issue4617
            prefix = inst.args[0]
            select = lambda c: c.lstrip('^').startswith(prefix)
            rst = helplist(select)
            return rst

        rst = []

        # check if it's an invalid alias and display its error if it is
        if getattr(entry[0], 'badalias', False):
            if not unknowncmd:
                ui.pushbuffer()
                entry[0](ui)
                rst.append(ui.popbuffer())
            return rst

        # synopsis
        if len(entry) > 2:
            if entry[2].startswith('hg'):
                rst.append("%s\n" % entry[2])
            else:
                rst.append('hg %s %s\n' % (aliases[0], entry[2]))
        else:
            rst.append('hg %s\n' % aliases[0])
        # aliases
        if full and not ui.quiet and len(aliases) > 1:
            rst.append(_("\naliases: %s\n") % ', '.join(aliases[1:]))
        rst.append('\n')

        # description
        doc = gettext(entry[0].__doc__)
        if not doc:
            doc = _("(no help text available)")
        if util.safehasattr(entry[0], 'definition'): # aliased command
            if entry[0].definition.startswith('!'): # shell alias
                doc = _('shell alias for::\n\n %s') % entry[0].definition[1:]
            else:
                doc = _('alias for: hg %s\n\n%s') % (entry[0].definition, doc)
        doc = doc.splitlines(True)
        if ui.quiet or not full:
            rst.append(doc[0])
        else:
            rst.extend(doc)
        rst.append('\n')

        # check if this command shadows a non-trivial (multi-line)
        # extension help text
        try:
            mod = extensions.find(name)
            doc = gettext(mod.__doc__) or ''
            if '\n' in doc.strip():
                msg = _('use "hg help -e %s" to show help for '
                        'the %s extension') % (name, name)
                rst.append('\n%s\n' % msg)
        except KeyError:
            pass

        # options
        if not ui.quiet and entry[1]:
            rst.append('\n%s\n\n' % _("options:"))
            rst.append(optrst(entry[1], ui.verbose))

        if ui.verbose:
            rst.append('\n%s\n\n' % _("global options:"))
            rst.append(optrst(commands.globalopts, ui.verbose))

        if not ui.verbose:
            if not full:
                rst.append(_('\nuse "hg help %s" to show the full help text\n')
                           % name)
            elif not ui.quiet:
                omitted = _('use "hg -v help %s" to show more complete'
                            ' help and the global options') % name
                notomitted = _('use "hg -v help %s" to show'
                               ' the global options') % name
                indicateomitted(rst, omitted, notomitted)

        return rst

    def helplist(select=None):
        # Render the command list (optionally filtered by *select*), plus
        # extensions, additional topics, and trailing option hints.
        # list of commands
        if name == "shortlist":
            header = _('basic commands:\n\n')
        else:
            header = _('list of commands:\n\n')

        h = {}       # command name -> first docstring line
        cmds = {}    # command name -> full "a|b|c" table key (sans '^')
        for c, e in commands.table.iteritems():
            f = c.split("|", 1)[0]
            if select and not select(f):
                continue
            if (not select and name != 'shortlist' and
                e[0].__module__ != commands.__name__):
                continue
            if name == "shortlist" and not f.startswith("^"):
                continue
            f = f.lstrip("^")
            if not ui.debugflag and f.startswith("debug"):
                continue
            doc = e[0].__doc__
            if doc and 'DEPRECATED' in doc and not ui.verbose:
                continue
            doc = gettext(doc)
            if not doc:
                doc = _("(no help text available)")
            h[f] = doc.splitlines()[0].rstrip()
            cmds[f] = c.lstrip("^")

        rst = []
        if not h:
            if not ui.quiet:
                rst.append(_('no commands defined\n'))
            return rst

        if not ui.quiet:
            rst.append(header)
        fns = sorted(h)
        for f in fns:
            if ui.verbose:
                commacmds = cmds[f].replace("|",", ")
                rst.append(" :%s: %s\n" % (commacmds, h[f]))
            else:
                rst.append(' :%s: %s\n' % (f, h[f]))

        if not name:
            # top-level help: also list enabled extensions and topics
            exts = listexts(_('enabled extensions:'), extensions.enabled())
            if exts:
                rst.append('\n')
                rst.extend(exts)

            rst.append(_("\nadditional help topics:\n\n"))
            topics = []
            for names, header, doc in helptable:
                topics.append((names[0], header))
            for t, desc in topics:
                rst.append(" :%s: %s\n" % (t, desc))

        optlist = []
        if not ui.quiet:
            if ui.verbose:
                optlist.append((_("global options:"), commands.globalopts))
                if name == 'shortlist':
                    optlist.append((_('use "hg help" for the full list '
                                      'of commands'), ()))
            else:
                if name == 'shortlist':
                    msg = _('use "hg help" for the full list of commands '
                            'or "hg -v" for details')
                elif name and not full:
                    msg = _('use "hg help %s" to show the full help '
                            'text') % name
                else:
                    msg = _('use "hg -v help%s" to show builtin aliases and '
                            'global options') % (name and " " + name or "")
                optlist.append((msg, ()))

        if optlist:
            for title, options in optlist:
                rst.append('\n%s\n' % title)
                if options:
                    rst.append('\n%s\n' % optrst(options, ui.verbose))
        return rst

    def helptopic(name):
        # Render a named help topic from helptable.
        for names, header, doc in helptable:
            if name in names:
                break
        else:
            raise error.UnknownCommand(name)

        rst = [minirst.section(header)]

        # description
        if not doc:
            rst.append(" %s\n" % _("(no help text available)"))
        if util.safehasattr(doc, '__call__'):
            # topic body may be a callable that lazily loads the text
            rst += [" %s\n" % l for l in doc().splitlines()]

        if not ui.verbose:
            omitted = (_('use "hg help -v %s" to show more complete help') %
                       name)
            indicateomitted(rst, omitted)

        try:
            cmdutil.findcmd(name, commands.table)
            rst.append(_('\nuse "hg help -c %s" to see help for '
                         'the %s command\n') % (name, name))
        except error.UnknownCommand:
            pass
        return rst

    def helpext(name):
        # Render help for an (enabled or disabled) extension.
        try:
            mod = extensions.find(name)
            doc = gettext(mod.__doc__) or _('no help text available')
        except KeyError:
            mod = None
            doc = extensions.disabledext(name)
            if not doc:
                raise error.UnknownCommand(name)

        if '\n' not in doc:
            head, tail = doc, ""
        else:
            head, tail = doc.split('\n', 1)
        rst = [_('%s extension - %s\n\n') % (name.split('.')[-1], head)]
        if tail:
            rst.extend(tail.splitlines(True))
            rst.append('\n')

        if not ui.verbose:
            omitted = (_('use "hg help -v %s" to show more complete help') %
                       name)
            indicateomitted(rst, omitted)

        if mod:
            try:
                ct = mod.cmdtable
            except AttributeError:
                ct = {}
            modcmds = set([c.split('|', 1)[0] for c in ct])
            rst.extend(helplist(modcmds.__contains__))
        else:
            rst.append(_('use "hg help extensions" for information on enabling '
                         'extensions\n'))
        return rst

    def helpextcmd(name):
        # Render a hint for a command provided by a disabled extension.
        cmd, ext, mod = extensions.disabledcmd(ui, name,
                                               ui.configbool('ui', 'strict'))
        doc = gettext(mod.__doc__).splitlines()[0]

        rst = listexts(_("'%s' is provided by the following "
                         "extension:") % cmd, {ext: doc}, indent=4)
        rst.append('\n')
        rst.append(_('use "hg help extensions" for information on enabling '
                     'extensions\n'))
        return rst

    rst = []
    kw = opts.get('keyword')
    if kw:
        # keyword search across topics/commands/extensions
        matches = topicmatch(kw)
        for t, title in (('topics', _('Topics')),
                         ('commands', _('Commands')),
                         ('extensions', _('Extensions')),
                         ('extensioncommands', _('Extension Commands'))):
            if matches[t]:
                rst.append('%s:\n\n' % title)
                rst.extend(minirst.maketable(sorted(matches[t]), 1))
                rst.append('\n')
    elif name and name != 'shortlist':
        # try each applicable resolver in order; re-raise the last
        # UnknownCommand only if none of them matched
        i = None
        if unknowncmd:
            queries = (helpextcmd,)
        elif opts.get('extension'):
            queries = (helpext,)
        elif opts.get('command'):
            queries = (helpcmd,)
        else:
            queries = (helptopic, helpcmd, helpext, helpextcmd)
        for f in queries:
            try:
                rst = f(name)
                i = None
                break
            except error.UnknownCommand, inst:
                i = inst
        if i:
            raise i
    else:
        # program name
        if not ui.quiet:
            rst = [_("Mercurial Distributed SCM\n"), '\n']
        rst.extend(helplist())

    return ''.join(rst)
| apache-2.0 |
joshrule/LOTlib | LOTlib/Examples/Number/Adapt.py | 2 | 1558 | # -*- coding: utf-8 -*-
"""
Use optimal adaptation code to adapt show possible adpatations to the Number grammar.
"""
import pickle
import LOTlib
from LOTlib.Examples.Number.Model import *
from LOTlib.Miscellaneous import Infinity
from LOTlib.sandbox.OptimalGrammarAdaptation import print_subtree_adaptations
## WHAT VALUE SHOULD THESE BE??
N_SUBTREES_PER_NODE = 1  # random partial subtrees sampled per hypothesis node
SUBTREE_P = 1.0          # 'p' parameter passed to random_partial_subtree

if __name__ == "__main__":

    ## Set up how much data we want
    datas = map(make_data, xrange(0, 400, 10))
    print "# Generated data!"

    #hypotheses = set([ NumberExpression(G) for i in xrange(10)])
    hypotheses = pickle.load(open("../out/2014Feb10_small.pkl", 'r')).get_all()
    print "# Loaded hypotheses"

    # Clean out ones with 0 probability, or else KL computation in print_subtree_adaptations goes to hell
    hypotheses = filter(lambda h: sum(h.compute_posterior(datas[0])) > -Infinity, hypotheses)

    ## And evaluate each hypothesis on it
    posteriors = map( lambda d: [ sum(h.compute_posterior(d)) for h in hypotheses], datas)
    print "# Rescored hypotheses!"

    ## Generate a set of subtrees
    subtrees = set()
    for h in LOTlib.break_ctrlc(hypotheses):
        for x in h.value: # for each subtree
            for i in xrange(N_SUBTREES_PER_NODE): #take subtree_multiplier random partial subtrees
                subtrees.add( x.random_partial_subtree(p=SUBTREE_P) )
    print "# Generated", len(subtrees), "subtrees"

    ## And call from OptimalGrammarAdaptation
    print_subtree_adaptations(hypotheses, posteriors, subtrees)
| gpl-3.0 |
timlinux/QGIS | python/pyplugin_installer/unzip.py | 53 | 2524 | # -*- coding:utf-8 -*-
"""
/***************************************************************************
Plugin Installer module
unzip function
-------------------
Date : May 2013
Copyright : (C) 2013 by Borys Jurgiel
Email : info at borysjurgiel dot pl
***************************************************************************/
/***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************/
"""
import zipfile
import os
def unzip(file, targetDir, password=None):
    """ Creates directory structure and extracts the zip contents to it.

    file (file object or path) - the zip file to extract
    targetDir (str) - target location
    password (str; optional) - password to decrypt the zip file (if encrypted)
    """
    # convert password to bytes (ZipFile.read expects a bytes pwd)
    if isinstance(password, str):
        password = bytes(password, 'utf8')

    # create destination directory if doesn't exist
    # (skip bare drive specifiers such as "C:" on Windows)
    if not targetDir.endswith(':') and not os.path.exists(targetDir):
        os.makedirs(targetDir)

    # 'with' guarantees the archive handle is released even when extraction
    # fails halfway (e.g. wrong password) — the old code leaked it on error
    with zipfile.ZipFile(file) as zf:
        for name in zf.namelist():
            # Skip directories - they will be created when necessary by os.makedirs
            if name.endswith('/'):
                continue

            # Read the source file before creating any output,
            # so no directories are created if user doesn't know the password
            memberContent = zf.read(name, password)

            # create directory if doesn't exist
            localDir = os.path.split(name)[0]
            fullDir = os.path.normpath(os.path.join(targetDir, localDir))
            if not os.path.exists(fullDir):
                os.makedirs(fullDir)

            # extract file; context manager closes it even on write errors
            fullPath = os.path.normpath(os.path.join(targetDir, name))
            with open(fullPath, 'wb') as outfile:
                outfile.write(memberContent)
| gpl-2.0 |
adngdb/socorro | minidump-stackwalk/jsoncpp-src-0.5.0/test/rununittests.py | 249 | 2507 | import sys
import os
import os.path
import subprocess
from glob import glob
import optparse
# Command prefix used to run the test executable under valgrind's memcheck
VALGRIND_CMD = 'valgrind --tool=memcheck --leak-check=yes --undef-value-errors=yes'
class TestProxy(object):
    """Thin wrapper around a unit-test executable, optionally run under valgrind."""

    def __init__(self, test_exe_path, use_valgrind=False):
        self.test_exe_path = os.path.normpath(os.path.abspath(test_exe_path))
        self.use_valgrind = use_valgrind

    def run(self, options):
        """Run the executable with *options*; return (ok, captured_output)."""
        cmd = VALGRIND_CMD.split() if self.use_valgrind else []
        cmd = cmd + [self.test_exe_path, '--test-auto'] + options
        process = subprocess.Popen(cmd,
                                   stdout=subprocess.PIPE,
                                   stderr=subprocess.STDOUT)
        stdout = process.communicate()[0]
        ok = not process.returncode
        return ok, stdout
def runAllTests( exe_path, use_valgrind=False ):
    """Run every test exposed by *exe_path*; return 0 on success, 1 on failure."""
    test_proxy = TestProxy( exe_path, use_valgrind=use_valgrind )
    # Ask the executable for its test list first
    status, test_names = test_proxy.run( ['--list-tests'] )
    if not status:
        print >> sys.stderr, "Failed to obtain unit tests list:\n" + test_names
        return 1
    test_names = [name.strip() for name in test_names.strip().split('\n')]
    failures = []
    for name in test_names:
        print 'TESTING %s:' % name,
        succeed, result = test_proxy.run( ['--test', name] )
        if succeed:
            print 'OK'
        else:
            failures.append( (name, result) )
            print 'FAILED'
    failed_count = len(failures)
    pass_count = len(test_names) - failed_count
    if failed_count:
        # Dump the captured output of every failed test, then a summary
        print
        for name, result in failures:
            print result
        print '%d/%d tests passed (%d failure(s))' % (
            pass_count, len(test_names), failed_count)
        return 1
    else:
        print 'All %d tests passed' % len(test_names)
        return 0
def main():
    """Parse the command line and run the whole unit-test suite."""
    from optparse import OptionParser
    parser = OptionParser( usage="%prog [options] <path to test_lib_json.exe>" )
    parser.add_option("--valgrind",
        action="store_true", dest="valgrind", default=False,
        help="run all the tests using valgrind to detect memory leaks")
    parser.enable_interspersed_args()
    options, args = parser.parse_args()
    if len(args) != 1:
        parser.error( 'Must provides at least path to test_lib_json executable.' )
        sys.exit( 1 )
    exit_code = runAllTests( args[0], use_valgrind=options.valgrind )
    sys.exit( exit_code )

if __name__ == '__main__':
    main()
| mpl-2.0 |
appsecyogi/Mobile-Security-Framework-MobSF | install/windows/rpc_client.py | 4 | 5192 | """MobSF rpc_client for static windows app analysis."""
# pylint: disable=C0325,W0603,C0103
import os
from os.path import expanduser
import re
import subprocess
import configparser # pylint: disable-msg=E0401
import hashlib
import random
import string
import base64
from xmlrpc.server import SimpleXMLRPCServer # pylint: disable-msg=E0401
import rsa
# Module-level state shared across the RPC handlers:
config = None      # configparser.ConfigParser, loaded in __main__
challenge = None   # one-shot auth challenge issued by get_challenge()
pub_key = None     # RSA public key loaded by _init_key()
def _init_key():
    """Load the RSA public key (PKCS#1 PEM) from the path in config MobSF/pub_key."""
    global pub_key
    pub_key = rsa.PublicKey.load_pkcs1(
        open(config['MobSF']['pub_key']).read()
    )
def _check_challenge(signature):
    """Verify *signature* (base64) against the outstanding challenge.

    On success the challenge is revoked so each signature works only once
    (replay protection).  Raises Exception("Access Denied.") on any failure.
    """
    signature = base64.b64decode(signature)
    try:
        rsa.verify(challenge.encode('utf-8'), signature, pub_key)
        print("[*] Challenge successfully verified.")
        _revoke_challenge()
    except rsa.pkcs1.VerificationError:
        print("[!] Received wrong signature for challenge.")
        raise Exception("Access Denied.")
    except (TypeError, AttributeError):
        # challenge is None: it was never issued or was already consumed
        print("[!] Challenge already unset.")
        raise Exception("Access Denied.")
def _revoke_challenge():
    """Revoke the challenge (to prevent replay attacks)"""
    global challenge
    challenge = None
def get_challenge():
    """Issue and return a fresh 256-character ASCII challenge.

    The value is kept module-wide so _check_challenge can later verify the
    signature the client sends back.
    """
    global challenge
    # Not using os.urandom for Python 2/3 transfer errors
    rng = random.SystemRandom()
    alphabet = string.ascii_uppercase + string.digits
    challenge = ''.join(rng.choice(alphabet) for _ in range(256))
    return "{}".format(challenge)
def test_challenge(signature):
    """Test function to check if rsa signing/verification is working end-to-end."""
    # Raises if the signature does not match the outstanding challenge
    _check_challenge(signature)
    print("Check complete")
    return "OK!"
def upload_file(sample_file, signature):
    """Store an uploaded sample on disk; return its MD5 hexdigest as a handle."""
    # Authenticate the caller first
    _check_challenge(signature)

    # The sample is addressed by the MD5 of its payload
    digest = hashlib.md5()
    digest.update(sample_file.data)
    sample_name = digest.hexdigest()

    # Persist the payload under its hash inside the configured samples dir
    destination = os.path.join(config['MobSF']['samples'], sample_name)
    with open(destination, "wb") as handle:
        handle.write(sample_file.data)

    # Return md5 as reference to the sample
    return sample_name
def binskim(sample, signature):
    """Run BinSkim static analysis on *sample* and return the JSON report.

    *sample* must be exactly a lower-case MD5 hex digest naming a previously
    uploaded file; anything else is rejected to prevent path traversal.
    """
    # Check challenge
    _check_challenge(signature)

    # Require the *whole* argument to be a lower-case md5 digest.  The old
    # substring search (re.findall) let path-traversal payloads through as
    # long as they contained 32 hex chars somewhere.
    if re.fullmatch(r"[a-f\d]{32}", sample) is None:
        return "Wrong Input!"

    # Set params for execution of binskim
    binskim_path = config['binskim']['file_x64']
    command = "analyze"
    path = config['MobSF']['samples'] + sample
    output_p = "-o"
    output_d = config['MobSF']['samples'] + sample + "_binskim"
    # verbose = "-v"
    policy_p = "--config"
    policy_d = "default"  # TODO(Other policies?)

    # Assemble
    params = [
        binskim_path,
        command,
        path,
        output_p, output_d,
        # verbose,
        policy_p, policy_d
    ]

    # Execute process
    pipe = subprocess.Popen(subprocess.list2cmdline(params))
    pipe.wait()  # Wait for the process to finish..

    # Return the report; 'with' closes the handle the old code leaked
    with open(output_d) as out_file:
        return out_file.read()
def binscope(sample, signature):
    """Run BinScope against an uploaded sample and return the report.

    *sample* must be exactly a lower-case MD5 hex digest; anything else is
    rejected (the old version spliced it unvalidated into a filesystem path).
    """
    # Check challenge
    _check_challenge(signature)

    # Validate like binskim(): reject anything that is not exactly a
    # lower-case md5 digest, so *sample* cannot traverse directories.
    if re.fullmatch(r"[a-f\d]{32}", sample) is None:
        return "Wrong Input!"

    # Set params for execution of binscope
    binscope_path = [config['binscope']['file']]
    target = [config['MobSF']['samples'] + sample]
    out_type = ["/Red", "/v"]
    output = ["/l", target[0] + "_binscope"]
    checks = [
        '/Checks', 'ATLVersionCheck',
        '/Checks', 'ATLVulnCheck',
        '/Checks', 'AppContainerCheck',
        '/Checks', 'CompilerVersionCheck',
        '/Checks', 'DBCheck',
        '/Checks', 'DefaultGSCookieCheck',
        '/Checks', 'ExecutableImportsCheck',
        '/Checks', 'FunctionPointersCheck',
        '/Checks', 'GSCheck',
        '/Checks', 'GSFriendlyInitCheck',
        '/Checks', 'GSFunctionSafeBuffersCheck',
        '/Checks', 'HighEntropyVACheck',
        '/Checks', 'NXCheck',
        '/Checks', 'RSA32Check',
        '/Checks', 'SafeSEHCheck',
        '/Checks', 'SharedSectionCheck',
        '/Checks', 'VB6Check',
        '/Checks', 'WXCheck',
    ]

    # Assemble
    params = (
        binscope_path +
        target +
        out_type +
        output +
        checks
    )

    # Execute process
    p = subprocess.Popen(subprocess.list2cmdline(params))
    p.wait()  # Wait for the process to finish..

    # Return the report; 'with' closes the handle the old code leaked
    with open(output[1]) as report:
        return report.read()
if __name__ == '__main__':
    # Init configparser
    config = configparser.ConfigParser()
    config.read(expanduser("~") + "\\MobSF\\Config\\config.txt")
    _init_key()
    # NOTE(review): binds on all interfaces (0.0.0.0); access control relies
    # solely on the RSA challenge above — confirm this is intended exposure.
    server = SimpleXMLRPCServer(("0.0.0.0", 8000))
    print("Listening on port 8000...")
    server.register_function(get_challenge, "get_challenge")
    server.register_function(test_challenge, "test_challenge")
    server.register_function(upload_file, "upload_file")
    server.register_function(binskim, "binskim")
    server.register_function(binscope, "binscope")
    server.serve_forever()
| gpl-3.0 |
manmeetsaini/etools | etools-module/backup/find_callid.py | 1 | 1271 | # Python Regex module to find Call ID in SIP Trace
#function will search first for SIP event "INVITE sip:" and Start Loggin Information
#Logging will be continued till string "Content-Length:" which indicates end of SIP Message
#A sub-function will search for sting "CallID:" and print that entire line
import re
import string
def getCallID(samplefile="c:\\Python3\\sample.log", outfile="Callid.log"):
    """Extract SIP Call-IDs from every INVITE transaction in *samplefile*.

    An INVITE transaction runs from "INVITE sip:" to "Content-Length:";
    every "Call-ID:" header value inside it is printed and collected.
    The raw bytes of all matched transactions are written to *outfile*
    (the old version kept only the last transaction and raised NameError
    when the log contained no INVITE at all).

    Returns the list of Call-ID strings found (empty list if none).
    """
    callids = []
    transactions = []
    with open(samplefile, "rb") as infile:
        # re.S lets '.' span newlines so a transaction may cover many lines
        for transaction in re.findall(b"INVITE sip:(.*?)Content-Length:",
                                      infile.read(), re.S):
            transactions.append(transaction)
            # Each Call-ID header value ends at the newline
            for raw_id in re.findall(b"Call-ID:(.*?)\n", transaction, re.DOTALL):
                text = raw_id.decode("utf-8")
                print(text)
                callids.append(text)
    with open(outfile, 'wb') as out:
        out.write(b"".join(transactions))
    return callids
# Run immediately at import time against the default sample log path.
getCallID()
| gpl-3.0 |
robgolding63/tasklib | setup.py | 2 | 1199 | from setuptools import setup, find_packages
install_requirements = ['pytz', 'tzlocal']

version = '2.3.0'

try:
    import importlib
except ImportError:
    # Python < 2.7 lacks importlib in the stdlib; pull in the PyPI backport.
    install_requirements.append('importlib')

setup(
    name='tasklib',
    version=version,
    description='Python Task Warrior library',
    long_description=open('README.rst').read(),
    author='Rob Golding',
    author_email='rob@robgolding.com',
    license='BSD',
    url='https://github.com/robgolding/tasklib',
    download_url='https://github.com/robgolding/tasklib/downloads',
    packages=find_packages(),
    include_package_data=True,
    test_suite='tasklib.tests',
    install_requires=install_requirements,
    classifiers=[
        'Development Status :: 4 - Beta',
        'Programming Language :: Python',
        "Programming Language :: Python :: 3",
        "Programming Language :: Python :: 3.5",
        "Programming Language :: Python :: 3.6",
        "Programming Language :: Python :: 3.7",
        "Programming Language :: Python :: 3.8",
        'License :: OSI Approved :: BSD License',
        'Topic :: Software Development :: Libraries :: Python Modules',
        'Intended Audience :: Developers',
    ],
)
| bsd-3-clause |
inessadl/kinect-2-libras | Kinect2Libras/KinectFingerTracking/Lib/htmllib.py | 312 | 12869 | """HTML 2.0 parser.
See the HTML 2.0 specification:
http://www.w3.org/hypertext/WWW/MarkUp/html-spec/html-spec_toc.html
"""
from warnings import warnpy3k
# This module was removed in Python 3; warn anyone importing it under -3.
warnpy3k("the htmllib module has been removed in Python 3.0",
         stacklevel=2)
del warnpy3k

import sgmllib

from formatter import AS_IS

__all__ = ["HTMLParser", "HTMLParseError"]
# Subclasses SGMLParseError so existing handlers of either type keep working.
class HTMLParseError(sgmllib.SGMLParseError):
    """Error raised when an HTML document can't be parsed."""
class HTMLParser(sgmllib.SGMLParser):
    """This is the basic HTML parser class.

    It supports all entity names required by the XHTML 1.0 Recommendation.
    It also defines handlers for all HTML 2.0 and many HTML 3.0 and 3.2
    elements.
    """

    from htmlentitydefs import entitydefs

    def __init__(self, formatter, verbose=0):
        """Creates an instance of the HTMLParser class.

        The formatter parameter is the formatter instance associated with
        the parser.
        """
        sgmllib.SGMLParser.__init__(self, verbose)
        self.formatter = formatter

    def error(self, message):
        raise HTMLParseError(message)

    def reset(self):
        sgmllib.SGMLParser.reset(self)
        self.savedata = None      # buffer used by save_bgn()/save_end()
        self.isindex = 0          # set to 1 when an <ISINDEX> tag is seen
        self.title = None         # contents of <TITLE>, once parsed
        self.base = None          # href of <BASE>, if any
        self.anchor = None        # href of the currently open <A>, if any
        self.anchorlist = []      # all hyperlink hrefs seen so far
        self.nofill = 0           # >0 while inside literal (<PRE>-like) text
        self.list_stack = []      # open list contexts: [kind, label, counter]

    # ------ Methods used internally; some may be overridden

    # --- Formatter interface, taking care of 'savedata' mode;
    # shouldn't need to be overridden

    def handle_data(self, data):
        if self.savedata is not None:
            self.savedata = self.savedata + data
        else:
            if self.nofill:
                self.formatter.add_literal_data(data)
            else:
                self.formatter.add_flowing_data(data)

    # --- Hooks to save data; shouldn't need to be overridden

    def save_bgn(self):
        """Begins saving character data in a buffer instead of sending it
        to the formatter object.

        Retrieve the stored data via the save_end() method.  Use of the
        save_bgn() / save_end() pair may not be nested.
        """
        self.savedata = ''

    def save_end(self):
        """Ends buffering character data and returns all data saved since
        the preceding call to the save_bgn() method.

        If the nofill flag is false, whitespace is collapsed to single
        spaces.  A call to this method without a preceding call to the
        save_bgn() method will raise a TypeError exception.
        """
        data = self.savedata
        self.savedata = None
        if not self.nofill:
            data = ' '.join(data.split())
        return data

    # --- Hooks for anchors; should probably be overridden

    def anchor_bgn(self, href, name, type):
        """This method is called at the start of an anchor region.

        The arguments correspond to the attributes of the <A> tag with
        the same names.  The default implementation maintains a list of
        hyperlinks (defined by the HREF attribute for <A> tags) within
        the document.  The list of hyperlinks is available as the data
        attribute anchorlist.
        """
        self.anchor = href
        if self.anchor:
            self.anchorlist.append(href)

    def anchor_end(self):
        """This method is called at the end of an anchor region.

        The default implementation adds a textual footnote marker using an
        index into the list of hyperlinks created by the anchor_bgn() method.
        """
        if self.anchor:
            self.handle_data("[%d]" % len(self.anchorlist))
            self.anchor = None

    # --- Hook for images; should probably be overridden

    def handle_image(self, src, alt, *args):
        """This method is called to handle images.

        The default implementation simply passes the alt value to the
        handle_data() method.
        """
        self.handle_data(alt)

    # --------- Top level elements

    def start_html(self, attrs): pass
    def end_html(self): pass

    def start_head(self, attrs): pass
    def end_head(self): pass

    def start_body(self, attrs): pass
    def end_body(self): pass

    # ------ Head elements

    def start_title(self, attrs):
        self.save_bgn()

    def end_title(self):
        self.title = self.save_end()

    def do_base(self, attrs):
        for a, v in attrs:
            if a == 'href':
                self.base = v

    def do_isindex(self, attrs):
        self.isindex = 1

    def do_link(self, attrs):
        pass

    def do_meta(self, attrs):
        pass

    def do_nextid(self, attrs): # Deprecated
        pass

    # ------ Body elements

    # --- Headings

    def start_h1(self, attrs):
        self.formatter.end_paragraph(1)
        self.formatter.push_font(('h1', 0, 1, 0))

    def end_h1(self):
        self.formatter.end_paragraph(1)
        self.formatter.pop_font()

    def start_h2(self, attrs):
        self.formatter.end_paragraph(1)
        self.formatter.push_font(('h2', 0, 1, 0))

    def end_h2(self):
        self.formatter.end_paragraph(1)
        self.formatter.pop_font()

    def start_h3(self, attrs):
        self.formatter.end_paragraph(1)
        self.formatter.push_font(('h3', 0, 1, 0))

    def end_h3(self):
        self.formatter.end_paragraph(1)
        self.formatter.pop_font()

    def start_h4(self, attrs):
        self.formatter.end_paragraph(1)
        self.formatter.push_font(('h4', 0, 1, 0))

    def end_h4(self):
        self.formatter.end_paragraph(1)
        self.formatter.pop_font()

    def start_h5(self, attrs):
        self.formatter.end_paragraph(1)
        self.formatter.push_font(('h5', 0, 1, 0))

    def end_h5(self):
        self.formatter.end_paragraph(1)
        self.formatter.pop_font()

    def start_h6(self, attrs):
        self.formatter.end_paragraph(1)
        self.formatter.push_font(('h6', 0, 1, 0))

    def end_h6(self):
        self.formatter.end_paragraph(1)
        self.formatter.pop_font()

    # --- Block Structuring Elements

    def do_p(self, attrs):
        self.formatter.end_paragraph(1)

    def start_pre(self, attrs):
        self.formatter.end_paragraph(1)
        self.formatter.push_font((AS_IS, AS_IS, AS_IS, 1))
        # nofill is a counter so nested literal elements unwind correctly
        self.nofill = self.nofill + 1

    def end_pre(self):
        self.formatter.end_paragraph(1)
        self.formatter.pop_font()
        self.nofill = max(0, self.nofill - 1)

    def start_xmp(self, attrs):
        self.start_pre(attrs)
        self.setliteral('xmp') # Tell SGML parser

    def end_xmp(self):
        self.end_pre()

    def start_listing(self, attrs):
        self.start_pre(attrs)
        self.setliteral('listing') # Tell SGML parser

    def end_listing(self):
        self.end_pre()

    def start_address(self, attrs):
        self.formatter.end_paragraph(0)
        self.formatter.push_font((AS_IS, 1, AS_IS, AS_IS))

    def end_address(self):
        self.formatter.end_paragraph(0)
        self.formatter.pop_font()

    def start_blockquote(self, attrs):
        self.formatter.end_paragraph(1)
        self.formatter.push_margin('blockquote')

    def end_blockquote(self):
        self.formatter.end_paragraph(1)
        self.formatter.pop_margin()

    # --- List Elements

    def start_ul(self, attrs):
        self.formatter.end_paragraph(not self.list_stack)
        self.formatter.push_margin('ul')
        self.list_stack.append(['ul', '*', 0])

    def end_ul(self):
        if self.list_stack: del self.list_stack[-1]
        self.formatter.end_paragraph(not self.list_stack)
        self.formatter.pop_margin()

    def do_li(self, attrs):
        self.formatter.end_paragraph(0)
        if self.list_stack:
            [dummy, label, counter] = top = self.list_stack[-1]
            top[2] = counter = counter+1
        else:
            label, counter = '*', 0
        self.formatter.add_label_data(label, counter)

    def start_ol(self, attrs):
        self.formatter.end_paragraph(not self.list_stack)
        self.formatter.push_margin('ol')
        label = '1.'
        for a, v in attrs:
            if a == 'type':
                if len(v) == 1: v = v + '.'
                label = v
        self.list_stack.append(['ol', label, 0])

    def end_ol(self):
        if self.list_stack: del self.list_stack[-1]
        self.formatter.end_paragraph(not self.list_stack)
        self.formatter.pop_margin()

    def start_menu(self, attrs):
        self.start_ul(attrs)

    def end_menu(self):
        self.end_ul()

    def start_dir(self, attrs):
        self.start_ul(attrs)

    def end_dir(self):
        self.end_ul()

    def start_dl(self, attrs):
        self.formatter.end_paragraph(1)
        self.list_stack.append(['dl', '', 0])

    def end_dl(self):
        self.ddpop(1)
        if self.list_stack: del self.list_stack[-1]

    def do_dt(self, attrs):
        self.ddpop()

    def do_dd(self, attrs):
        self.ddpop()
        self.formatter.push_margin('dd')
        self.list_stack.append(['dd', '', 0])

    def ddpop(self, bl=0):
        # Close an open <DD> context, if any, popping its margin.
        self.formatter.end_paragraph(bl)
        if self.list_stack:
            if self.list_stack[-1][0] == 'dd':
                del self.list_stack[-1]
                self.formatter.pop_margin()

    # --- Phrase Markup

    # Idiomatic Elements (rendered via the typographic handlers below)

    def start_cite(self, attrs): self.start_i(attrs)
    def end_cite(self): self.end_i()

    def start_code(self, attrs): self.start_tt(attrs)
    def end_code(self): self.end_tt()

    def start_em(self, attrs): self.start_i(attrs)
    def end_em(self): self.end_i()

    def start_kbd(self, attrs): self.start_tt(attrs)
    def end_kbd(self): self.end_tt()

    def start_samp(self, attrs): self.start_tt(attrs)
    def end_samp(self): self.end_tt()

    def start_strong(self, attrs): self.start_b(attrs)
    def end_strong(self): self.end_b()

    def start_var(self, attrs): self.start_i(attrs)
    def end_var(self): self.end_i()

    # Typographic Elements

    def start_i(self, attrs):
        self.formatter.push_font((AS_IS, 1, AS_IS, AS_IS))
    def end_i(self):
        self.formatter.pop_font()

    def start_b(self, attrs):
        self.formatter.push_font((AS_IS, AS_IS, 1, AS_IS))
    def end_b(self):
        self.formatter.pop_font()

    def start_tt(self, attrs):
        self.formatter.push_font((AS_IS, AS_IS, AS_IS, 1))
    def end_tt(self):
        self.formatter.pop_font()

    def start_a(self, attrs):
        href = ''
        name = ''
        type = ''
        for attrname, value in attrs:
            value = value.strip()
            if attrname == 'href':
                href = value
            if attrname == 'name':
                name = value
            if attrname == 'type':
                type = value.lower()
        self.anchor_bgn(href, name, type)

    def end_a(self):
        self.anchor_end()

    # --- Line Break

    def do_br(self, attrs):
        self.formatter.add_line_break()

    # --- Horizontal Rule

    def do_hr(self, attrs):
        self.formatter.add_hor_rule()

    # --- Image

    def do_img(self, attrs):
        align = ''
        alt = '(image)'
        ismap = ''
        src = ''
        width = 0
        height = 0
        for attrname, value in attrs:
            if attrname == 'align':
                align = value
            if attrname == 'alt':
                alt = value
            if attrname == 'ismap':
                ismap = value
            if attrname == 'src':
                src = value
            if attrname == 'width':
                try: width = int(value)
                except ValueError: pass
            if attrname == 'height':
                try: height = int(value)
                except ValueError: pass
        self.handle_image(src, alt, ismap, align, width, height)

    # --- Really Old Unofficial Deprecated Stuff

    def do_plaintext(self, attrs):
        self.start_pre(attrs)
        self.setnomoretags() # Tell SGML parser

    # --- Unhandled tags

    def unknown_starttag(self, tag, attrs):
        pass

    def unknown_endtag(self, tag):
        pass
def test(args = None):
    """Parse an HTML file (default test.html) and render it to stdout.

    Pass '-s' as the first argument to use a NullFormatter (silent mode);
    '-' reads the document from stdin.
    """
    import sys, formatter

    if not args:
        args = sys.argv[1:]

    silent = args and args[0] == '-s'
    if silent:
        del args[0]

    if args:
        file = args[0]
    else:
        file = 'test.html'

    if file == '-':
        f = sys.stdin
    else:
        try:
            f = open(file, 'r')
        except IOError, msg:
            print file, ":", msg
            sys.exit(1)

    data = f.read()
    if f is not sys.stdin:
        f.close()

    if silent:
        f = formatter.NullFormatter()
    else:
        f = formatter.AbstractFormatter(formatter.DumbWriter())

    p = HTMLParser(f)
    p.feed(data)
    p.close()

if __name__ == '__main__':
    test()
| apache-2.0 |
djmonta/iTerm2 | tests/esctest/tests/decset_tite_inhibit.py | 31 | 2003 | from tests.save_restore_cursor import SaveRestoreCursorTests
import esccmd
from escutil import knownBug
class DECSETTiteInhibitTests(SaveRestoreCursorTests):
    """Re-run the shared save/restore-cursor tests using the DECSET/DECRESET
    SaveRestoreCursor private mode (title-inhibit variant) instead of the
    DECSC/DECRC escape sequences."""

    def __init__(self):
        SaveRestoreCursorTests.__init__(self)

    def saveCursor(self):
        # Save via DECSET of the SaveRestoreCursor private mode
        esccmd.DECSET(esccmd.SaveRestoreCursor)

    def restoreCursor(self):
        # Restore via DECRESET of the same private mode
        esccmd.DECRESET(esccmd.SaveRestoreCursor)

    # Each base-class test is re-declared only so the iTerm2 knownBug
    # annotation applies to this save/restore mechanism.

    @knownBug(terminal="iTerm2", reason="Not implemented")
    def test_SaveRestoreCursor_Basic(self):
        SaveRestoreCursorTests.test_SaveRestoreCursor_Basic(self)

    @knownBug(terminal="iTerm2", reason="Not implemented")
    def test_SaveRestoreCursor_MoveToHomeWhenNotSaved(self):
        SaveRestoreCursorTests.test_SaveRestoreCursor_MoveToHomeWhenNotSaved(self)

    @knownBug(terminal="iTerm2", reason="Not implemented")
    def test_SaveRestoreCursor_ResetsOriginMode(self):
        SaveRestoreCursorTests.test_SaveRestoreCursor_ResetsOriginMode(self)

    @knownBug(terminal="iTerm2", reason="Not implemented")
    def test_SaveRestoreCursor_WorksInLRM(self, shouldWork=True):
        SaveRestoreCursorTests.test_SaveRestoreCursor_WorksInLRM(self)

    @knownBug(terminal="iTerm2", reason="Not implemented")
    def test_SaveRestoreCursor_AltVsMain(self):
        SaveRestoreCursorTests.test_SaveRestoreCursor_AltVsMain(self)

    @knownBug(terminal="iTerm2", reason="Not implemented")
    def test_SaveRestoreCursor_Protection(self):
        SaveRestoreCursorTests.test_SaveRestoreCursor_Protection(self)

    @knownBug(terminal="iTerm2", reason="Not implemented")
    def test_SaveRestoreCursor_Wrap(self):
        SaveRestoreCursorTests.test_SaveRestoreCursor_Wrap(self)

    @knownBug(terminal="iTerm2", reason="Not implemented", noop=True)
    def test_SaveRestoreCursor_ReverseWrapNotAffected(self):
        SaveRestoreCursorTests.test_SaveRestoreCursor_ReverseWrapNotAffected(self)

    @knownBug(terminal="iTerm2", reason="Not implemented", noop=True)
    def test_SaveRestoreCursor_InsertNotAffected(self):
        SaveRestoreCursorTests.test_SaveRestoreCursor_InsertNotAffected(self)
| gpl-2.0 |
jamiefolsom/edx-platform | lms/djangoapps/class_dashboard/tests/test_dashboard_data.py | 88 | 13672 | """
Tests for class dashboard (Metrics tab in instructor dashboard)
"""
import json
from django.core.urlresolvers import reverse
from django.test.client import RequestFactory
from mock import patch
from nose.plugins.attrib import attr
from capa.tests.response_xml_factory import StringResponseXMLFactory
from courseware.tests.factories import StudentModuleFactory
from student.tests.factories import UserFactory, CourseEnrollmentFactory, AdminFactory
from xmodule.modulestore.tests.factories import CourseFactory, ItemFactory
from xmodule.modulestore.tests.django_utils import SharedModuleStoreTestCase
from class_dashboard.dashboard_data import (
get_problem_grade_distribution, get_sequential_open_distrib,
get_problem_set_grade_distrib, get_d3_problem_grade_distrib,
get_d3_sequential_open_distrib, get_d3_section_grade_distrib,
get_section_display_name, get_array_section_has_problem,
get_students_opened_subsection, get_students_problem_grades,
)
from class_dashboard.views import has_instructor_access_for_class
# Number of enrolled test users; setUpClass creates USER_COUNT - 1 problems,
# so the grade matrix (i < j) is non-trivial across users.
USER_COUNT = 11
@attr('shard_1')
class TestGetProblemGradeDistribution(SharedModuleStoreTestCase):
    """
    Tests related to class_dashboard/dashboard_data.py

    Fixture layout (built once per class): one course containing a single
    chapter -> sequential -> vertical chain, with USER_COUNT - 1 problems in
    the vertical. Per-test setUp enrolls USER_COUNT users and creates one
    StudentModule row per (problem, user) pair with grade 1 iff
    problem_index < user_index.
    """

    @classmethod
    def setUpClass(cls):
        """Build the shared course structure (chapter/sequential/vertical/problems)."""
        super(TestGetProblemGradeDistribution, cls).setUpClass()
        cls.course = CourseFactory.create(
            display_name=u"test course omega \u03a9",
        )
        # emit_signals=False: bulk-create course content without firing
        # modulestore signal handlers for every item.
        with cls.store.bulk_operations(cls.course.id, emit_signals=False):
            section = ItemFactory.create(
                parent_location=cls.course.location,
                category="chapter",
                display_name=u"test factory section omega \u03a9",
            )
            cls.sub_section = ItemFactory.create(
                parent_location=section.location,
                category="sequential",
                display_name=u"test subsection omega \u03a9",
            )
            cls.unit = ItemFactory.create(
                parent_location=cls.sub_section.location,
                category="vertical",
                metadata={'graded': True, 'format': 'Homework'},
                display_name=u"test unit omega \u03a9",
            )
            cls.items = []
            for i in xrange(USER_COUNT - 1):
                item = ItemFactory.create(
                    parent_location=cls.unit.location,
                    category="problem",
                    data=StringResponseXMLFactory().build_xml(answer='foo'),
                    metadata={'rerandomize': 'always'},
                    display_name=u"test problem omega \u03a9 " + str(i)
                )
                cls.items.append(item)
                # cls.item ends up as the last-created problem; individual
                # tests use it as "some problem" for request parameters.
                cls.item = item

    def setUp(self):
        """Enroll users and create per-user StudentModule state for every problem."""
        super(TestGetProblemGradeDistribution, self).setUp()
        self.request_factory = RequestFactory()
        self.instructor = AdminFactory.create()
        self.client.login(username=self.instructor.username, password='test')
        self.attempts = 3
        self.users = [
            UserFactory.create(username="metric" + str(__))
            for __ in xrange(USER_COUNT)
        ]
        for user in self.users:
            CourseEnrollmentFactory.create(user=user, course_id=self.course.id)
        for i, item in enumerate(self.items):
            for j, user in enumerate(self.users):
                # User j gets full credit on problem i iff i < j, so each
                # successive user has solved one more problem than the last.
                StudentModuleFactory.create(
                    grade=1 if i < j else 0,
                    max_grade=1 if i < j else 0.5,
                    student=user,
                    course_id=self.course.id,
                    module_state_key=item.location,
                    state=json.dumps({'attempts': self.attempts}),
                )
            for j, user in enumerate(self.users):
                # NOTE(review): module_state_key reuses 'item' from the
                # enclosing loop, and 'user'/'j' are unused here (no
                # student= kwarg) — presumably intentional to create one
                # opened-sequential row per user, but verify.
                StudentModuleFactory.create(
                    course_id=self.course.id,
                    module_type='sequential',
                    module_state_key=item.location,
                )

    def test_get_problem_grade_distribution(self):
        """Every problem reports max_grade 1 and all USER_COUNT students counted."""
        prob_grade_distrib, total_student_count = get_problem_grade_distribution(self.course.id)
        for problem in prob_grade_distrib:
            max_grade = prob_grade_distrib[problem]['max_grade']
            self.assertEquals(1, max_grade)
        for val in total_student_count.values():
            self.assertEquals(USER_COUNT, val)

    def test_get_sequential_open_distibution(self):
        """Each sequential shows all USER_COUNT students having opened it."""
        sequential_open_distrib = get_sequential_open_distrib(self.course.id)
        for problem in sequential_open_distrib:
            num_students = sequential_open_distrib[problem]
            self.assertEquals(USER_COUNT, num_students)

    def test_get_problemset_grade_distrib(self):
        """Grade buckets for each problem set sum to the full student count."""
        prob_grade_distrib, __ = get_problem_grade_distribution(self.course.id)
        probset_grade_distrib = get_problem_set_grade_distrib(self.course.id, prob_grade_distrib)
        for problem in probset_grade_distrib:
            max_grade = probset_grade_distrib[problem]['max_grade']
            self.assertEquals(1, max_grade)
            grade_distrib = probset_grade_distrib[problem]['grade_distrib']
            sum_attempts = 0
            for item in grade_distrib:
                # item is a (grade, count) pair; sum the counts.
                sum_attempts += item[1]
            self.assertEquals(USER_COUNT, sum_attempts)

    def test_get_d3_problem_grade_distrib(self):
        """d3-shaped problem data: stacked values per problem sum to USER_COUNT."""
        d3_data = get_d3_problem_grade_distrib(self.course.id)
        for data in d3_data:
            for stack_data in data['data']:
                sum_values = 0
                for problem in stack_data['stackData']:
                    sum_values += problem['value']
                self.assertEquals(USER_COUNT, sum_values)

    def test_get_d3_sequential_open_distrib(self):
        """d3-shaped sequential-open data reports 0 per stacked entry here."""
        d3_data = get_d3_sequential_open_distrib(self.course.id)
        for data in d3_data:
            for stack_data in data['data']:
                for problem in stack_data['stackData']:
                    value = problem['value']
                    self.assertEquals(0, value)

    def test_get_d3_section_grade_distrib(self):
        """d3-shaped section (index 0) grade data sums to USER_COUNT per stack."""
        d3_data = get_d3_section_grade_distrib(self.course.id, 0)
        for stack_data in d3_data:
            sum_values = 0
            for problem in stack_data['stackData']:
                sum_values += problem['value']
            self.assertEquals(USER_COUNT, sum_values)

    def test_get_students_problem_grades(self):
        """JSON endpoint lists all students with 0% or 100% for the last problem."""
        attributes = '?module_id=' + self.item.location.to_deprecated_string()
        request = self.request_factory.get(reverse('get_students_problem_grades') + attributes)
        response = get_students_problem_grades(request)
        response_content = json.loads(response.content)['results']
        response_max_exceeded = json.loads(response.content)['max_exceeded']
        self.assertEquals(USER_COUNT, len(response_content))
        self.assertEquals(False, response_max_exceeded)
        for item in response_content:
            if item['grade'] == 0:
                self.assertEquals(0, item['percent'])
            else:
                self.assertEquals(100, item['percent'])

    def test_get_students_problem_grades_max(self):
        """Student list is truncated and flagged when MAX_SCREEN_LIST_LENGTH is hit."""
        with patch('class_dashboard.dashboard_data.MAX_SCREEN_LIST_LENGTH', 2):
            attributes = '?module_id=' + self.item.location.to_deprecated_string()
            request = self.request_factory.get(reverse('get_students_problem_grades') + attributes)
            response = get_students_problem_grades(request)
            response_results = json.loads(response.content)['results']
            response_max_exceeded = json.loads(response.content)['max_exceeded']
            # Only 2 students in the list and response_max_exceeded is True
            self.assertEquals(2, len(response_results))
            self.assertEquals(True, response_max_exceeded)

    def test_get_students_problem_grades_csv(self):
        """CSV export contains the header plus one row per student."""
        tooltip = 'P1.2.1 Q1 - 3382 Students (100%: 1/1 questions)'
        attributes = '?module_id=' + self.item.location.to_deprecated_string() + '&tooltip=' + tooltip + '&csv=true'
        request = self.request_factory.get(reverse('get_students_problem_grades') + attributes)
        response = get_students_problem_grades(request)
        # Check header and a row for each student in csv response.
        # Row checks are substring matches, so the leading "Name" column is
        # omitted from the expected fragments.
        self.assertContains(response, '"Name","Username","Grade","Percent"')
        self.assertContains(response, '"metric0","0.0","0.0"')
        self.assertContains(response, '"metric1","0.0","0.0"')
        self.assertContains(response, '"metric2","0.0","0.0"')
        self.assertContains(response, '"metric3","0.0","0.0"')
        self.assertContains(response, '"metric4","0.0","0.0"')
        self.assertContains(response, '"metric5","0.0","0.0"')
        self.assertContains(response, '"metric6","0.0","0.0"')
        self.assertContains(response, '"metric7","0.0","0.0"')
        self.assertContains(response, '"metric8","0.0","0.0"')
        self.assertContains(response, '"metric9","0.0","0.0"')
        self.assertContains(response, '"metric10","1.0","100.0"')

    def test_get_students_opened_subsection(self):
        """JSON endpoint lists all students who opened the subsection."""
        attributes = '?module_id=' + self.item.location.to_deprecated_string()
        request = self.request_factory.get(reverse('get_students_opened_subsection') + attributes)
        response = get_students_opened_subsection(request)
        response_results = json.loads(response.content)['results']
        response_max_exceeded = json.loads(response.content)['max_exceeded']
        self.assertEquals(USER_COUNT, len(response_results))
        self.assertEquals(False, response_max_exceeded)

    def test_get_students_opened_subsection_max(self):
        """Opened-subsection list truncates at MAX_SCREEN_LIST_LENGTH."""
        with patch('class_dashboard.dashboard_data.MAX_SCREEN_LIST_LENGTH', 2):
            attributes = '?module_id=' + self.item.location.to_deprecated_string()
            request = self.request_factory.get(reverse('get_students_opened_subsection') + attributes)
            response = get_students_opened_subsection(request)
            response_results = json.loads(response.content)['results']
            response_max_exceeded = json.loads(response.content)['max_exceeded']
            # Only 2 students in the list and response_max_exceeded is True
            self.assertEquals(2, len(response_results))
            self.assertEquals(True, response_max_exceeded)

    def test_get_students_opened_subsection_csv(self):
        """CSV export of opened-subsection has header plus one line per user."""
        tooltip = '4162 students opened Subsection 5: Relational Algebra Exercises'
        attributes = '?module_id=' + self.item.location.to_deprecated_string() + '&tooltip=' + tooltip + '&csv=true'
        request = self.request_factory.get(reverse('get_students_opened_subsection') + attributes)
        response = get_students_opened_subsection(request)
        self.assertContains(response, '"Name","Username"')
        # Check response contains 1 line for each user +1 for the header
        self.assertEquals(USER_COUNT + 1, len(response.content.splitlines()))

    def test_post_metrics_data_subsections_csv(self):
        """POSTed subsection metrics render as header + section + subsection rows."""
        url = reverse('post_metrics_data_csv')
        sections = json.dumps(["Introduction"])
        tooltips = json.dumps([[{"subsection_name": "Pre-Course Survey", "subsection_num": 1, "type": "subsection", "num_students": 18963}]])
        course_id = self.course.id
        data_type = 'subsection'
        data = json.dumps({'sections': sections,
                           'tooltips': tooltips,
                           'course_id': course_id.to_deprecated_string(),
                           'data_type': data_type,
                           })
        response = self.client.post(url, {'data': data})
        # Check response contains 1 line for header, 1 line for Section and 1 line for Subsection
        self.assertEquals(3, len(response.content.splitlines()))

    def test_post_metrics_data_problems_csv(self):
        """POSTed problem metrics render as header + section + one row per grade bucket."""
        url = reverse('post_metrics_data_csv')
        sections = json.dumps(["Introduction"])
        tooltips = json.dumps([[[
            {'student_count_percent': 0,
             'problem_name': 'Q1',
             'grade': 0,
             'percent': 0,
             'label': 'P1.2.1',
             'max_grade': 1,
             'count_grade': 26,
             'type': u'problem'},
            {'student_count_percent': 99,
             'problem_name': 'Q1',
             'grade': 1,
             'percent': 100,
             'label': 'P1.2.1',
             'max_grade': 1,
             'count_grade': 4763,
             'type': 'problem'},
        ]]])
        course_id = self.course.id
        data_type = 'problem'
        data = json.dumps({'sections': sections,
                           'tooltips': tooltips,
                           'course_id': course_id.to_deprecated_string(),
                           'data_type': data_type,
                           })
        response = self.client.post(url, {'data': data})
        # Check response contains 1 line for header, 1 line for Sections and 2 lines for problems
        self.assertEquals(4, len(response.content.splitlines()))

    def test_get_section_display_name(self):
        """The first section's display name matches the fixture chapter name."""
        section_display_name = get_section_display_name(self.course.id)
        self.assertMultiLineEqual(section_display_name[0], u"test factory section omega \u03a9")

    def test_get_array_section_has_problem(self):
        """The fixture section is reported as containing at least one problem."""
        b_section_has_problem = get_array_section_has_problem(self.course.id)
        self.assertEquals(b_section_has_problem[0], True)

    def test_has_instructor_access_for_class(self):
        """
        Test for instructor access
        """
        ret_val = bool(has_instructor_access_for_class(self.instructor, self.course.id))
        self.assertEquals(ret_val, True)
| agpl-3.0 |
photoninger/ansible | test/units/plugins/test_plugins.py | 51 | 5435 | # (c) 2012-2014, Michael DeHaan <michael.dehaan@gmail.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import os
from ansible.compat.tests import BUILTINS, unittest
from ansible.compat.tests.mock import mock_open, patch, MagicMock
from ansible.plugins.loader import MODULE_CACHE, PATH_CACHE, PLUGIN_PATH_CACHE, PluginLoader
class TestErrors(unittest.TestCase):
    """Unit tests for ansible.plugins.loader.PluginLoader path discovery,
    package-path resolution, config normalization, and module-cache
    de-duplication behavior."""

    def setUp(self):
        pass

    def tearDown(self):
        pass

    @patch.object(PluginLoader, '_get_paths')
    def test_print_paths(self, mock_method):
        """print_paths() joins the loader's search paths with os.pathsep."""
        mock_method.return_value = ['/path/one', '/path/two', '/path/three']
        pl = PluginLoader('foo', 'foo', '', 'test_plugins')
        paths = pl.print_paths()
        expected_paths = os.pathsep.join(['/path/one', '/path/two', '/path/three'])
        self.assertEqual(paths, expected_paths)

    def test_plugins__get_package_paths_no_package(self):
        """With an empty package name, _get_package_paths() yields no paths."""
        pl = PluginLoader('test', '', 'test', 'test_plugin')
        self.assertEqual(pl._get_package_paths(), [])

    def test_plugins__get_package_paths_with_package(self):
        # the _get_package_paths() call uses __import__ to load a
        # python library, and then uses the __file__ attribute of
        # the result for that to get the library path, so we mock
        # that here and patch the builtin to use our mocked result
        foo = MagicMock()
        bar = MagicMock()
        bam = MagicMock()
        bam.__file__ = '/path/to/my/foo/bar/bam/__init__.py'
        bar.bam = bam
        foo.return_value.bar = bar
        pl = PluginLoader('test', 'foo.bar.bam', 'test', 'test_plugin')
        with patch('{0}.__import__'.format(BUILTINS), foo):
            # The package path is the directory containing __init__.py.
            self.assertEqual(pl._get_package_paths(), ['/path/to/my/foo/bar/bam'])

    def test_plugins__get_paths(self):
        """_get_paths() returns the cached _paths list when already populated."""
        pl = PluginLoader('test', '', 'test', 'test_plugin')
        pl._paths = ['/path/one', '/path/two']
        self.assertEqual(pl._get_paths(), ['/path/one', '/path/two'])

        # NOT YET WORKING
        # def fake_glob(path):
        #     if path == 'test/*':
        #         return ['test/foo', 'test/bar', 'test/bam']
        #     elif path == 'test/*/*'
        # m._paths = None
        # mock_glob = MagicMock()
        # mock_glob.return_value = []
        # with patch('glob.glob', mock_glob):
        #     pass

    def assertPluginLoaderConfigBecomes(self, arg, expected):
        """Helper: building a loader with config `arg` yields `expected` list."""
        pl = PluginLoader('test', '', arg, 'test_plugin')
        self.assertEqual(pl.config, expected)

    def test_plugin__init_config_list(self):
        config = ['/one', '/two']
        self.assertPluginLoaderConfigBecomes(config, config)

    def test_plugin__init_config_str(self):
        # A single string config is wrapped in a one-element list.
        self.assertPluginLoaderConfigBecomes('test', ['test'])

    def test_plugin__init_config_none(self):
        # None config normalizes to an empty list.
        self.assertPluginLoaderConfigBecomes(None, [])

    def test__load_module_source_no_duplicate_names(self):
        '''
        This test simulates importing 2 plugins with the same name,
        and validating that the import is shortcirtuited if a file with the same name
        has already been imported
        '''
        fixture_path = os.path.join(os.path.dirname(__file__), 'loader_fixtures')
        pl = PluginLoader('test', '', 'test', 'test_plugin')
        one = pl._load_module_source('import_fixture', os.path.join(fixture_path, 'import_fixture.py'))
        # This line wouldn't even succeed if we didn't short cirtuit on finding a duplicate name
        two = pl._load_module_source('import_fixture', '/path/to/import_fixture.py')
        self.assertEqual(one, two)

    @patch('ansible.plugins.loader.glob')
    @patch.object(PluginLoader, '_get_paths')
    def test_all_no_duplicate_names(self, gp_mock, glob_mock):
        '''
        This test goes along with ``test__load_module_source_no_duplicate_names``
        and ensures that we ignore duplicate imports on multiple paths
        '''
        fixture_path = os.path.join(os.path.dirname(__file__), 'loader_fixtures')
        gp_mock.return_value = [
            fixture_path,
            '/path/to'
        ]
        glob_mock.glob.side_effect = [
            [os.path.join(fixture_path, 'import_fixture.py')],
            ['/path/to/import_fixture.py']
        ]
        pl = PluginLoader('test', '', 'test', 'test_plugin')
        # Aside from needing ``list()`` so we can do a len, ``PluginLoader.all`` returns a generator
        # so ``list()`` actually causes ``PluginLoader.all`` to run.
        plugins = list(pl.all())
        self.assertEqual(len(plugins), 1)

        self.assertIn(os.path.join(fixture_path, 'import_fixture.py'), pl._module_cache)
        self.assertNotIn('/path/to/import_fixture.py', pl._module_cache)
| gpl-3.0 |
pcm17/tensorflow | tensorflow/contrib/rnn/python/kernel_tests/rnn_test.py | 34 | 19680 | # Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for rnn module."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import itertools
import numpy as np
from tensorflow.contrib.rnn.python.ops import core_rnn_cell_impl
from tensorflow.contrib.rnn.python.ops import rnn
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import ops
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import init_ops
from tensorflow.python.ops import variable_scope
from tensorflow.python.ops import variables
from tensorflow.python.platform import test
from tensorflow.python.platform import tf_logging
class StackBidirectionalRNNTest(test.TestCase):
def setUp(self):
self._seed = 23489
np.random.seed(self._seed)
def _createStackBidirectionalRNN(self,
use_gpu,
use_shape,
use_sequence_length,
initial_states_fw=None,
initial_states_bw=None,
scope=None):
self.layers = [2, 3]
input_size = 5
batch_size = 2
max_length = 8
initializer = init_ops.random_uniform_initializer(
-0.01, 0.01, seed=self._seed)
sequence_length = array_ops.placeholder(
dtypes.int64) if use_sequence_length else None
self.cells_fw = [
core_rnn_cell_impl.LSTMCell(
num_units,
input_size,
initializer=initializer,
state_is_tuple=False) for num_units in self.layers
]
self.cells_bw = [
core_rnn_cell_impl.LSTMCell(
num_units,
input_size,
initializer=initializer,
state_is_tuple=False) for num_units in self.layers
]
inputs = max_length * [
array_ops.placeholder(
dtypes.float32,
shape=(batch_size, input_size) if use_shape else (None, input_size))
]
outputs, state_fw, state_bw = rnn.stack_bidirectional_rnn(
self.cells_fw,
self.cells_bw,
inputs,
initial_states_fw,
initial_states_bw,
dtype=dtypes.float32,
sequence_length=sequence_length,
scope=scope)
self.assertEqual(len(outputs), len(inputs))
for out in outputs:
self.assertAlmostEqual(
out.get_shape().as_list(),
[batch_size if use_shape else None, 2 * self.layers[-1]])
input_value = np.random.randn(batch_size, input_size)
outputs = array_ops.stack(outputs)
return input_value, inputs, outputs, state_fw, state_bw, sequence_length
def _testStackBidirectionalRNN(self, use_gpu, use_shape):
with self.test_session(use_gpu=use_gpu, graph=ops.Graph()) as sess:
input_value, inputs, outputs, state_fw, state_bw, sequence_length = (
self._createStackBidirectionalRNN(use_gpu, use_shape, True))
variables.global_variables_initializer().run()
# Run with pre-specified sequence lengths of 2, 3.
out, s_fw, s_bw = sess.run(
[outputs, state_fw, state_bw],
feed_dict={inputs[0]: input_value,
sequence_length: [2, 3]})
# Since the forward and backward LSTM cells were initialized with the
# same parameters, the forward and backward states of the first layer
# must be the same.
# For the next layers, since the input is a concat of forward and backward
# outputs of the previous layers the symmetry is broken and the following
# states and outputs differ.
# We cannot access the intermediate values between layers but we can
# check that the forward and backward states of the first layer match.
self.assertAllClose(s_fw[0], s_bw[0])
# If outputs are not concat between layers the output of the forward
# and backward would be the same but symmetric.
# Check that it is not the case.
# Due to depth concatenation (as num_units=3 for both RNNs):
# - forward output: out[][][depth] for 0 <= depth < 3
# - backward output: out[][][depth] for 4 <= depth < 6
# First sequence in batch is length=2
# Check that the time=0 forward output is not equal to time=1 backward.
self.assertNotEqual(out[0][0][0], out[1][0][3])
self.assertNotEqual(out[0][0][1], out[1][0][4])
self.assertNotEqual(out[0][0][2], out[1][0][5])
# Check that the time=1 forward output is not equal to time=0 backward.
self.assertNotEqual(out[1][0][0], out[0][0][3])
self.assertNotEqual(out[1][0][1], out[0][0][4])
self.assertNotEqual(out[1][0][2], out[0][0][5])
# Second sequence in batch is length=3
# Check that the time=0 forward output is not equal to time=2 backward.
self.assertNotEqual(out[0][1][0], out[2][1][3])
self.assertNotEqual(out[0][1][1], out[2][1][4])
self.assertNotEqual(out[0][1][2], out[2][1][5])
# Check that the time=1 forward output is not equal to time=1 backward.
self.assertNotEqual(out[1][1][0], out[1][1][3])
self.assertNotEqual(out[1][1][1], out[1][1][4])
self.assertNotEqual(out[1][1][2], out[1][1][5])
# Check that the time=2 forward output is not equal to time=0 backward.
self.assertNotEqual(out[2][1][0], out[0][1][3])
self.assertNotEqual(out[2][1][1], out[0][1][4])
self.assertNotEqual(out[2][1][2], out[0][1][5])
def _testStackBidirectionalRNNStates(self, use_gpu):
# Check that the states are correctly initialized.
# - Create a net and iterate for 3 states. Keep the state (state_3).
# - Reset states, and iterate for 5 steps. Last state is state_5.
# - Reset the sets to state_3 and iterate for 2 more steps,
# last state will be state_5'.
# - Check that the state_5 and state_5' (forward and backward) are the
# same for the first layer (it does not apply for the second layer since
# it has forward-backward dependencies).
with self.test_session(use_gpu=use_gpu, graph=ops.Graph()) as sess:
batch_size = 2
# Create states placeholders.
initial_states_fw = [
array_ops.placeholder(
dtypes.float32, shape=(batch_size, layer * 2))
for layer in self.layers
]
initial_states_bw = [
array_ops.placeholder(
dtypes.float32, shape=(batch_size, layer * 2))
for layer in self.layers
]
# Create the net
input_value, inputs, outputs, state_fw, state_bw, sequence_length = (
self._createStackBidirectionalRNN(use_gpu, True, True,
initial_states_fw,
initial_states_bw))
variables.global_variables_initializer().run()
# Run 3 steps.
feed_dict = {inputs[0]: input_value, sequence_length: [3, 2]}
# Initialize to empty state.
for i, layer in enumerate(self.layers):
feed_dict[initial_states_fw[i]] = np.zeros(
(batch_size, layer * 2), dtype=np.float32)
feed_dict[initial_states_bw[i]] = np.zeros(
(batch_size, layer * 2), dtype=np.float32)
_, st_3_fw, st_3_bw = sess.run([outputs, state_fw, state_bw],
feed_dict=feed_dict)
# Reset the net and run 5 steps.
feed_dict = {inputs[0]: input_value, sequence_length: [5, 3]}
for i, layer in enumerate(self.layers):
feed_dict[initial_states_fw[i]] = np.zeros(
(batch_size, layer * 2), dtype=np.float32)
feed_dict[initial_states_bw[i]] = np.zeros(
(batch_size, layer * 2), dtype=np.float32)
_, st_5_fw, st_5_bw = sess.run([outputs, state_fw, state_bw],
feed_dict=feed_dict)
# Reset the net to state_3 and run 2 more steps.
feed_dict = {inputs[0]: input_value, sequence_length: [2, 1]}
for i, _ in enumerate(self.layers):
feed_dict[initial_states_fw[i]] = st_3_fw[i]
feed_dict[initial_states_bw[i]] = st_3_bw[i]
out_5p, st_5p_fw, st_5p_bw = sess.run([outputs, state_fw, state_bw],
feed_dict=feed_dict)
# Check that the 3+2 and 5 first layer states.
self.assertAllEqual(st_5_fw[0], st_5p_fw[0])
self.assertAllEqual(st_5_bw[0], st_5p_bw[0])
def testStackBidirectionalRNN(self):
self._testStackBidirectionalRNN(use_gpu=False, use_shape=False)
self._testStackBidirectionalRNN(use_gpu=True, use_shape=False)
self._testStackBidirectionalRNN(use_gpu=False, use_shape=True)
self._testStackBidirectionalRNN(use_gpu=True, use_shape=True)
self._testStackBidirectionalRNNStates(use_gpu=False)
self._testStackBidirectionalRNNStates(use_gpu=True)
def _createStackBidirectionalDynamicRNN(self,
use_gpu,
use_shape,
use_state_tuple,
initial_states_fw=None,
initial_states_bw=None,
scope=None):
self.layers = [2, 3]
input_size = 5
batch_size = 2
max_length = 8
initializer = init_ops.random_uniform_initializer(
-0.01, 0.01, seed=self._seed)
sequence_length = array_ops.placeholder(dtypes.int64)
self.cells_fw = [
core_rnn_cell_impl.LSTMCell(
num_units,
input_size,
initializer=initializer,
state_is_tuple=False) for num_units in self.layers
]
self.cells_bw = [
core_rnn_cell_impl.LSTMCell(
num_units,
input_size,
initializer=initializer,
state_is_tuple=False) for num_units in self.layers
]
inputs = max_length * [
array_ops.placeholder(
dtypes.float32,
shape=(batch_size, input_size) if use_shape else (None, input_size))
]
inputs_c = array_ops.stack(inputs)
inputs_c = array_ops.transpose(inputs_c, [1, 0, 2])
outputs, st_fw, st_bw = rnn.stack_bidirectional_dynamic_rnn(
self.cells_fw,
self.cells_bw,
inputs_c,
initial_states_fw=initial_states_fw,
initial_states_bw=initial_states_bw,
dtype=dtypes.float32,
sequence_length=sequence_length,
scope=scope)
# Outputs has shape (batch_size, max_length, 2* layer[-1].
output_shape = [None, max_length, 2 * self.layers[-1]]
if use_shape:
output_shape[0] = batch_size
self.assertAllEqual(outputs.get_shape().as_list(), output_shape)
input_value = np.random.randn(batch_size, input_size)
return input_value, inputs, outputs, st_fw, st_bw, sequence_length
def _testStackBidirectionalDynamicRNN(self, use_gpu, use_shape,
use_state_tuple):
with self.test_session(use_gpu=use_gpu, graph=ops.Graph()) as sess:
input_value, inputs, outputs, state_fw, state_bw, sequence_length = (
self._createStackBidirectionalDynamicRNN(use_gpu, use_shape,
use_state_tuple))
variables.global_variables_initializer().run()
# Run with pre-specified sequence length of 2, 3
out, s_fw, s_bw = sess.run(
[outputs, state_fw, state_bw],
feed_dict={inputs[0]: input_value,
sequence_length: [2, 3]})
# Since the forward and backward LSTM cells were initialized with the
# same parameters, the forward and backward states of the first layer has
# to be the same.
# For the next layers, since the input is a concat of forward and backward
# outputs of the previous layers the symmetry is broken and the following
# states and outputs differ.
# We cannot access the intermediate values between layers but we can
# check that the forward and backward states of the first layer match.
self.assertAllClose(s_fw[0], s_bw[0])
out = np.swapaxes(out, 0, 1)
# If outputs are not concat between layers the output of the forward
# and backward would be the same but symmetric.
# Check that is not the case.
# Due to depth concatenation (as num_units=3 for both RNNs):
# - forward output: out[][][depth] for 0 <= depth < 3
# - backward output: out[][][depth] for 4 <= depth < 6
# First sequence in batch is length=2
# Check that the time=0 forward output is not equal to time=1 backward.
self.assertNotEqual(out[0][0][0], out[1][0][3])
self.assertNotEqual(out[0][0][1], out[1][0][4])
self.assertNotEqual(out[0][0][2], out[1][0][5])
# Check that the time=1 forward output is not equal to time=0 backward.
self.assertNotEqual(out[1][0][0], out[0][0][3])
self.assertNotEqual(out[1][0][1], out[0][0][4])
self.assertNotEqual(out[1][0][2], out[0][0][5])
# Second sequence in batch is length=3
# Check that the time=0 forward output is not equal to time=2 backward.
self.assertNotEqual(out[0][1][0], out[2][1][3])
self.assertNotEqual(out[0][1][1], out[2][1][4])
self.assertNotEqual(out[0][1][2], out[2][1][5])
# Check that the time=1 forward output is not equal to time=1 backward.
self.assertNotEqual(out[1][1][0], out[1][1][3])
self.assertNotEqual(out[1][1][1], out[1][1][4])
self.assertNotEqual(out[1][1][2], out[1][1][5])
# Check that the time=2 forward output is not equal to time=0 backward.
self.assertNotEqual(out[2][1][0], out[0][1][3])
self.assertNotEqual(out[2][1][1], out[0][1][4])
self.assertNotEqual(out[2][1][2], out[0][1][5])
def _testStackBidirectionalDynamicRNNStates(self, use_gpu):
# Check that the states are correctly initialized.
# - Create a net and iterate for 3 states. Keep the state (state_3).
# - Reset states, and iterate for 5 steps. Last state is state_5.
# - Reset the sets to state_3 and iterate for 2 more steps,
# last state will be state_5'.
# - Check that the state_5 and state_5' (forward and backward) are the
# same for the first layer (it does not apply for the second layer since
# it has forward-backward dependencies).
with self.test_session(use_gpu=use_gpu, graph=ops.Graph()) as sess:
batch_size = 2
# Create states placeholders.
initial_states_fw = [
array_ops.placeholder(
dtypes.float32, shape=(batch_size, layer * 2))
for layer in self.layers
]
initial_states_bw = [
array_ops.placeholder(
dtypes.float32, shape=(batch_size, layer * 2))
for layer in self.layers
]
# Create the net
input_value, inputs, outputs, state_fw, state_bw, sequence_length = (
self._createStackBidirectionalDynamicRNN(
use_gpu,
use_shape=True,
use_state_tuple=False,
initial_states_fw=initial_states_fw,
initial_states_bw=initial_states_bw))
variables.global_variables_initializer().run()
# Run 3 steps.
feed_dict = {inputs[0]: input_value, sequence_length: [3, 2]}
# Initialize to empty state.
for i, layer in enumerate(self.layers):
feed_dict[initial_states_fw[i]] = np.zeros(
(batch_size, layer * 2), dtype=np.float32)
feed_dict[initial_states_bw[i]] = np.zeros(
(batch_size, layer * 2), dtype=np.float32)
_, st_3_fw, st_3_bw = sess.run([outputs, state_fw, state_bw],
feed_dict=feed_dict)
# Reset the net and run 5 steps.
feed_dict = {inputs[0]: input_value, sequence_length: [5, 3]}
for i, layer in enumerate(self.layers):
feed_dict[initial_states_fw[i]] = np.zeros(
(batch_size, layer * 2), dtype=np.float32)
feed_dict[initial_states_bw[i]] = np.zeros(
(batch_size, layer * 2), dtype=np.float32)
_, st_5_fw, st_5_bw = sess.run([outputs, state_fw, state_bw],
feed_dict=feed_dict)
# Reset the net to state_3 and run 2 more steps.
feed_dict = {inputs[0]: input_value, sequence_length: [2, 1]}
for i, _ in enumerate(self.layers):
feed_dict[initial_states_fw[i]] = st_3_fw[i]
feed_dict[initial_states_bw[i]] = st_3_bw[i]
out_5p, st_5p_fw, st_5p_bw = sess.run([outputs, state_fw, state_bw],
feed_dict=feed_dict)
# Check that the 3+2 and 5 first layer states.
self.assertAllEqual(st_5_fw[0], st_5p_fw[0])
self.assertAllEqual(st_5_bw[0], st_5p_bw[0])
def testBidirectionalRNN(self):
# Generate 2^3 option values
# from [True, True, True] to [False, False, False]
options = itertools.product([True, False], repeat=3)
for option in options:
self._testStackBidirectionalDynamicRNN(
use_gpu=option[0], use_shape=option[1], use_state_tuple=option[2])
# Check States.
self._testStackBidirectionalDynamicRNNStates(use_gpu=False)
self._testStackBidirectionalDynamicRNNStates(use_gpu=True)
def _testScope(self, factory, prefix="prefix", use_outer_scope=True):
# REMARKS: factory(scope) is a function accepting a scope
# as an argument, such scope can be None, a string
# or a VariableScope instance.
with self.test_session(use_gpu=True, graph=ops.Graph()):
if use_outer_scope:
with variable_scope.variable_scope(prefix) as scope:
factory(scope)
else:
factory(prefix)
# check that all the variables names starts with the proper scope.
variables.global_variables_initializer()
all_vars = variables.global_variables()
prefix = prefix or "stack_bidirectional_rnn"
scope_vars = [v for v in all_vars if v.name.startswith(prefix + "/")]
tf_logging.info("StackRNN with scope: %s (%s)" %
(prefix, "scope" if use_outer_scope else "str"))
for v in scope_vars:
tf_logging.info(v.name)
self.assertEqual(len(scope_vars), len(all_vars))
def testStackBidirectionalRNNScope(self):
    """Variables of the static stack RNN land in the requested scope,
    whether given as a VariableScope, a string, or left as the default."""
    def factory(scope):
        return self._createStackBidirectionalRNN(
            use_gpu=True, use_shape=True, use_sequence_length=True, scope=scope)

    for kwargs in ({"use_outer_scope": True},
                   {"use_outer_scope": False},
                   {"prefix": None, "use_outer_scope": False}):
        self._testScope(factory, **kwargs)
def testBidirectionalDynamicRNNScope(self):
    """Variables of the dynamic stack RNN land in the requested scope,
    whether given as a VariableScope, a string, or left as the default."""
    def factory(scope):
        return self._createStackBidirectionalDynamicRNN(
            use_gpu=True, use_shape=True, use_state_tuple=True, scope=scope)

    for kwargs in ({"use_outer_scope": True},
                   {"use_outer_scope": False},
                   {"prefix": None, "use_outer_scope": False}):
        self._testScope(factory, **kwargs)
if __name__ == "__main__":
test.main()
| apache-2.0 |
icereval/raven-python | raven/contrib/pylons/__init__.py | 8 | 1113 | """
raven.contrib.pylons
~~~~~~~~~~~~~~~~~~~~
:copyright: (c) 2010-2012 by the Sentry Team, see AUTHORS for more details.
:license: BSD, see LICENSE for more details.
"""
from __future__ import absolute_import
from raven.middleware import Sentry as Middleware
from raven.base import Client
def list_from_setting(config, setting):
    """Return the whitespace-separated tokens of *setting* in *config*.

    Returns ``None`` when the setting is missing or empty, so callers can
    distinguish "unset" from an explicit empty list.
    """
    raw = config.get(setting)
    return raw.split() if raw else None
class Sentry(Middleware):
    # Pylons-flavored Sentry WSGI middleware: builds a raven Client from the
    # Pylons ``config`` mapping (``sentry.*`` keys) and delegates the actual
    # request wrapping to the generic Middleware base class.
    def __init__(self, app, config, client_cls=Client):
        """Wrap *app*, configuring the raven client from ``sentry.*`` settings.

        :param app: the WSGI application to wrap.
        :param config: Pylons config mapping holding ``sentry.*`` keys.
        :param client_cls: client factory; overridable (e.g. for testing).
        """
        client = client_cls(
            dsn=config.get('sentry.dsn'),
            # list-valued settings are whitespace-separated strings in config
            servers=list_from_setting(config, 'sentry.servers'),
            name=config.get('sentry.name'),
            public_key=config.get('sentry.public_key'),
            secret_key=config.get('sentry.secret_key'),
            project=config.get('sentry.project'),
            site=config.get('sentry.site'),
            include_paths=list_from_setting(config, 'sentry.include_paths'),
            exclude_paths=list_from_setting(config, 'sentry.exclude_paths'),
        )
        super(Sentry, self).__init__(app, client)
| bsd-3-clause |
vganapath/rally | doc/ext/cli_reference.py | 5 | 8151 | # Copyright 2016: Mirantis Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import copy
import inspect
from docutils.parsers import rst
from rally.cli import cliutils
from rally.cli import main
from rally.cli import manage
from utils import (category, subcategory, hint, make_definition, note,
paragraph, parse_text, warning)
class Parser(object):
    """Minimal recording stand-in for ``argparse.ArgumentParser``.

    It never parses anything; it only remembers the sub-parsers, defaults
    and argument declarations handed to it, so the documentation generator
    can introspect the real CLI wiring afterwards.
    """

    def __init__(self):
        self.parsers = {}       # name -> metadata dict with a nested Parser
        self.subparser = None   # single container created by add_subparsers
        self.defaults = {}      # values recorded via set_defaults
        self.arguments = []     # (args tuple, kwargs dict) per add_argument

    def add_parser(self, name, help=None, description=None,
                   formatter_class=None):
        """Register a named child parser and return it."""
        child = Parser()
        self.parsers[name] = {
            "description": description,
            "help": help,
            "fclass": formatter_class,
            "parser": child,
        }
        return child

    def set_defaults(self, command_object=None, action_fn=None,
                     action_kwargs=None):
        """Record whichever defaults were supplied; falsy values are skipped."""
        for key, value in (("command_object", command_object),
                           ("action_fn", action_fn),
                           ("action_kwargs", action_kwargs)):
            if value:
                self.defaults[key] = value

    def add_subparsers(self, dest):
        """Create the single sub-parser container.

        Only one call is expected; a second call raises ValueError.
        """
        if self.subparser is not None:
            raise ValueError("Can't add one more subparser.")
        self.subparser = Parser()
        return self.subparser

    def add_argument(self, *args, **kwargs):
        """Record an argument declaration, ignoring argparse's action_args."""
        if "action_args" not in args:
            self.arguments.append((args, kwargs))
# Maps an argument "dest" to the CLI command(s) whose last successful run
# supplies that argument's default UUID via the Rally environment.
DEFAULT_UUIDS_CMD = {
    "deployment": ["rally deployment create"],
    "task": ["rally task start"],
    "verification": ["rally verify start", "rally verify import_results"]
}
def compose_note_about_default_uuids(argument, dest):
    """Build a docs "note" explaining where *argument*'s default UUID comes from.

    :param argument: rendered CLI argument name (e.g. ``--uuid``).
    :param dest: key into DEFAULT_UUIDS_CMD naming the producing command(s).
    :returns: a docutils note node.
    """
    # TODO(andreykurilin): add references to commands
    return note("The default value for the ``%(arg)s`` argument is taken from "
                "the Rally environment. Usually, the default value is equal to"
                " the UUID of the last successful run of ``%(cmd)s``, if the "
                "``--no-use`` argument was not used." % {
                    "arg": argument,
                    "cmd": "``, ``".join(DEFAULT_UUIDS_CMD[dest])})
def compose_use_cmd_hint_msg(cmd):
    """Build a docs "hint" pointing at the ``... use`` command for *cmd*.

    The anchor reference is derived from the command text itself
    (spaces become hyphens).
    """
    return hint("You can set the default value by executing ``%(cmd)s <uuid>``"
                " (ref__).\n\n __ #%(ref)s" % {"cmd": cmd,
                                               "ref": cmd.replace(" ", "-")})
def make_arguments_section(category_name, cmd_name, arguments, defaults):
    """Render the "Command arguments" docutils block for one sub-command.

    :param category_name: CLI category (e.g. "task"), used for anchors.
    :param cmd_name: sub-command name (e.g. "start").
    :param arguments: list of (args, kwargs) tuples recorded by Parser.
    :param defaults: mapping of action-function argument names to defaults.
    """
    elements = [paragraph("**Command arguments**:")]

    for args, kwargs in arguments:
        # for future changes...
        # :param args: a single command argument which can represented by
        #       several names(for example, --uuid and --task-id) in cli.
        # :type args: tuple
        # :param kwargs: description of argument. Have next format:
        #       {"dest": "action_kwarg_<name of keyword argument in code>",
        #        "help": "just a description of argument"
        #        "metavar": "[optional] metavar of argument. Example:"
        #                   "Example: argument '--file'; metavar 'path' ",
        #        "type": "[optional] class object of argument's type",
        #        "required": "[optional] boolean value"}
        # :type kwargs: dict
        dest = kwargs.get("dest").replace("action_kwarg_", "")
        description = []
        if cmd_name != "use":
            # lets add notes about specific default values and hint about
            # "use" command with reference
            if dest in ("deployment", "task"):
                description.append(compose_note_about_default_uuids(
                    args[0], dest))
                description.append(
                    compose_use_cmd_hint_msg("rally %s use" % dest))
            elif dest == "verification":
                description.append(compose_note_about_default_uuids(
                    args[0], dest))
                description.append(
                    compose_use_cmd_hint_msg("rally verify use"))
        description.append(kwargs.get("help"))

        # Type/default info only makes sense for value-taking arguments
        # (flags carry an "action" instead).
        action = kwargs.get("action")
        if not action:
            arg_type = kwargs.get("type")
            if arg_type:
                description.append("**Type**: %s" % arg_type.__name__)

            # UUID-like arguments already got an environment-default note
            # above, so skip the literal default for them.
            skip_default = dest in ("deployment",
                                    "task_id",
                                    "verification")
            if not skip_default and dest in defaults:
                description.append("**Default**: %s" % defaults[dest])
        metavar = kwargs.get("metavar")

        ref = "%s_%s_%s" % (category_name, cmd_name, args[0].replace("-", ""))

        if metavar:
            args = ["%s %s" % (arg, metavar) for arg in args]

        elements.extend(make_definition(", ".join(args), ref, description))
    return elements
def get_defaults(func):
    """Return a map of argument:default_value for specified function.

    Uses :func:`inspect.signature` instead of the legacy
    ``inspect.getargspec``, which is deprecated and removed in Python 3.11
    (and raised on functions with keyword-only arguments). Arguments
    without a default are omitted, matching the previous behavior.
    """
    return {
        name: param.default
        for name, param in inspect.signature(func).parameters.items()
        if param.default is not inspect.Parameter.empty
    }
def make_command_section(category_name, name, parser):
    """Build the docs section for one sub-command of *category_name*.

    :param parser: the metadata dict stored by Parser.add_parser
        ({"description": ..., "parser": Parser, ...}).
    """
    # NOTE(andreykurilin): there is only one category in rally-manage, so
    #   let's just hardcode it.
    cmd = "rally-manage" if category_name == "db" else "rally"
    section = subcategory("%s %s %s" % (cmd, category_name, name))
    section.extend(parse_text(parser["description"]))
    if parser["parser"].arguments:
        # Defaults come from the signature of the action function itself.
        defaults = get_defaults(parser["parser"].defaults["action_fn"])
        section.extend(make_arguments_section(
            category_name, name, parser["parser"].arguments, defaults))
    return section
def make_category_section(name, parser):
    """Build the docs section for one CLI category and all its sub-commands.

    :param name: category name (e.g. "task").
    :param parser: the Parser recorded for this category.
    """
    category_obj = category("Category: %s" % name)
    # NOTE(andreykurilin): we are re-using `_add_command_parsers` method from
    #   `rally.cli.cliutils`, but, since it was designed to print help message,
    #   generated description for categories contains specification for all
    #   sub-commands. We don't need information about sub-commands at this point,
    #   so let's skip "generated description" and take it directly from category
    #   class.
    description = parser.defaults["command_object"].__doc__

    # TODO(andreykurilin): write a decorator which will mark cli-class as
    #   deprecated without changing its docstring.
    if description.startswith("[Deprecated"):
        # Split "[Deprecated since ...]rest of docstring" into a warning
        # box plus the remaining description text.
        i = description.find("]")
        msg = description[1:i]
        description = description[i+1:].strip()
        category_obj.append(warning(msg))

    category_obj.extend(parse_text(description))

    for command in sorted(parser.subparser.parsers.keys()):
        subparser = parser.subparser.parsers[command]
        category_obj.append(make_command_section(name, command, subparser))
    return category_obj
class CLIReferenceDirective(rst.Directive):
    # Sphinx directive that renders the whole Rally CLI reference by feeding
    # the real CLI wiring (rally + rally-manage) through the recording Parser.
    def run(self):
        """Generate one docutils section per CLI category."""
        parser = Parser()
        categories = copy.copy(main.categories)
        categories["db"] = manage.DBCommands
        cliutils._add_command_parsers(categories, parser)

        content = []
        # NOTE(review): the loop variable shadows the module-level ``category``
        # helper imported from utils; harmless here since the helper is not
        # called inside this loop, but worth renaming.
        for category in sorted(categories.keys()):
            content.append(make_category_section(
                category, parser.parsers[category]["parser"]))
        return content
def setup(app):
    """Sphinx extension entry point: register the CLI-reference directive."""
    app.add_directive("make_cli_reference", CLIReferenceDirective)
| apache-2.0 |
toshywoshy/ansible | lib/ansible/modules/remote_management/ucs/ucs_vhba_template.py | 5 | 11061 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'certified'}
DOCUMENTATION = r'''
---
module: ucs_vhba_template
short_description: Configures vHBA templates on Cisco UCS Manager
description:
- Configures vHBA templates on Cisco UCS Manager.
extends_documentation_fragment: ucs
options:
state:
description:
- If C(present), will verify vHBA templates are present and will create if needed.
- If C(absent), will verify vHBA templates are absent and will delete if needed.
choices: [present, absent]
default: present
name:
description:
- The name of the virtual HBA template.
- This name can be between 1 and 16 alphanumeric characters.
- "You cannot use spaces or any special characters other than - (hyphen), \"_\" (underscore), : (colon), and . (period)."
- You cannot change this name after the template is created.
required: yes
description:
description:
- A user-defined description of the template.
- Enter up to 256 characters.
- "You can use any characters or spaces except the following:"
- "` (accent mark), \ (backslash), ^ (carat), \" (double quote), = (equal sign), > (greater than), < (less than), or ' (single quote)."
aliases: [ descr ]
fabric:
description:
- The Fabric ID field.
- The name of the fabric interconnect that vHBAs created with this template are associated with.
choices: [A, B]
default: A
redundancy_type:
description:
- The Redundancy Type used for template pairing from the Primary or Secondary redundancy template.
- "primary — Creates configurations that can be shared with the Secondary template."
- Any other shared changes on the Primary template are automatically synchronized to the Secondary template.
- "secondary — All shared configurations are inherited from the Primary template."
- "none - Legacy vHBA template behavior. Select this option if you do not want to use redundancy."
choices: [none, primary, secondary]
default: none
vsan:
description:
- The VSAN to associate with vHBAs created from this template.
default: default
template_type:
description:
- The Template Type field.
- "This can be one of the following:"
- "initial-template — vHBAs created from this template are not updated if the template changes."
- "updating-template - vHBAs created from this template are updated if the template changes."
choices: [initial-template, updating-template]
default: initial-template
max_data:
description:
- The Max Data Field Size field.
- The maximum size of the Fibre Channel frame payload bytes that the vHBA supports.
- Enter an string between '256' and '2112'.
default: '2048'
wwpn_pool:
description:
- The WWPN pool that a vHBA created from this template uses to derive its WWPN address.
default: default
qos_policy:
description:
- The QoS policy that is associated with vHBAs created from this template.
pin_group:
description:
- The SAN pin group that is associated with vHBAs created from this template.
stats_policy:
description:
- The statistics collection policy that is associated with vHBAs created from this template.
default: default
org_dn:
description:
- Org dn (distinguished name)
default: org-root
requirements:
- ucsmsdk
author:
- David Soper (@dsoper2)
- CiscoUcs (@CiscoUcs)
version_added: '2.5'
'''
EXAMPLES = r'''
- name: Configure vHBA template
ucs_vhba_template:
hostname: 172.16.143.150
username: admin
password: password
name: vHBA-A
fabric: A
vsan: VSAN-A
wwpn_pool: WWPN-Pool-A
- name: Remote vHBA template
ucs_vhba_template:
hostname: 172.16.143.150
username: admin
password: password
name: vHBA-A
state: absent
'''
RETURN = r'''
#
'''
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.remote_management.ucs import UCSModule, ucs_argument_spec
def main():
    """Ansible entry point: ensure vHBA template(s) are present/absent on UCSM.

    Reads module arguments, connects via UCSModule, and converges each
    requested template to the desired state, honoring check mode.
    """
    argument_spec = ucs_argument_spec
    argument_spec.update(
        org_dn=dict(type='str', default='org-root'),
        name=dict(type='str'),
        descr=dict(type='str'),
        fabric=dict(type='str', default='A', choices=['A', 'B']),
        redundancy_type=dict(type='str', default='none', choices=['none', 'primary', 'secondary']),
        vsan=dict(type='str', default='default'),
        template_type=dict(type='str', default='initial-template', choices=['initial-template', 'updating-template']),
        max_data=dict(type='str', default='2048'),
        wwpn_pool=dict(type='str', default='default'),
        qos_policy=dict(type='str'),
        pin_group=dict(type='str'),
        stats_policy=dict(type='str', default='default'),
        state=dict(type='str', default='present', choices=['present', 'absent']),
        vhba_template_list=dict(type='list'),
    )

    # Note that use of vhba_template_list is an experimental feature which allows multiple resource updates with a single UCSM connection.
    # Support for vhba_template_list may change or be removed once persistent UCS connections are supported.
    # Either vhba_template_list or name is required (user can specify either a list of single resource).

    module = AnsibleModule(
        argument_spec,
        supports_check_mode=True,
        required_one_of=[
            ['vhba_template_list', 'name']
        ],
        mutually_exclusive=[
            ['vhba_template_list', 'name']
        ],
    )
    ucs = UCSModule(module)

    err = False

    # NOTE(review): ucsmsdk classes are imported inside main(), presumably so
    # a missing SDK surfaces after argument parsing — confirm.
    from ucsmsdk.mometa.vnic.VnicSanConnTempl import VnicSanConnTempl
    from ucsmsdk.mometa.vnic.VnicFcIf import VnicFcIf

    changed = False
    try:
        # Only documented use is a single resource, but to also support experimental
        # feature allowing multiple updates all params are converted to a vhba_template_list below.
        if module.params['vhba_template_list']:
            # directly use the list (single resource and list are mutually exclusive
            vhba_template_list = module.params['vhba_template_list']
        else:
            # single resource specified, create list from the current params
            vhba_template_list = [module.params]
        for vhba_template in vhba_template_list:
            mo_exists = False
            props_match = False
            # set default params. Done here to set values for lists which can't be done in the argument_spec
            if not vhba_template.get('descr'):
                vhba_template['descr'] = ''
            if not vhba_template.get('fabric'):
                vhba_template['fabric'] = 'A'
            if not vhba_template.get('redundancy_type'):
                vhba_template['redundancy_type'] = 'none'
            if not vhba_template.get('vsan'):
                vhba_template['vsan'] = 'default'
            if not vhba_template.get('template_type'):
                vhba_template['template_type'] = 'initial-template'
            if not vhba_template.get('max_data'):
                vhba_template['max_data'] = '2048'
            if not vhba_template.get('wwpn_pool'):
                vhba_template['wwpn_pool'] = 'default'
            if not vhba_template.get('qos_policy'):
                vhba_template['qos_policy'] = ''
            if not vhba_template.get('pin_group'):
                vhba_template['pin_group'] = ''
            if not vhba_template.get('stats_policy'):
                vhba_template['stats_policy'] = 'default'

            # dn is <org_dn>/san-conn-templ-<name>
            dn = module.params['org_dn'] + '/san-conn-templ-' + vhba_template['name']
            mo = ucs.login_handle.query_dn(dn)
            if mo:
                mo_exists = True
                # check top-level mo props
                kwargs = dict(descr=vhba_template['descr'])
                kwargs['switch_id'] = vhba_template['fabric']
                kwargs['redundancy_pair_type'] = vhba_template['redundancy_type']
                kwargs['templ_type'] = vhba_template['template_type']
                kwargs['max_data_field_size'] = vhba_template['max_data']
                kwargs['ident_pool_name'] = vhba_template['wwpn_pool']
                kwargs['qos_policy_name'] = vhba_template['qos_policy']
                kwargs['pin_to_group_name'] = vhba_template['pin_group']
                kwargs['stats_policy_name'] = vhba_template['stats_policy']
                if (mo.check_prop_match(**kwargs)):
                    # top-level props match, check next level mo/props
                    child_dn = dn + '/if-default'
                    mo_1 = ucs.login_handle.query_dn(child_dn)
                    if mo_1:
                        kwargs = dict(name=vhba_template['vsan'])
                        if (mo_1.check_prop_match(**kwargs)):
                            props_match = True

            if module.params['state'] == 'absent':
                # mo must exist but all properties do not have to match
                if mo_exists:
                    if not module.check_mode:
                        ucs.login_handle.remove_mo(mo)
                        ucs.login_handle.commit()
                    changed = True
            else:
                if not props_match:
                    if not module.check_mode:
                        # create if mo does not already exist
                        mo = VnicSanConnTempl(
                            parent_mo_or_dn=module.params['org_dn'],
                            name=vhba_template['name'],
                            descr=vhba_template['descr'],
                            switch_id=vhba_template['fabric'],
                            redundancy_pair_type=vhba_template['redundancy_type'],
                            templ_type=vhba_template['template_type'],
                            max_data_field_size=vhba_template['max_data'],
                            ident_pool_name=vhba_template['wwpn_pool'],
                            qos_policy_name=vhba_template['qos_policy'],
                            pin_to_group_name=vhba_template['pin_group'],
                            stats_policy_name=vhba_template['stats_policy'],
                        )

                        # NOTE(review): mo_1 is attached to mo as a child via
                        # parent_mo_or_dn; the local binding itself is unused.
                        mo_1 = VnicFcIf(
                            parent_mo_or_dn=mo,
                            name=vhba_template['vsan'],
                        )

                        ucs.login_handle.add_mo(mo, True)
                        ucs.login_handle.commit()
                    changed = True

    except Exception as e:
        err = True
        ucs.result['msg'] = "setup error: %s " % str(e)

    ucs.result['changed'] = changed
    if err:
        module.fail_json(**ucs.result)
    module.exit_json(**ucs.result)
if __name__ == '__main__':
main()
| gpl-3.0 |
thaim/ansible | lib/ansible/module_utils/mysql.py | 8 | 4390 | # This code is part of Ansible, but is an independent component.
# This particular file snippet, and this file snippet only, is BSD licensed.
# Modules you write using this snippet, which is embedded dynamically by Ansible
# still belong to the author of the module, and may assign their own license
# to the complete work.
#
# Copyright (c), Jonathan Mainguy <jon@soh.re>, 2015
# Most of this was originally added by Sven Schliesing @muffl0n in the mysql_user.py module
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without modification,
# are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
# IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE
# USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import os
# Prefer PyMySQL (pure Python, supports py2 and py3); fall back to MySQLdb.
# The two drivers name the cursor-class keyword argument differently, so
# remember which one to use; mysql_driver stays None when neither is present.
try:
    import pymysql as mysql_driver
    _mysql_cursor_param = 'cursor'
except ImportError:
    try:
        import MySQLdb as mysql_driver
        import MySQLdb.cursors
        _mysql_cursor_param = 'cursorclass'
    except ImportError:
        mysql_driver = None
from ansible.module_utils._text import to_native
mysql_driver_fail_msg = 'The PyMySQL (Python 2.7 and Python 3.X) or MySQL-python (Python 2.X) module is required.'
def mysql_connect(module, login_user=None, login_password=None, config_file='', ssl_cert=None, ssl_key=None, ssl_ca=None, db=None, cursor_class=None,
                  connect_timeout=30):
    """Open a MySQL connection and return a cursor for it.

    Connection parameters come from the module params (host/port or unix
    socket), an optional my.cnf-style config file, and explicit keyword
    overrides. On connection failure the module fails via fail_json.

    :param module: AnsibleModule whose params supply host/port/socket.
    :param cursor_class: 'DictCursor' for dict rows, anything else for the
        driver's default cursor.
    :returns: an open cursor from the selected mysql driver.
    """
    config = {}

    # Pre-create the nested ssl dict only when any ssl option is used.
    if ssl_ca is not None or ssl_key is not None or ssl_cert is not None:
        config['ssl'] = {}

    # A unix socket takes precedence over TCP host/port.
    if module.params['login_unix_socket']:
        config['unix_socket'] = module.params['login_unix_socket']
    else:
        config['host'] = module.params['login_host']
        config['port'] = module.params['login_port']

    if os.path.exists(config_file):
        config['read_default_file'] = config_file

    # If login_user or login_password are given, they should override the
    # config file
    if login_user is not None:
        config['user'] = login_user
    if login_password is not None:
        config['passwd'] = login_password
    if ssl_cert is not None:
        config['ssl']['cert'] = ssl_cert
    if ssl_key is not None:
        config['ssl']['key'] = ssl_key
    if ssl_ca is not None:
        config['ssl']['ca'] = ssl_ca
    if db is not None:
        config['db'] = db
    if connect_timeout is not None:
        config['connect_timeout'] = connect_timeout

    try:
        db_connection = mysql_driver.connect(**config)

    except Exception as e:
        # fail_json exits the module; no cursor is returned on this path.
        module.fail_json(msg="unable to connect to database: %s" % to_native(e))

    if cursor_class == 'DictCursor':
        # _mysql_cursor_param bridges the keyword-name difference between
        # pymysql ('cursor') and MySQLdb ('cursorclass').
        return db_connection.cursor(**{_mysql_cursor_param: mysql_driver.cursors.DictCursor})
    else:
        return db_connection.cursor()
def mysql_common_argument_spec():
    """Return the argument-spec entries shared by all MySQL modules."""
    return {
        'login_user': {'type': 'str', 'default': None},
        'login_password': {'type': 'str', 'no_log': True},
        'login_host': {'type': 'str', 'default': 'localhost'},
        'login_port': {'type': 'int', 'default': 3306},
        'login_unix_socket': {'type': 'str'},
        'config_file': {'type': 'path', 'default': '~/.my.cnf'},
        'connect_timeout': {'type': 'int', 'default': 30},
        'client_cert': {'type': 'path', 'aliases': ['ssl_cert']},
        'client_key': {'type': 'path', 'aliases': ['ssl_key']},
        'ca_cert': {'type': 'path', 'aliases': ['ssl_ca']},
    }
| mit |
campbe13/openhatch | vendor/packages/twisted/twisted/conch/test/test_knownhosts.py | 18 | 34421 | # Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.
"""
Tests for L{twisted.conch.client.knownhosts}.
"""
import os
from binascii import Error as BinasciiError, b2a_base64, a2b_base64
# trial convention: a module-level ``skip`` string makes trial skip every
# test in this module — used here when the optional crypto deps are missing.
try:
    import Crypto
    import pyasn1
except ImportError:
    skip = "PyCrypto and PyASN1 required for twisted.conch.knownhosts."
else:
    from twisted.conch.ssh.keys import Key, BadKeyError
    from twisted.conch.client.knownhosts import \
        PlainEntry, HashedEntry, KnownHostsFile, UnparsedEntry, ConsoleUI
    from twisted.conch.client import default
from zope.interface.verify import verifyObject
from twisted.python.filepath import FilePath
from twisted.trial.unittest import TestCase
from twisted.internet.defer import Deferred
from twisted.conch.interfaces import IKnownHostEntry
from twisted.conch.error import HostKeyChanged, UserRejectedKey, InvalidEntry
sampleEncodedKey = (
'AAAAB3NzaC1yc2EAAAABIwAAAQEAsV0VMRbGmzhqxxayLRHmvnFvtyNqgbNKV46dU1bVFB+3y'
'tNvue4Riqv/SVkPRNwMb7eWH29SviXaBxUhYyzKkDoNUq3rTNnH1Vnif6d6X4JCrUb5d3W+Dm'
'YClyJrZ5HgD/hUpdSkTRqdbQ2TrvSAxRacj+vHHT4F4dm1bJSewm3B2D8HVOoi/CbVh3dsIiC'
'dp8VltdZx4qYVfYe2LwVINCbAa3d3tj9ma7RVfw3OH2Mfb+toLd1N5tBQFb7oqTt2nC6I/6Bd'
'4JwPUld+IEitw/suElq/AIJVQXXujeyiZlea90HE65U2mF1ytr17HTAIT2ySokJWyuBANGACk'
'6iIaw==')
otherSampleEncodedKey = (
'AAAAB3NzaC1yc2EAAAABIwAAAIEAwaeCZd3UCuPXhX39+/p9qO028jTF76DMVd9mPvYVDVXuf'
'WckKZauF7+0b7qm+ChT7kan6BzRVo4++gCVNfAlMzLysSt3ylmOR48tFpAfygg9UCX3DjHz0E'
'lOOUKh3iifc9aUShD0OPaK3pR5JJ8jfiBfzSYWt/hDi/iZ4igsSs8=')
thirdSampleEncodedKey = (
'AAAAB3NzaC1yc2EAAAABIwAAAQEAl/TQakPkePlnwCBRPitIVUTg6Z8VzN1en+DGkyo/evkmLw'
'7o4NWR5qbysk9A9jXW332nxnEuAnbcCam9SHe1su1liVfyIK0+3bdn0YRB0sXIbNEtMs2LtCho'
'/aV3cXPS+Cf1yut3wvIpaRnAzXxuKPCTXQ7/y0IXa8TwkRBH58OJa3RqfQ/NsSp5SAfdsrHyH2'
'aitiVKm2jfbTKzSEqOQG/zq4J9GXTkq61gZugory/Tvl5/yPgSnOR6C9jVOMHf27ZPoRtyj9SY'
'343Hd2QHiIE0KPZJEgCynKeWoKz8v6eTSK8n4rBnaqWdp8MnGZK1WGy05MguXbyCDuTC8AmJXQ'
'==')
sampleKey = a2b_base64(sampleEncodedKey)
otherSampleKey = a2b_base64(otherSampleEncodedKey)
thirdSampleKey = a2b_base64(thirdSampleEncodedKey)
samplePlaintextLine = (
"www.twistedmatrix.com ssh-rsa " + sampleEncodedKey + "\n")
otherSamplePlaintextLine = (
"divmod.com ssh-rsa " + otherSampleEncodedKey + "\n")
sampleHostIPLine = (
"www.twistedmatrix.com,198.49.126.131 ssh-rsa " + sampleEncodedKey + "\n")
sampleHashedLine = (
"|1|gJbSEPBG9ZSBoZpHNtZBD1bHKBA=|bQv+0Xa0dByrwkA1EB0E7Xop/Fo= ssh-rsa " +
sampleEncodedKey + "\n")
class EntryTestsMixin:
    """
    Tests for implementations of L{IKnownHostEntry}. Subclasses must set the
    'entry' attribute to a provider of that interface, the implementation of
    that interface under test.

    @ivar entry: a provider of L{IKnownHostEntry} with a hostname of
    www.twistedmatrix.com and an RSA key of sampleKey.
    """

    def test_providesInterface(self):
        """
        The given entry should provide IKnownHostEntry.
        """
        verifyObject(IKnownHostEntry, self.entry)

    def test_fromString(self):
        """
        Constructing a plain text entry from an unhashed known_hosts entry will
        result in an L{IKnownHostEntry} provider with 'keyString', 'hostname',
        and 'keyType' attributes. While outside the interface in question,
        these attributes are held in common by L{PlainEntry} and L{HashedEntry}
        implementations; other implementations should override this method in
        subclasses.
        """
        entry = self.entry
        self.assertEqual(entry.publicKey, Key.fromString(sampleKey))
        self.assertEqual(entry.keyType, "ssh-rsa")

    def test_matchesKey(self):
        """
        L{IKnownHostEntry.matchesKey} checks to see if an entry matches a given
        SSH key.
        """
        twistedmatrixDotCom = Key.fromString(sampleKey)
        divmodDotCom = Key.fromString(otherSampleKey)
        # The entry was built for twistedmatrix.com, so only its key matches.
        self.assertEqual(
            True,
            self.entry.matchesKey(twistedmatrixDotCom))
        self.assertEqual(
            False,
            self.entry.matchesKey(divmodDotCom))

    def test_matchesHost(self):
        """
        L{IKnownHostEntry.matchesHost} checks to see if an entry matches a
        given hostname.
        """
        self.assertEqual(True, self.entry.matchesHost(
            "www.twistedmatrix.com"))
        self.assertEqual(False, self.entry.matchesHost(
            "www.divmod.com"))
class PlainEntryTests(EntryTestsMixin, TestCase):
    """
    Test cases for L{PlainEntry}.
    """
    # Fixture lines; subclasses override these to vary the parsed input.
    plaintextLine = samplePlaintextLine
    hostIPLine = sampleHostIPLine

    def setUp(self):
        """
        Set 'entry' to a sample plain-text entry with sampleKey as its key.
        """
        self.entry = PlainEntry.fromString(self.plaintextLine)

    def test_matchesHostIP(self):
        """
        A "hostname,ip" formatted line will match both the host and the IP.
        """
        self.entry = PlainEntry.fromString(self.hostIPLine)
        self.assertEqual(True, self.entry.matchesHost("198.49.126.131"))
        # Re-run the hostname checks from the mixin against the same entry.
        self.test_matchesHost()

    def test_toString(self):
        """
        L{PlainEntry.toString} generates the serialized OpenSSL format string
        for the entry, sans newline.
        """
        self.assertEqual(self.entry.toString(), self.plaintextLine.rstrip("\n"))
        multiHostEntry = PlainEntry.fromString(self.hostIPLine)
        self.assertEqual(multiHostEntry.toString(), self.hostIPLine.rstrip("\n"))
class PlainTextWithCommentTests(PlainEntryTests):
    """
    Test cases for L{PlainEntry} when parsed from a line with a comment.
    """
    # Re-run every PlainEntryTests case against comment-suffixed fixtures.
    plaintextLine = samplePlaintextLine[:-1] + " plain text comment.\n"
    hostIPLine = sampleHostIPLine[:-1] + " text following host/IP line\n"
class HashedEntryTests(EntryTestsMixin, TestCase):
    """
    Tests for L{HashedEntry}.

    This suite doesn't include any tests for host/IP pairs because hashed
    entries store IP addresses the same way as hostnames and does not support
    comma-separated lists. (If you hash the IP and host together you can't
    tell if you've got the key already for one or the other.)
    """
    # Fixture line; the comment subclass overrides it.
    hashedLine = sampleHashedLine

    def setUp(self):
        """
        Set 'entry' to a sample hashed entry for twistedmatrix.com with
        sampleKey as its key.
        """
        self.entry = HashedEntry.fromString(self.hashedLine)

    def test_toString(self):
        """
        L{HashedEntry.toString} generates the serialized OpenSSL format string
        for the entry, sans the newline.
        """
        self.assertEqual(self.entry.toString(), self.hashedLine.rstrip("\n"))
class HashedEntryWithCommentTests(HashedEntryTests):
    """
    Test cases for L{PlainEntry} when parsed from a line with a comment.
    """
    # Re-run every HashedEntryTests case against a comment-suffixed fixture.
    hashedLine = sampleHashedLine[:-1] + " plain text comment.\n"
class UnparsedEntryTests(TestCase, EntryTestsMixin):
    """
    Tests for L{UnparsedEntry}
    """
    def setUp(self):
        """
        Set up the 'entry' to be an unparsed entry for some random text.
        """
        self.entry = UnparsedEntry(" This is a bogus entry. \n")

    def test_fromString(self):
        """
        Creating an L{UnparsedEntry} should simply record the string it was
        passed.
        """
        # The original text, whitespace and newline included, is kept verbatim.
        self.assertEqual(" This is a bogus entry. \n",
                         self.entry._string)

    def test_matchesHost(self):
        """
        An unparsed entry can't match any hosts.
        """
        self.assertEqual(False, self.entry.matchesHost("www.twistedmatrix.com"))

    def test_matchesKey(self):
        """
        An unparsed entry can't match any keys.
        """
        self.assertEqual(False, self.entry.matchesKey(Key.fromString(sampleKey)))

    def test_toString(self):
        """
        L{UnparsedEntry.toString} returns its input string, sans trailing newline.
        """
        self.assertEqual(" This is a bogus entry. ", self.entry.toString())
class ParseErrorTests(TestCase):
    """
    L{HashedEntry.fromString} and L{PlainEntry.fromString} can raise a variety
    of errors depending on misformattings of certain strings. These tests make
    sure those errors are caught. Since many of the ways that this can go
    wrong are in the lower-level APIs being invoked by the parsing logic,
    several of these are integration tests with the L{base64} and
    L{twisted.conch.ssh.keys} modules.
    """
    # The three helpers below are parameterized on the entry class so both
    # PlainEntry and HashedEntry share identical failure expectations.
    def invalidEntryTest(self, cls):
        """
        If there are fewer than three elements, C{fromString} should raise
        L{InvalidEntry}.
        """
        self.assertRaises(InvalidEntry, cls.fromString, "invalid")

    def notBase64Test(self, cls):
        """
        If the key is not base64, C{fromString} should raise L{BinasciiError}.
        """
        self.assertRaises(BinasciiError, cls.fromString, "x x x")

    def badKeyTest(self, cls, prefix):
        """
        If the key portion of the entry is valid base64, but is not actually an
        SSH key, C{fromString} should raise L{BadKeyError}.
        """
        self.assertRaises(BadKeyError, cls.fromString, ' '.join(
            [prefix, "ssh-rsa", b2a_base64(
                "Hey, this isn't an SSH key!").strip()]))

    def test_invalidPlainEntry(self):
        """
        If there are fewer than three whitespace-separated elements in an
        entry, L{PlainEntry.fromString} should raise L{InvalidEntry}.
        """
        self.invalidEntryTest(PlainEntry)

    def test_invalidHashedEntry(self):
        """
        If there are fewer than three whitespace-separated elements in an
        entry, or the hostname salt/hash portion has more than two elements,
        L{HashedEntry.fromString} should raise L{InvalidEntry}.
        """
        self.invalidEntryTest(HashedEntry)
        a, b, c = sampleHashedLine.split()
        # Appending "||" gives the salt/hash portion too many fields.
        self.assertRaises(InvalidEntry, HashedEntry.fromString, ' '.join(
            [a + "||", b, c]))

    def test_plainNotBase64(self):
        """
        If the key portion of a plain entry is not decodable as base64,
        C{fromString} should raise L{BinasciiError}.
        """
        self.notBase64Test(PlainEntry)

    def test_hashedNotBase64(self):
        """
        If the key, host salt, or host hash portion of a hashed entry is not
        encoded, it will raise L{BinasciiError}.
        """
        self.notBase64Test(HashedEntry)
        a, b, c = sampleHashedLine.split()
        # Salt not valid base64.
        self.assertRaises(
            BinasciiError, HashedEntry.fromString,
            ' '.join(["|1|x|" + b2a_base64("stuff").strip(), b, c]))
        # Host hash not valid base64.
        self.assertRaises(
            BinasciiError, HashedEntry.fromString,
            ' '.join([HashedEntry.MAGIC + b2a_base64("stuff").strip() + "|x", b, c]))
        # Neither salt nor hash valid base64.
        self.assertRaises(
            BinasciiError, HashedEntry.fromString,
            ' '.join(["|1|x|x", b, c]))

    def test_hashedBadKey(self):
        """
        If the key portion of the entry is valid base64, but is not actually an
        SSH key, C{HashedEntry.fromString} should raise L{BadKeyError}.
        """
        # Only the salted-hostname field (a) is needed as the prefix here.
        a, b, c = sampleHashedLine.split()
        self.badKeyTest(HashedEntry, a)

    def test_plainBadKey(self):
        """
        If the key portion of the entry is valid base64, but is not actually an
        SSH key, C{PlainEntry.fromString} should raise L{BadKeyError}.
        """
        self.badKeyTest(PlainEntry, "hostname")
class KnownHostsDatabaseTests(TestCase):
"""
Tests for L{KnownHostsFile}.
"""
def pathWithContent(self, content):
    """
    Return a FilePath with the given initial content.

    The path is a fresh trial-managed temporary file.
    """
    fp = FilePath(self.mktemp())
    fp.setContent(content)
    return fp
def loadSampleHostsFile(self, content=(
        sampleHashedLine + otherSamplePlaintextLine +
        "\n# That was a blank line.\n"
        "This is just unparseable.\n"
        "This also unparseable.\n")):
    """
    Return a sample hosts file, with keys for www.twistedmatrix.com and
    divmod.com present.

    The default content also carries a blank line, a comment line and two
    unparseable lines so loaders must cope with every entry kind.
    """
    return KnownHostsFile.fromPath(self.pathWithContent(content))
def test_loadFromPath(self):
"""
Loading a L{KnownHostsFile} from a path with six entries in it will
result in a L{KnownHostsFile} object with six L{IKnownHostEntry}
providers in it, each of the appropriate type.
"""
hostsFile = self.loadSampleHostsFile()
self.assertEqual(len(hostsFile._entries), 6)
self.assertIsInstance(hostsFile._entries[0], HashedEntry)
self.assertEqual(True, hostsFile._entries[0].matchesHost(
"www.twistedmatrix.com"))
self.assertIsInstance(hostsFile._entries[1], PlainEntry)
self.assertEqual(True, hostsFile._entries[1].matchesHost(
"divmod.com"))
self.assertIsInstance(hostsFile._entries[2], UnparsedEntry)
self.assertEqual(hostsFile._entries[2].toString(), "")
self.assertIsInstance(hostsFile._entries[3], UnparsedEntry)
self.assertEqual(hostsFile._entries[3].toString(),
"# That was a blank line.")
self.assertIsInstance(hostsFile._entries[4], UnparsedEntry)
self.assertEqual(hostsFile._entries[4].toString(),
"This is just unparseable.")
self.assertIsInstance(hostsFile._entries[5], UnparsedEntry)
self.assertEqual(hostsFile._entries[5].toString(),
"This also unparseable.")
def test_loadNonExistent(self):
"""
Loading a L{KnownHostsFile} from a path that does not exist should
result in an empty L{KnownHostsFile} that will save back to that path.
"""
pn = self.mktemp()
knownHostsFile = KnownHostsFile.fromPath(FilePath(pn))
self.assertEqual([], list(knownHostsFile._entries))
self.assertEqual(False, FilePath(pn).exists())
knownHostsFile.save()
self.assertEqual(True, FilePath(pn).exists())
def test_loadNonExistentParent(self):
"""
Loading a L{KnownHostsFile} from a path whose parent directory does not
exist should result in an empty L{KnownHostsFile} that will save back
to that path, creating its parent directory(ies) in the process.
"""
thePath = FilePath(self.mktemp())
knownHostsPath = thePath.child("foo").child("known_hosts")
knownHostsFile = KnownHostsFile.fromPath(knownHostsPath)
knownHostsFile.save()
knownHostsPath.restat(False)
self.assertEqual(True, knownHostsPath.exists())
def test_savingAddsEntry(self):
"""
L{KnownHostsFile.save()} will write out a new file with any entries
that have been added.
"""
path = self.pathWithContent(sampleHashedLine +
otherSamplePlaintextLine)
knownHostsFile = KnownHostsFile.fromPath(path)
newEntry = knownHostsFile.addHostKey("some.example.com", Key.fromString(thirdSampleKey))
expectedContent = (
sampleHashedLine +
otherSamplePlaintextLine + HashedEntry.MAGIC +
b2a_base64(newEntry._hostSalt).strip() + "|" +
b2a_base64(newEntry._hostHash).strip() + " ssh-rsa " +
thirdSampleEncodedKey + "\n")
# Sanity check, let's make sure the base64 API being used for the test
# isn't inserting spurious newlines.
self.assertEqual(3, expectedContent.count("\n"))
knownHostsFile.save()
self.assertEqual(expectedContent, path.getContent())
def test_hasPresentKey(self):
"""
L{KnownHostsFile.hasHostKey} returns C{True} when a key for the given
hostname is present and matches the expected key.
"""
hostsFile = self.loadSampleHostsFile()
self.assertEqual(True, hostsFile.hasHostKey(
"www.twistedmatrix.com", Key.fromString(sampleKey)))
def test_hasNonPresentKey(self):
"""
L{KnownHostsFile.hasHostKey} returns C{False} when a key for the given
hostname is not present.
"""
hostsFile = self.loadSampleHostsFile()
self.assertEqual(False, hostsFile.hasHostKey(
"non-existent.example.com", Key.fromString(sampleKey)))
def test_hasKeyMismatch(self):
"""
L{KnownHostsFile.hasHostKey} raises L{HostKeyChanged} if the host key
is present, but different from the expected one. The resulting
exception should have an offendingEntry indicating the given entry.
"""
hostsFile = self.loadSampleHostsFile()
exception = self.assertRaises(
HostKeyChanged, hostsFile.hasHostKey,
"www.twistedmatrix.com", Key.fromString(otherSampleKey))
self.assertEqual(exception.offendingEntry, hostsFile._entries[0])
self.assertEqual(exception.lineno, 1)
self.assertEqual(exception.path, hostsFile._savePath)
def test_addHostKey(self):
"""
L{KnownHostsFile.addHostKey} adds a new L{HashedEntry} to the host
file, and returns it.
"""
hostsFile = self.loadSampleHostsFile()
aKey = Key.fromString(thirdSampleKey)
self.assertEqual(False,
hostsFile.hasHostKey("somewhere.example.com", aKey))
newEntry = hostsFile.addHostKey("somewhere.example.com", aKey)
# The code in OpenSSH requires host salts to be 20 characters long.
# This is the required length of a SHA-1 HMAC hash, so it's just a
# sanity check.
self.assertEqual(20, len(newEntry._hostSalt))
self.assertEqual(True,
newEntry.matchesHost("somewhere.example.com"))
self.assertEqual(newEntry.keyType, "ssh-rsa")
self.assertEqual(aKey, newEntry.publicKey)
self.assertEqual(True,
hostsFile.hasHostKey("somewhere.example.com", aKey))
def test_randomSalts(self):
"""
L{KnownHostsFile.addHostKey} generates a random salt for each new key,
so subsequent salts will be different.
"""
hostsFile = self.loadSampleHostsFile()
aKey = Key.fromString(thirdSampleKey)
self.assertNotEqual(
hostsFile.addHostKey("somewhere.example.com", aKey)._hostSalt,
hostsFile.addHostKey("somewhere-else.example.com", aKey)._hostSalt)
def test_verifyValidKey(self):
"""
Verifying a valid key should return a L{Deferred} which fires with
True.
"""
hostsFile = self.loadSampleHostsFile()
hostsFile.addHostKey("1.2.3.4", Key.fromString(sampleKey))
ui = FakeUI()
d = hostsFile.verifyHostKey(ui, "www.twistedmatrix.com", "1.2.3.4",
Key.fromString(sampleKey))
l = []
d.addCallback(l.append)
self.assertEqual(l, [True])
def test_verifyInvalidKey(self):
"""
Verfying an invalid key should return a L{Deferred} which fires with a
L{HostKeyChanged} failure.
"""
hostsFile = self.loadSampleHostsFile()
wrongKey = Key.fromString(thirdSampleKey)
ui = FakeUI()
hostsFile.addHostKey("1.2.3.4", Key.fromString(sampleKey))
d = hostsFile.verifyHostKey(
ui, "www.twistedmatrix.com", "1.2.3.4", wrongKey)
return self.assertFailure(d, HostKeyChanged)
def verifyNonPresentKey(self):
"""
Set up a test to verify a key that isn't present. Return a 3-tuple of
the UI, a list set up to collect the result of the verifyHostKey call,
and the sample L{KnownHostsFile} being used.
This utility method avoids returning a L{Deferred}, and records results
in the returned list instead, because the events which get generated
here are pre-recorded in the 'ui' object. If the L{Deferred} in
question does not fire, the it will fail quickly with an empty list.
"""
hostsFile = self.loadSampleHostsFile()
absentKey = Key.fromString(thirdSampleKey)
ui = FakeUI()
l = []
d = hostsFile.verifyHostKey(
ui, "sample-host.example.com", "4.3.2.1", absentKey)
d.addBoth(l.append)
self.assertEqual([], l)
self.assertEqual(
ui.promptText,
"The authenticity of host 'sample-host.example.com (4.3.2.1)' "
"can't be established.\n"
"RSA key fingerprint is "
"89:4e:cc:8c:57:83:96:48:ef:63:ad:ee:99:00:4c:8f.\n"
"Are you sure you want to continue connecting (yes/no)? ")
return ui, l, hostsFile
def test_verifyNonPresentKey_Yes(self):
"""
Verifying a key where neither the hostname nor the IP are present
should result in the UI being prompted with a message explaining as
much. If the UI says yes, the Deferred should fire with True.
"""
ui, l, knownHostsFile = self.verifyNonPresentKey()
ui.promptDeferred.callback(True)
self.assertEqual([True], l)
reloaded = KnownHostsFile.fromPath(knownHostsFile._savePath)
self.assertEqual(
True,
reloaded.hasHostKey("4.3.2.1", Key.fromString(thirdSampleKey)))
self.assertEqual(
True,
reloaded.hasHostKey("sample-host.example.com",
Key.fromString(thirdSampleKey)))
def test_verifyNonPresentKey_No(self):
"""
Verifying a key where neither the hostname nor the IP are present
should result in the UI being prompted with a message explaining as
much. If the UI says no, the Deferred should fail with
UserRejectedKey.
"""
ui, l, knownHostsFile = self.verifyNonPresentKey()
ui.promptDeferred.callback(False)
l[0].trap(UserRejectedKey)
def test_verifyHostIPMismatch(self):
"""
Verifying a key where the host is present (and correct), but the IP is
present and different, should result the deferred firing in a
HostKeyChanged failure.
"""
hostsFile = self.loadSampleHostsFile()
wrongKey = Key.fromString(thirdSampleKey)
ui = FakeUI()
d = hostsFile.verifyHostKey(
ui, "www.twistedmatrix.com", "4.3.2.1", wrongKey)
return self.assertFailure(d, HostKeyChanged)
def test_verifyKeyForHostAndIP(self):
"""
Verifying a key where the hostname is present but the IP is not should
result in the key being added for the IP and the user being warned
about the change.
"""
ui = FakeUI()
hostsFile = self.loadSampleHostsFile()
expectedKey = Key.fromString(sampleKey)
hostsFile.verifyHostKey(
ui, "www.twistedmatrix.com", "5.4.3.2", expectedKey)
self.assertEqual(
True, KnownHostsFile.fromPath(hostsFile._savePath).hasHostKey(
"5.4.3.2", expectedKey))
self.assertEqual(
["Warning: Permanently added the RSA host key for IP address "
"'5.4.3.2' to the list of known hosts."],
ui.userWarnings)
class FakeFile(object):
    """
    A minimal stand-in for a file object, sufficient to satisfy
    L{ConsoleUI.prompt}.
    @ivar inlines: queued input lines returned one at a time by C{readline}.
    @ivar outchunks: every chunk passed to C{write}, in order.
    @ivar closed: whether C{close} has been called.
    """
    def __init__(self):
        self.closed = False
        self.inlines = []
        self.outchunks = []
    def readline(self):
        """
        Pop and return the next queued input line.
        """
        return self.inlines.pop(0)
    def write(self, chunk):
        """
        Record the given chunk, refusing if the file has been closed.
        """
        if self.closed:
            raise IOError("the file was closed")
        self.outchunks.append(chunk)
    def close(self):
        """
        Mark this file as closed so that further writes fail.
        """
        self.closed = True
class ConsoleUITests(TestCase):
    """
    Test cases for L{ConsoleUI}.
    """
    def setUp(self):
        """
        Create a L{ConsoleUI} pointed at a L{FakeFile}.
        """
        self.fakeFile = FakeFile()
        self.ui = ConsoleUI(self.openFile)
    def openFile(self):
        """
        Return the current fake file.
        """
        return self.fakeFile
    def newFile(self, lines):
        """
        Create a new fake file (the next file that self.ui will open) with the
        given list of lines to be returned from readline().
        """
        self.fakeFile = FakeFile()
        self.fakeFile.inlines = lines
    def test_promptYes(self):
        """
        L{ConsoleUI.prompt} writes a message to the console, then reads a line.
        If that line is 'yes', then it returns a L{Deferred} that fires with
        True.
        """
        # A fresh fake file per spelling keeps outchunks and the closed flag
        # from one iteration from leaking into the next.
        for okYes in ['yes', 'Yes', 'yes\n']:
            self.newFile([okYes])
            l = []
            self.ui.prompt("Hello, world!").addCallback(l.append)
            self.assertEqual(["Hello, world!"], self.fakeFile.outchunks)
            self.assertEqual([True], l)
            self.assertEqual(True, self.fakeFile.closed)
    def test_promptNo(self):
        """
        L{ConsoleUI.prompt} writes a message to the console, then reads a line.
        If that line is 'no', then it returns a L{Deferred} that fires with
        False.
        """
        for okNo in ['no', 'No', 'no\n']:
            self.newFile([okNo])
            l = []
            self.ui.prompt("Goodbye, world!").addCallback(l.append)
            self.assertEqual(["Goodbye, world!"], self.fakeFile.outchunks)
            self.assertEqual([False], l)
            self.assertEqual(True, self.fakeFile.closed)
    def test_promptRepeatedly(self):
        """
        L{ConsoleUI.prompt} writes a message to the console, then reads a line.
        If that line is neither 'yes' nor 'no', then it says "Please enter
        'yes' or 'no'" until it gets a 'yes' or a 'no', at which point it
        returns a Deferred that answers either True or False.
        """
        # Three junk answers before the real one, hence three re-prompts.
        self.newFile(['what', 'uh', 'okay', 'yes'])
        l = []
        self.ui.prompt("Please say something useful.").addCallback(l.append)
        self.assertEqual([True], l)
        self.assertEqual(self.fakeFile.outchunks,
                         ["Please say something useful."] +
                         ["Please type 'yes' or 'no': "] * 3)
        self.assertEqual(True, self.fakeFile.closed)
        self.newFile(['blah', 'stuff', 'feh', 'no'])
        l = []
        self.ui.prompt("Please say something negative.").addCallback(l.append)
        self.assertEqual([False], l)
        self.assertEqual(self.fakeFile.outchunks,
                         ["Please say something negative."] +
                         ["Please type 'yes' or 'no': "] * 3)
        self.assertEqual(True, self.fakeFile.closed)
    def test_promptOpenFailed(self):
        """
        If the C{opener} passed to L{ConsoleUI} raises an exception, that
        exception will fail the L{Deferred} returned from L{ConsoleUI.prompt}.
        """
        def raiseIt():
            raise IOError()
        ui = ConsoleUI(raiseIt)
        d = ui.prompt("This is a test.")
        return self.assertFailure(d, IOError)
    def test_warn(self):
        """
        L{ConsoleUI.warn} should output a message to the console object.
        """
        self.ui.warn("Test message.")
        self.assertEqual(["Test message."], self.fakeFile.outchunks)
        self.assertEqual(True, self.fakeFile.closed)
    def test_warnOpenFailed(self):
        """
        L{ConsoleUI.warn} should log a traceback if the output can't be opened.
        """
        def raiseIt():
            1 / 0
        ui = ConsoleUI(raiseIt)
        ui.warn("This message never makes it.")
        # warn() must swallow the error and route it to the log instead of
        # letting it propagate.
        self.assertEqual(len(self.flushLoggedErrors(ZeroDivisionError)), 1)
class FakeUI(object):
    """
    A fake UI object, adhering to the interface expected by
    L{KnownHostsFile.verifyHostKey}
    @ivar userWarnings: inputs provided to 'warn'.
    @ivar promptDeferred: last result returned from 'prompt'.
    @ivar promptText: the last input provided to 'prompt'.
    """
    def __init__(self):
        self.promptText = None
        self.promptDeferred = None
        self.userWarnings = []
    def prompt(self, text):
        """
        Record the prompt text and hand back a fresh, unfired L{Deferred}
        that the test can fire to simulate the user's answer.
        """
        self.promptDeferred = Deferred()
        self.promptText = text
        return self.promptDeferred
    def warn(self, text):
        """
        Collect the warning text for later inspection by the test.
        """
        self.userWarnings.append(text)
class FakeObject(object):
    """
    A fake object that can have some attributes. Used to fake
    L{SSHClientTransport} and L{SSHClientFactory}.
    """
    # Intentionally empty: tests attach attributes (e.g. .factory, .options)
    # directly to instances as needed.
class DefaultAPITests(TestCase):
    """
    The API in L{twisted.conch.client.default.verifyHostKey} is the integration
    point between the code in the rest of conch and L{KnownHostsFile}.
    """
    def patchedOpen(self, fname, mode):
        """
        The patched version of 'open'; this returns a L{FakeFile} that the
        instantiated L{ConsoleUI} can use.
        """
        # verifyHostKey is expected to prompt on the controlling terminal,
        # opened read/write in binary mode.
        self.assertEqual(fname, "/dev/tty")
        self.assertEqual(mode, "r+b")
        return self.fakeFile
    def setUp(self):
        """
        Patch 'open' in verifyHostKey.
        """
        self.fakeFile = FakeFile()
        self.patch(default, "_open", self.patchedOpen)
        # Pre-populate a known_hosts file with one hostname entry and one IP
        # entry so the "already known" cases have something to match.
        self.hostsOption = self.mktemp()
        knownHostsFile = KnownHostsFile(FilePath(self.hostsOption))
        knownHostsFile.addHostKey("exists.example.com", Key.fromString(sampleKey))
        knownHostsFile.addHostKey("4.3.2.1", Key.fromString(sampleKey))
        knownHostsFile.save()
        self.fakeTransport = FakeObject()
        self.fakeTransport.factory = FakeObject()
        self.options = self.fakeTransport.factory.options = {
            'host': "exists.example.com",
            'known-hosts': self.hostsOption
            }
    def test_verifyOKKey(self):
        """
        L{default.verifyHostKey} should return a L{Deferred} which fires with
        C{1} when passed a host, IP, and key which already match the
        known_hosts file it is supposed to check.
        """
        l = []
        default.verifyHostKey(self.fakeTransport, "4.3.2.1", sampleKey,
                              "I don't care.").addCallback(l.append)
        self.assertEqual([1], l)
    def replaceHome(self, tempHome):
        """
        Replace the HOME environment variable until the end of the current
        test, with the given new home-directory, so that L{os.path.expanduser}
        will yield controllable, predictable results.
        @param tempHome: the pathname to replace the HOME variable with.
        @type tempHome: L{str}
        """
        oldHome = os.environ.get('HOME')
        def cleanupHome():
            # Restore the pre-test state exactly: delete HOME if it was
            # absent before, otherwise put the old value back.
            if oldHome is None:
                del os.environ['HOME']
            else:
                os.environ['HOME'] = oldHome
        self.addCleanup(cleanupHome)
        os.environ['HOME'] = tempHome
    def test_noKnownHostsOption(self):
        """
        L{default.verifyHostKey} should find your known_hosts file in
        ~/.ssh/known_hosts if you don't specify one explicitly on the command
        line.
        """
        l = []
        tmpdir = self.mktemp()
        # Move the hosts file prepared in setUp into the default location
        # under a faked HOME, then clear the explicit option.
        oldHostsOption = self.hostsOption
        hostsNonOption = FilePath(tmpdir).child(".ssh").child("known_hosts")
        hostsNonOption.parent().makedirs()
        FilePath(oldHostsOption).moveTo(hostsNonOption)
        self.replaceHome(tmpdir)
        self.options['known-hosts'] = None
        default.verifyHostKey(self.fakeTransport, "4.3.2.1", sampleKey,
                              "I don't care.").addCallback(l.append)
        self.assertEqual([1], l)
    def test_verifyHostButNotIP(self):
        """
        L{default.verifyHostKey} should return a L{Deferred} which fires with
        C{1} when passed a host which matches, with an IP that is not present
        in its known_hosts file, and should also warn the user that it has
        added the IP address.
        """
        l = []
        default.verifyHostKey(self.fakeTransport, "8.7.6.5", sampleKey,
                              "Fingerprint not required.").addCallback(l.append)
        self.assertEqual(
            ["Warning: Permanently added the RSA host key for IP address "
             "'8.7.6.5' to the list of known hosts."],
            self.fakeFile.outchunks)
        self.assertEqual([1], l)
        knownHostsFile = KnownHostsFile.fromPath(FilePath(self.hostsOption))
        self.assertEqual(True, knownHostsFile.hasHostKey("8.7.6.5",
                                                         Key.fromString(sampleKey)))
    def test_verifyQuestion(self):
        """
        L{default.verifyHostKey} should return a L{Deferred} which fires with
        C{0} when passed a unknown host that the user refuses to acknowledge.
        """
        self.fakeTransport.factory.options['host'] = 'fake.example.com'
        # Queue the user's "no" answer before the prompt is issued.
        self.fakeFile.inlines.append("no")
        d = default.verifyHostKey(
            self.fakeTransport, "9.8.7.6", otherSampleKey, "No fingerprint!")
        self.assertEqual(
            ["The authenticity of host 'fake.example.com (9.8.7.6)' "
             "can't be established.\n"
             "RSA key fingerprint is "
             "57:a1:c2:a1:07:a0:2b:f4:ce:b5:e5:b7:ae:cc:e1:99.\n"
             "Are you sure you want to continue connecting (yes/no)? "],
            self.fakeFile.outchunks)
        return self.assertFailure(d, UserRejectedKey)
    def test_verifyBadKey(self):
        """
        L{default.verifyHostKey} should return a L{Deferred} which fails with
        L{HostKeyChanged} if the host key is incorrect.
        """
        d = default.verifyHostKey(
            self.fakeTransport, "4.3.2.1", otherSampleKey,
            "Again, not required.")
        return self.assertFailure(d, HostKeyChanged)
| agpl-3.0 |
sabi0/intellij-community | python/testData/highlighting/builtins.py | 21 | 1169 | # bg is always black.
# effect is white
# func decl: red bold
# class decl: blue bold
# predefined decl: green bold
# predefined usage: yellow bold
<info descr="PY.BUILTIN_NAME" type="INFORMATION" foreground="0x00ff00" background="0x000000" effectcolor="0xffffff" effecttype="BOXED" fonttype="1">len</info>("")
len = [] # redefine
len # no highlight
class <info descr="PY.CLASS_DEFINITION" type="INFORMATION">A</info>(<info descr="PY.BUILTIN_NAME" type="INFORMATION" foreground="0x00ff00" background="0x000000" effectcolor="0xffffff" effecttype="BOXED" fonttype="1">object</info>):
<info descr="PY.PREDEFINED_USAGE" type="INFORMATION" foreground="0xffff00" background="0x000000" effectcolor="0xffffff" effecttype="BOXED" fonttype="1">__metaclass__</info> = M # assignment target
<info descr="PY.DECORATOR">@</info><info descr="PY.DECORATOR">classmethod</info>
def <info descr="PY.FUNC_DEFINITION">foo</info>(<info descr="PY.SELF_PARAMETER">cls</info>):
pass
try:
1/0
except <info descr="PY.BUILTIN_NAME" type="INFORMATION" foreground="0x00ff00" background="0x000000" effectcolor="0xffffff" effecttype="BOXED" fonttype="1">ArithmeticError</info>:
pass
| apache-2.0 |
pkscout/script.argustv-helper | resources/common/fileops.py | 1 | 2425 | # v.0.3.0
import ntpath, xbmcvfs
def checkDir( path ):
    # Ensure the given directory exists, creating it if necessary.
    # Returns (already_existed, log lines).
    log_lines = ['checking for directory ' + path]
    if xbmcvfs.exists( path ):
        log_lines.append( 'directory exists' )
        return True, log_lines
    log_lines.append( 'directory does not exist, creating it' )
    xbmcvfs.mkdirs( path )
    return False, log_lines
def deleteFile( filename ):
    # Delete filename via xbmcvfs; returns (success boolean, log lines).
    # Fix: the original used the Python-2-only "except Exception, e" syntax;
    # "except ... as e" is valid on Python 2.6+ and Python 3.
    log_lines = []
    if not xbmcvfs.exists( filename ):
        log_lines.append( '%s does not exist' % filename )
        return False, log_lines
    try:
        xbmcvfs.delete( filename )
        log_lines.append( 'deleting file ' + filename )
    except IOError:
        log_lines.append( 'unable to delete ' + filename )
        return False, log_lines
    except Exception as e:
        log_lines.append( 'unknown error while attempting to delete ' + filename )
        log_lines.append( e )
        return False, log_lines
    return True, log_lines
def pathLeaf(path):
    # ntpath.split handles both '/' and '\' separators regardless of the
    # platform the code is running on.
    head, leaf = ntpath.split(path)
    return {"path": head, "filename": leaf}
def readFile( filename ):
    # Read the entire contents of filename via xbmcvfs.
    # Returns (log lines, data); data is '' on any failure.
    # Fix: the original used the Python-2-only "except X, e" syntax;
    # "except ... as e" is valid on Python 2.6+ and Python 3.
    log_lines = []
    if not xbmcvfs.exists( filename ):
        log_lines.append( '%s does not exist' % filename )
        return log_lines, ''
    try:
        the_file = xbmcvfs.File( filename, 'r' )
        data = the_file.read()
        the_file.close()
    except IOError:
        log_lines.append( 'unable to read data from ' + filename )
        return log_lines, ''
    except Exception as e:
        log_lines.append( 'unknown error while reading data from ' + filename )
        log_lines.append( e )
        return log_lines, ''
    return log_lines, data
def writeFile( data, filename ):
    # Write data to filename via xbmcvfs; returns (success boolean, log lines).
    # Fix: the original used the Python-2-only "except X, e" syntax;
    # "except ... as e" is valid on Python 2.6+ and Python 3.
    log_lines = []
    # Python 2 only: encode unicode text before the binary write.  On
    # Python 3 the type name is never 'unicode', so this is a no-op there.
    if type(data).__name__=='unicode':
        data = data.encode('utf-8')
    try:
        thefile = xbmcvfs.File( filename, 'wb' )
        thefile.write( data )
        thefile.close()
    except IOError as e:
        log_lines.append( 'unable to write data to ' + filename )
        log_lines.append( e )
        return False, log_lines
    except Exception as e:
        log_lines.append( 'unknown error while writing data to ' + filename )
        log_lines.append( e )
        return False, log_lines
    log_lines.append( 'successfuly wrote data to ' + filename )
    return True, log_lines
joshloyal/scikit-learn | sklearn/cross_decomposition/cca_.py | 151 | 3192 | from .pls_ import _PLS
__all__ = ['CCA']
class CCA(_PLS):
    """CCA Canonical Correlation Analysis.
    CCA inherits from PLS with mode="B" and deflation_mode="canonical".
    Read more in the :ref:`User Guide <cross_decomposition>`.
    Parameters
    ----------
    n_components : int, (default 2).
        number of components to keep.
    scale : boolean, (default True)
        whether to scale the data?
    max_iter : an integer, (default 500)
        the maximum number of iterations of the NIPALS inner loop
    tol : non-negative real, default 1e-06.
        the tolerance used in the iterative algorithm
    copy : boolean
        Whether the deflation be done on a copy. Let the default value
        to True unless you don't care about side effects
    Attributes
    ----------
    x_weights_ : array, [p, n_components]
        X block weights vectors.
    y_weights_ : array, [q, n_components]
        Y block weights vectors.
    x_loadings_ : array, [p, n_components]
        X block loadings vectors.
    y_loadings_ : array, [q, n_components]
        Y block loadings vectors.
    x_scores_ : array, [n_samples, n_components]
        X scores.
    y_scores_ : array, [n_samples, n_components]
        Y scores.
    x_rotations_ : array, [p, n_components]
        X block to latents rotations.
    y_rotations_ : array, [q, n_components]
        Y block to latents rotations.
    n_iter_ : array-like
        Number of iterations of the NIPALS inner loop for each
        component.
    Notes
    -----
    For each component k, find the weights u, v that maximizes
    max corr(Xk u, Yk v), such that ``|u| = |v| = 1``
    Note that it maximizes only the correlations between the scores.
    The residual matrix of X (Xk+1) block is obtained by the deflation on the
    current X score: x_score.
    The residual matrix of Y (Yk+1) block is obtained by deflation on the
    current Y score.
    Examples
    --------
    >>> from sklearn.cross_decomposition import CCA
    >>> X = [[0., 0., 1.], [1.,0.,0.], [2.,2.,2.], [3.,5.,4.]]
    >>> Y = [[0.1, -0.2], [0.9, 1.1], [6.2, 5.9], [11.9, 12.3]]
    >>> cca = CCA(n_components=1)
    >>> cca.fit(X, Y)
    ... # doctest: +ELLIPSIS, +NORMALIZE_WHITESPACE
    CCA(copy=True, max_iter=500, n_components=1, scale=True, tol=1e-06)
    >>> X_c, Y_c = cca.transform(X, Y)
    References
    ----------
    Jacob A. Wegelin. A survey of Partial Least Squares (PLS) methods, with
    emphasis on the two-block case. Technical Report 371, Department of
    Statistics, University of Washington, Seattle, 2000.
    In french but still a reference:
    Tenenhaus, M. (1998). La regression PLS: theorie et pratique. Paris:
    Editions Technic.
    See also
    --------
    PLSCanonical
    PLSSVD
    """
    def __init__(self, n_components=2, scale=True,
                 max_iter=500, tol=1e-06, copy=True):
        # CCA is entirely configured through the _PLS base class: mode="B"
        # selects the canonical-correlation inner loop, and
        # deflation_mode="canonical" deflates both the X and Y blocks.
        super(CCA, self).__init__(n_components=n_components, scale=scale,
                                  deflation_mode="canonical", mode="B",
                                  norm_y_weights=True, algorithm="nipals",
                                  max_iter=max_iter, tol=tol, copy=copy)
AIML/scikit-learn | sklearn/svm/bounds.py | 280 | 2911 | """Determination of parameter bounds"""
# Author: Paolo Losi
# License: BSD 3 clause
from warnings import warn
import numpy as np
from ..preprocessing import LabelBinarizer
from ..utils.validation import check_consistent_length, check_array
from ..utils.extmath import safe_sparse_dot
def l1_min_c(X, y, loss='squared_hinge', fit_intercept=True,
             intercept_scaling=1.0):
    """
    Return the lowest bound for C such that for C in (l1_min_C, infinity)
    the model is guaranteed not to be empty. This applies to l1 penalized
    classifiers, such as LinearSVC with penalty='l1' and
    linear_model.LogisticRegression with penalty='l1'.
    This value is valid if class_weight parameter in fit() is not set.
    Parameters
    ----------
    X : array-like or sparse matrix, shape = [n_samples, n_features]
        Training vector, where n_samples in the number of samples and
        n_features is the number of features.
    y : array, shape = [n_samples]
        Target vector relative to X
    loss : {'squared_hinge', 'log'}, default 'squared_hinge'
        Specifies the loss function.
        With 'squared_hinge' it is the squared hinge loss (a.k.a. L2 loss).
        With 'log' it is the loss of logistic regression models.
        'l2' is accepted as an alias for 'squared_hinge', for backward
        compatibility reasons, but should not be used in new code.
    fit_intercept : bool, default: True
        Specifies if the intercept should be fitted by the model.
        It must match the fit() method parameter.
    intercept_scaling : float, default: 1
        when fit_intercept is True, instance vector x becomes
        [x, intercept_scaling],
        i.e. a "synthetic" feature with constant value equals to
        intercept_scaling is appended to the instance vector.
        It must match the fit() method parameter.
    Returns
    -------
    l1_min_c: float
        minimum value for C
    """
    # 'l2' is a deprecated alias for 'squared_hinge'; normalize it first.
    if loss == "l2":
        warn("loss='l2' will be impossible from 0.18 onwards."
             " Use loss='squared_hinge' instead.",
             DeprecationWarning)
        loss = "squared_hinge"
    if loss not in ('squared_hinge', 'log'):
        raise ValueError('loss type not in ("squared_hinge", "log", "l2")')
    X = check_array(X, accept_sparse='csc')
    check_consistent_length(X, y)
    Y = LabelBinarizer(neg_label=-1).fit_transform(y).T
    # Largest absolute label/feature correlation over all classes and
    # features; the bound scales as its inverse.
    denom = np.max(np.abs(safe_sparse_dot(Y, X)))
    if fit_intercept:
        # Account for the synthetic intercept feature as well.
        bias = intercept_scaling * np.ones((np.size(y), 1))
        denom = max(denom, abs(np.dot(Y, bias)).max())
    if denom == 0.0:
        raise ValueError('Ill-posed l1_min_c calculation: l1 will always '
                         'select zero coefficients for this data')
    return (0.5 if loss == 'squared_hinge' else 2.0) / denom
| bsd-3-clause |
simone/django-gb | tests/view_tests/tests/test_csrf.py | 20 | 2641 | from django.test import TestCase, override_settings, Client
from django.utils.translation import override
class CsrfViewTests(TestCase):
    # URLconf used by every request in this class.
    urls = "view_tests.urls"
    def setUp(self):
        super(CsrfViewTests, self).setUp()
        # enforce_csrf_checks makes the test client behave like an external
        # client instead of silently bypassing CSRF validation.
        self.client = Client(enforce_csrf_checks=True)
    @override_settings(
        USE_I18N=True,
        MIDDLEWARE_CLASSES=(
            'django.middleware.locale.LocaleMiddleware',
            'django.middleware.common.CommonMiddleware',
            'django.middleware.csrf.CsrfViewMiddleware',
        ),
    )
    def test_translation(self):
        """
        Test that an invalid request is rejected with a localized error message.
        """
        response = self.client.post('/')
        self.assertContains(response, "Forbidden", status_code=403)
        self.assertContains(response,
                            "CSRF verification failed. Request aborted.",
                            status_code=403)
        # NOTE(review): override('en-us') pins the active language in this
        # thread so the request's language is determined solely by
        # LocaleMiddleware from LANGUAGE_CODE='nl' -- hence the Dutch text
        # below.  Confirm against LocaleMiddleware behavior.
        with self.settings(LANGUAGE_CODE='nl'), override('en-us'):
            response = self.client.post('/')
            self.assertContains(response, "Verboden", status_code=403)
            self.assertContains(response,
                                "CSRF-verificatie mislukt. Verzoek afgebroken.",
                                status_code=403)
    @override_settings(
        SECURE_PROXY_SSL_HEADER=('HTTP_X_FORWARDED_PROTO', 'https')
    )
    def test_no_referer(self):
        """
        Referer header is strictly checked for POST over HTTPS. Trigger the
        exception by sending an incorrect referer.
        """
        response = self.client.post('/', HTTP_X_FORWARDED_PROTO='https')
        self.assertContains(response,
                            "You are seeing this message because this HTTPS "
                            "site requires a 'Referer header' to be "
                            "sent by your Web browser, but none was sent.",
                            status_code=403)
    def test_no_cookies(self):
        """
        The CSRF cookie is checked for POST. Failure to send this cookie should
        provide a nice error message.
        """
        response = self.client.post('/')
        self.assertContains(response,
                            "You are seeing this message because this site "
                            "requires a CSRF cookie when submitting forms. "
                            "This cookie is required for security reasons, to "
                            "ensure that your browser is not being hijacked "
                            "by third parties.",
                            status_code=403)
| bsd-3-clause |
sarvex/tensorflow | tensorflow/python/ops/linalg/linear_operator_tridiag.py | 6 | 15035 | # Copyright 2019 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""`LinearOperator` acting like a tridiagonal matrix."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow.python.framework import ops
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import check_ops
from tensorflow.python.ops import control_flow_ops
from tensorflow.python.ops import gen_array_ops
from tensorflow.python.ops import manip_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.ops.linalg import linalg_impl as linalg
from tensorflow.python.ops.linalg import linear_operator
from tensorflow.python.ops.linalg import linear_operator_util
from tensorflow.python.util.tf_export import tf_export
# Public API of this module.
__all__ = ['LinearOperatorTridiag',]
# Accepted values for the `diagonals_format` constructor argument; see
# LinearOperatorTridiag.__init__ for the layout each format implies.
_COMPACT = 'compact'
_MATRIX = 'matrix'
_SEQUENCE = 'sequence'
_DIAGONAL_FORMATS = frozenset({_COMPACT, _MATRIX, _SEQUENCE})
@tf_export('linalg.LinearOperatorTridiag')
@linear_operator.make_composite_tensor
class LinearOperatorTridiag(linear_operator.LinearOperator):
"""`LinearOperator` acting like a [batch] square tridiagonal matrix.
This operator acts like a [batch] square tridiagonal matrix `A` with shape
`[B1,...,Bb, N, N]` for some `b >= 0`. The first `b` indices index a
batch member. For every batch index `(i1,...,ib)`, `A[i1,...,ib, : :]` is
an `N x M` matrix. This matrix `A` is not materialized, but for
purposes of broadcasting this shape will be relevant.
Example usage:
Create a 3 x 3 tridiagonal linear operator.
>>> superdiag = [3., 4., 5.]
>>> diag = [1., -1., 2.]
>>> subdiag = [6., 7., 8]
>>> operator = tf.linalg.LinearOperatorTridiag(
... [superdiag, diag, subdiag],
... diagonals_format='sequence')
>>> operator.to_dense()
<tf.Tensor: shape=(3, 3), dtype=float32, numpy=
array([[ 1., 3., 0.],
[ 7., -1., 4.],
[ 0., 8., 2.]], dtype=float32)>
>>> operator.shape
TensorShape([3, 3])
Scalar Tensor output.
>>> operator.log_abs_determinant()
<tf.Tensor: shape=(), dtype=float32, numpy=4.3307333>
Create a [2, 3] batch of 4 x 4 linear operators.
>>> diagonals = tf.random.normal(shape=[2, 3, 3, 4])
>>> operator = tf.linalg.LinearOperatorTridiag(
... diagonals,
... diagonals_format='compact')
Create a shape [2, 1, 4, 2] vector. Note that this shape is compatible
since the batch dimensions, [2, 1], are broadcast to
operator.batch_shape = [2, 3].
>>> y = tf.random.normal(shape=[2, 1, 4, 2])
>>> x = operator.solve(y)
>>> x
<tf.Tensor: shape=(2, 3, 4, 2), dtype=float32, numpy=...,
dtype=float32)>
#### Shape compatibility
This operator acts on [batch] matrix with compatible shape.
`x` is a batch matrix with compatible shape for `matmul` and `solve` if
```
operator.shape = [B1,...,Bb] + [N, N], with b >= 0
x.shape = [C1,...,Cc] + [N, R],
and [C1,...,Cc] broadcasts with [B1,...,Bb].
```
#### Performance
Suppose `operator` is a `LinearOperatorTridiag` of shape `[N, N]`,
and `x.shape = [N, R]`. Then
* `operator.matmul(x)` will take O(N * R) time.
* `operator.solve(x)` will take O(N * R) time.
If instead `operator` and `x` have shape `[B1,...,Bb, N, N]` and
`[B1,...,Bb, N, R]`, every operation increases in complexity by `B1*...*Bb`.
#### Matrix property hints
This `LinearOperator` is initialized with boolean flags of the form `is_X`,
for `X = non_singular, self_adjoint, positive_definite, square`.
These have the following meaning:
* If `is_X == True`, callers should expect the operator to have the
property `X`. This is a promise that should be fulfilled, but is *not* a
runtime assert. For example, finite floating point precision may result
in these promises being violated.
* If `is_X == False`, callers should expect the operator to not have `X`.
* If `is_X == None` (the default), callers should have no expectation either
way.
"""
  def __init__(self,
               diagonals,
               diagonals_format=_COMPACT,
               is_non_singular=None,
               is_self_adjoint=None,
               is_positive_definite=None,
               is_square=None,
               name='LinearOperatorTridiag'):
    r"""Initialize a `LinearOperatorTridiag`.

    Args:
      diagonals: `Tensor` or list of `Tensor`s depending on `diagonals_format`.
        If `diagonals_format=sequence`, this is a list of three `Tensor`'s each
        with shape `[B1, ..., Bb, N]`, `b >= 0, N >= 0`, representing the
        superdiagonal, diagonal and subdiagonal in that order. Note the
        superdiagonal is padded with an element in the last position, and the
        subdiagonal is padded with an element in the front.
        If `diagonals_format=matrix` this is a `[B1, ... Bb, N, N]` shaped
        `Tensor` representing the full tridiagonal matrix.
        If `diagonals_format=compact` this is a `[B1, ... Bb, 3, N]` shaped
        `Tensor` with the second to last dimension indexing the
        superdiagonal, diagonal and subdiagonal in that order. Note the
        superdiagonal is padded with an element in the last position, and the
        subdiagonal is padded with an element in the front.
        In every case, these `Tensor`s are all floating dtype.
      diagonals_format: one of `matrix`, `sequence`, or `compact`. Default is
        `compact`.
      is_non_singular: Expect that this operator is non-singular.
      is_self_adjoint: Expect that this operator is equal to its hermitian
        transpose. If `diag.dtype` is real, this is auto-set to `True`.
      is_positive_definite: Expect that this operator is positive definite,
        meaning the quadratic form `x^H A x` has positive real part for all
        nonzero `x`. Note that we do not require the operator to be
        self-adjoint to be positive-definite. See:
        https://en.wikipedia.org/wiki/Positive-definite_matrix#Extension_for_non-symmetric_matrices
      is_square: Expect that this operator acts like square [batch] matrices.
      name: A name for this `LinearOperator`.

    Raises:
      TypeError: If `diag.dtype` is not an allowed type.
      ValueError: If `diag.dtype` is real, and `is_self_adjoint` is not `True`.
    """
    # Record the constructor arguments; the base class exposes these as
    # `parameters` (see `_composite_tensor_fields` below).
    parameters = dict(
        diagonals=diagonals,
        diagonals_format=diagonals_format,
        is_non_singular=is_non_singular,
        is_self_adjoint=is_self_adjoint,
        is_positive_definite=is_positive_definite,
        is_square=is_square,
        name=name
    )
    with ops.name_scope(name, values=[diagonals]):
      if diagonals_format not in _DIAGONAL_FORMATS:
        raise ValueError(
            'Diagonals Format must be one of compact, matrix, sequence'
            ', got : {}'.format(diagonals_format))
      if diagonals_format == _SEQUENCE:
        # Sequence format: three separate tensors (super, main, sub).
        self._diagonals = [linear_operator_util.convert_nonref_to_tensor(
            d, name='diag_{}'.format(i)) for i, d in enumerate(diagonals)]
        dtype = self._diagonals[0].dtype
      else:
        # Matrix / compact formats are a single tensor.
        self._diagonals = linear_operator_util.convert_nonref_to_tensor(
            diagonals, name='diagonals')
        dtype = self._diagonals.dtype
      self._diagonals_format = diagonals_format

      super(LinearOperatorTridiag, self).__init__(
          dtype=dtype,
          is_non_singular=is_non_singular,
          is_self_adjoint=is_self_adjoint,
          is_positive_definite=is_positive_definite,
          is_square=is_square,
          parameters=parameters,
          name=name)
  def _shape(self):
    # Static shape [B1,...,Bb, N, N], derived from the diagonals' shape
    # according to the storage format.
    if self.diagonals_format == _MATRIX:
      return self.diagonals.shape
    if self.diagonals_format == _COMPACT:
      # Remove the second to last dimension that contains the value 3.
      d_shape = self.diagonals.shape[:-2].concatenate(
          self.diagonals.shape[-1])
    else:
      # Sequence format: batch shape is the broadcast of the three
      # diagonals' batch shapes.
      broadcast_shape = array_ops.broadcast_static_shape(
          self.diagonals[0].shape[:-1],
          self.diagonals[1].shape[:-1])
      broadcast_shape = array_ops.broadcast_static_shape(
          broadcast_shape,
          self.diagonals[2].shape[:-1])
      d_shape = broadcast_shape.concatenate(self.diagonals[1].shape[-1])
    # The operator is square, so append N once more.
    return d_shape.concatenate(d_shape[-1])
  def _shape_tensor(self, diagonals=None):
    # Dynamic counterpart of `_shape`. `diagonals` may be passed explicitly
    # (e.g. the adjoint's diagonals from `_solve`); defaults to this
    # operator's diagonals.
    diagonals = diagonals if diagonals is not None else self.diagonals
    if self.diagonals_format == _MATRIX:
      return array_ops.shape(diagonals)
    if self.diagonals_format == _COMPACT:
      # Index away the axis of size 3 holding super/main/sub diagonals.
      d_shape = array_ops.shape(diagonals[..., 0, :])
    else:
      # Sequence format: broadcast the three diagonals' batch shapes.
      broadcast_shape = array_ops.broadcast_dynamic_shape(
          array_ops.shape(self.diagonals[0])[:-1],
          array_ops.shape(self.diagonals[1])[:-1])
      broadcast_shape = array_ops.broadcast_dynamic_shape(
          broadcast_shape,
          array_ops.shape(self.diagonals[2])[:-1])
      d_shape = array_ops.concat(
          [broadcast_shape, [array_ops.shape(self.diagonals[1])[-1]]], axis=0)
    # The operator is square, so append N once more.
    return array_ops.concat([d_shape, [d_shape[-1]]], axis=-1)
  def _assert_self_adjoint(self):
    # Check the diagonal has non-zero imaginary, and the super and subdiagonals
    # are conjugate.  Returns a grouped op of runtime assertions.
    asserts = []
    diag_message = (
        'This tridiagonal operator contained non-zero '
        'imaginary values on the diagonal.')
    off_diag_message = (
        'This tridiagonal operator has non-conjugate '
        'subdiagonal and superdiagonal.')

    if self.diagonals_format == _MATRIX:
      # Full matrix: self-adjointness is simply A == A^H.
      asserts += [check_ops.assert_equal(
          self.diagonals, linalg.adjoint(self.diagonals),
          message='Matrix was not equal to its adjoint.')]
    elif self.diagonals_format == _COMPACT:
      diagonals = ops.convert_to_tensor_v2_with_dispatch(self.diagonals)
      asserts += [linear_operator_util.assert_zero_imag_part(
          diagonals[..., 1, :], message=diag_message)]
      # Roll the subdiagonal so the shifted argument is at the end.
      subdiag = manip_ops.roll(diagonals[..., 2, :], shift=-1, axis=-1)
      asserts += [check_ops.assert_equal(
          math_ops.conj(subdiag[..., :-1]),
          diagonals[..., 0, :-1],
          message=off_diag_message)]
    else:
      # Sequence format: diagonals[1] is the main diagonal and diagonals[0]
      # / diagonals[2] are the super / sub diagonals.
      asserts += [linear_operator_util.assert_zero_imag_part(
          self.diagonals[1], message=diag_message)]
      subdiag = manip_ops.roll(self.diagonals[2], shift=-1, axis=-1)
      asserts += [check_ops.assert_equal(
          math_ops.conj(subdiag[..., :-1]),
          self.diagonals[0][..., :-1],
          message=off_diag_message)]
    return control_flow_ops.group(asserts)
  def _construct_adjoint_diagonals(self, diagonals):
    # Constructs adjoint tridiagonal matrix from diagonals.  The adjoint of
    # a tridiagonal matrix conjugates every entry and swaps the super- and
    # subdiagonals (which also requires re-aligning their padding element).
    if self.diagonals_format == _SEQUENCE:
      diagonals = [math_ops.conj(d) for d in reversed(diagonals)]
      # The subdiag and the superdiag swap places, so we need to shift the
      # padding argument.
      diagonals[0] = manip_ops.roll(diagonals[0], shift=-1, axis=-1)
      diagonals[2] = manip_ops.roll(diagonals[2], shift=1, axis=-1)
      return diagonals
    elif self.diagonals_format == _MATRIX:
      # Full matrix: the adjoint is just A^H.
      return linalg.adjoint(diagonals)
    else:
      # Compact format: unstack the [..., 3, N] tensor, swap and re-pad.
      diagonals = math_ops.conj(diagonals)
      superdiag, diag, subdiag = array_ops.unstack(
          diagonals, num=3, axis=-2)
      # The subdiag and the superdiag swap places, so we need
      # to shift all arguments.
      new_superdiag = manip_ops.roll(subdiag, shift=-1, axis=-1)
      new_subdiag = manip_ops.roll(superdiag, shift=1, axis=-1)
      return array_ops.stack([new_superdiag, diag, new_subdiag], axis=-2)
  def _matmul(self, x, adjoint=False, adjoint_arg=False):
    diagonals = self.diagonals
    if adjoint:
      # A^H x is a regular tridiagonal matmul with the adjoint's diagonals.
      diagonals = self._construct_adjoint_diagonals(diagonals)
    x = linalg.adjoint(x) if adjoint_arg else x
    return linalg.tridiagonal_matmul(
        diagonals, x,
        diagonals_format=self.diagonals_format)
  def _solve(self, rhs, adjoint=False, adjoint_arg=False):
    diagonals = self.diagonals
    if adjoint:
      # Solving A^H x = rhs is a regular solve with the adjoint's diagonals.
      diagonals = self._construct_adjoint_diagonals(diagonals)

    # TODO(b/144860784): Remove the broadcasting code below once
    # tridiagonal_solve broadcasts.

    # Explicitly broadcast both operands to a common batch shape before
    # calling tridiagonal_solve, which (as of this TODO) requires it.
    rhs_shape = array_ops.shape(rhs)
    k = self._shape_tensor(diagonals)[-1]
    broadcast_shape = array_ops.broadcast_dynamic_shape(
        self._shape_tensor(diagonals)[:-2], rhs_shape[:-2])
    rhs = array_ops.broadcast_to(
        rhs, array_ops.concat(
            [broadcast_shape, rhs_shape[-2:]], axis=-1))
    if self.diagonals_format == _MATRIX:
      diagonals = array_ops.broadcast_to(
          diagonals, array_ops.concat(
              [broadcast_shape, [k, k]], axis=-1))
    elif self.diagonals_format == _COMPACT:
      diagonals = array_ops.broadcast_to(
          diagonals, array_ops.concat(
              [broadcast_shape, [3, k]], axis=-1))
    else:
      diagonals = [
          array_ops.broadcast_to(d, array_ops.concat(
              [broadcast_shape, [k]], axis=-1)) for d in diagonals]

    y = linalg.tridiagonal_solve(
        diagonals, rhs,
        diagonals_format=self.diagonals_format,
        transpose_rhs=adjoint_arg,
        conjugate_rhs=adjoint_arg)
    return y
  def _diag_part(self):
    if self.diagonals_format == _MATRIX:
      return array_ops.matrix_diag_part(self.diagonals)
    elif self.diagonals_format == _SEQUENCE:
      # The main diagonal may have a smaller batch shape than the operator,
      # so broadcast it up to the operator's [batch] + [N] shape.
      diagonal = self.diagonals[1]
      return array_ops.broadcast_to(
          diagonal, self.shape_tensor()[:-1])
    else:
      # Compact format: row 1 of the [..., 3, N] tensor is the main diagonal.
      return self.diagonals[..., 1, :]
  def _to_dense(self):
    # Materialize the full [batch] N x N matrix from the stored diagonals.
    if self.diagonals_format == _MATRIX:
      return self.diagonals

    if self.diagonals_format == _COMPACT:
      # matrix_diag_v3 with k=(-1, 1) scatters the three rows of the
      # compact representation onto the sub/main/super diagonals.
      return gen_array_ops.matrix_diag_v3(
          self.diagonals,
          k=(-1, 1),
          num_rows=-1,
          num_cols=-1,
          align='LEFT_RIGHT',
          padding_value=0.)

    # Sequence format: stack the three diagonals into a compact tensor,
    # then densify the same way.
    diagonals = [
        ops.convert_to_tensor_v2_with_dispatch(d) for d in self.diagonals
    ]
    diagonals = array_ops.stack(diagonals, axis=-2)

    return gen_array_ops.matrix_diag_v3(
        diagonals,
        k=(-1, 1),
        num_rows=-1,
        num_cols=-1,
        align='LEFT_RIGHT',
        padding_value=0.)
  @property
  def diagonals(self):
    # The diagonals as supplied at construction; layout is described by
    # `diagonals_format`.
    return self._diagonals

  @property
  def diagonals_format(self):
    # One of 'matrix', 'sequence' or 'compact'.
    return self._diagonals_format

  @property
  def _composite_tensor_fields(self):
    # Constructor kwargs needed to rebuild this operator as a
    # composite tensor.
    return ('diagonals', 'diagonals_format')
| apache-2.0 |
woobe/h2o | py/testdir_single_jvm/test_players_NA_fvec.py | 1 | 1180 | import unittest, random, sys, time
sys.path.extend(['.','..','py'])
import h2o_hosts
import h2o, h2o_cmd, h2o_import as h2i
class Basic(unittest.TestCase):
    """Single-JVM h2o test: parse ncaa/Players.csv and report missing values.

    NOTE: this file targets Python 2 (print statement below).
    """

    def tearDown(self):
        # Fail the test if h2o logged any errors during the run.
        h2o.check_sandbox_for_errors()

    @classmethod
    def setUpClass(cls):
        # Build a local cloud when running on localhost, otherwise build on
        # the configured remote hosts.
        localhost = h2o.decide_if_localhost()
        if (localhost):
            h2o.build_cloud()
        else:
            h2o_hosts.build_cloud_with_hosts()

    @classmethod
    def tearDownClass(cls):
        h2o.tear_down_cloud()

    def test_players_NA_fvec(self):
        h2o.beta_features = True
        csvFilename = 'Players.csv'
        csvPathname = 'ncaa/' + csvFilename

        # hack it to ignore header (NA?) because it has duplicate col names
        parseResult = h2i.import_parse(bucket='home-0xdiag-datasets', path=csvPathname, schema='put',
            timeoutSecs=15, header=0)
        inspect = h2o_cmd.runInspect(None, parseResult['destination_key'])
        missingValuesList = h2o_cmd.infoFromInspect(inspect, csvPathname)
        print missingValuesList
        # NOTE(review): assertion is disabled, so this test only checks that
        # parse/inspect complete without error -- confirm that is intended.
        # self.assertEqual(missingValuesList, [], "Players.csv should have no NAs")
if __name__ == '__main__':
    # Standard h2o test entry point.
    h2o.unit_main()
| apache-2.0 |
minixalpha/SourceLearning | webpy/src/web/net.py | 14 | 6188 | """
Network Utilities
(from web.py)
"""
__all__ = [
"validipaddr", "validip6addr", "validipport", "validip", "validaddr",
"urlquote",
"httpdate", "parsehttpdate",
"htmlquote", "htmlunquote", "websafe",
]
import urllib, time
try: import datetime
except ImportError: pass
import re
import socket
def validip6addr(address):
    """
    Returns True if `address` is a valid IPv6 address.

    >>> validip6addr('::')
    True
    >>> validip6addr('aaaa:bbbb:cccc:dddd::1')
    True
    >>> validip6addr('1:2:3:4:5:6:7:8:9:10')
    False
    >>> validip6addr('12:10')
    False
    """
    # Delegate the parsing to the platform's inet_pton; an address it
    # cannot parse raises socket.error.
    try:
        socket.inet_pton(socket.AF_INET6, address)
        return True
    except socket.error:
        return False
def validipaddr(address):
    """
    Returns True if `address` is a valid IPv4 address.

    >>> validipaddr('192.168.1.1')
    True
    >>> validipaddr('192.168.1.800')
    False
    >>> validipaddr('192.168.1')
    False
    >>> validipaddr('192.168.+1.1')
    False
    """
    try:
        octets = address.split('.')
        if len(octets) != 4:
            return False
        for x in octets:
            # int() alone is too lenient: it accepts '+1', ' 1' and
            # (on Python 3) '1_0'.  Insist on 1-3 plain digits so that
            # only dotted-decimal notation validates.
            if not x.isdigit() or len(x) > 3:
                return False
            if not (0 <= int(x) <= 255):
                return False
    except ValueError:
        return False
    return True
def validipport(port):
    """
    Returns True if `port` is a valid IPv4 port.

    >>> validipport('9000')
    True
    >>> validipport('foo')
    False
    >>> validipport('1000000')
    False
    """
    # A port is valid when it parses as an integer in [0, 65535].
    try:
        value = int(port)
    except ValueError:
        return False
    return 0 <= value <= 65535
def validip(ip, defaultaddr="0.0.0.0", defaultport=8080):
    """
    Returns `(ip_address, port)` from string `ip_addr_port`

    >>> validip('1.2.3.4')
    ('1.2.3.4', 8080)
    >>> validip('80')
    ('0.0.0.0', 80)
    >>> validip('192.168.0.1:85')
    ('192.168.0.1', 85)
    >>> validip('::')
    ('::', 8080)
    >>> validip('[::]:88')
    ('::', 88)
    >>> validip('[::1]:80')
    ('::1', 80)
    """
    addr = defaultaddr
    port = defaultport

    #Matt Boswell's code to check for ipv6 first
    match = re.search(r'^\[([^]]+)\](?::(\d+))?$',ip) #check for [ipv6]:port
    if match:
        if validip6addr(match.group(1)):
            if match.group(2):
                if validipport(match.group(2)): return (match.group(1),int(match.group(2)))
            else:
                return (match.group(1),port)
    else:
        # A bare (unbracketed) IPv6 address gets the default port.
        if validip6addr(ip): return (ip,port)
    #end ipv6 code

    # IPv4 / port handling: split into at most "addr:port".
    ip = ip.split(":", 1)
    if len(ip) == 1:
        # A single token is either empty, an IPv4 address, or a bare port.
        if not ip[0]:
            pass
        elif validipaddr(ip[0]):
            addr = ip[0]
        elif validipport(ip[0]):
            port = int(ip[0])
        else:
            raise ValueError, ':'.join(ip) + ' is not a valid IP address/port'
    elif len(ip) == 2:
        addr, port = ip
        # NOTE(review): this raises only when the address is bad AND the
        # port is valid; a bad port falls through to int(port) below and
        # raises a bare ValueError instead.  Looks unintended -- confirm
        # before changing.
        if not validipaddr(addr) and validipport(port):
            raise ValueError, ':'.join(ip) + ' is not a valid IP address/port'
        port = int(port)
    else:
        raise ValueError, ':'.join(ip) + ' is not a valid IP address/port'
    return (addr, port)
def validaddr(string_):
    """
    Returns either (ip_address, port) or "/path/to/socket" from string_

    >>> validaddr('/path/to/socket')
    '/path/to/socket'
    >>> validaddr('8000')
    ('0.0.0.0', 8000)
    >>> validaddr('127.0.0.1')
    ('127.0.0.1', 8080)
    >>> validaddr('127.0.0.1:8000')
    ('127.0.0.1', 8000)
    >>> validip('[::1]:80')
    ('::1', 80)
    >>> validaddr('fff')
    Traceback (most recent call last):
        ...
    ValueError: fff is not a valid IP address/port
    """
    # Anything containing a slash is taken to be a unix-socket path and is
    # passed through untouched; everything else must parse as IP/port.
    if '/' in string_:
        return string_
    return validip(string_)
def urlquote(val):
    r"""
    Quotes a string for use in a URL.

    >>> urlquote('://?f=1&j=1')
    '%3A//%3Ff%3D1%26j%3D1'
    >>> urlquote(None)
    ''
    >>> urlquote(u'\u203d')
    '%E2%80%BD'
    """
    if val is None:
        return ''
    # Unicode values are UTF-8 encoded before quoting; anything else is
    # coerced through str() (Python 2 semantics).
    if isinstance(val, unicode):
        val = val.encode('utf-8')
    else:
        val = str(val)
    return urllib.quote(val)
def httpdate(date_obj):
    """
    Formats a datetime object for use in HTTP headers.

    >>> import datetime
    >>> httpdate(datetime.datetime(1970, 1, 1, 1, 1, 1))
    'Thu, 01 Jan 1970 01:01:01 GMT'
    """
    # RFC 1123 format; HTTP dates are always rendered as GMT.
    http_format = "%a, %d %b %Y %H:%M:%S GMT"
    return date_obj.strftime(http_format)
def parsehttpdate(string_):
    """
    Parses an HTTP date into a datetime object.

    >>> parsehttpdate('Thu, 01 Jan 1970 01:01:01 GMT')
    datetime.datetime(1970, 1, 1, 1, 1, 1)
    """
    # Malformed dates yield None rather than raising.
    try:
        parts = time.strptime(string_, "%a, %d %b %Y %H:%M:%S %Z")
    except ValueError:
        return None
    else:
        return datetime.datetime(*parts[:6])
def htmlquote(text):
    r"""
    Encodes `text` for raw use in HTML.

    >>> htmlquote(u"<'&\">")
    u'&lt;&#39;&amp;&quot;&gt;'
    """
    # The replacement strings here must be the HTML entities; the previous
    # version had them decoded back to the raw characters (making every
    # replace a no-op), which left output unescaped.
    text = text.replace(u"&", u"&amp;")  # Must be done first!
    text = text.replace(u"<", u"&lt;")
    text = text.replace(u">", u"&gt;")
    text = text.replace(u"'", u"&#39;")
    text = text.replace(u'"', u"&quot;")
    return text
def htmlunquote(text):
    r"""
    Decodes `text` that's HTML quoted.

    >>> htmlunquote(u'&lt;&#39;&amp;&quot;&gt;')
    u'<\'&">'
    """
    # The search strings here must be the HTML entities; the previous
    # version had them decoded back to the raw characters (making every
    # replace a no-op), which left input unquoted.
    text = text.replace(u"&quot;", u'"')
    text = text.replace(u"&#39;", u"'")
    text = text.replace(u"&gt;", u">")
    text = text.replace(u"&lt;", u"<")
    text = text.replace(u"&amp;", u"&")  # Must be done last!
    return text
def websafe(val):
    r"""Converts `val` so that it is safe for use in Unicode HTML.

    >>> websafe("<'&\">")
    u'&lt;&#39;&amp;&quot;&gt;'
    >>> websafe(None)
    u''
    >>> websafe(u'\u203d')
    u'\u203d'
    >>> websafe('\xe2\x80\xbd')
    u'\u203d'
    """
    # Normalise to unicode first (Python 2 semantics: byte strings are
    # assumed to be UTF-8), then HTML-escape via htmlquote().
    if val is None:
        return u''
    elif isinstance(val, str):
        val = val.decode('utf-8')
    elif not isinstance(val, unicode):
        val = unicode(val)
    return htmlquote(val)
if __name__ == "__main__":
    # Run this module's doctests when executed directly.
    import doctest
    doctest.testmod()
| apache-2.0 |
gangadharkadam/saloon_erp_install | erpnext/patches/v5_4/notify_system_managers_regarding_wrong_tax_calculation.py | 17 | 1431 | # Copyright (c) 2013, Web Notes Technologies Pvt. Ltd. and Contributors
# License: GNU General Public License v3. See license.txt
from __future__ import unicode_literals
import frappe
from frappe.email import sendmail_to_system_managers
from frappe.utils import get_url_to_form
def execute():
	"""Notify System Managers about documents whose taxes may be wrong.

	Scans submitted sales/purchase documents modified since 2015-02-17
	that apply a discount on Net Total, and emails links to them so a
	human can review and amend.  Nothing is corrected automatically
	because amending in bulk could corrupt the entries.

	NOTE: this file targets Python 2 (print statements below).
	"""
	wrong_records = []
	for dt in ("Quotation", "Sales Order", "Delivery Note", "Sales Invoice",
		"Purchase Order", "Purchase Receipt", "Purchase Invoice"):
		records = frappe.db.sql_list("""select name from `tab{0}`
			where apply_discount_on = 'Net Total' and ifnull(discount_amount, 0) != 0
			and modified >= '2015-02-17' and docstatus=1""".format(dt))
		if records:
			# Turn document names into clickable form URLs for the email.
			records = [get_url_to_form(dt, d) for d in records]
			wrong_records.append([dt, records])
	if wrong_records:
		content = """Dear System Manager,
Due to an error related to Discount Amount on Net Total, tax calculation might be wrong in the following records. We did not fix the tax amount automatically because it can corrupt the entries, so we request you to check these records and amend if you found the calculation wrong.
Please check following Entries:
%s
Regards,
Administrator""" % "\n".join([(d[0] + ": " + ", ".join(d[1])) for d in wrong_records])
		try:
			sendmail_to_system_managers("[Important] [ERPNext] Tax calculation might be wrong, please check.", content)
		except:
			# Best-effort: a mail failure must not abort the patch; the
			# details are still printed to the console below.
			pass
		print "="*50
		print content
		print "="*50
print "="*50 | agpl-3.0 |
MKaptein/streamingbandit | app/app.py | 2 | 4233 | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
# Imports of external stuff that is needed
import tornado.escape
import tornado.ioloop
import tornado.web
import logging
import os,yaml
import logging.handlers
# import scheduling ingredients
from apscheduler.schedulers.tornado import TornadoScheduler
from core.jobs import *
# import Streampy classes
from handlers import corehandlers
from handlers import adminhandlers
from handlers import statshandlers
from handlers import loginhandlers
from handlers import evalhandlers
from handlers import basehandler
from pymongo import MongoClient
from redis import Redis
import builtins
# Load the environment-specific settings block from config.cfg next to this
# file.  The environment name comes from the `env` variable (default:
# 'default').  Using `with` guarantees the file handle is closed even if
# yaml.load raises (the old open()/close() pair leaked on error).
dir = os.path.dirname(__file__)
env = os.getenv('env', 'default')
with open(os.path.join(dir, 'config.cfg'), 'r') as f:
    config = yaml.load(f, Loader=yaml.FullLoader)
settings = config[env]
########## Logging ##########
formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')

# Two loggers: Tornado's request/access logger and the application logger.
access_log = logging.getLogger("tornado.access")
access_log.setLevel(settings["log.level"])
app_log = logging.getLogger("myLogger")
app_log.setLevel(settings["log.level"])

# Console handler may run at a different level than the file handlers.
ch = logging.StreamHandler()
ch.setLevel(settings["log.console.level"])
ch.setFormatter(formatter)

# Rotating log files: at most 2 backups of maxBytes each.
logHandlerAccess = logging.handlers.RotatingFileHandler(settings["log.access"], maxBytes=4096, backupCount=2)
logHandlerApp = logging.handlers.RotatingFileHandler(settings["log.app"], maxBytes=4096, backupCount=2)
logHandlerAccess.setFormatter(formatter)
logHandlerApp.setFormatter(formatter)

# Each logger writes to its own file and also echoes to the console.
access_log.addHandler(logHandlerAccess)
access_log.addHandler(ch)
app_log.addHandler(logHandlerApp)
app_log.addHandler(ch)

app_log.info("Starting application {0}".format( settings["listen.port"]))
########## Handlers ##########
# URL routing table: one entry per REST endpoint, grouped by API area.
# The (?i) prefix makes every route case-insensitive.
urls = [
    # Core API
    (r"(?i)/getaction/(?P<exp_id>\w+)", corehandlers.ActionHandler),
    (r"(?i)/setreward/(?P<exp_id>\w+)", corehandlers.RewardHandler),
    # Adminstration API
    (r"(?i)/exp", adminhandlers.GenerateExperiments),
    (r"(?i)/exp/defaults", adminhandlers.ListDefaults),
    (r"(?i)/exp/defaults/(?P<default_id>\w+)", adminhandlers.GetDefault),
    (r"(?i)/exp/(?P<exp_id>\w+)", adminhandlers.UpdateExperiment),
    (r"(?i)/exp/(?P<exp_id>\w+)/resetexperiment", adminhandlers.ResetExperiment),
    (r"(?i)/user", adminhandlers.AddUser),
    # Statistics API
    (r"(?i)/stats/(?P<exp_id>\w+)/currenttheta", statshandlers.GetCurrentTheta),
    (r"(?i)/stats/(?P<exp_id>\w+)/hourlytheta", statshandlers.GetHourlyTheta),
    (r"(?i)/stats/(?P<exp_id>\w+)/log", statshandlers.GetLog),
    (r"(?i)/stats/(?P<exp_id>\w+)/actionlog", statshandlers.GetActionLog),
    (r"(?i)/stats/(?P<exp_id>\w+)/rewardlog", statshandlers.GetRewardLog),
    (r"(?i)/stats/(?P<exp_id>\w+)/simulationlog", statshandlers.GetSimulationLog),
    (r"(?i)/stats/(?P<exp_id>\w+)/summary", statshandlers.GetSummary),
    # Login API
    (r"(?i)/login", loginhandlers.LogInHandler),
    (r"(?i)/logout", loginhandlers.LogOutHandler),
    # Simulation API
    (r"(?i)/eval/(?P<exp_id>\w+)/simulate", evalhandlers.Simulate),
    # Index
    (r"(?i)/", basehandler.IndexHandler)
    ]

# Instantiate DB clients
redis_server = Redis(settings['redis_ip'], settings['redis_port'], decode_responses = True)
mongo_client = MongoClient(settings['mongo_ip'], settings['mongo_port'])

# Tornado application settings; the DB clients are passed through so
# handlers can reach them via self.settings.
# NOTE(review): the cookie secret is hard-coded -- it should come from the
# config file for any real deployment.
tornado_config = dict({
    "template_path": os.path.join(os.path.dirname(__file__),"templates"),
    "debug": True, # Should get from config?
    "cookie_secret":"12",
    "default_handler_class":basehandler.BaseHandler,
    "redis_server" : redis_server,
    "mongo_client" : mongo_client
    })

# Also exposed as a builtin so non-handler modules can reach the clients.
builtins.tornado_config = tornado_config

application = tornado.web.Application(urls,**tornado_config)
def main():
    """Start the background scheduler and the Tornado server (blocks)."""
    # Use the above instantiated scheduler
    # Set Tornado Scheduler
    scheduler = TornadoScheduler()
    # Use the imported jobs, every 60 minutes
    scheduler.add_job(log_theta, 'interval', minutes=60, misfire_grace_time=3600)
    scheduler.add_job(advice_time_out, 'interval', minutes=60, misfire_grace_time=3600)
    scheduler.start()
    application.listen(settings["listen.port"])
    tornado.ioloop.IOLoop.instance().start()

# Starting Server:
if __name__ == "__main__":
    main()
| mit |
makiftasova/hangoutsbot | hangupsbot/plugins/monitoradds.py | 4 | 3384 | """
Plugin for monitoring new adds to HOs and alerting if users were not added by an admin or mod.
Add mods to the config.json file either globally or on an individual HO basis.
Add a "watch_new_adds": true parameter to individual HOs in the config.json file.
Author: @Riptides
"""
import logging
import hangups
import plugins
logger = logging.getLogger(__name__)
def _initialise(bot):
    # Plugin entry point: watch membership changes and expose the two
    # admin-only moderator management commands.
    plugins.register_handler(_watch_new_adds, type="membership")
    plugins.register_admin_command(["addmod", "delmod"])
def _watch_new_adds(bot, event, command):
    """Warn the conversation when a non-admin/non-mod adds users to a HO."""
    # Check if watching for new adds is enabled
    if not bot.get_config_suboption(event.conv_id, 'watch_new_adds'):
        return

    # Generate list of added or removed users
    event_users = [event.conv.get_user(user_id) for user_id
                   in event.conv_event.participant_ids]
    names = ', '.join([user.full_name for user in event_users])

    # JOIN
    if event.conv_event.type_ == hangups.MembershipChangeType.JOIN:
        # Check if the user who added people is a mod or admin
        admins_list = bot.get_config_suboption(event.conv_id, 'admins')
        if event.user_id.chat_id in admins_list:
            return

        # Moderators may come from config ('mods') or from conversation
        # tags ('mod' / 'botkeeper').
        config_mods = bot.get_config_suboption(event.conv_id, 'mods') or []
        tagged_mods = list(bot.tags.userlist(event.conv_id, "mod").keys())
        tagged_botkeeper = list(bot.tags.userlist(event.conv_id, "botkeeper").keys())

        mods_list = config_mods + tagged_mods + tagged_botkeeper

        try:
            if event.user_id.chat_id in mods_list:
                return
        except TypeError:
            # The mods are likely not configured. Continuing...
            pass

        html = _("<b>!!! WARNING !!!</b><br />"
                 "<br />"
                 "<b>{0}</b> invited <b>{1}</b> without authorization.<br />"
                 "<br />"
                 "<b>{1}</b>: Please leave this hangout and ask a moderator to add you. "
                 "Thank you for your understanding.").format(event.user.full_name, names)

        yield from bot.coro_send_message(event.conv, html)
def addmod(bot, event, *args):
    """add user id(s) to the whitelist of who can add to a hangout"""
    # Start with the newly supplied ids, then keep any moderators that are
    # already configured.  (The previous version duplicated the whole
    # set/save/notify sequence in both branches of a `!= None` check.)
    mod_ids = list(args)
    existing = bot.get_config_suboption(event.conv_id, 'mods')
    if existing is not None:
        mod_ids.extend(existing)

    bot.config.set_by_path(["mods"], mod_ids)
    bot.config.save()

    # NOTE(review): only the first id is echoed back even when several were
    # added -- preserved from the original behaviour.
    html_message = _("<i>Moderators updated: {} added</i>")
    yield from bot.coro_send_message(event.conv, html_message.format(args[0]))
def delmod(bot, event, *args):
    """remove user id(s) from the whitelist of who can add to a hangout"""
    # Fetch the configured moderators once (the previous version queried
    # the config twice).
    mods = bot.get_config_option('mods')
    if not mods:
        return

    # Keep every moderator that was not asked to be removed.
    # NOTE(review): like the original, only args[0] is honoured even though
    # the docstring mentions id(s).
    mods_new = [mod for mod in mods if mod != args[0]]

    bot.config.set_by_path(["mods"], mods_new)
    bot.config.save()

    html_message = _("<i>Moderators updated: {} removed</i>")
    yield from bot.coro_send_message(event.conv, html_message.format(args[0]))
| agpl-3.0 |
pulilab/rapidpro | temba/orgs/tests.py | 1 | 148553 | from __future__ import unicode_literals
import json
import nexmo
import pytz
import six
from bs4 import BeautifulSoup
from context_processors import GroupPermWrapper
from datetime import timedelta
from dateutil.relativedelta import relativedelta
from decimal import Decimal
from django.conf import settings
from django.contrib.auth.models import User, Group
from django.core import mail
from django.core.exceptions import ValidationError
from django.core.urlresolvers import reverse
from django.http import HttpRequest
from django.test.utils import override_settings
from django.utils import timezone
from mock import patch, Mock
from smartmin.tests import SmartminTest
from temba.airtime.models import AirtimeTransfer
from temba.api.models import APIToken, Resthook
from temba.campaigns.models import Campaign, CampaignEvent
from temba.channels.models import Channel
from temba.contacts.models import Contact, ContactGroup, ContactURN, TEL_SCHEME, TWITTER_SCHEME
from temba.flows.models import Flow, ActionSet
from temba.locations.models import AdminBoundary
from temba.middleware import BrandingMiddleware
from temba.msgs.models import Label, Msg, INCOMING
from temba.orgs.models import UserSettings, NEXMO_SECRET, NEXMO_KEY
from temba.tests import TembaTest, MockResponse, MockTwilioClient, MockRequestValidator, FlowFileTest
from temba.triggers.models import Trigger
from temba.utils.email import link_components
from temba.utils import languages, dict_to_struct
from .models import Org, OrgEvent, TopUp, Invitation, Language, DAYFIRST, MONTHFIRST, CURRENT_EXPORT_VERSION
from .models import CreditAlert, ORG_CREDIT_OVER, ORG_CREDIT_LOW, ORG_CREDIT_EXPIRING
from .models import UNREAD_FLOW_MSGS, UNREAD_INBOX_MSGS, TopUpCredits
from .models import WHITELISTED, SUSPENDED, RESTORED
from .tasks import squash_topupcredits
class OrgContextProcessorTest(TembaTest):
    """Tests for GroupPermWrapper, used by the org template context processor."""

    def test_group_perms_wrapper(self):
        # Each default auth group should expose exactly the permissions it
        # grants, keyed as wrapper[app][permission].
        administrators = Group.objects.get(name="Administrators")
        editors = Group.objects.get(name="Editors")
        viewers = Group.objects.get(name="Viewers")

        administrators_wrapper = GroupPermWrapper(administrators)
        self.assertTrue(administrators_wrapper['msgs']['msg_api'])
        self.assertTrue(administrators_wrapper["msgs"]["msg_inbox"])

        editors_wrapper = GroupPermWrapper(editors)
        self.assertFalse(editors_wrapper["msgs"]["org_plan"])
        self.assertTrue(editors_wrapper["msgs"]["msg_inbox"])

        viewers_wrapper = GroupPermWrapper(viewers)
        self.assertFalse(viewers_wrapper["msgs"]["msg_api"])
        self.assertTrue(viewers_wrapper["msgs"]["msg_inbox"])
class OrgTest(TembaTest):
    def test_get_org_users(self):
        """get_org_users returns every org user, ordered by email."""
        org_users = self.org.get_org_users()
        self.assertTrue(self.user in org_users)
        self.assertTrue(self.surveyor in org_users)
        self.assertTrue(self.editor in org_users)
        self.assertTrue(self.admin in org_users)

        # should be ordered by email
        self.assertEqual(self.admin, org_users[0])
        self.assertEqual(self.editor, org_users[1])
        self.assertEqual(self.surveyor, org_users[2])
        self.assertEqual(self.user, org_users[3])

    def test_get_unique_slug(self):
        """get_unique_slug slugifies and de-duplicates against existing orgs."""
        self.org.slug = 'allo'
        self.org.save()

        self.assertEqual(Org.get_unique_slug('foo'), 'foo')
        self.assertEqual(Org.get_unique_slug('Which part?'), 'which-part')
        # 'allo' is taken by self.org, so a numeric suffix is appended.
        self.assertEqual(Org.get_unique_slug('Allo'), 'allo-2')

    def test_languages(self):
        """set_languages replaces the org languages and primary language."""
        self.assertEqual(self.org.get_language_codes(), set())

        self.org.set_languages(self.admin, ['eng', 'fre'], 'eng')
        self.org.refresh_from_db()

        self.assertEqual({l.name for l in self.org.languages.all()}, {"English", "French"})
        self.assertEqual(self.org.primary_language.name, "English")
        self.assertEqual(self.org.get_language_codes(), {'eng', 'fre'})

        # Setting again fully replaces the previous language set.
        self.org.set_languages(self.admin, ['eng', 'kin'], 'kin')
        self.org.refresh_from_db()

        self.assertEqual({l.name for l in self.org.languages.all()}, {"English", "Kinyarwanda"})
        self.assertEqual(self.org.primary_language.name, "Kinyarwanda")
        self.assertEqual(self.org.get_language_codes(), {'eng', 'kin'})
    def test_get_channel_countries(self):
        """get_channel_countries lists countries for transferto-capable channels."""
        self.assertEqual(self.org.get_channel_countries(), [])

        # Connecting transferto makes the org's channel countries relevant.
        self.org.connect_transferto('mylogin', 'api_token', self.admin)

        self.assertEqual(self.org.get_channel_countries(), [dict(code='RW', name='Rwanda', currency_name='Rwanda Franc',
                                                                 currency_code='RWF')])

        Channel.create(self.org, self.user, 'US', 'A', None, "+12001112222", gcm_id="asdf", secret="asdf")

        self.assertEqual(self.org.get_channel_countries(), [dict(code='RW', name='Rwanda', currency_name='Rwanda Franc',
                                                                 currency_code='RWF'),
                                                            dict(code='US', name='United States',
                                                                 currency_name='US Dollar', currency_code='USD')])

        # A Twitter channel has no country, so the list is unchanged.
        Channel.create(self.org, self.user, None, 'TT', name="Twitter Channel",
                       address="billy_bob", role="SR", scheme='twitter')

        self.assertEqual(self.org.get_channel_countries(), [dict(code='RW', name='Rwanda', currency_name='Rwanda Franc',
                                                                 currency_code='RWF'),
                                                            dict(code='US', name='United States',
                                                                 currency_name='US Dollar', currency_code='USD')])

        # A second US channel must not produce a duplicate country entry.
        Channel.create(self.org, self.user, 'US', 'A', None, "+12001113333", gcm_id="qwer", secret="qwer")

        self.assertEqual(self.org.get_channel_countries(), [dict(code='RW', name='Rwanda', currency_name='Rwanda Franc',
                                                                 currency_code='RWF'),
                                                            dict(code='US', name='United States',
                                                                 currency_name='US Dollar', currency_code='USD')])
    def test_edit(self):
        """The org edit view validates and saves name and slug."""
        # use a manager now
        self.login(self.admin)

        # can we see the edit page
        response = self.client.get(reverse('orgs.org_edit'))
        self.assertEquals(200, response.status_code)

        # update the name and slug of the organization
        # (a slug containing a space should be rejected)
        data = dict(name="Temba", timezone="Africa/Kigali", date_format=DAYFIRST, slug="nice temba")
        response = self.client.post(reverse('orgs.org_edit'), data)
        self.assertTrue('slug' in response.context['form'].errors)

        data = dict(name="Temba", timezone="Africa/Kigali", date_format=MONTHFIRST, slug="nice-temba")
        response = self.client.post(reverse('orgs.org_edit'), data)
        self.assertEquals(302, response.status_code)

        org = Org.objects.get(pk=self.org.pk)
        self.assertEquals("Temba", org.name)
        self.assertEquals("nice-temba", org.slug)
    def test_recommended_channel(self):
        """get_recommended_channel picks a channel type based on org timezone."""
        self.org.timezone = pytz.timezone('Africa/Nairobi')
        self.org.save()
        self.assertEquals(self.org.get_recommended_channel(), 'africastalking')

        self.org.timezone = pytz.timezone('America/Phoenix')
        self.org.save()
        self.assertEquals(self.org.get_recommended_channel(), 'twilio')

        self.org.timezone = pytz.timezone('Asia/Jakarta')
        self.org.save()
        self.assertEquals(self.org.get_recommended_channel(), 'hub9')

        self.org.timezone = pytz.timezone('Africa/Mogadishu')
        self.org.save()
        self.assertEquals(self.org.get_recommended_channel(), 'shaqodoon')

        self.org.timezone = pytz.timezone('Europe/Amsterdam')
        self.org.save()
        self.assertEquals(self.org.get_recommended_channel(), 'nexmo')

        # Timezones with no aggregator mapping fall back to android.
        self.org.timezone = pytz.timezone('Africa/Kigali')
        self.org.save()
        self.assertEquals(self.org.get_recommended_channel(), 'android')
    def test_country(self):
        """The country view sets/clears the org country; code falls back to channels."""
        country_url = reverse('orgs.org_country')

        # can't see this page if not logged in
        self.assertLoginRedirect(self.client.get(country_url))

        # login as admin instead
        self.login(self.admin)
        response = self.client.get(country_url)
        self.assertEquals(200, response.status_code)

        # save with Rwanda as a country
        response = self.client.post(country_url, dict(country=AdminBoundary.objects.get(name='Rwanda').pk))

        # assert it has changed
        org = Org.objects.get(pk=self.org.pk)
        self.assertEqual("Rwanda", six.text_type(org.country))
        self.assertEqual("RW", org.get_country_code())

        # set our admin boundary name to something invalid
        org.country.name = 'Fantasia'
        org.country.save()

        # getting our country code show now back down to our channel
        self.assertEqual('RW', org.get_country_code())

        # clear it out
        self.client.post(country_url, dict(country=''))

        # assert it has been
        org = Org.objects.get(pk=self.org.pk)
        self.assertFalse(org.country)
        # country code still derived from the org's channel
        self.assertEquals('RW', org.get_country_code())

        # remove all our channels so we no longer have a backdown
        org.channels.all().delete()
        org = Org.objects.get(pk=self.org.pk)

        # now really don't have a clue of our country code
        self.assertIsNone(org.get_country_code())
    def test_plans(self):
        """A new org gets a free 1,000-credit topup with a one-month plan window."""
        self.contact = self.create_contact("Joe", "+250788123123")
        self.create_msg(direction=INCOMING, contact=self.contact, text="Orange")

        # check start and end date for this plan
        self.assertEquals(timezone.now().date(), self.org.current_plan_start())
        self.assertEquals(timezone.now().date() + relativedelta(months=1), self.org.current_plan_end())

        # check our credits
        self.login(self.admin)
        response = self.client.get(reverse('orgs.org_home'))
        self.assertContains(response, "999")

        # view our topups
        response = self.client.get(reverse('orgs.topup_list'))

        # should say we have a 1,000 credits too
        self.assertContains(response, "999")

        # and that we have 999 credits left on our topup
        self.assertContains(response, "1 of 1,000 Credits Used")

        # our receipt should show that the topup was free
        # (stripe is mocked so no external call is made)
        with patch('stripe.Charge.retrieve') as stripe:
            stripe.return_value = ''
            response = self.client.get(reverse('orgs.topup_read', args=[TopUp.objects.filter(org=self.org).first().pk]))
            self.assertContains(response, '1000 Credits')
def test_user_update(self):
    """The user-edit page requires login and persists the chosen interface language."""
    update_url = reverse('orgs.user_edit')
    login_url = reverse('users.user_login')

    # no access if anonymous
    response = self.client.get(update_url)
    self.assertRedirect(response, login_url)

    self.login(self.admin)

    # change the user language
    post_data = dict(language='pt-br', first_name='Admin', last_name='User', email='administrator@temba.com', current_password='Administrator')
    response = self.client.post(update_url, post_data)
    self.assertRedirect(response, reverse('orgs.org_home'))

    # check that our user settings have changed
    settings = self.admin.get_settings()
    self.assertEqual('pt-br', settings.language)
def test_usersettings(self):
    """Phone number on user settings is validated and stored in formatted form."""
    self.login(self.admin)

    # a valid number is accepted and formatted for display
    post_data = dict(tel='+250788382382')
    self.client.post(reverse('orgs.usersettings_phone'), post_data)
    self.assertEqual('+250 788 382 382', UserSettings.objects.get(user=self.admin).get_tel_formatted())

    # an invalid number is rejected with a form error
    post_data = dict(tel='bad number')
    response = self.client.post(reverse('orgs.usersettings_phone'), post_data)
    self.assertEqual(response.context['form'].errors['tel'][0], 'Invalid phone number, try again.')
def test_org_suspension(self):
    """Suspended orgs can't send broadcasts or start flows (UI or API); restoring re-enables them."""
    from temba.flows.models import FlowRun

    self.login(self.admin)
    self.org.set_suspended()
    self.org.refresh_from_db()

    self.assertEqual(True, self.org.is_suspended())
    self.assertEqual(0, Msg.objects.all().count())
    self.assertEqual(0, FlowRun.objects.all().count())

    # while we are suspended, we can't send broadcasts
    send_url = reverse('msgs.broadcast_send')
    mark = self.create_contact('Mark', number='+12065551212')
    post_data = dict(text="send me ur bank account login im ur friend.", omnibox="c-%s" % mark.uuid)
    response = self.client.post(send_url, post_data, follow=True)
    self.assertEqual('Sorry, your account is currently suspended. To enable sending messages, please contact support.',
                     response.context['form'].errors['__all__'][0])

    # we also can't start flows
    flow = self.create_flow()
    post_data = dict(omnibox="c-%s" % mark.uuid, restart_participants='on')
    response = self.client.post(reverse('flows.flow_broadcast', args=[flow.pk]), post_data, follow=True)
    self.assertEqual('Sorry, your account is currently suspended. To enable sending messages, please contact support.',
                     response.context['form'].errors['__all__'][0])

    # or use the api to do either
    def post_api(url, data):
        # helper: POST JSON to an API endpoint as if over HTTPS
        response = self.client.post(url + ".json", json.dumps(data), content_type="application/json", HTTP_X_FORWARDED_HTTPS='https')
        if response.content:
            response.json = response.json()
        return response

    url = reverse('api.v2.broadcasts')
    response = post_api(url, dict(contacts=[mark.uuid], text="You are a distant cousin to a wealthy person."))
    self.assertContains(response, "Sorry, your account is currently suspended. To enable sending messages, please contact support.", status_code=400)

    url = reverse('api.v2.flow_starts')
    response = post_api(url, dict(flow=flow.uuid, urns=["tel:+250788123123"]))
    self.assertContains(response, "Sorry, your account is currently suspended. To enable sending messages, please contact support.", status_code=400)

    # still no messages or runs
    self.assertEqual(0, Msg.objects.all().count())
    self.assertEqual(0, FlowRun.objects.all().count())

    # unsuspend our org and start a flow
    self.org.set_restored()
    post_data = dict(omnibox="c-%s" % mark.uuid, restart_participants='on')
    response = self.client.post(reverse('flows.flow_broadcast', args=[flow.pk]), post_data, follow=True)
    self.assertEqual(1, FlowRun.objects.all().count())
def test_webhook_headers(self):
    """Webhook URL and custom headers can be configured from the org webhook page."""
    update_url = reverse('orgs.org_webhook')
    login_url = reverse('users.user_login')

    # no access if anonymous
    response = self.client.get(update_url)
    self.assertRedirect(response, login_url)

    self.login(self.admin)
    response = self.client.get(update_url)
    self.assertEqual(200, response.status_code)

    # set a webhook with headers
    post_data = response.context['form'].initial
    post_data['webhook'] = 'http://webhooks.uniceflabs.org'
    post_data['header_1_key'] = 'Authorization'
    post_data['header_1_value'] = 'Authorization: Basic QWxhZGRpbjpvcGVuIHNlc2FtZQ=='

    response = self.client.post(update_url, post_data)
    self.assertEqual(302, response.status_code)
    self.assertRedirect(response, reverse('orgs.org_home'))

    # check that our webhook settings have changed
    org = Org.objects.get(pk=self.org.pk)
    self.assertEqual('http://webhooks.uniceflabs.org', org.get_webhook_url())
    self.assertDictEqual({'Authorization': 'Authorization: Basic QWxhZGRpbjpvcGVuIHNlc2FtZQ=='}, org.get_webhook_headers())
def test_org_administration(self):
    """Org manage/update pages are superuser-only and support suspend/whitelist/restore transitions."""
    manage_url = reverse('orgs.org_manage')
    update_url = reverse('orgs.org_update', args=[self.org.pk])
    login_url = reverse('users.user_login')

    # no access to anon
    response = self.client.get(manage_url)
    self.assertRedirect(response, login_url)

    response = self.client.get(update_url)
    self.assertRedirect(response, login_url)

    # or admins
    self.login(self.admin)

    response = self.client.get(manage_url)
    self.assertRedirect(response, login_url)

    response = self.client.get(update_url)
    self.assertRedirect(response, login_url)

    # only superuser
    self.login(self.superuser)

    response = self.client.get(manage_url)
    self.assertEqual(200, response.status_code)
    self.assertNotContains(response, "(Suspended)")

    self.org.set_suspended()
    response = self.client.get(manage_url)
    self.assertContains(response, "(Suspended)")

    # should contain our test org
    self.assertContains(response, "Temba")

    # and can go to that org
    response = self.client.get(update_url)
    self.assertEqual(200, response.status_code)

    # change to the trial plan
    post_data = {
        'name': 'Temba',
        'brand': 'rapidpro.io',
        'plan': 'TRIAL',
        'language': '',
        'country': '',
        'primary_language': '',
        'timezone': pytz.timezone("Africa/Kigali"),
        'config': '{}',
        'date_format': 'D',
        'webhook': None,
        'webhook_events': 0,
        'parent': '',
        'viewers': [self.user.id],
        'editors': [self.editor.id],
        'administrators': [self.admin.id],
        'surveyors': [self.surveyor.id],
        'surveyor_password': None
    }

    response = self.client.post(update_url, post_data)
    self.assertEqual(302, response.status_code)

    # restore
    post_data['status'] = RESTORED
    response = self.client.post(update_url, post_data)
    self.org.refresh_from_db()
    self.assertFalse(self.org.is_suspended())

    # white list
    post_data['status'] = WHITELISTED
    response = self.client.post(update_url, post_data)
    self.org.refresh_from_db()
    self.assertTrue(self.org.is_whitelisted())

    # suspend
    post_data['status'] = SUSPENDED
    response = self.client.post(update_url, post_data)
    self.org.refresh_from_db()
    self.assertTrue(self.org.is_suspended())
def test_accounts(self):
    """Surveyor passwords must be globally unique; the formax summary lists per-role user counts."""
    accounts_url = reverse('orgs.org_accounts')

    self.login(self.admin)
    response = self.client.get(accounts_url)
    self.assertEqual(response.status_code, 200)
    self.assertContains(response, 'If you use the RapidPro Surveyor application to run flows offline')

    # another org has already claimed this surveyor password
    Org.objects.create(name="Another Org", timezone="Africa/Kigali", country=self.country,
                       brand='rapidpro.io', created_by=self.user, modified_by=self.user,
                       surveyor_password='nyaruka')

    # so trying to reuse it is rejected and nothing is saved
    response = self.client.post(accounts_url, {'surveyor_password': 'nyaruka'})
    self.org.refresh_from_db()
    self.assertContains(response, 'This password is not valid. Choose a new password and try again.')
    self.assertIsNone(self.org.surveyor_password)

    # a password nobody else uses is accepted
    response = self.client.post(accounts_url, {'surveyor_password': 'unique password'})
    self.org.refresh_from_db()
    self.assertEqual('unique password', self.org.surveyor_password)

    # add a second editor and drop our surveyor user
    second_editor = self.create_user('EditorTwo')
    self.org.editors.add(second_editor)
    self.surveyor.delete()

    # fetch it as a formax so we can inspect the summary
    response = self.client.get(accounts_url, HTTP_X_FORMAX=1, HTTP_X_PJAX=1)
    self.assertContains(response, '1 Administrator')
    self.assertContains(response, '2 Editors')
    self.assertContains(response, '1 Viewer')
    self.assertContains(response, '0 Surveyors')
def test_refresh_tokens(self):
    """Refreshing an API token deactivates the old key, issues a new one, and is admin-only."""
    self.login(self.admin)
    home_url = reverse('orgs.org_home')
    response = self.client.get(home_url)

    # the admin starts with a token, shown on the home page
    original_token = APIToken.objects.get(user=self.admin)
    self.assertContains(response, original_token.key)

    # refresh it
    self.client.post(reverse('api.apitoken_refresh'))

    # the home page no longer shows the old key
    response = self.client.get(home_url)
    self.assertNotContains(response, original_token.key)

    # and the old token has been deactivated
    original_token.refresh_from_db()
    self.assertFalse(original_token.is_active)

    # a fresh active token exists with a different key and appears on the page
    replacement_token = APIToken.objects.get(user=self.admin, is_active=True)
    self.assertNotEqual(replacement_token.key, original_token.key)
    self.assertContains(response, replacement_token.key)

    # viewers can't refresh
    self.login(self.user)
    response = self.client.post(reverse('api.apitoken_refresh'))
    self.assertLoginRedirect(response)

    # neither can users outside the org
    self.login(self.non_org_user)
    response = self.client.post(reverse('api.apitoken_refresh'))
    self.assertLoginRedirect(response)
@override_settings(SEND_EMAILS=True)
def test_manage_accounts(self):
    """Role checkboxes update org membership and API tokens; invites are created, resent and listed."""
    url = reverse('orgs.org_manage_accounts')

    self.login(self.admin)

    response = self.client.get(url)
    self.assertEqual(response.status_code, 200)

    # give users an API token and give admin and editor an additional surveyor-role token
    APIToken.get_or_create(self.org, self.admin)
    APIToken.get_or_create(self.org, self.editor)
    APIToken.get_or_create(self.org, self.surveyor)
    APIToken.get_or_create(self.org, self.admin, role=Group.objects.get(name="Surveyors"))
    APIToken.get_or_create(self.org, self.editor, role=Group.objects.get(name="Surveyors"))

    # we have 19 fields in the form including 16 checkboxes for the four users, an email field, a user group field
    # and 'loc' field.
    expected_fields = {'invite_emails', 'invite_group', 'loc'}
    for user in (self.surveyor, self.user, self.editor, self.admin):
        for group in ('administrators', 'editors', 'viewers', 'surveyors'):
            expected_fields.add(group + '_%d' % user.pk)

    self.assertEqual(set(response.context['form'].fields.keys()), expected_fields)
    self.assertEqual(response.context['form'].initial, {
        'administrators_%d' % self.admin.pk: True,
        'editors_%d' % self.editor.pk: True,
        'viewers_%d' % self.user.pk: True,
        'surveyors_%d' % self.surveyor.pk: True
    })
    self.assertEqual(response.context['form'].fields['invite_emails'].initial, None)
    self.assertEqual(response.context['form'].fields['invite_group'].initial, 'V')

    # keep admin as admin, editor as editor, but make user an editor too, and remove surveyor
    post_data = {
        'administrators_%d' % self.admin.pk: 'on',
        'editors_%d' % self.editor.pk: 'on',
        'editors_%d' % self.user.pk: 'on',
        'invite_emails': "",
        'invite_group': "V"
    }
    response = self.client.post(url, post_data)
    self.assertRedirect(response, reverse('orgs.org_manage_accounts'))

    self.org.refresh_from_db()
    self.assertEqual(set(self.org.administrators.all()), {self.admin})
    self.assertEqual(set(self.org.editors.all()), {self.user, self.editor})
    # was assertFalse(..., set()) which only checked falsiness (second arg is the failure message)
    self.assertEqual(set(self.org.viewers.all()), set())
    self.assertEqual(set(self.org.surveyors.all()), set())

    # our surveyor's API token will have been deleted
    self.assertEqual(self.admin.api_tokens.filter(is_active=True).count(), 2)
    self.assertEqual(self.editor.api_tokens.filter(is_active=True).count(), 2)
    self.assertEqual(self.surveyor.api_tokens.filter(is_active=True).count(), 0)

    # next we leave existing roles unchanged, but try to invite new user to be admin with invalid email address
    post_data['invite_emails'] = "norkans7gmail.com"
    post_data['invite_group'] = 'A'
    response = self.client.post(url, post_data)

    self.assertFormError(response, 'form', 'invite_emails', "One of the emails you entered is invalid.")

    # try again with valid email
    post_data['invite_emails'] = "norkans7@gmail.com"
    response = self.client.post(url, post_data)
    self.assertRedirect(response, reverse('orgs.org_manage_accounts'))

    # an invitation is created
    invitation = Invitation.objects.get()
    self.assertEqual(invitation.org, self.org)
    self.assertEqual(invitation.email, "norkans7@gmail.com")
    self.assertEqual(invitation.user_group, "A")

    # and sent by email
    self.assertEqual(len(mail.outbox), 1)

    # pretend our invite was acted on
    invitation.is_active = False
    invitation.save()

    # send another invitation, different group
    post_data['invite_emails'] = "norkans7@gmail.com"
    post_data['invite_group'] = 'E'
    self.client.post(url, post_data)

    # old invite should be updated
    invitation.refresh_from_db()
    self.assertEqual(invitation.user_group, 'E')
    self.assertTrue(invitation.is_active)

    # and new email sent
    self.assertEqual(len(mail.outbox), 2)

    # include multiple emails on the form
    post_data['invite_emails'] = "norbert@temba.com,code@temba.com"
    post_data['invite_group'] = 'A'
    self.client.post(url, post_data)

    # now 2 new invitations are created and sent
    self.assertEqual(Invitation.objects.all().count(), 3)
    self.assertEqual(len(mail.outbox), 4)

    response = self.client.get(url)

    # user ordered by email
    self.assertEqual(list(response.context['org_users']), [self.admin, self.editor, self.user])

    # invites ordered by email as well
    self.assertEqual(response.context['invites'][0].email, 'code@temba.com')
    self.assertEqual(response.context['invites'][1].email, 'norbert@temba.com')
    self.assertEqual(response.context['invites'][2].email, 'norkans7@gmail.com')

    # finally downgrade the editor to a surveyor and remove ourselves entirely from this org
    response = self.client.post(url, {
        'editors_%d' % self.user.pk: 'on',
        'surveyors_%d' % self.editor.pk: 'on',
        'invite_emails': "",
        'invite_group': 'V'
    })

    # we should be redirected to chooser page
    self.assertRedirect(response, reverse('orgs.org_choose'))

    # and removed from this org
    self.org.refresh_from_db()
    self.assertEqual(set(self.org.administrators.all()), set())
    self.assertEqual(set(self.org.editors.all()), {self.user})
    self.assertEqual(set(self.org.viewers.all()), set())
    self.assertEqual(set(self.org.surveyors.all()), {self.editor})

    # editor will have lost their editor API token, but not their surveyor token
    self.editor.refresh_from_db()
    self.assertEqual([t.role.name for t in self.editor.api_tokens.filter(is_active=True)], ["Surveyors"])

    # and all our API tokens for the admin are deleted
    self.admin.refresh_from_db()
    self.assertEqual(self.admin.api_tokens.filter(is_active=True).count(), 0)
@patch('temba.utils.email.send_temba_email')
def test_join(self, mock_send_temba_email):
    """Joining via an invitation adds the user to the matching role group and deactivates the invite."""
    def create_invite(group):
        # helper: fresh invitation to this org for the given single-letter user-group code
        return Invitation.objects.create(org=self.org,
                                         user_group=group,
                                         email="norkans7@gmail.com",
                                         created_by=self.admin,
                                         modified_by=self.admin)

    editor_invitation = create_invite('E')
    editor_invitation.send_invitation()
    email_args = mock_send_temba_email.call_args[0]  # all positional args

    self.assertEqual(email_args[0], "RapidPro Invitation")
    # args[1] and args[2] are two rendered bodies (presumably text and HTML — confirm against
    # send_temba_email's signature); both must carry the join link and have no unrendered braces
    self.assertIn('https://app.rapidpro.io/org/join/%s/' % editor_invitation.secret, email_args[1])
    self.assertNotIn('{{', email_args[1])
    self.assertIn('https://app.rapidpro.io/org/join/%s/' % editor_invitation.secret, email_args[2])
    self.assertNotIn('{{', email_args[2])

    editor_join_url = reverse('orgs.org_join', args=[editor_invitation.secret])

    self.client.logout()

    # if no user is logged we redirect to the create_login page
    response = self.client.get(editor_join_url)
    self.assertEqual(302, response.status_code)
    response = self.client.get(editor_join_url, follow=True)
    self.assertEqual(response.request['PATH_INFO'], reverse('orgs.org_create_login', args=[editor_invitation.secret]))

    # a user is already logged in
    self.invited_editor = self.create_user("InvitedEditor")
    self.login(self.invited_editor)

    response = self.client.get(editor_join_url)
    self.assertEqual(200, response.status_code)
    self.assertEqual(self.org.pk, response.context['org'].pk)

    # we have a form without field except one 'loc'
    self.assertEqual(1, len(response.context['form'].fields))

    # submitting the empty form accepts the invitation
    post_data = dict()
    response = self.client.post(editor_join_url, post_data, follow=True)
    self.assertEqual(200, response.status_code)

    # the user gained the editor role and the invite was consumed
    self.assertIn(self.invited_editor, self.org.editors.all())
    self.assertFalse(Invitation.objects.get(pk=editor_invitation.pk).is_active)

    roles = (('V', self.org.viewers), ('S', self.org.surveyors),
             ('A', self.org.administrators), ('E', self.org.editors))

    # test it for each role
    for role in roles:
        invite = create_invite(role[0])
        user = self.create_user('User%s' % role[0])
        self.login(user)
        response = self.client.post(reverse('orgs.org_join', args=[invite.secret]), follow=True)
        self.assertEqual(200, response.status_code)
        self.assertIsNotNone(role[1].filter(pk=user.pk).first())

    # try an expired invite
    invite = create_invite('S')
    invite.is_active = False
    invite.save()

    expired_user = self.create_user("InvitedExpired")
    self.login(expired_user)
    response = self.client.post(reverse('orgs.org_join', args=[invite.secret]), follow=True)
    self.assertEqual(200, response.status_code)
    # an inactive invite grants no role
    self.assertIsNone(self.org.surveyors.filter(pk=expired_user.pk).first())
def test_create_login(self):
    """A new user can create a login from an invitation and is granted the invited role."""
    admin_invitation = Invitation.objects.create(org=self.org,
                                                 user_group="A",
                                                 email="norkans7@gmail.com",
                                                 created_by=self.admin,
                                                 modified_by=self.admin)

    admin_create_login_url = reverse('orgs.org_create_login', args=[admin_invitation.secret])
    self.client.logout()

    response = self.client.get(admin_create_login_url)
    self.assertEqual(200, response.status_code)
    self.assertEqual(self.org.pk, response.context['org'].pk)

    # we have a form with 4 fields and one hidden 'loc'
    self.assertEqual(5, len(response.context['form'].fields))
    self.assertIn('first_name', response.context['form'].fields)
    self.assertIn('last_name', response.context['form'].fields)
    self.assertIn('email', response.context['form'].fields)
    self.assertIn('password', response.context['form'].fields)

    post_data = dict()
    post_data['first_name'] = "Norbert"
    post_data['last_name'] = "Kwizera"
    post_data['email'] = "norkans7@gmail.com"
    post_data['password'] = "norbertkwizeranorbert"

    response = self.client.post(admin_create_login_url, post_data, follow=True)
    self.assertEqual(200, response.status_code)

    # the new user is an administrator and the invite is consumed
    new_invited_user = User.objects.get(email="norkans7@gmail.com")
    self.assertIn(new_invited_user, self.org.administrators.all())
    self.assertFalse(Invitation.objects.get(pk=admin_invitation.pk).is_active)
def test_surveyor_invite(self):
    """Surveyor invitees get only the surveyor role and are routed to the surveyor page."""
    surveyor_invite = Invitation.objects.create(org=self.org,
                                                user_group="S",
                                                email="surveyor@gmail.com",
                                                created_by=self.admin,
                                                modified_by=self.admin)

    admin_create_login_url = reverse('orgs.org_create_login', args=[surveyor_invite.secret])
    self.client.logout()

    post_data = dict(first_name='Surveyor', last_name='User', email='surveyor@gmail.com', password='password')
    response = self.client.post(admin_create_login_url, post_data, follow=True)
    self.assertEqual(200, response.status_code)

    # as a surveyor we should have been rerouted
    self.assertEqual(reverse('orgs.org_surveyor'), response._request.path)
    self.assertFalse(Invitation.objects.get(pk=surveyor_invite.pk).is_active)

    # make sure we are a surveyor
    new_invited_user = User.objects.get(email="surveyor@gmail.com")
    self.assertIn(new_invited_user, self.org.surveyors.all())

    # if we login, we should be rerouted too
    self.client.logout()
    response = self.client.post('/users/login/', {'username': 'surveyor@gmail.com', 'password': 'password'}, follow=True)
    self.assertEqual(200, response.status_code)
    self.assertEqual(reverse('orgs.org_surveyor'), response._request.path)
def test_surveyor(self):
    """Mobile surveyor signup requires the org's surveyor password and grants only the surveyor role."""
    self.client.logout()
    url = '%s?mobile=true' % reverse('orgs.org_surveyor')

    # try creating a surveyor account with a bogus password
    post_data = dict(surveyor_password='badpassword')
    response = self.client.post(url, post_data)
    self.assertContains(response, 'Invalid surveyor password, please check with your project leader and try again.')

    # save a surveyor password
    self.org.surveyor_password = 'nyaruka'
    self.org.save()

    # now lets try again
    post_data = dict(surveyor_password='nyaruka')
    response = self.client.post(url, post_data)
    self.assertContains(response, 'Enter your details below to create your account.')

    # now try creating an account on the second step without and surveyor_password
    post_data = dict(first_name='Marshawn', last_name='Lynch',
                     password='beastmode24', email='beastmode@seahawks.com')
    response = self.client.post(url, post_data)
    self.assertContains(response, 'Enter your details below to create your account.')

    # now do the same but with a valid surveyor_password
    post_data = dict(first_name='Marshawn', last_name='Lynch',
                     password='beastmode24', email='beastmode@seahawks.com',
                     surveyor_password='nyaruka')
    response = self.client.post(url, post_data)
    # successful signup redirects with token, username and org name in the URL
    self.assertIn('token', response.url)
    self.assertIn('beastmode', response.url)
    self.assertIn('Temba', response.url)

    # try with a login that already exists
    post_data = dict(first_name='Resused', last_name='Email',
                     password='mypassword1', email='beastmode@seahawks.com',
                     surveyor_password='nyaruka')
    response = self.client.post(url, post_data)
    self.assertContains(response, 'That email address is already used')

    # try with a password that is too short
    post_data = dict(first_name='Short', last_name='Password',
                     password='short', email='thomasrawls@seahawks.com',
                     surveyor_password='nyaruka')
    response = self.client.post(url, post_data)
    self.assertContains(response, 'Passwords must contain at least 8 letters')

    # finally make sure our login works
    success = self.client.login(username='beastmode@seahawks.com', password='beastmode24')
    self.assertTrue(success)

    # and that we only have the surveyor role
    self.assertIsNotNone(self.org.surveyors.filter(username='beastmode@seahawks.com').first())
    self.assertIsNone(self.org.administrators.filter(username='beastmode@seahawks.com').first())
    self.assertIsNone(self.org.editors.filter(username='beastmode@seahawks.com').first())
    self.assertIsNone(self.org.viewers.filter(username='beastmode@seahawks.com').first())
def test_choose(self):
    """Users in multiple orgs can switch; non-org users get logged out; superusers go to management."""
    self.client.logout()
    choose_url = reverse('orgs.org_choose')

    # have a second org
    self.create_secondary_org()
    self.login(self.admin)

    response = self.client.get(reverse('orgs.org_home'))
    self.assertEqual(response.context['org'], self.org)

    # add our admin to self.org2 viewers
    self.org2.viewers.add(self.admin)

    response = self.client.get(choose_url)
    self.assertEqual(200, response.status_code)
    self.assertIn('organization', response.context['form'].fields)

    # choosing the second org switches the active org
    post_data = dict()
    post_data['organization'] = self.org2.pk

    response = self.client.post(choose_url, post_data, follow=True)
    self.assertEqual(200, response.status_code)
    response = self.client.get(reverse('orgs.org_home'))
    self.assertEqual(response.context_data['org'], self.org2)

    # a non org user gets logged out
    self.login(self.non_org_user)
    response = self.client.get(choose_url)
    self.assertRedirect(response, reverse('users.user_login'))

    # superuser gets redirected to user management page
    self.login(self.superuser)
    response = self.client.get(choose_url, follow=True)
    self.assertContains(response, "Organizations")
def test_topup_admin(self):
    """Topup manage/create/update pages are superuser-only; creating and updating adjusts credits."""
    self.login(self.admin)

    topup = TopUp.objects.get()

    # admins shouldn't be able to see the create / manage / update pages
    manage_url = reverse('orgs.topup_manage') + "?org=%d" % self.org.id
    self.assertRedirect(self.client.get(manage_url), '/users/login/')

    create_url = reverse('orgs.topup_create') + "?org=%d" % self.org.id
    self.assertRedirect(self.client.get(create_url), '/users/login/')

    update_url = reverse('orgs.topup_update', args=[topup.pk])
    self.assertRedirect(self.client.get(update_url), '/users/login/')

    # log in as root
    self.login(self.superuser)

    # should list our one topup
    response = self.client.get(manage_url)
    self.assertEqual(1, len(response.context['object_list']))

    # create a new one
    post_data = dict(price='1000', credits='500', comment="")
    response = self.client.post(create_url, post_data)
    self.assertEqual(2, TopUp.objects.filter(org=self.org).count())
    self.assertEqual(1500, self.org.get_credits_remaining())

    # update one of our topups
    post_data = dict(is_active=True, price='0', credits='5000', comment="", expires_on="2025-04-03 13:47:46")
    response = self.client.post(update_url, post_data)
    self.assertEqual(5500, self.org.get_credits_remaining())
def test_topup_model(self):
    """get_price_display renders a missing price as empty, zero as Free, and cents as dollars."""
    top_up = TopUp.create(self.admin, price=None, credits=1000)
    self.assertEqual(top_up.get_price_display(), "")

    # price is stored in cents: 0 -> "Free", 100 -> "$1.00"
    for price, display in ((0, "Free"), (100, "$1.00")):
        top_up.price = price
        top_up.save()
        self.assertEqual(top_up.get_price_display(), display)
def test_topups(self):
    """Exercise credit accounting across topup creation, expiry, squashing and tier thresholds."""
    settings.BRANDING[settings.DEFAULT_BRAND]['tiers'] = dict(multi_user=100000, multi_org=1000000)

    contact = self.create_contact("Michael Shumaucker", "+250788123123")
    test_contact = Contact.get_test_contact(self.user)
    welcome_topup = TopUp.objects.get()

    def create_msgs(recipient, count):
        # incoming messages consume credits
        for m in range(count):
            self.create_msg(contact=recipient, direction='I', text="Test %d" % m)

    create_msgs(contact, 10)

    with self.assertNumQueries(1):
        self.assertEqual(150, self.org.get_low_credits_threshold())

    with self.assertNumQueries(0):
        self.assertEqual(150, self.org.get_low_credits_threshold())

    # we should have 1000 minus 10 credits for this org
    with self.assertNumQueries(4):
        self.assertEqual(990, self.org.get_credits_remaining())  # from db

    with self.assertNumQueries(0):
        self.assertEqual(1000, self.org.get_credits_total())  # from cache
        self.assertEqual(10, self.org.get_credits_used())
        self.assertEqual(990, self.org.get_credits_remaining())

    self.assertEqual(10, welcome_topup.msgs.count())
    self.assertEqual(10, TopUp.objects.get(pk=welcome_topup.pk).get_used())

    # at this point we shouldn't have squashed any topupcredits, so should have the same number as our used
    self.assertEqual(10, TopUpCredits.objects.all().count())

    # now squash
    squash_topupcredits()

    # should only have one remaining
    self.assertEqual(1, TopUpCredits.objects.all().count())

    # reduce our credits on our topup to 15
    TopUp.objects.filter(pk=welcome_topup.pk).update(credits=15)
    self.org.update_caches(OrgEvent.topup_updated, None)  # invalidates our credits remaining cache

    self.assertEqual(15, self.org.get_credits_total())
    self.assertEqual(5, self.org.get_credits_remaining())

    # create 10 more messages, only 5 of which will get a topup
    create_msgs(contact, 10)

    self.assertEqual(15, TopUp.objects.get(pk=welcome_topup.pk).msgs.count())
    self.assertEqual(15, TopUp.objects.get(pk=welcome_topup.pk).get_used())

    self.assertFalse(self.org._calculate_active_topup())

    with self.assertNumQueries(0):
        self.assertEqual(15, self.org.get_credits_total())
        self.assertEqual(20, self.org.get_credits_used())
        self.assertEqual(-5, self.org.get_credits_remaining())

    # again create 10 more messages, none of which will get a topup
    create_msgs(contact, 10)

    with self.assertNumQueries(0):
        self.assertEqual(15, self.org.get_credits_total())
        self.assertEqual(30, self.org.get_credits_used())
        self.assertEqual(-15, self.org.get_credits_remaining())

    self.assertEqual(15, TopUp.objects.get(pk=welcome_topup.pk).get_used())

    # raise our topup to take 20 and create another for 5
    TopUp.objects.filter(pk=welcome_topup.pk).update(credits=20)
    new_topup = TopUp.create(self.admin, price=0, credits=5)
    self.org.update_caches(OrgEvent.topup_updated, None)

    # apply topups which will max out both and reduce debt to 5
    self.org.apply_topups()

    self.assertEqual(20, welcome_topup.msgs.count())
    self.assertEqual(20, TopUp.objects.get(pk=welcome_topup.pk).get_used())
    self.assertEqual(5, new_topup.msgs.count())
    self.assertEqual(5, TopUp.objects.get(pk=new_topup.pk).get_used())
    self.assertEqual(25, self.org.get_credits_total())
    self.assertEqual(30, self.org.get_credits_used())
    self.assertEqual(-5, self.org.get_credits_remaining())

    # create a message from our test contact, should not count against our totals
    test_msg = self.create_msg(contact=test_contact, direction='I', text="Test")

    self.assertIsNone(test_msg.topup_id)
    self.assertEqual(30, self.org.get_credits_used())

    # test special status
    self.assertFalse(self.org.is_multi_user_tier())
    self.assertFalse(self.org.is_multi_org_tier())

    # add new topup with lots of credits
    mega_topup = TopUp.create(self.admin, price=0, credits=100000)
    self.org.update_caches(OrgEvent.topup_updated, None)

    # after applying this, no non-test messages should be without a topup
    self.org.apply_topups()
    self.assertFalse(Msg.objects.filter(org=self.org, contact__is_test=False, topup=None))
    self.assertFalse(Msg.objects.filter(org=self.org, contact__is_test=True).exclude(topup=None))
    self.assertEqual(5, TopUp.objects.get(pk=mega_topup.pk).get_used())

    # we aren't yet multi user since this topup was free
    self.assertEqual(0, self.org.get_purchased_credits())
    self.assertFalse(self.org.is_multi_user_tier())

    self.assertEqual(100025, self.org.get_credits_total())
    self.assertEqual(30, self.org.get_credits_used())
    self.assertEqual(99995, self.org.get_credits_remaining())

    # and new messages use the mega topup
    msg = self.create_msg(contact=contact, direction='I', text="Test")
    self.assertEqual(msg.topup, mega_topup)
    self.assertEqual(6, TopUp.objects.get(pk=mega_topup.pk).get_used())

    # but now it expires
    yesterday = timezone.now() - relativedelta(days=1)
    mega_topup.expires_on = yesterday
    mega_topup.save(update_fields=['expires_on'])
    self.org.update_caches(OrgEvent.topup_updated, None)

    # new incoming messages should not be assigned a topup
    msg = self.create_msg(contact=contact, direction='I', text="Test")
    self.assertIsNone(msg.topup)

    # check our totals
    self.org.update_caches(OrgEvent.topup_updated, None)

    with self.assertNumQueries(3):
        self.assertEqual(0, self.org.get_purchased_credits())
        self.assertEqual(31, self.org.get_credits_total())
        self.assertEqual(32, self.org.get_credits_used())
        self.assertEqual(-1, self.org.get_credits_remaining())

    # all top up expired
    TopUp.objects.all().update(expires_on=yesterday)

    # we have expiring credits, and no more active
    gift_topup = TopUp.create(self.admin, price=0, credits=100)
    next_week = timezone.now() + relativedelta(days=7)
    gift_topup.expires_on = next_week
    gift_topup.save(update_fields=['expires_on'])
    self.org.update_caches(OrgEvent.topup_updated, None)
    self.org.apply_topups()

    with self.assertNumQueries(3):
        self.assertEqual(99, self.org.get_credits_expiring_soon())

    with self.assertNumQueries(1):
        self.assertEqual(15, self.org.get_low_credits_threshold())

    with self.assertNumQueries(0):
        self.assertEqual(99, self.org.get_credits_expiring_soon())
        self.assertEqual(15, self.org.get_low_credits_threshold())

    # some credits expire but more credits will remain active
    later_active_topup = TopUp.create(self.admin, price=0, credits=200)
    five_week_ahead = timezone.now() + relativedelta(days=35)
    later_active_topup.expires_on = five_week_ahead
    later_active_topup.save(update_fields=['expires_on'])
    self.org.update_caches(OrgEvent.topup_updated, None)
    self.org.apply_topups()

    with self.assertNumQueries(3):
        self.assertEqual(0, self.org.get_credits_expiring_soon())

    with self.assertNumQueries(1):
        self.assertEqual(45, self.org.get_low_credits_threshold())

    with self.assertNumQueries(0):
        self.assertEqual(0, self.org.get_credits_expiring_soon())
        self.assertEqual(45, self.org.get_low_credits_threshold())

    # no expiring credits
    gift_topup.expires_on = five_week_ahead
    gift_topup.save(update_fields=['expires_on'])
    self.org.update_caches(OrgEvent.topup_updated, None)
    self.org.apply_topups()

    with self.assertNumQueries(3):
        self.assertEqual(0, self.org.get_credits_expiring_soon())

    with self.assertNumQueries(1):
        self.assertEqual(45, self.org.get_low_credits_threshold())

    with self.assertNumQueries(0):
        self.assertEqual(0, self.org.get_credits_expiring_soon())
        self.assertEqual(45, self.org.get_low_credits_threshold())

    # do not consider expired topup
    gift_topup.expires_on = yesterday
    gift_topup.save(update_fields=['expires_on'])
    self.org.update_caches(OrgEvent.topup_updated, None)
    self.org.apply_topups()

    with self.assertNumQueries(3):
        self.assertEqual(0, self.org.get_credits_expiring_soon())

    with self.assertNumQueries(1):
        self.assertEqual(30, self.org.get_low_credits_threshold())

    with self.assertNumQueries(0):
        self.assertEqual(0, self.org.get_credits_expiring_soon())
        self.assertEqual(30, self.org.get_low_credits_threshold())

    TopUp.objects.all().update(is_active=False)
    self.org.update_caches(OrgEvent.topup_updated, None)
    self.org.apply_topups()

    with self.assertNumQueries(1):
        self.assertEqual(0, self.org.get_low_credits_threshold())

    with self.assertNumQueries(0):
        self.assertEqual(0, self.org.get_low_credits_threshold())

    # now buy some credits to make us multi user
    TopUp.create(self.admin, price=100, credits=100000)
    self.org.update_caches(OrgEvent.topup_updated, None)
    self.assertTrue(self.org.is_multi_user_tier())
    self.assertFalse(self.org.is_multi_org_tier())

    # good deal!
    TopUp.create(self.admin, price=100, credits=1000000)
    self.org.update_caches(OrgEvent.topup_updated, None)
    self.assertTrue(self.org.is_multi_user_tier())
    self.assertTrue(self.org.is_multi_org_tier())
@patch('temba.orgs.views.TwilioRestClient', MockTwilioClient)
@patch('twilio.util.RequestValidator', MockRequestValidator)
def test_twilio_connect(self):
    """
    Test connecting, updating and disconnecting a Twilio account through the org views.

    Fixes a bug where `follow=True` was passed inside the POST data dict of the final
    disconnect request instead of as a keyword argument to `client.post()`.
    """
    with patch('temba.tests.MockTwilioClient.MockAccounts.get') as mock_get:
        mock_get.return_value = MockTwilioClient.MockAccount('Full')

        connect_url = reverse("orgs.org_twilio_connect")

        self.login(self.admin)
        self.admin.set_org(self.org)

        response = self.client.get(connect_url)
        self.assertEqual(200, response.status_code)
        self.assertEqual(list(response.context['form'].fields.keys()), ['account_sid', 'account_token', 'loc'])

        # try posting without an account token
        post_data = {'account_sid': "AccountSid"}
        response = self.client.post(connect_url, post_data)
        self.assertFormError(response, 'form', 'account_token', 'This field is required.')

        # now add the account token and try again
        post_data['account_token'] = "AccountToken"

        # but with an unexpected exception
        with patch('temba.tests.MockTwilioClient.__init__') as mock:
            mock.side_effect = Exception('Unexpected')

            response = self.client.post(connect_url, post_data)
            self.assertFormError(response, 'form', '__all__', 'The Twilio account SID and Token seem invalid. '
                                                              'Please check them again and retry.')

        self.client.post(connect_url, post_data)

        self.org.refresh_from_db()
        self.assertEqual(self.org.config_json()['ACCOUNT_SID'], "AccountSid")
        self.assertEqual(self.org.config_json()['ACCOUNT_TOKEN'], "AccountToken")

        # when the user submits the secondary token, we use it to get the primary one from the rest API
        with patch('temba.tests.MockTwilioClient.MockAccounts.get') as mock_get_primary:
            mock_get_primary.return_value = MockTwilioClient.MockAccount('Full', 'PrimaryAccountToken')

            self.client.post(connect_url, post_data)
            self.org.refresh_from_db()
            self.assertEqual(self.org.config_json()['ACCOUNT_SID'], "AccountSid")
            self.assertEqual(self.org.config_json()['ACCOUNT_TOKEN'], "PrimaryAccountToken")

            twilio_account_url = reverse('orgs.org_twilio_account')
            response = self.client.get(twilio_account_url)
            self.assertEqual("AccountSid", response.context['account_sid'])

            self.org.refresh_from_db()
            config = self.org.config_json()
            self.assertEqual('AccountSid', config['ACCOUNT_SID'])
            self.assertEqual('PrimaryAccountToken', config['ACCOUNT_TOKEN'])

            # post without a sid or token, should get a form validation error
            response = self.client.post(twilio_account_url, dict(disconnect='false'), follow=True)
            self.assertEqual('[{"message": "You must enter your Twilio Account SID", "code": ""}]',
                             response.context['form'].errors['__all__'].as_json())

            # all our twilio creds should remain the same
            self.org.refresh_from_db()
            config = self.org.config_json()
            self.assertEqual(config['ACCOUNT_SID'], "AccountSid")
            self.assertEqual(config['ACCOUNT_TOKEN'], "PrimaryAccountToken")

            # now try with all required fields, and a bonus field we shouldn't change
            self.client.post(twilio_account_url, dict(account_sid='AccountSid',
                                                      account_token='SecondaryToken',
                                                      disconnect='false',
                                                      name='DO NOT CHANGE ME'), follow=True)
            # name shouldn't change
            self.org.refresh_from_db()
            self.assertEqual(self.org.name, "Temba")

            # now disconnect our twilio connection
            self.assertTrue(self.org.is_connected_to_twilio())

            # fixed: follow=True was previously passed inside the POST data dict, so the
            # disconnect request was never followed as intended
            self.client.post(twilio_account_url, dict(disconnect='true'), follow=True)

            self.org.refresh_from_db()
            self.assertFalse(self.org.is_connected_to_twilio())
def test_has_airtime_transfers(self):
    """An org reports airtime transfers only once at least one exists."""
    AirtimeTransfer.objects.filter(org=self.org).delete()

    # with no transfers on record, the org should say so
    self.assertFalse(self.org.has_airtime_transfers())

    recipient = self.create_contact('Bob', number='+250788123123')
    AirtimeTransfer.objects.create(org=self.org, recipient='+250788123123', amount='100',
                                   contact=recipient, created_by=self.admin, modified_by=self.admin)

    # now it should
    self.assertTrue(self.org.has_airtime_transfers())
def test_transferto_model_methods(self):
    """Connecting and removing a TransferTo account updates config and audit fields."""
    target = self.org
    target.refresh_from_db()

    # starts out unconnected
    self.assertFalse(target.is_connected_to_transferto())

    target.connect_transferto('login', 'token', self.admin)
    target.refresh_from_db()
    self.assertTrue(target.is_connected_to_transferto())
    self.assertEqual(target.modified_by, self.admin)

    target.remove_transferto_account(self.admin)
    target.refresh_from_db()
    self.assertFalse(target.is_connected_to_transferto())
    self.assertEqual(target.modified_by, self.admin)
def test_transferto_account(self):
    """Exercise the TransferTo account view: bad credentials, success, disconnect and formax links."""
    self.login(self.admin)

    # connect transferTo
    transferto_account_url = reverse('orgs.org_transfer_to_account')

    with patch('temba.airtime.models.AirtimeTransfer.post_transferto_api_response') as mock_transferto:
        # a response we can't parse reads as invalid credentials
        mock_transferto.return_value = MockResponse(200, 'Unexpected content')
        response = self.client.post(transferto_account_url, {'account_login': 'login',
                                                             'airtime_api_token': 'token',
                                                             'disconnect': 'false'})
        self.assertContains(response, "Your TransferTo API key and secret seem invalid.")
        self.assertFalse(self.org.is_connected_to_transferto())

        # an explicit auth failure from the API surfaces its error text
        mock_transferto.return_value = MockResponse(200, 'authentication_key=123\r\n'
                                                         'error_code=400\r\n'
                                                         'error_txt=Failed Authentication\r\n')
        response = self.client.post(transferto_account_url, {'account_login': 'login',
                                                             'airtime_api_token': 'token',
                                                             'disconnect': 'false'})
        self.assertContains(response, "Connecting to your TransferTo account failed "
                                      "with error text: Failed Authentication")
        self.assertFalse(self.org.is_connected_to_transferto())

        # a successful ping connects the account
        mock_transferto.return_value = MockResponse(200, 'info_txt=pong\r\n'
                                                         'authentication_key=123\r\n'
                                                         'error_code=0\r\n'
                                                         'error_txt=Transaction successful\r\n')
        response = self.client.post(transferto_account_url, {'account_login': 'login',
                                                             'airtime_api_token': 'token',
                                                             'disconnect': 'false'})
        self.assertNoFormErrors(response)

        # transferTo should now be connected
        self.org = Org.objects.get(pk=self.org.pk)
        self.assertTrue(self.org.is_connected_to_transferto())
        self.assertEqual(self.org.config_json()['TRANSFERTO_ACCOUNT_LOGIN'], 'login')
        self.assertEqual(self.org.config_json()['TRANSFERTO_AIRTIME_API_TOKEN'], 'token')

        response = self.client.get(transferto_account_url)
        self.assertEqual(response.context['transferto_account_login'], 'login')

        # and disconnect
        response = self.client.post(transferto_account_url, {'account_login': 'login',
                                                             'airtime_api_token': 'token',
                                                             'disconnect': 'true'})
        self.assertNoFormErrors(response)
        self.org = Org.objects.get(pk=self.org.pk)
        self.assertFalse(self.org.is_connected_to_transferto())
        self.assertFalse(self.org.config_json()['TRANSFERTO_ACCOUNT_LOGIN'])
        self.assertFalse(self.org.config_json()['TRANSFERTO_AIRTIME_API_TOKEN'])

        # an exception while talking to the API also reads as invalid credentials
        mock_transferto.side_effect = Exception('foo')
        response = self.client.post(transferto_account_url, {'account_login': 'login',
                                                             'airtime_api_token': 'token',
                                                             'disconnect': 'false'})
        self.assertContains(response, "Your TransferTo API key and secret seem invalid.")
        self.assertFalse(self.org.is_connected_to_transferto())

    # no account connected, so no link to the transfer logs
    response = self.client.get(transferto_account_url, HTTP_X_FORMAX=True)
    self.assertNotContains(response, reverse('airtime.airtimetransfer_list'))
    self.assertNotContains(response, "%s?disconnect=true" % reverse('orgs.org_transfer_to_account'))

    response = self.client.get(transferto_account_url)
    self.assertNotContains(response, reverse('airtime.airtimetransfer_list'))
    self.assertNotContains(response, "%s?disconnect=true" % reverse('orgs.org_transfer_to_account'))

    self.org.connect_transferto('login', 'token', self.admin)

    # links are not shown if the request is not from formax
    response = self.client.get(transferto_account_url)
    self.assertNotContains(response, reverse('airtime.airtimetransfer_list'))
    self.assertNotContains(response, "%s?disconnect=true" % reverse('orgs.org_transfer_to_account'))

    # but they are for formax requests
    response = self.client.get(transferto_account_url, HTTP_X_FORMAX=True)
    self.assertContains(response, reverse('airtime.airtimetransfer_list'))
    self.assertContains(response, "%s?disconnect=true" % reverse('orgs.org_transfer_to_account'))
def test_chatbase_account(self):
    """
    Test connecting and disconnecting a Chatbase account and its effect on message handling.

    Replaces the deprecated `assertEquals` alias with `assertEqual`.
    """
    self.login(self.admin)

    self.org.refresh_from_db()
    self.assertEqual((None, None), self.org.get_chatbase_credentials())

    chatbase_account_url = reverse('orgs.org_chatbase')
    response = self.client.get(chatbase_account_url)
    self.assertContains(response, 'Chatbase')

    payload = dict(version='1.0', not_handled=True, feedback=False, disconnect='false')

    # posting without an API key or agent name should produce a validation error
    response = self.client.post(chatbase_account_url, payload, follow=True)
    self.assertContains(response, "Missing data: Agent Name or API Key.Please check them again and retry.")
    self.assertEqual((None, None), self.org.get_chatbase_credentials())

    payload.update(dict(api_key='api_key', agent_name='chatbase_agent', type='user'))

    self.client.post(chatbase_account_url, payload, follow=True)

    self.org.refresh_from_db()
    self.assertEqual(('api_key', '1.0'), self.org.get_chatbase_credentials())
    self.assertEqual(self.org.config_json()['CHATBASE_API_KEY'], 'api_key')
    self.assertEqual(self.org.config_json()['CHATBASE_AGENT_NAME'], 'chatbase_agent')
    self.assertEqual(self.org.config_json()['CHATBASE_VERSION'], '1.0')

    # processing a message without SEND_CHATBASE enabled should raise
    with self.assertRaises(Exception):
        contact = self.create_contact('Anakin Skywalker', '+12067791212')
        msg = self.create_msg(contact=contact, text="favs")
        Msg.process_message(msg)

    with self.settings(SEND_CHATBASE=True):
        contact = self.create_contact('Anakin Skywalker', '+12067791212')
        msg = self.create_msg(contact=contact, text="favs")
        Msg.process_message(msg)

    # the agent name should appear on the org home page
    org_home_url = reverse('orgs.org_home')
    response = self.client.get(org_home_url)
    self.assertContains(response, self.org.config_json()['CHATBASE_AGENT_NAME'])

    # disconnecting clears the credentials
    payload.update(dict(disconnect='true'))
    self.client.post(chatbase_account_url, payload, follow=True)

    self.org.refresh_from_db()
    self.assertEqual((None, None), self.org.get_chatbase_credentials())

    # and message processing still works with chatbase sending enabled but unconfigured
    with self.settings(SEND_CHATBASE=True):
        contact = self.create_contact('Anakin Skywalker', '+12067791212')
        msg = self.create_msg(contact=contact, text="favs")
        Msg.process_message(msg)
def test_resthooks(self):
    """Creating, listing and removing resthooks through the org resthook page."""
    resthook_url = reverse('orgs.org_resthooks')

    # the page requires authentication
    response = self.client.get(resthook_url)
    self.assertLoginRedirect(response)

    self.login(self.admin)

    # nothing configured yet, so no resthooks listed
    response = self.client.get(resthook_url)
    self.assertFalse(response.context['current_resthooks'])

    # create one
    self.client.post(resthook_url, {'resthook': 'mother-registration'})

    new_hook = Resthook.objects.get()
    self.assertEqual(new_hook.slug, 'mother-registration')
    self.assertEqual(new_hook.org, self.org)
    self.assertEqual(new_hook.created_by, self.admin)

    # it should now show up on the page
    response = self.client.get(resthook_url)
    self.assertTrue(response.context['current_resthooks'])

    # a duplicate slug (case-insensitive) is rejected
    response = self.client.post(resthook_url, {'resthook': 'Mother-Registration'})
    self.assertTrue(response.context['form'].errors)

    # the select2 list endpoint should include it
    response = self.client.get(reverse('api.resthook_list') + "?_format=select2")
    results = response.json()['results']
    self.assertEqual(len(results), 1)
    self.assertEqual(results[0], dict(text='mother-registration', id='mother-registration'))

    # finally deactivate it via the checkbox post
    self.client.post(resthook_url, {'resthook_%d' % new_hook.id: 'checked'})
    new_hook.refresh_from_db()
    self.assertFalse(new_hook.is_active)

    # and it disappears from the page
    response = self.client.get(resthook_url)
    self.assertFalse(response.context['current_resthooks'])
def test_smtp_server(self):
    """
    Validation and persistence of the org's custom SMTP configuration form.

    Replaces the deprecated `assertEquals` with `assertEqual` and factors the
    heavily repeated post/error-extraction boilerplate into local helpers.
    """
    self.login(self.admin)

    smtp_server_url = reverse('orgs.org_smtp_server')

    def post_config(**fields):
        # posts the SMTP configuration form and follows redirects
        return self.client.post(smtp_server_url, fields, follow=True)

    def form_errors(response):
        # the non-field form errors from the response, serialized as JSON
        return response.context['form'].errors['__all__'].as_json()

    self.org.refresh_from_db()
    self.assertFalse(self.org.has_smtp_config())

    # each missing field should produce its own validation error
    response = post_config(disconnect='false')
    self.assertEqual('[{"message": "You must enter a from email", "code": ""}]', form_errors(response))

    response = post_config(smtp_from_email='foobar.com', disconnect='false')
    self.assertEqual('[{"message": "Please enter a valid email address", "code": ""}]', form_errors(response))

    response = post_config(smtp_from_email='foo@bar.com', disconnect='false')
    self.assertEqual('[{"message": "You must enter the SMTP host", "code": ""}]', form_errors(response))

    response = post_config(smtp_from_email='foo@bar.com',
                           smtp_host='smtp.example.com',
                           disconnect='false')
    self.assertEqual('[{"message": "You must enter the SMTP username", "code": ""}]', form_errors(response))

    response = post_config(smtp_from_email='foo@bar.com',
                           smtp_host='smtp.example.com',
                           smtp_username='support@example.com',
                           disconnect='false')
    self.assertEqual('[{"message": "You must enter the SMTP password", "code": ""}]', form_errors(response))

    response = post_config(smtp_from_email='foo@bar.com',
                           smtp_host='smtp.example.com',
                           smtp_username='support@example.com',
                           smtp_password='secret',
                           disconnect='false')
    self.assertEqual('[{"message": "You must enter the SMTP port", "code": ""}]', form_errors(response))

    # a complete configuration should be saved
    post_config(smtp_from_email='foo@bar.com',
                smtp_host='smtp.example.com',
                smtp_username='support@example.com',
                smtp_password='secret',
                smtp_port='465',
                smtp_encryption='',
                disconnect='false')

    self.org.refresh_from_db()
    self.assertTrue(self.org.has_smtp_config())

    config = self.org.config_json()
    self.assertEqual(config['SMTP_FROM_EMAIL'], 'foo@bar.com')
    self.assertEqual(config['SMTP_HOST'], 'smtp.example.com')
    self.assertEqual(config['SMTP_USERNAME'], 'support@example.com')
    self.assertEqual(config['SMTP_PASSWORD'], 'secret')
    self.assertEqual(config['SMTP_PORT'], '465')
    self.assertEqual(config['SMTP_ENCRYPTION'], '')

    response = self.client.get(smtp_server_url)
    self.assertEqual('foo@bar.com', response.context['flow_from_email'])

    # posting a bonus 'name' field should not change the org name
    post_config(smtp_from_email='support@example.com',
                smtp_host='smtp.example.com',
                smtp_username='support@example.com',
                smtp_password='secret',
                smtp_port='465',
                smtp_encryption='T',
                name="DO NOT CHANGE ME",
                disconnect='false')

    self.org.refresh_from_db()
    self.assertEqual(self.org.name, "Temba")
    self.assertTrue(self.org.has_smtp_config())

    # a blank password with an unchanged username keeps the old password
    post_config(smtp_from_email='support@example.com',
                smtp_host='smtp.example.com',
                smtp_username='support@example.com',
                smtp_password='',
                smtp_port='465',
                smtp_encryption='T',
                disconnect='false')

    self.org.refresh_from_db()
    self.assertTrue(self.org.has_smtp_config())
    self.assertEqual(self.org.config_json()['SMTP_PASSWORD'], 'secret')

    # but a blank password with a new username is an error
    response = post_config(smtp_from_email='support@example.com',
                           smtp_host='smtp.example.com',
                           smtp_username='help@example.com',
                           smtp_password='',
                           smtp_port='465',
                           smtp_encryption='T',
                           disconnect='false')
    self.assertEqual('[{"message": "You must enter the SMTP password", "code": ""}]', form_errors(response))

    # disconnecting clears the configuration
    post_config(disconnect='true')
    self.org.refresh_from_db()
    self.assertFalse(self.org.has_smtp_config())

    # surrounding whitespace in any field should be stripped before saving
    post_config(smtp_from_email=' support@example.com',
                smtp_host=' smtp.example.com ',
                smtp_username=' support@example.com ',
                smtp_password='secret ',
                smtp_port='465 ',
                smtp_encryption='T',
                disconnect='false')

    self.org.refresh_from_db()
    self.assertTrue(self.org.has_smtp_config())

    config = self.org.config_json()
    self.assertEqual(config['SMTP_FROM_EMAIL'], 'support@example.com')
    self.assertEqual(config['SMTP_HOST'], 'smtp.example.com')
    self.assertEqual(config['SMTP_USERNAME'], 'support@example.com')
    self.assertEqual(config['SMTP_PASSWORD'], 'secret')
    self.assertEqual(config['SMTP_PORT'], '465')
    self.assertEqual(config['SMTP_ENCRYPTION'], 'T')
@patch('nexmo.Client.create_application')
def test_connect_nexmo(self, mock_create_application):
    """
    Connecting, updating and disconnecting a Nexmo account through the org views.

    Replaces deprecated `assertEquals` with `assertEqual` and hoists the repeated
    `mock_create_application.call_args_list[0][1]['params']` lookup into a local.
    """
    mock_create_application.return_value = dict(id='app-id', keys=dict(private_key='private-key'))

    self.login(self.admin)

    # connect nexmo
    connect_url = reverse('orgs.org_nexmo_connect')

    # simulate invalid credentials
    with patch('requests.get') as nexmo:
        nexmo.return_value = MockResponse(401, '{"error-code": "401"}')
        response = self.client.post(connect_url, dict(api_key='key', api_secret='secret'))
        self.assertContains(response, "Your Nexmo API key and secret seem invalid.")
        self.assertFalse(self.org.is_connected_to_nexmo())

    # ok, now with a success
    with patch('requests.get') as nexmo_get:
        with patch('requests.post') as nexmo_post:
            # believe it or not nexmo returns 'error-code' 200
            nexmo_get.return_value = MockResponse(200, '{"error-code": "200"}')
            nexmo_post.return_value = MockResponse(200, '{"error-code": "200"}')
            self.client.post(connect_url, dict(api_key='key', api_secret='secret'))

            # nexmo should now be connected
            self.org = Org.objects.get(pk=self.org.pk)
            self.assertTrue(self.org.is_connected_to_nexmo())
            self.assertEqual(self.org.config_json()['NEXMO_KEY'], 'key')
            self.assertEqual(self.org.config_json()['NEXMO_SECRET'], 'secret')

    # the voice application should have been created with our callback URLs
    nexmo_uuid = self.org.config_json()['NEXMO_UUID']
    app_params = mock_create_application.call_args_list[0][1]['params']

    self.assertEqual(app_params['answer_url'],
                     "https://%s%s" % (settings.TEMBA_HOST.lower(),
                                       reverse('handlers.nexmo_call_handler', args=['answer', nexmo_uuid])))
    self.assertEqual(app_params['event_url'],
                     "https://%s%s" % (settings.TEMBA_HOST.lower(),
                                       reverse('handlers.nexmo_call_handler', args=['event', nexmo_uuid])))
    self.assertEqual(app_params['answer_method'], 'POST')
    self.assertEqual(app_params['event_method'], 'POST')
    self.assertEqual(app_params['type'], 'voice')
    self.assertEqual(app_params['name'], "%s/%s" % (settings.TEMBA_HOST.lower(), nexmo_uuid))

    nexmo_account_url = reverse('orgs.org_nexmo_account')
    response = self.client.get(nexmo_account_url)
    self.assertEqual("key", response.context['api_key'])

    self.org.refresh_from_db()
    config = self.org.config_json()
    self.assertEqual('key', config[NEXMO_KEY])
    self.assertEqual('secret', config[NEXMO_SECRET])

    # post without api token, should get validation error
    response = self.client.post(nexmo_account_url, dict(disconnect='false'), follow=True)
    self.assertEqual('[{"message": "You must enter your Nexmo Account API Key", "code": ""}]',
                     response.context['form'].errors['__all__'].as_json())

    # nexmo config should remain the same
    self.org.refresh_from_db()
    config = self.org.config_json()
    self.assertEqual('key', config[NEXMO_KEY])
    self.assertEqual('secret', config[NEXMO_SECRET])

    # now try with all required fields, and a bonus field we shouldn't change
    self.client.post(nexmo_account_url, dict(api_key='other_key',
                                             api_secret='secret-too',
                                             disconnect='false',
                                             name='DO NOT CHNAGE ME'), follow=True)
    # name shouldn't change
    self.org.refresh_from_db()
    self.assertEqual(self.org.name, "Temba")

    # should change nexmo config
    with patch('nexmo.Client.get_balance') as mock_get_balance:
        mock_get_balance.return_value = 120
        self.client.post(nexmo_account_url, dict(api_key='other_key',
                                                 api_secret='secret-too',
                                                 disconnect='false'), follow=True)

        self.org.refresh_from_db()
        config = self.org.config_json()
        self.assertEqual('other_key', config[NEXMO_KEY])
        self.assertEqual('secret-too', config[NEXMO_SECRET])

    self.assertTrue(self.org.is_connected_to_nexmo())
    self.client.post(nexmo_account_url, dict(disconnect='true'), follow=True)

    self.org.refresh_from_db()
    self.assertFalse(self.org.is_connected_to_nexmo())

    # and disconnect via the model method too
    self.org.remove_nexmo_account(self.admin)
    self.assertFalse(self.org.is_connected_to_nexmo())
    self.assertFalse(self.org.config_json()['NEXMO_KEY'])
    self.assertFalse(self.org.config_json()['NEXMO_SECRET'])
@patch('nexmo.Client.create_application')
def test_nexmo_configuration(self, mock_create_application):
    """The configuration page redirects based on connection state and Nexmo API availability."""
    mock_create_application.return_value = dict(id='app-id', keys=dict(private_key='private-key'))

    self.login(self.admin)

    config_url = reverse('orgs.org_nexmo_configuration')

    # without a nexmo connection we bounce to the connect page
    response = self.client.get(config_url)
    self.assertEqual(response.status_code, 302)

    response = self.client.get(config_url, follow=True)
    self.assertEqual(response.request['PATH_INFO'], reverse('orgs.org_nexmo_connect'))

    self.org.connect_nexmo('key', 'secret', self.admin)

    with patch('temba.utils.nexmo.NexmoClient.update_account') as mock_update_account:
        # when the automatic account settings update succeeds we go straight to claiming a number
        mock_update_account.return_value = True

        response = self.client.get(config_url)
        self.assertEqual(response.status_code, 302)

        response = self.client.get(config_url, follow=True)
        self.assertEqual(response.request['PATH_INFO'], reverse('channels.channel_claim_nexmo'))

    with patch('temba.utils.nexmo.NexmoClient.update_account') as mock_update_account:
        # when nexmo errors out we stay on the configuration page
        mock_update_account.side_effect = [nexmo.Error, nexmo.Error]

        response = self.client.get(config_url)
        self.assertEqual(response.status_code, 200)

        response = self.client.get(config_url, follow=True)
        self.assertEqual(response.request['PATH_INFO'], reverse('orgs.org_nexmo_configuration'))
def test_connect_plivo(self):
    """
    Connecting a Plivo account stores the credentials in the session once validated.

    Replaces deprecated `assertEquals` with `assertEqual` and `assertFalse(x in y)`
    with the clearer `assertNotIn`.
    """
    self.login(self.admin)

    # connect plivo
    connect_url = reverse('orgs.org_plivo_connect')

    # simulate invalid credentials
    with patch('requests.get') as plivo_mock:
        plivo_mock.return_value = MockResponse(401,
                                               'Could not verify your access level for that URL.'
                                               '\nYou have to login with proper credentials')
        response = self.client.post(connect_url, dict(auth_id='auth-id', auth_token='auth-token'))
        self.assertContains(response,
                            "Your Plivo AUTH ID and AUTH TOKEN seem invalid. Please check them again and retry.")
        self.assertNotIn(Channel.CONFIG_PLIVO_AUTH_ID, self.client.session)
        self.assertNotIn(Channel.CONFIG_PLIVO_AUTH_TOKEN, self.client.session)

    # ok, now with a success
    with patch('requests.get') as plivo_mock:
        plivo_mock.return_value = MockResponse(200, json.dumps(dict()))
        self.client.post(connect_url, dict(auth_id='auth-id', auth_token='auth-token'))

        # plivo credentials should be added to the session
        self.assertEqual(self.client.session[Channel.CONFIG_PLIVO_AUTH_ID], 'auth-id')
        self.assertEqual(self.client.session[Channel.CONFIG_PLIVO_AUTH_TOKEN], 'auth-token')
def test_tiers(self):
    """Tier gating follows the brand's 'tiers' settings and the org's credit balance."""
    branding = settings.BRANDING[settings.DEFAULT_BRAND]

    # default is no tiers, so everything is allowed
    self.assertTrue(self.org.is_import_flows_tier())
    self.assertTrue(self.org.is_multi_user_tier())
    self.assertTrue(self.org.is_multi_org_tier())

    # same when the tiers key is missing completely
    del branding['tiers']
    self.assertTrue(self.org.is_import_flows_tier())
    self.assertTrue(self.org.is_multi_user_tier())
    self.assertTrue(self.org.is_multi_org_tier())

    # tiers enabled but not enough credits
    branding['tiers'] = dict(import_flows=1, multi_user=100000, multi_org=1000000)
    self.assertIsNone(self.org.create_sub_org('Sub Org A'))
    self.assertFalse(self.org.is_import_flows_tier())
    self.assertFalse(self.org.is_multi_user_tier())
    self.assertFalse(self.org.is_multi_org_tier())

    # not enough credits, but tiers disabled
    branding['tiers'] = dict(import_flows=0, multi_user=0, multi_org=0)
    self.assertIsNotNone(self.org.create_sub_org('Sub Org A'))
    self.assertTrue(self.org.is_import_flows_tier())
    self.assertTrue(self.org.is_multi_user_tier())
    self.assertTrue(self.org.is_multi_org_tier())

    # tiers enabled and enough credits
    branding['tiers'] = dict(import_flows=1, multi_user=100000, multi_org=1000000)
    TopUp.create(self.admin, price=100, credits=1000000)
    self.org.update_caches(OrgEvent.topup_updated, None)
    self.assertIsNotNone(self.org.create_sub_org('Sub Org B'))
    self.assertTrue(self.org.is_import_flows_tier())
    self.assertTrue(self.org.is_multi_user_tier())
    self.assertTrue(self.org.is_multi_org_tier())
def test_sub_orgs(self):
    """Credit allocation from a parent org to a sub org, tracked through debits."""
    from temba.orgs.models import Debit

    settings.BRANDING[settings.DEFAULT_BRAND]['tiers'] = dict(multi_org=1000000)

    # start with two topups, the second expiring far in the future
    expires = timezone.now() + timedelta(days=400)
    topup_one = TopUp.objects.filter(org=self.org).first()
    topup_two = TopUp.create(self.admin, price=0, credits=1000, org=self.org, expires_on=expires)

    # sub orgs can't be created if the org isn't at the proper tier
    child = self.org.create_sub_org('Sub Org')
    self.assertIsNone(child)

    # lower the tier and try again
    settings.BRANDING[settings.DEFAULT_BRAND]['tiers'] = dict(multi_org=0)
    child = self.org.create_sub_org('Sub Org')

    # suborgs can't themselves create suborgs
    self.assertIsNone(child.create_sub_org('Grandchild Org'))

    # we should be linked to our parent with the same brand
    self.assertEqual(self.org, child.parent)
    self.assertEqual(self.org.brand, child.brand)

    # the sub account starts with zero credits
    self.assertEqual(0, child.get_credits_remaining())

    # and inherits defaults from the parent
    self.assertEqual(self.org.timezone, child.timezone)
    self.assertEqual(self.org.created_by, child.created_by)

    # allocate some credits to the sub org
    self.assertTrue(self.org.allocate_credits(self.admin, child, 700))
    self.assertEqual(700, child.get_credits_remaining())
    self.assertEqual(1300, self.org.get_credits_remaining())

    # a debit tracks the transaction
    debits = Debit.objects.filter(topup__org=self.org)
    self.assertEqual(1, len(debits))

    debit = debits.first()
    self.assertEqual(700, debit.amount)
    self.assertEqual(Debit.TYPE_ALLOCATION, debit.debit_type)
    self.assertEqual(topup_one.expires_on, debit.beneficiary.expires_on)

    # allocating more than we have fails and changes nothing
    self.assertFalse(self.org.allocate_credits(self.admin, child, 1301))
    self.assertEqual(700, child.get_credits_remaining())
    self.assertEqual(1300, self.org.get_credits_remaining())
    self.assertEqual(700, self.org._calculate_credits_used())

    # now allocate across the remaining topups
    self.assertTrue(self.org.allocate_credits(self.admin, child, 1200))
    self.assertEqual(1900, child.get_credits_remaining())
    self.assertEqual(1900, self.org.get_credits_used())
    self.assertEqual(100, self.org.get_credits_remaining())

    # clearing the caches should leave the amounts intact
    self.org._calculate_credit_caches()
    child._calculate_credit_caches()
    self.assertEqual(1900, child.get_credits_remaining())
    self.assertEqual(100, self.org.get_credits_remaining())

    # the second allocation created two more debits, for a total of three
    debits = Debit.objects.filter(topup__org=self.org).order_by('id')
    self.assertEqual(3, len(debits))

    # the last two debits should expire at same time as the topups that funded them
    self.assertEqual(topup_one.expires_on, debits[1].topup.expires_on)
    self.assertEqual(topup_two.expires_on, debits[2].topup.expires_on)

    # allocate exactly the remaining credits
    self.org.allocate_credits(self.admin, child, 100)
    self.assertEqual(2000, child.get_credits_remaining())
    self.assertEqual(0, self.org.get_credits_remaining())
def test_sub_org_ui(self):
    """Sub-org management UI is gated by tier and supports creating orgs and moving credits."""
    self.login(self.admin)

    settings.BRANDING[settings.DEFAULT_BRAND]['tiers'] = dict(multi_org=1000000)

    # put our org on the session
    session = self.client.session
    session['org_id'] = self.org.id
    session.save()

    response = self.client.get(reverse('orgs.org_home'))
    self.assertNotContains(response, 'Manage Organizations')

    # attempting to manage orgs should redirect home
    response = self.client.get(reverse('orgs.org_sub_orgs'))
    self.assertRedirect(response, reverse('orgs.org_home'))

    # so should creating a new sub org
    response = self.client.get(reverse('orgs.org_create_sub_org'))
    self.assertRedirect(response, reverse('orgs.org_home'))

    # posting is gated too
    org_fields = dict(name='Sub Org', timezone=self.org.timezone, date_format=self.org.date_format)
    response = self.client.post(reverse('orgs.org_create_sub_org'), org_fields)
    self.assertRedirect(response, reverse('orgs.org_home'))

    # same for transferring credits
    response = self.client.get(reverse('orgs.org_transfer_credits'))
    self.assertRedirect(response, reverse('orgs.org_home'))

    # and for managing sub-org users
    response = self.client.get(reverse('orgs.org_manage_accounts_sub_org'))
    self.assertRedirect(response, reverse('orgs.org_home'))

    # zero out our tier
    settings.BRANDING[settings.DEFAULT_BRAND]['tiers'] = dict(multi_org=0)
    self.assertTrue(self.org.is_multi_org_tier())
    response = self.client.get(reverse('orgs.org_home'))
    self.assertContains(response, 'Manage Organizations')

    # now we can list our orgs
    response = self.client.get(reverse('orgs.org_sub_orgs'))
    self.assertEqual(200, response.status_code)
    self.assertContains(response, 'Organizations')

    # add a sub org
    response = self.client.post(reverse('orgs.org_create_sub_org'), org_fields)
    self.assertRedirect(response, reverse('orgs.org_sub_orgs'))
    child = Org.objects.filter(name='Sub Org').first()
    self.assertIsNotNone(child)
    self.assertIn(self.admin, child.administrators.all())

    # load the transfer credit page
    response = self.client.get(reverse('orgs.org_transfer_credits'))
    self.assertEqual(200, response.status_code)

    # transferring more than we have should fail
    transfer = dict(from_org=self.org.id, to_org=child.id, amount=1500)
    response = self.client.post(reverse('orgs.org_transfer_credits'), transfer)
    self.assertContains(response, "Pick a different organization to transfer from")

    # a valid transfer should move the credits
    transfer = dict(from_org=self.org.id, to_org=child.id, amount=600)
    response = self.client.post(reverse('orgs.org_transfer_credits'), transfer)
    self.assertEqual(400, self.org.get_credits_remaining())
    self.assertEqual(600, child.get_credits_remaining())

    # the manage accounts page is reachable too now
    response = self.client.get('%s?org=%d' % (reverse('orgs.org_manage_accounts_sub_org'), child.id))
    self.assertEqual(200, response.status_code)

    # edit the sub org's name
    org_fields['name'] = 'New Sub Org Name'
    org_fields['slug'] = 'new-sub-org-name'
    response = self.client.post('%s?org=%s' % (reverse('orgs.org_edit_sub_org'), child.pk), org_fields)
    self.assertIsNotNone(Org.objects.filter(name='New Sub Org Name').first())

    # switching to the sub org should show its new topups
    session['org_id'] = child.id
    session.save()

    response = self.client.get(reverse('orgs.topup_list'))
    self.assertContains(response, '600 Credits')
class AnonOrgTest(TembaTest):
    """
    Tests the case where our organization is marked as anonymous, that is the phone numbers are masked
    for users.
    """
    def setUp(self):
        super(AnonOrgTest, self).setUp()

        # flag the test org as anonymous so URN paths must be masked everywhere
        self.org.is_anon = True
        self.org.save()

    def test_contacts(self):
        """Phone numbers must never appear on contact, message or flow pages for anon orgs."""
        # are there real phone numbers on the contact list page?
        contact = self.create_contact(None, "+250788123123")
        self.login(self.admin)

        # anon orgs display the zero-padded contact id instead of the number
        masked = "%010d" % contact.pk

        response = self.client.get(reverse('contacts.contact_list'))

        # phone not in the list
        self.assertNotContains(response, "788 123 123")

        # but the masked id is
        self.assertContains(response, masked)
        self.assertContains(response, ContactURN.ANON_MASK_HTML)

        # can't search for the number
        response = self.client.get(reverse('contacts.contact_list') + "?search=788")

        # can't look for 788 as that is in the search box..
        self.assertNotContains(response, "123123")

        # create a flow
        flow = self.create_flow(definition=self.COLOR_FLOW_DEFINITION)

        # start the contact down it
        flow.start([], [contact])

        # should have one SMS
        self.assertEqual(1, Msg.objects.all().count())

        # shouldn't show the number on the outgoing page
        response = self.client.get(reverse('msgs.msg_outbox'))
        self.assertNotContains(response, "788 123 123")

        # create an incoming SMS, check our flow page
        Msg.create_incoming(self.channel, contact.get_urn().urn, "Blue")
        response = self.client.get(reverse('msgs.msg_flow'))
        self.assertNotContains(response, "788 123 123")
        self.assertContains(response, masked)

        # send another, this will be in our inbox this time
        Msg.create_incoming(self.channel, contact.get_urn().urn, "Where's the beef?")
        response = self.client.get(reverse('msgs.msg_flow'))
        self.assertNotContains(response, "788 123 123")
        self.assertContains(response, masked)

        # contact detail page
        response = self.client.get(reverse('contacts.contact_read', args=[contact.uuid]))
        self.assertNotContains(response, "788 123 123")
        self.assertContains(response, masked)
class OrgCRUDLTest(TembaTest):
    """
    Tests org CRUDL views: granting new orgs, signup, timezone handling, URN schemes,
    case-insensitive login and customer-support servicing of orgs.
    """

    def test_org_grant(self):
        """Only members of the Granters group may create (grant) new orgs."""
        grant_url = reverse('orgs.org_grant')
        response = self.client.get(grant_url)
        self.assertRedirect(response, '/users/login/')

        self.user = self.create_user(username="tito")
        self.login(self.user)

        # a plain logged-in user still has no access
        response = self.client.get(grant_url)
        self.assertRedirect(response, '/users/login/')

        granters = Group.objects.get(name='Granters')
        self.user.groups.add(granters)

        response = self.client.get(grant_url)
        self.assertEqual(200, response.status_code)

        # fill out the form
        post_data = dict(email='john@carmack.com', first_name="John", last_name="Carmack",
                         name="Oculus", timezone="Africa/Kigali", credits="100000", password='dukenukem')
        response = self.client.post(grant_url, post_data, follow=True)
        self.assertContains(response, "created")

        org = Org.objects.get(name="Oculus")
        self.assertEqual(100000, org.get_credits_remaining())

        # check user exists and is admin
        User.objects.get(username="john@carmack.com")
        self.assertTrue(org.administrators.filter(username="john@carmack.com"))
        self.assertTrue(org.administrators.filter(username="tito"))

        # try a new org with a user that already exists instead
        del post_data['password']
        post_data['name'] = "id Software"
        response = self.client.post(grant_url, post_data, follow=True)
        self.assertContains(response, "created")

        org = Org.objects.get(name="id Software")
        self.assertEqual(100000, org.get_credits_remaining())

        self.assertTrue(org.administrators.filter(username="john@carmack.com"))
        self.assertTrue(org.administrators.filter(username="tito"))

    @patch("temba.orgs.views.OrgCRUDL.Signup.pre_process")
    def test_new_signup_with_user_logged_in(self, mock_pre_process):
        """Signing up while already logged in should create a fresh user/org, not reuse the session user."""
        mock_pre_process.return_value = None
        signup_url = reverse('orgs.org_signup')
        self.user = self.create_user(username="tito")

        self.login(self.user)

        response = self.client.get(signup_url)
        self.assertEqual(response.status_code, 200)

        post_data = dict(first_name="Kellan", last_name="Alexander", email="kellan@example.com",
                         password="HeyThere", name="AlexCom", timezone="Africa/Kigali")
        response = self.client.post(signup_url, post_data)
        self.assertEqual(response.status_code, 302)

        # should have a new user
        user = User.objects.get(username="kellan@example.com")
        self.assertEqual(user.first_name, "Kellan")
        self.assertEqual(user.last_name, "Alexander")
        self.assertEqual(user.email, "kellan@example.com")
        self.assertTrue(user.check_password("HeyThere"))
        self.assertTrue(user.api_token)  # should be able to generate an API token

        # should have a new org
        org = Org.objects.get(name="AlexCom")
        self.assertEqual(org.timezone, pytz.timezone("Africa/Kigali"))

        # of which our user is an administrator
        self.assertTrue(org.get_org_admins().filter(pk=user.pk))

        # not the logged in user at the signup time
        self.assertFalse(org.get_org_admins().filter(pk=self.user.pk))

    def test_org_signup(self):
        """End-to-end signup flow: validation errors, org/user creation, welcome topup and profile editing."""
        signup_url = reverse('orgs.org_signup')
        response = self.client.get(signup_url)
        self.assertEqual(response.status_code, 200)
        self.assertIn('name', response.context['form'].fields)

        # submit with missing fields
        response = self.client.post(signup_url, {})
        self.assertFormError(response, 'form', 'name', "This field is required.")
        self.assertFormError(response, 'form', 'first_name', "This field is required.")
        self.assertFormError(response, 'form', 'last_name', "This field is required.")
        self.assertFormError(response, 'form', 'email', "This field is required.")
        self.assertFormError(response, 'form', 'password', "This field is required.")
        self.assertFormError(response, 'form', 'timezone', "This field is required.")

        # submit with invalid password and email
        post_data = dict(first_name="Eugene", last_name="Rwagasore", email="bad_email",
                         password="badpass", name="Your Face", timezone="Africa/Kigali")
        response = self.client.post(signup_url, post_data)
        self.assertFormError(response, 'form', 'email', "Enter a valid email address.")
        self.assertFormError(response, 'form', 'password', "Passwords must contain at least 8 letters.")

        # submit with valid data (long email)
        post_data = dict(first_name="Eugene", last_name="Rwagasore", email="myal12345678901234567890@relieves.org",
                         password="HelloWorld1", name="Relieves World", timezone="Africa/Kigali")
        response = self.client.post(signup_url, post_data)
        self.assertEqual(response.status_code, 302)

        # should have a new user
        user = User.objects.get(username="myal12345678901234567890@relieves.org")
        self.assertEqual(user.first_name, "Eugene")
        self.assertEqual(user.last_name, "Rwagasore")
        self.assertEqual(user.email, "myal12345678901234567890@relieves.org")
        self.assertTrue(user.check_password("HelloWorld1"))
        self.assertTrue(user.api_token)  # should be able to generate an API token

        # should have a new org
        org = Org.objects.get(name="Relieves World")
        self.assertEqual(org.timezone, pytz.timezone("Africa/Kigali"))
        self.assertEqual(str(org), "Relieves World")
        self.assertEqual(org.slug, "relieves-world")

        # of which our user is an administrator
        self.assertTrue(org.get_org_admins().filter(pk=user.pk))

        # org should have 1000 credits
        self.assertEqual(org.get_credits_remaining(), 1000)

        # from a single welcome topup
        topup = TopUp.objects.get(org=org)
        self.assertEqual(topup.credits, 1000)
        self.assertEqual(topup.price, 0)

        # fake session set_org to make the test work
        user.set_org(org)

        # should now be able to go to channels page
        response = self.client.get(reverse('channels.channel_claim'))
        self.assertEqual(200, response.status_code)

        # check that we have all the tabs
        self.assertContains(response, reverse('msgs.msg_inbox'))
        self.assertContains(response, reverse('flows.flow_list'))
        self.assertContains(response, reverse('contacts.contact_list'))
        self.assertContains(response, reverse('channels.channel_list'))
        self.assertContains(response, reverse('orgs.org_home'))

        # a duplicate email must be rejected on a second signup
        post_data['name'] = "Relieves World Rwanda"
        response = self.client.post(signup_url, post_data)
        self.assertTrue('email' in response.context['form'].errors)

        # if we hit /login we'll be taken back to the channel page
        response = self.client.get(reverse('users.user_check_login'))
        self.assertRedirect(response, reverse('orgs.org_choose'))

        # but if we log out, same thing takes us to the login page
        self.client.logout()
        response = self.client.get(reverse('users.user_check_login'))
        self.assertRedirect(response, reverse('users.user_login'))

        # try going to the org home page, no dice
        response = self.client.get(reverse('orgs.org_home'))
        self.assertRedirect(response, reverse('users.user_login'))

        # log in as the user
        self.client.login(username="myal12345678901234567890@relieves.org", password="HelloWorld1")
        response = self.client.get(reverse('orgs.org_home'))
        self.assertEqual(200, response.status_code)

        # try setting our webhook and subscribe to one of the events
        response = self.client.post(reverse('orgs.org_webhook'), dict(webhook='http://fake.com/webhook.php', mt_sms=1))
        self.assertRedirect(response, reverse('orgs.org_home'))

        org = Org.objects.get(name="Relieves World")
        self.assertEqual("http://fake.com/webhook.php", org.get_webhook_url())
        self.assertTrue(org.is_notified_of_mt_sms())
        self.assertFalse(org.is_notified_of_mo_sms())
        self.assertFalse(org.is_notified_of_mt_call())
        self.assertFalse(org.is_notified_of_mo_call())
        self.assertFalse(org.is_notified_of_alarms())

        # try changing our username, wrong password
        post_data = dict(email='myal@wr.org', current_password='HelloWorld')
        response = self.client.post(reverse('orgs.user_edit'), post_data)
        self.assertEqual(200, response.status_code)
        self.assertTrue('current_password' in response.context['form'].errors)

        # bad new password
        post_data = dict(email='myal@wr.org', current_password='HelloWorld1', new_password='passwor')
        response = self.client.post(reverse('orgs.user_edit'), post_data)
        self.assertEqual(200, response.status_code)
        self.assertTrue('new_password' in response.context['form'].errors)

        User.objects.create(username='bill@msn.com', email='bill@msn.com')

        # dupe user
        post_data = dict(email='bill@msn.com', current_password='HelloWorld1')
        response = self.client.post(reverse('orgs.user_edit'), post_data)
        self.assertEqual(200, response.status_code)
        self.assertTrue('email' in response.context['form'].errors)

        post_data = dict(email='myal@wr.org', first_name="Myal", last_name="Greene", language="en-us", current_password='HelloWorld1')
        response = self.client.post(reverse('orgs.user_edit'), post_data)
        self.assertRedirect(response, reverse('orgs.org_home'))

        self.assertTrue(User.objects.get(username='myal@wr.org'))
        self.assertTrue(User.objects.get(email='myal@wr.org'))
        self.assertFalse(User.objects.filter(username='myal@relieves.org'))
        self.assertFalse(User.objects.filter(email='myal@relieves.org'))

        post_data['current_password'] = 'HelloWorld1'
        post_data['new_password'] = 'Password123'
        response = self.client.post(reverse('orgs.user_edit'), post_data)
        self.assertRedirect(response, reverse('orgs.org_home'))

        user = User.objects.get(username='myal@wr.org')
        self.assertTrue(user.check_password('Password123'))

    def test_org_timezone(self):
        """Inbox timestamps should render in the org's configured timezone."""
        self.assertEqual(self.org.timezone, pytz.timezone('Africa/Kigali'))

        Msg.create_incoming(self.channel, "tel:250788382382", "My name is Frank")

        self.login(self.admin)
        response = self.client.get(reverse('msgs.msg_inbox'), follow=True)

        # Check the message datetime
        created_on = response.context['object_list'][0].created_on.astimezone(self.org.timezone)
        self.assertIn(created_on.strftime("%I:%M %p").lower().lstrip('0'), response.content)

        # change the org timezone to "Africa/Nairobi"
        self.org.timezone = pytz.timezone('Africa/Nairobi')
        self.org.save()

        response = self.client.get(reverse('msgs.msg_inbox'), follow=True)

        # checkout the message should have the datetime changed by timezone
        created_on = response.context['object_list'][0].created_on.astimezone(self.org.timezone)
        self.assertIn(created_on.strftime("%I:%M %p").lower().lstrip('0'), response.content)

    def test_urn_schemes(self):
        """The org's send/receive schemes must reflect the roles of its active channels."""
        # remove existing channels
        Channel.objects.all().update(is_active=False, org=None)

        self.assertEqual(set(), self.org.get_schemes(Channel.ROLE_SEND))
        self.assertEqual(set(), self.org.get_schemes(Channel.ROLE_RECEIVE))

        # add a receive only tel channel
        Channel.create(self.org, self.user, 'RW', Channel.TYPE_TWILIO, "Nexmo", "0785551212", role="R", secret="45678", gcm_id="123")

        self.org = Org.objects.get(pk=self.org.pk)
        self.assertEqual(set(), self.org.get_schemes(Channel.ROLE_SEND))
        self.assertEqual({TEL_SCHEME}, self.org.get_schemes(Channel.ROLE_RECEIVE))

        # add a send/receive tel channel
        Channel.create(self.org, self.user, 'RW', Channel.TYPE_TWILIO, "Twilio", "0785553434", role="SR", secret="56789", gcm_id="456")
        self.org = Org.objects.get(pk=self.org.id)
        self.assertEqual({TEL_SCHEME}, self.org.get_schemes(Channel.ROLE_SEND))
        self.assertEqual({TEL_SCHEME}, self.org.get_schemes(Channel.ROLE_RECEIVE))

        # add a twitter channel
        Channel.create(self.org, self.user, None, 'TT', "Twitter")
        self.org = Org.objects.get(pk=self.org.id)
        self.assertEqual({TEL_SCHEME, TWITTER_SCHEME}, self.org.get_schemes(Channel.ROLE_SEND))
        self.assertEqual({TEL_SCHEME, TWITTER_SCHEME}, self.org.get_schemes(Channel.ROLE_RECEIVE))

    def test_login_case_not_sensitive(self):
        """Usernames are matched case-insensitively at login; passwords stay case sensitive."""
        login_url = reverse('users.user_login')

        User.objects.create_superuser("superuser", "superuser@group.com", "superuser")

        response = self.client.post(login_url, dict(username="superuser", password="superuser"))
        self.assertEqual(response.status_code, 302)

        response = self.client.post(login_url, dict(username="superuser", password="superuser"), follow=True)
        self.assertEqual(response.request['PATH_INFO'], reverse('orgs.org_manage'))

        response = self.client.post(login_url, dict(username="SUPeruser", password="superuser"))
        self.assertEqual(response.status_code, 302)

        response = self.client.post(login_url, dict(username="SUPeruser", password="superuser"), follow=True)
        self.assertEqual(response.request['PATH_INFO'], reverse('orgs.org_manage'))

        User.objects.create_superuser("withCAPS", "with_caps@group.com", "thePASSWORD")

        response = self.client.post(login_url, dict(username="withcaps", password="thePASSWORD"))
        self.assertEqual(response.status_code, 302)

        response = self.client.post(login_url, dict(username="withcaps", password="thePASSWORD"), follow=True)
        self.assertEqual(response.request['PATH_INFO'], reverse('orgs.org_manage'))

        # passwords stay case sensitive
        response = self.client.post(login_url, dict(username="withcaps", password="thepassword"), follow=True)
        self.assertTrue('form' in response.context)
        self.assertTrue(response.context['form'].errors)

    def test_org_service(self):
        """Customer-support reps can 'service' an org: act inside it, then return to org management."""
        # create a customer service user
        self.csrep = self.create_user("csrep")
        self.csrep.groups.add(Group.objects.get(name="Customer Support"))
        self.csrep.is_staff = True
        self.csrep.save()

        service_url = reverse('orgs.org_service')

        # without logging in, try to service our main org
        response = self.client.post(service_url, dict(organization=self.org.id))
        self.assertRedirect(response, '/users/login/')

        # try logging in with a normal user
        self.login(self.admin)

        # same thing, no permission
        response = self.client.post(service_url, dict(organization=self.org.id))
        self.assertRedirect(response, '/users/login/')

        # ok, log in as our cs rep
        self.login(self.csrep)

        # then service our org
        response = self.client.post(service_url, dict(organization=self.org.id))
        self.assertRedirect(response, '/msg/inbox/')

        # create a new contact
        response = self.client.post(reverse('contacts.contact_create'), data=dict(name='Ben Haggerty',
                                                                                  urn__tel__0='0788123123'))
        self.assertNoFormErrors(response)

        # make sure that contact's created on is our cs rep
        contact = Contact.objects.get(urns__path='+250788123123', org=self.org)
        self.assertEqual(self.csrep, contact.created_by)

        # make sure we can manage topups as well
        TopUp.objects.create(org=self.org, price=100, credits=1000, expires_on=timezone.now() + timedelta(days=30),
                             created_by=self.admin, modified_by=self.admin)

        response = self.client.get(reverse('orgs.topup_manage') + "?org=%d" % self.org.id)

        # i'd buy that for a dollar!
        self.assertContains(response, '$1.00')
        self.assertNotRedirect(response, '/users/login/')

        # ok, now end our session
        response = self.client.post(service_url, dict())
        self.assertRedirect(response, '/org/manage/')

        # can no longer go to inbox, asked to log in
        response = self.client.get(reverse('msgs.msg_inbox'))
        self.assertRedirect(response, '/users/login/')
class LanguageTest(TembaTest):
    """
    Tests org language configuration, ISO-639 language name lookup and localized text resolution.
    """

    def test_languages(self):
        """Setting, listing, searching and clearing an org's primary and translation languages."""
        url = reverse('orgs.org_languages')
        self.login(self.admin)

        # update our org with some language settings
        response = self.client.post(url, dict(primary_lang='fre', languages='hat,arc'))
        self.assertEqual(response.status_code, 302)
        self.org.refresh_from_db()

        self.assertEqual(self.org.primary_language.name, 'French')
        self.assertIsNotNone(self.org.languages.filter(name='French'))

        # everything after the paren should be stripped for aramaic
        self.assertIsNotNone(self.org.languages.filter(name='Official Aramaic'))

        # everything after the semi should be stripped for haitian
        self.assertIsNotNone(self.org.languages.filter(name='Haitian'))

        # check that the last load shows our new languages
        response = self.client.get(url)
        self.assertEqual(response.context['languages'], 'Haitian and Official Aramaic')
        self.assertContains(response, 'fre')
        self.assertContains(response, 'hat,arc')

        # three translation languages
        self.client.post(url, dict(primary_lang='fre', languages='hat,arc,spa'))
        response = self.client.get(reverse('orgs.org_languages'))
        self.assertEqual(response.context['languages'], 'Haitian, Official Aramaic and Spanish')

        # one translation language
        self.client.post(url, dict(primary_lang='fre', languages='hat'))
        response = self.client.get(reverse('orgs.org_languages'))
        self.assertEqual(response.context['languages'], 'Haitian')

        # remove all languages
        self.client.post(url, dict())
        self.org.refresh_from_db()
        self.assertIsNone(self.org.primary_language)
        self.assertFalse(self.org.languages.all())

        # search languages
        response = self.client.get('%s?search=fre' % url)
        results = response.json()['results']
        self.assertEqual(len(results), 4)

        # initial should do a match on code only
        response = self.client.get('%s?initial=fre' % url)
        results = response.json()['results']
        self.assertEqual(len(results), 1)

    def test_language_codes(self):
        """ISO code to name lookup, with parenthetical/semicolon suffix stripping and ordered search."""
        self.assertEqual('French', languages.get_language_name('fre'))
        self.assertEqual('Creoles and pidgins, English based', languages.get_language_name('cpe'))

        # should strip off anything after an open paren or semicolon
        self.assertEqual('Official Aramaic', languages.get_language_name('arc'))
        self.assertEqual('Haitian', languages.get_language_name('hat'))

        # check that search returns results and in the proper order
        matches = languages.search_language_names('Fre')
        self.assertEqual(4, len(matches))
        self.assertEqual('Creoles and pidgins, French-based', matches[0]['text'])
        self.assertEqual('French', matches[1]['text'])
        self.assertEqual('French, Middle (ca.1400-1600)', matches[2]['text'])
        self.assertEqual('French, Old (842-ca.1400)', matches[3]['text'])

        # try a language that doesn't exist
        self.assertEqual(None, languages.get_language_name('xyz'))

    def test_get_localized_text(self):
        """Localized text falls back through the preferred language list to the default."""
        text_translations = dict(eng="Hello", esp="Hola")

        # null case
        self.assertEqual(Language.get_localized_text(None, None, "Hi"), "Hi")

        # simple dictionary case
        self.assertEqual(Language.get_localized_text(text_translations, ['eng'], "Hi"), "Hello")

        # missing language case
        self.assertEqual(Language.get_localized_text(text_translations, ['fre'], "Hi"), "Hi")

        # secondary option
        self.assertEqual(Language.get_localized_text(text_translations, ['fre', 'esp'], "Hi"), "Hola")
class BulkExportTest(TembaTest):
    """
    Tests bulk export and import of flows, campaigns, triggers and groups, including
    dependency resolution, legacy-version migration and re-import semantics.
    """

    def test_get_dependencies(self):
        """A flow migrated from version 8 should still report the flow it triggers as a dependency."""
        # import a flow that triggers another flow
        contact1 = self.create_contact("Marshawn", "+14255551212")
        substitutions = dict(contact_id=contact1.id)
        flow = self.get_flow('triggered', substitutions)

        # read in the old version 8 raw json
        old_json = json.loads(self.get_import_json('triggered', substitutions))
        old_actions = old_json['flows'][1]['action_sets'][0]['actions']

        # splice our actionset with old bits
        actionset = flow.action_sets.all()[0]
        actionset.actions = json.dumps(old_actions)
        actionset.save()

        # fake our version number back to 8
        flow.version_number = 8
        flow.save()

        # now make sure a call to get dependencies succeeds and shows our flow
        triggeree = Flow.objects.filter(name='Triggeree').first()
        self.assertIn(triggeree, flow.get_dependencies())

    def test_trigger_flow(self):
        """An imported trigger-flow action keeps its reference to the triggered flow."""
        self.import_file('triggered_flow')

        flow = Flow.objects.filter(name='Trigger a Flow', org=self.org).first()
        definition = flow.as_json()
        actions = definition[Flow.ACTION_SETS][0]['actions']
        self.assertEqual(1, len(actions))
        self.assertEqual('Triggered Flow', actions[0]['flow']['name'])

    def test_trigger_dependency(self):
        # tests the case of us doing an export of only a single flow (despite dependencies) and making sure we
        # don't include the triggers of our dependent flows (which weren't exported)
        self.import_file('parent_child_trigger')

        parent = Flow.objects.filter(name='Parent Flow').first()

        self.login(self.admin)

        # export only the parent
        post_data = dict(flows=[parent.pk], campaigns=[])
        response = self.client.post(reverse('orgs.org_export'), post_data)

        exported = response.json()

        # shouldn't have any triggers
        self.assertFalse(exported['triggers'])

    def test_subflow_dependencies(self):
        """A parent flow depends on its subflow, and both appear in the same export bucket."""
        self.import_file('subflow')

        parent = Flow.objects.filter(name='Parent Flow').first()
        child = Flow.objects.filter(name='Child Flow').first()
        self.assertIn(child, parent.get_dependencies())

        self.login(self.admin)
        response = self.client.get(reverse('orgs.org_export'))

        soup = BeautifulSoup(response.content, "html.parser")
        group = str(soup.findAll("div", {"class": "exportables bucket"})[0])

        self.assertIn('Parent Flow', group)
        self.assertIn('Child Flow', group)

    def test_flow_export_dynamic_group(self):
        """Exporting and re-importing preserves dynamic group references like @contact.name."""
        flow = self.get_flow('favorites')

        # get one of our flow actionsets, change it to an AddToGroupAction
        actionset = ActionSet.objects.filter(flow=flow).order_by('y').first()

        # replace the actions
        from temba.flows.models import AddToGroupAction
        actionset.set_actions_dict([AddToGroupAction([dict(uuid='123', name="Other Group"), '@contact.name']).as_json()])
        actionset.save()

        # now let's export!
        self.login(self.admin)
        post_data = dict(flows=[flow.pk], campaigns=[])
        response = self.client.post(reverse('orgs.org_export'), post_data)
        exported = response.json()

        # try to import the flow
        flow.delete()
        Flow.import_flows(exported, self.org, self.admin)

        # make sure the created flow has the same action set
        flow = Flow.objects.filter(name="%s" % flow.name).first()
        actionset = ActionSet.objects.filter(flow=flow).order_by('y').first()
        self.assertTrue('@contact.name' in actionset.get_actions()[0].groups)

    def test_import_voice_flows_expiration_time(self):
        # all imported voice flows should have a max expiration time of 15 min
        self.get_flow('ivr_child_flow')

        self.assertEqual(Flow.objects.filter(flow_type=Flow.VOICE).count(), 1)
        voice_flow = Flow.objects.get(flow_type=Flow.VOICE)
        self.assertEqual(voice_flow.name, 'Voice Flow')
        self.assertEqual(voice_flow.expires_after_minutes, 15)

    def test_missing_flows_on_import(self):
        """Actions that start a flow missing from the import file are dropped, not created broken."""
        # import a flow that starts a missing flow
        self.import_file('start_missing_flow')

        # the flow that kicks off our missing flow
        flow = Flow.objects.get(name='Start Missing Flow')

        # make sure our missing flow is indeed not there
        self.assertIsNone(Flow.objects.filter(name='Missing Flow').first())

        # these two actionsets only have a single action that starts the missing flow
        # therefore they should not be created on import
        self.assertIsNone(ActionSet.objects.filter(flow=flow, y=160, x=90).first())
        self.assertIsNone(ActionSet.objects.filter(flow=flow, y=233, x=395).first())

        # should have this actionset, but only one action now since one was removed
        other_actionset = ActionSet.objects.filter(flow=flow, y=145, x=731).first()
        self.assertEqual(1, len(other_actionset.get_actions()))

        # now make sure it does the same thing from an actionset
        self.import_file('start_missing_flow_from_actionset')
        self.assertIsNotNone(Flow.objects.filter(name='Start Missing Flow').first())
        self.assertIsNone(Flow.objects.filter(name='Missing Flow').first())

    def test_import(self):
        """Import gating on purchased credits, stale export versions, and rollback on failure."""
        self.login(self.admin)

        # try importing without having purchased credits
        settings.BRANDING[settings.DEFAULT_BRAND]['tiers'] = dict(import_flows=1, multi_user=100000, multi_org=1000000)
        post_data = dict(import_file=open('%s/test_flows/new_mother.json' % settings.MEDIA_ROOT, 'rb'))
        response = self.client.post(reverse('orgs.org_import'), post_data)
        self.assertEqual(response.context['form'].errors['import_file'][0], 'Sorry, import is a premium feature')

        # now purchase some credits and try again
        TopUp.objects.create(org=self.org, price=1, credits=10000,
                             expires_on=timezone.now() + timedelta(days=30),
                             created_by=self.admin, modified_by=self.admin)

        # force our cache to reload
        self.org.get_credits_total(force_dirty=True)
        self.org.update_caches(OrgEvent.topup_updated, None)
        self.assertTrue(self.org.get_purchased_credits() > 0)

        # now try again with purchased credits, but our file is too old
        post_data = dict(import_file=open('%s/test_flows/too_old.json' % settings.MEDIA_ROOT, 'rb'))
        response = self.client.post(reverse('orgs.org_import'), post_data)
        self.assertEqual(response.context['form'].errors['import_file'][0], 'This file is no longer valid. Please export a new version and try again.')

        # simulate an unexpected exception during import
        with patch('temba.triggers.models.Trigger.import_triggers') as validate:
            validate.side_effect = Exception('Unexpected Error')
            post_data = dict(import_file=open('%s/test_flows/new_mother.json' % settings.MEDIA_ROOT, 'rb'))
            response = self.client.post(reverse('orgs.org_import'), post_data)
            self.assertEqual(response.context['form'].errors['import_file'][0], 'Sorry, your import file is invalid.')

            # trigger import failed, new flows that were added should get rolled back
            self.assertIsNone(Flow.objects.filter(org=self.org, name='New Mother').first())

    def test_import_campaign_with_translations(self):
        """Campaign events and flow actions keep their per-language translations on import."""
        self.import_file('campaign_import_with_translations')

        campaign = Campaign.objects.all().first()
        event = campaign.events.all().first()

        action_set = event.flow.action_sets.order_by('-y').first()
        actions = action_set.get_actions_dict()
        action_msg = actions[0]['msg']

        self.assertEqual(event.message['swa'], 'hello')
        self.assertEqual(event.message['eng'], 'Hey')

        # base language for this flow is 'swa' despite our org languages being unset
        self.assertEqual(event.flow.base_language, 'swa')

        self.assertEqual(action_msg['swa'], 'hello')
        self.assertEqual(action_msg['eng'], 'Hey')

    def test_export_import(self):
        """Full round-trip: import, modify, re-import (resets), export, re-import by id and by site."""
        def assert_object_counts():
            # expected object counts after any complete import of 'the_clinic'
            self.assertEqual(8, Flow.objects.filter(org=self.org, is_active=True, is_archived=False, flow_type='F').count())
            self.assertEqual(2, Flow.objects.filter(org=self.org, is_active=True, is_archived=False, flow_type='M').count())
            self.assertEqual(1, Campaign.objects.filter(org=self.org, is_archived=False).count())
            self.assertEqual(4, CampaignEvent.objects.filter(campaign__org=self.org, event_type='F').count())
            self.assertEqual(2, CampaignEvent.objects.filter(campaign__org=self.org, event_type='M').count())
            self.assertEqual(2, Trigger.objects.filter(org=self.org, trigger_type='K', is_archived=False).count())
            self.assertEqual(1, Trigger.objects.filter(org=self.org, trigger_type='C', is_archived=False).count())
            self.assertEqual(1, Trigger.objects.filter(org=self.org, trigger_type='M', is_archived=False).count())
            self.assertEqual(3, ContactGroup.user_groups.filter(org=self.org).count())
            self.assertEqual(1, Label.label_objects.filter(org=self.org).count())

        # import all our bits
        self.import_file('the_clinic')

        # check that the right number of objects successfully imported for our app
        assert_object_counts()

        # let's update some stuff
        confirm_appointment = Flow.objects.get(name='Confirm Appointment')
        confirm_appointment.expires_after_minutes = 60
        confirm_appointment.save()

        action_set = confirm_appointment.action_sets.order_by('-y').first()
        actions = action_set.get_actions_dict()
        actions[0]['msg']['base'] = 'Thanks for nothing'
        action_set.set_actions_dict(actions)
        action_set.save()

        trigger = Trigger.objects.filter(keyword='patient').first()
        trigger.flow = confirm_appointment
        trigger.save()

        message_flow = Flow.objects.filter(flow_type='M', events__offset=-1).order_by('pk').first()
        action_set = message_flow.action_sets.order_by('-y').first()
        actions = action_set.get_actions_dict()
        self.assertEqual("Hi there, just a quick reminder that you have an appointment at The Clinic at @contact.next_appointment. If you can't make it please call 1-888-THE-CLINIC.", actions[0]['msg']['base'])
        actions[0]['msg'] = 'No reminders for you!'
        action_set.set_actions_dict(actions)
        action_set.save()

        # now reimport
        self.import_file('the_clinic')

        # our flow should get reset from the import
        confirm_appointment = Flow.objects.get(pk=confirm_appointment.pk)
        action_set = confirm_appointment.action_sets.order_by('-y').first()
        actions = action_set.get_actions_dict()
        self.assertEqual("Thanks, your appointment at The Clinic has been confirmed for @contact.next_appointment. See you then!", actions[0]['msg']['base'])

        # same with our trigger
        trigger = Trigger.objects.filter(keyword='patient').first()
        self.assertEqual(Flow.objects.filter(name='Register Patient').first(), trigger.flow)

        # our old campaign message flow should be inactive now
        self.assertTrue(Flow.objects.filter(pk=message_flow.pk, is_active=False))

        # find our new message flow, and see that the original message is there
        message_flow = Flow.objects.filter(flow_type='M', events__offset=-1, is_active=True).order_by('pk').first()
        action_set = Flow.objects.get(pk=message_flow.pk).action_sets.order_by('-y').first()
        actions = action_set.get_actions_dict()
        self.assertEqual("Hi there, just a quick reminder that you have an appointment at The Clinic at @contact.next_appointment. If you can't make it please call 1-888-THE-CLINIC.", actions[0]['msg']['base'])

        # and we should have the same number of items as after the first import
        assert_object_counts()

        # see that everything shows up properly on our export page
        self.login(self.admin)
        response = self.client.get(reverse('orgs.org_export'))
        self.assertContains(response, 'Register Patient')
        self.assertContains(response, 'Catch All')
        self.assertContains(response, 'Missed Call')
        self.assertContains(response, 'Start Notifications')
        self.assertContains(response, 'Stop Notifications')
        self.assertContains(response, 'Confirm Appointment')
        self.assertContains(response, 'Appointment Followup')

        # our campaign
        self.assertContains(response, 'Appointment Schedule')

        # now let's export!
        post_data = dict(flows=[f.pk for f in Flow.objects.filter(flow_type='F')],
                         campaigns=[c.pk for c in Campaign.objects.all()])
        response = self.client.post(reverse('orgs.org_export'), post_data)
        exported = response.json()
        self.assertEqual(CURRENT_EXPORT_VERSION, exported.get('version', 0))
        self.assertEqual('https://app.rapidpro.io', exported.get('site', None))
        self.assertEqual(8, len(exported.get('flows', [])))
        self.assertEqual(4, len(exported.get('triggers', [])))
        self.assertEqual(1, len(exported.get('campaigns', [])))

        # set our org language to english
        self.org.set_languages(self.admin, ['eng', 'fre'], 'eng')

        # finally let's try importing our exported file
        self.org.import_app(exported, self.admin, site='http://app.rapidpro.io')
        assert_object_counts()

        message_flow = Flow.objects.filter(flow_type='M', events__offset=-1, is_active=True).order_by('pk').first()

        # make sure the base language is set to 'base', not 'eng'
        self.assertEqual(message_flow.base_language, 'base')

        # let's rename a flow and import our export again
        flow = Flow.objects.get(name='Confirm Appointment')
        flow.name = "A new flow"
        flow.save()

        campaign = Campaign.objects.all().first()
        campaign.name = "A new campagin"
        campaign.save()

        group = ContactGroup.user_groups.filter(name='Pending Appointments').first()
        group.name = "A new group"
        group.save()

        # it should fall back on ids and not create new objects even though the names changed
        self.org.import_app(exported, self.admin, site='http://app.rapidpro.io')
        assert_object_counts()

        # and our objets should have the same names as before
        self.assertEqual('Confirm Appointment', Flow.objects.get(pk=flow.pk).name)
        self.assertEqual('Appointment Schedule', Campaign.objects.all().first().name)
        self.assertEqual('Pending Appointments', ContactGroup.user_groups.get(pk=group.pk).name)

        # let's rename our objects again
        flow.name = "A new name"
        flow.save()

        campaign.name = "A new campagin"
        campaign.save()

        group.name = "A new group"
        group.save()

        # now import the same import but pretend its from a different site
        self.org.import_app(exported, self.admin, site='http://temba.io')

        # the newly named objects won't get updated in this case and we'll create new ones instead
        self.assertEqual(9, Flow.objects.filter(org=self.org, is_archived=False, flow_type='F').count())
        self.assertEqual(2, Campaign.objects.filter(org=self.org, is_archived=False).count())
        self.assertEqual(4, ContactGroup.user_groups.filter(org=self.org).count())

        # now archive a flow
        register = Flow.objects.filter(name='Register Patient').first()
        register.is_archived = True
        register.save()

        # default view shouldn't show archived flows
        response = self.client.get(reverse('orgs.org_export'))
        self.assertNotContains(response, 'Register Patient')

        # with the archived flag one, it should be there
        response = self.client.get("%s?archived=1" % reverse('orgs.org_export'))
        self.assertContains(response, 'Register Patient')

        # delete our flow, and reimport
        confirm_appointment.delete()
        self.org.import_app(exported, self.admin, site='https://app.rapidpro.io')

        # make sure we have the previously exported expiration
        confirm_appointment = Flow.objects.get(name='Confirm Appointment')
        self.assertEqual(60, confirm_appointment.expires_after_minutes)

        # now delete a flow
        register = Flow.objects.filter(name='Register Patient').first()
        register.is_active = False
        register.save()

        # default view shouldn't show deleted flows
        response = self.client.get(reverse('orgs.org_export'))
        self.assertNotContains(response, 'Register Patient')

        # even with the archived flag one deleted flows should not show up
        response = self.client.get("%s?archived=1" % reverse('orgs.org_export'))
        self.assertNotContains(response, 'Register Patient')
class CreditAlertTest(TembaTest):
    """Tests for CreditAlert lifecycle across the three alert types
    (over-credits, low-credits, expiring-credits): creation, email
    notification, de-duplication and reset."""

    def test_check_org_credits(self):
        """check_org_credits() should create at most one active alert per
        type, email the org admin exactly once per new alert, and only
        re-alert after CreditAlert.reset_for_org() clears the active one.

        NOTE: the expected mail.outbox lengths are cumulative across the
        whole method, so the assertion order below matters.
        """
        self.joe = self.create_contact("Joe Blow", "123")
        self.create_msg(contact=self.joe)
        with self.settings(HOSTNAME="rapidpro.io", SEND_EMAILS=True):
            with patch('temba.orgs.models.Org.get_credits_remaining') as mock_get_credits_remaining:
                # negative remaining credits -> org is over its credits
                mock_get_credits_remaining.return_value = -1

                # no alert yet
                self.assertFalse(CreditAlert.objects.all())

                CreditAlert.check_org_credits()

                # one alert created and sent
                self.assertEquals(1, CreditAlert.objects.filter(is_active=True, org=self.org,
                                                                alert_type=ORG_CREDIT_OVER).count())
                self.assertEquals(1, len(mail.outbox))

                # alert email is for out of credits type
                sent_email = mail.outbox[0]
                self.assertEqual(len(sent_email.to), 1)
                self.assertTrue('RapidPro account for Temba' in sent_email.body)
                self.assertTrue('is out of credit.' in sent_email.body)

                # no new alert if one is sent and no new email
                CreditAlert.check_org_credits()
                self.assertEquals(1, CreditAlert.objects.filter(is_active=True, org=self.org,
                                                                alert_type=ORG_CREDIT_OVER).count())
                self.assertEquals(1, len(mail.outbox))

                # reset alerts
                CreditAlert.reset_for_org(self.org)
                self.assertFalse(CreditAlert.objects.filter(org=self.org, is_active=True))

                # can resend a new alert
                CreditAlert.check_org_credits()
                self.assertEquals(1, CreditAlert.objects.filter(is_active=True, org=self.org,
                                                                alert_type=ORG_CREDIT_OVER).count())
                self.assertEquals(2, len(mail.outbox))

                # back to a positive balance so the OVER alert no longer applies
                mock_get_credits_remaining.return_value = 10

                with patch('temba.orgs.models.Org.has_low_credits') as mock_has_low_credits:
                    mock_has_low_credits.return_value = True

                    self.assertFalse(CreditAlert.objects.filter(org=self.org, alert_type=ORG_CREDIT_LOW))

                    CreditAlert.check_org_credits()

                    # low credit alert created and email sent
                    self.assertEquals(1, CreditAlert.objects.filter(is_active=True, org=self.org,
                                                                    alert_type=ORG_CREDIT_LOW).count())
                    self.assertEquals(3, len(mail.outbox))

                    # email sent
                    sent_email = mail.outbox[2]
                    self.assertEqual(len(sent_email.to), 1)
                    self.assertTrue('RapidPro account for Temba' in sent_email.body)
                    self.assertTrue('is running low on credits' in sent_email.body)

                    # no new alert if one is sent and no new email
                    CreditAlert.check_org_credits()
                    self.assertEquals(1, CreditAlert.objects.filter(is_active=True, org=self.org,
                                                                    alert_type=ORG_CREDIT_LOW).count())
                    self.assertEquals(3, len(mail.outbox))

                    # reset alerts
                    CreditAlert.reset_for_org(self.org)
                    self.assertFalse(CreditAlert.objects.filter(org=self.org, is_active=True))

                    # can resend a new alert
                    CreditAlert.check_org_credits()
                    self.assertEquals(1, CreditAlert.objects.filter(is_active=True, org=self.org,
                                                                    alert_type=ORG_CREDIT_LOW).count())
                    self.assertEquals(4, len(mail.outbox))

                    # credits no longer low; now exercise the expiring-credits alert
                    mock_has_low_credits.return_value = False

                    with patch('temba.orgs.models.Org.get_credits_expiring_soon') as mock_get_credits_exipiring_soon:
                        mock_get_credits_exipiring_soon.return_value = 0

                        self.assertFalse(CreditAlert.objects.filter(org=self.org, alert_type=ORG_CREDIT_EXPIRING))

                        CreditAlert.check_org_credits()

                        # no alert since no expiring credits
                        self.assertFalse(CreditAlert.objects.filter(org=self.org, alert_type=ORG_CREDIT_EXPIRING))

                        mock_get_credits_exipiring_soon.return_value = 200

                        CreditAlert.check_org_credits()

                        # expiring credit alert created and email sent
                        self.assertEquals(1, CreditAlert.objects.filter(is_active=True, org=self.org,
                                                                        alert_type=ORG_CREDIT_EXPIRING).count())
                        self.assertEquals(5, len(mail.outbox))

                        # email sent
                        sent_email = mail.outbox[4]
                        self.assertEqual(len(sent_email.to), 1)
                        self.assertTrue('RapidPro account for Temba' in sent_email.body)
                        self.assertTrue('expiring credits in less than one month.' in sent_email.body)

                        # no new alert if one is sent and no new email
                        CreditAlert.check_org_credits()
                        self.assertEquals(1, CreditAlert.objects.filter(is_active=True, org=self.org,
                                                                        alert_type=ORG_CREDIT_EXPIRING).count())
                        self.assertEquals(5, len(mail.outbox))

                        # reset alerts
                        CreditAlert.reset_for_org(self.org)
                        self.assertFalse(CreditAlert.objects.filter(org=self.org, is_active=True))

                        # can resend a new alert
                        CreditAlert.check_org_credits()
                        self.assertEquals(1, CreditAlert.objects.filter(is_active=True, org=self.org,
                                                                        alert_type=ORG_CREDIT_EXPIRING).count())
                        self.assertEquals(6, len(mail.outbox))
class UnreadCountTest(FlowFileTest):
    """Tests that incoming messages increment the correct unread counters
    (flow vs inbox), that visiting the relevant pane clears them, and that
    test contacts never affect the counts."""

    def test_unread_count_test(self):
        """Counts are cumulative and order-dependent: each create_msg /
        process_message below changes the expected context values that
        follow, so the sequence must not be reordered."""
        flow = self.get_flow('favorites')

        # create a trigger for 'favs'
        Trigger.objects.create(org=self.org, flow=flow, keyword='favs', created_by=self.admin, modified_by=self.admin)

        # start our flow by firing an incoming message
        contact = self.create_contact('Anakin Skywalker', '+12067791212')
        msg = self.create_msg(contact=contact, text="favs")

        # process it
        Msg.process_message(msg)

        # our flow unread count should have gone up
        self.assertEquals(1, flow.get_and_clear_unread_responses())

        # cleared by the first call
        self.assertEquals(0, flow.get_and_clear_unread_responses())

        # at this point our flow should have started.. go to our trigger list page to see if our context is correct
        self.login(self.admin)
        trigger_list = reverse('triggers.trigger_list')
        response = self.client.get(trigger_list)

        self.assertEquals(0, response.context['msgs_unread_count'])
        self.assertEquals(1, response.context['flows_unread_count'])

        # answer another question in the flow
        msg = self.create_msg(contact=contact, text="red")
        Msg.process_message(msg)

        response = self.client.get(trigger_list)
        self.assertEquals(0, response.context['msgs_unread_count'])
        self.assertEquals(2, response.context['flows_unread_count'])

        # finish the flow and send a message outside it
        msg = self.create_msg(contact=contact, text="primus")
        Msg.process_message(msg)

        msg = self.create_msg(contact=contact, text="nic")
        Msg.process_message(msg)

        # "Hello?" arrives after the flow has completed, so it lands in the inbox
        msg = self.create_msg(contact=contact, text="Hello?")
        Msg.process_message(msg)

        response = self.client.get(trigger_list)
        self.assertEquals(4, response.context['flows_unread_count'])
        self.assertEquals(1, response.context['msgs_unread_count'])

        # visit the msg pane (clears the inbox count only)
        response = self.client.get(reverse('msgs.msg_inbox'))
        self.assertEquals(4, response.context['flows_unread_count'])
        self.assertEquals(0, response.context['msgs_unread_count'])

        # now the flow list pane (clears the flow count)
        response = self.client.get(reverse('flows.flow_list'))
        self.assertEquals(0, response.context['flows_unread_count'])
        self.assertEquals(0, response.context['msgs_unread_count'])

        # make sure a test contact doesn't update our counts
        test_contact = self.create_contact("Test Contact", "+12065551214", is_test=True)

        msg = self.create_msg(contact=test_contact, text="favs")
        Msg.process_message(msg)

        # assert our counts weren't updated
        self.assertEquals(0, self.org.get_unread_msg_count(UNREAD_INBOX_MSGS))
        self.assertEquals(0, self.org.get_unread_msg_count(UNREAD_FLOW_MSGS))

        # wasn't counted for the individual flow
        self.assertEquals(0, flow.get_and_clear_unread_responses())
class EmailContextProcessorsTest(SmartminTest):
    """Checks that the branding middleware plus link_components() yield the
    expected protocol/hostname pair, and that outgoing password-reset mail
    carries the brand domain."""

    def setUp(self):
        super(EmailContextProcessorsTest, self).setUp()
        self.admin = self.create_user("Administrator")
        self.middleware = BrandingMiddleware()

    def test_link_components(self):
        self.request = Mock(spec=HttpRequest)
        self.request.get_host.return_value = "rapidpro.io"

        # the branding middleware never short-circuits with its own response
        self.assertIsNone(self.middleware.process_request(self.request))

        expected = dict(protocol="https", hostname="app.rapidpro.io")
        self.assertEqual(link_components(self.request, self.admin), expected)

        with self.settings(HOSTNAME="rapidpro.io"):
            payload = {'email': 'nouser@nouser.com'}
            self.client.post(reverse('users.user_forget'), payload, follow=True)

            self.assertEqual(1, len(mail.outbox))

            email = mail.outbox[0]
            self.assertEqual(len(email.to), 1)
            self.assertEqual(email.to[0], 'nouser@nouser.com')

            # we have the domain of rapipro.io brand
            self.assertTrue('app.rapidpro.io' in email.body)
class TestStripeCredits(TembaTest):
    """Tests for Org.add_credits() against mocked Stripe endpoints: card and
    bitcoin purchases, failure handling, and customer reuse/replacement.

    BUG FIX: the original used ``self.assertTrue(2000, self.org.get_credits_total())``
    in several places.  assertTrue's second positional argument is the failure
    *message*, so those assertions always passed regardless of the actual
    credit total.  They are replaced with real ``assertEqual`` checks below.
    """

    @patch('stripe.Customer.create')
    @patch('stripe.Charge.create')
    @override_settings(SEND_EMAILS=True)
    def test_add_credits(self, charge_create, customer_create):
        """A successful card charge should add credits, record the charge and
        customer ids, and send a receipt email."""
        customer_create.return_value = dict_to_struct('Customer', dict(id='stripe-cust-1'))
        charge_create.return_value = \
            dict_to_struct('Charge', dict(id='stripe-charge-1',
                                          card=dict_to_struct('Card', dict(last4='1234', type='Visa', name='Rudolph'))))

        settings.BRANDING[settings.DEFAULT_BRAND]['bundles'] = (dict(cents="2000", credits=1000, feature=""),)

        self.org.add_credits('2000', 'stripe-token', self.admin)

        # 1,000 initial credits plus the 1,000 purchased in this bundle
        # (was assertTrue(2000, ...) which never actually checked anything)
        self.assertEqual(2000, self.org.get_credits_total())

        # assert we saved our charge info
        topup = self.org.topups.last()
        self.assertEqual('stripe-charge-1', topup.stripe_charge)

        # and we saved our stripe customer info
        org = Org.objects.get(id=self.org.id)
        self.assertEqual('stripe-cust-1', org.stripe_customer)

        # assert we sent our confirmation email
        self.assertEqual(1, len(mail.outbox))
        email = mail.outbox[0]
        self.assertEquals("RapidPro Receipt", email.subject)
        self.assertTrue('Rudolph' in email.body)
        self.assertTrue('Visa' in email.body)
        self.assertTrue('$20' in email.body)

    @patch('stripe.Customer.create')
    @patch('stripe.Charge.create')
    @override_settings(SEND_EMAILS=True)
    def test_add_btc_credits(self, charge_create, customer_create):
        """A bitcoin charge (card=None, bitcoin source) should also add
        credits and send a receipt mentioning the bitcoin address."""
        customer_create.return_value = dict_to_struct('Customer', dict(id='stripe-cust-1'))
        charge_create.return_value = \
            dict_to_struct('Charge', dict(id='stripe-charge-1', card=None,
                                          source=dict_to_struct('Source',
                                                                dict(bitcoin=dict_to_struct('Bitcoin', dict(address='abcde'))))))

        settings.BRANDING[settings.DEFAULT_BRAND]['bundles'] = (dict(cents="2000", credits=1000, feature=""),)

        self.org.add_credits('2000', 'stripe-token', self.admin)

        # 1,000 initial + 1,000 purchased (was a no-op assertTrue)
        self.assertEqual(2000, self.org.get_credits_total())

        # assert we saved our charge info
        topup = self.org.topups.last()
        self.assertEqual('stripe-charge-1', topup.stripe_charge)

        # and we saved our stripe customer info
        org = Org.objects.get(id=self.org.id)
        self.assertEqual('stripe-cust-1', org.stripe_customer)

        # assert we sent our confirmation email
        self.assertEqual(1, len(mail.outbox))
        email = mail.outbox[0]
        self.assertEquals("RapidPro Receipt", email.subject)
        self.assertTrue('bitcoin' in email.body)
        self.assertTrue('abcde' in email.body)
        self.assertTrue('$20' in email.body)

    @patch('stripe.Customer.create')
    def test_add_credits_fail(self, customer_create):
        """A Stripe failure should raise ValidationError and leave credits,
        topups and the outbox untouched."""
        customer_create.side_effect = ValueError("Invalid customer token")

        with self.assertRaises(ValidationError):
            self.org.add_credits('2000', 'stripe-token', self.admin)

        # assert no email was sent
        self.assertEqual(0, len(mail.outbox))

        # and no topups created
        self.assertEqual(1, self.org.topups.all().count())
        self.assertEqual(1000, self.org.get_credits_total())

    def test_add_credits_invalid_bundle(self):
        """An unknown bundle key should be rejected before hitting Stripe."""
        with self.assertRaises(ValidationError):
            self.org.add_credits('-10', 'stripe-token', self.admin)

        # assert no email was sent
        self.assertEqual(0, len(mail.outbox))

        # and no topups created
        self.assertEqual(1, self.org.topups.all().count())
        self.assertEqual(1000, self.org.get_credits_total())

    @patch('stripe.Customer.create')
    @patch('stripe.Customer.retrieve')
    @patch('stripe.Charge.create')
    @override_settings(SEND_EMAILS=True)
    def test_add_credits_existing_customer(self, charge_create, customer_retrieve, customer_create):
        """Purchasing with the same user reuses the stored Stripe customer;
        purchasing with a different user creates a new one."""
        self.admin2 = self.create_user("Administrator 2")
        self.org.administrators.add(self.admin2)

        self.org.stripe_customer = 'stripe-cust-1'
        self.org.save()

        class MockCard(object):
            def __init__(self):
                self.id = 'stripe-card-1'

            def delete(self):
                pass

        class MockCards(object):
            def all(self):
                return dict_to_struct('MockCardData', dict(data=[MockCard(), MockCard()]))

            def create(self, card):
                return MockCard()

        class MockCustomer(object):
            def __init__(self, id, email):
                self.id = id
                self.email = email
                self.cards = MockCards()

            def save(self):
                pass

        customer_retrieve.return_value = MockCustomer(id='stripe-cust-1', email=self.admin.email)
        customer_create.return_value = MockCustomer(id='stripe-cust-2', email=self.admin2.email)

        charge_create.return_value = \
            dict_to_struct('Charge', dict(id='stripe-charge-1',
                                          card=dict_to_struct('Card', dict(last4='1234', type='Visa', name='Rudolph'))))

        settings.BRANDING[settings.DEFAULT_BRAND]['bundles'] = (dict(cents="2000", credits=1000, feature=""),)

        self.org.add_credits('2000', 'stripe-token', self.admin)

        # 1,000 initial + 1,000 purchased (was a no-op assertTrue)
        self.assertEqual(2000, self.org.get_credits_total())

        # assert we saved our charge info
        topup = self.org.topups.last()
        self.assertEqual('stripe-charge-1', topup.stripe_charge)

        # and we saved our stripe customer info
        org = Org.objects.get(id=self.org.id)
        self.assertEqual('stripe-cust-1', org.stripe_customer)

        # assert we sent our confirmation email
        self.assertEqual(1, len(mail.outbox))
        email = mail.outbox[0]
        self.assertEquals("RapidPro Receipt", email.subject)
        self.assertTrue('Rudolph' in email.body)
        self.assertTrue('Visa' in email.body)
        self.assertTrue('$20' in email.body)

        # do it again with a different user, should create a new stripe customer
        self.org.add_credits('2000', 'stripe-token', self.admin2)

        # 1,000 initial + 2 x 1,000 purchased (the original wrote
        # assertTrue(4000, ...), which asserted nothing at all)
        self.assertEqual(3000, self.org.get_credits_total())

        # should have a different customer now
        org = Org.objects.get(id=self.org.id)
        self.assertEqual('stripe-cust-2', org.stripe_customer)
class ParsingTest(TembaTest):
    """Tests for Org.parse_decimal()."""

    def test_parse_decimal(self):
        """parse_decimal() should accept plain and small-exponent decimals and
        return None for everything else (non-numeric text, huge exponents,
        out-of-range magnitudes, empty strings, NaN and Infinity)."""
        cases = [
            ("Not num", None),
            ("00.123", Decimal("0.123")),
            ("6e33", None),
            ("6e5", Decimal("600000")),
            ("9999999999999999999999999", None),
            ("", None),
            ("NaN", None),
            ("Infinity", None),
        ]
        for raw, expected in cases:
            self.assertEqual(self.org.parse_decimal(raw), expected)
| agpl-3.0 |
ormnv/os_final_project | django/contrib/formtools/wizard/storage/base.py | 216 | 3949 | from django.core.files.uploadedfile import UploadedFile
from django.utils.datastructures import MultiValueDict
from django.utils.functional import lazy_property
from django.utils import six
from django.contrib.formtools.wizard.storage.exceptions import NoFileStorageConfigured
class BaseStorage(object):
    """Abstract storage backend for form wizards.

    Tracks the wizard's current step, per-step cleaned form data, per-step
    uploaded files and arbitrary extra data inside ``self.data`` under a
    prefixed key.  Subclasses supply the persistence mechanism (e.g. session
    or cookie) that loads/stores ``self.data``.
    """

    # keys inside self.data
    step_key = 'step'
    step_data_key = 'step_data'
    step_files_key = 'step_files'
    extra_data_key = 'extra_data'

    def __init__(self, prefix, request=None, file_storage=None):
        """Bind this storage to a wizard *prefix*; *file_storage* is required
        only if the wizard handles file uploads."""
        self.prefix = 'wizard_%s' % prefix
        self.request = request
        self.file_storage = file_storage

    def init_data(self):
        """(Re)create the empty storage structure."""
        self.data = {
            self.step_key: None,
            self.step_data_key: {},
            self.step_files_key: {},
            self.extra_data_key: {},
        }

    def reset(self):
        """Discard all stored wizard state."""
        self.init_data()

    def _get_current_step(self):
        return self.data[self.step_key]

    def _set_current_step(self, step):
        self.data[self.step_key] = step

    # exposed as a settable property; lazy_property defers to the subclass's
    # overridden getter/setter if any
    current_step = lazy_property(_get_current_step, _set_current_step)

    def _get_extra_data(self):
        return self.data[self.extra_data_key]

    def _set_extra_data(self, extra_data):
        self.data[self.extra_data_key] = extra_data

    extra_data = lazy_property(_get_extra_data, _set_extra_data)

    def get_step_data(self, step):
        """Return the stored form data for *step* as a MultiValueDict, or
        None if nothing was stored."""
        # When reading the serialized data, upconvert it to a MultiValueDict,
        # some serializers (json) don't preserve the type of the object.
        values = self.data[self.step_data_key].get(step, None)
        if values is not None:
            values = MultiValueDict(values)
        return values

    def set_step_data(self, step, cleaned_data):
        """Store *cleaned_data* for *step*, normalizing MultiValueDicts to a
        plain dict of lists so serialization is lossless."""
        # If the value is a MultiValueDict, convert it to a regular dict of the
        # underlying contents. Some serializers call the public API on it (as
        # opposed to the underlying dict methods), in which case the content
        # can be truncated (__getitem__ returns only the first item).
        if isinstance(cleaned_data, MultiValueDict):
            cleaned_data = dict(cleaned_data.lists())
        self.data[self.step_data_key][step] = cleaned_data

    @property
    def current_step_data(self):
        """Form data stored for the current step (or None)."""
        return self.get_step_data(self.current_step)

    def get_step_files(self, step):
        """Rehydrate the UploadedFile objects stored for *step*.

        Returns a dict of field name -> UploadedFile, or None if no files
        were stored.  Raises NoFileStorageConfigured when files exist but no
        file_storage was configured.
        """
        wizard_files = self.data[self.step_files_key].get(step, {})

        if wizard_files and not self.file_storage:
            raise NoFileStorageConfigured(
                "You need to define 'file_storage' in your "
                "wizard view in order to handle file uploads.")

        files = {}
        for field, field_dict in six.iteritems(wizard_files):
            field_dict = field_dict.copy()
            tmp_name = field_dict.pop('tmp_name')
            files[field] = UploadedFile(
                file=self.file_storage.open(tmp_name), **field_dict)
        return files or None

    def set_step_files(self, step, files):
        """Persist uploaded *files* for *step* into file_storage, keeping only
        serializable metadata (temp name, original name, content type, size,
        charset) in self.data."""
        if files and not self.file_storage:
            raise NoFileStorageConfigured(
                "You need to define 'file_storage' in your "
                "wizard view in order to handle file uploads.")

        if step not in self.data[self.step_files_key]:
            self.data[self.step_files_key][step] = {}

        for field, field_file in six.iteritems(files or {}):
            tmp_filename = self.file_storage.save(field_file.name, field_file)
            file_dict = {
                'tmp_name': tmp_filename,
                'name': field_file.name,
                'content_type': field_file.content_type,
                'size': field_file.size,
                'charset': field_file.charset
            }
            self.data[self.step_files_key][step][field] = file_dict

    @property
    def current_step_files(self):
        """Uploaded files stored for the current step (or None)."""
        return self.get_step_files(self.current_step)

    def update_response(self, response):
        """Hook for subclasses that must write state onto the response
        (e.g. cookie storage); the base implementation does nothing."""
        pass
| bsd-3-clause |
blowmage/gcloud-python | gcloud/datastore/test_helpers.py | 1 | 21896 | # Copyright 2014 Google Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import unittest2
class Test_entity_from_protobuf(unittest2.TestCase):
    """Tests for helpers.entity_from_protobuf: converting an Entity protobuf
    (key, indexed/unindexed properties, nested entities) into an Entity."""

    def setUp(self):
        from gcloud.datastore._testing import _setup_defaults
        _setup_defaults(self)

    def tearDown(self):
        from gcloud.datastore._testing import _tear_down_defaults
        _tear_down_defaults(self)

    def _callFUT(self, val):
        """Invoke the function under test on *val* and return the result."""
        from gcloud.datastore.helpers import entity_from_protobuf
        return entity_from_protobuf(val)

    def test_it(self):
        """Round-trip an entity pb with indexed, unindexed and list
        properties; unindexed fields should land in exclude_from_indexes."""
        from gcloud.datastore import _datastore_v1_pb2 as datastore_pb

        _DATASET_ID = 'DATASET'
        _KIND = 'KIND'
        _ID = 1234
        entity_pb = datastore_pb.Entity()
        entity_pb.key.partition_id.dataset_id = _DATASET_ID
        entity_pb.key.path_element.add(kind=_KIND, id=_ID)

        # indexed scalar property
        prop_pb = entity_pb.property.add()
        prop_pb.name = 'foo'
        prop_pb.value.string_value = 'Foo'

        # unindexed scalar property
        unindexed_prop_pb = entity_pb.property.add()
        unindexed_prop_pb.name = 'bar'
        unindexed_prop_pb.value.integer_value = 10
        unindexed_prop_pb.value.indexed = False

        # list property whose (single) element is unindexed
        list_prop_pb1 = entity_pb.property.add()
        list_prop_pb1.name = 'baz'
        list_pb1 = list_prop_pb1.value.list_value

        unindexed_value_pb = list_pb1.add()
        unindexed_value_pb.integer_value = 11
        unindexed_value_pb.indexed = False

        # list property whose (single) element is indexed
        list_prop_pb2 = entity_pb.property.add()
        list_prop_pb2.name = 'qux'
        list_pb2 = list_prop_pb2.value.list_value

        indexed_value_pb = list_pb2.add()
        indexed_value_pb.integer_value = 12
        indexed_value_pb.indexed = True

        entity = self._callFUT(entity_pb)
        self.assertEqual(entity.kind, _KIND)
        self.assertEqual(entity.exclude_from_indexes,
                         frozenset(['bar', 'baz']))
        entity_props = dict(entity)
        self.assertEqual(entity_props,
                         {'foo': 'Foo', 'bar': 10, 'baz': [11], 'qux': [12]})

        # Also check the key.
        key = entity.key
        self.assertEqual(key.dataset_id, _DATASET_ID)
        self.assertEqual(key.namespace, None)
        self.assertEqual(key.kind, _KIND)
        self.assertEqual(key.id, _ID)

    def test_mismatched_value_indexed(self):
        """A list value whose elements disagree on 'indexed' must raise."""
        from gcloud.datastore import _datastore_v1_pb2 as datastore_pb

        _DATASET_ID = 'DATASET'
        _KIND = 'KIND'
        _ID = 1234
        entity_pb = datastore_pb.Entity()
        entity_pb.key.partition_id.dataset_id = _DATASET_ID
        entity_pb.key.path_element.add(kind=_KIND, id=_ID)

        list_prop_pb = entity_pb.property.add()
        list_prop_pb.name = 'baz'
        list_pb = list_prop_pb.value.list_value

        unindexed_value_pb1 = list_pb.add()
        unindexed_value_pb1.integer_value = 10
        unindexed_value_pb1.indexed = False

        # second element indexed=True conflicts with the first
        unindexed_value_pb2 = list_pb.add()
        unindexed_value_pb2.integer_value = 11
        unindexed_value_pb2.indexed = True

        with self.assertRaises(ValueError):
            self._callFUT(entity_pb)

    def test_entity_no_key(self):
        """An entity pb without a key yields an Entity with key=None."""
        from gcloud.datastore import _datastore_v1_pb2 as datastore_pb

        entity_pb = datastore_pb.Entity()
        entity = self._callFUT(entity_pb)

        self.assertEqual(entity.key, None)
        self.assertEqual(dict(entity), {})

    def test_nested_entity_no_key(self):
        """An entity-valued property is converted to a nested Entity whose
        own key is None."""
        from gcloud.datastore import _datastore_v1_pb2 as datastore_pb

        DATASET_ID = 's~FOO'
        KIND = 'KIND'
        INSIDE_NAME = 'IFOO'
        OUTSIDE_NAME = 'OBAR'
        INSIDE_VALUE = 1337

        # inner entity deliberately has no key
        entity_inside = datastore_pb.Entity()
        inside_prop = entity_inside.property.add()
        inside_prop.name = INSIDE_NAME
        inside_prop.value.integer_value = INSIDE_VALUE

        entity_pb = datastore_pb.Entity()
        entity_pb.key.partition_id.dataset_id = DATASET_ID
        element = entity_pb.key.path_element.add()
        element.kind = KIND

        outside_prop = entity_pb.property.add()
        outside_prop.name = OUTSIDE_NAME
        outside_prop.value.entity_value.CopyFrom(entity_inside)

        entity = self._callFUT(entity_pb)
        self.assertEqual(entity.key.dataset_id, DATASET_ID)
        self.assertEqual(entity.key.flat_path, (KIND,))
        self.assertEqual(len(entity), 1)

        inside_entity = entity[OUTSIDE_NAME]
        self.assertEqual(inside_entity.key, None)
        self.assertEqual(len(inside_entity), 1)
        self.assertEqual(inside_entity[INSIDE_NAME], INSIDE_VALUE)
class Test_key_from_protobuf(unittest2.TestCase):
    """Tests for helpers.key_from_protobuf: converting a Key protobuf into a
    Key object (dataset id, namespace, nested path)."""

    def setUp(self):
        from gcloud.datastore._testing import _setup_defaults
        _setup_defaults(self)

    def tearDown(self):
        from gcloud.datastore._testing import _tear_down_defaults
        _tear_down_defaults(self)

    def _callFUT(self, val):
        """Invoke the function under test on *val* and return the result."""
        from gcloud.datastore.helpers import key_from_protobuf
        return key_from_protobuf(val)

    def _makePB(self, dataset_id=None, namespace=None, path=()):
        """Build a Key protobuf from an optional dataset id / namespace and a
        *path* given as dicts with 'kind' and optional 'id' / 'name'."""
        from gcloud.datastore._datastore_v1_pb2 import Key
        pb = Key()
        if dataset_id is not None:
            pb.partition_id.dataset_id = dataset_id
        if namespace is not None:
            pb.partition_id.namespace = namespace
        for elem in path:
            added = pb.path_element.add()
            added.kind = elem['kind']
            if 'id' in elem:
                added.id = elem['id']
            if 'name' in elem:
                added.name = elem['name']
        return pb

    def test_wo_namespace_in_pb(self):
        _DATASET = 'DATASET'
        pb = self._makePB(path=[{'kind': 'KIND'}], dataset_id=_DATASET)
        key = self._callFUT(pb)
        self.assertEqual(key.dataset_id, _DATASET)
        self.assertEqual(key.namespace, None)

    def test_w_namespace_in_pb(self):
        _DATASET = 'DATASET'
        _NAMESPACE = 'NAMESPACE'
        pb = self._makePB(path=[{'kind': 'KIND'}], namespace=_NAMESPACE,
                          dataset_id=_DATASET)
        key = self._callFUT(pb)
        self.assertEqual(key.dataset_id, _DATASET)
        self.assertEqual(key.namespace, _NAMESPACE)

    def test_w_nested_path_in_pb(self):
        # three-level ancestor path with both name- and id-keyed elements
        _PATH = [
            {'kind': 'PARENT', 'name': 'NAME'},
            {'kind': 'CHILD', 'id': 1234},
            {'kind': 'GRANDCHILD', 'id': 5678},
        ]
        pb = self._makePB(path=_PATH, dataset_id='DATASET')
        key = self._callFUT(pb)
        self.assertEqual(key.path, _PATH)

    def test_w_nothing_in_pb(self):
        # an empty Key pb has no path and must be rejected
        pb = self._makePB()
        self.assertRaises(ValueError, self._callFUT, pb)
class Test__pb_attr_value(unittest2.TestCase):
    """Tests for helpers._pb_attr_value: mapping a Python value to the
    (protobuf attribute name, value) pair used when setting a Value pb."""

    def _callFUT(self, val):
        """Invoke the function under test on *val* and return (name, value)."""
        from gcloud.datastore.helpers import _pb_attr_value
        return _pb_attr_value(val)

    def test_datetime_naive(self):
        import calendar
        import datetime
        import pytz

        # naive datetimes are interpreted as UTC
        naive = datetime.datetime(2014, 9, 16, 10, 19, 32, 4375)  # No zone.
        utc = datetime.datetime(2014, 9, 16, 10, 19, 32, 4375, pytz.utc)
        name, value = self._callFUT(naive)
        self.assertEqual(name, 'timestamp_microseconds_value')
        # value is total microseconds since the epoch
        self.assertEqual(value // 1000000, calendar.timegm(utc.timetuple()))
        self.assertEqual(value % 1000000, 4375)

    def test_datetime_w_zone(self):
        import calendar
        import datetime
        import pytz

        utc = datetime.datetime(2014, 9, 16, 10, 19, 32, 4375, pytz.utc)
        name, value = self._callFUT(utc)
        self.assertEqual(name, 'timestamp_microseconds_value')
        self.assertEqual(value // 1000000, calendar.timegm(utc.timetuple()))
        self.assertEqual(value % 1000000, 4375)

    def test_key(self):
        from gcloud.datastore.key import Key

        key = Key('PATH', 1234, dataset_id='DATASET')
        name, value = self._callFUT(key)
        self.assertEqual(name, 'key_value')
        self.assertEqual(value, key.to_protobuf())

    def test_bool(self):
        name, value = self._callFUT(False)
        self.assertEqual(name, 'boolean_value')
        self.assertEqual(value, False)

    def test_float(self):
        name, value = self._callFUT(3.1415926)
        self.assertEqual(name, 'double_value')
        self.assertEqual(value, 3.1415926)

    def test_int(self):
        name, value = self._callFUT(42)
        self.assertEqual(name, 'integer_value')
        self.assertEqual(value, 42)

    def test_long(self):
        # largest value representable as a signed 64-bit integer
        must_be_long = (1 << 63) - 1
        name, value = self._callFUT(must_be_long)
        self.assertEqual(name, 'integer_value')
        self.assertEqual(value, must_be_long)

    def test_long_too_small(self):
        # one below the signed 64-bit minimum must be rejected
        too_small = -(1 << 63) - 1
        self.assertRaises(ValueError, self._callFUT, too_small)

    def test_long_too_large(self):
        # one above the signed 64-bit maximum must be rejected
        too_large = 1 << 63
        self.assertRaises(ValueError, self._callFUT, too_large)

    def test_native_str(self):
        import six

        # native str is bytes on Python 2 and text on Python 3, so the
        # protobuf attribute differs per version
        name, value = self._callFUT('str')
        if six.PY2:
            self.assertEqual(name, 'blob_value')
        else:  # pragma: NO COVER
            self.assertEqual(name, 'string_value')
        self.assertEqual(value, 'str')

    def test_bytes(self):
        name, value = self._callFUT(b'bytes')
        self.assertEqual(name, 'blob_value')
        self.assertEqual(value, b'bytes')

    def test_unicode(self):
        name, value = self._callFUT(u'str')
        self.assertEqual(name, 'string_value')
        self.assertEqual(value, u'str')

    def test_entity(self):
        from gcloud.datastore.entity import Entity
        entity = Entity()
        name, value = self._callFUT(entity)
        self.assertEqual(name, 'entity_value')
        # the entity itself is passed through, not copied
        self.assertTrue(value is entity)

    def test_list(self):
        values = ['a', 0, 3.14]
        name, value = self._callFUT(values)
        self.assertEqual(name, 'list_value')
        # the list itself is passed through, not copied
        self.assertTrue(value is values)

    def test_object(self):
        # unsupported types must be rejected
        self.assertRaises(ValueError, self._callFUT, object())
class Test__get_value_from_value_pb(unittest2.TestCase):
    """Tests for helpers._get_value_from_value_pb: converting a Value
    protobuf back into the corresponding Python value."""

    def _callFUT(self, pb):
        """Invoke the function under test on *pb* and return the result."""
        from gcloud.datastore.helpers import _get_value_from_value_pb
        return _get_value_from_value_pb(pb)

    def _makePB(self, attr_name, value):
        """Build a Value pb with a single attribute *attr_name* set."""
        from gcloud.datastore._datastore_v1_pb2 import Value
        pb = Value()
        setattr(pb, attr_name, value)
        return pb

    def test_datetime(self):
        import calendar
        import datetime
        import pytz

        utc = datetime.datetime(2014, 9, 16, 10, 19, 32, 4375, pytz.utc)
        # epoch microseconds, as stored in the pb
        micros = (calendar.timegm(utc.timetuple()) * 1000000) + 4375
        pb = self._makePB('timestamp_microseconds_value', micros)
        self.assertEqual(self._callFUT(pb), utc)

    def test_key(self):
        from gcloud.datastore._datastore_v1_pb2 import Value
        from gcloud.datastore.key import Key

        pb = Value()
        expected = Key('KIND', 1234, dataset_id='DATASET').to_protobuf()
        pb.key_value.CopyFrom(expected)
        found = self._callFUT(pb)
        self.assertEqual(found.to_protobuf(), expected)

    def test_bool(self):
        pb = self._makePB('boolean_value', False)
        self.assertEqual(self._callFUT(pb), False)

    def test_float(self):
        pb = self._makePB('double_value', 3.1415926)
        self.assertEqual(self._callFUT(pb), 3.1415926)

    def test_int(self):
        pb = self._makePB('integer_value', 42)
        self.assertEqual(self._callFUT(pb), 42)

    def test_bytes(self):
        pb = self._makePB('blob_value', b'str')
        self.assertEqual(self._callFUT(pb), b'str')

    def test_unicode(self):
        pb = self._makePB('string_value', u'str')
        self.assertEqual(self._callFUT(pb), u'str')

    def test_entity(self):
        """An entity-valued pb should come back as an Entity instance."""
        from gcloud.datastore._datastore_v1_pb2 import Value
        from gcloud.datastore.entity import Entity

        pb = Value()
        entity_pb = pb.entity_value
        entity_pb.key.path_element.add(kind='KIND')
        entity_pb.key.partition_id.dataset_id = 'DATASET'
        prop_pb = entity_pb.property.add()
        prop_pb.name = 'foo'
        prop_pb.value.string_value = 'Foo'
        entity = self._callFUT(pb)
        self.assertTrue(isinstance(entity, Entity))
        self.assertEqual(entity['foo'], 'Foo')

    def test_list(self):
        """A list-valued pb should come back as a Python list."""
        from gcloud.datastore._datastore_v1_pb2 import Value

        pb = Value()
        list_pb = pb.list_value
        item_pb = list_pb.add()
        item_pb.string_value = 'Foo'
        item_pb = list_pb.add()
        item_pb.string_value = 'Bar'
        items = self._callFUT(pb)
        self.assertEqual(items, ['Foo', 'Bar'])

    def test_unknown(self):
        """An empty Value pb (no attribute set) converts to None."""
        from gcloud.datastore._datastore_v1_pb2 import Value

        pb = Value()
        self.assertEqual(self._callFUT(pb), None)
class Test__get_value_from_property_pb(unittest2.TestCase):
    """Tests for helpers._get_value_from_property_pb."""

    def _callFUT(self, pb):
        from gcloud.datastore.helpers import _get_value_from_property_pb
        return _get_value_from_property_pb(pb)

    def test_it(self):
        # The helper should simply unwrap the value held by a Property pb.
        from gcloud.datastore._datastore_v1_pb2 import Property

        prop_pb = Property()
        prop_pb.value.string_value = 'value'
        result = self._callFUT(prop_pb)
        self.assertEqual(result, 'value')
class Test_set_protobuf_value(unittest2.TestCase):
    """Tests for helpers._set_protobuf_value: writing Python values of each
    supported type into a Value protobuf."""

    def _callFUT(self, value_pb, val):
        """Invoke the function under test with *value_pb* and *val*."""
        from gcloud.datastore.helpers import _set_protobuf_value
        return _set_protobuf_value(value_pb, val)

    def _makePB(self):
        """Return a fresh, empty Value protobuf."""
        from gcloud.datastore._datastore_v1_pb2 import Value
        return Value()

    def test_datetime(self):
        import calendar
        import datetime
        import pytz

        pb = self._makePB()
        utc = datetime.datetime(2014, 9, 16, 10, 19, 32, 4375, pytz.utc)
        self._callFUT(pb, utc)
        value = pb.timestamp_microseconds_value
        # stored as total microseconds since the epoch
        self.assertEqual(value // 1000000, calendar.timegm(utc.timetuple()))
        self.assertEqual(value % 1000000, 4375)

    def test_key(self):
        from gcloud.datastore.key import Key

        pb = self._makePB()
        key = Key('KIND', 1234, dataset_id='DATASET')
        self._callFUT(pb, key)
        value = pb.key_value
        self.assertEqual(value, key.to_protobuf())

    def test_none(self):
        """Setting None must clear every field, whatever was set before."""
        from gcloud.datastore.entity import Entity

        entity = Entity()
        pb = self._makePB()

        # set each supported type in turn, then None on top of it
        self._callFUT(pb, False)
        self._callFUT(pb, 3.1415926)
        self._callFUT(pb, 42)
        self._callFUT(pb, (1 << 63) - 1)
        self._callFUT(pb, 'str')
        self._callFUT(pb, b'str')
        self._callFUT(pb, u'str')
        self._callFUT(pb, entity)
        self._callFUT(pb, [u'a', 0, 3.14])

        self._callFUT(pb, None)
        # no fields should remain set
        self.assertEqual(len(pb.ListFields()), 0)

    def test_bool(self):
        pb = self._makePB()
        self._callFUT(pb, False)
        value = pb.boolean_value
        self.assertEqual(value, False)

    def test_float(self):
        pb = self._makePB()
        self._callFUT(pb, 3.1415926)
        value = pb.double_value
        self.assertEqual(value, 3.1415926)

    def test_int(self):
        pb = self._makePB()
        self._callFUT(pb, 42)
        value = pb.integer_value
        self.assertEqual(value, 42)

    def test_long(self):
        pb = self._makePB()
        # largest value representable as a signed 64-bit integer
        must_be_long = (1 << 63) - 1
        self._callFUT(pb, must_be_long)
        value = pb.integer_value
        self.assertEqual(value, must_be_long)

    def test_native_str(self):
        import six

        # native str maps to blob on Python 2 and string on Python 3
        pb = self._makePB()
        self._callFUT(pb, 'str')
        if six.PY2:
            value = pb.blob_value
        else:  # pragma: NO COVER
            value = pb.string_value
        self.assertEqual(value, 'str')

    def test_bytes(self):
        pb = self._makePB()
        self._callFUT(pb, b'str')
        value = pb.blob_value
        self.assertEqual(value, b'str')

    def test_unicode(self):
        pb = self._makePB()
        self._callFUT(pb, u'str')
        value = pb.string_value
        self.assertEqual(value, u'str')

    def test_entity_empty_wo_key(self):
        """A keyless, empty entity serializes to an empty key and no
        properties."""
        from gcloud.datastore.entity import Entity

        pb = self._makePB()
        entity = Entity()
        self._callFUT(pb, entity)
        value = pb.entity_value
        self.assertEqual(value.key.SerializeToString(), b'')
        props = list(value.property)
        self.assertEqual(len(props), 0)

    def test_entity_w_key(self):
        """An entity with a key and one property serializes both."""
        from gcloud.datastore.entity import Entity
        from gcloud.datastore.key import Key

        pb = self._makePB()
        key = Key('KIND', 123, dataset_id='DATASET')
        entity = Entity(key=key)
        entity['foo'] = u'Foo'
        self._callFUT(pb, entity)
        value = pb.entity_value
        self.assertEqual(value.key, key.to_protobuf())
        props = list(value.property)
        self.assertEqual(len(props), 1)
        self.assertEqual(props[0].name, 'foo')
        self.assertEqual(props[0].value.string_value, u'Foo')

    def test_list(self):
        """Each list element is marshalled into the matching typed field."""
        pb = self._makePB()
        values = [u'a', 0, 3.14]
        self._callFUT(pb, values)
        marshalled = pb.list_value
        self.assertEqual(len(marshalled), len(values))
        self.assertEqual(marshalled[0].string_value, values[0])
        self.assertEqual(marshalled[1].integer_value, values[1])
        self.assertEqual(marshalled[2].double_value, values[2])
class Test__prepare_key_for_request(unittest2.TestCase):
    """Tests for helpers._prepare_key_for_request."""

    def _callFUT(self, key_pb):
        from gcloud.datastore.helpers import _prepare_key_for_request
        return _prepare_key_for_request(key_pb)

    def test_prepare_dataset_id_valid(self):
        # A key that carries a dataset id must be copied, not mutated.
        from gcloud.datastore import _datastore_v1_pb2 as datastore_pb

        key_pb = datastore_pb.Key()
        key_pb.partition_id.dataset_id = 'foo'

        prepared = self._callFUT(key_pb)
        self.assertIsNot(prepared, key_pb)

        # Apart from partition_id, the copy should match an empty key.
        prepared.ClearField('partition_id')
        self.assertEqual(prepared, datastore_pb.Key())

    def test_prepare_dataset_id_unset(self):
        # Without a dataset id the key is returned untouched.
        from gcloud.datastore import _datastore_v1_pb2 as datastore_pb

        key_pb = datastore_pb.Key()
        self.assertIs(self._callFUT(key_pb), key_pb)
class Test_find_true_dataset_id(unittest2.TestCase):
    """Tests for ``gcloud.datastore.helpers.find_true_dataset_id``."""
    def setUp(self):
        from gcloud.datastore._testing import _setup_defaults
        _setup_defaults(self)
    def tearDown(self):
        from gcloud.datastore._testing import _tear_down_defaults
        _tear_down_defaults(self)
    def _callFUT(self, dataset_id, connection):
        from gcloud.datastore.helpers import find_true_dataset_id
        return find_true_dataset_id(dataset_id, connection)
    def test_prefixed(self):
        # An already-prefixed dataset id is returned unchanged; the
        # connection must not be used (hence the bare ``object()``).
        PREFIXED = 's~DATASET'
        result = self._callFUT(PREFIXED, object())
        self.assertEqual(PREFIXED, result)
    def test_unprefixed_bogus_key_miss(self):
        # Unprefixed id: the helper looks up a bogus key and reads the
        # server-assigned prefix from the *found* result.
        UNPREFIXED = 'DATASET'
        PREFIX = 's~'
        CONNECTION = _Connection(PREFIX, from_missing=False)
        result = self._callFUT(UNPREFIXED, CONNECTION)
        self.assertEqual(CONNECTION._called_dataset_id, UNPREFIXED)
        self.assertEqual(len(CONNECTION._lookup_result), 1)
        # Make sure just one.
        called_key_pb, = CONNECTION._called_key_pbs
        path_element = called_key_pb.path_element
        self.assertEqual(len(path_element), 1)
        self.assertEqual(path_element[0].kind, '__MissingLookupKind')
        self.assertEqual(path_element[0].id, 1)
        self.assertFalse(path_element[0].HasField('name'))
        PREFIXED = PREFIX + UNPREFIXED
        self.assertEqual(result, PREFIXED)
    def test_unprefixed_bogus_key_hit(self):
        # Same as above, but the bogus key comes back in ``missing``; the
        # prefix must be read from the missing entity instead.
        UNPREFIXED = 'DATASET'
        PREFIX = 'e~'
        CONNECTION = _Connection(PREFIX, from_missing=True)
        result = self._callFUT(UNPREFIXED, CONNECTION)
        self.assertEqual(CONNECTION._called_dataset_id, UNPREFIXED)
        self.assertEqual(CONNECTION._lookup_result, [])
        # Make sure just one.
        called_key_pb, = CONNECTION._called_key_pbs
        path_element = called_key_pb.path_element
        self.assertEqual(len(path_element), 1)
        self.assertEqual(path_element[0].kind, '__MissingLookupKind')
        self.assertEqual(path_element[0].id, 1)
        self.assertFalse(path_element[0].HasField('name'))
        PREFIXED = PREFIX + UNPREFIXED
        self.assertEqual(result, PREFIXED)
class _Connection(object):
_called_dataset_id = _called_key_pbs = _lookup_result = None
def __init__(self, prefix, from_missing=False):
self.prefix = prefix
self.from_missing = from_missing
def lookup(self, dataset_id, key_pbs):
from gcloud.datastore import _datastore_v1_pb2 as datastore_pb
# Store the arguments called with.
self._called_dataset_id = dataset_id
self._called_key_pbs = key_pbs
key_pb, = key_pbs
response = datastore_pb.Entity()
response.key.CopyFrom(key_pb)
response.key.partition_id.dataset_id = self.prefix + dataset_id
missing = []
deferred = []
if self.from_missing:
missing[:] = [response]
self._lookup_result = []
else:
self._lookup_result = [response]
return self._lookup_result, missing, deferred
| apache-2.0 |
yuuki0xff/conoha-cli | conoha/api.py | 1 | 4205 |
# -*- coding: utf8 -*-
from urllib.request import Request, urlopen
import json
__all__ = 'API'.split()
class DictWrapper(dict):
	"""``dict`` subclass used so instance attributes can be attached."""
	pass
class BytesWrapper(bytes):
	"""``bytes`` subclass used so instance attributes can be attached."""
	pass
class API:
	"""Superclass of every class that calls a ConoHa API endpoint."""
	def __init__(self, token=None, baseURIPrefix=None):
		self.__baseURI = None
		self.token = token
		self._serviceType = None
		self.baseURIPrefix = baseURIPrefix
	def _getHeaders(self, h):
		# Default headers; caller-supplied entries in ``h`` override them.
		headers={
				'Accept': 'application/json',
				}
		if self.token:
			headers['X-Auth-Token'] = self.token.getAuthToken()
		if h:
			headers.update(h)
		return headers
	def _GET(self, path, data=None, isDeserialize=True, headers=None, method='GET'):
		"""Perform an API request.
		If ``data`` is not None, it is converted to JSON and uploaded.
		If ``headers`` is a dict, those headers are added to the request.
		The response is a DictWrapper or a BytesWrapper object.
		The response carries the following attributes:
		code:int http status code
		msg:str http status message
		headers:dict response headers
		"""
		# set self.__baseURI
		if not self.__baseURI:
			if self.token:
				self.__baseURI = self.token.getEndpointURL(self._serviceType)
			else:
				# No token yet (e.g. while authenticating): resolve the
				# endpoint ourselves.  NOTE(review): only subclasses that
				# define getEndpointURL (Token) can reach this branch.
				self.__baseURI = self.getEndpointURL(self._serviceType)
			if self.baseURIPrefix:
				self.__baseURI += '/' + self.baseURIPrefix
		if data:
			data = bytes(json.dumps(data), 'utf8')
		req = Request(
				url=self.__baseURI + ('/' + path if path else ''), # no trailing '/'
				headers=self._getHeaders(headers),
				method=method,
				data=data,
				)
		with urlopen(req) as res:
			resBin = res.read()
			if isDeserialize:
				data = DictWrapper(json.loads(str(resBin, 'utf8')))
			else:
				data = BytesWrapper(resBin)
			# Attach the HTTP status code and headers to the wrapper.
			data.code = res.code
			data.msg = res.msg
			data.headers = res.headers
		return data
	def _DELETE(self, *args, **nargs):
		"""see help(self._GET)"""
		return self._GET(*args, method='DELETE', **nargs)
	def _POST(self, path, data, *args, **nargs):
		"""see help(self._GET)"""
		return self._GET(path, data, *args, method='POST', **nargs)
	def _PUT(self, path, data, *args, **nargs):
		"""see help(self._GET)"""
		return self._GET(path, data, *args, method='PUT', **nargs)
class Token(API):
	"""Authenticates against the identity service and holds the token."""
	def __init__(self, conf):
		super().__init__()
		self._serviceType = 'identity'
		self.conf = conf
		path = 'tokens'
		data = { 'auth':{
			'passwordCredentials':{
				'username': conf.get('api', 'user'),
				'password': conf.get('api', 'passwd'),
			},
		}}
		self.tenantId = conf.get('api', 'tenant')
		self.region = conf.get('endpoint', 'region')
		if self.tenantId:
			data['auth']['passwordCredentials']['tenantId'] = self.tenantId
		# No token exists yet, so _POST resolves the identity endpoint via
		# our own getEndpointURL below.
		res = self._POST(path, data)
		self.token = res['access']['token']
	def getTenantId(self):
		return self.tenantId
	def getRegion(self):
		return self.region
	def getAuthToken(self):
		return self.token['id']
	def getEndpointURL(self, name):
		# Prefer an explicit per-service URL from the config; fall back to
		# the built-in endpoint table.
		url = self.conf.get('endpoint', name, fallback=None) or self.conf.endpoint[name]
		# NOTE(review): ``assert`` is stripped under ``python -O``; an
		# explicit exception would be more robust for this validation.
		assert(url)
		if '{REGION}' in url:
			url = url.replace('{REGION}', self.getRegion())
		if '{TENANT_ID}' in url:
			url = url.replace('{TENANT_ID}', self.getTenantId())
		return url.rstrip('/')
class CustomList(list):
	"""List with extended index lookup.
	An ``int`` or ``slice`` index behaves exactly like a plain list.
	Any other key returns the first item for which ``_getitem(key, item)``
	is true; ``KeyError`` is raised when no item matches.
	Subclasses must implement ``_getitem(key, item)``.
	"""
	def __getitem__(self, key):
		if isinstance(key, (int, slice)):
			return super().__getitem__(key)
		for candidate in self:
			if self._getitem(key, candidate):
				return candidate
		raise KeyError(key)
| mit |
lewischeng-ms/pox | tools/gui/communication.py | 5 | 6464 | '''
This module implements the communication between
- the topology view and the monitoring backend that feeds it
- the log view and NOX's logger
- the json command prompt and NOX's json messenger
@author Kyriakos Zarifis
'''
from PyQt4 import QtGui, QtCore
import SocketServer
import socket
import logging
import json
import asyncore
from time import sleep
import cPickle
import struct
# JSON decoder used by default
defaultDecoder = json.JSONDecoder()
class Communication(QtCore.QThread, QtGui.QWidget):
    '''
    Communicates with backend in order to receive topology-view
    information. Used to communicate with GuiMessenger for other, component-
    specific event notification too.
    '''
    # Define signals that are emitted when messages are received
    # Interested custom views connect these signals to their slots
    # Signal used to notify the view that tunnels have been updated
    #tunnels_reply_received_signal = QtCore.pyqtSignal()
    # Signal used to notify the view that new TED info was received
    #ted_reply_received_signal = QtCore.pyqtSignal()
    # Signal used to notify the view that tunnels might have changed
    #link_status_change_signal = QtCore.pyqtSignal()
    # Define a new signal that takes a SwitchQueryReply type as an argument
    #switch_query_reply_received_signal = QtCore.pyqtSignal()# SwitchQueryReply )
    # Signal used to notify monitoring view of new msg
    monitoring_received_signal = QtCore.pyqtSignal(object)
    # Define a new signal that takes a Topology type as an argument
    topology_received_signal = QtCore.pyqtSignal(object)
    # Signal used to notify STP view of new msg
    spanning_tree_received_signal = QtCore.pyqtSignal(object)
    # Signal used to notify routing view of new msg
    routing_received_signal = QtCore.pyqtSignal(object)
    # Signal used to notify FlowTracer view of new msg
    flowtracer_received_signal = QtCore.pyqtSignal(object)
    # Signal used to notify Log of new msg
    log_received_signal = QtCore.pyqtSignal(object)
    def __init__(self, parent):
        # NOTE(review): only QThread's initializer is invoked although the
        # class also inherits QtGui.QWidget -- confirm this is intentional.
        QtCore.QThread.__init__(self)
        self.xid_counter = 1
        self.parent = parent
        self.backend_ip = self.parent.backend_ip
        self.backend_port = self.parent.backend_port
        # Connect socket
        self.connected = False
        self.sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        #self.sock.setblocking(0)
        try:
            self.sock.connect((self.backend_ip,self.backend_port))
            self.connected = True
        except:
            self.retry_connection()
        #self.subscribe_for_topochanges()
        #self.subscribe_for_linkutils()
        # Background thread that receives and dispatches backend messages.
        self.listener = Listener(self)
        self.listener.start()
    def retry_connection(self):
        # NOTE(review): recursive retry -- every failed attempt deepens the
        # stack, so a long outage can exhaust recursion; a loop would be
        # safer.  Left as-is.
        print "Retrying connection to POX...(is 'messenger' running?)"
        sleep(2)
        try:
            self.sock.connect((self.backend_ip,self.backend_port))
            self.connected = True
        except:
            self.retry_connection()
    def send(self, msg):
        # Serialize ``msg`` as JSON and push it to the backend socket.
        if not self.connected:
            print "Not connected to POX"
            return
        #if not "xid" in msg:
        #    msg["xid"] = self.xid_counter
        #self.xid_counter += 1
        print 'Sending :', msg
        self.sock.send(json.dumps(msg))
    def shutdown(self):
        #self.listener.stop()
        self.sock.shutdown(1)
        self.sock.close()
class Listener(QtCore.QThread):
    """Receives the backend's JSON stream and emits one Qt signal per message."""
    def __init__(self, p):
        QtCore.QThread.__init__(self)
        # ``p`` is the owning Communication instance (socket + signals).
        self.p = p
        self._buf = bytes()
    def run (self):
        while 1:
            data = self.p.sock.recv(1024)
            if data is None or len(data) == 0:
                break
            #if len(data) == 0: return
            # Accumulate into the buffer, stripping leading whitespace so the
            # JSON decoder always starts at a token.
            if len(self._buf) == 0:
                if data[0].isspace():
                    self._buf = data.lstrip()
                else:
                    self._buf = data
            else:
                self._buf += data
            # Drain every complete JSON document currently in the buffer.
            while len(self._buf) > 0:
                try:
                    msg, l = defaultDecoder.raw_decode(self._buf)
                except:
                    # Need more data before it's a valid message
                    # (.. or the stream is corrupt and things will never be okay ever again)
                    # NOTE(review): ``return`` exits run() and ends the whole
                    # listener thread on a partial message -- presumably a
                    # ``break`` to go back to recv() was intended; confirm.
                    return
                self._buf = self._buf[l:]
                if len(self._buf) != 0 and self._buf[0].isspace():
                    self._buf = self._buf.lstrip()
                # Dispatch on the message's "type" field to the matching signal.
                if msg["type"] == "topology":
                    print "Recieved :", msg
                    self.p.topology_received_signal.emit(msg)
                elif msg["type"] == "monitoring":
                    self.p.monitoring_received_signal.emit(msg)
                elif msg["type"] == "spanning_tree":
                    self.p.spanning_tree_received_signal.emit(msg)
                elif msg["type"] == "sample_routing":
                    self.p.routing_received_signal.emit(msg)
                elif msg["type"] == "flowtracer":
                    self.p.flowtracer_received_signal.emit(msg)
                elif msg["type"] == "log":
                    self.p.log_received_signal.emit(msg)
class ConsoleInterface():
    '''
    Sends JSON commands to NOX
    '''
    def __init__(self, parent):
        self.consoleWidget = parent
        ##NOX host
        self.nox_host = "localhost"
        ##Port number
        self.port_no = 2703
    def send_cmd(self, cmd=None, expectReply=False):
        # Send one JSON command over a fresh TCP connection and optionally
        # pretty-print the reply.
        # if textbox empty, construct command
        if not cmd:
            print "sending dummy cmd"
            cmd = "{\"type\":\"lavi\",\"command\":\"request\",\"node_type\":\"all\"}"
        #Send command
        sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        sock.connect((self.nox_host,self.port_no))
        sock.send(cmd)
        if expectReply:
            print json.dumps(json.loads(sock.recv(4096)), indent=4)
        # Tell the messenger we are done before tearing the socket down.
        sock.send("{\"type\":\"disconnect\"}")
        sock.shutdown(1)
sock.close() | gpl-3.0 |
andremiller/beets | test/test_info.py | 1 | 2980 | # This file is part of beets.
# Copyright 2015, Thomas Scholtes.
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
from _common import unittest
from helper import TestHelper
from beets.mediafile import MediaFile
class InfoTest(unittest.TestCase, TestHelper):
    """Integration tests for the ``beet info`` command."""
    def setUp(self):
        self.setup_beets()
        self.load_plugins('info')
    def tearDown(self):
        self.unload_plugins()
        self.teardown_beets()
    def run_command(self, *args):
        # Prepend the subcommand so tests pass only its arguments.
        super(InfoTest, self).run_command('info', *args)
    def test_path(self):
        # Querying by path prints tags read from the file itself.
        path = self.create_mediafile_fixture()
        mediafile = MediaFile(path)
        mediafile.albumartist = 'AAA'
        mediafile.disctitle = 'DDD'
        mediafile.genres = ['a', 'b', 'c']
        mediafile.composer = None
        mediafile.save()
        out = self.run_with_output(path)
        self.assertIn(path, out)
        self.assertIn('albumartist: AAA', out)
        self.assertIn('disctitle: DDD', out)
        self.assertIn('genres: a; b; c', out)
        # Unset fields must not be printed at all.
        self.assertNotIn('composer:', out)
    def test_item_query(self):
        # By default a query matches on-disk tags: the item whose *file*
        # says 'xxxx' matches the library query 'yyyy' because the library
        # record was updated after the file was written.
        items = self.add_item_fixtures(count=2)
        items[0].album = 'xxxx'
        items[0].write()
        items[0].album = 'yyyy'
        items[0].store()
        out = self.run_with_output('album:yyyy')
        self.assertIn(items[0].path, out)
        self.assertIn('album: xxxx', out)
        self.assertNotIn(items[1].path, out)
    def test_item_library_query(self):
        # With --library, values come from the database, not the file.
        item, = self.add_item_fixtures()
        item.album = 'xxxx'
        item.store()
        out = self.run_with_output('--library', 'album:xxxx')
        self.assertIn(item.path, out)
        self.assertIn('album: xxxx', out)
    def test_collect_item_and_path(self):
        # --summarize merges fields across a query match and a path
        # argument: equal values are shown once, differing ones collapse
        # to '[various]'.
        path = self.create_mediafile_fixture()
        mediafile = MediaFile(path)
        item, = self.add_item_fixtures()
        item.album = mediafile.album = 'AAA'
        item.tracktotal = mediafile.tracktotal = 5
        item.title = 'TTT'
        mediafile.title = 'SSS'
        item.write()
        item.store()
        mediafile.save()
        out = self.run_with_output('--summarize', 'album:AAA', path)
        self.assertIn('album: AAA', out)
        self.assertIn('tracktotal: 5', out)
        self.assertIn('title: [various]', out)
def suite():
    """Build this module's test suite (referenced by ``defaultTest`` below)."""
    return unittest.TestLoader().loadTestsFromName(__name__)
if __name__ == '__main__':
    unittest.main(defaultTest='suite')
| mit |
erja-gp/openthread | tools/harness-automation/cases/leader_9_2_19.py | 1 | 1877 | #!/usr/bin/env python
#
# Copyright (c) 2018, The OpenThread Authors.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# 3. Neither the name of the copyright holder nor the
# names of its contributors may be used to endorse or promote products
# derived from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
#
import unittest
from autothreadharness.harness_case import HarnessCase
class Leader_9_2_19(HarnessCase):
    """Thread Harness certification case 9.2.19 with the DUT as Leader."""
    role = HarnessCase.ROLE_LEADER
    case = '9 2 19'
    golden_devices_required = 1
    def on_dialog(self, dialog, title):
        # This case needs no interaction with Harness dialogs.
        pass
if __name__ == '__main__':
    unittest.main()
| bsd-3-clause |
whitehorse-io/encarnia | pyenv/lib/python2.7/site-packages/pip/_vendor/distlib/markers.py | 1261 | 6282 | # -*- coding: utf-8 -*-
#
# Copyright (C) 2012-2013 Vinay Sajip.
# Licensed to the Python Software Foundation under a contributor agreement.
# See LICENSE.txt and CONTRIBUTORS.txt.
#
"""Parser for the environment markers micro-language defined in PEP 345."""
import ast
import os
import sys
import platform
from .compat import python_implementation, string_types
from .util import in_venv
__all__ = ['interpret']
class Evaluator(object):
    """
    A limited evaluator for Python expressions.
    """
    # Maps the lowercased AST operator class name ('eq', 'notin', ...) to
    # its implementation; see do_compare below.
    operators = {
        'eq': lambda x, y: x == y,
        'gt': lambda x, y: x > y,
        'gte': lambda x, y: x >= y,
        'in': lambda x, y: x in y,
        'lt': lambda x, y: x < y,
        'lte': lambda x, y: x <= y,
        'not': lambda x: not x,
        'noteq': lambda x, y: x != y,
        'notin': lambda x, y: x not in y,
    }
    # Environment-marker variables (PEP 345) that expressions may reference,
    # captured once at import time.
    allowed_values = {
        'sys_platform': sys.platform,
        'python_version': '%s.%s' % sys.version_info[:2],
        # parsing sys.platform is not reliable, but there is no other
        # way to get e.g. 2.7.2+, and the PEP is defined with sys.version
        'python_full_version': sys.version.split(' ', 1)[0],
        'os_name': os.name,
        'platform_in_venv': str(in_venv()),
        'platform_release': platform.release(),
        'platform_version': platform.version(),
        'platform_machine': platform.machine(),
        'platform_python_implementation': python_implementation(),
    }
    def __init__(self, context=None):
        """
        Initialise an instance.
        :param context: If specified, names are looked up in this mapping.
        """
        self.context = context or {}
        self.source = None
    def get_fragment(self, offset):
        """
        Get the part of the source which is causing a problem.
        """
        fragment_len = 10
        s = '%r' % (self.source[offset:offset + fragment_len])
        if offset + fragment_len < len(self.source):
            s += '...'
        return s
    def get_handler(self, node_type):
        """
        Get a handler for the specified AST node type.
        """
        # Handlers are the ``do_<nodetype>`` methods defined below.
        return getattr(self, 'do_%s' % node_type, None)
    def evaluate(self, node, filename=None):
        """
        Evaluate a source string or node, using ``filename`` when
        displaying errors.
        """
        if isinstance(node, string_types):
            self.source = node
            kwargs = {'mode': 'eval'}
            if filename:
                kwargs['filename'] = filename
            try:
                node = ast.parse(node, **kwargs)
            except SyntaxError as e:
                s = self.get_fragment(e.offset)
                raise SyntaxError('syntax error %s' % s)
        node_type = node.__class__.__name__.lower()
        handler = self.get_handler(node_type)
        if handler is None:
            if self.source is None:
                s = '(source not available)'
            else:
                s = self.get_fragment(node.col_offset)
            raise SyntaxError("don't know how to evaluate %r %s" % (
                node_type, s))
        return handler(node)
    def get_attr_key(self, node):
        assert isinstance(node, ast.Attribute), 'attribute node expected'
        return '%s.%s' % (node.value.id, node.attr)
    def do_attribute(self, node):
        # Only dotted names present in the context or in allowed_values may
        # be referenced; anything else is rejected.
        if not isinstance(node.value, ast.Name):
            valid = False
        else:
            key = self.get_attr_key(node)
            valid = key in self.context or key in self.allowed_values
        if not valid:
            raise SyntaxError('invalid expression: %s' % key)
        if key in self.context:
            result = self.context[key]
        else:
            result = self.allowed_values[key]
        return result
    def do_boolop(self, node):
        # Short-circuit evaluation: stop as soon as the outcome is fixed.
        result = self.evaluate(node.values[0])
        is_or = node.op.__class__ is ast.Or
        is_and = node.op.__class__ is ast.And
        assert is_or or is_and
        if (is_and and result) or (is_or and not result):
            for n in node.values[1:]:
                result = self.evaluate(n)
                if (is_or and result) or (is_and and not result):
                    break
        return result
    def do_compare(self, node):
        def sanity_check(lhsnode, rhsnode):
            # Comparing two string literals is meaningless in a marker, so
            # it is rejected outright.
            valid = True
            if isinstance(lhsnode, ast.Str) and isinstance(rhsnode, ast.Str):
                valid = False
            #elif (isinstance(lhsnode, ast.Attribute)
            # and isinstance(rhsnode, ast.Attribute)):
            # klhs = self.get_attr_key(lhsnode)
            # krhs = self.get_attr_key(rhsnode)
            # valid = klhs != krhs
            if not valid:
                s = self.get_fragment(node.col_offset)
                raise SyntaxError('Invalid comparison: %s' % s)
        # Handle chained comparisons (a < b < c) pairwise, short-circuiting
        # on the first False, as Python itself does.
        lhsnode = node.left
        lhs = self.evaluate(lhsnode)
        result = True
        for op, rhsnode in zip(node.ops, node.comparators):
            sanity_check(lhsnode, rhsnode)
            op = op.__class__.__name__.lower()
            if op not in self.operators:
                raise SyntaxError('unsupported operation: %r' % op)
            rhs = self.evaluate(rhsnode)
            result = self.operators[op](lhs, rhs)
            if not result:
                break
            lhs = rhs
            lhsnode = rhsnode
        return result
    def do_expression(self, node):
        return self.evaluate(node.body)
    def do_name(self, node):
        # Bare names resolve through the context first, then allowed_values.
        valid = False
        if node.id in self.context:
            valid = True
            result = self.context[node.id]
        elif node.id in self.allowed_values:
            valid = True
            result = self.allowed_values[node.id]
        if not valid:
            raise SyntaxError('invalid expression: %s' % node.id)
        return result
    def do_str(self, node):
        return node.s
def interpret(marker, execution_context=None):
    """
    Evaluate an environment *marker* against the running environment.
    :param marker: The marker expression to evaluate.
    :type marker: str
    :param execution_context: Optional mapping used for name lookup.
    :type execution_context: mapping
    """
    stripped = marker.strip()
    return Evaluator(execution_context).evaluate(stripped)
| mit |
zbassett/curling-robot | RaspberryPi/DjangoSite/mysite/curling/migrations/0011_auto_20170212_1701.py | 1 | 1906 | # -*- coding: utf-8 -*-
# Generated by Django 1.10.5 on 2017-02-12 17:01
from __future__ import unicode_literals
from django.db import migrations, models
import django.utils.timezone
class Migration(migrations.Migration):
    # Auto-generated schema migration: switches the timestamp fields to a
    # ``timezone.now`` default and tightens two Sheet field definitions.
    dependencies = [
        ('curling', '0010_auto_20170212_0059'),
    ]
    operations = [
        migrations.AlterField(
            model_name='club',
            name='LastUpdated',
            field=models.DateTimeField(default=django.utils.timezone.now),
        ),
        migrations.AlterField(
            model_name='person',
            name='FirstName',
            field=models.CharField(max_length=200),
        ),
        migrations.AlterField(
            model_name='person',
            name='LastName',
            field=models.CharField(max_length=200),
        ),
        migrations.AlterField(
            model_name='person',
            name='LastUpdated',
            field=models.DateTimeField(default=django.utils.timezone.now),
        ),
        migrations.AlterField(
            model_name='rfidrawdata',
            name='LastUpdated',
            field=models.DateTimeField(default=django.utils.timezone.now),
        ),
        migrations.AlterField(
            model_name='rock',
            name='LastUpdated',
            field=models.DateTimeField(default=django.utils.timezone.now),
        ),
        migrations.AlterField(
            model_name='session',
            name='Initiated',
            field=models.DateTimeField(default=django.utils.timezone.now),
        ),
        # ``preserve_default=False`` means 14 is a one-off value applied to
        # existing rows, not a lasting default.  NOTE(review): an int default
        # on a CharField looks odd -- presumably entered interactively during
        # makemigrations; confirm intent.
        migrations.AlterField(
            model_name='sheet',
            name='SheetLocalID',
            field=models.CharField(default=14, max_length=200),
            preserve_default=False,
        ),
        migrations.AlterField(
            model_name='sheet',
            name='Width',
            field=models.FloatField(default=14),
        ),
    ]
| apache-2.0 |
bestvibes/neo4j-social-network | env/lib/python2.7/encodings/utf_8_sig.py | 412 | 3685 | """ Python 'utf-8-sig' Codec
This work similar to UTF-8 with the following changes:
* On encoding/writing a UTF-8 encoded BOM will be prepended/written as the
first three bytes.
* On decoding/reading if the first three bytes are a UTF-8 encoded BOM, these
bytes will be skipped.
"""
import codecs
### Codec APIs
def encode(input, errors='strict'):
    # Prepend the UTF-8 signature, then defer to the plain UTF-8 codec.
    # The reported length is the number of *characters* consumed.
    encoded = codecs.utf_8_encode(input, errors)[0]
    return (codecs.BOM_UTF8 + encoded, len(input))
def decode(input, errors='strict'):
    # Skip a leading UTF-8 signature if present, and account for it in the
    # number of consumed bytes reported to the caller.
    if input[:3] == codecs.BOM_UTF8:
        body, skipped = input[3:], 3
    else:
        body, skipped = input, 0
    (output, consumed) = codecs.utf_8_decode(body, errors, True)
    return (output, consumed + skipped)
class IncrementalEncoder(codecs.IncrementalEncoder):
    """Incremental UTF-8 encoder that emits a BOM before the first chunk."""
    def __init__(self, errors='strict'):
        codecs.IncrementalEncoder.__init__(self, errors)
        # 1 while no output has been produced yet, 0 afterwards; kept as an
        # int because getstate()/setstate() expose it directly.
        self.first = 1
    def encode(self, input, final=False):
        encoded = codecs.utf_8_encode(input, self.errors)[0]
        if not self.first:
            return encoded
        self.first = 0
        return codecs.BOM_UTF8 + encoded
    def reset(self):
        codecs.IncrementalEncoder.reset(self)
        self.first = 1
    def getstate(self):
        return self.first
    def setstate(self, state):
        self.first = state
class IncrementalDecoder(codecs.BufferedIncrementalDecoder):
    """Incremental UTF-8 decoder that strips a leading BOM, if present."""
    def __init__(self, errors='strict'):
        codecs.BufferedIncrementalDecoder.__init__(self, errors)
        # True until enough input has arrived to decide about the BOM.
        self.first = True
    def _buffer_decode(self, input, errors, final):
        if self.first:
            if len(input) < 3 and codecs.BOM_UTF8.startswith(input):
                # Could still turn out to be a BOM: ask for more data.
                return (u"", 0)
            self.first = None
            if input[:3] == codecs.BOM_UTF8:
                output, consumed = codecs.utf_8_decode(input[3:], errors, final)
                return (output, consumed + 3)
        return codecs.utf_8_decode(input, errors, final)
    def reset(self):
        codecs.BufferedIncrementalDecoder.reset(self)
        self.first = True
class StreamWriter(codecs.StreamWriter):
    """Stream writer that prepends the BOM on the first write."""
    def reset(self):
        codecs.StreamWriter.reset(self)
        try:
            # Drop the instance-level ``encode`` installed below, so the next
            # write goes through this class's BOM-emitting method again.
            del self.encode
        except AttributeError:
            pass
    def encode(self, input, errors='strict'):
        # First write only: install the plain UTF-8 encoder as an *instance*
        # attribute (shadowing this method for subsequent writes), then emit
        # BOM + data via the module-level ``encode``.
        self.encode = codecs.utf_8_encode
        return encode(input, errors)
class StreamReader(codecs.StreamReader):
    """Stream reader that strips a leading BOM, if present."""
    def reset(self):
        codecs.StreamReader.reset(self)
        try:
            # Drop the instance-level ``decode`` installed below, so the next
            # read re-checks for a BOM via this class's method.
            del self.decode
        except AttributeError:
            pass
    def decode(self, input, errors='strict'):
        if len(input) < 3:
            if codecs.BOM_UTF8.startswith(input):
                # not enough data to decide if this is a BOM
                # => try again on the next call
                return (u"", 0)
        elif input[:3] == codecs.BOM_UTF8:
            # BOM found: skip it and switch permanently (until reset) to the
            # plain UTF-8 decoder via an instance attribute.
            self.decode = codecs.utf_8_decode
            (output, consumed) = codecs.utf_8_decode(input[3:],errors)
            return (output, consumed+3)
        # (else) no BOM present
        self.decode = codecs.utf_8_decode
        return codecs.utf_8_decode(input, errors)
### encodings module API
def getregentry():
    # Codec-registry entry point: wires the functions and classes above into
    # the codecs machinery under the name 'utf-8-sig'.
    return codecs.CodecInfo(
        name='utf-8-sig',
        encode=encode,
        decode=decode,
        incrementalencoder=IncrementalEncoder,
        incrementaldecoder=IncrementalDecoder,
        streamreader=StreamReader,
        streamwriter=StreamWriter,
    )
| mit |
lyarwood/sosreport | sos/plugins/tuned.py | 12 | 1405 | # Copyright (C) 2014 Red Hat, Inc., Peter Portante <peter.portante@redhat.com>
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
from sos.plugins import Plugin, RedHatPlugin
class Tuned(Plugin, RedHatPlugin):
    """Tuned system tuning daemon
    """
    packages = ('tuned',)
    profiles = ('system', 'performance')
    plugin_name = 'tuned'
    def setup(self):
        # Capture the available, active and recommended tuning profiles.
        self.add_cmd_output([
            "tuned-adm list",
            "tuned-adm active",
            "tuned-adm recommend"
        ])
        # tuned 1.x configuration locations.
        self.add_copy_spec([
            "/etc/tuned.conf",
            "/etc/tune-profiles"
        ])
        # tuned 2.x configuration, shipped profiles and daemon log.
        self.add_copy_spec([
            "/etc/tuned",
            "/usr/lib/tuned",
            "/var/log/tuned/tuned.log"
        ])
# vim: set et ts=4 sw=4 :
| gpl-2.0 |
gustavla/dotfiles | ipython_config.py | 1 | 19767 | # Configuration file for ipython.
c = get_config()
#------------------------------------------------------------------------------
# InteractiveShellApp configuration
#------------------------------------------------------------------------------
# A Mixin for applications that start InteractiveShell instances.
#
# Provides configurables for loading extensions and executing files as part of
# configuring a Shell environment.
#
# The following methods should be called by the :meth:`initialize` method of the
# subclass:
#
# - :meth:`init_path`
# - :meth:`init_shell` (to be implemented by the subclass)
# - :meth:`init_gui_pylab`
# - :meth:`init_extensions`
# - :meth:`init_code`
# Execute the given command string.
# c.InteractiveShellApp.code_to_run = ''
# List of files to run at IPython startup.
# c.InteractiveShellApp.exec_files = []
# dotted module name of an IPython extension to load.
# c.InteractiveShellApp.extra_extension = ''
# Configure matplotlib for interactive use with the default matplotlib backend.
# c.InteractiveShellApp.matplotlib = None
# A file to be run
# c.InteractiveShellApp.file_to_run = ''
# Run the module as a script.
# c.InteractiveShellApp.module_to_run = ''
# Enable GUI event loop integration with any of ('glut', 'gtk', 'gtk3', 'none',
# 'osx', 'pyglet', 'qt', 'qt4', 'tk', 'wx').
# c.InteractiveShellApp.gui = None
# Run the file referenced by the PYTHONSTARTUP environment variable at IPython
# startup.
# c.InteractiveShellApp.exec_PYTHONSTARTUP = True
# Should variables loaded at startup (by startup files, exec_lines, etc.) be
# hidden from tools like %who?
# c.InteractiveShellApp.hide_initial_ns = True
# If true, IPython will populate the user namespace with numpy, pylab, etc. and
# an ``import *`` is done from numpy and pylab, when using pylab mode.
#
# When False, pylab mode should not import any names into the user namespace.
# c.InteractiveShellApp.pylab_import_all = True
# Pre-load matplotlib and numpy for interactive use, selecting a particular
# matplotlib backend and loop integration.
# c.InteractiveShellApp.pylab = None
# lines of code to run at IPython startup.
# c.InteractiveShellApp.exec_lines = []
# A list of dotted module names of IPython extensions to load.
# c.InteractiveShellApp.extensions = []
#------------------------------------------------------------------------------
# TerminalIPythonApp configuration
#------------------------------------------------------------------------------
# TerminalIPythonApp will inherit config from: BaseIPythonApplication,
# Application, InteractiveShellApp
# A file to be run
# c.TerminalIPythonApp.file_to_run = ''
# List of files to run at IPython startup.
# c.TerminalIPythonApp.exec_files = []
# Whether to install the default config files into the profile dir. If a new
# profile is being created, and IPython contains config files for that profile,
# then they will be staged into the new directory. Otherwise, default config
# files will be automatically generated.
# c.TerminalIPythonApp.copy_config_files = False
# Configure matplotlib for interactive use with the default matplotlib backend.
# c.TerminalIPythonApp.matplotlib = None
# Set the log level by value or name.
# c.TerminalIPythonApp.log_level = 30
# Run the file referenced by the PYTHONSTARTUP environment variable at IPython
# startup.
# c.TerminalIPythonApp.exec_PYTHONSTARTUP = True
# Run the module as a script.
# c.TerminalIPythonApp.module_to_run = ''
# The IPython profile to use.
# c.TerminalIPythonApp.profile = 'default'
# Pre-load matplotlib and numpy for interactive use, selecting a particular
# matplotlib backend and loop integration.
# c.TerminalIPythonApp.pylab = None
# Whether to overwrite existing config files when copying
# c.TerminalIPythonApp.overwrite = False
# Whether to display a banner upon starting IPython.
# c.TerminalIPythonApp.display_banner = True
# Start IPython quickly by skipping the loading of config files.
# c.TerminalIPythonApp.quick = False
# Execute the given command string.
# c.TerminalIPythonApp.code_to_run = ''
# dotted module name of an IPython extension to load.
# c.TerminalIPythonApp.extra_extension = ''
# If a command or file is given via the command-line, e.g. 'ipython foo.py',
# start an interactive shell after executing the file or command.
# c.TerminalIPythonApp.force_interact = False
# Create a massive crash report when IPython encounters what may be an internal
# error. The default is to append a short message to the usual traceback
# c.TerminalIPythonApp.verbose_crash = False
# The Logging format template
# c.TerminalIPythonApp.log_format = '[%(name)s]%(highlevel)s %(message)s'
# Should variables loaded at startup (by startup files, exec_lines, etc.) be
# hidden from tools like %who?
# c.TerminalIPythonApp.hide_initial_ns = True
# Enable GUI event loop integration with any of ('glut', 'gtk', 'gtk3', 'none',
# 'osx', 'pyglet', 'qt', 'qt4', 'tk', 'wx').
# c.TerminalIPythonApp.gui = None
# Path to an extra config file to load.
#
# If specified, load this config file in addition to any other IPython config.
# c.TerminalIPythonApp.extra_config_file = ''
# If true, IPython will populate the user namespace with numpy, pylab, etc. and
# an ``import *`` is done from numpy and pylab, when using pylab mode.
#
# When False, pylab mode should not import any names into the user namespace.
# c.TerminalIPythonApp.pylab_import_all = True
# lines of code to run at IPython startup.
# c.TerminalIPythonApp.exec_lines = []
# The name of the IPython directory. This directory is used for logging
# configuration (through profiles), history storage, etc. The default is usually
# $HOME/.ipython. This options can also be specified through the environment
# variable IPYTHONDIR.
# c.TerminalIPythonApp.ipython_dir = ''
# A list of dotted module names of IPython extensions to load.
# c.TerminalIPythonApp.extensions = []
# Suppress warning messages about legacy config files
# c.TerminalIPythonApp.ignore_old_config = False
# The date format used by logging formatters for %(asctime)s
# c.TerminalIPythonApp.log_datefmt = '%Y-%m-%d %H:%M:%S'
#------------------------------------------------------------------------------
# TerminalInteractiveShell configuration
#------------------------------------------------------------------------------
# TerminalInteractiveShell will inherit config from: InteractiveShell
# Enable auto setting the terminal title.
# c.TerminalInteractiveShell.term_title = False
# Use colors for displaying information about objects. Because this information
# is passed through a pager (like 'less'), and some pagers get confused with
# color codes, this capability can be turned off.
# c.TerminalInteractiveShell.color_info = True
# Deprecated, use PromptManager.out_template
# c.TerminalInteractiveShell.prompt_out = 'Out[\\#]: '
# A list of ast.NodeTransformer subclass instances, which will be applied to
# user input before code is run.
# c.TerminalInteractiveShell.ast_transformers = []
#
# c.TerminalInteractiveShell.object_info_string_level = 0
# Start logging to the given file in append mode.
# c.TerminalInteractiveShell.logappend = ''
# The name of the logfile to use.
# c.TerminalInteractiveShell.logfile = ''
# Deprecated, use PromptManager.in2_template
# c.TerminalInteractiveShell.prompt_in2 = ' .\\D.: '
# Deprecated, use PromptManager.justify
# c.TerminalInteractiveShell.prompts_pad_left = True
# The part of the banner to be printed after the profile
# c.TerminalInteractiveShell.banner2 = ''
# Set the editor used by IPython (default to $EDITOR/vi/notepad).
# c.TerminalInteractiveShell.editor = 'vim'
# Enable magic commands to be called without the leading %.
# c.TerminalInteractiveShell.automagic = True
# The shell program to be used for paging.
# c.TerminalInteractiveShell.pager = 'less'
#
# c.TerminalInteractiveShell.history_length = 10000
# Start logging to the default log file.
# c.TerminalInteractiveShell.logstart = False
# Automatically call the pdb debugger after every exception.
# c.TerminalInteractiveShell.pdb = False
#
# c.TerminalInteractiveShell.separate_out = ''
#
# c.TerminalInteractiveShell.separate_in = '\n'
#
# c.TerminalInteractiveShell.readline_parse_and_bind = ['tab: complete', '"\\C-l": clear-screen', 'set show-all-if-ambiguous on', '"\\C-o": tab-insert', '"\\C-r": reverse-search-history', '"\\C-s": forward-search-history', '"\\C-p": history-search-backward', '"\\C-n": history-search-forward', '"\\e[A": history-search-backward', '"\\e[B": history-search-forward', '"\\C-k": kill-line', '"\\C-u": unix-line-discard']
# Don't call post-execute functions that have failed in the past.
# c.TerminalInteractiveShell.disable_failing_post_execute = False
# Make IPython automatically call any callable object even if you didn't type
# explicit parentheses. For example, 'str 43' becomes 'str(43)' automatically.
# The value can be '0' to disable the feature, '1' for 'smart' autocall, where
# it is not applied if there are no more arguments on the line, and '2' for
# 'full' autocall, where all callable objects are automatically called (even if
# no arguments are present).
# c.TerminalInteractiveShell.autocall = 0
#
# c.TerminalInteractiveShell.wildcards_case_sensitive = True
# Enable deep (recursive) reloading by default. IPython can use the deep_reload
# module which reloads changes in modules recursively (it replaces the reload()
# function, so you don't need to change anything to use it). deep_reload()
# forces a full reload of modules whose code may have changed, which the default
# reload() function does not. When deep_reload is off, IPython will use the
# normal reload(), but deep_reload will still be available as dreload().
# c.TerminalInteractiveShell.deep_reload = False
# Show rewritten input, e.g. for autocall.
# c.TerminalInteractiveShell.show_rewritten_input = True
# 'all', 'last', 'last_expr' or 'none', specifying which nodes should be run
# interactively (displaying output from expressions).
# c.TerminalInteractiveShell.ast_node_interactivity = 'last_expr'
# Set to confirm when you try to exit IPython with an EOF (Control-D in Unix,
# Control-Z/Enter in Windows). By typing 'exit' or 'quit', you can force a
# direct exit without any confirmation.
# c.TerminalInteractiveShell.confirm_exit = True
#
# c.TerminalInteractiveShell.debug = False
# auto editing of files with syntax errors.
# c.TerminalInteractiveShell.autoedit_syntax = False
#
# c.TerminalInteractiveShell.readline_remove_delims = '-/~'
# The part of the banner to be printed before the profile
# c.TerminalInteractiveShell.banner1 = 'Python 3.4.1 (default, Jul 30 2014, 17:10:01) \nType "copyright", "credits" or "license" for more information.\n\nIPython 2.2.0 -- An enhanced Interactive Python.\n? -> Introduction and overview of IPython\'s features.\n%quickref -> Quick reference.\nhelp -> Python\'s own help system.\nobject? -> Details about \'object\', use \'object??\' for extra details.\n'
#
# c.TerminalInteractiveShell.readline_use = True
# Autoindent IPython code entered interactively.
# c.TerminalInteractiveShell.autoindent = True
#
# c.TerminalInteractiveShell.ipython_dir = ''
# Set the size of the output cache. The default is 1000, you can change it
# permanently in your config file. Setting it to 0 completely disables the
# caching system, and the minimum value accepted is 20 (if you provide a value
# less than 20, it is reset to 0 and a warning is issued). This limit is
# defined because otherwise you'll spend more time re-flushing a too small cache
# than working
# c.TerminalInteractiveShell.cache_size = 1000
# Save multi-line entries as one entry in readline history
# c.TerminalInteractiveShell.multiline_history = True
#
# c.TerminalInteractiveShell.quiet = False
# Set the color scheme (NoColor, Linux, or LightBG).
# c.TerminalInteractiveShell.colors = 'Linux'
#
# c.TerminalInteractiveShell.xmode = 'Context'
#
# c.TerminalInteractiveShell.separate_out2 = ''
# Number of lines of your screen, used to control printing of very long strings.
# Strings longer than this number of lines will be sent through a pager instead
# of directly printed. The default value for this is 0, which means IPython
# will auto-detect your screen size every time it needs to print certain
# potentially long strings (this doesn't change the behavior of the 'print'
# keyword, it's only triggered internally). If for some reason this isn't
# working well (it needs curses support), specify it yourself. Otherwise don't
# change the default.
# c.TerminalInteractiveShell.screen_length = 0
# Deprecated, use PromptManager.in_template
# c.TerminalInteractiveShell.prompt_in1 = 'In [\\#]: '
#------------------------------------------------------------------------------
# PromptManager configuration
#------------------------------------------------------------------------------
# This is the primary interface for producing IPython's prompts.
# Continuation prompt: dark-gray '... ' matching the vanilla Python REPL's
# secondary prompt (the prompt counter '\#' is deliberately not used).
c.PromptManager.in2_template = '{color.DarkGray}... '
# Input prompt: light-blue '>>> ' like the vanilla Python REPL, again with
# no 'In [n]:' counter.
c.PromptManager.in_template = '{color.LightBlue}>>> '
#
# c.PromptManager.color_scheme = 'Linux'
# Output prompt left empty: results print without an 'Out[n]:' prefix.
c.PromptManager.out_template = ''
# Disable right-alignment/padding of prompts to a common width.
c.PromptManager.justify = False
#------------------------------------------------------------------------------
# HistoryManager configuration
#------------------------------------------------------------------------------
# A class to organize all history-related functionality in one place.
# HistoryManager will inherit config from: HistoryAccessor
# Write to database every x commands (higher values save disk access & power).
# Values of 1 or less effectively disable caching.
# c.HistoryManager.db_cache_size = 0
# Path to file to use for SQLite history database.
#
# By default, IPython will put the history database in the IPython profile
# directory. If you would rather share one history among profiles, you can set
# this value in each, so that they are consistent.
#
# Due to an issue with fcntl, SQLite is known to misbehave on some NFS mounts.
# If you see IPython hanging, try setting this to something on a local disk,
# e.g::
#
# ipython --HistoryManager.hist_file=/tmp/ipython_hist.sqlite
# c.HistoryManager.hist_file = ''
# Options for configuring the SQLite connection
#
# These options are passed as keyword args to sqlite3.connect when establishing
# database connections.
# c.HistoryManager.connection_options = {}
# Should the history database include output? (default: no)
# c.HistoryManager.db_log_output = False
# enable the SQLite history
#
# set enabled=False to disable the SQLite history, in which case there will be
# no stored history, no SQLite connection, and no background saving thread.
# This may be necessary in some threaded environments where IPython is embedded.
# c.HistoryManager.enabled = True
#------------------------------------------------------------------------------
# ProfileDir configuration
#------------------------------------------------------------------------------
# An object to manage the profile directory and its resources.
#
# The profile directory is used by all IPython applications, to manage
# configuration, logging and security.
#
# This object knows how to find, create and manage these directories. This
# should be used by any code that wants to handle profiles.
# Set the profile location directly. This overrides the logic used by the
# `profile` option.
# c.ProfileDir.location = ''
#------------------------------------------------------------------------------
# PlainTextFormatter configuration
#------------------------------------------------------------------------------
# The default pretty-printer.
#
# This uses :mod:`IPython.lib.pretty` to compute the format data of the object.
# If the object cannot be pretty printed, :func:`repr` is used. See the
# documentation of :mod:`IPython.lib.pretty` for details on how to write pretty
# printers. Here is a simple example::
#
# def dtype_pprinter(obj, p, cycle):
# if cycle:
# return p.text('dtype(...)')
# if hasattr(obj, 'fields'):
# if obj.fields is None:
# p.text(repr(obj))
# else:
# p.begin_group(7, 'dtype([')
# for i, field in enumerate(obj.descr):
# if i > 0:
# p.text(',')
# p.breakable()
# p.pretty(field)
# p.end_group(7, '])')
# PlainTextFormatter will inherit config from: BaseFormatter
#
# c.PlainTextFormatter.deferred_printers = {}
#
# c.PlainTextFormatter.type_printers = {}
#
# c.PlainTextFormatter.float_precision = ''
#
# c.PlainTextFormatter.pprint = True
#
# c.PlainTextFormatter.verbose = False
#
# c.PlainTextFormatter.newline = '\n'
#
# c.PlainTextFormatter.singleton_printers = {}
#
# c.PlainTextFormatter.max_width = 79
#------------------------------------------------------------------------------
# IPCompleter configuration
#------------------------------------------------------------------------------
# Extension of the completer class with IPython-specific features
# IPCompleter will inherit config from: Completer
# Whether to merge completion results into a single list
#
# If False, only the completion results from the first non-empty completer will
# be returned.
# c.IPCompleter.merge_completions = True
# Instruct the completer to use __all__ for the completion
#
# Specifically, when completing on ``object.<tab>``.
#
# When True: only those names in obj.__all__ will be included.
#
# When False [default]: the __all__ attribute is ignored
# c.IPCompleter.limit_to__all__ = False
# Instruct the completer to omit private method names
#
# Specifically, when completing on ``object.<tab>``.
#
# When 2 [default]: all names that start with '_' will be excluded.
#
# When 1: all 'magic' names (``__foo__``) will be excluded.
#
# When 0: nothing will be excluded.
# c.IPCompleter.omit__names = 2
# Activate greedy completion
#
# This will enable completion on elements of lists, results of function calls,
# etc., but can be unsafe because the code is actually evaluated on TAB.
# c.IPCompleter.greedy = False
#------------------------------------------------------------------------------
# ScriptMagics configuration
#------------------------------------------------------------------------------
# Magics for talking to scripts
#
# This defines a base `%%script` cell magic for running a cell with a program in
# a subprocess, and registers a few top-level magics that call %%script with
# common interpreters.
# Dict mapping short 'ruby' names to full paths, such as '/opt/secret/bin/ruby'
#
# Only necessary for items in script_magics where the default path will not find
# the right interpreter.
# c.ScriptMagics.script_paths = {}
# Extra script cell magics to define
#
# This generates simple wrappers of `%%script foo` as `%%foo`.
#
# If you want to add script magics that aren't on your path, specify them in
# script_paths
# c.ScriptMagics.script_magics = []
#------------------------------------------------------------------------------
# StoreMagics configuration
#------------------------------------------------------------------------------
# Lightweight persistence for python variables.
#
# Provides the %store magic.
# If True, any %store-d variables will be automatically restored when IPython
# starts.
# c.StoreMagics.autorestore = False
| bsd-3-clause |
aptana/Pydev | bundles/org.python.pydev.jython/Lib/keyword.py | 11 | 2065 | #! /usr/bin/env python
"""Keywords (from "graminit.c")
This file is automatically generated; please don't muck it up!
To update the symbols in this file, 'cd' to the top directory of
the python source tree after building the interpreter and run:
python Lib/keyword.py
"""
__all__ = ["iskeyword", "kwlist"]
kwlist = [
#--start keywords--
'and',
'assert',
'break',
'class',
'continue',
'def',
'del',
'elif',
'else',
'except',
'exec',
'finally',
'for',
'from',
'global',
'if',
'import',
'in',
'is',
'lambda',
'not',
'or',
'pass',
'print',
'raise',
'return',
'try',
'while',
'yield',
#--end keywords--
]
kwdict = {}
for keyword in kwlist:
kwdict[keyword] = 1
iskeyword = kwdict.has_key
def main():
    """Regenerate the keyword list in this file.

    Scans the grammar table (Python/graminit.c by default, or argv[1]) for
    keyword entries and splices them between the #--start keywords-- /
    #--end keywords-- markers of Lib/keyword.py (or argv[2]).
    """
    import sys, re
    args = sys.argv[1:]
    iptfile = args and args[0] or "Python/graminit.c"
    if len(args) > 1: optfile = args[1]
    else: optfile = "Lib/keyword.py"

    # scan the source file for keywords
    fp = open(iptfile)
    strprog = re.compile('"([^"]+)"')
    lines = []
    while 1:
        line = fp.readline()
        if not line: break
        # only rows flagged with '{1, "' in the grammar table are keywords;
        # pull out the quoted name and pre-format it as a kwlist entry
        if line.find('{1, "') > -1:
            match = strprog.search(line)
            if match:
                lines.append("        '" + match.group(1) + "',\n")
    fp.close()
    lines.sort()

    # load the output skeleton from the target
    # (note: 'format' shadows the builtin of the same name, kept as-is)
    fp = open(optfile)
    format = fp.readlines()
    fp.close()

    # insert the lines of keywords between the exact marker lines
    try:
        start = format.index("#--start keywords--\n") + 1
        end = format.index("#--end keywords--\n")
        format[start:end] = lines
    except ValueError:
        sys.stderr.write("target does not contain format markers\n")
        sys.exit(1)

    # write the output file
    fp = open(optfile, 'w')
    fp.write(''.join(format))
    fp.close()
| epl-1.0 |
renfufei/shadowsocks | setup.py | 16 | 1321 | import codecs
from setuptools import setup
# Use the README as the PyPI long description; read it as UTF-8 explicitly
# so the build does not depend on the locale's default encoding.
with codecs.open('README.rst', encoding='utf-8') as f:
    long_description = f.read()

setup(
    name="shadowsocks",
    version="2.6.9",
    license='http://www.apache.org/licenses/LICENSE-2.0',
    description="A fast tunnel proxy that help you get through firewalls",
    author='clowwindy',
    author_email='clowwindy42@gmail.com',
    url='https://github.com/shadowsocks/shadowsocks',
    packages=['shadowsocks', 'shadowsocks.crypto'],
    package_data={
        'shadowsocks': ['README.rst', 'LICENSE']
    },
    install_requires=[],
    # console entry points: installs the 'sslocal' and 'ssserver' commands
    entry_points="""
    [console_scripts]
    sslocal = shadowsocks.local:main
    ssserver = shadowsocks.server:main
    """,
    classifiers=[
        'License :: OSI Approved :: Apache Software License',
        'Programming Language :: Python :: 2',
        'Programming Language :: Python :: 2.6',
        'Programming Language :: Python :: 2.7',
        'Programming Language :: Python :: 3',
        'Programming Language :: Python :: 3.3',
        'Programming Language :: Python :: 3.4',
        'Programming Language :: Python :: Implementation :: CPython',
        'Programming Language :: Python :: Implementation :: PyPy',
        'Topic :: Internet :: Proxy Servers',
    ],
    long_description=long_description,
)
| apache-2.0 |
didrocks/notifythis | setup.py | 1 | 3960 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
### BEGIN LICENSE
# Copyright (C) 2009 Didier Roche <didrocks@ubuntu.com>
#This program is free software: you can redistribute it and/or modify it
#under the terms of the GNU General Public License version 3, as published
#by the Free Software Foundation.
#
#This program is distributed in the hope that it will be useful, but
#WITHOUT ANY WARRANTY; without even the implied warranties of
#MERCHANTABILITY, SATISFACTORY QUALITY, or FITNESS FOR A PARTICULAR
#PURPOSE. See the GNU General Public License for more details.
#
#You should have received a copy of the GNU General Public License along
#with this program. If not, see <http://www.gnu.org/licenses/>.
### END LICENSE
###################### DO NOT TOUCH THIS (HEAD TO THE SECOND PART) ######################
try:
import DistUtilsExtra.auto
except ImportError:
import sys
print >> sys.stderr, 'To build notifythis you need https://launchpad.net/python-distutils-extra'
sys.exit(1)
assert DistUtilsExtra.auto.__version__ >= '2.10', 'needs DistUtilsExtra.auto >= 2.10'
import os
def update_data_path(prefix, oldvalue=None):
    # Rewrite notifythis/notifythisconfig.py in place so that the
    # __notifythis_data_directory__ constant points at `prefix`.
    # Called twice by the installer: once with oldvalue=None to set the
    # install prefix (returning the previous value), and once afterwards
    # with that saved value to restore the source tree.
    # NOTE: Python 2 code (`file()`, `except ..., e` syntax).
    try:
        fin = file('notifythis/notifythisconfig.py', 'r')
        # write to a sibling '.new' file, then atomically rename over the
        # original below
        fout = file(fin.name + '.new', 'w')
        for line in fin:
            fields = line.split(' = ') # Separate variable from value
            if fields[0] == '__notifythis_data_directory__':
                # update to prefix, store oldvalue
                if not oldvalue:
                    # fields[1] keeps its quotes and trailing newline, so it
                    # can be written back verbatim on the restore pass
                    oldvalue = fields[1]
                    line = "%s = '%s'\n" % (fields[0], prefix)
                else: # restore oldvalue
                    line = "%s = %s" % (fields[0], oldvalue)
            fout.write(line)
        fout.flush()
        fout.close()
        fin.close()
        os.rename(fout.name, fin.name)
    except (OSError, IOError), e:
        # best effort: missing config file is only warned about
        print ("WARNING: Can't find notifythis/notifythisconfig.py")
    return oldvalue
def update_desktop_file(datadir):
    # Point the Icon= entry of the .desktop template at the installed
    # media/icon.png under `datadir`. Python 2 code (`file()`,
    # `except ..., e` syntax); same write-then-rename pattern as
    # update_data_path above.
    try:
        fin = file('notifythis.desktop.in', 'r')
        fout = file(fin.name + '.new', 'w')
        for line in fin:
            if 'Icon=' in line:
                line = "Icon=%s\n" % (datadir + 'media/icon.png')
            fout.write(line)
        fout.flush()
        fout.close()
        fin.close()
        os.rename(fout.name, fin.name)
    except (OSError, IOError), e:
        # best effort: missing template is only warned about
        print ("WARNING: Can't find notifythis.desktop.in")
class InstallAndUpdateDataDirectory(DistUtilsExtra.auto.install_auto):
    # Custom 'install' command: temporarily rewrites the installed data
    # directory into notifythisconfig.py and the .desktop icon path, runs
    # the stock install, then restores the original value in the source
    # tree so a working copy is left unmodified.
    def run(self):
        if self.root or self.home:
            # NOTE(review): this warning still names quickly/quicklyconfig.py
            # -- presumably copied from the Quickly project template; the
            # real file here is notifythis/notifythisconfig.py.
            print "WARNING: You don't use a standard --prefix installation, take care that you eventually " \
                "need to update quickly/quicklyconfig.py file to adjust __quickly_data_directory__. You can " \
                "ignore this warning if you are packaging and uses --prefix."
        previous_value = update_data_path(self.prefix + '/share/notifythis/')
        update_desktop_file(self.prefix + '/share/notifythis/')
        DistUtilsExtra.auto.install_auto.run(self)
        # restore the pre-install value captured above
        update_data_path(self.prefix, previous_value)
##################################################################################
###################### YOU SHOULD MODIFY ONLY WHAT IS BELOW ######################
##################################################################################
# Project metadata; cmdclass hooks the path-rewriting installer defined
# above into the standard 'install' command.
DistUtilsExtra.auto.setup(
    name='notifythis',
    version='0.2',
    license='GPL-3',
    author='Didier Roche',
    author_email='didrocks@gmail.com',
    description='Easily notify information to user from data file…',
    long_description='NotifyThis is a daemon enabling notification from network or local xml files, showing icon, severity, caching data… The current use case is announcing conferences, lessons, as information during a party.',
    url='https://launchpad.net/notifythis',
    cmdclass={'install': InstallAndUpdateDataDirectory}
)
| gpl-3.0 |
nacc/autotest | tko/parsers/base.py | 6 | 2468 | import traceback
from autotest.tko import status_lib, utils as tko_utils
class parser(object):
    """
    Abstract parser base class. Provides a generic implementation of the
    standard parser interaction functions. The derived classes must
    implement a state_iterator method for this class to be useful.
    """

    def start(self, job):
        """Initialize the parser for processing the results of 'job'."""
        # initialize all the basic parser parameters
        self.job = job
        self.finished = False
        self.line_buffer = status_lib.line_buffer()
        # create and prime the parser state machine; advancing it once runs
        # the generator up to its first yield so it is ready for input
        self.state = self.state_iterator(self.line_buffer)
        self.state.next()

    def process_lines(self, lines):
        """Feed 'lines' into the parser state machine, and return
        a list of all the new test results produced."""
        self.line_buffer.put_multiple(lines)
        try:
            return self.state.next()
        except StopIteration:
            # the state machine already terminated (end() was called);
            # warn loudly but keep the contract of returning a list
            msg = ("WARNING: parser was called to process status "
                   "lines after it was end()ed\n"
                   "Current traceback:\n" +
                   traceback.format_exc() +
                   "\nCurrent stack:\n" +
                   "".join(traceback.format_stack()))
            tko_utils.dprint(msg)
            return []

    def end(self, lines=None):
        """Feed 'lines' into the parser state machine, signal to the
        state machine that no more lines are forthcoming, and then
        return a list of all the new test results produced.

        'lines' defaults to no extra lines; the old mutable default
        argument (lines=[]) was replaced with the None sentinel, which
        is backward compatible for all callers."""
        if lines is None:
            lines = []
        self.line_buffer.put_multiple(lines)
        # run the state machine to clear out the buffer
        self.finished = True
        try:
            return self.state.next()
        except StopIteration:
            msg = ("WARNING: parser was end()ed multiple times\n"
                   "Current traceback:\n" +
                   traceback.format_exc() +
                   "\nCurrent stack:\n" +
                   "".join(traceback.format_stack()))
            tko_utils.dprint(msg)
            return []

    @staticmethod
    def make_job(dir):
        """Create a new instance of the job model used by the
        parser, given a results directory."""
        raise NotImplementedError

    def state_iterator(self, buffer):
        """A generator method that implements the actual parser
        state machine."""
        raise NotImplementedError
| gpl-2.0 |
Mercy-Nekesa/sokoapp | sokoapp/coupons/tests/runtests.py | 1 | 1632 | #!/usr/bin/env python
"""
Inspired by https://github.com/mbrochh/tdd-with-django-reusable-app
Thanks a lot!
"""
import os
import sys
from django.conf import settings
# Django contrib apps required by the test settings (also excluded from
# the coverage report below).
EXTERNAL_APPS = [
    'django.contrib.admin',
    'django.contrib.admindocs',
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'django.contrib.messages',
    'django.contrib.sessions',
    'django.contrib.staticfiles',
    'django.contrib.sitemaps',
    'django.contrib.sites',
]

# The app under test plus the nose test-runner integration.
INTERNAL_APPS = [
    'django_nose',
    'coupons',
]

INSTALLED_APPS = EXTERNAL_APPS + INTERNAL_APPS

# Module-name patterns excluded from the coverage report.
COVERAGE_MODULE_EXCLUDES = [
    'tests$', 'settings$', 'urls$', 'locale$',
    'migrations', 'fixtures', 'admin$', 'django_extensions',
]
COVERAGE_MODULE_EXCLUDES += EXTERNAL_APPS

# Configure a throwaway in-memory SQLite environment, but only if the
# caller has not already configured Django settings.
if not settings.configured:
    settings.configure(
        USE_TZ=True,
        DATABASES={
            "default": {
                "ENGINE": "django.db.backends.sqlite3",
                "NAME": ":memory:",
            }
        },
        INSTALLED_APPS=INSTALLED_APPS,
        COVERAGE_MODULE_EXCLUDES=COVERAGE_MODULE_EXCLUDES,
        COVERAGE_REPORT_HTML_OUTPUT_DIR=os.path.join(
            os.path.dirname(__file__), 'coverage')
    )
from django_coverage.coverage_runner import CoverageRunner
from django_nose import NoseTestSuiteRunner
class NoseCoverageTestRunner(CoverageRunner, NoseTestSuiteRunner):
    """Custom test runner that uses nose and coverage.

    Combines the two bases; CoverageRunner is listed first -- presumably so
    coverage measurement wraps the nose run via the MRO (TODO confirm).
    """
    pass
def runtests(*test_args):
    """Run the given test labels through the nose+coverage runner and
    exit the process with the failure count as the status code."""
    runner = NoseCoverageTestRunner(verbosity=2, interactive=True)
    sys.exit(runner.run_tests(test_args))
if __name__ == '__main__':
runtests(*sys.argv[1:])
| mit |
jasonzio/azure-linux-extensions | TestHandlerLinux/bin/update.py | 16 | 1941 | #!/usr/bin/env python
"""
Example Azure Handler script for Linux IaaS
Update example
Reads port from Public Config if present.
Creates service_port.txt in resources dir.
Copies the service to /usr/bin and updates it
with the resource path.
"""
import os
import sys
import imp
import time
waagent=imp.load_source('waagent','/usr/sbin/waagent')
from waagent import LoggerInit
hutil=imp.load_source('HandlerUtil','./resources/HandlerUtil.py')
# Log against the global agent log until the extension name is known, then
# re-initialise against the extension-specific log below.
LoggerInit('/var/log/waagent.log','/dev/stdout')
waagent.Log("update.py starting.")
waagent.MyDistro=waagent.GetMyDistro()
logfile=waagent.Log
# Parse the handler environment/settings for this 'Update' operation.
name,seqNo,version,config_dir,log_dir,settings_file,status_file,heartbeat_file,config=hutil.doParse(logfile,'Update')
LoggerInit('/var/log/'+name+'_Update.log','/dev/stdout')
waagent.Log(name+" - update.py starting.")
logfile=waagent.Log
# Report a transitioning status while the update runs.
# BUGFIX: the timestamp format used "%Y-%M-%d..." -- %M is *minutes*; the
# month directive is %m, so timestamps were malformed.
hutil.doStatusReport(name,seqNo,version,status_file,time.strftime("%Y-%m-%dT%H:%M:%SZ", time.gmtime()),time.strftime("%Y-%m-%dT%H:%M:%SZ", time.gmtime()),name,
                     'Update', 'transitioning', '0', 'Updating', 'Process Config', 'transitioning', '0', 'Parsing ' + settings_file)
hutil.doHealthReport(heartbeat_file,'NotReady','0','Processing Settings')  # typo fix: was 'Proccessing'
# capture the config info from previous installation
# argv[1] is the path to the previous version.
waagent.SetFileContents('./resources/service_port.txt',waagent.GetFileContents(sys.argv[1]+'/resources/service_port.txt'))
# move the service to sbin and point it at this version's resources dir
waagent.SetFileContents('/usr/sbin/service.py',waagent.GetFileContents('./bin/service.py'))
waagent.ReplaceStringInFile('/usr/sbin/service.py','RESOURCES_PATH',os.path.realpath('./resources'))
os.chmod('/usr/sbin/service.py',0o700)  # 0o700 spelling is valid on Python 2.6+ and 3.x (was py2-only 0700)
# report ready
waagent.Log(name+" updating completed.")  # typo fix: space was missing after the name
hutil.doExit(name,seqNo,version,0,status_file,heartbeat_file,'Update','success','0', 'Update Succeeded.', 'Exit Successful', 'success', '0', 'Updating Completed.','Ready','0',name+' update completed.')
| apache-2.0 |
ofekd/servo | tests/wpt/web-platform-tests/tools/pytest/testing/test_config.py | 166 | 19617 | import py, pytest
import _pytest._code
from _pytest.config import getcfg, get_common_ancestor, determine_setup
from _pytest.main import EXIT_NOTESTSCOLLECTED
class TestParseIni:
    """Tests for how pytest discovers and parses ini-style configuration
    (setup.cfg, tox.ini, pytest.ini) and merges it with command-line
    options."""

    def test_getcfg_and_config(self, testdir, tmpdir):
        # getcfg() must find the [pytest] section of a setup.cfg in a parent
        # directory, and parseconfigure() must expose the same ini values.
        sub = tmpdir.mkdir("sub")
        sub.chdir()
        tmpdir.join("setup.cfg").write(_pytest._code.Source("""
            [pytest]
            name = value
        """))
        rootdir, inifile, cfg = getcfg([sub], ["setup.cfg"])
        assert cfg['name'] == "value"
        config = testdir.parseconfigure(sub)
        assert config.inicfg['name'] == 'value'

    def test_getcfg_empty_path(self, tmpdir):
        # Regression check: an empty path element must not crash getcfg().
        getcfg([''], ['setup.cfg']) #happens on py.test ""

    def test_append_parse_args(self, testdir, tmpdir, monkeypatch):
        # Options from $PYTEST_ADDOPTS are appended after the ini file's
        # addopts, so both must be visible on the parsed config.
        monkeypatch.setenv('PYTEST_ADDOPTS', '--color no -rs --tb="short"')
        tmpdir.join("setup.cfg").write(_pytest._code.Source("""
            [pytest]
            addopts = --verbose
        """))
        config = testdir.parseconfig(tmpdir)
        assert config.option.color == 'no'
        assert config.option.reportchars == 's'
        assert config.option.tbstyle == 'short'
        assert config.option.verbose
        #config = testdir.Config()
        #args = [tmpdir,]
        #config._preparse(args, addopts=False)
        #assert len(args) == 1

    def test_tox_ini_wrong_version(self, testdir):
        # A minversion higher than the running pytest must abort with a
        # message naming the ini file and both versions.
        testdir.makefile('.ini', tox="""
            [pytest]
            minversion=9.0
        """)
        result = testdir.runpytest()
        assert result.ret != 0
        result.stderr.fnmatch_lines([
            "*tox.ini:2*requires*9.0*actual*"
        ])

    @pytest.mark.parametrize("name", "setup.cfg tox.ini pytest.ini".split())
    def test_ini_names(self, testdir, name):
        # All three supported ini file names must be honoured equally.
        testdir.tmpdir.join(name).write(py.std.textwrap.dedent("""
            [pytest]
            minversion = 1.0
        """))
        config = testdir.parseconfig()
        assert config.getini("minversion") == "1.0"

    def test_toxini_before_lower_pytestini(self, testdir):
        # A tox.ini closer to the tested directory wins over a pytest.ini
        # higher up the tree.
        sub = testdir.tmpdir.mkdir("sub")
        sub.join("tox.ini").write(py.std.textwrap.dedent("""
            [pytest]
            minversion = 2.0
        """))
        testdir.tmpdir.join("pytest.ini").write(py.std.textwrap.dedent("""
            [pytest]
            minversion = 1.5
        """))
        config = testdir.parseconfigure(sub)
        assert config.getini("minversion") == "2.0"

    @pytest.mark.xfail(reason="probably not needed")
    def test_confcutdir(self, testdir):
        # With --confcutdir limiting config collection, the bad option in
        # the parent ini should be ignored and the run should succeed.
        sub = testdir.mkdir("sub")
        sub.chdir()
        testdir.makeini("""
            [pytest]
            addopts = --qwe
        """)
        result = testdir.inline_run("--confcutdir=.")
        assert result.ret == 0
class TestConfigCmdlineParsing:
    """Tests for Config.parse() semantics and explicit -c config selection."""
    # parse() is one-shot; calling it a second time must raise.
    def test_parsing_again_fails(self, testdir):
        config = testdir.parseconfig()
        pytest.raises(AssertionError, lambda: config.parse([]))
    # A config file given via -c takes precedence over pytest.ini.
    def test_explicitly_specified_config_file_is_loaded(self, testdir):
        testdir.makeconftest("""
            def pytest_addoption(parser):
                parser.addini("custom", "")
        """)
        testdir.makeini("""
            [pytest]
            custom = 0
        """)
        testdir.makefile(".cfg", custom = """
            [pytest]
            custom = 1
        """)
        config = testdir.parseconfig("-c", "custom.cfg")
        assert config.getini("custom") == "1"
class TestConfigAPI:
    """Tests for the public Config object API: tracing, option lookup,
    getvalueorskip, conftest path lists, and the parser.addini() types."""
    # config.trace routes messages through the trace root writer.
    def test_config_trace(self, testdir):
        config = testdir.parseconfig()
        l = []
        config.trace.root.setwriter(l.append)
        config.trace("hello")
        assert len(l) == 1
        assert l[0] == "hello [config]\n"
    # getoption accepts dest name, long and short option spellings.
    def test_config_getoption(self, testdir):
        testdir.makeconftest("""
            def pytest_addoption(parser):
                parser.addoption("--hello", "-X", dest="hello")
        """)
        config = testdir.parseconfig("--hello=this")
        for x in ("hello", "--hello", "-X"):
            assert config.getoption(x) == "this"
        pytest.raises(ValueError, "config.getoption('qweqwe')")
    # Python-2-only: unicode option names/values must round-trip.
    @pytest.mark.skipif('sys.version_info[:2] not in [(2, 6), (2, 7)]')
    def test_config_getoption_unicode(self, testdir):
        testdir.makeconftest("""
            from __future__ import unicode_literals
            def pytest_addoption(parser):
                parser.addoption('--hello', type='string')
        """)
        config = testdir.parseconfig('--hello=this')
        assert config.getoption('hello') == 'this'
    # getvalueorskip skips on unknown names, returns known option values.
    def test_config_getvalueorskip(self, testdir):
        config = testdir.parseconfig()
        pytest.raises(pytest.skip.Exception,
            "config.getvalueorskip('hello')")
        verbose = config.getvalueorskip("verbose")
        assert verbose == config.option.verbose
    # getvalueorskip also skips when the option exists but is None.
    def test_config_getvalueorskip_None(self, testdir):
        testdir.makeconftest("""
            def pytest_addoption(parser):
                parser.addoption("--hello")
        """)
        config = testdir.parseconfig()
        with pytest.raises(pytest.skip.Exception):
            config.getvalueorskip('hello')
    # getvalue raises on unknown names; getoption honors a default.
    def test_getoption(self, testdir):
        config = testdir.parseconfig()
        with pytest.raises(ValueError):
            config.getvalue('x')
        assert config.getoption("x", 1) == 1
    # conftest 'pathlist' values resolve relative entries against the conftest dir.
    def test_getconftest_pathlist(self, testdir, tmpdir):
        somepath = tmpdir.join("x", "y", "z")
        p = tmpdir.join("conftest.py")
        p.write("pathlist = ['.', %r]" % str(somepath))
        config = testdir.parseconfigure(p)
        assert config._getconftest_pathlist('notexist', path=tmpdir) is None
        pl = config._getconftest_pathlist('pathlist', path=tmpdir)
        print(pl)
        assert len(pl) == 2
        assert pl[0] == tmpdir
        assert pl[1] == somepath
    # Plain addini values come back as strings; unknown names raise.
    def test_addini(self, testdir):
        testdir.makeconftest("""
            def pytest_addoption(parser):
                parser.addini("myname", "my new ini value")
        """)
        testdir.makeini("""
            [pytest]
            myname=hello
        """)
        config = testdir.parseconfig()
        val = config.getini("myname")
        assert val == "hello"
        pytest.raises(ValueError, config.getini, 'other')
    # type="pathlist" resolves entries relative to the ini file's directory.
    def test_addini_pathlist(self, testdir):
        testdir.makeconftest("""
            def pytest_addoption(parser):
                parser.addini("paths", "my new ini value", type="pathlist")
                parser.addini("abc", "abc value")
        """)
        p = testdir.makeini("""
            [pytest]
            paths=hello world/sub.py
        """)
        config = testdir.parseconfig()
        l = config.getini("paths")
        assert len(l) == 2
        assert l[0] == p.dirpath('hello')
        assert l[1] == p.dirpath('world/sub.py')
        pytest.raises(ValueError, config.getini, 'other')
    # type="args" shlex-splits the value; defaults pass through unchanged.
    def test_addini_args(self, testdir):
        testdir.makeconftest("""
            def pytest_addoption(parser):
                parser.addini("args", "new args", type="args")
                parser.addini("a2", "", "args", default="1 2 3".split())
        """)
        testdir.makeini("""
            [pytest]
            args=123 "123 hello" "this"
        """)
        config = testdir.parseconfig()
        l = config.getini("args")
        assert len(l) == 3
        assert l == ["123", "123 hello", "this"]
        l = config.getini("a2")
        assert l == list("123")
    # type="linelist" splits on lines; an unset value yields [].
    def test_addini_linelist(self, testdir):
        testdir.makeconftest("""
            def pytest_addoption(parser):
                parser.addini("xy", "", type="linelist")
                parser.addini("a2", "", "linelist")
        """)
        testdir.makeini("""
            [pytest]
            xy= 123 345
                second line
        """)
        config = testdir.parseconfig()
        l = config.getini("xy")
        assert len(l) == 2
        assert l == ["123 345", "second line"]
        l = config.getini("a2")
        assert l == []
    # type="bool" parses truthy/falsy strings; default used when no ini.
    @pytest.mark.parametrize('str_val, bool_val',
        [('True', True), ('no', False), ('no-ini', True)])
    def test_addini_bool(self, testdir, str_val, bool_val):
        testdir.makeconftest("""
            def pytest_addoption(parser):
                parser.addini("strip", "", type="bool", default=True)
        """)
        if str_val != 'no-ini':
            testdir.makeini("""
                [pytest]
                strip=%s
            """ % str_val)
        config = testdir.parseconfig()
        assert config.getini("strip") is bool_val
    # addinivalue_line appends to an existing linelist value.
    def test_addinivalue_line_existing(self, testdir):
        testdir.makeconftest("""
            def pytest_addoption(parser):
                parser.addini("xy", "", type="linelist")
        """)
        testdir.makeini("""
            [pytest]
            xy= 123
        """)
        config = testdir.parseconfig()
        l = config.getini("xy")
        assert len(l) == 1
        assert l == ["123"]
        config.addinivalue_line("xy", "456")
        l = config.getini("xy")
        assert len(l) == 2
        assert l == ["123", "456"]
    # addinivalue_line also works when the ini value was never set.
    def test_addinivalue_line_new(self, testdir):
        testdir.makeconftest("""
            def pytest_addoption(parser):
                parser.addini("xy", "", type="linelist")
        """)
        config = testdir.parseconfig()
        assert not config.getini("xy")
        config.addinivalue_line("xy", "456")
        l = config.getini("xy")
        assert len(l) == 1
        assert l == ["456"]
        config.addinivalue_line("xy", "123")
        l = config.getini("xy")
        assert len(l) == 2
        assert l == ["456", "123"]
class TestConfigFromdictargs:
    """Tests for Config.fromdictargs(), which builds a Config from a plain
    option dict plus an argument list (used e.g. by pytest-xdist slaves)."""
    # Options from the dict are applied verbatim; parse() is still one-shot.
    def test_basic_behavior(self):
        from _pytest.config import Config
        option_dict = {
            'verbose': 444,
            'foo': 'bar',
            'capture': 'no',
        }
        args = ['a', 'b']
        config = Config.fromdictargs(option_dict, args)
        with pytest.raises(AssertionError):
            config.parse(['should refuse to parse again'])
        assert config.option.verbose == 444
        assert config.option.foo == 'bar'
        assert config.option.capture == 'no'
        assert config.args == args
    def test_origargs(self):
        """Show that fromdictargs can handle args in their "orig" format"""
        from _pytest.config import Config
        option_dict = {}
        args = ['-vvvv', '-s', 'a', 'b']
        config = Config.fromdictargs(option_dict, args)
        assert config.args == ['a', 'b']
        assert config._origargs == args
        assert config.option.verbose == 4
        assert config.option.capture == 'no'
    # An 'inifilename' option (relative to cwd) beats a local pytest.ini.
    def test_inifilename(self, tmpdir):
        tmpdir.join("foo/bar.ini").ensure().write(_pytest._code.Source("""
            [pytest]
            name = value
        """))
        from _pytest.config import Config
        inifile = '../../foo/bar.ini'
        option_dict = {
            'inifilename': inifile,
            'capture': 'no',
        }
        cwd = tmpdir.join('a/b')
        cwd.join('pytest.ini').ensure().write(_pytest._code.Source("""
            [pytest]
            name = wrong-value
            should_not_be_set = true
        """))
        with cwd.ensure(dir=True).as_cwd():
            config = Config.fromdictargs(option_dict, ())
        assert config.args == [str(cwd)]
        assert config.option.inifilename == inifile
        assert config.option.capture == 'no'
        # this indicates this is the file used for getting configuration values
        assert config.inifile == inifile
        assert config.inicfg.get('name') == 'value'
        assert config.inicfg.get('should_not_be_set') is None
def test_options_on_small_file_do_not_blow_up(testdir):
    """Smoke-test: common reporting options must not crash on a failing file."""
    test_file = testdir.makepyfile("""
        def test_f1(): assert 0
        def test_f2(): assert 0
    """)
    option_sets = [[], ['-l'], ['-s'], ['--tb=no'], ['--tb=short'],
                   ['--tb=long'], ['--fulltrace'], ['--nomagic'],
                   ['--traceconfig'], ['-v'], ['-v', '-v']]
    for extra_opts in option_sets:
        reprec = testdir.inline_run(*(extra_opts + [test_file]))
        passed, skipped, failed = reprec.countoutcomes()
        # Both tests in the file fail; nothing passes or is skipped.
        assert failed == 2
        assert skipped == passed == 0
def test_preparse_ordering_with_setuptools(testdir, monkeypatch):
    # A plugin requested via pytest_plugins / PYTEST_PLUGINS must be loadable
    # from a (faked) setuptools pytest11 entry point during preparsing.
    pkg_resources = pytest.importorskip("pkg_resources")
    def my_iter(name):
        assert name == "pytest11"
        # Minimal stand-in for a pkg_resources EntryPoint.
        class EntryPoint:
            name = "mytestplugin"
            class dist:
                pass
            def load(self):
                class PseudoPlugin:
                    x = 42
                return PseudoPlugin()
        return iter([EntryPoint()])
    monkeypatch.setattr(pkg_resources, 'iter_entry_points', my_iter)
    testdir.makeconftest("""
        pytest_plugins = "mytestplugin",
    """)
    monkeypatch.setenv("PYTEST_PLUGINS", "mytestplugin")
    config = testdir.parseconfig()
    plugin = config.pluginmanager.getplugin("mytestplugin")
    # The loaded plugin must be the PseudoPlugin instance from load().
    assert plugin.x == 42
def test_plugin_preparse_prevents_setuptools_loading(testdir, monkeypatch):
    # "-p no:name" on the command line must block the setuptools entry point
    # from ever being load()ed.
    pkg_resources = pytest.importorskip("pkg_resources")
    def my_iter(name):
        assert name == "pytest11"
        class EntryPoint:
            name = "mytestplugin"
            def load(self):
                # load() being called would mean the block failed.
                assert 0, "should not arrive here"
        return iter([EntryPoint()])
    monkeypatch.setattr(pkg_resources, 'iter_entry_points', my_iter)
    config = testdir.parseconfig("-p", "no:mytestplugin")
    plugin = config.pluginmanager.getplugin("mytestplugin")
    assert plugin is None
def test_cmdline_processargs_simple(testdir):
    """A pytest_cmdline_preparse hook may append arguments (here: -h)."""
    testdir.makeconftest("""
        def pytest_cmdline_preparse(args):
            args.append("-h")
    """)
    output = testdir.runpytest()
    # -h was injected, so the help text must have been printed.
    expected_patterns = ["*pytest*", "*-h*"]
    output.stdout.fnmatch_lines(expected_patterns)
def test_invalid_options_show_extra_information(testdir):
    """display extra information when pytest exits due to unrecognized
    options in the command-line"""
    # makeini writes a tox.ini; the error output must name it plus rootdir.
    testdir.makeini("""
        [pytest]
        addopts = --invalid-option
    """)
    result = testdir.runpytest()
    result.stderr.fnmatch_lines([
        "*error: unrecognized arguments: --invalid-option*",
        "* inifile: %s*" % testdir.tmpdir.join('tox.ini'),
        "* rootdir: %s*" % testdir.tmpdir,
    ])
@pytest.mark.parametrize('args', [
    ['dir1', 'dir2', '-v'],
    ['dir1', '-v', 'dir2'],
    ['dir2', '-v', 'dir1'],
    ['-v', 'dir2', 'dir1'],
])
def test_consider_args_after_options_for_rootdir_and_inifile(testdir, args):
    """
    Consider all arguments in the command-line for rootdir and inifile
    discovery, even if they happen to occur after an option. #949
    """
    # Map the placeholder names onto real directories under a common root.
    root = testdir.tmpdir.mkdir('myroot')
    real_dirs = {'dir1': root.mkdir('dir1'), 'dir2': root.mkdir('dir2')}
    resolved_args = [real_dirs.get(arg, arg) for arg in args]
    result = testdir.runpytest(*resolved_args)
    result.stdout.fnmatch_lines(['*rootdir: *myroot, inifile: '])
@pytest.mark.skipif("sys.platform == 'win32'")
def test_toolongargs_issue224(testdir):
    # A very long -m expression must not crash pytest (issue #224).
    very_long_expr = "hello" * 500
    result = testdir.runpytest("-m", very_long_expr)
    assert result.ret == EXIT_NOTESTSCOLLECTED
def test_notify_exception(testdir, capfd):
    config = testdir.parseconfig()
    excinfo = pytest.raises(ValueError, "raise ValueError(1)")
    # By default notify_exception writes the traceback to stderr.
    config.notify_exception(excinfo)
    out, err = capfd.readouterr()
    assert "ValueError" in err
    class A:
        def pytest_internalerror(self, excrepr):
            return True
    # Once a plugin's pytest_internalerror returns True, the default
    # stderr report is suppressed.
    config.pluginmanager.register(A())
    config.notify_exception(excinfo)
    out, err = capfd.readouterr()
    assert not err
def test_load_initial_conftest_last_ordering(testdir):
    # Verify hook-call ordering for pytest_load_initial_conftests:
    # capture runs last, then freshly registered plugins, then config.
    from _pytest.config import get_config
    pm = get_config().pluginmanager
    class My:
        def pytest_load_initial_conftests(self):
            pass
    m = My()
    pm.register(m)
    hc = pm.hook.pytest_load_initial_conftests
    # Concatenate the internal impl lists to inspect call order.
    l = hc._nonwrappers + hc._wrappers
    assert l[-1].function.__module__ == "_pytest.capture"
    assert l[-2].function == m.pytest_load_initial_conftests
    assert l[-3].function.__module__ == "_pytest.config"
class TestWarning:
    """Tests for config.warn()/node.warn() and the pytest-warnings report."""
    # config.warn must trigger the pytest_logwarning hook with code+message.
    def test_warn_config(self, testdir):
        testdir.makeconftest("""
            l = []
            def pytest_configure(config):
                config.warn("C1", "hello")
            def pytest_logwarning(code, message):
                if message == "hello" and code == "C1":
                    l.append(1)
        """)
        testdir.makepyfile("""
            def test_proper(pytestconfig):
                import conftest
                assert conftest.l == [1]
        """)
        reprec = testdir.inline_run()
        reprec.assertoutcome(passed=1)
    # Item warnings are counted but only shown in the summary with -rw.
    # NOTE: the "*:5*" pattern below depends on the exact line numbering of
    # the embedded test source.
    def test_warn_on_test_item_from_request(self, testdir):
        testdir.makepyfile("""
            import pytest
            @pytest.fixture
            def fix(request):
                request.node.warn("T1", "hello")
            def test_hello(fix):
                pass
        """)
        result = testdir.runpytest()
        assert result.parseoutcomes()["pytest-warnings"] > 0
        assert "hello" not in result.stdout.str()
        result = testdir.runpytest("-rw")
        result.stdout.fnmatch_lines("""
            ===*pytest-warning summary*===
            *WT1*test_warn_on_test_item*:5*hello*
        """)
class TestRootdir:
    def test_simple_noini(self, tmpdir):
        # Without any ini file the rootdir is the common ancestor of the
        # given paths; with no paths at all it falls back to the cwd.
        assert get_common_ancestor([tmpdir]) == tmpdir
        assert get_common_ancestor([tmpdir.mkdir("a"), tmpdir]) == tmpdir
        assert get_common_ancestor([tmpdir, tmpdir.join("a")]) == tmpdir
        with tmpdir.as_cwd():
            assert get_common_ancestor([]) == tmpdir
@pytest.mark.parametrize("name", "setup.cfg tox.ini pytest.ini".split())
def test_with_ini(self, tmpdir, name):
inifile = tmpdir.join(name)
inifile.write("[pytest]\n")
a = tmpdir.mkdir("a")
b = a.mkdir("b")
for args in ([tmpdir], [a], [b]):
rootdir, inifile, inicfg = determine_setup(None, args)
assert rootdir == tmpdir
assert inifile == inifile
rootdir, inifile, inicfg = determine_setup(None, [b,a])
assert rootdir == tmpdir
assert inifile == inifile
@pytest.mark.parametrize("name", "setup.cfg tox.ini".split())
def test_pytestini_overides_empty_other(self, tmpdir, name):
inifile = tmpdir.ensure("pytest.ini")
a = tmpdir.mkdir("a")
a.ensure(name)
rootdir, inifile, inicfg = determine_setup(None, [a])
assert rootdir == tmpdir
assert inifile == inifile
    def test_setuppy_fallback(self, tmpdir):
        # With no [pytest]-bearing ini file anywhere, a setup.py marks the
        # rootdir; no inifile is reported and the ini config is empty.
        a = tmpdir.mkdir("a")
        a.ensure("setup.cfg")
        tmpdir.ensure("setup.py")
        rootdir, inifile, inicfg = determine_setup(None, [a])
        assert rootdir == tmpdir
        assert inifile is None
        assert inicfg == {}
    def test_nothing(self, tmpdir):
        # No ini files at all: the argument directory itself becomes rootdir.
        rootdir, inifile, inicfg = determine_setup(None, [tmpdir])
        assert rootdir == tmpdir
        assert inifile is None
        assert inicfg == {}
    def test_with_specific_inifile(self, tmpdir):
        # An explicitly passed inifile pins the rootdir to its directory.
        # (Note: the returned inifile deliberately rebinds the local name.)
        inifile = tmpdir.ensure("pytest.ini")
        rootdir, inifile, inicfg = determine_setup(inifile, [tmpdir])
        assert rootdir == tmpdir
| mpl-2.0 |
danigonza/phantomjs | src/breakpad/src/third_party/protobuf/protobuf/python/google/protobuf/internal/encoder.py | 484 | 25695 | # Protocol Buffers - Google's data interchange format
# Copyright 2008 Google Inc. All rights reserved.
# http://code.google.com/p/protobuf/
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Code for encoding protocol message primitives.
Contains the logic for encoding every logical protocol field type
into one of the 5 physical wire types.
This code is designed to push the Python interpreter's performance to the
limits.
The basic idea is that at startup time, for every field (i.e. every
FieldDescriptor) we construct two functions: a "sizer" and an "encoder". The
sizer takes a value of this field's type and computes its byte size. The
encoder takes a writer function and a value. It encodes the value into byte
strings and invokes the writer function to write those strings. Typically the
writer function is the write() method of a cStringIO.
We try to do as much work as possible when constructing the writer and the
sizer rather than when calling them. In particular:
* We copy any needed global functions to local variables, so that we do not need
to do costly global table lookups at runtime.
* Similarly, we try to do any attribute lookups at startup time if possible.
* Every field's tag is encoded to bytes at startup, since it can't change at
runtime.
* Whatever component of the field size we can compute at startup, we do.
* We *avoid* sharing code if doing so would make the code slower and not sharing
does not burden us too much. For example, encoders for repeated fields do
not just call the encoders for singular fields in a loop because this would
add an extra function call overhead for every loop iteration; instead, we
manually inline the single-value encoder into the loop.
* If a Python function lacks a return statement, Python actually generates
instructions to pop the result of the last statement off the stack, push
None onto the stack, and then return that. If we really don't care what
value is returned, then we can save two instructions by returning the
result of the last statement. It looks funny but it helps.
* We assume that type and bounds checking has happened at a higher level.
"""
__author__ = 'kenton@google.com (Kenton Varda)'
import struct
from google.protobuf.internal import wire_format
# This will overflow and thus become IEEE-754 "infinity". We would use
# "float('inf')" but it doesn't work on Windows pre-Python-2.6.
_POS_INF = 1e10000
_NEG_INF = -_POS_INF
def _VarintSize(value):
"""Compute the size of a varint value."""
if value <= 0x7f: return 1
if value <= 0x3fff: return 2
if value <= 0x1fffff: return 3
if value <= 0xfffffff: return 4
if value <= 0x7ffffffff: return 5
if value <= 0x3ffffffffff: return 6
if value <= 0x1ffffffffffff: return 7
if value <= 0xffffffffffffff: return 8
if value <= 0x7fffffffffffffff: return 9
return 10
def _SignedVarintSize(value):
"""Compute the size of a signed varint value."""
if value < 0: return 10
if value <= 0x7f: return 1
if value <= 0x3fff: return 2
if value <= 0x1fffff: return 3
if value <= 0xfffffff: return 4
if value <= 0x7ffffffff: return 5
if value <= 0x3ffffffffff: return 6
if value <= 0x1ffffffffffff: return 7
if value <= 0xffffffffffffff: return 8
if value <= 0x7fffffffffffffff: return 9
return 10
def _TagSize(field_number):
  """Returns the number of bytes required to serialize a tag with this field
  number."""
  # Just pass in type 0, since the type won't affect the tag+type size.
  # (The packed tag is varint-encoded, so only the field number drives
  # the size -- presumably PackTag combines number and wire type; confirm
  # against wire_format.)
  return _VarintSize(wire_format.PackTag(field_number, 0))
# --------------------------------------------------------------------
# In this section we define some generic sizers. Each of these functions
# takes parameters specific to a particular field type, e.g. int32 or fixed64.
# It returns another function which in turn takes parameters specific to a
# particular field, e.g. the field number and whether it is repeated or packed.
# Look at the next section to see how these are used.
def _SimpleSizer(compute_value_size):
  """A sizer which uses the function compute_value_size to compute the size of
  each value.  Typically compute_value_size is _VarintSize."""
  def SpecificSizer(field_number, is_repeated, is_packed):
    tag_size = _TagSize(field_number)
    if is_packed:
      local_VarintSize = _VarintSize
      def PackedFieldSize(value):
        # One tag, then a length prefix, then all values back-to-back.
        payload = sum(compute_value_size(element) for element in value)
        return payload + local_VarintSize(payload) + tag_size
      return PackedFieldSize
    if is_repeated:
      def RepeatedFieldSize(value):
        # Every element carries its own tag.
        return tag_size * len(value) + sum(
            compute_value_size(element) for element in value)
      return RepeatedFieldSize
    def FieldSize(value):
      return tag_size + compute_value_size(value)
    return FieldSize
  return SpecificSizer
def _ModifiedSizer(compute_value_size, modify_value):
  """Like SimpleSizer, but modify_value is invoked on each value before it is
  passed to compute_value_size.  modify_value is typically ZigZagEncode."""
  def SpecificSizer(field_number, is_repeated, is_packed):
    tag_size = _TagSize(field_number)
    if is_packed:
      local_VarintSize = _VarintSize
      def PackedFieldSize(value):
        # One tag, a length prefix, then the transformed values.
        payload = sum(
            compute_value_size(modify_value(element)) for element in value)
        return payload + local_VarintSize(payload) + tag_size
      return PackedFieldSize
    if is_repeated:
      def RepeatedFieldSize(value):
        return tag_size * len(value) + sum(
            compute_value_size(modify_value(element)) for element in value)
      return RepeatedFieldSize
    def FieldSize(value):
      return tag_size + compute_value_size(modify_value(value))
    return FieldSize
  return SpecificSizer
def _FixedSizer(value_size):
  """Like _SimpleSizer except for a fixed-size field.  The input is the size
  of one value."""
  def SpecificSizer(field_number, is_repeated, is_packed):
    tag_size = _TagSize(field_number)
    if is_packed:
      local_VarintSize = _VarintSize
      def PackedFieldSize(value):
        payload = len(value) * value_size
        return payload + local_VarintSize(payload) + tag_size
      return PackedFieldSize
    if is_repeated:
      # Tag + payload per element; precompute the per-element total.
      per_element = value_size + tag_size
      def RepeatedFieldSize(value):
        return len(value) * per_element
      return RepeatedFieldSize
    # Singular fixed-width fields have a constant size.
    fixed_total = value_size + tag_size
    def FieldSize(value):
      return fixed_total
    return FieldSize
  return SpecificSizer
# ====================================================================
# Here we declare a sizer constructor for each field type. Each "sizer
# constructor" is a function that takes (field_number, is_repeated, is_packed)
# as parameters and returns a sizer, which in turn takes a field value as
# a parameter and returns its encoded size.
# Sizer constructors per field type.  Signed-varint fields share the
# sign-extension-aware sizer; unsigned varints use the plain one.
Int32Sizer = Int64Sizer = EnumSizer = _SimpleSizer(_SignedVarintSize)
UInt32Sizer = UInt64Sizer = _SimpleSizer(_VarintSize)
# sint32/sint64 are ZigZag-transformed before sizing.
SInt32Sizer = SInt64Sizer = _ModifiedSizer(
    _SignedVarintSize, wire_format.ZigZagEncode)
# Fixed-width wire formats: 4 bytes, 8 bytes, and 1 byte for bool.
Fixed32Sizer = SFixed32Sizer = FloatSizer = _FixedSizer(4)
Fixed64Sizer = SFixed64Sizer = DoubleSizer = _FixedSizer(8)
BoolSizer = _FixedSizer(1)
def StringSizer(field_number, is_repeated, is_packed):
  """Returns a sizer for a string field.

  Strings are length-delimited; the size counts their UTF-8 byte length
  plus the varint length prefix and the tag.
  """
  tag_size = _TagSize(field_number)
  local_VarintSize = _VarintSize
  local_len = len
  assert not is_packed
  if is_repeated:
    def RepeatedFieldSize(value):
      total = tag_size * len(value)
      for element in value:
        encoded_len = local_len(element.encode('utf-8'))
        total += local_VarintSize(encoded_len) + encoded_len
      return total
    return RepeatedFieldSize
  def FieldSize(value):
    encoded_len = local_len(value.encode('utf-8'))
    return tag_size + local_VarintSize(encoded_len) + encoded_len
  return FieldSize
def BytesSizer(field_number, is_repeated, is_packed):
  """Returns a sizer for a bytes field.

  Like StringSizer but without UTF-8 encoding: the raw byte length is used.
  """
  tag_size = _TagSize(field_number)
  local_VarintSize = _VarintSize
  local_len = len
  assert not is_packed
  if is_repeated:
    def RepeatedFieldSize(value):
      total = tag_size * len(value)
      for element in value:
        byte_len = local_len(element)
        total += local_VarintSize(byte_len) + byte_len
      return total
    return RepeatedFieldSize
  def FieldSize(value):
    byte_len = local_len(value)
    return tag_size + local_VarintSize(byte_len) + byte_len
  return FieldSize
def GroupSizer(field_number, is_repeated, is_packed):
  """Returns a sizer for a group field.

  Groups are bracketed by start/end tags, hence two tags per occurrence.
  """
  tag_size = _TagSize(field_number) * 2
  assert not is_packed
  if is_repeated:
    def RepeatedFieldSize(value):
      return tag_size * len(value) + sum(
          element.ByteSize() for element in value)
    return RepeatedFieldSize
  def FieldSize(value):
    return tag_size + value.ByteSize()
  return FieldSize
def MessageSizer(field_number, is_repeated, is_packed):
  """Returns a sizer for a message field.

  Sub-messages are length-delimited: tag, varint byte-size prefix, payload.
  """
  tag_size = _TagSize(field_number)
  local_VarintSize = _VarintSize
  assert not is_packed
  if is_repeated:
    def RepeatedFieldSize(value):
      total = tag_size * len(value)
      for element in value:
        msg_len = element.ByteSize()
        total += local_VarintSize(msg_len) + msg_len
      return total
    return RepeatedFieldSize
  def FieldSize(value):
    msg_len = value.ByteSize()
    return tag_size + local_VarintSize(msg_len) + msg_len
  return FieldSize
# --------------------------------------------------------------------
# MessageSet is special.
def MessageSetItemSizer(field_number):
  """Returns a sizer for extensions of MessageSet.

  The message set message looks like this:
    message MessageSet {
      repeated group Item = 1 {
        required int32 type_id = 2;
        required string message = 3;
      }
    }
  """
  # Everything except the embedded message payload has a constant size:
  # the group's start/end tags, the type_id field, and the message tag.
  static_size = (_TagSize(1) * 2 + _TagSize(2) + _VarintSize(field_number) +
                 _TagSize(3))
  local_VarintSize = _VarintSize
  def FieldSize(value):
    msg_len = value.ByteSize()
    return static_size + local_VarintSize(msg_len) + msg_len
  return FieldSize
# ====================================================================
# Encoders!
def _VarintEncoder():
"""Return an encoder for a basic varint value (does not include tag)."""
local_chr = chr
def EncodeVarint(write, value):
bits = value & 0x7f
value >>= 7
while value:
write(local_chr(0x80|bits))
bits = value & 0x7f
value >>= 7
return write(local_chr(bits))
return EncodeVarint
def _SignedVarintEncoder():
"""Return an encoder for a basic signed varint value (does not include
tag)."""
local_chr = chr
def EncodeSignedVarint(write, value):
if value < 0:
value += (1 << 64)
bits = value & 0x7f
value >>= 7
while value:
write(local_chr(0x80|bits))
bits = value & 0x7f
value >>= 7
return write(local_chr(bits))
return EncodeSignedVarint
# Shared module-level encoder instances; the factory functions above are
# only called once, here.
_EncodeVarint = _VarintEncoder()
_EncodeSignedVarint = _SignedVarintEncoder()
def _VarintBytes(value):
  """Encode the given integer as a varint and return the bytes.  This is only
  called at startup time so it doesn't need to be fast."""
  # Collect the encoded chunks via list.append, then join into one string.
  pieces = []
  _EncodeVarint(pieces.append, value)
  return "".join(pieces)
def TagBytes(field_number, wire_type):
  """Encode the given tag and return the bytes.  Only called at startup."""
  # The tag is the varint encoding of the packed (field_number, wire_type).
  return _VarintBytes(wire_format.PackTag(field_number, wire_type))
# --------------------------------------------------------------------
# As with sizers (see above), we have a number of common encoder
# implementations.
def _SimpleEncoder(wire_type, encode_value, compute_value_size):
  """Return a constructor for an encoder for fields of a particular type.

  Args:
      wire_type:  The field's wire type, for encoding tags.
      encode_value:  A function which encodes an individual value, e.g.
        _EncodeVarint().
      compute_value_size:  A function which computes the size of an individual
        value, e.g. _VarintSize().
  """
  def SpecificEncoder(field_number, is_repeated, is_packed):
    if is_packed:
      tag_bytes = TagBytes(field_number, wire_format.WIRETYPE_LENGTH_DELIMITED)
      local_EncodeVarint = _EncodeVarint
      def EncodePackedField(write, value):
        # Tag, total payload size, then each value back-to-back.
        write(tag_bytes)
        local_EncodeVarint(
            write, sum(compute_value_size(item) for item in value))
        for item in value:
          encode_value(write, item)
      return EncodePackedField
    if is_repeated:
      tag_bytes = TagBytes(field_number, wire_type)
      def EncodeRepeatedField(write, value):
        # Every element is written with its own tag.
        for item in value:
          write(tag_bytes)
          encode_value(write, item)
      return EncodeRepeatedField
    tag_bytes = TagBytes(field_number, wire_type)
    def EncodeField(write, value):
      write(tag_bytes)
      return encode_value(write, value)
    return EncodeField
  return SpecificEncoder
def _ModifiedEncoder(wire_type, encode_value, compute_value_size, modify_value):
  """Like SimpleEncoder but additionally invokes modify_value on every value
  before passing it to encode_value.  Usually modify_value is ZigZagEncode."""
  def SpecificEncoder(field_number, is_repeated, is_packed):
    if is_packed:
      tag_bytes = TagBytes(field_number, wire_format.WIRETYPE_LENGTH_DELIMITED)
      local_EncodeVarint = _EncodeVarint
      def EncodePackedField(write, value):
        # Tag, payload size of the transformed values, then the values.
        write(tag_bytes)
        local_EncodeVarint(
            write, sum(compute_value_size(modify_value(item))
                       for item in value))
        for item in value:
          encode_value(write, modify_value(item))
      return EncodePackedField
    if is_repeated:
      tag_bytes = TagBytes(field_number, wire_type)
      def EncodeRepeatedField(write, value):
        for item in value:
          write(tag_bytes)
          encode_value(write, modify_value(item))
      return EncodeRepeatedField
    tag_bytes = TagBytes(field_number, wire_type)
    def EncodeField(write, value):
      write(tag_bytes)
      return encode_value(write, modify_value(value))
    return EncodeField
  return SpecificEncoder
def _StructPackEncoder(wire_type, format):
  """Return a constructor for an encoder for a fixed-width field.

  Args:
      wire_type:  The field's wire type, for encoding tags.
      format:  The format string to pass to struct.pack().
  """
  value_size = struct.calcsize(format)
  def SpecificEncoder(field_number, is_repeated, is_packed):
    local_struct_pack = struct.pack
    if is_packed:
      tag_bytes = TagBytes(field_number, wire_format.WIRETYPE_LENGTH_DELIMITED)
      local_EncodeVarint = _EncodeVarint
      def EncodePackedField(write, value):
        # The payload size is exact: element count times the fixed width.
        write(tag_bytes)
        local_EncodeVarint(write, len(value) * value_size)
        for item in value:
          write(local_struct_pack(format, item))
      return EncodePackedField
    if is_repeated:
      tag_bytes = TagBytes(field_number, wire_type)
      def EncodeRepeatedField(write, value):
        for item in value:
          write(tag_bytes)
          write(local_struct_pack(format, item))
      return EncodeRepeatedField
    tag_bytes = TagBytes(field_number, wire_type)
    def EncodeField(write, value):
      write(tag_bytes)
      return write(local_struct_pack(format, value))
    return EncodeField
  return SpecificEncoder
def _FloatingPointEncoder(wire_type, format):
  """Return a constructor for an encoder for float fields.

  This is like StructPackEncoder, but catches errors that may be due to
  passing non-finite floating-point values to struct.pack, and makes a
  second attempt to encode those values.

  Args:
      wire_type:  The field's wire type, for encoding tags.
      format:  The format string to pass to struct.pack().
  """
  value_size = struct.calcsize(format)
  # The non-finite fallback is chosen once, per format width (4 or 8 bytes).
  if value_size == 4:
    def EncodeNonFiniteOrRaise(write, value):
      # Remember that the serialized form uses little-endian byte order.
      if value == _POS_INF:
        write('\x00\x00\x80\x7F')
      elif value == _NEG_INF:
        write('\x00\x00\x80\xFF')
      elif value != value:          # NaN
        write('\x00\x00\xC0\x7F')
      else:
        # Bare raise: re-raise the struct.pack error being handled.
        raise
  elif value_size == 8:
    def EncodeNonFiniteOrRaise(write, value):
      if value == _POS_INF:
        write('\x00\x00\x00\x00\x00\x00\xF0\x7F')
      elif value == _NEG_INF:
        write('\x00\x00\x00\x00\x00\x00\xF0\xFF')
      elif value != value:                         # NaN
        write('\x00\x00\x00\x00\x00\x00\xF8\x7F')
      else:
        raise
  else:
    raise ValueError('Can\'t encode floating-point values that are '
                     '%d bytes long (only 4 or 8)' % value_size)
  def SpecificEncoder(field_number, is_repeated, is_packed):
    local_struct_pack = struct.pack
    if is_packed:
      tag_bytes = TagBytes(field_number, wire_format.WIRETYPE_LENGTH_DELIMITED)
      local_EncodeVarint = _EncodeVarint
      def EncodePackedField(write, value):
        write(tag_bytes)
        local_EncodeVarint(write, len(value) * value_size)
        for element in value:
          # This try/except block is going to be faster than any code that
          # we could write to check whether element is finite.
          # NOTE(review): SystemError is what struct.pack raised for
          # non-finite floats on the Python versions this targeted --
          # confirm on the deployment interpreter.
          try:
            write(local_struct_pack(format, element))
          except SystemError:
            EncodeNonFiniteOrRaise(write, element)
      return EncodePackedField
    elif is_repeated:
      tag_bytes = TagBytes(field_number, wire_type)
      def EncodeRepeatedField(write, value):
        for element in value:
          write(tag_bytes)
          try:
            write(local_struct_pack(format, element))
          except SystemError:
            EncodeNonFiniteOrRaise(write, element)
      return EncodeRepeatedField
    else:
      tag_bytes = TagBytes(field_number, wire_type)
      def EncodeField(write, value):
        write(tag_bytes)
        try:
          write(local_struct_pack(format, value))
        except SystemError:
          EncodeNonFiniteOrRaise(write, value)
      return EncodeField
  return SpecificEncoder
# ====================================================================
# Here we declare an encoder constructor for each field type. These work
# very similarly to sizer constructors, described earlier.
# Each name is a callable taking (field_number, is_repeated, is_packed) and
# returning the concrete encode function for that field.

# Signed varint types; enums are wire-compatible with int32.
Int32Encoder = Int64Encoder = EnumEncoder = _SimpleEncoder(
    wire_format.WIRETYPE_VARINT, _EncodeSignedVarint, _SignedVarintSize)

# Unsigned varint types.
UInt32Encoder = UInt64Encoder = _SimpleEncoder(
    wire_format.WIRETYPE_VARINT, _EncodeVarint, _VarintSize)

# sint32/sint64 are zig-zag encoded before being written as varints.
SInt32Encoder = SInt64Encoder = _ModifiedEncoder(
    wire_format.WIRETYPE_VARINT, _EncodeVarint, _VarintSize,
    wire_format.ZigZagEncode)

# Note that Python conveniently guarantees that when using the '<' prefix on
# formats, they will also have the same size across all platforms (as opposed
# to without the prefix, where their sizes depend on the C compiler's basic
# type sizes).
Fixed32Encoder = _StructPackEncoder(wire_format.WIRETYPE_FIXED32, '<I')
Fixed64Encoder = _StructPackEncoder(wire_format.WIRETYPE_FIXED64, '<Q')
SFixed32Encoder = _StructPackEncoder(wire_format.WIRETYPE_FIXED32, '<i')
SFixed64Encoder = _StructPackEncoder(wire_format.WIRETYPE_FIXED64, '<q')
FloatEncoder = _FloatingPointEncoder(wire_format.WIRETYPE_FIXED32, '<f')
DoubleEncoder = _FloatingPointEncoder(wire_format.WIRETYPE_FIXED64, '<d')
def BoolEncoder(field_number, is_repeated, is_packed):
  """Returns an encoder for a boolean field.

  A boolean is serialized as a single varint byte: chr(0) for false and
  chr(1) for true.
  """
  encoded_false = chr(0)
  encoded_true = chr(1)

  if is_packed:
    tag_bytes = TagBytes(field_number, wire_format.WIRETYPE_LENGTH_DELIMITED)
    local_EncodeVarint = _EncodeVarint
    def EncodePackedField(write, value):
      # One tag, a byte-count, then one byte per element.
      write(tag_bytes)
      local_EncodeVarint(write, len(value))
      for element in value:
        write(encoded_true if element else encoded_false)
    return EncodePackedField

  if is_repeated:
    tag_bytes = TagBytes(field_number, wire_format.WIRETYPE_VARINT)
    def EncodeRepeatedField(write, value):
      # Tag + value pair for every element.
      for element in value:
        write(tag_bytes)
        write(encoded_true if element else encoded_false)
    return EncodeRepeatedField

  tag_bytes = TagBytes(field_number, wire_format.WIRETYPE_VARINT)
  def EncodeField(write, value):
    write(tag_bytes)
    if value:
      return write(encoded_true)
    return write(encoded_false)
  return EncodeField
def StringEncoder(field_number, is_repeated, is_packed):
  """Returns an encoder for a string field.

  Each value is UTF-8 encoded and written as a length-delimited record.
  """
  tag = TagBytes(field_number, wire_format.WIRETYPE_LENGTH_DELIMITED)
  encode_varint = _EncodeVarint
  length_of = len
  assert not is_packed

  if not is_repeated:
    def EncodeField(write, value):
      encoded = value.encode('utf-8')
      write(tag)
      encode_varint(write, length_of(encoded))
      return write(encoded)
    return EncodeField

  def EncodeRepeatedField(write, value):
    for element in value:
      encoded = element.encode('utf-8')
      write(tag)
      encode_varint(write, length_of(encoded))
      write(encoded)
  return EncodeRepeatedField
def BytesEncoder(field_number, is_repeated, is_packed):
  """Returns an encoder for a bytes field.

  Values are written verbatim as length-delimited records.
  """
  tag = TagBytes(field_number, wire_format.WIRETYPE_LENGTH_DELIMITED)
  encode_varint = _EncodeVarint
  length_of = len
  assert not is_packed

  if not is_repeated:
    def EncodeField(write, value):
      write(tag)
      encode_varint(write, length_of(value))
      return write(value)
    return EncodeField

  def EncodeRepeatedField(write, value):
    for element in value:
      write(tag)
      encode_varint(write, length_of(element))
      write(element)
  return EncodeRepeatedField
def GroupEncoder(field_number, is_repeated, is_packed):
  """Returns an encoder for a group field.

  Groups are framed by START_GROUP/END_GROUP tags instead of a byte length.
  """
  start_tag = TagBytes(field_number, wire_format.WIRETYPE_START_GROUP)
  end_tag = TagBytes(field_number, wire_format.WIRETYPE_END_GROUP)
  assert not is_packed

  if not is_repeated:
    def EncodeField(write, value):
      write(start_tag)
      value._InternalSerialize(write)
      return write(end_tag)
    return EncodeField

  def EncodeRepeatedField(write, value):
    for element in value:
      write(start_tag)
      element._InternalSerialize(write)
      write(end_tag)
  return EncodeRepeatedField
def MessageEncoder(field_number, is_repeated, is_packed):
  """Returns an encoder for a message field.

  Sub-messages are length-prefixed: tag, ByteSize() as a varint, then the
  serialized payload.
  """
  tag = TagBytes(field_number, wire_format.WIRETYPE_LENGTH_DELIMITED)
  encode_varint = _EncodeVarint
  assert not is_packed

  if not is_repeated:
    def EncodeField(write, value):
      write(tag)
      encode_varint(write, value.ByteSize())
      return value._InternalSerialize(write)
    return EncodeField

  def EncodeRepeatedField(write, value):
    for element in value:
      write(tag)
      encode_varint(write, element.ByteSize())
      element._InternalSerialize(write)
  return EncodeRepeatedField
# --------------------------------------------------------------------
# As before, MessageSet is special.
def MessageSetItemEncoder(field_number):
  """Encoder for extensions of MessageSet.

  The message set message looks like this:
    message MessageSet {
      repeated group Item = 1 {
        required int32 type_id = 2;
        required string message = 3;
      }
    }
  """
  # For a given extension the framing before the payload never changes, so it
  # is pre-rendered once: START_GROUP(1), the type_id tag and value, then the
  # length-delimited tag for the message payload.
  start_bytes = "".join([
      TagBytes(1, wire_format.WIRETYPE_START_GROUP),
      TagBytes(2, wire_format.WIRETYPE_VARINT),
      _VarintBytes(field_number),
      TagBytes(3, wire_format.WIRETYPE_LENGTH_DELIMITED)])
  end_bytes = TagBytes(1, wire_format.WIRETYPE_END_GROUP)
  local_EncodeVarint = _EncodeVarint

  def EncodeField(write, value):
    # Payload is length-prefixed, and the group is closed with END_GROUP(1).
    write(start_bytes)
    local_EncodeVarint(write, value.ByteSize())
    value._InternalSerialize(write)
    return write(end_bytes)
  return EncodeField
| bsd-3-clause |
aml-development/ozp-backend | ozpcenter/api/listing/elasticsearch_util.py | 1 | 31164 | """
Elasticsearch Utils
--------------------------
Contains Elasticsearch common functions
=====
Code was developed to work with Elasticsearch 2.4.*
Reference
number_of_shards
number_of_replicas
https://www.elastic.co/guide/en/elasticsearch/guide/current/_how_primary_and_replica_shards_interact.html
ordering
https://www.elastic.co/guide/en/elasticsearch/reference/2.4/search-request-sort.html#_sort_order
analyzer
https://www.elastic.co/guide/en/elasticsearch/reference/2.4/analyzer.html
completion
https://qbox.io/blog/quick-and-dirty-autocomplete-with-elasticsearch-completion-suggest
=====
Code was developed to work with Elasticsearch 6.3.x
Mapping
https://www.elastic.co/guide/en/elasticsearch/reference/6.3/mapping.html#mapping-limit-settings
Analyzer:
https://www.elastic.co/guide/en/elasticsearch/reference/6.3/search-analyzer.html
Field Types:
Keyword - A field to index structured content such as email addresses, hostnames, status codes, zip codes or tags.
They are typically used for filtering (Find me all blog posts where status is published), for sorting, and for aggregations. Keyword fields are only searchable by their exact value.
Text - If you need to index full text content such as email bodies or product descriptions, it is likely that you should rather use a text field.
"""
import json
import logging
from elasticsearch import Elasticsearch
from elasticsearch import exceptions
from django.conf import settings
from ozpcenter import errors
logger = logging.getLogger('ozp-center.' + str(__name__))
class ElasticsearchFactory(object):
    """Lazily builds and caches a single shared Elasticsearch client.

    All helpers honor settings.ES_ENABLED and surface failures as
    errors.ElasticsearchServiceUnavailable.
    """

    def __init__(self):
        # Cached client; created on first get_client() call (and only when
        # settings.ES_ENABLED is True).
        self.es_client = None

    def check_elasticsearch(self):
        """
        Method used to check to see if elasticsearch is up

        Returns True when the cluster answers info() with a sane payload;
        otherwise raises ElasticsearchServiceUnavailable describing the
        failure mode.
        """
        self.get_client()

        if settings.ES_ENABLED is False:
            raise errors.ElasticsearchServiceUnavailable('Elasticsearch is disabled in the settings')

        try:
            results = self.es_client.info()
            # Results: {'name': 'Human Top', 'version': {'build_snapshot': False, 'number': '2.4.0', 'build_hash': 'ce9f0c7394dee074091dd1bc4e9469251181fc55',
            # 'build_timestamp': '2016-08-29T09:14:17Z', 'lucene_version': '5.5.2'}, 'cluster_name': 'elasticsearch', 'tagline': 'You Know, for Search'}
            keys_to_check = ['name', 'version', 'cluster_name', 'tagline']
            for key in keys_to_check:
                if key not in results:
                    raise errors.ElasticsearchServiceUnavailable('Elasticsearch Results missing keys')
            return True
        except exceptions.SerializationError:
            # Exception Value: Unknown mimetype, unable to deserialize: text/html
            raise errors.ElasticsearchServiceUnavailable('Elasticsearch Serialization Error')
        except exceptions.AuthenticationException:
            # Ngnix BasicAuth Fail: TransportError(401, '<html>\r\n<head><title>401 Authorization
            # Required</title></head>\r\n<body bgcolor="white">\r\n<center><h1>401 Authorization Required</h1></center>\r\n<hr><center>nginx/1.11.6</center>\r\n</body>\r\n</html>\r\n')
            raise errors.ElasticsearchServiceUnavailable('Elasticsearch Authentication Exception')
        except exceptions.ConnectionError:
            # ConnectionError(<urllib3.connection.HTTPConnection object at 0x7f6343212c50>: Failed to establish a new connection: [Errno 111] Connection refused) ...
            raise errors.ElasticsearchServiceUnavailable('Elasticsearch Connection Error')
        except exceptions.TransportError:
            # Nginx reverse proxy can't find elasticsearch but correct BasicAuth
            # TransportError(502, 'An error occurred.</h1>\n<p>Sorry, the page you are looking for is currently unavailable.<br/>\nPlease try again later.....
            raise errors.ElasticsearchServiceUnavailable('Elasticsearch Transport Error')
        # NOTE(review): defensive fallback; appears unreachable because every
        # path above either returns or raises.
        raise errors.ElasticsearchServiceUnavailable('Elasticsearch Check Error')

    def get_heath(self):
        # Return cluster health after verifying the cluster is reachable.
        # NOTE(review): method name carries an original typo ("heath" for
        # "health"); renaming would break existing callers.
        self.check_elasticsearch()
        return self.es_client.cluster.health()

    def recreate_index_mapping(self, index_name, index_mapping):
        # Drop index_name if it already exists, then recreate it with
        # index_mapping and wait for the cluster to reach yellow status.
        self.get_client()
        if settings.ES_ENABLED is True:
            self.check_elasticsearch()
            logger.info('Checking to see if Index [{}] exist'.format(index_name))
            if self.es_client.indices.exists(index_name):
                logger.info('Deleting [{}] index...'.format(index_name))
                res = self.es_client.indices.delete(index=index_name)
                logger.info('Delete acknowledged: {}'.format(res.get('acknowledged', False)))
            logger.info('Creating [{}] index...'.format(index_name))
            res = self.es_client.indices.create(index=index_name, body=index_mapping)
            logger.info('Create Index Acknowledged: {}'.format(res.get('acknowledged', False)))
            self.es_client.cluster.health(wait_for_status='yellow', request_timeout=20)
        else:
            logger.debug('Elasticsearch is not enabled')

    def get_client(self):
        # Build the client once and cache it; returns None when ES is disabled.
        if settings.ES_ENABLED is True:
            if self.es_client is None:
                kwords = {}
                kwords['hosts'] = settings.ES_HOST
                if settings.ES_BASIC_AUTH:
                    kwords['http_auth'] = (settings.ES_AUTH_USERNAME, settings.ES_AUTH_PASSWORD)
                # Create ES client
                self.es_client = Elasticsearch(**kwords)
        return self.es_client

    def wait_for_yellow_cluster_heath(self):
        # Block until the cluster reports at least yellow status (20s timeout).
        self.check_elasticsearch()
        self.es_client.cluster.health(wait_for_status='yellow', request_timeout=20)


# Module-level singleton used by the rest of this module.
elasticsearch_factory = ElasticsearchFactory()
def get_listing_mapping():
    """
    Creates ElasticSearch Mapping for Listing(models.Model)

    Fields:
        Full Text
            title (text)
            description (text)
            description_short (text)
            tags (text)
        Filtering
            tags:
            categories:
            agencies:
            listing_types:
            is_508_compliant (boolean)
        Default Filtering
            is_deleted (boolean): 0
            is_enabled (boolean): 1
            approval_status (String): "APPROVED"

    Returns:
        Python Dictionary with ElasticSearch Mapping
    """
    return {
        # dynamic option is strict to prevent any unknown field from automatically being index
        "dynamic": "strict",
        "properties": {
            # Title is used for searching
            # Title.keyword_lowercase is used for ordering
            "title": {
                "type": "text",
                "analyzer": "autocomplete",
                "search_analyzer": "autocomplete",
                "fields": {
                    "keyword_lowercase": {
                        "type": "text",
                        "analyzer": "keyword_lowercase_analyzer"
                    }
                }
            },
            # description is used for searching
            "description": {
                "type": "text",
                "analyzer": "autocomplete",
                "search_analyzer": "autocomplete"
            },
            # description_short is used for searching
            "description_short": {
                "type": "text",
                "analyzer": "autocomplete",
                "search_analyzer": "autocomplete"
            },
            # = Filtering fields =
            # categories is used for Filtering
            "categories": {
                "type": "nested",
                "properties": {
                    "description": {
                        "type": "text"
                    },
                    "id": {
                        "type": "integer"
                    },
                    "title": {
                        "type": "text",
                        "analyzer": "keyword_lowercase_analyzer"
                    }
                }
            },
            # agency_short_name is used for filtering (include/exclude)
            "agency_short_name": {
                "type": "text",
                "analyzer": "keyword_lowercase_analyzer"
            },
            # approval_status is used for filtering
            "approval_status": {
                "type": "text",
                "analyzer": "keyword_lowercase_analyzer"
            },
            # is_deleted is used for filtering
            "is_deleted": {
                "type": "boolean"
            },
            # is_enabled is used for filtering
            "is_enabled": {
                "type": "boolean"
            },
            # is_private is used for filtering, used to filter out private apps for different organizations
            "is_private": {
                "type": "boolean"
            },
            # is_508_compliant is used for filtering
            "is_508_compliant": {
                "type": "boolean"
            },
            # listing_type_title is used for filtering , ex: ['Web Application', 'Web Services', 'Widget'..]
            "listing_type_title": {
                "type": "text",
                "analyzer": "keyword_lowercase_analyzer"
            },
            # tags used for searching
            # tags[].name used for searching
            # tags[].name_string used for filtering
            "tags": {
                "type": "nested",
                "properties": {
                    "id": {
                        "type": "integer"
                    },
                    "name": {
                        "type": "text",
                        "analyzer": "autocomplete",
                        "search_analyzer": "autocomplete"
                    },
                    "name_string": {
                        "type": "text",
                        "analyzer": "keyword_lowercase_analyzer"
                    }
                }
            },
            # is_featured could be used for filtering
            "is_featured": {
                "type": "boolean"
            },
            # = metadata fields =
            # Index Option - Should the field be searchable? False
            # id is metadata (displaying)
            "id": {
                "type": "integer",
                # "index": False
            },
            # unique_name used for metadata (displaying)
            "unique_name": {
                "type": "text",
                "index": False
            },
            # owners used for metadata (displaying)
            "owners": {
                "properties": {
                    "display_name": {
                        "type": "text",
                        "index": False
                    },
                    "id": {
                        "type": "integer",
                        "index": False
                    },
                    "user": {
                        "properties": {
                            "username": {
                                "type": "text",
                                "index": False
                            },
                        }
                    }
                },
            },
            # approved_date used for metadata (displaying)
            "approved_date": {
                "type": "date",
            },
            # agency_id is metadata (displaying)
            "agency_id": {
                "type": "integer"
            },
            # agency_title is metadata (displaying)
            "agency_title": {
                "type": "keyword",
            },
            # security_marking used to enforce security check for listing before showing to user
            "security_marking": {
                "type": "keyword",
            },
            # listing_type_id is metadata
            "listing_type_id": {
                "type": "integer"
            },
            # listing_type_description is metadata
            "listing_type_description": {
                "type": "text"
            },
            # launch_url is metadata
            "launch_url": {
                "type": "text",
                "index": False
            },
            # banner_icon is metadata (displaying)
            "banner_icon": {
                "properties": {
                    "file_extension": {
                        "type": "text",
                        "index": False
                    },
                    "id": {
                        "type": "integer",
                        "index": False
                    },
                    "security_marking": {
                        "type": "text",
                        "index": False
                    }
                }
            },
            # large_banner_icon is metadata (displaying)
            "large_banner_icon": {
                "properties": {
                    "file_extension": {
                        "type": "text",
                        "index": False
                    },
                    "id": {
                        "type": "integer",
                        "index": False
                    },
                    "security_marking": {
                        "type": "text",
                        "index": False
                    }
                }
            },
            # large_icon is used for metadata
            "large_icon": {
                "properties": {
                    "file_extension": {
                        "type": "text",
                        "index": False
                    },
                    "id": {
                        "type": "integer",
                        "index": False
                    },
                    "security_marking": {
                        "type": "text",
                        "index": False
                    }
                }
            },
            # small_icon is used for metadata
            "small_icon": {
                "properties": {
                    "file_extension": {
                        "type": "text",
                        "index": False
                    },
                    "id": {
                        "type": "integer",
                        "index": False
                    },
                    "security_marking": {
                        "type": "text",
                        "index": False
                    }
                }
            },
            # per-star review counts, used for metadata
            "total_rate1": {
                "type": "integer"
            },
            # used for metadata
            "total_rate2": {
                "type": "integer"
            },
            # used for metadata
            "total_rate3": {
                "type": "integer"
            },
            # used for metadata
            "total_rate4": {
                "type": "integer"
            },
            # used for metadata
            "total_rate5": {
                "type": "integer"
            },
            # used for metadata
            "total_reviews": {
                "type": "integer"
            },
            # used for metadata
            "total_review_responses": {
                "type": "integer"
            },
            # used for metadata
            "total_votes": {
                "type": "integer"
            },
            # used for metadata
            "avg_rate": {
                "type": "double"
            },
            # used for metadata
            "usage_requirements": {
                "type": "text",
                "index": False
            },
            # used for metadata
            "system_requirements": {
                "type": "text",
                "index": False
            }
        }
    }
def prepare_clean_listing_record(listing_serializer_record):
    """
    Normalize a serialized listing record for indexing into Elasticsearch.

    Drops fields the mapping does not index, trims image and owner entries
    down to the keys the mapping knows about, flattens the nested ``agency``
    and ``listing_type`` objects into top-level ``agency_*`` /
    ``listing_type_*`` keys, and copies each tag's ``name`` into a
    ``name_string`` field for exact-match filtering.

    NOTE: the input dictionary is mutated in place before being deep-copied
    via a JSON round-trip.

    Args:
        listing_serializer_record: One record of ReadOnlyListingSerializer

    Returns:
        A plain dictionary ready to be indexed.
    """
    keys_to_remove = ['contacts',
                      'last_activity',
                      'required_listings',
                      'current_rejection',
                      'what_is_new',
                      'iframe_compatible',
                      'edited_date',
                      'featured_date',
                      'version_name',
                      'feedback_score',
                      'intents']

    # Drop every non-indexed field that is present.
    for unwanted_key in keys_to_remove:
        listing_serializer_record.pop(unwanted_key, None)

    image_keys_to_clean = ['large_icon',
                           'small_icon',
                           'banner_icon',
                           'large_banner_icon']

    # Strip image sub-objects down to the mapped keys.
    for image_key in image_keys_to_clean:
        image_entry = listing_serializer_record.get(image_key)
        if image_entry:
            del image_entry['image_type']
            del image_entry['uuid']

    del listing_serializer_record['agency']['icon']

    # Keep only id, display_name and user.username for each owner.
    listing_serializer_record['owners'] = [
        {'id': owner['id'],
         'display_name': owner['display_name'],
         'user': {'username': owner['user']['username']}}
        for owner in listing_serializer_record['owners']
    ]

    # JSON round-trip to get plain builtin types (deep copy included).
    record_clean_obj = json.loads(json.dumps(listing_serializer_record))

    # title_suggest = {"input": [ record_clean_obj['title'] ] }
    # record_clean_obj['title_suggest'] =title_suggest

    # Flatten Agency Obj - Makes the search query easier
    agency_obj = record_clean_obj.pop('agency')
    record_clean_obj['agency_id'] = agency_obj['id']
    record_clean_obj['agency_short_name'] = agency_obj['short_name']
    record_clean_obj['agency_title'] = agency_obj['title']

    # Flatten listing_type Obj - Makes the search query easier
    listing_type_obj = record_clean_obj.pop('listing_type')
    record_clean_obj['listing_type_id'] = listing_type_obj['id']
    record_clean_obj['listing_type_description'] = listing_type_obj['description']
    record_clean_obj['listing_type_title'] = listing_type_obj['title']

    # Give every tag an exact-match copy of its name.
    record_clean_obj['tags'] = [dict(tag_entry, name_string=tag_entry['name'])
                                for tag_entry in record_clean_obj['tags']]

    return record_clean_obj
def get_mapping_setting_obj(number_of_shards=None, number_of_replicas=None):
    """
    This method creates the elasticsearch mapping object

    Args:
        number_of_shards(int): Number of shards that index should to have
            (falls back to settings.ES_NUMBER_OF_SHARDS when None)
        number_of_replicas(int): Number of replicas that index should have
            (falls back to settings.ES_NUMBER_OF_REPLICAS when None)

    Returns:
        Mapping Object(dictionary): Elasticsearch mapping object
    """
    if number_of_shards is None:
        number_of_shards = settings.ES_NUMBER_OF_SHARDS
    if number_of_replicas is None:
        number_of_replicas = settings.ES_NUMBER_OF_REPLICAS

    data = {
        "settings": {
            "number_of_shards": number_of_shards,
            "number_of_replicas": number_of_replicas,
            "analysis": {
                "filter": {
                    # edge_ngram filter powers search-as-you-type matching
                    # for titles, descriptions and tag names.
                    "autocomplete_filter": {
                        "type": "edge_ngram",
                        "min_gram": 1,
                        "max_gram": 20
                    }
                },
                "analyzer": {
                    "autocomplete": {
                        "type": "custom",
                        "tokenizer": "standard",
                        "filter": [
                            "lowercase",
                            "autocomplete_filter"
                        ]
                    },
                    # Whole-value, case-insensitive analyzer used by the
                    # filter/sort fields in get_listing_mapping().
                    "keyword_lowercase_analyzer": {
                        "tokenizer": "keyword",
                        "filter": ["lowercase"]
                    }
                }
            }
        },
        "mappings": {
            "listings": get_listing_mapping()
        }
    }
    return data
def update_es_listing(current_listing_id, record, is_new):
    """
    Create or update a listing document in Elasticsearch.

    If Elasticsearch is disabled or unreachable this logs a warning and
    returns without raising.  The index (with its mapping) is created on
    demand before the document is written, then the document is created or
    updated depending on whether it already exists in the index.

    Args:
        current_listing_id: listing primary key, used as the ES document id
        record(dict): serialized listing
        is_new: flag indicating the listing is newly created.  Kept for
            interface compatibility; the original code performed the exact
            same exists-then-update/create sequence in both branches, so the
            value no longer changes behavior.
    """
    # Create ES client
    es_client = elasticsearch_factory.get_client()

    if settings.ES_ENABLED is False:
        # warning() is the non-deprecated spelling of warn()
        logger.warning('Elasticsearch Service Not Enabled')
        return
    if not es_client.ping():
        logger.warning('Elasticsearch Service Unavailable')
        # raise errors.ElasticsearchServiceUnavailable()
        return

    # If the index does not exist in Elasticsearch, create index so that adding records work
    if not es_client.indices.exists(settings.ES_INDEX_NAME):
        request_body = get_mapping_setting_obj()
        logger.info('Creating [{}] index...'.format(settings.ES_INDEX_NAME))
        res = es_client.indices.create(index=settings.ES_INDEX_NAME, body=request_body)
        logger.info('Create Index Acknowledged: {}'.format(res.get('acknowledged', False)))
        es_client.cluster.health(wait_for_status='yellow', request_timeout=20)

    es_record_exist = es_client.exists(
        index=settings.ES_INDEX_NAME,
        doc_type=settings.ES_TYPE_NAME,
        id=current_listing_id,
        refresh=True
    )

    record_clean_obj = prepare_clean_listing_record(record)

    # The previous implementation branched on `is_new is not None` but both
    # branches were byte-identical; the duplication has been collapsed.
    if es_record_exist:
        es_client.update(
            index=settings.ES_INDEX_NAME,
            doc_type=settings.ES_TYPE_NAME,
            id=current_listing_id,
            refresh=True,
            body={"doc": record_clean_obj}
        )
    else:
        es_client.create(
            index=settings.ES_INDEX_NAME,
            doc_type=settings.ES_TYPE_NAME,
            id=current_listing_id,
            refresh=True,
            body=record_clean_obj
        )
def encode_special_characters(user_string):
    """
    Encode Special Characters for user's search Strings

    Each Elasticsearch query_string reserved character is prefixed with a
    backslash so it is treated literally.

    List of special characters can be found here:
    https://www.elastic.co/guide/en/elasticsearch/reference/2.4/query-dsl-query-string-query.html#_reserved_characters

    Args:
        user_string(string): raw string to encode; None is treated as empty

    Returns:
        Encoded string safe to embed in an elasticsearch query_string query
    """
    if user_string is None:
        return ""
    # A set gives O(1) membership checks; the original scanned a list per
    # character and called str.replace redundantly (for a single character,
    # char.replace(char, '\\' + char) is just '\\' + char).
    reserved_chars = {'+', '-', '=', '|', '<', '>', '!', '(', ')', '{', '}',
                      '[', ']', '^', '"', '~', '*', '?', ':', '\\', '/'}
    return "".join('\\' + char if char in reserved_chars else char
                   for char in user_string)
def make_search_query_obj(search_param_parser, exclude_agencies=None):
    """
    Function is used to make elasticsearch query for searching

    Args:
        search_param_parser(SearchParamParser): Object with search parameters
            search(str): Search Keyword
            user_offset(int): Offset
            user_limit(int): Limit
            categories([str,str,..]): List category Strings
            agencies([str,str,..]): List agencies Strings
            listing_types([str,str,..]): List listing types Strings
            minscore(float): Minscore Float
            ordering([str,str,str]): List of fields to order
        exclude_agencies([str,..]): short names of agencies whose private
            listings must be excluded from the results

    Returns:
        dict: a complete Elasticsearch search body (bool query with should
        clauses for scoring and filter clauses for hard constraints).
    """
    user_string = encode_special_characters(search_param_parser.search_string)

    # Pagination
    user_offset = search_param_parser.offset
    user_limit = search_param_parser.limit  # Size
    # user_limit_set = filter_params.get('limit_set', False)

    # Filtering
    tags = search_param_parser.tags
    categories = search_param_parser.categories
    agencies = search_param_parser.agencies
    listing_types = search_param_parser.listing_types
    is_508_compliant = search_param_parser.is_508_compliant

    # Ordering
    ordering = search_param_parser.ordering

    # Boost
    boost_title = search_param_parser.boost_title
    boost_description = search_param_parser.boost_description
    boost_description_short = search_param_parser.boost_description_short
    boost_tags = search_param_parser.boost_tags

    min_score = search_param_parser.min_score

    # Exclude_agencies
    exclude_agencies = exclude_agencies or []

    # Default Filter
    # Filters out listing that are not deleted, enabled, and Approved
    filter_data = [
        {
            "term": {
                "is_deleted": False
            }
        },
        {
            "term": {
                "is_enabled": True
            }
        },
        {
            "match": {
                "approval_status": "APPROVED"
            }
        }
    ]

    # Only constrain 508 compliance when the caller asked for it explicitly
    # (None means "don't care").
    if is_508_compliant is True:
        filter_data.append({
            "term": {
                "is_508_compliant": True
            }
        })
    elif is_508_compliant is False:
        filter_data.append({
            "term": {
                "is_508_compliant": False
            }
        })

    # Agencies (agency_short_name) to filter
    if agencies:
        agencies_temp = []
        for agency_short_name in agencies:
            current_agency_data = {
                "match": {
                    "agency_short_name": agency_short_name
                }
            }
            agencies_temp.append(current_agency_data)

        agencies_data = {
            "bool": {
                "should": agencies_temp
            }
        }

        filter_data.append(agencies_data)

    # Agencies (agency_short_name ex, Minitrue) to exclude
    # Only *private* listings of the excluded agencies are removed; public
    # listings of those agencies remain visible.
    if exclude_agencies:
        exclude_agencies_temp = []

        for exclude_agency_short_name in exclude_agencies:
            temp_filter = {
                "bool": {
                    "filter": [
                        {
                            "match": {
                                "agency_short_name": exclude_agency_short_name
                            }
                        },
                        {
                            "match": {
                                "is_private": True
                            }
                        }
                    ]
                }
            }

            exclude_agencies_temp.append(temp_filter)

        agencies_query_data = {
            "bool": {
                "must_not": exclude_agencies_temp
            }
        }

        filter_data.append(agencies_query_data)

    # Listing Types to filter
    if listing_types:
        listing_types_temp = []
        for listing_type_title in listing_types:
            current_listing_type_data = {
                "match": {
                    "listing_type_title": listing_type_title
                }
            }
            listing_types_temp.append(current_listing_type_data)

        listing_type_data = {
            "bool": {
                "should": listing_types_temp
            }
        }

        filter_data.append(listing_type_data)

    # Tags to filter (nested field: matched on the exact-value name_string)
    if tags:
        tags_temp = []
        for tag in tags:
            current_tag_data = {
                "match": {
                    "tags.name_string": tag
                }
            }
            tags_temp.append(current_tag_data)

        tags_data = {
            "nested": {
                "boost": 1,
                "path": "tags",
                "query": {
                    "bool": {
                        "should": tags_temp
                    }
                }
            }
        }

        filter_data.append(tags_data)

    # Categories to filter (nested field)
    if categories:
        categories_temp = []
        for category in categories:
            current_category_data = {
                "match": {
                    "categories.title": category
                }
            }
            categories_temp.append(current_category_data)

        categories_data = {
            "nested": {
                "boost": 1,
                "path": "categories",
                "query": {
                    "bool": {
                        "should": categories_temp
                    }
                }
            }
        }

        filter_data.append(categories_data)

    temp_should = []

    if user_string:
        bt = boost_title
        bd = boost_description
        bds = boost_description_short

        # Score hits on tag names (nested query).
        temp_should.append({
            "nested": {
                "boost": boost_tags,
                "query": {
                    "query_string": {
                        "fields": [
                            "tags.name"
                        ],
                        "query": user_string
                    }
                },
                "path": "tags"
            }
        })

        # Search the title first to give it the score it needs and weight to order
        # the list by title preferance.
        temp_should.append({
            "match": {
                "title": user_string
            }
        })

        # The reason fuzziness is needed using the sample_data is because if
        # searching for 'ir', the results should bring up 'air mail' listings
        # without it will not bring 'air mail' listings
        # TODO: Investigate why search for 'a' does not bring 'air mail' to results
        temp_should.append({
            "multi_match": {
                "query": user_string,
                "type": "best_fields",
                "fields": ["title^" + str(bt), "description^" + str(bd), "description_short^" + str(bds)],
                "tie_breaker": 0.3,
                "minimum_should_match": "60%",
                "analyzer": "english",
                "fuzziness": "10"
                # fuzziness changes fixes missing first letter issue with searches (10).
            }
        })
    else:
        temp_should.append({"match_all": {}})
        # When querying with match_all the '_score' should 1

    search_query = {
        "size": user_limit,
        "min_score": min_score,
        "query": {
            "bool": {
                "should": temp_should,
                "filter": filter_data
            }
        }
    }

    # If user_string has one character, lower the min_score
    # this will make the closest results appear
    if len(user_string) == 1:
        search_query['min_score'] = 0.05

    if ordering:
        sort_list = []

        for order_item in ordering:
            order = 'asc'
            # A leading '-' means descending, mirroring Django ordering syntax.
            if order_item[0] == '-':
                order_item = order_item[1:]
                order = 'desc'
            # TODO: Figure out a way to get raw field dynamically
            if order_item == 'title':
                order_item = 'title.keyword_lowercase'
            sort_list.append({order_item: {'order': order}})

        search_query['sort'] = sort_list

    if user_offset:
        search_query['from'] = user_offset

    return search_query
| apache-2.0 |
luxnovalabs/enjigo_door | web_interface/djangoappengine/tests/test_order.py | 28 | 2128 | from django.test import TestCase
from .models import OrderedModel
class OrderTest(TestCase):
    """Exercise default and overridden queryset ordering on OrderedModel.

    OrderedModel declares a default ordering of descending ``priority``
    (inferred from the expectations below); each test checks how that
    default interacts with explicit order_by()/reverse() and pk filters.
    """

    def create_ordered_model_items(self):
        """Create four rows with known priorities; return (pks, priorities)."""
        pks = []
        priorities = [5, 2, 9, 1]
        # enumerate(..., start=1) replaces the old manual `pk += 1` bump.
        for pk, priority in enumerate(priorities, start=1):
            model = OrderedModel(pk=pk, priority=priority)
            model.save()
            pks.append(model.pk)
        return pks, priorities

    def test_default_order(self):
        pks, priorities = self.create_ordered_model_items()
        # assertEqual: assertEquals is a deprecated alias.
        self.assertEqual(
            [item.priority for item in OrderedModel.objects.all()],
            sorted(priorities, reverse=True))

    def test_override_default_order(self):
        pks, priorities = self.create_ordered_model_items()
        self.assertEqual(
            [item.priority for item in
             OrderedModel.objects.all().order_by('priority')],
            sorted(priorities))

    def test_remove_default_order(self):
        # An empty order_by() clears the model's default ordering.
        pks, priorities = self.create_ordered_model_items()
        self.assertEqual(
            [item.pk for item in OrderedModel.objects.all().order_by()],
            sorted(pks))

    def test_order_with_pk_filter(self):
        pks, priorities = self.create_ordered_model_items()
        self.assertEqual(
            [item.priority for item in
             OrderedModel.objects.filter(pk__in=pks)],
            sorted(priorities, reverse=True))

        # Test with id__in.
        self.assertEqual(
            [item.priority for item in
             OrderedModel.objects.filter(id__in=pks)],
            sorted(priorities, reverse=True))

        # Test reverse.
        self.assertEqual(
            [item.priority for item in
             OrderedModel.objects.filter(pk__in=pks).reverse()],
            sorted(priorities, reverse=False))

    def test_remove_default_order_with_pk_filter(self):
        pks, priorities = self.create_ordered_model_items()
        self.assertEqual(
            [item.priority for item in
             OrderedModel.objects.filter(pk__in=pks).order_by()],
            priorities)

    # TODO: Test multiple orders.
| unlicense |
sookasa/heroku-buildpack-python-pipeline | vendor/pip-1.2.1/tests/test_pip.py | 9 | 21936 | #!/usr/bin/env python
import os
import sys
import tempfile
import shutil
import glob
import atexit
import textwrap
import site
from scripttest import TestFileEnvironment, FoundDir
from tests.path import Path, curdir, u
from pip.util import rmtree
# e.g. '2.7' / '3.2' — used when asserting against site-packages paths.
pyversion = sys.version[:3]

# the directory containing all the tests
here = Path(__file__).abspath.folder

# the root of this pip source distribution
src_folder = here.folder

# Shared scratch directory for pip's download cache; removed by _cleanup().
download_cache = tempfile.mkdtemp(prefix='pip-test-cache')

# Site-packages path relative to the user base (e.g. 'lib/pythonX.Y/site-packages').
site_packages_suffix = site.USER_SITE[len(site.USER_BASE) + 1:]
def path_to_url(path):
    """
    Convert a path to URI. The path will be made absolute and
    will not have quoted path parts.
    (adapted from pip.util)
    """
    absolute = os.path.normpath(os.path.abspath(path))
    drive, remainder = os.path.splitdrive(absolute)
    # Re-join with forward slashes so the URI is the same on every platform.
    url_path = '/'.join(remainder.split(os.path.sep))
    if drive:
        # Windows drive letters need the extra slash: file:///C:/...
        return 'file:///' + drive + url_path
    return 'file://' + url_path
def demand_dirs(path):
    # Create *path* (and any missing parents) if it does not already exist.
    # NOTE(review): exists-then-create is racy under concurrency; kept as-is
    # because this test helper targets old Pythons without makedirs(exist_ok=...).
    if not os.path.exists(path):
        os.makedirs(path)
# Tweak the path so we can find up-to-date pip sources
# (http://bitbucket.org/ianb/pip/issue/98)
# Prepending makes the in-tree pip shadow any installed copy.
sys.path = [src_folder] + sys.path
def create_virtualenv(where, distribute=False):
    """Create a virtualenv at *where* and return virtualenv.path_locations(where)."""
    import virtualenv
    if sys.version_info[0] > 2:
        # On Python 3 the environment is always built with distribute.
        distribute = True
    virtualenv.create_environment(
        where, use_distribute=distribute, unzip_setuptools=True)
    return virtualenv.path_locations(where)
def relpath(root, other):
    """a poor man's os.path.relpath, since we may not have Python 2.6"""
    prefix = root+Path.sep
    # *other* must live directly under *root*; anything else is a caller bug.
    assert other.startswith(prefix)
    return Path(other[len(prefix):])
# Keep the developer's PYTHONPATH from leaking into the test subprocesses.
if 'PYTHONPATH' in os.environ:
    del os.environ['PYTHONPATH']
# Compatibility shim: provide any() on Pythons too old to have the builtin
# (it appeared in 2.5).  Deliberately shadows nothing on modern interpreters
# because the `try` succeeds there.
try:
    any
except NameError:
    def any(seq):
        for item in seq:
            if item:
                return True
        return False
def clear_environ(environ):
    """Return a copy of *environ* with every PIP_* variable removed.

    The comparison is case-insensitive, so pip_x / Pip_X are dropped
    as well.
    """
    cleaned = {}
    for key, value in environ.items():
        if not key.lower().startswith('pip_'):
            cleaned[key] = value
    return cleaned
def install_setuptools(env):
    """Install a pinned setuptools (0.6c11) into test environment *env*.

    On Windows the easy_install scripts are first copied out to a temp
    directory and run from there -- presumably because a running
    executable cannot overwrite itself in place (TODO confirm); the
    temp dir is always removed afterwards.
    """
    easy_install = os.path.join(env.bin_path, 'easy_install')
    # Pinned version so test runs are reproducible.
    version = 'setuptools==0.6c11'
    if sys.platform != 'win32':
        return env.run(easy_install, version)
    tempdir = tempfile.mkdtemp()
    try:
        # Copy easy_install plus its sibling wrapper files (.exe/.py).
        for f in glob.glob(easy_install+'*'):
            shutil.copy2(f, tempdir)
        return env.run(os.path.join(tempdir, 'easy_install'), version)
    finally:
        rmtree(tempdir)
# Shared module-global test environment; see get_env()/reset_env().
env = None
def reset_env(environ=None, use_distribute=None):
    """(Re)create the global test environment and return it.

    Uses the cached FastTestPipEnvironment unless use_distribute is
    given explicitly, which forces a fresh full TestPipEnvironment.
    """
    global env
    # FastTestPipEnv reuses env, not safe if use_distribute specified
    if use_distribute is None:
        env = FastTestPipEnvironment(environ)
    else:
        env = TestPipEnvironment(environ, use_distribute=use_distribute)
    return env
class TestFailure(AssertionError):
    """Raised when one of the pip test helpers detects a failed expectation."""
#
# This cleanup routine prevents the __del__ method that cleans up the tree of
# the last TestPipEnvironment from firing after shutil has already been
# unloaded. It also ensures that FastTestPipEnvironment doesn't leave an
# environment hanging around that might confuse the next test run.
#
def _cleanup():
    """atexit hook: drop the global env and remove cached trees.

    Dropping ``env`` here makes TestPipEnvironment.__del__ run before
    shutil is unloaded at interpreter shutdown; removing the fast-test
    trees keeps a stale environment from confusing the next run.
    """
    global env
    del env
    rmtree(download_cache, ignore_errors=True)
    rmtree(fast_test_env_root, ignore_errors=True)
    rmtree(fast_test_env_backup, ignore_errors=True)
atexit.register(_cleanup)
class TestPipResult(object):
    """Wrapper around a scripttest result for a single pip invocation.

    Normalizes Windows line endings and adds pip-specific assertion
    helpers; any attribute not defined here (files_created,
    files_updated, test_env, ...) is forwarded to the wrapped result.
    """
    def __init__(self, impl, verbose=False):
        self._impl = impl
        if verbose:
            print(self.stdout)
            if self.stderr:
                print('======= stderr ========')
                print(self.stderr)
                print('=======================')
    def __getattr__(self, attr):
        # Delegate unknown attributes to the underlying result object.
        return getattr(self._impl, attr)
    if sys.platform == 'win32':
        @property
        def stdout(self):
            return self._impl.stdout.replace('\r\n', '\n')
        @property
        def stderr(self):
            return self._impl.stderr.replace('\r\n', '\n')
        def __str__(self):
            return str(self._impl).replace('\r\n', '\n')
    else:
        # Python doesn't automatically forward __str__ through __getattr__
        def __str__(self):
            return str(self._impl)
    def assert_installed(self, pkg_name, with_files=(), without_files=(),
                         without_egg_link=False, use_user_site=False):
        """Assert that this run installed *pkg_name* (egg-link style).

        with_files / without_files are paths relative to the package's
        source checkout that must (not) have been created; listing
        ``curdir`` in without_files asserts the checkout itself is
        absent.  Raises TestFailure on any mismatch.  Defaults are
        immutable tuples on purpose (no mutable default arguments).
        """
        e = self.test_env
        pkg_dir = e.venv/ 'src'/ pkg_name.lower()
        if use_user_site:
            egg_link_path = e.user_site / pkg_name + '.egg-link'
        else:
            egg_link_path = e.site_packages / pkg_name + '.egg-link'
        if without_egg_link:
            if egg_link_path in self.files_created:
                raise TestFailure('unexpected egg link file created: '
                                  '%r\n%s' % (egg_link_path, self))
        else:
            if egg_link_path not in self.files_created:
                raise TestFailure('expected egg link file missing: '
                                  '%r\n%s' % (egg_link_path, self))
            egg_link_file = self.files_created[egg_link_path]
            if not (# FIXME: I don't understand why there's a trailing . here
                    egg_link_file.bytes.endswith('.')
                and egg_link_file.bytes[:-1].strip().endswith(pkg_dir)):
                raise TestFailure(textwrap.dedent(u('''\
                Incorrect egg_link file %r
                Expected ending: %r
                ------- Actual contents -------
                %s
                -------------------------------''' % (
                        egg_link_file,
                        pkg_dir + u('\n.'),
                        egg_link_file.bytes))))
        if use_user_site:
            pth_file = Path.string(e.user_site / 'easy-install.pth')
        else:
            pth_file = Path.string(e.site_packages / 'easy-install.pth')
        if (pth_file in self.files_updated) == without_egg_link:
            raise TestFailure('%r unexpectedly %supdated by install' % (
                pth_file, (not without_egg_link and 'not ' or '')))
        if (pkg_dir in self.files_created) == (curdir in without_files):
            raise TestFailure(textwrap.dedent('''\
            expected package directory %r %sto be created
            actually created:
            %s
            ''') % (
                Path.string(pkg_dir),
                (curdir in without_files and 'not ' or ''),
                sorted(self.files_created.keys())))
        # BUG FIX: these two messages used '%f' (a float conversion) on
        # a path string, so building the message raised TypeError
        # instead of the intended TestFailure.  Use %r for both values.
        for f in with_files:
            if not (pkg_dir/f).normpath in self.files_created:
                raise TestFailure('Package directory %r missing '
                                  'expected content %r' % (pkg_dir, f))
        for f in without_files:
            if (pkg_dir/f).normpath in self.files_created:
                raise TestFailure('Package directory %r has '
                                  'unexpected content %r' % (pkg_dir, f))
class TestPipEnvironment(TestFileEnvironment):
    """A specialized TestFileEnvironment for testing pip"""
    #
    # Attribute naming convention
    # ---------------------------
    #
    # Instances of this class have many attributes representing paths
    # in the filesystem. To keep things straight, absolute paths have
    # a name of the form xxxx_path and relative paths have a name that
    # does not end in '_path'.
    # The following paths are relative to the root_path, and should be
    # treated by clients as instance attributes. The fact that they
    # are defined in the class is an implementation detail
    # where we'll create the virtual Python installation for testing
    #
    # Named with a leading dot to reduce the chance of spurious
    # results due to being mistaken for the virtualenv package.
    venv = Path('.virtualenv')
    # The root of a directory tree to be used arbitrarily by tests
    scratch = Path('scratch')
    exe = sys.platform == 'win32' and '.exe' or ''
    verbose = False
    def __init__(self, environ=None, use_distribute=None):
        """Build a throwaway virtualenv-based sandbox for running pip.

        The sandbox lives in a fresh temp dir, gets a scrubbed
        environment (all PIP_* vars removed), a private user-site, the
        pip under test installed, and the cached PyPI proxy enabled.
        """
        self.root_path = Path(tempfile.mkdtemp('-piptest'))
        # We will set up a virtual environment at root_path.
        self.scratch_path = self.root_path / self.scratch
        self.venv_path = self.root_path / self.venv
        if not environ:
            environ = os.environ.copy()
            environ = clear_environ(environ)
            environ['PIP_DOWNLOAD_CACHE'] = str(download_cache)
        environ['PIP_NO_INPUT'] = '1'
        environ['PIP_LOG_FILE'] = str(self.root_path/'pip-log.txt')
        super(TestPipEnvironment, self).__init__(
            self.root_path, ignore_hidden=False,
            environ=environ, split_cmd=False, start_clear=False,
            cwd=self.scratch_path, capture_temp=True, assert_no_temp=True)
        demand_dirs(self.venv_path)
        demand_dirs(self.scratch_path)
        if use_distribute is None:
            use_distribute = os.environ.get('PIP_TEST_USE_DISTRIBUTE', False)
        self.use_distribute = use_distribute
        # Create a virtualenv and remember where it's putting things.
        virtualenv_paths = create_virtualenv(self.venv_path, distribute=self.use_distribute)
        assert self.venv_path == virtualenv_paths[0] # sanity check
        # Record each location twice: absolute (xxx_path) and relative
        # to root_path (xxx) -- see the naming convention above.
        for id, path in zip(('venv', 'lib', 'include', 'bin'), virtualenv_paths):
            setattr(self, id+'_path', Path(path))
            setattr(self, id, relpath(self.root_path, path))
        assert self.venv == TestPipEnvironment.venv # sanity check
        self.site_packages = self.lib/'site-packages'
        self.user_base_path = self.venv_path/'user'
        self.user_site_path = self.venv_path/'user'/site_packages_suffix
        self.user_site = relpath(self.root_path, self.user_site_path)
        demand_dirs(self.user_site_path)
        self.environ["PYTHONUSERBASE"] = self.user_base_path
        # create easy-install.pth in user_site, so we always have it updated instead of created
        open(self.user_site_path/'easy-install.pth', 'w').close()
        # put the test-scratch virtualenv's bin dir first on the PATH
        self.environ['PATH'] = Path.pathsep.join((self.bin_path, self.environ['PATH']))
        # test that test-scratch virtualenv creation produced sensible venv python
        result = self.run('python', '-c', 'import sys; print(sys.executable)')
        pythonbin = result.stdout.strip()
        if Path(pythonbin).noext != self.bin_path/'python':
            raise RuntimeError(
                "Oops! 'python' in our test environment runs %r"
                " rather than expected %r" % (pythonbin, self.bin_path/'python'))
        # make sure we have current setuptools to avoid svn incompatibilities
        if not self.use_distribute:
            install_setuptools(self)
        # Uninstall whatever version of pip came with the virtualenv.
        # Earlier versions of pip were incapable of
        # self-uninstallation on Windows, so we use the one we're testing.
        self.run('python', '-c',
                 '"import sys; sys.path.insert(0, %r); import pip; sys.exit(pip.main());"' % os.path.dirname(here),
                 'uninstall', '-vvv', '-y', 'pip')
        # Install this version instead
        self.run('python', 'setup.py', 'install', cwd=src_folder, expect_stderr=True)
        self._use_cached_pypi_server()
    def _ignore_file(self, fn):
        """Hide bytecode artifacts from file-state comparisons."""
        if fn.endswith('__pycache__') or fn.endswith(".pyc"):
            result = True
        else:
            result = super(TestPipEnvironment, self)._ignore_file(fn)
        return result
    def run(self, *args, **kw):
        """Run a command in the sandbox; return a TestPipResult."""
        if self.verbose:
            print('>> running %s %s' % (args, kw))
        cwd = kw.pop('cwd', None)
        run_from = kw.pop('run_from', None)
        assert not cwd or not run_from, "Don't use run_from; it's going away"
        # scripttest wants a plain string cwd, not a Path.
        cwd = Path.string(cwd or run_from or self.cwd)
        assert not isinstance(cwd, Path)
        return TestPipResult(super(TestPipEnvironment, self).run(cwd=cwd, *args, **kw), verbose=self.verbose)
    def __del__(self):
        # Best-effort removal of the temp tree (see _cleanup for why
        # this must not run after shutil is unloaded).
        rmtree(str(self.root_path), ignore_errors=True)
    def _use_cached_pypi_server(self):
        """Drop a .pth file into site-packages that starts the cached
        PyPI proxy (pypi_server) in every python run by the tests."""
        site_packages = self.root_path / self.site_packages
        pth = open(os.path.join(site_packages, 'pypi_intercept.pth'), 'w')
        pth.write('import sys; ')
        pth.write('sys.path.insert(0, %r); ' % str(here))
        pth.write('import pypi_server; pypi_server.PyPIProxy.setup(); ')
        pth.write('sys.path.remove(%r); ' % str(here))
        pth.close()
fast_test_env_root = here / 'tests_cache' / 'test_ws'
fast_test_env_backup = here / 'tests_cache' / 'test_ws_backup'
class FastTestPipEnvironment(TestPipEnvironment):
    """TestPipEnvironment variant that caches the built environment.

    The first construction does the full (slow) virtualenv + pip
    install and copies the result to a backup tree; later
    constructions just restore the backup, which is much faster.
    """
    def __init__(self, environ=None):
        import virtualenv
        self.root_path = fast_test_env_root
        self.backup_path = fast_test_env_backup
        self.scratch_path = self.root_path / self.scratch
        # We will set up a virtual environment at root_path.
        self.venv_path = self.root_path / self.venv
        if not environ:
            environ = os.environ.copy()
            environ = clear_environ(environ)
            environ['PIP_DOWNLOAD_CACHE'] = str(download_cache)
        environ['PIP_NO_INPUT'] = '1'
        environ['PIP_LOG_FILE'] = str(self.root_path/'pip-log.txt')
        # Deliberately skips TestPipEnvironment.__init__ (which would
        # rebuild everything) and calls the grandparent directly.
        TestFileEnvironment.__init__(self,
            self.root_path, ignore_hidden=False,
            environ=environ, split_cmd=False, start_clear=False,
            cwd=self.scratch_path, capture_temp=True, assert_no_temp=True)
        virtualenv_paths = virtualenv.path_locations(self.venv_path)
        for id, path in zip(('venv', 'lib', 'include', 'bin'), virtualenv_paths):
            setattr(self, id+'_path', Path(path))
            setattr(self, id, relpath(self.root_path, path))
        assert self.venv == TestPipEnvironment.venv # sanity check
        self.site_packages = self.lib/'site-packages'
        self.user_base_path = self.venv_path/'user'
        self.user_site_path = self.venv_path/'user'/'lib'/self.lib.name/'site-packages'
        self.user_site = relpath(self.root_path, self.user_site_path)
        self.environ["PYTHONUSERBASE"] = self.user_base_path
        # put the test-scratch virtualenv's bin dir first on the PATH
        self.environ['PATH'] = Path.pathsep.join((self.bin_path, self.environ['PATH']))
        self.use_distribute = os.environ.get('PIP_TEST_USE_DISTRIBUTE', False)
        # NOTE(review): this relies on Path.exists being a property,
        # not a method -- confirm against tests.path.Path.
        if self.root_path.exists:
            rmtree(self.root_path)
        if self.backup_path.exists:
            # Fast path: restore the previously built environment.
            shutil.copytree(self.backup_path, self.root_path, True)
        else:
            # Slow path: build from scratch, then snapshot to backup.
            demand_dirs(self.venv_path)
            demand_dirs(self.scratch_path)
            # Create a virtualenv and remember where it's putting things.
            create_virtualenv(self.venv_path, distribute=self.use_distribute)
            demand_dirs(self.user_site_path)
            # create easy-install.pth in user_site, so we always have it updated instead of created
            open(self.user_site_path/'easy-install.pth', 'w').close()
            # test that test-scratch virtualenv creation produced sensible venv python
            result = self.run('python', '-c', 'import sys; print(sys.executable)')
            pythonbin = result.stdout.strip()
            if Path(pythonbin).noext != self.bin_path/'python':
                raise RuntimeError(
                    "Oops! 'python' in our test environment runs %r"
                    " rather than expected %r" % (pythonbin, self.bin_path/'python'))
            # make sure we have current setuptools to avoid svn incompatibilities
            if not self.use_distribute:
                install_setuptools(self)
            # Uninstall whatever version of pip came with the virtualenv.
            # Earlier versions of pip were incapable of
            # self-uninstallation on Windows, so we use the one we're testing.
            self.run('python', '-c',
                     '"import sys; sys.path.insert(0, %r); import pip; sys.exit(pip.main());"' % os.path.dirname(here),
                     'uninstall', '-vvv', '-y', 'pip')
            # Install this version instead
            self.run('python', 'setup.py', 'install', cwd=src_folder, expect_stderr=True)
            shutil.copytree(self.root_path, self.backup_path, True)
        self._use_cached_pypi_server()
        assert self.root_path.exists
    def __del__(self):
        # Intentionally keep the tree: it is the cache reused by the
        # next run; _cleanup() removes it at process exit.
        pass # shutil.rmtree(str(self.root_path), ignore_errors=True)
def run_pip(*args, **kw):
    """Run pip in the current test environment and return its result.

    Directories that merely changed (typically because .pyc files or
    __pycache__ appeared inside them) are stripped from files_updated
    so tests only see meaningful updates.
    """
    result = env.run('pip', *args, **kw)
    updated_dirs = [
        path
        for path in result.files_before
        if isinstance(result.files_updated.get(path), FoundDir)
    ]
    for path in updated_dirs:
        del result.files_updated[path]
    return result
def write_file(filename, text, dest=None):
    """Write *text* to *filename* inside *dest*.

    *dest* defaults to the scratch directory of the current test
    environment; get_env() is only consulted when *dest* is not given,
    so callers supplying a destination never trigger environment
    creation.  The file handle is closed even if the write fails
    (context manager instead of the old open/write/close triple, which
    leaked the handle on error).
    """
    if dest:
        complete_path = dest/ filename
    else:
        complete_path = get_env().scratch_path/ filename
    with open(complete_path, 'w') as f:
        f.write(text)
def mkdir(dirname):
    """Create *dirname* inside the current test env's scratch directory."""
    os.mkdir(os.path.join(get_env().scratch_path, dirname))
def get_env():
    """Return the shared test environment, creating it on first use."""
    if env is None:
        reset_env()
    return env
# FIXME ScriptTest does something similar, but only within a single
# ProcResult; this generalizes it so states can be compared across
# multiple commands. Maybe should be rolled into ScriptTest?
def diff_states(start, end, ignore=None):
    """
    Differences two "filesystem states" as represented by dictionaries
    of FoundFile and FoundDir objects.

    Returns a dictionary with following keys:

    ``deleted``
        Dictionary of files/directories found only in the start state.
    ``created``
        Dictionary of files/directories found only in the end state.
    ``updated``
        Dictionary of files whose size has changed (FIXME not entirely
        reliable, but comparing contents is not possible because
        FoundFile.bytes is lazy, and comparing mtime doesn't help if
        we want to know if a file has been returned to its earlier
        state).

    Ignores mtime and other file attributes; only presence/absence and
    size are considered.
    """
    ignored_prefixes = ignore or []

    def is_ignored(path):
        # A path is ignored when it equals an ignore entry or lives
        # anywhere below it.
        for prefix in ignored_prefixes:
            if path == prefix:
                return True
            if path.startswith(prefix.rstrip(os.path.sep) + os.path.sep):
                return True
        return False

    start_keys = set(k for k in start if not is_ignored(k))
    end_keys = set(k for k in end if not is_ignored(k))
    deleted = dict((k, start[k]) for k in start_keys - end_keys)
    created = dict((k, end[k]) for k in end_keys - start_keys)
    updated = {}
    for k in start_keys & end_keys:
        if start[k].size != end[k].size:
            updated[k] = end[k]
    return dict(deleted=deleted, created=created, updated=updated)
def assert_all_changes(start_state, end_state, expected_changes):
    """
    Fails if anything changed that isn't listed in the
    expected_changes.

    start_state is either a dict mapping paths to
    scripttest.[FoundFile|FoundDir] objects or a TestPipResult whose
    files_before we'll test.  end_state is either a similar dict or a
    TestPipResult whose files_after we'll test.

    Note: listing a directory means anything below
    that directory can be expected to have changed.
    """
    if isinstance(start_state, TestPipResult):
        start_files = start_state.files_before
    else:
        start_files = start_state
    if isinstance(end_state, TestPipResult):
        end_files = end_state.files_after
    else:
        end_files = end_state
    diff = diff_states(start_files, end_files, ignore=expected_changes)
    if any(diff.values()):
        messages = [k + ': ' + ', '.join(v.keys()) for k, v in diff.items()]
        raise TestFailure('Unexpected changes:\n' + '\n'.join(messages))
    # Don't throw away this potentially useful information
    return diff
def _create_test_package(env):
    """Create a git-versioned 'version_pkg' package in the scratch dir.

    The package installs a console script ``version_pkg`` that prints
    '0.1'; an initial commit is made so VCS-checkout tests can target
    the repository.  Returns the package directory path.
    """
    mkdir('version_pkg')
    version_pkg_path = env.scratch_path/'version_pkg'
    write_file('version_pkg.py', textwrap.dedent('''\
        def main():
            print('0.1')
        '''), version_pkg_path)
    write_file('setup.py', textwrap.dedent('''\
        from setuptools import setup, find_packages
        setup(name='version_pkg',
              version='0.1',
              packages=find_packages(),
              py_modules=['version_pkg'],
              entry_points=dict(console_scripts=['version_pkg=version_pkg:main']))
        '''), version_pkg_path)
    env.run('git', 'init', cwd=version_pkg_path)
    env.run('git', 'add', '.', cwd=version_pkg_path)
    env.run('git', 'commit', '-q',
            '--author', 'Pip <python-virtualenv@googlegroups.com>',
            '-am', 'initial version', cwd=version_pkg_path)
    return version_pkg_path
def _change_test_package_version(env, version_pkg_path):
    """Rewrite version_pkg.py and commit, so the repo HEAD differs from
    the version originally installed (used by upgrade tests)."""
    write_file('version_pkg.py', textwrap.dedent('''\
        def main():
            print("some different version")'''), version_pkg_path)
    env.run('git', 'commit', '-q',
            '--author', 'Pip <python-virtualenv@googlegroups.com>',
            '-am', 'messed version',
            cwd=version_pkg_path, expect_stderr=True)
if __name__ == '__main__':
    # This module only provides test helpers; the suite is driven by
    # nosetests, so direct execution is refused.
    sys.stderr.write("Run pip's tests using nosetests. Requires virtualenv, ScriptTest, and nose.\n")
    sys.exit(1)
| mit |
aaossa/Dear-Notebooks | Web scraping/Download big files/download_big_files_asynchronous.py | 1 | 1220 | # https://community.nitrous.io/tutorials/asynchronous-programming-with-python-3
import aiohttp
import asyncio
import itertools
async def download(url, parts=16):
    """Download *url* with *parts* concurrent HTTP Range requests and
    return the reassembled body as bytes.
    NOTE(review): module-level aiohttp.get()/aiohttp.head() exist only
    in aiohttp < 2.0; newer versions require a ClientSession.  Also
    assumes the server reports Content-Length and honours Range
    requests -- confirm before reuse.
    """
    print("URL: {}".format(url))
    async def get_partial_content(_url, _part, start, end):
        # The last chunk gets end == "" (zip_longest fillvalue), which
        # produces the open-ended header "bytes=<start>-".
        print("Part {}/{} (Bytes {} to {})".format(_part, parts, start, end))
        h = {"Range": "bytes={}-{}".format(start, end - 1 if end else "")}
        async with aiohttp.get(_url, headers=h) as resp:
            return _part, await resp.read()
    async with aiohttp.head(url) as resp:
        size = int(resp.headers["Content-Length"])
    ranges = list(range(0, size, size // parts))
    # Fetch all parts concurrently; asyncio.wait yields them in
    # completion order, so sort by part index before joining.
    res, _ = await asyncio.wait(
        [get_partial_content(url, i, start, end) for i, (start, end) in
         enumerate(itertools.zip_longest(ranges, ranges[1:], fillvalue=""))])
    sorted_result = sorted(task.result() for task in res)
    return b"".join(data for _, data in sorted_result)
if __name__ == '__main__':
    # Demo: fetch a sample image in parallel range requests and save it.
    url = "http://github-images.s3.amazonaws.com/blog/2011/cc-wallpaper-desktop.png"
    loop = asyncio.get_event_loop()
    bs = loop.run_until_complete(download(url))
    with open("test_async.png", "wb") as f:
        f.write(bs)
yawnosnorous/python-for-android | python-modules/twisted/twisted/words/xish/xpathparser.py | 54 | 19140 | # Copyright (c) 2001-2007 Twisted Matrix Laboratories.
# See LICENSE for details.
# DO NOT EDIT xpathparser.py!
#
# It is generated from xpathparser.g using Yapps. Make needed changes there.
# This also means that the generated Python may not conform to Twisted's coding
# standards.
# HOWTO Generate me:
#
# 1.) Grab a copy of yapps2, version 2.1.1:
# http://theory.stanford.edu/~amitp/Yapps/
#
# Note: Do NOT use the package in debian/ubuntu as it has incompatible
# modifications.
#
# 2.) Generate the grammar:
#
# yapps2 xpathparser.g xpathparser.py.proto
#
# 3.) Edit the output to depend on the embedded runtime, not yappsrt.
#
# sed -e '/^import yapps/d' -e '/^[^#]/s/yappsrt\.//g' \
# xpathparser.py.proto > xpathparser.py
"""
XPath Parser.
Besides the parser code produced by Yapps, this module also defines the
parse-time exception classes, a scanner class, a base class for parsers
produced by Yapps, and a context class that keeps track of the parse stack.
These have been copied from the Yapps runtime.
"""
import sys, re
class SyntaxError(Exception):
    """When we run into an unexpected token, this is the exception to use"""
    # NOTE: intentionally shadows the builtin SyntaxError within this
    # generated module (Yapps runtime convention).
    def __init__(self, charpos=-1, msg="Bad Token", context=None):
        Exception.__init__(self)
        self.charpos = charpos  # character offset of the error; -1 = unknown
        self.msg = msg
        self.context = context  # parser Context stack at the failure point
    def __str__(self):
        if self.charpos < 0: return 'SyntaxError'
        else: return 'SyntaxError@char%s(%s)' % (repr(self.charpos), self.msg)
class NoMoreTokens(Exception):
    """Another exception object, for when we run out of tokens"""
    # Raised by Scanner.token() when the requested index is past the
    # end of the input.
    pass
class Scanner:
    """Yapps scanner.
    The Yapps scanner can work in context sensitive or context
    insensitive modes. The token(i) method is used to retrieve the
    i-th token. It takes a restrict set that limits the set of tokens
    it is allowed to return. In context sensitive mode, this restrict
    set guides the scanner. In context insensitive mode, there is no
    restriction (the set is always the full set of tokens).
    """
    def __init__(self, patterns, ignore, input):
        """Initialize the scanner.
        @param patterns: [(terminal, uncompiled regex), ...] or C{None}
        @param ignore: [terminal,...]
        @param input: string
        If patterns is C{None}, we assume that the subclass has defined
        C{self.patterns} : [(terminal, compiled regex), ...]. Note that the
        patterns parameter expects uncompiled regexes, whereas the
        C{self.patterns} field expects compiled regexes.
        """
        self.tokens = [] # [(begin char pos, end char pos, token name, matched text), ...]
        self.restrictions = []
        self.input = input
        self.pos = 0
        self.ignore = ignore
        self.first_line_number = 1
        if patterns is not None:
            # Compile the regex strings into regex objects
            self.patterns = []
            for terminal, regex in patterns:
                self.patterns.append( (terminal, re.compile(regex)) )
    def get_token_pos(self):
        """Get the current token position in the input text."""
        return len(self.tokens)
    def get_char_pos(self):
        """Get the current char position in the input text."""
        return self.pos
    def get_prev_char_pos(self, i=None):
        """Get the previous position (one token back) in the input text."""
        if self.pos == 0: return 0
        if i is None: i = -1
        return self.tokens[i][0]
    def get_line_number(self):
        """Get the line number of the current position in the input text."""
        # TODO: make this work at any token/char position
        return self.first_line_number + self.get_input_scanned().count('\n')
    def get_column_number(self):
        """Get the column number of the current position in the input text."""
        s = self.get_input_scanned()
        i = s.rfind('\n') # may be -1, but that's okay in this case
        return len(s) - (i+1)
    def get_input_scanned(self):
        """Get the portion of the input that has been tokenized."""
        return self.input[:self.pos]
    def get_input_unscanned(self):
        """Get the portion of the input that has not yet been tokenized."""
        return self.input[self.pos:]
    def token(self, i, restrict=None):
        """Get the i'th token in the input.
        If C{i} is one past the end, then scan for another token.
        @param i: token index
        @param restrict: [token, ...] or C{None}; if restrict is
                         C{None}, then any token is allowed.  You may call
                         token(i) more than once.  However, the restrict set
                         may never be larger than what was passed in on the
                         first call to token(i).
        """
        if i == len(self.tokens):
            self.scan(restrict)
        if i < len(self.tokens):
            # Make sure the restriction is more restricted.  This
            # invariant is needed to avoid ruining tokenization at
            # position i+1 and higher.
            if restrict and self.restrictions[i]:
                for r in restrict:
                    if r not in self.restrictions[i]:
                        raise NotImplementedError("Unimplemented: restriction set changed")
            return self.tokens[i]
        # Input exhausted: no token at index i.
        raise NoMoreTokens()
    def __repr__(self):
        """Print the last 10 tokens that have been scanned in"""
        output = ''
        for t in self.tokens[-10:]:
            output = '%s\n  (@%s)  %s  =  %s' % (output,t[0],t[2],repr(t[3]))
        return output
    def scan(self, restrict):
        """Should scan another token and add it to the list, self.tokens,
        and add the restriction to self.restrictions"""
        # Keep looking for a token, ignoring any in self.ignore
        while 1:
            # Search the patterns for the longest match, with earlier
            # tokens in the list having preference
            best_match = -1
            best_pat = '(error)'
            for p, regexp in self.patterns:
                # First check to see if we're ignoring this token
                if restrict and p not in restrict and p not in self.ignore:
                    continue
                m = regexp.match(self.input, self.pos)
                if m and len(m.group(0)) > best_match:
                    # We got a match that's better than the previous one
                    best_pat = p
                    best_match = len(m.group(0))
            # If we didn't find anything, raise an error
            if best_pat == '(error)' and best_match < 0:
                msg = 'Bad Token'
                if restrict:
                    msg = 'Trying to find one of '+', '.join(restrict)
                raise SyntaxError(self.pos, msg)
            # If we found something that isn't to be ignored, return it
            if best_pat not in self.ignore:
                # Create a token with this data
                token = (self.pos, self.pos+best_match, best_pat,
                         self.input[self.pos:self.pos+best_match])
                self.pos = self.pos + best_match
                # Only add this token if it's not in the list
                # (to prevent looping)
                if not self.tokens or token != self.tokens[-1]:
                    self.tokens.append(token)
                    self.restrictions.append(restrict)
                return
            else:
                # This token should be ignored ..
                self.pos = self.pos + best_match
class Parser:
    """Base class for Yapps-generated parsers.
    """
    def __init__(self, scanner):
        self._scanner = scanner
        self._pos = 0  # index of the next token to consume
    def _peek(self, *types):
        """Returns the token type for lookahead; if there are any args
        then the list of args is the set of token types to allow"""
        tok = self._scanner.token(self._pos, types)
        return tok[2]
    def _scan(self, type):
        """Returns the matched text, and moves to the next token"""
        tok = self._scanner.token(self._pos, [type])
        if tok[2] != type:
            raise SyntaxError(tok[0], 'Trying to find '+type+' :'+ ' ,'.join(self._scanner.restrictions[self._pos]))
        self._pos = 1 + self._pos
        return tok[3]
class Context:
    """Class to represent the parser's call stack.
    Every rule creates a Context that links to its parent rule.  The
    contexts can be used for debugging.
    """
    def __init__(self, parent, scanner, tokenpos, rule, args=()):
        """Create a new context.
        @param parent: Context object or C{None}
        @param scanner: Scanner object
        @param tokenpos: scanner token position
        @type tokenpos: L{int}
        @param rule: name of the rule
        @type rule: L{str}
        @param args: tuple listing parameters to the rule
        """
        self.parent = parent
        self.scanner = scanner
        self.tokenpos = tokenpos
        self.rule = rule
        self.args = args
    def __str__(self):
        # Render the rule chain root-first, e.g. "XPATH > PATH > EXPR".
        output = ''
        if self.parent: output = str(self.parent) + ' > '
        output += self.rule
        return output
def print_line_with_pointer(text, p):
    """Print the line of 'text' that includes position 'p',
    along with a second line with a single caret (^) at position p"""
    # TODO: separate out the logic for determining the line/character
    # location from the logic for determining how to display an
    # 80-column line to stderr.
    # Now try printing part of the line
    text = text[max(p-80, 0):p+80]
    p = p - max(p-80, 0)
    # Strip to the left
    i = text[:p].rfind('\n')
    j = text[:p].rfind('\r')
    if i < 0 or (0 <= j < i): i = j
    if 0 <= i < p:
        p = p - i - 1
        text = text[i+1:]
    # Strip to the right
    i = text.find('\n', p)
    j = text.find('\r', p)
    if i < 0 or (0 <= j < i): i = j
    if i >= 0:
        text = text[:i]
    # Now shorten the text
    while len(text) > 70 and p > 60:
        # Cut off 10 chars
        text = "..." + text[10:]
        # "..." re-adds 3 chars, so the caret shifts left by net 7.
        p = p - 7
    # Now print the string, along with an indicator
    print >>sys.stderr, '> ',text
    print >>sys.stderr, '> ',' '*p + '^'
def print_error(input, err, scanner):
    """Print error messages, the parser stack, and the input text -- for human-readable error messages."""
    # NOTE: this function assumes 80 columns :-(
    # Figure out the line number
    line_number = scanner.get_line_number()
    column_number = scanner.get_column_number()
    print >>sys.stderr, '%d:%d: %s' % (line_number, column_number, err.msg)
    context = err.context
    if not context:
        print_line_with_pointer(input, err.charpos)
    # Walk the rule stack from the innermost rule outward, pointing at
    # the input position where each rule started.
    while context:
        # TODO: add line number
        print >>sys.stderr, 'while parsing %s%s:' % (context.rule, tuple(context.args))
        print_line_with_pointer(input, context.scanner.get_prev_char_pos(context.tokenpos))
        context = context.parent
def wrap_error_reporter(parser, rule):
    """Invoke grammar rule *rule* on *parser*; on parse failure a
    human-readable diagnostic is printed to stderr and None returned."""
    try:
        return getattr(parser, rule)()
    except SyntaxError, e:
        input = parser._scanner.input
        print_error(input, e, parser._scanner)
    except NoMoreTokens:
        print >>sys.stderr, 'Could not complete parsing; stopped around here:'
        print >>sys.stderr, parser._scanner
from twisted.words.xish.xpath import AttribValue, BooleanValue, CompareValue
from twisted.words.xish.xpath import Function, IndexValue, LiteralValue
from twisted.words.xish.xpath import _AnyLocation, _Location
# Begin -- grammar generated by Yapps
import sys, re
class XPathParserScanner(Scanner):
    """Tokenizer for the XPath grammar (generated by Yapps).
    Patterns are tried in order; whitespace ('\\s+') is the only
    ignored terminal (see __init__).
    """
    patterns = [
        ('","', re.compile(',')),
        ('"@"', re.compile('@')),
        ('"\\)"', re.compile('\\)')),
        ('"\\("', re.compile('\\(')),
        ('"\\]"', re.compile('\\]')),
        ('"\\["', re.compile('\\[')),
        ('"//"', re.compile('//')),
        ('"/"', re.compile('/')),
        ('\\s+', re.compile('\\s+')),
        ('INDEX', re.compile('[0-9]+')),
        ('WILDCARD', re.compile('\\*')),
        ('IDENTIFIER', re.compile('[a-zA-Z][a-zA-Z0-9_\\-]*')),
        ('ATTRIBUTE', re.compile('\\@[a-zA-Z][a-zA-Z0-9_\\-]*')),
        ('FUNCNAME', re.compile('[a-zA-Z][a-zA-Z0-9_]*')),
        ('CMP_EQ', re.compile('\\=')),
        ('CMP_NE', re.compile('\\!\\=')),
        ('STR_DQ', re.compile('"([^"]|(\\"))*?"')),
        ('STR_SQ', re.compile("'([^']|(\\'))*?'")),
        ('OP_AND', re.compile('and')),
        ('OP_OR', re.compile('or')),
        ('END', re.compile('$')),
    ]
    def __init__(self, str):
        Scanner.__init__(self,None,['\\s+'],str)
class XPathParser(Parser):
Context = Context
def XPATH(self, _parent=None):
_context = self.Context(_parent, self._scanner, self._pos, 'XPATH', [])
PATH = self.PATH(_context)
result = PATH; current = result
while self._peek('END', '"/"', '"//"') != 'END':
PATH = self.PATH(_context)
current.childLocation = PATH; current = current.childLocation
if self._peek() not in ['END', '"/"', '"//"']:
raise SyntaxError(charpos=self._scanner.get_prev_char_pos(), context=_context, msg='Need one of ' + ', '.join(['END', '"/"', '"//"']))
END = self._scan('END')
return result
def PATH(self, _parent=None):
_context = self.Context(_parent, self._scanner, self._pos, 'PATH', [])
_token = self._peek('"/"', '"//"')
if _token == '"/"':
self._scan('"/"')
result = _Location()
else: # == '"//"'
self._scan('"//"')
result = _AnyLocation()
_token = self._peek('IDENTIFIER', 'WILDCARD')
if _token == 'IDENTIFIER':
IDENTIFIER = self._scan('IDENTIFIER')
result.elementName = IDENTIFIER
else: # == 'WILDCARD'
WILDCARD = self._scan('WILDCARD')
result.elementName = None
while self._peek('"\\["', 'END', '"/"', '"//"') == '"\\["':
self._scan('"\\["')
PREDICATE = self.PREDICATE(_context)
result.predicates.append(PREDICATE)
self._scan('"\\]"')
if self._peek() not in ['"\\["', 'END', '"/"', '"//"']:
raise SyntaxError(charpos=self._scanner.get_prev_char_pos(), context=_context, msg='Need one of ' + ', '.join(['"\\["', 'END', '"/"', '"//"']))
return result
def PREDICATE(self, _parent=None):
_context = self.Context(_parent, self._scanner, self._pos, 'PREDICATE', [])
_token = self._peek('INDEX', '"\\("', '"@"', 'FUNCNAME', 'STR_DQ', 'STR_SQ')
if _token != 'INDEX':
EXPR = self.EXPR(_context)
return EXPR
else: # == 'INDEX'
INDEX = self._scan('INDEX')
return IndexValue(INDEX)
def EXPR(self, _parent=None):
_context = self.Context(_parent, self._scanner, self._pos, 'EXPR', [])
FACTOR = self.FACTOR(_context)
e = FACTOR
while self._peek('OP_AND', 'OP_OR', '"\\)"', '"\\]"') in ['OP_AND', 'OP_OR']:
BOOLOP = self.BOOLOP(_context)
FACTOR = self.FACTOR(_context)
e = BooleanValue(e, BOOLOP, FACTOR)
if self._peek() not in ['OP_AND', 'OP_OR', '"\\)"', '"\\]"']:
raise SyntaxError(charpos=self._scanner.get_prev_char_pos(), context=_context, msg='Need one of ' + ', '.join(['OP_AND', 'OP_OR', '"\\)"', '"\\]"']))
return e
def BOOLOP(self, _parent=None):
_context = self.Context(_parent, self._scanner, self._pos, 'BOOLOP', [])
_token = self._peek('OP_AND', 'OP_OR')
if _token == 'OP_AND':
OP_AND = self._scan('OP_AND')
return OP_AND
else: # == 'OP_OR'
OP_OR = self._scan('OP_OR')
return OP_OR
def FACTOR(self, _parent=None):
_context = self.Context(_parent, self._scanner, self._pos, 'FACTOR', [])
_token = self._peek('"\\("', '"@"', 'FUNCNAME', 'STR_DQ', 'STR_SQ')
if _token != '"\\("':
TERM = self.TERM(_context)
return TERM
else: # == '"\\("'
self._scan('"\\("')
EXPR = self.EXPR(_context)
self._scan('"\\)"')
return EXPR
def TERM(self, _parent=None):
_context = self.Context(_parent, self._scanner, self._pos, 'TERM', [])
VALUE = self.VALUE(_context)
t = VALUE
if self._peek('CMP_EQ', 'CMP_NE', 'OP_AND', 'OP_OR', '"\\)"', '"\\]"') in ['CMP_EQ', 'CMP_NE']:
CMP = self.CMP(_context)
VALUE = self.VALUE(_context)
t = CompareValue(t, CMP, VALUE)
return t
def VALUE(self, _parent=None):
_context = self.Context(_parent, self._scanner, self._pos, 'VALUE', [])
_token = self._peek('"@"', 'FUNCNAME', 'STR_DQ', 'STR_SQ')
if _token == '"@"':
self._scan('"@"')
IDENTIFIER = self._scan('IDENTIFIER')
return AttribValue(IDENTIFIER)
elif _token == 'FUNCNAME':
FUNCNAME = self._scan('FUNCNAME')
f = Function(FUNCNAME); args = []
self._scan('"\\("')
if self._peek('"\\)"', '"@"', 'FUNCNAME', '","', 'STR_DQ', 'STR_SQ') not in ['"\\)"', '","']:
VALUE = self.VALUE(_context)
args.append(VALUE)
while self._peek('","', '"\\)"') == '","':
self._scan('","')
VALUE = self.VALUE(_context)
args.append(VALUE)
if self._peek() not in ['","', '"\\)"']:
raise SyntaxError(charpos=self._scanner.get_prev_char_pos(), context=_context, msg='Need one of ' + ', '.join(['","', '"\\)"']))
self._scan('"\\)"')
f.setParams(*args); return f
else: # in ['STR_DQ', 'STR_SQ']
STR = self.STR(_context)
return LiteralValue(STR[1:len(STR)-1])
def CMP(self, _parent=None):
    # Yapps-generated rule: CMP -> CMP_EQ | CMP_NE.
    # Returns the matched comparison-operator token text.
    _context = self.Context(_parent, self._scanner, self._pos, 'CMP', [])
    _token = self._peek('CMP_EQ', 'CMP_NE')
    if _token == 'CMP_EQ':
        CMP_EQ = self._scan('CMP_EQ')
        return CMP_EQ
    else: # == 'CMP_NE'
        CMP_NE = self._scan('CMP_NE')
        return CMP_NE
def STR(self, _parent=None):
    # Yapps-generated rule: STR -> STR_DQ | STR_SQ.
    # Returns the raw token text including its quotes (caller strips them).
    _context = self.Context(_parent, self._scanner, self._pos, 'STR', [])
    _token = self._peek('STR_DQ', 'STR_SQ')
    if _token == 'STR_DQ':
        STR_DQ = self._scan('STR_DQ')
        return STR_DQ
    else: # == 'STR_SQ'
        STR_SQ = self._scan('STR_SQ')
        return STR_SQ
def parse(rule, text):
    """Parse *text* starting at grammar rule *rule*.

    Errors are routed through wrap_error_reporter (Yapps runtime) instead
    of propagating to the caller.
    """
    P = XPathParser(XPathParserScanner(text))
    return wrap_error_reporter(P, rule)
if __name__ == '__main__':
    # Command-line driver: parse stdin (or a file) starting from the given
    # grammar rule and print the resulting parse value.
    import sys
    if len(sys.argv) >= 2:
        if len(sys.argv) >= 3:
            f = open(sys.argv[2], 'r')
        else:
            f = sys.stdin
        try:
            # print(x) with a single argument behaves the same on py2 and py3.
            print(parse(sys.argv[1], f.read()))
        finally:
            if f is not sys.stdin:
                f.close()  # don't leak the input file handle
    else:
        # BUG FIX: the original did "print >>sys.stderr, ..." but only
        # imported argv/stdin via "from sys import ...", so the name "sys"
        # was undefined and the usage message crashed with NameError.
        sys.stderr.write('Args: <rule> [<filename>]\n')
# End -- grammar generated by Yapps
| apache-2.0 |
att-comdev/armada | armada/common/policies/service.py | 1 | 1640 | # Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_policy import policy
from armada.common.policies import base
# Policy rules guarding the Armada REST API endpoints.  All four
# operations require the admin role (RULE_ADMIN_REQUIRED).
armada_policies = [
    policy.DocumentedRuleDefault(
        name=base.ARMADA % 'create_endpoints',
        check_str=base.RULE_ADMIN_REQUIRED,
        description='Install manifest charts',
        operations=[{'path': '/api/v1.0/apply/', 'method': 'POST'}]),
    policy.DocumentedRuleDefault(
        name=base.ARMADA % 'validate_manifest',
        check_str=base.RULE_ADMIN_REQUIRED,
        description='Validate manifest',
        operations=[{'path': '/api/v1.0/validatedesign/', 'method': 'POST'}]),
    policy.DocumentedRuleDefault(
        name=base.ARMADA % 'test_release',
        check_str=base.RULE_ADMIN_REQUIRED,
        description='Test release',
        operations=[{'path': '/api/v1.0/test/{release}', 'method': 'GET'}]),
    policy.DocumentedRuleDefault(
        name=base.ARMADA % 'test_manifest',
        check_str=base.RULE_ADMIN_REQUIRED,
        description='Test manifest',
        operations=[{'path': '/api/v1.0/tests/', 'method': 'POST'}]),
]
def list_rules():
    """Return the Armada policy rules for registration with oslo.policy."""
    return armada_policies
| apache-2.0 |
adelina-t/nova | nova/tests/functional/v3/test_hypervisors.py | 15 | 2770 | # Copyright 2012 Nebula, Inc.
# Copyright 2013 IBM Corp.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from nova.compute import api as compute_api
from nova.tests.functional.v3 import api_sample_base
class HypervisorsSampleJsonTests(api_sample_base.ApiSampleTestBaseV3):
    """API sample tests for the os-hypervisors extension (admin-only API).

    Each test issues a GET against an os-hypervisors URL and compares the
    response against the recorded API sample template.
    """
    ADMIN_API = True
    extension_name = "os-hypervisors"

    def test_hypervisors_list(self):
        # GET /os-hypervisors
        response = self._do_get('os-hypervisors')
        self._verify_response('hypervisors-list-resp', {}, response, 200)

    def test_hypervisors_search(self):
        # GET /os-hypervisors/{hostname}/search
        response = self._do_get('os-hypervisors/fake/search')
        self._verify_response('hypervisors-search-resp', {}, response, 200)

    def test_hypervisors_servers(self):
        # GET /os-hypervisors/{hostname}/servers
        response = self._do_get('os-hypervisors/fake/servers')
        self._verify_response('hypervisors-servers-resp', {}, response, 200)

    def test_hypervisors_detail(self):
        hypervisor_id = 1
        subs = {
            'hypervisor_id': hypervisor_id
        }
        response = self._do_get('os-hypervisors/detail')
        subs.update(self._get_regexes())
        self._verify_response('hypervisors-detail-resp', subs, response, 200)

    def test_hypervisors_show(self):
        hypervisor_id = 1
        subs = {
            'hypervisor_id': hypervisor_id
        }
        response = self._do_get('os-hypervisors/%s' % hypervisor_id)
        subs.update(self._get_regexes())
        self._verify_response('hypervisors-show-resp', subs, response, 200)

    def test_hypervisors_statistics(self):
        response = self._do_get('os-hypervisors/statistics')
        self._verify_response('hypervisors-statistics-resp', {}, response, 200)

    def test_hypervisors_uptime(self):
        # Stub out the host uptime call so the test does not touch a real
        # hypervisor; the canned string is matched by the sample template.
        def fake_get_host_uptime(self, context, hyp):
            return (" 08:32:11 up 93 days, 18:25, 12 users,  load average:"
                    " 0.20, 0.12, 0.14")

        self.stubs.Set(compute_api.HostAPI,
                       'get_host_uptime', fake_get_host_uptime)
        hypervisor_id = 1
        response = self._do_get('os-hypervisors/%s/uptime' % hypervisor_id)
        subs = {
            'hypervisor_id': hypervisor_id,
        }
        self._verify_response('hypervisors-uptime-resp', subs, response, 200)
| apache-2.0 |
priomsrb/fixthecode | wsgi/fixthecode/wsgi.py | 1 | 1431 | """
WSGI config for fixthecode project.
This module contains the WSGI application used by Django's development server
and any production WSGI deployments. It should expose a module-level variable
named ``application``. Django's ``runserver`` and ``runfcgi`` commands discover
this application via the ``WSGI_APPLICATION`` setting.
Usually you will have the standard Django WSGI application here, but it also
might make sense to replace the whole Django WSGI application with a custom one
that later delegates to the Django one. For example, you could introduce WSGI
middleware here, or combine a Django application with an application of another
framework.
"""
import os
# We defer to a DJANGO_SETTINGS_MODULE already in the environment. This breaks
# if running multiple sites in the same mod_wsgi process. To fix this, use
# mod_wsgi daemon mode with each site in its own daemon process, or use
# os.environ["DJANGO_SETTINGS_MODULE"] = "fixthecode.settings"
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "fixthecode.settings")
# This application object is used by any WSGI server configured to use this
# file. This includes Django's development server, if the WSGI_APPLICATION
# setting points here.
from django.core.wsgi import get_wsgi_application
application = get_wsgi_application()
# Apply WSGI middleware here.
# from helloworld.wsgi import HelloWorldApplication
# application = HelloWorldApplication(application)
| gpl-2.0 |
kingfisher1337/tns | tnslib/tebd.py | 1 | 14341 | import numpy as np
from numpy import dot, einsum
from numpy import tensordot as tdot
from scipy.optimize import minimize
import ctm
import gates
def _make_double_layer_tensor(a, D):
return einsum(a, [8,0,2,4,6], a.conj(), [8,1,3,5,7]).reshape([D**2]*4)
def _itebd_square_fu_singlebond(a, b, abg, env):
    # NOTE(review): this looks like an unfinished stub -- the tensordot
    # result is discarded and the function implicitly returns None, and
    # nothing in this module calls it.  Confirm intent before removing.
    tdot(b, b.conj(), [0,0])
def _itebd_square_cost_fct(env, abg, p, D):
def cost_fct_impl(m):
m = m.reshape(D**6, p**2)
mH = m.conj().transpose()
return dot(env, dot(m,mH).reshape(D**12)) - 2.0 * np.real(dot(env, dot(abg,mH).reshape(D**12)))
return cost_fct_impl
def _itebd_square_fu_bond(p, D, a, b, g, env, err=1e-6, max_iterations=100):
    """Full update of a single horizontal a-b bond of the iPEPS.

    Applies the two-site gate *g* across the shared bond of *a* and *b* and
    then alternately solves linear least-squares problems for the updated
    a- and b-tensors (sweeping) until the quadratic cost is stationary
    (change < *err*) or *max_iterations* is exhausted.

    Environment leg layout (6-site CTM environment around the bond);
    NOTE(review): the diagram's spacing was reconstructed -- confirm the leg
    numbering against ctm.toarray1x2ab:

    env6---+---+-----+
    |      |   |     |
    |      0   1     |
    +--5         2---+
    |      4   3     |
    |      |   |     |
    +------+---+-----+
    """
    idmat = np.identity(D, dtype=float)
    envVec = env.reshape(D**12)
    # Both site tensors with the gate applied across the shared bond.
    abg = einsum(a, [9,0,8,4,5], b, [10,1,2,3,8], g, [9,10,6,7])
    b2 = b
    d3 = None
    for j in xrange(max_iterations):
        d2 = None
        for j2 in xrange(max_iterations):
            # Solve R . a2 = S for the updated a-tensor (b2 held fixed).
            S = tdot(
                envVec,
                einsum(
                    tdot(abg, b2.conj(), [7,0]), [0,2,4,6,8,10,12,3,5,7,14],
                    idmat, [1,13],
                    idmat, [9,15],
                    idmat, [11,16]
                ).reshape(D**12,p,D,D,D,D),
                [0,0]
            ).reshape(p*D**4)
            R = einsum(
                tdot(_make_double_layer_tensor(b2,D), env, [[0,1,2],[1,2,3]]).reshape([D]*8), [2,7,1,6,3,8,4,9],
                idmat, [0,5]).reshape([p*D**4]*2)
            a2vec = np.linalg.lstsq(R, S)[0]
            a2 = a2vec.reshape(p,D,D,D,D)
            # Quadratic cost <a2|R|a2> - 2 Re <a2|S>; stop when stationary.
            d = dot(a2vec.conj(), dot(R, a2vec)) - 2.0 * np.real(dot(a2vec.conj(), S))
            if d2 is not None and np.abs(d-d2) < err:
                break
            d2 = d
        d2 = None
        for j2 in xrange(max_iterations):
            # Same least-squares step for the b-tensor (a2 held fixed).
            S = tdot(
                envVec,
                einsum(
                    tdot(abg, a2.conj(), [6,0]), [0,2,4,6,8,10,12,1,16,9,11],
                    idmat, [3,13],
                    idmat, [5,14],
                    idmat, [7,15]
                ).reshape(D**12,p,D,D,D,D),
                [0,0]
            ).reshape(p*D**4)
            R = einsum(
                tdot(_make_double_layer_tensor(a2,D), env, [[0,2,3],[0,4,5]]).reshape([D]*8), [4,9,1,6,2,7,3,8],
                idmat, [0,5]).reshape([p*D**4]*2)
            b2vec = np.linalg.lstsq(R, S)[0]
            b2 = b2vec.reshape(p,D,D,D,D)
            d = dot(b2vec.conj(), dot(R, b2vec)) - 2.0 * np.real(dot(b2vec.conj(), S))
            if d2 is not None and np.abs(d-d2) < err:
                break
            d2 = d
        # Outer sweep converged when the cost no longer moves between sweeps.
        if d3 is not None and np.abs(d-d3) < err:
            break
        d3 = d
    return a2, b2
def itebd_square(a, b, gx, gy, chi, ctmrgerr=1e-6, ctmrg_max_iterations=1000000, tebd_max_iterations=1000000, tebd_update_err=1e-5, tebd_update_max_iterations=100, env=None):
    """Full-update iTEBD on the square lattice with a two-site (a, b) cell.

    gx / gy are the two-site Trotter gates for horizontal / vertical bonds;
    *chi* is the CTMRG environment bond dimension.  Convergence is monitored
    through the <sigma_z> estimate on sublattice a; iteration stops once it
    changes by less than 1e-6.  Returns the updated tensors (a, b) and the
    converged CTM environment.

    Cleanup notes: an ~80-line obsolete inline implementation of the four
    bond updates, kept as a no-op triple-quoted string inside the loop, was
    removed, along with the then-unused ``kronecker`` helper (it was only
    referenced from that dead code) and the commented-out debug prints.
    Live behavior is unchanged.
    """
    p, D = a.shape[:2]
    # Gates with the two sites exchanged, used for the b-a ordered bonds.
    gx2 = gx.swapaxes(0,1).swapaxes(2,3)
    gy2 = gy.swapaxes(0,1).swapaxes(2,3)
    aDL = _make_double_layer_tensor(a, D)
    bDL = _make_double_layer_tensor(b, D)
    mz = None
    for j in xrange(tebd_max_iterations):
        env2 = env
        mz2 = mz
        env, env2, err, num_iterations = ctm.ctmrg_square_2x2(aDL, bDL, chi, err=ctmrgerr, env=env2, iteration_bunch=10)
        # <sigma_z> on sublattice a serves as the convergence observable.
        xDL = einsum(einsum(a, [5,1,2,3,4], gates.sigmaz, [0,5]), [9,0,2,4,8], a.conj(), [9,1,3,5,7]).reshape(D**8)
        e = env.toarray1x1a(aDL, bDL).reshape(D**8)
        mz = dot(e, xDL) / dot(e, aDL.reshape(D**8))
        if mz2 is not None and np.abs(mz - mz2) < 1e-6:
            break
        # Update the four inequivalent bonds.  Vertical bonds are handled by
        # rolling the virtual legs so that _itebd_square_fu_bond always sees
        # a horizontal geometry, then rolling back afterwards.
        a, b = _itebd_square_fu_bond(p, D, a, b, gx, env.toarray1x2ab(aDL,bDL))
        b, a = _itebd_square_fu_bond(p, D, np.rollaxis(b,1,5), np.rollaxis(a,1,5), gy2, np.rollaxis(env.toarray2x1ab(aDL,bDL),0,6))
        a, b = np.rollaxis(a,4,1), np.rollaxis(b,4,1)
        b, a = _itebd_square_fu_bond(p, D, b, a, gx2, env.toarray1x2ba(aDL,bDL))
        a, b = _itebd_square_fu_bond(p, D, np.rollaxis(a,1,5), np.rollaxis(b,1,5), gy, np.rollaxis(env.toarray2x1ba(aDL,bDL),0,6))
        a, b = np.rollaxis(a,4,1), np.rollaxis(b,4,1)
        # Keep the tensors numerically well-scaled between iterations.
        a /= np.max(np.abs(a))
        b /= np.max(np.abs(b))
        aDL = _make_double_layer_tensor(a, D)
        bDL = _make_double_layer_tensor(b, D)
    return a, b, env
def _itebd_square_pepo_invsymm_cost(R, S, D, kappa):
def _itebd_square_pepo_invsymm_cost_impl(U):
U = U.reshape(kappa*D, D)
U2 = tdot(U, U, [1,1])
return einsum(R, [0,1,2,3], U2, [0,1], U2, [2,3]) - 2.0 * einsum(S, [0,1], U2, [0,1])
return _itebd_square_pepo_invsymm_cost_impl
def _init_downscaling_costfct(a, a2, D, kappa):
def _init_downscaling_costfct_impl(x):
U = x[:kappa*D**2].reshape(kappa*D, D)
V = x[kappa*D**2:].reshape(kappa*D, D)
aTest = einsum(a2, [0,5,6,7,8], V, [5,1], U, [6,2], V, [7,3], U, [8,4])
return np.sum(np.abs(a - aTest))
return _init_downscaling_costfct_impl
def itebd_square_pepo_invsymm(a, g, chi, env=None):
    # iTEBD with an iPEPO gate for a single-site, inversion-symmetric iPEPS.
    #
    # WARNING(review): this function contains leftover debugging code.  The
    # two bare exit() calls below terminate the whole process, so everything
    # after the first exit() -- including the main update loop -- is
    # unreachable.  This looks like work in progress; do not call it.
    if np.sum(np.abs(a - a.swapaxes(1,3))) > 1e-15:
        raise ValueError("given iPEPS is not invariant under spatial inversion")
    if np.sum(np.abs(a - a.swapaxes(2,4))) > 1e-15:
        raise ValueError("given iPEPS is not invariant under spatial inversion")
    if np.sum(np.abs(g - g.swapaxes(2,4))) > 1e-15:
        raise ValueError("given iPEPO is not invariant under spatial inversion")
    if np.sum(np.abs(g - g.swapaxes(3,5))) > 1e-15:
        raise ValueError("given iPEPO is not invariant under spatial inversion")
    p, D = a.shape[:2]
    kappa = g.shape[1]
    mz = None
    # Initial down-scaling isometries (kappa*D -> D), seeded with a
    # truncated identity.
    U2 = V2 = np.fromfunction(np.vectorize(lambda j,k: 1. if j==k else 0), (kappa*D,D), dtype=int)
    # a2: iPEPS tensor with the iPEPO gate applied (enlarged virtual legs).
    a2 = einsum(a, [9,1,3,5,7], g, [9,0,2,4,6,8]).reshape([p] + [kappa*D]*4)
    a3 = einsum(a2, [0,5,6,7,8], V2, [5,1], U2, [6,2], V2, [7,3], U2, [8,4])
    print a-a3
    print np.max(np.abs(a-a3))
    exit()  # debugging: execution never proceeds past this point
    x = minimize(_init_downscaling_costfct(a, a2, D, kappa), np.concatenate([U2.flatten(), V2.flatten()]))
    U2 = x.x[:kappa*D**2].reshape(kappa*D, D)
    V2 = x.x[kappa*D**2:].reshape(kappa*D, D)
    print x
    #print U
    #print V
    exit()  # debugging
    for j in xrange(5):
        aDL = _make_double_layer_tensor(a, D)
        env, env2, err, num_iterations = ctm.ctmrg_square_1x1_invsymm(aDL, chi, env=env, verbose=True)
        # <sigma_z> estimate used as convergence observable.
        xDL = einsum(einsum(a, [5,1,2,3,4], gates.sigmaz, [0,5]), [9,0,2,4,8], a.conj(), [9,1,3,5,7]).reshape(D**8)
        e = env.toarray1x1().reshape(D**8)
        mz, mz2 = dot(e, xDL) / dot(e, aDL.reshape(D**8)), mz
        if mz2 is not None:
            print "[itebd_square_pepo_invsymm] mz estimate: {:.15e}; err: {:.15e}".format(mz, np.abs(mz-mz2))
            if np.abs(mz-mz2) < 1e-6:
                break
        a2 = einsum(a, [9,1,3,5,7], g, [9,0,2,4,6,8]).reshape([p] + [kappa*D]*4)
        # Fit the horizontal isometry U from the 1x2 environment.
        e = env.toarray1x2()
        a2L = einsum(a2, [0,5,2,6,4], V2, [5,1], V2, [6,3])
        a2R = einsum(a2L, [0,1,5,3,4], U2, [5,2]).reshape(p,D,kappa*D,D,D)
        a2L = einsum(a2L, [0,1,2,3,5], U2, [5,4]).reshape(p,D,D,D,kappa*D)
        a2L = einsum(a2L, [8,0,2,4,6], a2L, [8,1,3,5,7]).reshape(D**2,(kappa*D)**2,D**2,D**2)
        a2R = einsum(a2R, [8,0,2,4,6], a2R, [8,1,3,5,7]).reshape(D**2,D**2,D**2,(kappa*D)**2)
        R = einsum(einsum(e, [4,1,2,3,5,6], a2L, [4,0,5,6]), [0,2,3,4], a2R, [2,3,4,1]).reshape([kappa*D]*4).swapaxes(1,2)
        S = einsum(R, [2,2,0,1])
        U = minimize(_itebd_square_pepo_invsymm_cost(R,S,D,kappa), U2.reshape(kappa*D**2)).x.reshape(kappa*D, D)
        #print U
        # Fit the vertical isometry V from the 2x1 environment.
        e = env.toarray2x1()
        a2U = einsum(a2, [0,1,5,3,6], U2, [5,2], U2, [6,4])
        a2D = einsum(a2U, [0,1,2,5,4], V2, [5,3])
        a2U = einsum(a2U, [0,5,2,3,4], V2, [5,1])
        a2U = einsum(a2U, [8,0,2,4,6], a2U, [8,1,3,5,7]).reshape(D**2,D**2,(kappa*D)**2,D**2)
        a2D = einsum(a2D, [8,0,2,4,6], a2D, [8,1,3,5,7]).reshape((kappa*D)**2,D**2,D**2,D**2)
        R = einsum(e, [2,3,4,5,6,7], a2U, [2,3,0,7], a2D, [1,4,5,6]).reshape([kappa*D]*4).swapaxes(1,2)
        S = einsum(R, [2,2,0,1])
        V = minimize(_itebd_square_pepo_invsymm_cost(R,S,D,kappa), V2.reshape(kappa*D**2)).x.reshape(kappa*D, D)
        #print V
        # Apply the fitted isometries to obtain the down-scaled iPEPS tensor.
        a = einsum(a2, [0,5,6,7,8], V, [5,1], U, [6,2], V, [7,3], U, [8,4])
        U2,V2 = U,V
    print a
    return a, env, j+1
| gpl-3.0 |
indictranstech/fbd_erpnext | erpnext/accounts/report/gst_sales_details/gst_sales_details.py | 1 | 1362 | # Copyright (c) 2013, Frappe Technologies Pvt. Ltd. and contributors
# For license information, please see license.txt
from __future__ import unicode_literals
import frappe
from frappe.utils import flt, getdate, cstr
from frappe import _
from datetime import date, timedelta
def execute(filters=None):
    """Report entry point: validate *filters*, then build columns and rows."""
    validate_filters(filters)
    columns = get_columns()
    rows = get_result(filters)
    return columns, rows
def validate_filters(filters):
    """Abort with a user-facing error when the date range is inverted."""
    if filters.from_date > filters.to_date:
        frappe.throw(_("To Date must be greater than From Date"))
def get_columns():
    """Return the report column definitions ("Label:Fieldtype:Width")."""
    return [_("Customer Name") + ":Link/Customer:180",_("Date") + ":Date:120", _("ID") + ":Link/Sales Invoice:120",
        _("Rate") + ":Float:120", _("Sales Value") + ":Float:120", _("GST Collected") + ":Float:150",
        _("Gross") + ":Float:150"]
def get_result(filters):
    """Fetch GST output rows from `tabGST Details` as a list of lists.

    NOTE(review): the condition string is interpolated directly into the
    SQL -- see get_conditions; values must be trusted.
    """
    data = []
    conditions = get_conditions(filters)
    data = frappe.db.sql("""select customer_name, date, form_id, output_rate, output_sales_value, output_gst_collected,
        output_gross from `tabGST Details` where gst_type = '-GST Output' %s """ % conditions, as_list=1)
    return data
def get_conditions(filters):
    """Build the SQL date-range condition string for the GST details query.

    Returns "" when no date filter is set, otherwise one or both of
    " and date >= '...'" / " and date <= '...'".

    BUG FIX: the original emitted " and date between 'X'" and " and 'Y'"
    as two *independent* fragments, so supplying only from_date or only
    to_date produced invalid SQL; the query only worked by accident when
    both were set.  Using explicit >=/<= keeps the both-dates behavior
    identical (BETWEEN is inclusive) and fixes the single-date cases.
    """
    conditions = ""
    if filters.get("from_date"):
        conditions += " and date >= '%s'" % filters["from_date"]
    if filters.get("to_date"):
        conditions += " and date <= '%s'" % filters["to_date"]
    # NOTE(review): values are still string-interpolated into SQL; callers
    # must pass trusted date strings.  Parameterized frappe.db.sql values
    # would be the safer long-term fix.
    return conditions
| agpl-3.0 |
guorendong/iridium-browser-ubuntu | third_party/mojo/src/mojo/public/tools/bindings/pylib/mojom/parse/parser.py | 32 | 13000 | # Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Generates a syntax tree from a Mojo IDL file."""
import imp
import os.path
import sys
def _GetDirAbove(dirname):
  """Returns the directory "above" this file containing |dirname| (which must
  also be "above" this file)."""
  # Walk upward from this file's absolute path one component at a time.
  path = os.path.abspath(__file__)
  while True:
    path, tail = os.path.split(path)
    # If |dirname| is never found we eventually split down to the filesystem
    # root, where tail becomes '' and the assert fires instead of looping.
    assert tail
    if tail == dirname:
      return path
try:
imp.find_module("ply")
except ImportError:
sys.path.append(os.path.join(_GetDirAbove("public"), "public/third_party"))
from ply import lex
from ply import yacc
from ..error import Error
from . import ast
from .lexer import Lexer
# Mojom ordinals (@N) and fixed array sizes are stored as uint32 values,
# so both are capped at 2**32 - 1.
_MAX_ORDINAL_VALUE = 0xffffffff
_MAX_ARRAY_SIZE = 0xffffffff
class ParseError(Error):
  """Class for errors from the parser."""

  def __init__(self, filename, message, lineno=None, snippet=None):
    # |snippet|, when given, is forwarded as an addendum on the base Error.
    Error.__init__(self, filename, message, lineno=lineno,
                   addenda=([snippet] if snippet else None))
# We have methods which look like they could be functions:
# pylint: disable=R0201
class Parser(object):
  """ply.yacc grammar for the Mojo IDL.

  NOTE: the docstrings of the p_* methods below ARE the grammar -- ply reads
  them via reflection -- so they must not be reworded.
  """

  def __init__(self, lexer, source, filename):
    # |source| and |filename| are kept for error reporting/snippets.
    self.tokens = lexer.tokens
    self.source = source
    self.filename = filename

  # Names of functions
  #
  # In general, we name functions after the left-hand-side of the rule(s) that
  # they handle. E.g., |p_foo_bar| for a rule |foo_bar : ...|.
  #
  # There may be multiple functions handling rules for the same left-hand-side;
  # then we name the functions |p_foo_bar_N| (for left-hand-side |foo_bar|),
  # where N is a number (numbered starting from 1). Note that using multiple
  # functions is actually more efficient than having single functions handle
  # multiple rules (and, e.g., distinguishing them by examining |len(p)|).
  #
  # It's also possible to have a function handling multiple rules with different
  # left-hand-sides. We do not do this.
  #
  # See http://www.dabeaz.com/ply/ply.html#ply_nn25 for more details.

  # TODO(vtl): Get rid of the braces in the module "statement". (Consider
  # renaming "module" -> "package".) Then we'll be able to have a single rule
  # for root (by making module "optional").
  def p_root_1(self, p):
    """root : """
    p[0] = ast.Mojom(None, ast.ImportList(), [])

  def p_root_2(self, p):
    """root : root module"""
    if p[1].module is not None:
      raise ParseError(self.filename,
                       "Multiple \"module\" statements not allowed:",
                       p[2].lineno, snippet=self._GetSnippet(p[2].lineno))
    if p[1].import_list.items or p[1].definition_list:
      raise ParseError(
          self.filename,
          "\"module\" statements must precede imports and definitions:",
          p[2].lineno, snippet=self._GetSnippet(p[2].lineno))
    p[0] = p[1]
    p[0].module = p[2]

  def p_root_3(self, p):
    """root : root import"""
    if p[1].definition_list:
      raise ParseError(self.filename,
                       "\"import\" statements must precede definitions:",
                       p[2].lineno, snippet=self._GetSnippet(p[2].lineno))
    p[0] = p[1]
    p[0].import_list.Append(p[2])

  def p_root_4(self, p):
    """root : root definition"""
    p[0] = p[1]
    p[0].definition_list.append(p[2])

  def p_import(self, p):
    """import : IMPORT STRING_LITERAL SEMI"""
    # 'eval' the literal to strip the quotes.
    # TODO(vtl): This eval is dubious. We should unquote/unescape ourselves.
    p[0] = ast.Import(eval(p[2]), filename=self.filename, lineno=p.lineno(2))

  def p_module(self, p):
    """module : attribute_section MODULE identifier_wrapped SEMI"""
    p[0] = ast.Module(p[3], p[1], filename=self.filename, lineno=p.lineno(2))

  def p_definition(self, p):
    """definition : struct
                  | union
                  | interface
                  | enum
                  | const"""
    p[0] = p[1]

  def p_attribute_section_1(self, p):
    """attribute_section : """
    p[0] = None

  def p_attribute_section_2(self, p):
    """attribute_section : LBRACKET attribute_list RBRACKET"""
    p[0] = p[2]

  def p_attribute_list_1(self, p):
    """attribute_list : """
    p[0] = ast.AttributeList()

  def p_attribute_list_2(self, p):
    """attribute_list : nonempty_attribute_list"""
    p[0] = p[1]

  def p_nonempty_attribute_list_1(self, p):
    """nonempty_attribute_list : attribute"""
    p[0] = ast.AttributeList(p[1])

  def p_nonempty_attribute_list_2(self, p):
    """nonempty_attribute_list : nonempty_attribute_list COMMA attribute"""
    p[0] = p[1]
    p[0].Append(p[3])

  def p_attribute(self, p):
    """attribute : NAME EQUALS evaled_literal
                 | NAME EQUALS NAME"""
    p[0] = ast.Attribute(p[1], p[3], filename=self.filename, lineno=p.lineno(1))

  def p_evaled_literal(self, p):
    """evaled_literal : literal"""
    # 'eval' the literal to strip the quotes.
    p[0] = eval(p[1])

  def p_struct(self, p):
    """struct : attribute_section STRUCT NAME LBRACE struct_body RBRACE SEMI"""
    p[0] = ast.Struct(p[3], p[1], p[5])

  def p_struct_body_1(self, p):
    """struct_body : """
    p[0] = ast.StructBody()

  def p_struct_body_2(self, p):
    """struct_body : struct_body const
                   | struct_body enum
                   | struct_body struct_field"""
    p[0] = p[1]
    p[0].Append(p[2])

  def p_struct_field(self, p):
    """struct_field : attribute_section typename NAME ordinal default SEMI"""
    p[0] = ast.StructField(p[3], p[1], p[4], p[2], p[5])

  def p_union(self, p):
    """union : attribute_section UNION NAME LBRACE union_body RBRACE SEMI"""
    p[0] = ast.Union(p[3], p[1], p[5])

  def p_union_body_1(self, p):
    """union_body : """
    p[0] = ast.UnionBody()

  def p_union_body_2(self, p):
    """union_body : union_body union_field"""
    p[0] = p[1]
    p[1].Append(p[2])

  def p_union_field(self, p):
    """union_field : attribute_section typename NAME ordinal SEMI"""
    p[0] = ast.UnionField(p[3], p[1], p[4], p[2])

  def p_default_1(self, p):
    """default : """
    p[0] = None

  def p_default_2(self, p):
    """default : EQUALS constant"""
    p[0] = p[2]

  def p_interface(self, p):
    """interface : attribute_section INTERFACE NAME LBRACE interface_body \
                   RBRACE SEMI"""
    p[0] = ast.Interface(p[3], p[1], p[5])

  def p_interface_body_1(self, p):
    """interface_body : """
    p[0] = ast.InterfaceBody()

  def p_interface_body_2(self, p):
    """interface_body : interface_body const
                      | interface_body enum
                      | interface_body method"""
    p[0] = p[1]
    p[0].Append(p[2])

  def p_response_1(self, p):
    """response : """
    p[0] = None

  def p_response_2(self, p):
    """response : RESPONSE LPAREN parameter_list RPAREN"""
    p[0] = p[3]

  def p_method(self, p):
    """method : attribute_section NAME ordinal LPAREN parameter_list RPAREN \
                response SEMI"""
    p[0] = ast.Method(p[2], p[1], p[3], p[5], p[7])

  def p_parameter_list_1(self, p):
    """parameter_list : """
    p[0] = ast.ParameterList()

  def p_parameter_list_2(self, p):
    """parameter_list : nonempty_parameter_list"""
    p[0] = p[1]

  def p_nonempty_parameter_list_1(self, p):
    """nonempty_parameter_list : parameter"""
    p[0] = ast.ParameterList(p[1])

  def p_nonempty_parameter_list_2(self, p):
    """nonempty_parameter_list : nonempty_parameter_list COMMA parameter"""
    p[0] = p[1]
    p[0].Append(p[3])

  def p_parameter(self, p):
    """parameter : attribute_section typename NAME ordinal"""
    p[0] = ast.Parameter(p[3], p[1], p[4], p[2],
                         filename=self.filename, lineno=p.lineno(3))

  def p_typename(self, p):
    """typename : nonnullable_typename QSTN
                | nonnullable_typename"""
    # A trailing '?' marks the type as nullable.
    if len(p) == 2:
      p[0] = p[1]
    else:
      p[0] = p[1] + "?"

  def p_nonnullable_typename(self, p):
    """nonnullable_typename : basictypename
                            | array
                            | fixed_array
                            | associative_array
                            | interfacerequest"""
    p[0] = p[1]

  def p_basictypename(self, p):
    """basictypename : identifier
                     | handletype"""
    p[0] = p[1]

  def p_handletype(self, p):
    """handletype : HANDLE
                  | HANDLE LANGLE NAME RANGLE"""
    if len(p) == 2:
      p[0] = p[1]
    else:
      if p[3] not in ('data_pipe_consumer',
                      'data_pipe_producer',
                      'message_pipe',
                      'shared_buffer'):
        # Note: We don't enable tracking of line numbers for everything, so we
        # can't use |p.lineno(3)|.
        raise ParseError(self.filename, "Invalid handle type %r:" % p[3],
                         lineno=p.lineno(1),
                         snippet=self._GetSnippet(p.lineno(1)))
      p[0] = "handle<" + p[3] + ">"

  def p_array(self, p):
    """array : ARRAY LANGLE typename RANGLE"""
    p[0] = p[3] + "[]"

  def p_fixed_array(self, p):
    """fixed_array : ARRAY LANGLE typename COMMA INT_CONST_DEC RANGLE"""
    value = int(p[5])
    if value == 0 or value > _MAX_ARRAY_SIZE:
      raise ParseError(self.filename, "Fixed array size %d invalid:" % value,
                       lineno=p.lineno(5),
                       snippet=self._GetSnippet(p.lineno(5)))
    p[0] = p[3] + "[" + p[5] + "]"

  def p_associative_array(self, p):
    """associative_array : MAP LANGLE identifier COMMA typename RANGLE"""
    p[0] = p[5] + "{" + p[3] + "}"

  def p_interfacerequest(self, p):
    """interfacerequest : identifier AMP"""
    p[0] = p[1] + "&"

  def p_ordinal_1(self, p):
    """ordinal : """
    p[0] = None

  def p_ordinal_2(self, p):
    """ordinal : ORDINAL"""
    # Strip the leading '@' before converting.
    value = int(p[1][1:])
    if value > _MAX_ORDINAL_VALUE:
      raise ParseError(self.filename, "Ordinal value %d too large:" % value,
                       lineno=p.lineno(1),
                       snippet=self._GetSnippet(p.lineno(1)))
    p[0] = ast.Ordinal(value, filename=self.filename, lineno=p.lineno(1))

  def p_enum(self, p):
    """enum : attribute_section ENUM NAME LBRACE nonempty_enum_value_list \
              RBRACE SEMI
            | attribute_section ENUM NAME LBRACE nonempty_enum_value_list \
              COMMA RBRACE SEMI"""
    p[0] = ast.Enum(p[3], p[1], p[5], filename=self.filename,
                    lineno=p.lineno(2))

  def p_nonempty_enum_value_list_1(self, p):
    """nonempty_enum_value_list : enum_value"""
    p[0] = ast.EnumValueList(p[1])

  def p_nonempty_enum_value_list_2(self, p):
    """nonempty_enum_value_list : nonempty_enum_value_list COMMA enum_value"""
    p[0] = p[1]
    p[0].Append(p[3])

  def p_enum_value(self, p):
    """enum_value : attribute_section NAME
                  | attribute_section NAME EQUALS int
                  | attribute_section NAME EQUALS identifier_wrapped"""
    p[0] = ast.EnumValue(p[2], p[1], p[4] if len(p) == 5 else None,
                         filename=self.filename, lineno=p.lineno(2))

  def p_const(self, p):
    """const : CONST typename NAME EQUALS constant SEMI"""
    p[0] = ast.Const(p[3], p[2], p[5])

  def p_constant(self, p):
    """constant : literal
                | identifier_wrapped"""
    p[0] = p[1]

  def p_identifier_wrapped(self, p):
    """identifier_wrapped : identifier"""
    p[0] = ('IDENTIFIER', p[1])

  # TODO(vtl): Make this produce a "wrapped" identifier (probably as an
  # |ast.Identifier|, to be added) and get rid of identifier_wrapped.
  def p_identifier(self, p):
    """identifier : NAME
                  | NAME DOT identifier"""
    p[0] = ''.join(p[1:])

  def p_literal(self, p):
    """literal : int
               | float
               | TRUE
               | FALSE
               | DEFAULT
               | STRING_LITERAL"""
    p[0] = p[1]

  def p_int(self, p):
    """int : int_const
           | PLUS int_const
           | MINUS int_const"""
    p[0] = ''.join(p[1:])

  def p_int_const(self, p):
    """int_const : INT_CONST_DEC
                 | INT_CONST_HEX"""
    p[0] = p[1]

  def p_float(self, p):
    """float : FLOAT_CONST
             | PLUS FLOAT_CONST
             | MINUS FLOAT_CONST"""
    p[0] = ''.join(p[1:])

  def p_error(self, e):
    if e is None:
      # Unexpected EOF.
      # TODO(vtl): Can we figure out what's missing?
      raise ParseError(self.filename, "Unexpected end of file")

    raise ParseError(self.filename, "Unexpected %r:" % e.value, lineno=e.lineno,
                     snippet=self._GetSnippet(e.lineno))

  def _GetSnippet(self, lineno):
    # Source line numbers are 1-indexed.
    return self.source.split('\n')[lineno - 1]
def Parse(source, filename):
  """Parse mojom text |source| and return its AST.

  |filename| is used only for error reporting.  Raises ParseError on
  invalid input.
  """
  lexer = Lexer(filename)
  parser = Parser(lexer, source, filename)
  lex.lex(object=lexer)
  # debug=0/write_tables=0: don't emit parser.out / parsetab.py artifacts.
  yacc.yacc(module=parser, debug=0, write_tables=0)
  tree = yacc.parse(source)
  return tree
| bsd-3-clause |
niboshi/chainer | tests/chainer_tests/functions_tests/array_tests/test_concat.py | 8 | 3200 | import unittest
import numpy
from chainer import functions
from chainer import testing
from chainer.testing import backend
# Backend matrix: CPU with and without iDeep, CUDA GPU, and ChainerX on
# native and CUDA devices.
@backend.inject_backend_tests(
    None,
    # CPU tests
    testing.product({
        'use_cuda': [False],
        'use_ideep': ['never', 'always'],
    })
    # GPU tests
    + [{'use_cuda': True}]
    # ChainerX tests
    + [
        {'use_chainerx': True, 'chainerx_device': 'native:0'},
        {'use_chainerx': True, 'chainerx_device': 'cuda:0'},
    ])
@testing.parameterize(*testing.product_dict(
    [
        {'shape': (2, 7, 3), 'axis': 1,
         'slices': [(slice(None), slice(None, 2)), (slice(None), slice(2, 5)),
                    (slice(None), slice(5, None))]},
        {'shape': (7, 3), 'axis': 0,
         'slices': [slice(None, 2), slice(2, 5), slice(5, None)]},
        {'shape': (2,), 'axis': 0, 'slices': [slice(None, 1), slice(1, None)]},
        {'shape': (2,), 'axis': 0, 'slices': [()]},
        {'shape': (2, 7, 3), 'axis': 1,
         'slices': [(slice(None), slice(None, 2)), (slice(None), slice(2, 5)),
                    (slice(None), slice(5, None))]},
        {'shape': (2, 7, 3), 'axis': 1,
         'slices': [(slice(None), slice(None, 2)), (slice(None), slice(2, 5)),
                    (slice(None), slice(5, None))]},
        {'shape': (2, 7, 3), 'axis': -2,
         'slices': [(slice(None), slice(None, 2)), (slice(None), slice(2, 5)),
                    (slice(None), slice(5, None))]},
        {'shape': (7, 3, 2, 2), 'axis': 0,
         'slices': [slice(None, 2), slice(2, 5), slice(5, None)]},
        {'shape': (2, 7, 3, 5), 'axis': 1,
         'slices': [(slice(None), slice(None, 2), slice(None)),
                    (slice(None), slice(2, 5), slice(None)),
                    (slice(None), slice(5, None), slice(None))]},
        {'shape': (2, 7, 3, 5), 'axis': -1,
         'slices': [(slice(None), slice(None), slice(None), slice(None, 2)),
                    (slice(None), slice(None), slice(None), slice(2, 3)),
                    (slice(None), slice(None), slice(None), slice(3, None))]},
        {'shape': (2, 7, 3, 5), 'axis': -3,
         'slices': [(slice(None), slice(None, 2), slice(None)),
                    (slice(None), slice(2, 5), slice(None)),
                    (slice(None), slice(5, None), slice(None))]},
    ],
    [
        {'dtype': numpy.float16},
        {'dtype': numpy.float32},
        {'dtype': numpy.float64},
    ],
))
class TestConcat(testing.FunctionTestCase):
    """functions.concat tests over shapes, axes (incl. negative) and dtypes.

    numpy.concatenate over the same pieces serves as the reference.
    """

    def generate_inputs(self):
        # Slice one random array into pieces that concatenate back to it.
        shape = self.shape
        dtype = self.dtype
        y = numpy.random.uniform(-1, 1, shape).astype(dtype)
        xs = tuple([y[s] for s in self.slices])
        return xs

    def forward(self, inputs, device):
        y = functions.concat(inputs, self.axis)
        return y,

    def forward_expected(self, inputs):
        y = numpy.concatenate(inputs, self.axis)
        return y,
class TestConcatInvalidAxisType(unittest.TestCase):
    """concat must reject a non-integer ``axis`` argument with TypeError."""

    # Renamed from ``test_invlaid_axis_type`` (typo).  unittest discovers
    # test methods by their ``test_`` prefix, so no caller is affected.
    def test_invalid_axis_type(self):
        inputs = [numpy.random.rand(3, 4), numpy.random.rand(3, 1)]
        with self.assertRaises(TypeError):
            functions.concat(inputs, 'a')
# Hand this module's tests over to Chainer's test runner.
testing.run_module(__name__, __file__)
| mit |
crmccreary/openerp_server | openerp/addons/l10n_ch/account_wizard.py | 8 | 1854 | # -*- encoding: utf-8 -*-
##############################################################################
#
# Author: Nicolas Bessi. Copyright Camptocamp SA
# Donors: Hasa Sàrl, Open Net Sàrl and Prisme Solutions Informatique SA
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import tools
from osv import osv
import addons
import os
class WizardMultiChartsAccounts(osv.osv_memory):
    # Extend the generic chart-of-accounts setup wizard so the Swiss
    # (l10n_ch) chart can link journals with accounts from an extra XML file.
    _inherit = 'wizard.multi.charts.accounts'

    _defaults = {
        # The Swiss chart ships its own configuration, so the generic
        # wizard defaults are disabled here.
        'bank_accounts_id': False,
        'code_digits': 0,
        'sale_tax': False,
        'purchase_tax': False
    }

    def execute(self, cr, uid, ids, context=None):
        """Override of code in order to be able to link journal with account in XML"""
        res = super(WizardMultiChartsAccounts, self).execute(cr, uid, ids, context)
        # After the standard wizard has generated the chart, import the
        # journal/account relations shipped with the l10n_ch module.
        path = addons.get_module_resource(os.path.join('l10n_ch', 'sterchi_chart', 'account_journal_rel.xml'))
        tools.convert_xml_import(cr, 'l10n_ch', path, idref=None, mode='init', noupdate=True, report=None)
        return res

# Instantiation registers the wizard with the ORM pool (OpenERP convention).
WizardMultiChartsAccounts()
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
ryfeus/lambda-packs | Tensorflow_Pandas_Numpy/source3.6/tensorflow/contrib/bayesflow/python/ops/custom_grad.py | 44 | 1222 | # Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Functions for specifying custom gradients.
See ${python/contrib.bayesflow.custom_gradient}.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
# go/tf-wildcard-import
# pylint: disable=wildcard-import
from tensorflow.contrib.bayesflow.python.ops.custom_grad_impl import *
# pylint: enable=wildcard-import
from tensorflow.python.util.all_util import remove_undocumented
# Public API of this module; every other attribute is stripped below.
_allowed_symbols = [
    'custom_gradient',
]

# Drop all names not listed in _allowed_symbols from the module namespace.
remove_undocumented(__name__, _allowed_symbols)
| mit |
40123210/w17b_exam | static/Brython3.1.1-20150328-091302/Lib/test/test_int.py | 765 | 12587 | import sys
import unittest
from test.support import run_unittest
# Table of (input string, expected value) cases for int() parsing; a value
# of ValueError marks inputs that must raise instead of converting.
L = [
    ('0', 0),
    ('1', 1),
    ('9', 9),
    ('10', 10),
    ('99', 99),
    ('100', 100),
    ('314', 314),
    (' 314', 314),
    ('314 ', 314),
    (' \t\t 314 \t\t ', 314),
    (repr(sys.maxsize), sys.maxsize),
    (' 1x', ValueError),
    (' 1 ', 1),
    (' 1\02 ', ValueError),
    ('', ValueError),
    (' ', ValueError),
    (' \t\t ', ValueError),
    ("\u0200", ValueError)
]
class IntTestCases(unittest.TestCase):
def test_basic(self):
self.assertEqual(int(314), 314)
self.assertEqual(int(3.14), 3)
# Check that conversion from float truncates towards zero
self.assertEqual(int(-3.14), -3)
self.assertEqual(int(3.9), 3)
self.assertEqual(int(-3.9), -3)
self.assertEqual(int(3.5), 3)
self.assertEqual(int(-3.5), -3)
self.assertEqual(int("-3"), -3)
self.assertEqual(int(" -3 "), -3)
self.assertEqual(int("\N{EM SPACE}-3\N{EN SPACE}"), -3)
# Different base:
self.assertEqual(int("10",16), 16)
# Test conversion from strings and various anomalies
for s, v in L:
for sign in "", "+", "-":
for prefix in "", " ", "\t", " \t\t ":
ss = prefix + sign + s
vv = v
if sign == "-" and v is not ValueError:
vv = -v
try:
self.assertEqual(int(ss), vv)
except ValueError:
pass
s = repr(-1-sys.maxsize)
x = int(s)
self.assertEqual(x+1, -sys.maxsize)
self.assertIsInstance(x, int)
# should return int
self.assertEqual(int(s[1:]), sys.maxsize+1)
# should return int
x = int(1e100)
self.assertIsInstance(x, int)
x = int(-1e100)
self.assertIsInstance(x, int)
# SF bug 434186: 0x80000000/2 != 0x80000000>>1.
# Worked by accident in Windows release build, but failed in debug build.
# Failed in all Linux builds.
x = -1-sys.maxsize
self.assertEqual(x >> 1, x//2)
self.assertRaises(ValueError, int, '123\0')
self.assertRaises(ValueError, int, '53', 40)
# SF bug 1545497: embedded NULs were not detected with
# explicit base
self.assertRaises(ValueError, int, '123\0', 10)
self.assertRaises(ValueError, int, '123\x00 245', 20)
x = int('1' * 600)
self.assertIsInstance(x, int)
self.assertRaises(TypeError, int, 1, 12)
self.assertEqual(int('0o123', 0), 83)
self.assertEqual(int('0x123', 16), 291)
# Bug 1679: "0x" is not a valid hex literal
self.assertRaises(ValueError, int, "0x", 16)
self.assertRaises(ValueError, int, "0x", 0)
self.assertRaises(ValueError, int, "0o", 8)
self.assertRaises(ValueError, int, "0o", 0)
self.assertRaises(ValueError, int, "0b", 2)
self.assertRaises(ValueError, int, "0b", 0)
# Bug #3236: Return small longs from PyLong_FromString
self.assertTrue(int("10") is 10)
self.assertTrue(int("-1") is -1)
# SF bug 1334662: int(string, base) wrong answers
# Various representations of 2**32 evaluated to 0
# rather than 2**32 in previous versions
self.assertEqual(int('100000000000000000000000000000000', 2), 4294967296)
self.assertEqual(int('102002022201221111211', 3), 4294967296)
self.assertEqual(int('10000000000000000', 4), 4294967296)
self.assertEqual(int('32244002423141', 5), 4294967296)
self.assertEqual(int('1550104015504', 6), 4294967296)
self.assertEqual(int('211301422354', 7), 4294967296)
self.assertEqual(int('40000000000', 8), 4294967296)
self.assertEqual(int('12068657454', 9), 4294967296)
self.assertEqual(int('4294967296', 10), 4294967296)
self.assertEqual(int('1904440554', 11), 4294967296)
self.assertEqual(int('9ba461594', 12), 4294967296)
self.assertEqual(int('535a79889', 13), 4294967296)
self.assertEqual(int('2ca5b7464', 14), 4294967296)
self.assertEqual(int('1a20dcd81', 15), 4294967296)
self.assertEqual(int('100000000', 16), 4294967296)
self.assertEqual(int('a7ffda91', 17), 4294967296)
self.assertEqual(int('704he7g4', 18), 4294967296)
self.assertEqual(int('4f5aff66', 19), 4294967296)
self.assertEqual(int('3723ai4g', 20), 4294967296)
self.assertEqual(int('281d55i4', 21), 4294967296)
self.assertEqual(int('1fj8b184', 22), 4294967296)
self.assertEqual(int('1606k7ic', 23), 4294967296)
self.assertEqual(int('mb994ag', 24), 4294967296)
self.assertEqual(int('hek2mgl', 25), 4294967296)
self.assertEqual(int('dnchbnm', 26), 4294967296)
self.assertEqual(int('b28jpdm', 27), 4294967296)
self.assertEqual(int('8pfgih4', 28), 4294967296)
self.assertEqual(int('76beigg', 29), 4294967296)
self.assertEqual(int('5qmcpqg', 30), 4294967296)
self.assertEqual(int('4q0jto4', 31), 4294967296)
self.assertEqual(int('4000000', 32), 4294967296)
self.assertEqual(int('3aokq94', 33), 4294967296)
self.assertEqual(int('2qhxjli', 34), 4294967296)
self.assertEqual(int('2br45qb', 35), 4294967296)
self.assertEqual(int('1z141z4', 36), 4294967296)
# tests with base 0
# this fails on 3.0, but in 2.x the old octal syntax is allowed
self.assertEqual(int(' 0o123 ', 0), 83)
self.assertEqual(int(' 0o123 ', 0), 83)
self.assertEqual(int('000', 0), 0)
self.assertEqual(int('0o123', 0), 83)
self.assertEqual(int('0x123', 0), 291)
self.assertEqual(int('0b100', 0), 4)
self.assertEqual(int(' 0O123 ', 0), 83)
self.assertEqual(int(' 0X123 ', 0), 291)
self.assertEqual(int(' 0B100 ', 0), 4)
# without base still base 10
self.assertEqual(int('0123'), 123)
self.assertEqual(int('0123', 10), 123)
# tests with prefix and base != 0
self.assertEqual(int('0x123', 16), 291)
self.assertEqual(int('0o123', 8), 83)
self.assertEqual(int('0b100', 2), 4)
self.assertEqual(int('0X123', 16), 291)
self.assertEqual(int('0O123', 8), 83)
self.assertEqual(int('0B100', 2), 4)
# the code has special checks for the first character after the
# type prefix
self.assertRaises(ValueError, int, '0b2', 2)
self.assertRaises(ValueError, int, '0b02', 2)
self.assertRaises(ValueError, int, '0B2', 2)
self.assertRaises(ValueError, int, '0B02', 2)
self.assertRaises(ValueError, int, '0o8', 8)
self.assertRaises(ValueError, int, '0o08', 8)
self.assertRaises(ValueError, int, '0O8', 8)
self.assertRaises(ValueError, int, '0O08', 8)
self.assertRaises(ValueError, int, '0xg', 16)
self.assertRaises(ValueError, int, '0x0g', 16)
self.assertRaises(ValueError, int, '0Xg', 16)
self.assertRaises(ValueError, int, '0X0g', 16)
# SF bug 1334662: int(string, base) wrong answers
# Checks for proper evaluation of 2**32 + 1
self.assertEqual(int('100000000000000000000000000000001', 2), 4294967297)
self.assertEqual(int('102002022201221111212', 3), 4294967297)
self.assertEqual(int('10000000000000001', 4), 4294967297)
self.assertEqual(int('32244002423142', 5), 4294967297)
self.assertEqual(int('1550104015505', 6), 4294967297)
self.assertEqual(int('211301422355', 7), 4294967297)
self.assertEqual(int('40000000001', 8), 4294967297)
self.assertEqual(int('12068657455', 9), 4294967297)
self.assertEqual(int('4294967297', 10), 4294967297)
self.assertEqual(int('1904440555', 11), 4294967297)
self.assertEqual(int('9ba461595', 12), 4294967297)
self.assertEqual(int('535a7988a', 13), 4294967297)
self.assertEqual(int('2ca5b7465', 14), 4294967297)
self.assertEqual(int('1a20dcd82', 15), 4294967297)
self.assertEqual(int('100000001', 16), 4294967297)
self.assertEqual(int('a7ffda92', 17), 4294967297)
self.assertEqual(int('704he7g5', 18), 4294967297)
self.assertEqual(int('4f5aff67', 19), 4294967297)
self.assertEqual(int('3723ai4h', 20), 4294967297)
self.assertEqual(int('281d55i5', 21), 4294967297)
self.assertEqual(int('1fj8b185', 22), 4294967297)
self.assertEqual(int('1606k7id', 23), 4294967297)
self.assertEqual(int('mb994ah', 24), 4294967297)
self.assertEqual(int('hek2mgm', 25), 4294967297)
self.assertEqual(int('dnchbnn', 26), 4294967297)
self.assertEqual(int('b28jpdn', 27), 4294967297)
self.assertEqual(int('8pfgih5', 28), 4294967297)
self.assertEqual(int('76beigh', 29), 4294967297)
self.assertEqual(int('5qmcpqh', 30), 4294967297)
self.assertEqual(int('4q0jto5', 31), 4294967297)
self.assertEqual(int('4000001', 32), 4294967297)
self.assertEqual(int('3aokq95', 33), 4294967297)
self.assertEqual(int('2qhxjlj', 34), 4294967297)
self.assertEqual(int('2br45qc', 35), 4294967297)
self.assertEqual(int('1z141z5', 36), 4294967297)
def test_intconversion(self):
# Test __int__()
class ClassicMissingMethods:
pass
self.assertRaises(TypeError, int, ClassicMissingMethods())
class MissingMethods(object):
pass
self.assertRaises(TypeError, int, MissingMethods())
class Foo0:
def __int__(self):
return 42
class Foo1(object):
def __int__(self):
return 42
class Foo2(int):
def __int__(self):
return 42
class Foo3(int):
def __int__(self):
return self
class Foo4(int):
def __int__(self):
return 42
class Foo5(int):
def __int__(self):
return 42.
self.assertEqual(int(Foo0()), 42)
self.assertEqual(int(Foo1()), 42)
self.assertEqual(int(Foo2()), 42)
self.assertEqual(int(Foo3()), 0)
self.assertEqual(int(Foo4()), 42)
self.assertRaises(TypeError, int, Foo5())
class Classic:
pass
for base in (object, Classic):
class IntOverridesTrunc(base):
def __int__(self):
return 42
def __trunc__(self):
return -12
self.assertEqual(int(IntOverridesTrunc()), 42)
class JustTrunc(base):
def __trunc__(self):
return 42
self.assertEqual(int(JustTrunc()), 42)
for trunc_result_base in (object, Classic):
class Integral(trunc_result_base):
def __int__(self):
return 42
class TruncReturnsNonInt(base):
def __trunc__(self):
return Integral()
self.assertEqual(int(TruncReturnsNonInt()), 42)
class NonIntegral(trunc_result_base):
def __trunc__(self):
# Check that we avoid infinite recursion.
return NonIntegral()
class TruncReturnsNonIntegral(base):
def __trunc__(self):
return NonIntegral()
try:
int(TruncReturnsNonIntegral())
except TypeError as e:
self.assertEqual(str(e),
"__trunc__ returned non-Integral"
" (type NonIntegral)")
else:
self.fail("Failed to raise TypeError with %s" %
((base, trunc_result_base),))
def test_error_message(self):
testlist = ('\xbd', '123\xbd', ' 123 456 ')
for s in testlist:
try:
int(s)
except ValueError as e:
self.assertIn(s.strip(), e.args[0])
else:
self.fail("Expected int(%r) to raise a ValueError", s)
def test_main():
    # Entry point used by CPython's regression-test framework.
    run_unittest(IntTestCases)

if __name__ == "__main__":
    test_main()
| agpl-3.0 |
hashemd/Advanced-Virtual-Digest | libs/bs4/tests/test_html5lib.py | 20 | 2299 | """Tests to ensure that the html5lib tree builder generates good trees."""
import warnings
try:
from bs4.builder import HTML5TreeBuilder
HTML5LIB_PRESENT = True
except ImportError, e:
HTML5LIB_PRESENT = False
from bs4.element import SoupStrainer
from bs4.testing import (
HTML5TreeBuilderSmokeTest,
SoupTest,
skipIf,
)
@skipIf(
    not HTML5LIB_PRESENT,
    "html5lib seems not to be present, not testing its tree builder.")
class HTML5LibBuilderSmokeTest(SoupTest, HTML5TreeBuilderSmokeTest):
    """See ``HTML5TreeBuilderSmokeTest``."""

    @property
    def default_builder(self):
        # Fresh builder per test; the shared smoke tests use this hook.
        return HTML5TreeBuilder()

    def test_soupstrainer(self):
        # The html5lib tree builder does not support SoupStrainers.
        strainer = SoupStrainer("b")
        markup = "<p>A <b>bold</b> statement.</p>"
        with warnings.catch_warnings(record=True) as w:
            soup = self.soup(markup, parse_only=strainer)
        # The strainer is ignored: the full document is parsed anyway.
        self.assertEqual(
            soup.decode(), self.document_for(markup))
        # A warning must tell the caller that parse_only was ignored.
        self.assertTrue(
            "the html5lib tree builder doesn't support parse_only" in
            str(w[0].message))

    def test_correctly_nested_tables(self):
        """html5lib inserts <tbody> tags where other parsers don't."""
        markup = ('<table id="1">'
                  '<tr>'
                  "<td>Here's another table:"
                  '<table id="2">'
                  '<tr><td>foo</td></tr>'
                  '</table></td>')
        self.assertSoupEquals(
            markup,
            '<table id="1"><tbody><tr><td>Here\'s another table:'
            '<table id="2"><tbody><tr><td>foo</td></tr></tbody></table>'
            '</td></tr></tbody></table>')
        self.assertSoupEquals(
            "<table><thead><tr><td>Foo</td></tr></thead>"
            "<tbody><tr><td>Bar</td></tr></tbody>"
            "<tfoot><tr><td>Baz</td></tr></tfoot></table>")

    def test_xml_declaration_followed_by_doctype(self):
        markup = '''<?xml version="1.0" encoding="utf-8"?>
<!DOCTYPE html>
<html>
<head>
</head>
<body>
<p>foo</p>
</body>
</html>'''
        soup = self.soup(markup)
        # Verify that we can reach the <p> tag; this means the tree is connected.
        self.assertEqual(b"<p>foo</p>", soup.p.encode())
| mit |
wyom/sympy | sympy/functions/combinatorial/tests/test_comb_factorials.py | 16 | 12552 | from sympy import (S, Symbol, symbols, factorial, factorial2, binomial,
rf, ff, gamma, polygamma, EulerGamma, O, pi, nan,
oo, zoo, simplify, expand_func, Product)
from sympy.functions.combinatorial.factorials import subfactorial
from sympy.functions.special.gamma_functions import uppergamma
from sympy.utilities.pytest import XFAIL
def test_rf_eval_apply():
    """Evaluation rules of the rising factorial rf(x, k)."""
    x, y = symbols('x,y')

    # nan propagates; fully symbolic arguments stay unevaluated
    assert rf(nan, y) == nan
    assert rf(x, y) == rf(x, y)

    # behaviour at infinities
    assert rf(oo, 0) == 1
    assert rf(-oo, 0) == 1
    assert rf(oo, 6) == oo
    assert rf(-oo, 7) == -oo
    assert rf(oo, -6) == oo
    assert rf(-oo, -7) == oo

    # small explicit integer orders expand to products
    assert rf(x, 0) == 1
    assert rf(x, 1) == x
    assert rf(x, 2) == x*(x + 1)
    assert rf(x, 3) == x*(x + 1)*(x + 2)
    assert rf(x, 5) == x*(x + 1)*(x + 2)*(x + 3)*(x + 4)

    # negative orders give reciprocals of falling products
    assert rf(x, -1) == 1/(x - 1)
    assert rf(x, -2) == 1/((x - 1)*(x - 2))
    assert rf(x, -3) == 1/((x - 1)*(x - 2)*(x - 3))

    assert rf(1, 100) == factorial(100)

    # polynomial first arguments are expanded
    assert rf(x**2 + 3*x, 2) == x**4 + 8*x**3 + 19*x**2 + 12*x
    assert rf(x**3 + x, -2) == 1/(x**6 - 9*x**5 + 35*x**4 - 75*x**3 + 94*x**2 - 66*x + 20)

    # integrality deduction from the arguments' assumptions
    n = Symbol('n', integer=True)
    k = Symbol('k', integer=True)
    m = Symbol('m', integer=True, nonnegative=True)
    assert rf(x, m).is_integer is None
    assert rf(n, k).is_integer is None
    assert rf(n, m).is_integer is True
    assert rf(n, k + pi).is_integer is False
    assert rf(n, m + pi).is_integer is False
    assert rf(pi, m).is_integer is False
def test_ff_eval_apply():
    """Evaluation rules of the falling factorial ff(x, k)."""
    x, y = symbols('x,y')

    # nan propagates; fully symbolic arguments stay unevaluated
    assert ff(nan, y) == nan
    assert ff(x, y) == ff(x, y)

    # behaviour at infinities
    assert ff(oo, 0) == 1
    assert ff(-oo, 0) == 1
    assert ff(oo, 6) == oo
    assert ff(-oo, 7) == -oo
    assert ff(oo, -6) == oo
    assert ff(-oo, -7) == oo

    # small explicit integer orders expand to products
    assert ff(x, 0) == 1
    assert ff(x, 1) == x
    assert ff(x, 2) == x*(x - 1)
    assert ff(x, 3) == x*(x - 1)*(x - 2)
    assert ff(x, 5) == x*(x - 1)*(x - 2)*(x - 3)*(x - 4)

    # negative orders give reciprocals of rising products
    assert ff(x, -1) == 1/(x + 1)
    assert ff(x, -2) == 1/((x + 1)*(x + 2))
    assert ff(x, -3) == 1/((x + 1)*(x + 2)*(x + 3))

    assert ff(100, 100) == factorial(100)

    # polynomial first arguments are expanded
    assert ff(2*x**2 - 5*x, 2) == 4*x**4 - 28*x**3 + 59*x**2 - 35*x
    assert ff(x**2 + 3*x, -2) == 1/(x**4 + 12*x**3 + 49*x**2 + 78*x + 40)

    # integrality deduction from the arguments' assumptions
    n = Symbol('n', integer=True)
    k = Symbol('k', integer=True)
    m = Symbol('m', integer=True, nonnegative=True)
    assert ff(x, m).is_integer is None
    assert ff(n, k).is_integer is None
    assert ff(n, m).is_integer is True
    assert ff(n, k + pi).is_integer is False
    assert ff(n, m + pi).is_integer is False
    assert ff(pi, m).is_integer is False
def test_factorial():
    """Values and assumption propagation (integer/positive/real/composite)
    of factorial over symbols with various assumptions."""
    x = Symbol('x')
    n = Symbol('n', integer=True)
    k = Symbol('k', integer=True, nonnegative=True)
    r = Symbol('r', integer=False)
    s = Symbol('s', integer=False, negative=True)
    t = Symbol('t', nonnegative=True)
    u = Symbol('u', noninteger=True)
    v = Symbol('v', integer=True, negative=True)

    # negative integers map to complex infinity (poles of gamma)
    assert factorial(-2) == zoo
    assert factorial(0) == 1
    assert factorial(7) == 5040
    assert factorial(n).func == factorial
    assert factorial(2*n).func == factorial

    assert factorial(x).is_integer is None
    assert factorial(n).is_integer is None
    assert factorial(k).is_integer
    assert factorial(r).is_integer is None

    assert factorial(n).is_positive is None
    assert factorial(k).is_positive

    assert factorial(x).is_real is None
    assert factorial(n).is_real is None
    assert factorial(k).is_real is True
    assert factorial(r).is_real is None
    assert factorial(s).is_real is True
    assert factorial(t).is_real is True
    assert factorial(u).is_real is True

    assert factorial(x).is_composite is None
    assert factorial(n).is_composite is None
    assert factorial(k).is_composite is None
    # k + 3 >= 3, so (k + 3)! is a nontrivial product
    assert factorial(k + 3).is_composite is True
    assert factorial(r).is_composite is None
    assert factorial(s).is_composite is None
    assert factorial(t).is_composite is None
    assert factorial(u).is_composite is None
    assert factorial(v).is_composite is False

    assert factorial(oo) == oo
def test_factorial_diff():
    """d/dn factorial(n) is expressed via gamma and digamma."""
    n = Symbol('n', integer=True)

    expected_plain = gamma(1 + n)*polygamma(0, 1 + n)
    assert factorial(n).diff(n) == expected_plain

    # chain rule through the inner n**2
    expected_chain = 2*n*gamma(1 + n**2)*polygamma(0, 1 + n**2)
    assert factorial(n**2).diff(n) == expected_chain
def test_factorial_series():
    """Maclaurin expansion of factorial(n) up to third order."""
    n = Symbol('n', integer=True)
    expansion = factorial(n).series(n, 0, 3)
    expected = 1 - n*EulerGamma + n**2*(EulerGamma**2/2 + pi**2/12) + O(n**3)
    assert expansion == expected
def test_factorial_rewrite():
    """factorial rewrites to gamma and to a Product."""
    n = Symbol('n', integer=True)
    k = Symbol('k', integer=True, nonnegative=True)

    assert factorial(n).rewrite(gamma) == gamma(n + 1)
    # The Product's dummy index is auto-generated, so compare string forms.
    product_form = factorial(k).rewrite(Product)
    assert str(product_form) == 'Product(_i, (_i, 1, k))'
def test_factorial2():
    """Values and assumption propagation of the double factorial n!!."""
    n = Symbol('n', integer=True)

    assert factorial2(-1) == 1
    assert factorial2(0) == 1
    assert factorial2(7) == 105
    assert factorial2(8) == 384
    assert factorial2(n).func == factorial2

    # The following is exhaustive
    # Naming scheme: first letter encodes the integer/parity assumption
    # (t/f/n = True/False/unknown), second letter the nonnegativity one.
    tt = Symbol('tt', integer=True, nonnegative=True)
    tte = Symbol('tte', even=True, nonnegative=True)
    tpe = Symbol('tpe', even=True, positive=True)
    tto = Symbol('tto', odd=True, nonnegative=True)
    tf = Symbol('tf', integer=True, nonnegative=False)
    tfe = Symbol('tfe', even=True, nonnegative=False)
    tfo = Symbol('tfo', odd=True, nonnegative=False)
    ft = Symbol('ft', integer=False, nonnegative=True)
    # NOTE: this local name shadows the imported ff() within this test only.
    ff = Symbol('ff', integer=False, nonnegative=False)
    fn = Symbol('fn', integer=False)
    nt = Symbol('nt', nonnegative=True)
    nf = Symbol('nf', nonnegative=False)
    nn = Symbol('nn')

    assert factorial2(n).is_integer is None
    assert factorial2(tt - 1).is_integer
    assert factorial2(tte - 1).is_integer
    assert factorial2(tpe - 3).is_integer
    assert factorial2(tto - 4).is_integer
    assert factorial2(tto - 2).is_integer
    assert factorial2(tf).is_integer is None
    assert factorial2(tfe).is_integer is None
    assert factorial2(tfo).is_integer is None
    assert factorial2(ft).is_integer is None
    assert factorial2(ff).is_integer is None
    assert factorial2(fn).is_integer is None
    assert factorial2(nt).is_integer is None
    assert factorial2(nf).is_integer is None
    assert factorial2(nn).is_integer is None

    assert factorial2(n).is_positive is None
    assert factorial2(tt - 1).is_positive is True
    assert factorial2(tte - 1).is_positive is True
    assert factorial2(tpe - 3).is_positive is True
    assert factorial2(tpe - 1).is_positive is True
    assert factorial2(tto - 2).is_positive is True
    assert factorial2(tto - 1).is_positive is True
    assert factorial2(tf).is_positive is None
    assert factorial2(tfe).is_positive is None
    assert factorial2(tfo).is_positive is None
    assert factorial2(ft).is_positive is None
    assert factorial2(ff).is_positive is None
    assert factorial2(fn).is_positive is None
    assert factorial2(nt).is_positive is None
    assert factorial2(nf).is_positive is None
    assert factorial2(nn).is_positive is None

    # parity deductions
    assert factorial2(tt).is_even is None
    assert factorial2(tt).is_odd is None
    assert factorial2(tte).is_even is None
    assert factorial2(tte).is_odd is None
    assert factorial2(tte + 2).is_even is True
    assert factorial2(tpe).is_even is True
    assert factorial2(tto).is_odd is True
    assert factorial2(tf).is_even is None
    assert factorial2(tf).is_odd is None
    assert factorial2(tfe).is_even is None
    assert factorial2(tfe).is_odd is None
    assert factorial2(tfo).is_even is False
    assert factorial2(tfo).is_odd is None
def test_binomial():
    """Special values, expansion and assumptions of binomial(n, k)."""
    x = Symbol('x')
    n = Symbol('n', integer=True)
    nz = Symbol('nz', integer=True, nonzero=True)
    k = Symbol('k', integer=True)
    kp = Symbol('kp', integer=True, positive=True)
    u = Symbol('u', negative=True)
    p = Symbol('p', positive=True)
    z = Symbol('z', zero=True)
    nt = Symbol('nt', integer=False)

    assert binomial(0, 0) == 1
    assert binomial(1, 1) == 1
    assert binomial(10, 10) == 1
    assert binomial(n, z) == 1
    assert binomial(1, 2) == 0
    assert binomial(1, -1) == 0
    # generalized binomial for negative upper arguments
    assert binomial(-1, 1) == -1
    assert binomial(-1, -1) == 1
    assert binomial(S.Half, S.Half) == 1
    assert binomial(-10, 1) == -10
    assert binomial(-10, 7) == -11440
    assert binomial(n, -1).func == binomial
    assert binomial(kp, -1) == 0
    assert binomial(nz, 0) == 1
    assert expand_func(binomial(n, 1)) == n
    assert expand_func(binomial(n, 2)) == n*(n - 1)/2
    assert expand_func(binomial(n, n - 2)) == n*(n - 1)/2
    assert expand_func(binomial(n, n - 1)) == n
    assert binomial(n, 3).func == binomial
    assert binomial(n, 3).expand(func=True) == n**3/6 - n**2/2 + n/3
    assert expand_func(binomial(n, 3)) == n*(n - 2)*(n - 1)/6
    assert binomial(n, n) == 1
    assert binomial(n, n + 1).func == binomial  # e.g. (-1, 0) == 1
    assert binomial(kp, kp + 1) == 0
    assert binomial(n, u).func == binomial
    assert binomial(kp, u) == 0
    assert binomial(n, p).func == binomial
    assert binomial(n, k).func == binomial
    assert binomial(n, n + p).func == binomial
    assert binomial(kp, kp + p) == 0

    assert expand_func(binomial(n, n - 3)) == n*(n - 2)*(n - 1)/6

    # integrality deductions
    assert binomial(n, k).is_integer
    assert binomial(nt, k).is_integer is None
    assert binomial(x, nt).is_integer is False
def test_binomial_diff():
    """Derivatives of binomial w.r.t. each argument (via polygamma)."""
    n = Symbol('n', integer=True)
    k = Symbol('k', integer=True)

    assert binomial(n, k).diff(n) == \
        (-polygamma(0, 1 + n - k) + polygamma(0, 1 + n))*binomial(n, k)
    # chain rule through the inner n**2
    assert binomial(n**2, k**3).diff(n) == \
        2*n*(-polygamma(
            0, 1 + n**2 - k**3) + polygamma(0, 1 + n**2))*binomial(n**2, k**3)
    assert binomial(n, k).diff(k) == \
        (-polygamma(0, 1 + k) + polygamma(0, 1 + n - k))*binomial(n, k)
    # chain rule through the inner k**3
    assert binomial(n**2, k**3).diff(k) == \
        3*k**2*(-polygamma(
            0, 1 + k**3) + polygamma(0, 1 + n**2 - k**3))*binomial(n**2, k**3)
def test_binomial_rewrite():
    """binomial(n, k) rewrites to factorial and gamma quotients."""
    n = Symbol('n', integer=True)
    k = Symbol('k', integer=True)

    via_factorial = factorial(n)/(factorial(k)*factorial(n - k))
    assert binomial(n, k).rewrite(factorial) == via_factorial

    via_gamma = gamma(n + 1)/(gamma(k + 1)*gamma(n - k + 1))
    assert binomial(n, k).rewrite(gamma) == via_gamma
# Known failure: simplify cannot yet reduce this polygamma combination,
# which comes from differentiating factorial(x + 1) - (x + 1)*factorial(x).
@XFAIL
def test_factorial_simplify_fail():
    # simplify(factorial(x + 1).diff(x) - ((x + 1)*factorial(x)).diff(x))) == 0
    from sympy.abc import x
    assert simplify(x*polygamma(0, x + 1) - x*polygamma(0, x + 2) +
                    polygamma(0, x + 1) - polygamma(0, x + 2) + 1) == 0
def test_subfactorial():
    """Values, rewriting and assumption propagation of subfactorial."""
    # First ten derangement numbers.
    assert all(subfactorial(i) == ans for i, ans in enumerate(
               [1, 0, 1, 2, 9, 44, 265, 1854, 14833, 133496]))
    assert subfactorial(oo) == oo
    assert subfactorial(nan) == nan

    x = Symbol('x')
    assert subfactorial(x).rewrite(uppergamma) == uppergamma(x + 1, -1)/S.Exp1

    # Naming scheme mirrors test_factorial2: first letter = integer
    # assumption (t/f/n = True/False/unknown), second = nonnegativity.
    # FIX: the Symbol names for tn, fn and nn were copy-pasted as
    # 'tf', 'ff' and 'nf'; the names now match the variables (the
    # assumptions, and hence the assertions, are unchanged).
    tt = Symbol('tt', integer=True, nonnegative=True)
    tf = Symbol('tf', integer=True, nonnegative=False)
    tn = Symbol('tn', integer=True)
    ft = Symbol('ft', integer=False, nonnegative=True)
    ff = Symbol('ff', integer=False, nonnegative=False)
    fn = Symbol('fn', integer=False)
    nt = Symbol('nt', nonnegative=True)
    nf = Symbol('nf', nonnegative=False)
    nn = Symbol('nn')
    te = Symbol('te', even=True, nonnegative=True)
    to = Symbol('to', odd=True, nonnegative=True)

    # integrality is only known for integer, nonnegative arguments
    assert subfactorial(tt).is_integer
    assert subfactorial(tf).is_integer is None
    assert subfactorial(tn).is_integer is None
    assert subfactorial(ft).is_integer is None
    assert subfactorial(ff).is_integer is None
    assert subfactorial(fn).is_integer is None
    assert subfactorial(nt).is_integer is None
    assert subfactorial(nf).is_integer is None
    assert subfactorial(nn).is_integer is None

    assert subfactorial(tt).is_nonnegative
    assert subfactorial(tf).is_nonnegative is None
    assert subfactorial(tn).is_nonnegative is None
    assert subfactorial(ft).is_nonnegative is None
    assert subfactorial(ff).is_nonnegative is None
    assert subfactorial(fn).is_nonnegative is None
    assert subfactorial(nt).is_nonnegative is None
    assert subfactorial(nf).is_nonnegative is None
    assert subfactorial(nn).is_nonnegative is None

    # parity: !n is odd for even n >= 0 and even for odd n >= 0
    assert subfactorial(tt).is_even is None
    assert subfactorial(tt).is_odd is None
    assert subfactorial(te).is_odd is True
    assert subfactorial(to).is_even is True
| bsd-3-clause |
cadeef/murano-app-incubator | io.murano.apps.openstack.MuranoDashboard/Resources/scripts/settings.py | 5 | 5035 | import logging
import os
import sys
import tempfile
from openstack_dashboard import exceptions
# Paths: bundled binaries (e.g. the lessc compiler) live one level up, in bin/.
ROOT_PATH = os.path.dirname(os.path.abspath(__file__))
BIN_DIR = os.path.abspath(os.path.join(ROOT_PATH, '..', 'bin'))
if ROOT_PATH not in sys.path:
    sys.path.append(ROOT_PATH)

DEBUG = False
TEMPLATE_DEBUG = DEBUG

# On-disk cache for Murano metadata packages.
METADATA_CACHE_DIR = os.path.join('/tmp',
                                  'muranodashboard-cache')

# SQLite database in /tmp — suitable for demo/test use only.
DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.sqlite3',
        'NAME': os.path.join('/tmp', 'openstack-dashboard.sqlite')
    }
}
SITE_BRANDING = 'OpenStack Dashboard'

LOGIN_URL = '/auth/login/'
LOGOUT_URL = '/auth/logout/'
# LOGIN_REDIRECT_URL can be used as an alternative for
# HORIZON_CONFIG.user_home, if user_home is not set.
# Do not set it to '/home/', as this will cause circular redirect loop
LOGIN_REDIRECT_URL = '/'

MEDIA_ROOT = os.path.abspath(os.path.join(ROOT_PATH, '..', 'media'))
MEDIA_URL = '/media/'
STATIC_ROOT = os.path.abspath(os.path.join(ROOT_PATH, '..', 'static'))
STATIC_URL = '/static/'
ADMIN_MEDIA_PREFIX = '/static/admin/'

ROOT_URLCONF = 'openstack_dashboard.urls'

# Horizon dashboard wiring: murano is registered and made the default.
HORIZON_CONFIG = {
    'dashboards': ('project', 'admin', 'settings', 'murano'),
    'default_dashboard': 'murano',
    'user_home': 'muranodashboard.views.get_user_home',
    'ajax_queue_limit': 10,
    'auto_fade_alerts': {
        'delay': 3000,
        'fade_duration': 1500,
        'types': ['alert-success', 'alert-info']
    },
    'help_url': "http://docs.openstack.org",
    'exceptions': {'recoverable': exceptions.RECOVERABLE,
                   'not_found': exceptions.NOT_FOUND,
                   'unauthorized': exceptions.UNAUTHORIZED},
}

MIDDLEWARE_CLASSES = (
    'django.middleware.common.CommonMiddleware',
    'django.middleware.csrf.CsrfViewMiddleware',
    'django.contrib.sessions.middleware.SessionMiddleware',
    'django.contrib.auth.middleware.AuthenticationMiddleware',
    'django.contrib.messages.middleware.MessageMiddleware',
    # Murano-specific middleware inserted among the stock Django classes.
    'muranodashboard.middleware.ExceptionMiddleware',
    'django.middleware.doc.XViewMiddleware',
    'django.middleware.locale.LocaleMiddleware',
)

TEMPLATE_CONTEXT_PROCESSORS = (
    'django.core.context_processors.debug',
    'django.core.context_processors.i18n',
    'django.core.context_processors.request',
    'django.core.context_processors.media',
    'django.core.context_processors.static',
    'django.contrib.messages.context_processors.messages',
    'horizon.context_processors.horizon',
    'openstack_dashboard.context_processors.openstack'
)

TEMPLATE_LOADERS = (
    'django.template.loaders.filesystem.Loader',
    'django.template.loaders.app_directories.Loader',
    'horizon.loaders.TemplateLoader',
)

TEMPLATE_DIRS = (
    os.path.join(ROOT_PATH, 'templates'),
)

STATICFILES_FINDERS = (
    'compressor.finders.CompressorFinder',
    'django.contrib.staticfiles.finders.AppDirectoriesFinder',
)

# Upload size limit (MB) — presumably for Murano package uploads; confirm.
MAX_FILE_SIZE_MB = 5

# Compile .less stylesheets with the bundled lessc binary via
# django-compressor.
less_binary = os.path.join(BIN_DIR, 'less', 'lessc')
COMPRESS_PRECOMPILERS = (
    ('text/less', (less_binary + ' {infile} {outfile}')),
)

COMPRESS_CSS_FILTERS = (
    'compressor.filters.css_default.CssAbsoluteFilter',
)

COMPRESS_ENABLED = True
COMPRESS_OUTPUT_DIR = 'muranodashboard'
COMPRESS_CSS_HASHING_METHOD = 'hash'
COMPRESS_PARSER = 'compressor.parser.HtmlParser'

INSTALLED_APPS = (
    'openstack_dashboard',
    'django.contrib.contenttypes',
    'django.contrib.auth',
    'django.contrib.sessions',
    'django.contrib.messages',
    'django.contrib.staticfiles',
    'django.contrib.humanize',
    'compressor',
    'horizon',
    'openstack_dashboard.dashboards.project',
    'openstack_dashboard.dashboards.admin',
    'openstack_dashboard.dashboards.settings',
    'openstack_auth',
    'floppyforms',
    'muranodashboard'
)

TEST_RUNNER = 'django_nose.NoseTestSuiteRunner'

AUTHENTICATION_BACKENDS = ('openstack_auth.backend.KeystoneBackend',)
MESSAGE_STORAGE = 'django.contrib.messages.storage.cookie.CookieStorage'

SESSION_ENGINE = 'django.contrib.sessions.backends.db'
SESSION_COOKIE_HTTPONLY = True
SESSION_EXPIRE_AT_BROWSER_CLOSE = True
# NOTE(review): cookies may be sent over plain HTTP; set True behind HTTPS.
SESSION_COOKIE_SECURE = False

# NOTE(review): hard-coded secret key — deployments must override this in
# local_settings; a shared static SECRET_KEY is unsafe in production.
SECRET_KEY = 'some_random_value'
def gettext_noop(s):
    """Mark *s* for translation without translating it at import time.

    The literals below are collected by the message-extraction tooling;
    actual translation happens later.  A plain ``def`` replaces the former
    ``lambda`` assignment (PEP 8 E731); behaviour is identical.
    """
    return s

# Languages offered in the dashboard's language selector.
LANGUAGES = (
    ('en', gettext_noop('English')),
    ('it', gettext_noop('Italiano')),
    ('es', gettext_noop('Spanish')),
    ('fr', gettext_noop('French')),
    ('ja', gettext_noop('Japanese')),
    ('pt', gettext_noop('Portuguese')),
    ('pl', gettext_noop('Polish')),
    ('zh-cn', gettext_noop('Simplified Chinese')),
    ('zh-tw', gettext_noop('Traditional Chinese')),
)

LANGUAGE_CODE = 'en'
USE_I18N = True
USE_L10N = True
USE_TZ = True

OPENSTACK_KEYSTONE_DEFAULT_ROLE = 'Member'

DEFAULT_EXCEPTION_REPORTER_FILTER = 'horizon.exceptions.HorizonReporterFilter'
# Deployment-specific overrides live in local/local_settings.py; a missing
# file is tolerated so the defaults above apply.
try:
    from local.local_settings import *  # noqa
except ImportError:
    logging.warning("No local_settings file found.")

if DEBUG:
    logging.basicConfig(level=logging.DEBUG)
SESSION_SERIALIZER = 'django.contrib.sessions.serializers.PickleSerializer'
jasonzzz/ansible | lib/ansible/plugins/callback/syslog_json.py | 54 | 2725 | # Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import os
import json
import logging
import logging.handlers
import socket
from ansible.plugins.callback import CallbackBase
class CallbackModule(CallbackBase):
    """
    logs ansible-playbook and ansible runs to a syslog server in json format
    make sure you have in ansible.cfg:
        callback_plugins = <path_to_callback_plugins_folder>
    and put the plugin in <path_to_callback_plugins_folder>

    This plugin makes use of the following environment variables:
        SYSLOG_SERVER   (optional): defaults to localhost
        SYSLOG_PORT     (optional): defaults to 514
    """
    CALLBACK_VERSION = 2.0
    CALLBACK_TYPE = 'aggregate'
    CALLBACK_NAME = 'syslog_json'
    CALLBACK_NEEDS_WHITELIST = True

    def __init__(self):
        super(CallbackModule, self).__init__()

        self.logger = logging.getLogger('ansible logger')
        self.logger.setLevel(logging.DEBUG)

        # os.getenv() returns a *string* when SYSLOG_PORT is set in the
        # environment; the address tuple handed to SysLogHandler must carry
        # an int port, so cast explicitly (the default 514 was already fine).
        self.handler = logging.handlers.SysLogHandler(
            address=(os.getenv('SYSLOG_SERVER', 'localhost'),
                     int(os.getenv('SYSLOG_PORT', 514))),
            facility=logging.handlers.SysLogHandler.LOG_USER
        )
        self.logger.addHandler(self.handler)
        self.hostname = socket.gethostname()

    def runner_on_failed(self, host, res, ignore_errors=False):
        # ignore_errors is accepted for API compatibility; failures are
        # logged either way.
        self.logger.error('%s ansible-command: task execution FAILED; host: %s; message: %s' % (self.hostname, host, self._dump_results(res)))

    def runner_on_ok(self, host, res):
        self.logger.info('%s ansible-command: task execution OK; host: %s; message: %s' % (self.hostname, host, self._dump_results(res)))

    def runner_on_skipped(self, host, item=None):
        self.logger.info('%s ansible-command: task execution SKIPPED; host: %s; message: %s' % (self.hostname, host, 'skipped'))

    def runner_on_unreachable(self, host, res):
        self.logger.error('%s ansible-command: task execution UNREACHABLE; host: %s; message: %s' % (self.hostname, host, self._dump_results(res)))

    def runner_on_async_failed(self, host, res):
        self.logger.error('%s ansible-command: task execution FAILED; host: %s; message: %s' % (self.hostname, host, self._dump_results(res)))

    def playbook_on_import_for_host(self, host, imported_file):
        self.logger.info('%s ansible-command: playbook IMPORTED; host: %s; message: imported file %s' % (self.hostname, host, imported_file))

    def playbook_on_not_import_for_host(self, host, missing_file):
        self.logger.info('%s ansible-command: playbook NOT IMPORTED; host: %s; message: missing file %s' % (self.hostname, host, missing_file))
| gpl-3.0 |
viniciusgama/blog_gae | djangoappengine/settings_base.py | 13 | 1463 | # Initialize App Engine SDK if necessary
# Initialize the App Engine SDK if it is not already importable.
try:
    from google.appengine.api import api_proxy_stub_map
except ImportError:
    from .boot import setup_env
    setup_env()

# NOTE(review): have_appserver appears unused in this module; possibly
# re-exported for downstream settings files — confirm before removing.
from djangoappengine.utils import on_production_server, have_appserver

# Debugging is only enabled on the local development server.
DEBUG = not on_production_server
TEMPLATE_DEBUG = DEBUG

ROOT_URLCONF = 'urls'

DATABASES = {
    'default': {
        'ENGINE': 'djangoappengine.db',

        # Other settings which you might want to override in your settings.py

        # Activates high-replication support for remote_api
        # 'HIGH_REPLICATION': True,

        # Switch to the App Engine for Business domain
        # 'DOMAIN': 'googleplex.com',
    },
}

# Asynchronous mail delivery is only available on the production server.
if on_production_server:
    EMAIL_BACKEND = 'djangoappengine.mail.AsyncEmailBackend'
else:
    EMAIL_BACKEND = 'djangoappengine.mail.EmailBackend'

PREPARE_UPLOAD_BACKEND = 'djangoappengine.storage.prepare_upload'
SERVE_FILE_BACKEND = 'djangoappengine.storage.serve_file'
DEFAULT_FILE_STORAGE = 'djangoappengine.storage.BlobstoreStorage'
FILE_UPLOAD_MAX_MEMORY_SIZE = 1024 * 1024
FILE_UPLOAD_HANDLERS = (
    'djangoappengine.storage.BlobstoreFileUploadHandler',
    'django.core.files.uploadhandler.MemoryFileUploadHandler',
)

CACHES = {
    'default': {
        'BACKEND': 'django.core.cache.backends.memcached.MemcachedCache',
        'TIMEOUT': 0,
    }
}

SESSION_ENGINE = 'django.contrib.sessions.backends.cached_db'

if not on_production_server:
    INTERNAL_IPS = ('127.0.0.1',)
| bsd-3-clause |
mhnatiuk/phd_sociology_of_religion | scrapper/build/cryptography/src/cryptography/exceptions.py | 2 | 1131 | # This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
# for complete details.
from __future__ import absolute_import, division, print_function
class _Reasons(object):
    # Each attribute is a unique object() sentinel naming why an
    # UnsupportedAlgorithm error was raised.
    BACKEND_MISSING_INTERFACE = object()
    UNSUPPORTED_HASH = object()
    UNSUPPORTED_CIPHER = object()
    UNSUPPORTED_PADDING = object()
    UNSUPPORTED_MGF = object()
    UNSUPPORTED_PUBLIC_KEY_ALGORITHM = object()
    UNSUPPORTED_ELLIPTIC_CURVE = object()
    UNSUPPORTED_SERIALIZATION = object()
    UNSUPPORTED_X509 = object()
class UnsupportedAlgorithm(Exception):
    """Raised when an algorithm is unavailable on the active backend.

    The optional ``reason`` (typically one of the ``_Reasons`` sentinels)
    is stored on ``_reason`` so callers can tell why it is unsupported.
    """

    def __init__(self, message, reason=None):
        self._reason = reason
        super(UnsupportedAlgorithm, self).__init__(message)
# Public exception types raised throughout the cryptography package.
class AlreadyFinalized(Exception):
    """Raised when an operation is attempted on a finalized context."""
    pass


class AlreadyUpdated(Exception):
    """Raised when an operation is not permitted after an update."""
    pass


class NotYetFinalized(Exception):
    """Raised when an operation requires the context to be finalized first."""
    pass


class InvalidTag(Exception):
    """Raised when an authentication tag does not validate."""
    pass


class InvalidSignature(Exception):
    """Raised when signature verification fails."""
    pass


class InternalError(Exception):
    """Raised on unexpected errors inside the library/backend."""
    pass


class InvalidKey(Exception):
    """Raised when a key fails verification."""
    pass


class InvalidToken(Exception):
    """Raised when a token does not validate."""
    pass
| gpl-2.0 |
alejob/mdanalysis | package/MDAnalysis/lib/formats/__init__.py | 1 | 1067 | # -*- Mode: python; tab-width: 4; indent-tabs-mode:nil; coding:utf-8 -*-
# vim: tabstop=4 expandtab shiftwidth=4 softtabstop=4
#
# MDAnalysis --- http://www.mdanalysis.org
# Copyright (c) 2006-2016 The MDAnalysis Development Team and contributors
# (see the file AUTHORS for the full list of names)
#
# Released under the GNU Public Licence, v2 or any higher version
#
# Please cite your use of MDAnalysis in published work:
#
# R. J. Gowers, M. Linke, J. Barnoud, T. J. E. Reddy, M. N. Melo, S. L. Seyler,
# D. L. Dotson, J. Domanski, S. Buchoux, I. M. Kenney, and O. Beckstein.
# MDAnalysis: A Python package for the rapid analysis of molecular dynamics
# simulations. In S. Benthall and S. Rostrup editors, Proceedings of the 15th
# Python in Science Conference, pages 102-109, Austin, TX, 2016. SciPy.
#
# N. Michaud-Agrawal, E. J. Denning, T. B. Woolf, and O. Beckstein.
# MDAnalysis: A Toolkit for the Analysis of Molecular Dynamics Simulations.
# J. Comput. Chem. 32 (2011), 2319--2327, doi:10.1002/jcc.21787
#
from . import libmdaxdr
__all__ = ['libmdaxdr']
| gpl-2.0 |
liminspace/dju-common | dju_common/db.py | 1 | 2044 | # coding=utf-8
import gc
from django.shortcuts import _get_queryset
def get_object_or_None(klass, *args, **kwargs):
    """
    Return the single object matching the lookup, or ``None`` when no such
    row exists.

    ``klass`` may be a Model, Manager, or QuerySet object; all remaining
    positional and keyword arguments are forwarded to ``get()``.
    """
    qs = _get_queryset(klass)
    try:
        return qs.get(*args, **kwargs)
    except qs.model.DoesNotExist:
        return None
def chunked_qs(qs, order_by='pk', chunksize=1000, yield_values=True):
    """
    Iterate over a queryset in chunks of ``chunksize`` rows.

    Uses keyset pagination: each chunk is fetched by filtering strictly
    past the last seen value of ``order_by`` (a leading ``-`` selects
    descending order with a ``lt`` filter).  NOTE(review): this assumes
    values of ``order_by`` are unique — rows sharing the boundary value
    would be skipped by the strict comparison; confirm callers only pass
    unique fields (the default ``pk`` is safe).

    When ``yield_values`` is true, rows are yielded one at a time;
    otherwise each chunk is yielded as a tuple (including a final empty
    tuple when the queryset is exhausted).
    """
    qs = qs.order_by(order_by)
    if order_by.startswith('-'):
        fn = 'lt'
        ord_field = order_by[1:]
    else:
        fn = 'gt'
        ord_field = order_by
    last_ordered_val = None
    empty = False
    while not empty:
        empty = True
        chunk_qs = qs
        if last_ordered_val is not None:
            # Continue strictly after (or before, when descending) the
            # last row seen in the previous chunk.
            chunk_qs = chunk_qs.filter(**{'{}__{}'.format(ord_field, fn): last_ordered_val})
        chunk_qs = chunk_qs[:chunksize]
        if yield_values:
            row = None
            for row in chunk_qs:
                yield row
            if row is not None:
                last_ordered_val = getattr(row, ord_field)
                empty = False
        else:
            rows = tuple(chunk_qs)
            yield rows
            if rows:
                last_ordered_val = getattr(rows[-1], ord_field)
                empty = False
        # Drop references to the processed chunk before fetching the next.
        gc.collect()
def each_fields(for_fields, fields):
    """
    Build every ``'<prefix>__<field>'`` lookup combination.

    Instead of spelling out every combination::

        select_related(
            'field__related_field1__text1', 'field__related_field1__text2',
            'field__related_field2__text1', 'field__related_field2__text2',
        )

    write::

        select_related(*each_fields(
            ['field__related_field1', 'field__related_field2'],
            ['text1', 'text2']))

    ``for_fields`` may be a single lookup path or an iterable of paths,
    e.g. ``each_fields('field__related_field1', ['text1', 'text2'])``.
    Returns a list of the combinations (duplicates removed; order is not
    guaranteed).
    """
    # ``basestring`` only exists on Python 2; referencing it directly made
    # every call raise NameError on Python 3.  Fall back to ``str`` there
    # while keeping unicode/bytes acceptance on Python 2.
    try:
        string_types = basestring  # noqa: F821 (Python 2 only)
    except NameError:
        string_types = str
    if isinstance(for_fields, string_types):
        for_fields = (for_fields,)
    combos = set()
    for prefix in for_fields:
        for field in fields:
            combos.add(prefix + '__' + field)
    return list(combos)
| mit |
mcollins12321/anita | venv/lib/python2.7/site-packages/setuptools/compat.py | 456 | 2094 | import sys
import itertools
# Version flags used to pick the Python 2 or Python 3 spellings below.
PY3 = sys.version_info >= (3,)
PY2 = not PY3

if PY2:
    basestring = basestring
    import __builtin__ as builtins
    import ConfigParser
    from StringIO import StringIO
    BytesIO = StringIO
    func_code = lambda o: o.func_code
    func_globals = lambda o: o.func_globals
    im_func = lambda o: o.im_func
    from htmlentitydefs import name2codepoint
    import httplib
    from BaseHTTPServer import HTTPServer
    from SimpleHTTPServer import SimpleHTTPRequestHandler
    from BaseHTTPServer import BaseHTTPRequestHandler
    iteritems = lambda o: o.iteritems()
    long_type = long
    maxsize = sys.maxint
    unichr = unichr
    unicode = unicode
    bytes = str
    from urllib import url2pathname, splittag, pathname2url
    import urllib2
    from urllib2 import urlopen, HTTPError, URLError, unquote, splituser
    from urlparse import urlparse, urlunparse, urljoin, urlsplit, urlunsplit
    filterfalse = itertools.ifilterfalse

    # The three-argument raise statement is a syntax error on Python 3, so
    # it must be hidden inside exec() for this module to compile there.
    exec("""def reraise(tp, value, tb=None):
    raise tp, value, tb""")

if PY3:
    basestring = str
    import builtins
    import configparser as ConfigParser
    from io import StringIO, BytesIO
    func_code = lambda o: o.__code__
    func_globals = lambda o: o.__globals__
    im_func = lambda o: o.__func__
    from html.entities import name2codepoint
    import http.client as httplib
    from http.server import HTTPServer, SimpleHTTPRequestHandler
    from http.server import BaseHTTPRequestHandler
    iteritems = lambda o: o.items()
    long_type = int
    maxsize = sys.maxsize
    unichr = chr
    unicode = str
    bytes = bytes
    from urllib.error import HTTPError, URLError
    import urllib.request as urllib2
    from urllib.request import urlopen, url2pathname, pathname2url
    from urllib.parse import (
        urlparse, urlunparse, unquote, splituser, urljoin, urlsplit,
        urlunsplit, splittag,
    )
    filterfalse = itertools.filterfalse

    def reraise(tp, value, tb=None):
        # Re-raise ``value`` with traceback ``tb`` attached.
        if value.__traceback__ is not tb:
            raise value.with_traceback(tb)
        raise value
| mit |
undoware/neutron-drive | google_appengine/lib/django_1_3/django/contrib/gis/utils/geoip.py | 316 | 14811 | """
This module houses the GeoIP object, a ctypes wrapper for the MaxMind GeoIP(R)
C API (http://www.maxmind.com/app/c). This is an alternative to the GPL
licensed Python GeoIP interface provided by MaxMind.
GeoIP(R) is a registered trademark of MaxMind, LLC of Boston, Massachusetts.
For IP-based geolocation, this module requires the GeoLite Country and City
datasets, in binary format (CSV will not work!). The datasets may be
downloaded from MaxMind at http://www.maxmind.com/download/geoip/database/.
Grab GeoIP.dat.gz and GeoLiteCity.dat.gz, and unzip them in the directory
corresponding to settings.GEOIP_PATH. See the GeoIP docstring and examples
below for more details.
TODO: Verify compatibility with Windows.
Example:
>>> from django.contrib.gis.utils import GeoIP
>>> g = GeoIP()
>>> g.country('google.com')
{'country_code': 'US', 'country_name': 'United States'}
>>> g.city('72.14.207.99')
{'area_code': 650,
'city': 'Mountain View',
'country_code': 'US',
'country_code3': 'USA',
'country_name': 'United States',
'dma_code': 807,
'latitude': 37.419200897216797,
'longitude': -122.05740356445312,
'postal_code': '94043',
'region': 'CA'}
>>> g.lat_lon('salon.com')
(37.789798736572266, -122.39420318603516)
>>> g.lon_lat('uh.edu')
(-95.415199279785156, 29.77549934387207)
>>> g.geos('24.124.1.80').wkt
'POINT (-95.2087020874023438 39.0392990112304688)'
"""
import os, re
from ctypes import c_char_p, c_float, c_int, Structure, CDLL, POINTER
from ctypes.util import find_library
from django.conf import settings
# Settings must be configured before the GEOIP_* attributes can be read.
if not settings.configured: settings.configure()

# Creating the settings dictionary with any settings, if needed.
GEOIP_SETTINGS = dict((key, getattr(settings, key))
                      for key in ('GEOIP_PATH', 'GEOIP_LIBRARY_PATH', 'GEOIP_COUNTRY', 'GEOIP_CITY')
                      if hasattr(settings, key))
lib_path = GEOIP_SETTINGS.get('GEOIP_LIBRARY_PATH', None)

# GeoIP Exception class.
class GeoIPException(Exception): pass

# The shared library for the GeoIP C API.  May be downloaded
# from http://www.maxmind.com/download/geoip/api/c/
if lib_path:
    lib_name = None
else:
    # TODO: Is this really the library name for Windows?
    lib_name = 'GeoIP'

# Getting the path to the GeoIP library.
if lib_name: lib_path = find_library(lib_name)
if lib_path is None: raise GeoIPException('Could not find the GeoIP library (tried "%s"). '
                                          'Try setting GEOIP_LIBRARY_PATH in your settings.' % lib_name)
lgeoip = CDLL(lib_path)

# Regular expressions for recognizing IP addresses and the GeoIP
# free database editions.
ipregex = re.compile(r'^(?P<w>\d\d?\d?)\.(?P<x>\d\d?\d?)\.(?P<y>\d\d?\d?)\.(?P<z>\d\d?\d?)$')
free_regex = re.compile(r'^GEO-\d{3}FREE')
lite_regex = re.compile(r'^GEO-\d{3}LITE')
#### GeoIP C Structure definitions ####
class GeoIPRecord(Structure):
    # Mirrors the C ``GeoIPRecord`` struct; field names, order and ctypes
    # must match the layout of the installed libGeoIP exactly.
    _fields_ = [('country_code', c_char_p),
                ('country_code3', c_char_p),
                ('country_name', c_char_p),
                ('region', c_char_p),
                ('city', c_char_p),
                ('postal_code', c_char_p),
                ('latitude', c_float),
                ('longitude', c_float),
                # TODO: In 1.4.6 this changed from `int dma_code;` to
                # `union {int metro_code; int dma_code;};`.  Change
                # to a `ctypes.Union` in to accomodate in future when
                # pre-1.4.6 versions are no longer distributed.
                ('dma_code', c_int),
                ('area_code', c_int),
                # TODO: The following structure fields were added in 1.4.3 --
                # uncomment these fields when sure previous versions are no
                # longer distributed by package maintainers.
                #('charset', c_int),
                #('continent_code', c_char_p),
                ]
# Opaque handle for an open GeoIP database (contents never inspected).
class GeoIPTag(Structure): pass

#### ctypes function prototypes ####
RECTYPE = POINTER(GeoIPRecord)
DBTYPE = POINTER(GeoIPTag)

# For retrieving records by name or address.
def record_output(func):
    # Tag a libGeoIP routine as returning a GeoIPRecord pointer.
    func.restype = RECTYPE
    return func
rec_by_addr = record_output(lgeoip.GeoIP_record_by_addr)
rec_by_name = record_output(lgeoip.GeoIP_record_by_name)

# For opening & closing GeoIP database files.
geoip_open = lgeoip.GeoIP_open
geoip_open.restype = DBTYPE
geoip_close = lgeoip.GeoIP_delete
geoip_close.argtypes = [DBTYPE]
geoip_close.restype = None

# String output routines.
def string_output(func):
    # Tag a libGeoIP routine as returning a C string.
    func.restype = c_char_p
    return func

geoip_dbinfo = string_output(lgeoip.GeoIP_database_info)
cntry_code_by_addr = string_output(lgeoip.GeoIP_country_code_by_addr)
cntry_code_by_name = string_output(lgeoip.GeoIP_country_code_by_name)
cntry_name_by_addr = string_output(lgeoip.GeoIP_country_name_by_addr)
cntry_name_by_name = string_output(lgeoip.GeoIP_country_name_by_name)
#### GeoIP class ####
class GeoIP(object):
    # The flags for GeoIP memory caching.
    # GEOIP_STANDARD - read database from filesystem, uses least memory.
    #
    # GEOIP_MEMORY_CACHE - load database into memory, faster performance
    #        but uses more memory
    #
    # GEOIP_CHECK_CACHE - check for updated database.  If database has been updated,
    #        reload filehandle and/or memory cache.
    #
    # GEOIP_INDEX_CACHE - just cache
    #        the most frequently accessed index portion of the database, resulting
    #        in faster lookups than GEOIP_STANDARD, but less memory usage than
    #        GEOIP_MEMORY_CACHE - useful for larger databases such as
    #        GeoIP Organization and GeoIP City.  Note, for GeoIP Country, Region
    #        and Netspeed databases, GEOIP_INDEX_CACHE is equivalent to GEOIP_MEMORY_CACHE
    #
    GEOIP_STANDARD = 0
    GEOIP_MEMORY_CACHE = 1
    GEOIP_CHECK_CACHE = 2
    GEOIP_INDEX_CACHE = 4
    # Valid cache flags; values are unused (lookup is only a validity check).
    cache_options = dict((opt, None) for opt in (0, 1, 2, 4))

    # Paths to the data files actually opened (for error messages).
    _city_file = ''
    _country_file = ''

    # Initially, pointers to GeoIP file references are NULL.
    _city = None
    _country = None

    def __init__(self, path=None, cache=0, country=None, city=None):
        """
        Initializes the GeoIP object, no parameters are required to use default
        settings.  Keyword arguments may be passed in to customize the locations
        of the GeoIP data sets.

        * path: Base directory to where GeoIP data is located or the full path
            to where the city or country data files (*.dat) are located.
            Assumes that both the city and country data sets are located in
            this directory; overrides the GEOIP_PATH settings attribute.

        * cache: The cache settings when opening up the GeoIP datasets,
            and may be an integer in (0, 1, 2, 4) corresponding to
            the GEOIP_STANDARD, GEOIP_MEMORY_CACHE, GEOIP_CHECK_CACHE,
            and GEOIP_INDEX_CACHE `GeoIPOptions` C API settings,
            respectively.  Defaults to 0, meaning that the data is read
            from the disk.

        * country: The name of the GeoIP country data file.  Defaults to
            'GeoIP.dat'; overrides the GEOIP_COUNTRY settings attribute.

        * city: The name of the GeoIP city data file.  Defaults to
            'GeoLiteCity.dat'; overrides the GEOIP_CITY settings attribute.
        """
        # Checking the given cache option.
        if cache in self.cache_options:
            self._cache = self.cache_options[cache]
        else:
            raise GeoIPException('Invalid caching option: %s' % cache)

        # Getting the GeoIP data path.
        if not path:
            path = GEOIP_SETTINGS.get('GEOIP_PATH', None)
            if not path: raise GeoIPException('GeoIP path must be provided via parameter or the GEOIP_PATH setting.')
        if not isinstance(path, basestring):
            raise TypeError('Invalid path type: %s' % type(path).__name__)

        if os.path.isdir(path):
            # Constructing the GeoIP database filenames using the settings
            # dictionary.  If the database files for the GeoLite country
            # and/or city datasets exist, then try and open them.
            country_db = os.path.join(path, country or GEOIP_SETTINGS.get('GEOIP_COUNTRY', 'GeoIP.dat'))
            if os.path.isfile(country_db):
                self._country = geoip_open(country_db, cache)
                self._country_file = country_db

            city_db = os.path.join(path, city or GEOIP_SETTINGS.get('GEOIP_CITY', 'GeoLiteCity.dat'))
            if os.path.isfile(city_db):
                self._city = geoip_open(city_db, cache)
                self._city_file = city_db
        elif os.path.isfile(path):
            # Otherwise, some detective work will be needed to figure
            # out whether the given database path is for the GeoIP country
            # or city databases.
            ptr = geoip_open(path, cache)
            info = geoip_dbinfo(ptr)
            if lite_regex.match(info):
                # GeoLite City database detected.
                self._city = ptr
                self._city_file = path
            elif free_regex.match(info):
                # GeoIP Country database detected.
                self._country = ptr
                self._country_file = path
            else:
                raise GeoIPException('Unable to recognize database edition: %s' % info)
        else:
            raise GeoIPException('GeoIP path must be a valid file or directory.')

    def __del__(self):
        # Cleaning any GeoIP file handles lying around.
        if self._country: geoip_close(self._country)
        if self._city: geoip_close(self._city)

    def _check_query(self, query, country=False, city=False, city_or_country=False):
        "Helper routine for checking the query and database availability."
        # Making sure a string was passed in for the query.
        if not isinstance(query, basestring):
            raise TypeError('GeoIP query must be a string, not type %s' % type(query).__name__)

        # Extra checks for the existence of country and city databases.
        if city_or_country and not (self._country or self._city):
            raise GeoIPException('Invalid GeoIP country and city data files.')
        elif country and not self._country:
            raise GeoIPException('Invalid GeoIP country data file: %s' % self._country_file)
        elif city and not self._city:
            raise GeoIPException('Invalid GeoIP city data file: %s' % self._city_file)

    def city(self, query):
        """
        Returns a dictionary of city information for the given IP address or
        Fully Qualified Domain Name (FQDN).  Some information in the dictionary
        may be undefined (None).
        """
        self._check_query(query, city=True)
        if ipregex.match(query):
            # If an IP address was passed in
            ptr = rec_by_addr(self._city, c_char_p(query))
        else:
            # If a FQDN was passed in.
            ptr = rec_by_name(self._city, c_char_p(query))

        # Checking the pointer to the C structure, if valid pull out elements
        # into a dictionary and return.
        if bool(ptr):
            record = ptr.contents
            return dict((tup[0], getattr(record, tup[0])) for tup in record._fields_)
        else:
            return None

    def country_code(self, query):
        "Returns the country code for the given IP Address or FQDN."
        self._check_query(query, city_or_country=True)
        if self._country:
            if ipregex.match(query): return cntry_code_by_addr(self._country, query)
            else: return cntry_code_by_name(self._country, query)
        else:
            # Fall back to the city database when no country database is open.
            return self.city(query)['country_code']

    def country_name(self, query):
        "Returns the country name for the given IP Address or FQDN."
        self._check_query(query, city_or_country=True)
        if self._country:
            if ipregex.match(query): return cntry_name_by_addr(self._country, query)
            else: return cntry_name_by_name(self._country, query)
        else:
            return self.city(query)['country_name']

    def country(self, query):
        """
        Returns a dictionary with the country code and name when given an
        IP address or a Fully Qualified Domain Name (FQDN).  For example, both
        '24.124.1.80' and 'djangoproject.com' are valid parameters.
        """
        # Returning the country code and name
        return {'country_code' : self.country_code(query),
                'country_name' : self.country_name(query),
                }

    #### Coordinate retrieval routines ####
    def coords(self, query, ordering=('longitude', 'latitude')):
        "Returns a coordinate tuple for the query, in the given ordering."
        cdict = self.city(query)
        if cdict is None: return None
        else: return tuple(cdict[o] for o in ordering)

    def lon_lat(self, query):
        "Returns a tuple of the (longitude, latitude) for the given query."
        return self.coords(query)

    def lat_lon(self, query):
        "Returns a tuple of the (latitude, longitude) for the given query."
        return self.coords(query, ('latitude', 'longitude'))

    def geos(self, query):
        "Returns a GEOS Point object for the given query."
        ll = self.lon_lat(query)
        if ll:
            # Imported lazily to avoid requiring GEOS for non-geometry use.
            from django.contrib.gis.geos import Point
            return Point(ll, srid=4326)
        else:
            return None

    #### GeoIP Database Information Routines ####
    def country_info(self):
        "Returns information about the GeoIP country database."
        if self._country is None:
            ci = 'No GeoIP Country data in "%s"' % self._country_file
        else:
            ci = geoip_dbinfo(self._country)
        return ci
    country_info = property(country_info)

    def city_info(self):
        "Retuns information about the GeoIP city database."
        if self._city is None:
            ci = 'No GeoIP City data in "%s"' % self._city_file
        else:
            ci = geoip_dbinfo(self._city)
        return ci
    city_info = property(city_info)

    def info(self):
        "Returns information about all GeoIP databases in use."
        return 'Country:\n\t%s\nCity:\n\t%s' % (self.country_info, self.city_info)
    info = property(info)

    #### Methods for compatibility w/the GeoIP-Python API. ####
    @classmethod
    def open(cls, full_path, cache):
        return GeoIP(full_path, cache)

    def _rec_by_arg(self, arg):
        if self._city:
            return self.city(arg)
        else:
            return self.country(arg)

    # Aliases matching the GeoIP-Python API names.
    region_by_addr = city
    region_by_name = city
    record_by_addr = _rec_by_arg
    record_by_name = _rec_by_arg
    country_code_by_addr = country_code
    country_code_by_name = country_code
    country_name_by_addr = country_name
    country_name_by_name = country_name
| bsd-3-clause |
kkopachev/thumbor | thumbor/detectors/face_detector/__init__.py | 2 | 1450 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# thumbor imaging service
# https://github.com/thumbor/thumbor/wiki
# Licensed under the MIT license:
# http://www.opensource.org/licenses/mit-license
# Copyright (c) 2011 globo.com thumbor@googlegroups.com
from thumbor.detectors.local_detector import CascadeLoaderDetector
from thumbor.point import FocalPoint
from thumbor.utils import logger
HAIR_OFFSET = 0.12
class Detector(CascadeLoaderDetector):
    # Cascade-based face detector.  Each detected square is extended upward
    # by HAIR_OFFSET (a fraction of the face height) before being recorded
    # as a focal point, so some forehead/hair is kept in crops.

    def __init__(self, context, index, detectors):
        super(Detector, self).__init__(context, index, detectors)
        self.load_cascade_file(__file__, self.context.config.FACE_DETECTOR_CASCADE_FILE)

    def __add_hair_offset(self, top, height):
        # Raise the top edge by height * HAIR_OFFSET, clamped at 0.
        top = max(0, top - height * HAIR_OFFSET)
        return top

    async def detect(self):
        try:
            features = self.get_features()
        except Exception as error:
            # Detection failures are non-fatal: log and delegate to the
            # next detector in the chain.
            logger.exception(error)
            logger.warning("Error during face detection; skipping to next detector")
            return await self.next()

        if features:
            for (left, top, width, height), _ in features:
                top = self.__add_hair_offset(top, height)
                self.context.request.focal_points.append(
                    FocalPoint.from_square(
                        left, top, width, height, origin="Face Detection"
                    )
                )
            # Faces found: stop here instead of running further detectors.
            return
        await self.next()
| mit |
mano3m/CouchPotatoServer | libs/migrate/versioning/schema.py | 53 | 7595 | """
Database schema version management.
"""
import sys
import logging
from sqlalchemy import (Table, Column, MetaData, String, Text, Integer,
create_engine)
from sqlalchemy.sql import and_
from sqlalchemy import exceptions as sa_exceptions
from sqlalchemy.sql import bindparam
from migrate import exceptions
from migrate.changeset import SQLA_07
from migrate.versioning import genmodel, schemadiff
from migrate.versioning.repository import Repository
from migrate.versioning.util import load_model
from migrate.versioning.version import VerNum
log = logging.getLogger(__name__)
class ControlledSchema(object):
    """A database under version control"""

    def __init__(self, engine, repository):
        if isinstance(repository, basestring):
            repository = Repository(repository)
        self.engine = engine
        self.repository = repository
        self.meta = MetaData(engine)
        self.load()

    def __eq__(self, other):
        """Compare two schemas by repositories and versions"""
        return (self.repository is other.repository \
            and self.version == other.version)

    def load(self):
        """Load controlled schema version info from DB"""
        tname = self.repository.version_table
        try:
            if not hasattr(self, 'table') or self.table is None:
                self.table = Table(tname, self.meta, autoload=True)

            result = self.engine.execute(self.table.select(
                self.table.c.repository_id == str(self.repository.id)))

            data = list(result)[0]
        except:
            # Any failure here means the DB is not under version control;
            # re-raise as DatabaseNotControlledError with the original
            # traceback (Python 2 three-argument raise syntax).
            cls, exc, tb = sys.exc_info()
            raise exceptions.DatabaseNotControlledError, exc.__str__(), tb

        self.version = data['version']
        return data

    def drop(self):
        """
        Remove version control from a database.
        """
        if SQLA_07:
            try:
                self.table.drop()
            except sa_exceptions.DatabaseError:
                raise exceptions.DatabaseNotControlledError(str(self.table))
        else:
            try:
                self.table.drop()
            except (sa_exceptions.SQLError):
                raise exceptions.DatabaseNotControlledError(str(self.table))

    def changeset(self, version=None):
        """API to Changeset creation.

        Uses self.version for start version and engine.name
        to get database name.
        """
        database = self.engine.name
        start_ver = self.version
        changeset = self.repository.changeset(database, start_ver, version)
        return changeset

    def runchange(self, ver, change, step):
        startver = ver
        endver = ver + step
        # Current database version must be correct! Don't run if corrupt!
        if self.version != startver:
            raise exceptions.InvalidVersionError("%s is not %s" % \
                (self.version, startver))
        # Run the change
        change.run(self.engine, step)
        # Update/refresh database version
        self.update_repository_table(startver, endver)
        self.load()

    def update_repository_table(self, startver, endver):
        """Update version_table with new information"""
        update = self.table.update(and_(self.table.c.version == int(startver),
            self.table.c.repository_id == str(self.repository.id)))
        self.engine.execute(update, version=int(endver))

    def upgrade(self, version=None):
        """
        Upgrade (or downgrade) to a specified version, or latest version.
        """
        changeset = self.changeset(version)
        for ver, change in changeset:
            self.runchange(ver, change, changeset.step)

    def update_db_from_model(self, model):
        """
        Modify the database to match the structure of the current Python model.
        """
        model = load_model(model)

        diff = schemadiff.getDiffOfModelAgainstDatabase(
            model, self.engine, excludeTables=[self.repository.version_table]
            )
        genmodel.ModelGenerator(diff,self.engine).runB2A()

        self.update_repository_table(self.version, int(self.repository.latest))

        self.load()

    @classmethod
    def create(cls, engine, repository, version=None):
        """
        Declare a database to be under a repository's version control.

        :raises: :exc:`DatabaseAlreadyControlledError`
        :returns: :class:`ControlledSchema`
        """
        # Confirm that the version # is valid: positive, integer,
        # exists in repos
        if isinstance(repository, basestring):
            repository = Repository(repository)
        version = cls._validate_version(repository, version)
        table = cls._create_table_version(engine, repository, version)
        # TODO: history table
        # Load repository information and return
        return cls(engine, repository)

    @classmethod
    def _validate_version(cls, repository, version):
        """
        Ensures this is a valid version number for this repository.

        :raises: :exc:`InvalidVersionError` if invalid
        :return: valid version number
        """
        if version is None:
            version = 0
        try:
            version = VerNum(version) # raises valueerror
            if version < 0 or version > repository.latest:
                raise ValueError()
        except ValueError:
            raise exceptions.InvalidVersionError(version)
        return version

    @classmethod
    def _create_table_version(cls, engine, repository, version):
        """
        Creates the versioning table in a database.

        :raises: :exc:`DatabaseAlreadyControlledError`
        """
        # Create tables
        tname = repository.version_table
        meta = MetaData(engine)

        table = Table(
            tname, meta,
            Column('repository_id', String(250), primary_key=True),
            Column('repository_path', Text),
            Column('version', Integer), )

        # there can be multiple repositories/schemas in the same db
        if not table.exists():
            table.create()

        # test for existing repository_id
        s = table.select(table.c.repository_id == bindparam("repository_id"))
        result = engine.execute(s, repository_id=repository.id)
        if result.fetchone():
            raise exceptions.DatabaseAlreadyControlledError

        # Insert data
        engine.execute(table.insert().values(
            repository_id=repository.id,
            repository_path=repository.path,
            version=int(version)))
        return table

    @classmethod
    def compare_model_to_db(cls, engine, model, repository):
        """
        Compare the current model against the current database.
        """
        if isinstance(repository, basestring):
            repository = Repository(repository)
        model = load_model(model)

        diff = schemadiff.getDiffOfModelAgainstDatabase(
            model, engine, excludeTables=[repository.version_table])
        return diff

    @classmethod
    def create_model(cls, engine, repository, declarative=False):
        """
        Dump the current database as a Python model.
        """
        if isinstance(repository, basestring):
            repository = Repository(repository)

        diff = schemadiff.getDiffOfModelAgainstDatabase(
            MetaData(), engine, excludeTables=[repository.version_table]
            )
        return genmodel.ModelGenerator(diff, engine, declarative).genBDefinition()
| gpl-3.0 |
btoconnor/ddby | ddby/serializer.py | 1 | 1436 | # -*- coding: utf-8 -*-
from .money import Money
class Serializer:
    """Serialize a Money object for sending over the wire.

    It is not recommended to use the utility of this class
    to store in the database.  This would prevent you from
    doing anything with aggregate functions in SQL, such
    as SUM, MAX, MIN, etc.  Chances are, it's not worth the
    trouble.  This is really so that you can exchange
    monetary values between stacks that communicate over
    network protocols.

    Wire format: ``"<version>:<amount>:<currency>:<precision>"``.
    """

    # Format version emitted by serialize().
    VERSION = 1

    def serialize(self, money):
        """Convert a money object into a serialized string value."""
        return "{0}:{1}:{2}:{3}".format(
            self.VERSION,
            money.precise_amount,
            money.currency.code,
            money.precision
        )

    def unserialize(self, money_string):
        """Given a string, return the Money object it represents.

        Raises ``ValueError`` when the string does not have exactly four
        colon-separated fields, or when the amount or precision fields
        are not integers.
        """
        results = money_string.split(':')
        if len(results) != 4:
            raise ValueError("String {0} is of incorrect format to parse".format(money_string))
        # TODO: Validate version
        version, amount, currency_code, precision = results
        # The original code validated int(results[1]) and then immediately
        # shadowed the result with the raw string from the unpack above;
        # convert once here and keep the converted value.
        try:
            amount = int(amount)
        except ValueError:
            raise ValueError("Amount {0} is not an integer".format(amount))
        try:
            precision = int(precision)
        except ValueError:
            raise ValueError("Precision {0} is not an integer".format(precision))
        return Money(
            amount,
            currency_code,
            precision=precision
        )
| mit |
lordmuffin/aws-cfn-plex | functions/credstash/cryptography/utils.py | 13 | 3919 | # This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
# for complete details.
from __future__ import absolute_import, division, print_function
import abc
import binascii
import inspect
import sys
import warnings
# the functions deprecated in 1.0 and 1.4 are on an arbitrarily extended
# deprecation cycle and should not be removed until we agree on when that
# cycle ends.  All three aliases currently point at the same stdlib
# DeprecationWarning class.
DeprecatedIn10 = DeprecationWarning
DeprecatedIn14 = DeprecationWarning
DeprecatedIn16 = DeprecationWarning
def read_only_property(name):
    """Return a read-only property exposing the attribute called ``name``."""
    def getter(self):
        return getattr(self, name)
    return property(getter)
def register_interface(iface):
    """Class-decorator factory: the decorated class is checked against
    ``iface`` via verify_interface() and registered with ``iface``."""
    def register_decorator(klass):
        verify_interface(iface, klass)
        iface.register(klass)
        return klass
    return register_decorator
def register_interface_if(predicate, iface):
    """Like register_interface(), but only verifies/registers when
    ``predicate`` is truthy; otherwise the class is returned unchanged."""
    def register_decorator(klass):
        if predicate:
            verify_interface(iface, klass)
            iface.register(klass)
        return klass
    return register_decorator
if hasattr(int, "from_bytes"):
    int_from_bytes = int.from_bytes
else:
    # Python 2 fallback: only the big-endian, unsigned case is supported.
    def int_from_bytes(data, byteorder, signed=False):
        assert byteorder == 'big'
        assert not signed

        # call bytes() on data to allow the use of bytearrays
        return int(bytes(data).encode('hex'), 16)
def int_to_bytes(integer, length=None):
    """Serialize *integer* big-endian; zero-pad to *length* bytes when given."""
    hex_repr = '%x' % integer
    if length is None:
        width = len(hex_repr)
    else:
        width = length * 2
    # Round the hex digit count up to a whole number of bytes first.
    return binascii.unhexlify(hex_repr.zfill(width + (width & 1)))
class InterfaceNotImplemented(Exception):
    # Raised by verify_interface() when a class is missing a method
    # required by an abstract interface, or implements it with the
    # wrong signature.
    pass
# inspect.signature was added in Python 3.3; fall back to getargspec on
# older interpreters so verify_interface can still compare signatures.
if hasattr(inspect, "signature"):
    signature = inspect.signature
else:
    signature = inspect.getargspec
def verify_interface(iface, klass):
    # Ensure klass provides every abstract method of iface with a
    # signature identical to the interface's; raises
    # InterfaceNotImplemented on any mismatch.
    for method in iface.__abstractmethods__:
        if not hasattr(klass, method):
            raise InterfaceNotImplemented(
                "{0} is missing a {1!r} method".format(klass, method)
            )
        if isinstance(getattr(iface, method), abc.abstractproperty):
            # Can't properly verify these yet.
            continue
        sig = signature(getattr(iface, method))
        actual = signature(getattr(klass, method))
        if sig != actual:
            raise InterfaceNotImplemented(
                "{0}.{1}'s signature differs from the expected. Expected: "
                "{2!r}. Received: {3!r}".format(
                    klass, method, sig, actual
                )
            )
# int.bit_length() exists from Python 2.7 on; emulate it via bin() for
# 2.6 (len("-0b...")/len("0b...") minus the prefix and any sign).
if sys.version_info >= (2, 7):
    def bit_length(x):
        return x.bit_length()
else:
    def bit_length(x):
        return len(bin(x)) - (2 + (x <= 0))
class _DeprecatedValue(object):
    # Wrapper pairing a module attribute with the deprecation warning
    # that _ModuleWithDeprecations emits when the attribute is accessed
    # or deleted.
    def __init__(self, value, message, warning_class):
        self.value = value
        self.message = message
        self.warning_class = warning_class
class _ModuleWithDeprecations(object):
    # Proxy installed in sys.modules in place of the real module; it
    # unwraps _DeprecatedValue attributes and emits their warning on
    # access or deletion, delegating everything else to the module.
    def __init__(self, module):
        # Write via __dict__ directly to avoid recursing into our own
        # __setattr__ (which forwards to the wrapped module).
        self.__dict__["_module"] = module

    def __getattr__(self, attr):
        obj = getattr(self._module, attr)
        if isinstance(obj, _DeprecatedValue):
            warnings.warn(obj.message, obj.warning_class, stacklevel=2)
            obj = obj.value
        return obj

    def __setattr__(self, attr, value):
        setattr(self._module, attr, value)

    def __delattr__(self, attr):
        obj = getattr(self._module, attr)
        if isinstance(obj, _DeprecatedValue):
            warnings.warn(obj.message, obj.warning_class, stacklevel=2)

        delattr(self._module, attr)

    def __dir__(self):
        return ["_module"] + dir(self._module)
def deprecated(value, module_name, message, warning_class):
    """Wrap *value* so attribute access on its module emits *message*.

    Ensures the named module in ``sys.modules`` is replaced by a
    ``_ModuleWithDeprecations`` proxy, then returns the wrapped value
    for assignment as a module attribute.
    """
    module = sys.modules[module_name]
    if not isinstance(module, _ModuleWithDeprecations):
        module = _ModuleWithDeprecations(module)
        sys.modules[module_name] = module
    return _DeprecatedValue(value, message, warning_class)
| mit |
Sup3Roque/Pancas | plugin.video.loganaddon/resources/lib/libraries/playcount_cp.py | 9 | 4289 | # -*- coding: utf-8 -*-
'''
Specto Add-on
Copyright (C) 2015 lambda
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
'''
import sys,xbmc
from resources.lib.libraries import control
from resources.lib.libraries import trakt
def movies(title, year, imdb, watched):
    """Flag a movie watched/unwatched, then refresh the container.

    When trakt credentials are configured the change goes to trakt;
    otherwise the local metahandler store is updated.  All backend
    errors are deliberately swallowed (best effort).
    """
    use_trakt = trakt.getTraktCredentials() != False
    watched = int(watched)
    if not use_trakt:
        try:
            from metahandler import metahandlers
            metaget = metahandlers.MetaData(preparezip=False)
            metaget.get_meta('movie', title, year=year)
            metaget.change_watched('movie', '', imdb, season='', episode='', year='', watched=watched)
        except:
            pass
    else:
        try:
            # watched == 7 selects the "mark as watched" path.
            if watched == 7:
                trakt.markMovieAsWatched(imdb)
            else:
                trakt.markMovieAsNotWatched(imdb)
            trakt.syncMovies()
        except:
            pass
    control.refresh()
def episodes(imdb, tvdb, season, episode, watched):
    """Mark a single episode watched/unwatched, then refresh the container.

    Updates trakt when credentials are configured, otherwise the local
    metahandler store.  watched == 7 selects the "mark as watched"
    path; any other value marks it not watched.  Backend errors are
    deliberately swallowed (best effort).
    """
    # True when trakt credentials are configured.
    traktMode = False if trakt.getTraktCredentials() == False else True
    watched = int(watched)
    control.log('>>> Watched %s' % watched)
    try:
        # In trakt mode, skip the local metadata update entirely.
        if traktMode == True: raise Exception()
        from metahandler import metahandlers
        metaget = metahandlers.MetaData(preparezip=False)
        metaget.get_meta('tvshow', '', imdb_id=imdb)
        metaget.get_episode_meta('', imdb, season, episode)
        metaget.change_watched('episode', '', imdb, season=season, episode=episode, year='', watched=watched)
    except:
        pass
    try:
        # In local mode, skip the trakt update.
        if traktMode == False: raise Exception()
        if watched == 7: trakt.markEpisodeAsWatched(tvdb, season, episode)
        else: trakt.markEpisodeAsNotWatched(tvdb, season, episode)
        trakt.syncTVShows()
    except:
        pass
    control.refresh()
def tvshows(tvshowtitle, year, imdb, tvdb, season, watched):
    """Mark a whole show (or one season) watched/unwatched, then refresh.

    Local (metahandler) mode iterates every episode behind a progress
    dialog; trakt mode marks the show in a single server-side call.
    Backend errors are deliberately swallowed (best effort).
    """
    traktMode = False if trakt.getTraktCredentials() == False else True
    watched = int(watched)
    try:
        # In trakt mode, skip the per-episode local update below.
        if traktMode == True: raise Exception()
        from metahandler import metahandlers
        from resources.lib.indexers import episodes
        metaget = metahandlers.MetaData(preparezip=False)
        dialog = control.progressDialog
        dialog.create(control.addonInfo('name'), str(tvshowtitle))
        dialog.update(0, str(tvshowtitle), control.lang(30451).encode('utf-8') + '...')
        metaget.get_meta('tvshow', '', imdb_id=imdb)
        items = episodes.episodes().get(tvshowtitle, year, imdb, '0', tvdb, '0', idx=False)
        # Restrict to the requested season when one was given.
        try: items = [i for i in items if int('%01d' % int(season)) == int('%01d' % int(i['season']))]
        except: pass
        items = [{'name': i['name'], 'season': int('%01d' % int(i['season'])), 'episode': int('%01d' % int(i['episode']))} for i in items]
        for i in range(len(items)):
            # Bail out cleanly on Kodi shutdown or user cancel.
            if xbmc.abortRequested == True: return sys.exit()
            if dialog.iscanceled(): return dialog.close()
            dialog.update(int((100 / float(len(items))) * i), str(tvshowtitle), str(items[i]['name']))
            season, episode = items[i]['season'], items[i]['episode']
            metaget.get_episode_meta('', imdb, season, episode)
            metaget.change_watched('episode', '', imdb, season=season, episode=episode, year='', watched=watched)
        try: dialog.close()
        except: pass
    except:
        try: dialog.close()
        except: pass
    try:
        # In local mode, skip the trakt update.
        if traktMode == False: raise Exception()
        if watched == 7: trakt.markTVShowAsWatched(tvdb)
        else: trakt.markTVShowAsNotWatched(tvdb)
        trakt.syncTVShows()
    except:
        pass
    control.refresh()
| gpl-2.0 |
kurdd/Oauth | social_auth/backends/contrib/yandex.py | 3 | 4660 | """
Yandex OpenID and OAuth2 support.
This contribution adds support for Yandex.ru OpenID service in the form
openid.yandex.ru/user. Username is retrieved from the identity url.
If username is not specified, OpenID 2.0 url used for authentication.
"""
from django.utils import simplejson
from urllib import urlencode
from urlparse import urlparse, urlsplit
from social_auth.backends import OpenIDBackend, OpenIdAuth, USERNAME, \
OAuthBackend, BaseOAuth2
from social_auth.utils import setting, log, dsa_urlopen
# Yandex configuration: OAuth2 endpoints plus the OpenID entry point
# used by the backends below.
YANDEX_AUTHORIZATION_URL = 'https://oauth.yandex.ru/authorize'
YANDEX_ACCESS_TOKEN_URL = 'https://oauth.yandex.ru/token'
YANDEX_SERVER = 'oauth.yandex.ru'
YANDEX_OPENID_URL = 'http://openid.yandex.ru'
def get_username_from_url(links):
    """Derive a username from the first hostname label of ``links['www']``.

    Returns None whenever the link data is missing or unparsable.
    """
    try:
        hostname = urlparse(links.get('www')).hostname
        return hostname.split('.')[0]
    except (IndexError, AttributeError):
        # Missing/odd link data simply yields no username.
        return None
class YandexBackend(OpenIDBackend):
    """Yandex OpenID authentication backend"""
    name = 'yandex'

    def get_user_id(self, details, response):
        # Prefer the email address; fall back to the OpenID identity URL.
        return details['email'] or response.identity_url

    def get_user_details(self, response):
        """Generate username from identity url"""
        values = super(YandexBackend, self).get_user_details(response)
        # The last path component of the identity URL doubles as the
        # username when the provider supplied none.
        values[USERNAME] = values.get(USERNAME) or\
                           urlsplit(response.identity_url)\
                                .path.strip('/')
        values['email'] = values.get('email', '')
        return values
class YandexAuth(OpenIdAuth):
    """Yandex OpenID authentication"""
    AUTH_BACKEND = YandexBackend

    def openid_url(self):
        """Returns Yandex authentication URL"""
        # Fixed provider endpoint; no per-user OpenID URL needed.
        return YANDEX_OPENID_URL
class YaruBackend(OAuthBackend):
    """Yandex OAuth authentication backend"""
    name = 'yaru'
    # Extra fields persisted from the auth response.
    EXTRA_DATA = [
        ('id', 'id'),
        ('expires', setting('SOCIAL_AUTH_EXPIRATION', 'expires'))
    ]

    def get_user_details(self, response):
        """Return user details from Yandex account"""
        name = response['name']
        last_name = ''
        # NOTE(review): assumes "Lastname Firstname" ordering in the
        # profile name and ignores any third token -- confirm against
        # the Yandex API.
        if ' ' in name:
            names = name.split(' ')
            last_name = names[0]
            first_name = names[1]
        else:
            first_name = name
        return {
            USERNAME: get_username_from_url(response.get('links')),
            'email': response.get('email', ''),
            'first_name': first_name,
            'last_name': last_name,
        }
class YaruAuth(BaseOAuth2):
    """Yandex Ya.ru OAuth mechanism"""
    AUTHORIZATION_URL = YANDEX_AUTHORIZATION_URL
    ACCESS_TOKEN_URL = YANDEX_ACCESS_TOKEN_URL
    AUTH_BACKEND = YaruBackend
    SERVER_URL = YANDEX_SERVER
    SETTINGS_KEY_NAME = 'YANDEX_APP_ID'
    SETTINGS_SECRET_NAME = 'YANDEX_API_SECRET'

    def get_api_url(self):
        # Profile endpoint; overridden by YandexOAuth2 below.
        return 'https://api-yaru.yandex.ru/me/'

    def user_data(self, access_token, response, *args, **kwargs):
        """Loads user data from service"""
        params = {'oauth_token': access_token,
                  'format': 'json',
                  'text': 1,
                  }
        url = self.get_api_url() + '?' + urlencode(params)
        try:
            return simplejson.load(dsa_urlopen(url))
        except (ValueError, IndexError):
            # Malformed or empty payload: log it and report no data.
            log('error', 'Could not load data from Yandex.',
                exc_info=True, extra=dict(data=params))
            return None
class YandexOAuth2Backend(YaruBackend):
    """Legacy Yandex OAuth2 authentication backend"""
    # Same behavior as YaruBackend, kept under its historical name.
    name = 'yandex-oauth2'
class YandexOAuth2(YaruAuth):
    """Yandex Ya.ru/Moi Krug OAuth mechanism.

    Same flow as YaruAuth, but reads the profile endpoint from the
    YANDEX_OAUTH2_API_URL setting and normalizes the reply (unwraps
    one-element lists, strips the id path, attaches an avatar URL).
    """
    AUTH_BACKEND = YandexOAuth2Backend

    def get_api_url(self):
        return setting('YANDEX_OAUTH2_API_URL')

    def user_data(self, access_token, response, *args, **kwargs):
        """Load and normalize the profile data from the service."""
        # Fix: forward *args/**kwargs unpacked; previously they were
        # passed as two extra positional arguments (a tuple and a dict).
        reply = super(YandexOAuth2, self).user_data(access_token, response,
                                                    *args, **kwargs)

        if reply:
            # Some endpoints wrap the profile in a one-element list.
            if isinstance(reply, list) and len(reply) >= 1:
                reply = reply[0]

            if 'links' in reply:
                userpic = reply['links'].get('avatar')
            elif 'avatar' in reply:
                userpic = reply['avatar'].get('Portrait')
            else:
                userpic = ''

            reply.update({
                'id': reply['id'].split("/")[-1],
                'access_token': access_token,
                'userpic': userpic
            })

        return reply
# Backend definition: maps the backend name used in settings/URLs to
# its auth class; consumed by social_auth's backend loader.
BACKENDS = {
    'yandex': YandexAuth,
    'yaru': YaruAuth,
    'yandex-oauth2': YandexOAuth2
}
| apache-2.0 |
adamewing/tebreak | scripts/misc/genotype_ref.py | 1 | 4350 | #!/usr/bin/env python
import argparse
import csv
import os
import pysam
def break_count(bam, chrom, poslist, minpad=5, flex=1, minmapq=10):
    """Count reads supporting an insertion breakpoint vs. the reference.

    Returns ``(altcount, refcount)``: *altcount* is the number of reads
    soft-clipped within *flex* bp of a position in *poslist*; *refcount*
    is the number of fully aligned reads spanning the whole TSD
    interval.  Unmapped, duplicate, and low-mapq reads are ignored.
    """
    altcount = 0
    refcount = 0

    poslist = list(poslist)
    tsd_start = min(poslist)
    tsd_end = max(poslist)
    tsd_len = tsd_end - tsd_start

    # Clamp so the fetch window never starts before the contig origin.
    if tsd_start < minpad:
        tsd_start = minpad

    # A clip shorter than this could be explained by the TSD itself,
    # so require strictly more (hoisted out of the read loop).
    clip_min = max(tsd_len, minpad)

    for read in bam.fetch(chrom, tsd_start - minpad, tsd_end + minpad):
        if read.is_unmapped or read.is_duplicate:
            continue

        if read.mapq < minmapq:
            continue

        rclip = len(read.seq) - read.query_alignment_end
        lclip = read.query_alignment_start

        rbreak = 0
        lbreak = 0

        if rclip > clip_min:
            rbreak = read.reference_end

        if lclip > clip_min:
            lbreak = read.reference_start

        # Does this read's clip point land on any candidate breakpoint?
        support_alt = any(
            (pos - flex <= rbreak <= pos + flex) or
            (pos - flex <= lbreak <= pos + flex)
            for pos in poslist
        )

        if support_alt:
            altcount += 1
        elif read.alen == len(read.seq):
            # Fully aligned read: count it only if it spans the TSD.
            if read.reference_start < tsd_start and read.reference_end > tsd_end:
                refcount += 1

    return altcount, refcount
def getVAF(bam, chrom, poslist):
    """Return (alt, ref, vaf): insertion reads, reference reads, and the
    variant allele fraction at the breakpoints in *poslist*."""
    alt, ref = break_count(bam, chrom, map(int, poslist))
    covered = alt + ref
    vaf = float(alt) / covered if covered > 0 else 0.0
    return alt, ref, vaf
def main(args):
    """Write a VCF of genotype calls for reference TE insertions.

    Reads a list of (bam path, sample name) pairs and a tab-delimited
    insertion table; emits one VCF line per insertion with a GT:DS
    (genotype, dosage) entry per sample on stdout.
    """
    print('##fileformat=VCFv4.1')

    vcf_cols = ['#CHROM', 'POS', 'ID', 'REF', 'ALT', 'QUAL', 'FILTER', 'INFO', 'FORMAT']

    bams = []

    with open(args.bamlist) as _:
        for bam in _:
            fn, name = bam.strip().split()
            assert os.path.exists(fn.strip())
            bams.append(fn)
            vcf_cols.append(name)

    print('\t'.join(vcf_cols))

    with open(args.table) as table:
        csv_reader = csv.DictReader(table, delimiter='\t')
        for rec in csv_reader:
            tsd_5p = list(map(int, [rec['TSD_Start_5p'], rec['TSD_End_5p']]))
            tsd_3p = list(map(int, [rec['TSD_Start_3p'], rec['TSD_End_3p']]))

            info = 'ELT=%s;ORIENT=%s' % (rec['Name'], rec['Orientation'])

            vcf_line = [rec['Chromosome'], str(rec['TSD_Start_5p']), '.', 'REF', 'ALT', '100', 'PASS', info, 'GT:DS']

            for bam_fn in bams:
                bam = pysam.AlignmentFile(bam_fn)

                # Pool evidence from both TSD ends.
                vaf_5p = getVAF(bam, rec['Chromosome'], tsd_5p)
                vaf_3p = getVAF(bam, rec['Chromosome'], tsd_3p)

                alt_total = vaf_5p[0] + vaf_3p[0]
                ref_total = vaf_5p[1] + vaf_3p[1]

                # BUG FIX: this was 'vat_total = 0.0' (typo), which left
                # vaf_total undefined for sites with no covering reads.
                vaf_total = 0.0
                if alt_total + ref_total > 0:
                    vaf_total = float(alt_total) / float(alt_total + ref_total)

                dose = 0.0
                gt = './.'

                if alt_total + ref_total >= int(args.mindepth):
                    dose = 2 - (vaf_total * 2)  # ref-specific dosage
                    gt = '1/1'  # default to homz. reference for insertions in ref assembly

                    if dose > float(args.hetlow) * 2 and dose < float(args.hethi) * 2:
                        gt = '0/1'

                    if dose < float(args.hetlow) * 2:  # ref-specific
                        gt = '0/0'

                fmt = '%s:%.3f' % (gt, dose)

                vcf_line.append(fmt)

                bam.close()

            print('\t'.join(vcf_line))
if __name__ == '__main__':
    # CLI entry: genotype reference TE insertions across a set of BAMs.
    parser = argparse.ArgumentParser(description='genotype reference insertions')
    parser.add_argument('-b', '--bamlist', required=True)   # "path name" pairs, one per line
    parser.add_argument('-t', '--table', required=True)     # tab-delimited insertion table
    parser.add_argument('--mindepth', default=10)           # min reads to call a genotype
    parser.add_argument('--hetlow', default=0.15)           # het dosage lower bound (x2)
    parser.add_argument('--hethi', default=0.85)            # het dosage upper bound (x2)
    args = parser.parse_args()
    main(args)
| mit |
ericlink/adms-server | playframework-dist/1.1-src/python/Lib/idlelib/ParenMatch.py | 2 | 6783 | """ParenMatch -- An IDLE extension for parenthesis matching.
When you hit a right paren, the cursor should move briefly to the left
paren. Paren here is used generically; the matching applies to
parentheses, square brackets, and curly braces.
"""
from HyperParser import HyperParser
from configHandler import idleConf
# Map each closing bracket to its opener, used to locate the match.
_openers = {')':'(',']':'[','}':'{'}
CHECK_DELAY = 100 # milliseconds between insert-position polls
class ParenMatch:
    """Highlight matching parentheses

    There are two supported styles of paren matching, based loosely
    on the Emacs options.  The style is selected based on the
    HILITE_STYLE attribute; it can be changed using the set_style
    method.

    The supported styles are:

    default -- When a right paren is typed, highlight the matching
        left paren for 1/2 sec.

    expression -- When a right paren is typed, highlight the entire
        expression from the left paren to the right paren.

    TODO:

    - extend IDLE with configuration dialog to change options
    - implement rest of Emacs highlight styles (see below)
    - print mismatch warning in IDLE status window

    Note: In Emacs, there are several styles of highlight where the
    matching paren is highlighted whenever the cursor is immediately
    to the right of a right paren.  I don't know how to do that in Tk,
    so I haven't bothered.
    """
    menudefs = [
        ('edit', [
            ("Show surrounding parens", "<<flash-paren>>"),
        ])
    ]
    # User-configurable options, read once at class-definition time.
    STYLE = idleConf.GetOption('extensions','ParenMatch','style',
            default='expression')
    FLASH_DELAY = idleConf.GetOption('extensions','ParenMatch','flash-delay',
            type='int',default=500)
    HILITE_CONFIG = idleConf.GetHighlight(idleConf.CurrentTheme(),'hilite')
    BELL = idleConf.GetOption('extensions','ParenMatch','bell',
            type='bool',default=1)

    RESTORE_VIRTUAL_EVENT_NAME = "<<parenmatch-check-restore>>"
    # We want the restore event be called before the usual return and
    # backspace events.
    RESTORE_SEQUENCES = ("<KeyPress>", "<ButtonPress>",
                         "<Key-Return>", "<Key-BackSpace>")

    def __init__(self, editwin):
        self.editwin = editwin
        self.text = editwin.text
        # Bind the check-restore event to the function restore_event,
        # so that we can then use activate_restore (which calls event_add)
        # and deactivate_restore (which calls event_delete).
        editwin.text.bind(self.RESTORE_VIRTUAL_EVENT_NAME,
                          self.restore_event)
        # counter invalidates stale timers: each new highlight bumps it,
        # and a timer only restores if its saved count is still current.
        self.counter = 0
        self.is_restore_active = 0
        self.set_style(self.STYLE)

    def activate_restore(self):
        # Route the restore sequences to our virtual event (idempotent).
        if not self.is_restore_active:
            for seq in self.RESTORE_SEQUENCES:
                self.text.event_add(self.RESTORE_VIRTUAL_EVENT_NAME, seq)
            self.is_restore_active = True

    def deactivate_restore(self):
        if self.is_restore_active:
            for seq in self.RESTORE_SEQUENCES:
                self.text.event_delete(self.RESTORE_VIRTUAL_EVENT_NAME, seq)
            self.is_restore_active = False

    def set_style(self, style):
        # Select the tag-creation and timeout strategies for the style.
        self.STYLE = style
        if style == "default":
            self.create_tag = self.create_tag_default
            self.set_timeout = self.set_timeout_last
        elif style == "expression":
            self.create_tag = self.create_tag_expression
            self.set_timeout = self.set_timeout_none

    def flash_paren_event(self, event):
        # Explicit "show surrounding parens" request (menu/keybinding).
        indices = HyperParser(self.editwin, "insert").get_surrounding_brackets()
        if indices is None:
            self.warn_mismatched()
            return
        self.activate_restore()
        self.create_tag(indices)
        self.set_timeout_last()

    def paren_closed_event(self, event):
        # If it was a shortcut and not really a closing paren, quit.
        closer = self.text.get("insert-1c")
        if closer not in _openers:
            return
        hp = HyperParser(self.editwin, "insert-1c")
        if not hp.is_in_code():
            return
        indices = hp.get_surrounding_brackets(_openers[closer], True)
        if indices is None:
            self.warn_mismatched()
            return
        self.activate_restore()
        self.create_tag(indices)
        self.set_timeout()

    def restore_event(self, event=None):
        # Remove the highlight and stop listening for restore triggers.
        self.text.tag_delete("paren")
        self.deactivate_restore()
        self.counter += 1   # disable the last timer, if there is one.

    def handle_restore_timer(self, timer_count):
        if timer_count == self.counter:
            self.restore_event()

    def warn_mismatched(self):
        if self.BELL:
            self.text.bell()

    # any one of the create_tag_XXX methods can be used depending on
    # the style

    def create_tag_default(self, indices):
        """Highlight the single paren that matches"""
        self.text.tag_add("paren", indices[0])
        self.text.tag_config("paren", self.HILITE_CONFIG)

    def create_tag_expression(self, indices):
        """Highlight the entire expression"""
        # Include the closing bracket itself in the highlighted range.
        if self.text.get(indices[1]) in (')', ']', '}'):
            rightindex = indices[1]+"+1c"
        else:
            rightindex = indices[1]
        self.text.tag_add("paren", indices[0], rightindex)
        self.text.tag_config("paren", self.HILITE_CONFIG)

    # any one of the set_timeout_XXX methods can be used depending on
    # the style

    def set_timeout_none(self):
        """Highlight will remain until user input turns it off
        or the insert has moved"""
        # After CHECK_DELAY, call a function which disables the "paren" tag
        # if the event is for the most recent timer and the insert has changed,
        # or schedules another call for itself.
        self.counter += 1
        def callme(callme, self=self, c=self.counter,
                   index=self.text.index("insert")):
            if index != self.text.index("insert"):
                self.handle_restore_timer(c)
            else:
                self.editwin.text_frame.after(CHECK_DELAY, callme, callme)
        self.editwin.text_frame.after(CHECK_DELAY, callme, callme)

    def set_timeout_last(self):
        """The last highlight created will be removed after .5 sec"""
        # associate a counter with an event; only disable the "paren"
        # tag if the event is for the most recent timer.
        self.counter += 1
        self.editwin.text_frame.after(self.FLASH_DELAY,
                                      lambda self=self, c=self.counter: \
                                      self.handle_restore_timer(c))
| mit |
surligas/cs436-gnuradio | gr-digital/examples/example_costas.py | 49 | 5316 | #!/usr/bin/env python
#
# Copyright 2011-2013 Free Software Foundation, Inc.
#
# This file is part of GNU Radio
#
# GNU Radio is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3, or (at your option)
# any later version.
#
# GNU Radio is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with GNU Radio; see the file COPYING. If not, write to
# the Free Software Foundation, Inc., 51 Franklin Street,
# Boston, MA 02110-1301, USA.
#
from gnuradio import gr, digital, filter
from gnuradio import blocks
from gnuradio import channels
from gnuradio import eng_notation
from gnuradio.eng_option import eng_option
from optparse import OptionParser
import sys
# Fail early with a pointer to the dependency's homepage if the
# scientific/plotting stacks are missing.  The print calls are
# parenthesized so this module at least parses under Python 3 (the
# bare 'print' statement form is a Python 3 syntax error).
try:
    import scipy
except ImportError:
    print("Error: could not import scipy (http://www.scipy.org/)")
    sys.exit(1)

try:
    import pylab
except ImportError:
    print("Error: could not import pylab (http://matplotlib.sourceforge.net/)")
    sys.exit(1)
class example_costas(gr.top_block):
    """Flowgraph: BPSK source -> RRC interpolating filter -> channel model
    -> Costas loop, with vector sinks capturing the shaped source, the
    loop output, and the loop's frequency estimate for plotting."""
    def __init__(self, N, sps, rolloff, ntaps, bw, noise, foffset, toffset, poffset):
        gr.top_block.__init__(self)

        # Root-raised-cosine pulse-shaping taps at sps samples/symbol.
        rrc_taps = filter.firdes.root_raised_cosine(
            sps, sps, 1.0, rolloff, ntaps)

        # Random +/-1 BPSK symbols, rotated by the static phase offset.
        data = 2.0*scipy.random.randint(0, 2, N) - 1.0
        data = scipy.exp(1j*poffset) * data

        self.src = blocks.vector_source_c(data.tolist(), False)
        self.rrc = filter.interp_fir_filter_ccf(sps, rrc_taps)
        self.chn = channels.channel_model(noise, foffset, toffset)
        self.cst = digital.costas_loop_cc(bw, 2)

        self.vsnk_src = blocks.vector_sink_c()
        self.vsnk_cst = blocks.vector_sink_c()
        self.vsnk_frq = blocks.vector_sink_f()

        self.connect(self.src, self.rrc, self.chn, self.cst, self.vsnk_cst)
        self.connect(self.rrc, self.vsnk_src)
        # The Costas block's second output is its frequency estimate.
        self.connect((self.cst,1), self.vsnk_frq)
def main():
    """Parse simulation options, run the Costas-loop flowgraph, and plot
    the loop frequency, IQ constellation, and time-domain symbols."""
    parser = OptionParser(option_class=eng_option, conflict_handler="resolve")
    parser.add_option("-N", "--nsamples", type="int", default=2000,
                      help="Set the number of samples to process [default=%default]")
    parser.add_option("-S", "--sps", type="int", default=4,
                      help="Set the samples per symbol [default=%default]")
    parser.add_option("-r", "--rolloff", type="eng_float", default=0.35,
                      help="Set the rolloff factor [default=%default]")
    parser.add_option("-W", "--bandwidth", type="eng_float", default=2*scipy.pi/100.0,
                      help="Set the loop bandwidth [default=%default]")
    parser.add_option("-n", "--ntaps", type="int", default=45,
                      help="Set the number of taps in the filters [default=%default]")
    parser.add_option("", "--noise", type="eng_float", default=0.0,
                      help="Set the simulation noise voltage [default=%default]")
    parser.add_option("-f", "--foffset", type="eng_float", default=0.0,
                      help="Set the simulation's normalized frequency offset (in Hz) [default=%default]")
    parser.add_option("-t", "--toffset", type="eng_float", default=1.0,
                      help="Set the simulation's timing offset [default=%default]")
    parser.add_option("-p", "--poffset", type="eng_float", default=0.707,
                      help="Set the simulation's phase offset [default=%default]")
    (options, args) = parser.parse_args ()

    # Adjust N for the interpolation by sps
    options.nsamples = options.nsamples // options.sps

    # Set up the program-under-test
    put = example_costas(options.nsamples, options.sps, options.rolloff,
                         options.ntaps, options.bandwidth, options.noise,
                         options.foffset, options.toffset, options.poffset)
    put.run()

    data_src = scipy.array(put.vsnk_src.data())

    # Convert the FLL's LO frequency from rads/sec to Hz
    data_frq = scipy.array(put.vsnk_frq.data()) / (2.0*scipy.pi)

    # adjust this to align with the data.
    data_cst = scipy.array(3*[0,]+list(put.vsnk_cst.data()))

    # Plot the Costas loop's LO frequency
    f1 = pylab.figure(1, figsize=(12,10), facecolor='w')
    s1 = f1.add_subplot(2,2,1)
    s1.plot(data_frq)
    s1.set_title("Costas LO")
    s1.set_xlabel("Samples")
    s1.set_ylabel("Frequency (normalized Hz)")

    # Plot the IQ symbols
    s3 = f1.add_subplot(2,2,2)
    s3.plot(data_src.real, data_src.imag, "o")
    s3.plot(data_cst.real, data_cst.imag, "rx")
    s3.set_title("IQ")
    s3.set_xlabel("Real part")
    s3.set_ylabel("Imag part")
    s3.set_xlim([-2, 2])
    s3.set_ylim([-2, 2])

    # Plot the symbols in time
    s4 = f1.add_subplot(2,2,3)
    s4.set_position([0.125, 0.05, 0.775, 0.4])
    s4.plot(data_src.real, "o-")
    s4.plot(data_cst.real, "rx-")
    s4.set_title("Symbols")
    s4.set_xlabel("Samples")
    s4.set_ylabel("Real Part of Signals")

    pylab.show()
if __name__ == "__main__":
    try:
        main()
    except KeyboardInterrupt:
        # Exit quietly on Ctrl-C instead of dumping a traceback.
        pass
xpac1985/ansible | test/units/module_utils/basic/test_heuristic_log_sanitize.py | 48 | 3908 | # -*- coding: utf-8 -*-
# (c) 2015, Toshio Kuratomi <tkuratomi@ansible.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Make coding more python3-ish
from __future__ import (absolute_import, division)
__metaclass__ = type
import sys
import syslog
from ansible.compat.tests import unittest
from ansible.compat.tests.mock import patch, MagicMock
from ansible.module_utils.basic import heuristic_log_sanitize
class TestHeuristicLogSanitize(unittest.TestCase):
    """Tests for heuristic_log_sanitize()'s password-hiding heuristics."""

    def setUp(self):
        self.URL_SECRET = 'http://username:pas:word@foo.com/data'
        self.SSH_SECRET = 'username:pas:word@foo.com/data'
        self.clean_data = repr(self._gen_data(3, True, True, 'no_secret_here'))
        self.url_data = repr(self._gen_data(3, True, True, self.URL_SECRET))
        self.ssh_data = repr(self._gen_data(3, True, True, self.SSH_SECRET))

    def _gen_data(self, records, per_rec, top_level, secret_text):
        """Build a nested hostvars dict, embedding secret_text per-host
        and/or at the top level."""
        hostvars = {'hostvars': {}}
        # range(1, records): the explicit step of 1 was redundant.
        for i in range(1, records):
            host_facts = {'host%s' % i:
                          {'pstack':
                           {'running': '875.1',
                            'symlinked': '880.0',
                            'tars': [],
                            'versions': ['885.0']},
                           }}
            if per_rec:
                host_facts['host%s' % i]['secret'] = secret_text
            hostvars['hostvars'].update(host_facts)
        if top_level:
            hostvars['secret'] = secret_text
        return hostvars

    def test_did_not_hide_too_much(self):
        # assertEqual: assertEquals is a deprecated alias.
        self.assertEqual(heuristic_log_sanitize(self.clean_data), self.clean_data)

    def test_hides_url_secrets(self):
        url_output = heuristic_log_sanitize(self.url_data)
        # Basic functionality: Successfully hid the password
        self.assertNotIn('pas:word', url_output)

        # Slightly more advanced, we hid all of the password despite the ":"
        self.assertNotIn('pas', url_output)

        # In this implementation we replace the password with 8 "*" which is
        # also the length of our password.  The url fields should be able to
        # accurately detect where the password ends so the length should be
        # the same:
        self.assertEqual(len(url_output), len(self.url_data))

    def test_hides_ssh_secrets(self):
        ssh_output = heuristic_log_sanitize(self.ssh_data)
        self.assertNotIn('pas:word', ssh_output)

        # Slightly more advanced, we hid all of the password despite the ":"
        self.assertNotIn('pas', ssh_output)

        # ssh checking is harder as the heuristic is overzealous in many
        # cases.  Since the input will have at least one ":" present before
        # the password we can tell some things about the beginning and end of
        # the data, though:
        self.assertTrue(ssh_output.startswith("{'"))
        self.assertTrue(ssh_output.endswith("}"))
        self.assertIn(":********@foo.com/data'", ssh_output)

    @unittest.skipIf(sys.version_info[0] >= 3, "Python 3 is not supported on targets (yet)")
    def test_hides_parameter_secrets(self):
        output = heuristic_log_sanitize('token="secret", user="person", token_entry="test=secret"', frozenset(['secret']))
        self.assertNotIn('secret', output)
| gpl-3.0 |
jMyles/AutobahnPython | autobahn/autobahn/compress_base.py | 46 | 1628 | ###############################################################################
##
## Copyright 2013 Tavendo GmbH
##
## Licensed under the Apache License, Version 2.0 (the "License");
## you may not use this file except in compliance with the License.
## You may obtain a copy of the License at
##
## http://www.apache.org/licenses/LICENSE-2.0
##
## Unless required by applicable law or agreed to in writing, software
## distributed under the License is distributed on an "AS IS" BASIS,
## WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
## See the License for the specific language governing permissions and
## limitations under the License.
##
###############################################################################
__all__ = ["PerMessageCompressOffer",
"PerMessageCompressOfferAccept",
"PerMessageCompressResponse",
"PerMessageCompressResponseAccept",
"PerMessageCompress"]
class PerMessageCompressOffer:
"""
Base class for WebSocket compression parameter client offers.
"""
pass
class PerMessageCompressOfferAccept:
"""
Base class for WebSocket compression parameter client offer accepts by the server.
"""
pass
class PerMessageCompressResponse:
"""
Base class for WebSocket compression parameter server responses.
"""
pass
class PerMessageCompressResponseAccept:
"""
Base class for WebSocket compression parameter server response accepts by client.
"""
pass
class PerMessageCompress:
"""
Base class for WebSocket compression negotiated parameters.
"""
pass
| apache-2.0 |
dchabot/ophyd | ophyd/scaler.py | 2 | 1571 |
import logging
from collections import OrderedDict
from .signal import (EpicsSignal, EpicsSignalRO)
from .device import Device
from .device import (Component as C, DynamicDeviceComponent as DDC)
logger = logging.getLogger(__name__)
def _scaler_fields(attr_base, field_base, range_, **kwargs):
defn = OrderedDict()
for i in range_:
attr = '{attr}{i}'.format(attr=attr_base, i=i)
suffix = '{field}{i}'.format(field=field_base, i=i)
defn[attr] = (EpicsSignalRO, suffix, kwargs)
return defn
class EpicsScaler(Device):
'''SynApps Scaler Record interface'''
count = C(EpicsSignal, '.CNT', trigger_value=1)
count_mode = C(EpicsSignal, '.CONT', string=True)
time = C(EpicsSignal, '.T')
preset_time = C(EpicsSignal, '.TP')
auto_count_time = C(EpicsSignal, '.TP1')
channels = DDC(_scaler_fields('chan', '.S', range(1, 33)))
presets = DDC(_scaler_fields('preset', '.PR', range(1, 33)))
gates = DDC(_scaler_fields('gate', '.G', range(1, 33)))
def __init__(self, prefix, *, read_attrs=None, configuration_attrs=None,
name=None, parent=None, **kwargs):
if read_attrs is None:
read_attrs = ['channels', 'time']
if configuration_attrs is None:
configuration_attrs = ['preset_time', 'presets', 'gates']
super().__init__(prefix, read_attrs=read_attrs,
configuration_attrs=configuration_attrs,
name=name, parent=parent, **kwargs)
self.stage_sigs.update([(self.count_mode, 0)])
| bsd-3-clause |
rcaelers/workrave | testconf.py | 9 | 1287 | #!/usr/bin/python
import os;
import sys;
options = [ "gconf",
"xml",
"gnome3",
"indicator",
"distribution",
"gstreamer",
"dbus",
"exercises",
"pulse",
"debug",
"x11-monitoring-fallback",
"tracing"]
sys.stdout.write("Q=@\n\n")
sys.stdout.write("all:\n\n")
for i in range(0, 1024) :
d = ""
conf = "";
for j in range(0, 10) :
if i & (1 << j) :
conf = conf + "--disable-" + options[j] + " "
d = d + "1"
else :
conf = conf + "--enable-" + options[j] + " "
d = d + "0"
dir = "testconf/" + d
sys.stdout.write("all: " + d + "\n");
sys.stdout.write(".PHONY: " + d + "\n");
sys.stdout.write(d + ":\n");
sys.stdout.write("\t-$(Q)rm -rf " + dir + "*\n")
sys.stdout.write("\t$(Q)mkdir -p " + dir + "\n")
sys.stdout.write("\t$(Q)(cd " + dir + " && ../../configure " + conf + ") > " + dir + "-conf.log 2>&1\n");
sys.stdout.write("\t$(Q)date +\"%y-%m-%d %H:%M:%S " + conf + "\"\n")
sys.stdout.write("\t$(Q)$(MAKE) -C " + dir + " > " + dir + "-make.log 2>&1\n")
sys.stdout.write("\t-$(Q)rm -rf " + dir + "*\n")
sys.stdout.write("\n")
| gpl-3.0 |
moyogo/robofab | Lib/robofab/pens/filterPen.py | 9 | 12103 | """A couple of point pens to filter contours in various ways."""
from fontTools.pens.basePen import AbstractPen, BasePen
from robofab.pens.pointPen import AbstractPointPen
from robofab.objects.objectsRF import RGlyph as _RGlyph
from robofab.objects.objectsBase import _interpolatePt
import math
#
# threshold filtering
#
def distance(pt1, pt2):
    """Euclidean distance between two (x, y) points."""
    dx = pt1[0] - pt2[0]
    dy = pt1[1] - pt2[1]
    return math.hypot(dx, dy)
class ThresholdPointPen(AbstractPointPen):
    """
    Rewrite of the ThresholdPen as a PointPen
    so that we can preserve named points and other arguments.
    This pen will add components from the original glyph,
    but it won't filter those components.

    "move", "line", "curve" or "qcurve"
    """
    def __init__(self, otherPointPen, threshold=10):
        # Minimum on-curve to on-curve distance for a segment to survive.
        self.threshold = threshold
        self._lastPt = None
        self._offCurveBuffer = []
        self.otherPointPen = otherPointPen

    def beginPath(self):
        """Start a new sub path."""
        self.otherPointPen.beginPath()
        self._lastPt = None

    def endPath(self):
        """End the current sub path."""
        self.otherPointPen.endPath()

    def addPoint(self, pt, segmentType=None, smooth=False, name=None, **kwargs):
        """Add a point to the current sub path."""
        # NOTE(review): point-pen off-curve points normally have
        # segmentType None; confirm buffering 'curve'/'qcurve' points
        # here is intended.
        if segmentType in ['curve', 'qcurve']:
            # it's an offcurve, let's buffer them until we get another oncurve
            # and we know what to do with them
            self._offCurveBuffer.append((pt, segmentType, smooth, name, kwargs))
            return
        elif segmentType == "move":
            # start of an open contour
            self.otherPointPen.addPoint(pt, segmentType, smooth, name) # how to add kwargs?
            self._lastPt = pt
            self._offCurveBuffer = []
        elif segmentType == "line":
            if self._lastPt is None:
                self.otherPointPen.addPoint(pt, segmentType, smooth, name) # how to add kwargs?
                self._lastPt = pt
            elif distance(pt, self._lastPt) >= self.threshold:
                # we're oncurve and far enough from the last oncurve
                if self._offCurveBuffer:
                    # empty any buffered offcurves
                    for buf_pt, buf_segmentType, buf_smooth, buf_name, buf_kwargs in self._offCurveBuffer:
                        self.otherPointPen.addPoint(buf_pt, buf_segmentType, buf_smooth, buf_name) # how to add kwargs?
                    self._offCurveBuffer = []
                # finally add the oncurve.
                self.otherPointPen.addPoint(pt, segmentType, smooth, name) # how to add kwargs?
                self._lastPt = pt
            else:
                # we're too short, so we're not going to make it.
                # we need to clear out the offcurve buffer.
                self._offCurveBuffer = []

    def addComponent(self, baseGlyphName, transformation):
        """Add a sub glyph. Note: this way components are not filtered."""
        self.otherPointPen.addComponent(baseGlyphName, transformation)
class ThresholdPen(AbstractPen):
    """Filter pen that drops segments shorter than *threshold*.

    The reference point only advances when a segment is actually
    drawn, so a run of short segments is merged into the next segment
    that is long enough.
    """

    def __init__(self, otherPen, threshold=10):
        self.otherPen = otherPen
        self.threshold = threshold
        self._lastPt = None

    def moveTo(self, pt):
        self.otherPen.moveTo(pt)
        self._lastPt = pt

    def lineTo(self, pt, smooth=False):
        if distance(pt, self._lastPt) >= self.threshold:
            self.otherPen.lineTo(pt)
            self._lastPt = pt

    def curveTo(self, pt1, pt2, pt3):
        if distance(pt3, self._lastPt) >= self.threshold:
            self.otherPen.curveTo(pt1, pt2, pt3)
            self._lastPt = pt3

    def qCurveTo(self, *points):
        endPt = points[-1]
        if distance(endPt, self._lastPt) >= self.threshold:
            self.otherPen.qCurveTo(*points)
            self._lastPt = endPt

    def closePath(self):
        self.otherPen.closePath()

    def endPath(self):
        self.otherPen.endPath()

    def addComponent(self, glyphName, transformation):
        self.otherPen.addComponent(glyphName, transformation)
def thresholdGlyph(aGlyph, threshold=10):
    """Filter out short segments of *aGlyph* in place (segment pen based)."""
    from robofab.pens.adapterPens import PointToSegmentPen
    filtered = _RGlyph()
    segmentPen = ThresholdPen(filtered.getPen(), threshold)
    pointPen = PointToSegmentPen(segmentPen)
    aGlyph.drawPoints(pointPen)
    aGlyph.clear()
    aGlyph.appendGlyph(filtered)
    aGlyph.update()
    return aGlyph
def thresholdGlyphPointPen(aGlyph, threshold=10):
    """Same as thresholdGlyph, but using the ThresholdPointPen, which
    should respect anchors."""
    # import kept for parity with thresholdGlyph (it is otherwise unused here)
    from robofab.pens.adapterPens import PointToSegmentPen
    filtered = _RGlyph()
    filterPen = ThresholdPointPen(filtered.getPointPen(), threshold)
    aGlyph.drawPoints(filterPen)
    aGlyph.clear()
    filtered.drawPoints(aGlyph.getPointPen())
    aGlyph.update()
    return aGlyph
#
# Curve flattening
#
def _estimateCubicCurveLength(pt0, pt1, pt2, pt3, precision=10):
    """Estimate the arc length of a cubic Bezier segment.

    The curve is sampled at *precision* + 1 evenly spaced parameter
    values and the lengths of the connecting chords are summed.
    """
    step = 1.0 / precision
    samples = [_getCubicPoint(k * step, pt0, pt1, pt2, pt3)
               for k in range(0, precision + 1)]
    total = 0
    for segStart, segEnd in zip(samples, samples[1:]):
        total += distance(segStart, segEnd)
    return total
def _mid((x0, y0), (x1, y1)):
"""(Point, Point) -> Point\nReturn the point that lies in between the two input points."""
return 0.5 * (x0 + x1), 0.5 * (y0 + y1)
def _getCubicPoint(t, pt0, pt1, pt2, pt3):
if t == 0:
return pt0
if t == 1:
return pt3
if t == 0.5:
a = _mid(pt0, pt1)
b = _mid(pt1, pt2)
c = _mid(pt2, pt3)
d = _mid(a, b)
e = _mid(b, c)
return _mid(d, e)
else:
cx = (pt1[0] - pt0[0]) * 3
cy = (pt1[1] - pt0[1]) * 3
bx = (pt2[0] - pt1[0]) * 3 - cx
by = (pt2[1] - pt1[1]) * 3 - cy
ax = pt3[0] - pt0[0] - cx - bx
ay = pt3[1] - pt0[1] - cy - by
t3 = t ** 3
t2 = t * t
x = ax * t3 + bx * t2 + cx * t + pt0[0]
y = ay * t3 + by * t2 + cy * t + pt0[1]
return x, y
class FlattenPen(BasePen):
    """Process the contours into a series of straight lines by flattening the curves.

    approximateSegmentLength -- target length of the emitted segments
    segmentLines -- when True, straight lines are also cut into segments
    filterDoubles -- drop line segments of zero length
    """
    def __init__(self, otherPen, approximateSegmentLength=5, segmentLines=False, filterDoubles=True):
        self.approximateSegmentLength = approximateSegmentLength
        # BasePen wants a glyphSet; an empty one is enough here
        BasePen.__init__(self, {})
        self.otherPen = otherPen
        self.currentPt = None   # current on-curve position
        self.firstPt = None     # contour start, needed by _closePath
        self.segmentLines = segmentLines
        self.filterDoubles = filterDoubles

    def _moveTo(self, pt):
        self.otherPen.moveTo(pt)
        self.currentPt = pt
        self.firstPt = pt

    def _lineTo(self, pt):
        if self.filterDoubles:
            if pt == self.currentPt:
                return
        if not self.segmentLines:
            # straight lines are passed through untouched
            self.otherPen.lineTo(pt)
            self.currentPt = pt
            return
        d = distance(self.currentPt, pt)
        maxSteps = int(round(d / self.approximateSegmentLength))
        if maxSteps < 1:
            # shorter than one segment: emit as-is
            self.otherPen.lineTo(pt)
            self.currentPt = pt
            return
        step = 1.0/maxSteps
        factors = range(0, maxSteps+1)
        for i in factors[1:]:
            self.otherPen.lineTo(_interpolatePt(self.currentPt, pt, i*step))
        self.currentPt = pt

    def _curveToOne(self, pt1, pt2, pt3):
        # segment count is based on an estimate of the arc length
        est = _estimateCubicCurveLength(self.currentPt, pt1, pt2, pt3)/self.approximateSegmentLength
        maxSteps = int(round(est))
        # degenerate curve: both handles sit on the on-curve points
        falseCurve = (pt1==self.currentPt) and (pt2==pt3)
        if maxSteps < 1 or falseCurve:
            self.otherPen.lineTo(pt3)
            self.currentPt = pt3
            return
        step = 1.0/maxSteps
        factors = range(0, maxSteps+1)
        for i in factors[1:]:
            pt = _getCubicPoint(i*step, self.currentPt, pt1, pt2, pt3)
            self.otherPen.lineTo(pt)
        self.currentPt = pt3

    def _closePath(self):
        # close with an explicit (possibly segmented) line back to the start
        self.lineTo(self.firstPt)
        self.otherPen.closePath()
        self.currentPt = None

    def _endPath(self):
        self.otherPen.endPath()
        self.currentPt = None

    def addComponent(self, glyphName, transformation):
        self.otherPen.addComponent(glyphName, transformation)
def flattenGlyph(aGlyph, threshold=10, segmentLines=True):
    """Replace the curves of *aGlyph* with series of straight lines, in place."""
    from robofab.pens.adapterPens import PointToSegmentPen
    if len(aGlyph.contours) == 0:
        return
    flat = _RGlyph()
    flattener = FlattenPen(flat.getPen(), threshold, segmentLines)
    aGlyph.drawPoints(PointToSegmentPen(flattener))
    aGlyph.clear()
    aGlyph.appendGlyph(flat)
    aGlyph.update()
    return aGlyph
def spikeGlyph(aGlyph, segmentLength=20, spikeLength=40, patternFunc=None):
    """Add narly spikes or dents to the glyph, in place.

    The outline is first flattened into straight segments of roughly
    *segmentLength* units; every other point is then pushed along the
    perpendicular of the chord between its neighbours by *spikeLength*
    units.

    patternFunc is an optional function which recalculates the offset;
    it is called with *spikeLength* and must return a number.

    Returns the modified glyph, or None when the glyph has no contours.

    Fixes over the previous version: the local variable shadowing the
    ``next`` builtin is renamed, and the dead ``lastAngle`` bookkeeping
    (assigned but never read) is removed.
    """
    from math import atan2, sin, cos, pi
    new = _RGlyph()
    new.appendGlyph(aGlyph)
    new.width = aGlyph.width
    if len(new.contours) == 0:
        return
    flattenGlyph(new, segmentLength, segmentLines=True)
    for contour in new:
        numPoints = len(contour.points)
        for i in range(0, numPoints, 2):
            prevPt = contour.points[i - 1]
            curPt = contour.points[i]
            nextPt = contour.points[(i + 1) % numPoints]
            # direction of the chord between the two neighbours
            angle = atan2(prevPt.x - nextPt.x, prevPt.y - nextPt.y)
            if patternFunc is not None:
                thisSpikeLength = patternFunc(spikeLength)
            else:
                thisSpikeLength = spikeLength
            # push the point out along the perpendicular of the chord
            curPt.x -= sin(angle + .5 * pi) * thisSpikeLength
            curPt.y -= cos(angle + .5 * pi) * thisSpikeLength
    new.update()
    aGlyph.clear()
    aGlyph.appendGlyph(new)
    aGlyph.update()
    return aGlyph
def halftoneGlyph(aGlyph, invert=False):
    """Convert the glyph into some sort of halftoning pattern, in place.

    Measure a bunch of inside/outside points to simulate grayscale
    levels, then redraw the glyph as one square per cell, sized by the
    measured level. Slow.

    Python 2 ``print`` statements were converted to ``print(...)``
    calls so the module parses under both Python 2 and 3; the printed
    output is unchanged.
    """
    print('halftoneGlyph is running...')
    grid = {}
    drawing = {}
    dataDistance = 10   # pitch of the fine sampling grid
    scan = 2            # half-size (in samples) of each averaging cell
    preload = 0         # bias added to every cell's hit count
    cellDistance = dataDistance * 5
    overshoot = dataDistance * 2
    (xMin, yMin, xMax, yMax) = aGlyph.box
    # Phase 1: sample inside/outside hits on a fine regular grid.
    for x in range(xMin - overshoot, xMax + overshoot, dataDistance):
        print('scanning.. %s' % x)
        for y in range(yMin - overshoot, yMax + overshoot, dataDistance):
            grid[(x, y)] = bool(aGlyph.pointInside((x, y)))
    # Phase 2: average the samples into coarser cells.
    print('analyzing..')
    for x in range(xMin - overshoot, xMax + overshoot, cellDistance):
        for y in range(yMin - overshoot, yMax + overshoot, cellDistance):
            total = preload
            for scanx in range(-scan, scan):
                for scany in range(-scan, scan):
                    if grid.get((x + scanx * dataDistance,
                                 y + scany * dataDistance)):
                        total += 1
            if invert:
                drawing[(x, y)] = 2 * scan ** 2 - float(total)
            else:
                drawing[(x, y)] = float(total)
    # Phase 3: replace the outline with one square per cell.
    aGlyph.clear()
    print(drawing)
    pen = aGlyph.getPen()
    for (x, y), level in drawing.items():
        size = level / float(2 * scan ** 2) * 5
        pen.moveTo((x - size, y - size))
        pen.lineTo((x + size, y - size))
        pen.lineTo((x + size, y + size))
        pen.lineTo((x - size, y + size))
        pen.lineTo((x - size, y - size))
        pen.closePath()
    aGlyph.update()
if __name__ == "__main__":
    # Smoke test / demo: feed a contour with segments of various lengths
    # to a ThresholdPointPen wrapped around a printing pen, so the
    # filtering decisions can be inspected on stdout.
    from robofab.pens.pointPen import PrintingPointPen
    pp = PrintingPointPen()
    #pp.beginPath()
    #pp.addPoint((100, 100))
    #pp.endPath()
    tpp = ThresholdPointPen(pp, threshold=20)
    tpp.beginPath()
    #segmentType=None, smooth=False, name=None
    tpp.addPoint((100, 100), segmentType="line", smooth=True)
    # section that should be too small
    tpp.addPoint((100, 102), segmentType="line", smooth=True)
    tpp.addPoint((200, 200), segmentType="line", smooth=True)
    # curve section with final point that's far enough, but with offcurves that are under the threshold
    tpp.addPoint((200, 205), segmentType="curve", smooth=True)
    tpp.addPoint((300, 295), segmentType="curve", smooth=True)
    tpp.addPoint((300, 300), segmentType="line", smooth=True)
    # curve section with final point that is not far enough
    tpp.addPoint((550, 350), segmentType="curve", smooth=True)
    tpp.addPoint((360, 760), segmentType="curve", smooth=True)
    tpp.addPoint((310, 310), segmentType="line", smooth=True)
    tpp.addPoint((400, 400), segmentType="line", smooth=True)
    tpp.addPoint((100, 100), segmentType="line", smooth=True)
    tpp.endPath()
    # couple of single points with names
    tpp.beginPath()
    tpp.addPoint((500, 500), segmentType="move", smooth=True, name="named point")
    tpp.addPoint((600, 500), segmentType="move", smooth=True, name="named point")
    tpp.addPoint((601, 501), segmentType="move", smooth=True, name="named point")
    tpp.endPath()
    # open path
    tpp.beginPath()
    tpp.addPoint((500, 500), segmentType="move", smooth=True)
    tpp.addPoint((501, 500), segmentType="line", smooth=True)
    tpp.addPoint((101, 500), segmentType="line", smooth=True)
    tpp.addPoint((101, 100), segmentType="line", smooth=True)
    tpp.addPoint((498, 498), segmentType="line", smooth=True)
    tpp.endPath()
| bsd-3-clause |
kenshay/ImageScript | ProgramData/SystemFiles/Python/Lib/site-packages/pip/_vendor/colorama/ansi.py | 640 | 2524 | # Copyright Jonathan Hartley 2013. BSD 3-Clause license, see LICENSE file.
'''
This module generates ANSI character codes for printing colors to terminals.
See: http://en.wikipedia.org/wiki/ANSI_escape_code
'''
# ANSI escape sequence introducers.
CSI = '\033['   # Control Sequence Introducer
OSC = '\033]'   # Operating System Command
BEL = '\007'    # terminates an OSC sequence


def code_to_chars(code):
    """Wrap a numeric SGR attribute into a full escape sequence."""
    return '%s%sm' % (CSI, code)

def set_title(title):
    """Return the OSC sequence that sets the terminal window title."""
    return ''.join((OSC, '2;', title, BEL))

def clear_screen(mode=2):
    """Return the CSI 'erase display' sequence (default: whole screen)."""
    return ''.join((CSI, str(mode), 'J'))

def clear_line(mode=2):
    """Return the CSI 'erase line' sequence (default: whole line)."""
    return ''.join((CSI, str(mode), 'K'))
class AnsiCodes(object):
    """Base class turning numeric SGR attributes into escape strings.

    Subclasses declare class attributes holding plain numbers; on
    instantiation each public attribute is shadowed (on the instance)
    by the corresponding ANSI escape sequence.
    """

    def __init__(self):
        for attr in dir(self):
            if attr.startswith('_'):
                continue
            number = getattr(self, attr)
            setattr(self, attr, code_to_chars(number))
class AnsiCursor(object):
    """Factory methods for ANSI cursor-movement escape sequences."""

    def UP(self, n=1):
        return '%s%sA' % (CSI, n)

    def DOWN(self, n=1):
        return '%s%sB' % (CSI, n)

    def FORWARD(self, n=1):
        return '%s%sC' % (CSI, n)

    def BACK(self, n=1):
        return '%s%sD' % (CSI, n)

    def POS(self, x=1, y=1):
        # note: the CSI 'H' sequence takes row (y) before column (x)
        return '%s%s;%sH' % (CSI, y, x)
class AnsiFore(AnsiCodes):
    """Foreground color attributes (SGR 30-37, reset 39)."""
    BLACK = 30
    RED = 31
    GREEN = 32
    YELLOW = 33
    BLUE = 34
    MAGENTA = 35
    CYAN = 36
    WHITE = 37
    RESET = 39

    # These are fairly well supported, but not part of the standard.
    LIGHTBLACK_EX = 90
    LIGHTRED_EX = 91
    LIGHTGREEN_EX = 92
    LIGHTYELLOW_EX = 93
    LIGHTBLUE_EX = 94
    LIGHTMAGENTA_EX = 95
    LIGHTCYAN_EX = 96
    LIGHTWHITE_EX = 97
class AnsiBack(AnsiCodes):
    """Background color attributes (SGR 40-47, reset 49)."""
    BLACK = 40
    RED = 41
    GREEN = 42
    YELLOW = 43
    BLUE = 44
    MAGENTA = 45
    CYAN = 46
    WHITE = 47
    RESET = 49

    # These are fairly well supported, but not part of the standard.
    LIGHTBLACK_EX = 100
    LIGHTRED_EX = 101
    LIGHTGREEN_EX = 102
    LIGHTYELLOW_EX = 103
    LIGHTBLUE_EX = 104
    LIGHTMAGENTA_EX = 105
    LIGHTCYAN_EX = 106
    LIGHTWHITE_EX = 107
class AnsiStyle(AnsiCodes):
    """Text intensity attributes (SGR 1/2/22, full reset 0)."""
    BRIGHT = 1
    DIM = 2
    NORMAL = 22
    RESET_ALL = 0
# Ready-to-use singletons; the AnsiCodes constructor converts the numeric
# class attributes into escape strings at import time.
Fore = AnsiFore()
Back = AnsiBack()
Style = AnsiStyle()
Cursor = AnsiCursor()
| gpl-3.0 |
ennoborg/gramps | gramps/plugins/tool/verify.py | 1 | 66904 | #
# Gramps - a GTK+/GNOME based genealogy program
#
# Copyright (C) 2000-2007 Donald N. Allingham
# Copyright (C) 2008 Brian G. Matherly
# Copyright (C) 2010 Jakim Friant
# Copyright (C) 2011 Paul Franklin
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
"""
A plugin to verify the data against user-adjusted tests.
This is the research tool, not the low-level data integrity check.
Note that this tool has an old heritage (20-Oct-2002 at least) and
so there are vestiges of earlier ways of doing things which have not
been converted to a more-modern way. For instance the way the tool
options are defined (and read in) is not done the way it would be now.
"""
# pylint: disable=not-callable
# pylint: disable=no-self-use
# pylint: disable=undefined-variable
#------------------------------------------------------------------------
#
# standard python modules
#
#------------------------------------------------------------------------
import os
import pickle
from hashlib import md5
#------------------------------------------------------------------------
#
# GNOME/GTK modules
#
#------------------------------------------------------------------------
from gi.repository import Gdk
from gi.repository import Gtk
from gi.repository import GObject
#------------------------------------------------------------------------
#
# Gramps modules
#
#------------------------------------------------------------------------
from gramps.gen.const import GRAMPS_LOCALE as glocale
_ = glocale.translation.sgettext
from gramps.gen.errors import WindowActiveError
from gramps.gen.const import URL_MANUAL_PAGE, VERSION_DIR
from gramps.gen.lib import (ChildRefType, EventRoleType, EventType,
FamilyRelType, NameType, Person)
from gramps.gen.lib.date import Today
from gramps.gui.editors import EditPerson, EditFamily
from gramps.gen.utils.db import family_name
from gramps.gui.display import display_help
from gramps.gui.managedwindow import ManagedWindow
from gramps.gen.updatecallback import UpdateCallback
from gramps.gui.plug import tool
from gramps.gui.glade import Glade
#-------------------------------------------------------------------------
#
# Constants
#
#-------------------------------------------------------------------------
# location of this tool's section in the wiki manual
WIKI_HELP_PAGE = '%s_-_Tools' % URL_MANUAL_PAGE
WIKI_HELP_SEC = _('manual|Verify_the_Data')
#-------------------------------------------------------------------------
#
# temp storage and related functions
#
#-------------------------------------------------------------------------
# module-level handle->object caches shared by the find_* helpers below;
# cleared between records by clear_cache()
_person_cache = {}
_family_cache = {}
_event_cache = {}
# today's date as a sort value, computed once at import time
_today = Today().get_sort_value()
def find_event(db, handle):
    """Return the event for *handle*, going through the module cache."""
    try:
        return _event_cache[handle]
    except KeyError:
        obj = db.get_event_from_handle(handle)
        _event_cache[handle] = obj
        return obj
def find_person(db, handle):
    """Return the person for *handle*, going through the module cache."""
    try:
        return _person_cache[handle]
    except KeyError:
        obj = db.get_person_from_handle(handle)
        _person_cache[handle] = obj
        return obj
def find_family(db, handle):
    """Return the family for *handle*, going through the module cache."""
    try:
        return _family_cache[handle]
    except KeyError:
        obj = db.get_family_from_handle(handle)
        _family_cache[handle] = obj
        return obj
def clear_cache():
    """Empty all three module-level lookup caches."""
    for cache in (_person_cache, _family_cache, _event_cache):
        cache.clear()
#-------------------------------------------------------------------------
#
# helper functions
#
#-------------------------------------------------------------------------
def get_date_from_event_handle(db, event_handle, estimate=False):
    """Return the sort value of the event's date, or 0 if unusable.

    Unless *estimate* is true, incomplete dates (day or month missing)
    count as unusable.
    """
    if not event_handle:
        return 0
    event = find_event(db, event_handle)
    if not event:
        return 0
    date_obj = event.get_date_object()
    incomplete = date_obj.get_day() == 0 or date_obj.get_month() == 0
    if incomplete and not estimate:
        return 0
    return date_obj.get_sort_value()
def get_date_from_event_type(db, person, event_type, estimate=False):
    """Return the date sort value of the person's first matching event.

    Burial events in which the person is not PRIMARY are skipped.
    Returns 0 when no event matches, or when the matching date is
    incomplete and *estimate* is false.
    """
    if not person:
        return 0
    for event_ref in person.get_event_ref_list():
        event = find_event(db, event_ref.ref)
        if not event:
            continue
        if (event.get_type() == EventType.BURIAL
                and event_ref.get_role() != EventRoleType.PRIMARY):
            continue
        if event.get_type() != event_type:
            continue
        date_obj = event.get_date_object()
        if (not estimate
                and (date_obj.get_day() == 0 or date_obj.get_month() == 0)):
            return 0
        return date_obj.get_sort_value()
    return 0
def get_bapt_date(db, person, estimate=False):
    """Return the sort value of the person's baptism date, or 0."""
    return get_date_from_event_type(db, person,
                                    EventType.BAPTISM, estimate)
def get_bury_date(db, person, estimate=False):
    """Return the sort value of the person's burial date.

    Only burial events in which the person has the PRIMARY role count.
    NOTE: implicitly returns None (not 0) when there is no such event;
    callers coerce None to 0.
    """
    # check role on burial event
    for event_ref in person.get_event_ref_list():
        event = find_event(db, event_ref.ref)
        if (event
                and event.get_type() == EventType.BURIAL
                and event_ref.get_role() == EventRoleType.PRIMARY):
            return get_date_from_event_type(db, person,
                                            EventType.BURIAL, estimate)
def get_birth_date(db, person, estimate=False):
    """Return the person's birth date sort value (0 if unknown).

    With *estimate* set, falls back to the baptism date.
    """
    if not person:
        return 0
    birth_ref = person.get_birth_ref()
    ret = 0
    if birth_ref:
        ret = get_date_from_event_handle(db, birth_ref.ref, estimate)
    if estimate and ret == 0:
        ret = get_bapt_date(db, person, estimate)
    return ret if ret is not None else 0
def get_death(db, person):
    """Return True when *person* has a death event.

    (If a user claims a person is dead, we believe it even when the
    event carries no date.)
    """
    return bool(person and person.get_death_ref())
def get_death_date(db, person, estimate=False):
    """Return the person's death date sort value (0 if unknown).

    With *estimate* set, falls back to the burial date.
    """
    if not person:
        return 0
    death_ref = person.get_death_ref()
    ret = 0
    if death_ref:
        ret = get_date_from_event_handle(db, death_ref.ref, estimate)
    if estimate and ret == 0:
        ret = get_bury_date(db, person, estimate)
    return ret if ret is not None else 0
def get_age_at_death(db, person, estimate):
    """Return death minus birth sort values, or 0 when either is unknown."""
    birth_date = get_birth_date(db, person, estimate)
    death_date = get_death_date(db, person, estimate)
    if birth_date > 0 and death_date > 0:
        return death_date - birth_date
    return 0
def get_father(db, family):
    """Return the family's father Person, or None."""
    if family:
        father_handle = family.get_father_handle()
        if father_handle:
            return find_person(db, father_handle)
    return None
def get_mother(db, family):
    """Return the family's mother Person, or None."""
    if family:
        mother_handle = family.get_mother_handle()
        if mother_handle:
            return find_person(db, mother_handle)
    return None
def get_child_birth_dates(db, family, estimate):
    """Return the known (non-zero) birth sort values of the family's
    children."""
    dates = []
    for child_ref in family.get_child_ref_list():
        birth = get_birth_date(db, find_person(db, child_ref.ref), estimate)
        if birth > 0:
            dates.append(birth)
    return dates
def get_n_children(db, person):
    """Return the total number of children over all the person's families."""
    total = 0
    for family_handle in person.get_family_handle_list():
        family = find_family(db, family_handle)
        if family:
            total += len(family.get_child_ref_list())
    return total
def get_marriage_date(db, family):
    """Return the sort value of the family's marriage date, or 0."""
    if not family:
        return 0
    for event_ref in family.get_event_ref_list():
        event = find_event(db, event_ref.ref)
        if event.get_type() != EventType.MARRIAGE:
            continue
        role = event_ref.get_role()
        if role in (EventRoleType.FAMILY, EventRoleType.PRIMARY):
            return event.get_date_object().get_sort_value()
    return 0
#-------------------------------------------------------------------------
#
# Actual tool
#
#-------------------------------------------------------------------------
class Verify(tool.Tool, ManagedWindow, UpdateCallback):
    """
    A plugin to verify the data against user-adjusted tests.
    This is the research tool, not the low-level data integrity check.
    """

    def __init__(self, dbstate, user, options_class, name, callback=None):
        """ initialize things """
        uistate = user.uistate
        self.label = _('Data Verify tool')
        # results window; stays None in CLI mode
        self.v_r = None
        tool.Tool.__init__(self, dbstate, options_class, name)
        ManagedWindow.__init__(self, uistate, [], self.__class__)
        if uistate:
            # progress-bar feedback is only available with a GUI
            UpdateCallback.__init__(self, self.uistate.pulse_progressbar)

        self.dbstate = dbstate
        if uistate:
            # GUI mode: draw the settings dialog and wait for the user
            self.init_gui()
        else:
            # CLI mode: print each result and run immediately
            self.add_results = self.add_results_cli
            self.run_the_tool(cli=True)

    def add_results_cli(self, results):
        """ print data for the user, no GUI """
        (msg, gramps_id, name, the_type, rule_id, severity, handle) = results
        severity_str = 'S'
        if severity == Rule.WARNING:
            severity_str = 'W'
        elif severity == Rule.ERROR:
            severity_str = 'E'
        # translators: needed for French+Arabic, ignore otherwise
        print(_("%(severity)s: %(msg)s, %(type)s: %(gid)s, %(name)s"
               ) % {'severity' : severity_str, 'msg' : msg,
                    'type' : the_type, 'gid' : gramps_id, 'name' : name})

    def init_gui(self):
        """ Draw dialog and make it handle everything """
        self.v_r = None
        self.top = Glade()
        self.top.connect_signals({
            "destroy_passed_object" : self.close,
            "on_help_clicked" : self.on_help_clicked,
            "on_verify_ok_clicked" : self.on_apply_clicked,
            "on_delete_event" : self.close,
        })

        window = self.top.toplevel
        self.set_window(window, self.top.get_object('title'), self.label)
        self.setup_configs('interface.verify', 650, 400)

        # populate the dialog widgets from the saved tool options
        o_dict = self.options.handler.options_dict
        for option in o_dict:
            if option in ['estimate_age', 'invdate']:
                # boolean options live in check buttons ...
                self.top.get_object(option).set_active(o_dict[option])
            else:
                # ... numeric limits live in spin buttons
                self.top.get_object(option).set_value(o_dict[option])
        self.show()

    def build_menu_names(self, obj):
        """ build the menu names """
        return (_("Tool settings"), self.label)

    def on_help_clicked(self, obj):
        """ Display the relevant portion of Gramps manual """
        display_help(webpage=WIKI_HELP_PAGE, section=WIKI_HELP_SEC)

    def on_apply_clicked(self, obj):
        """ event handler for user clicking the OK button: start things """
        run_button = self.top.get_object('button4')
        close_button = self.top.get_object('button5')
        run_button.set_sensitive(False)
        close_button.set_sensitive(False)
        # read the (possibly edited) option values back from the dialog
        o_dict = self.options.handler.options_dict
        for option in o_dict:
            if option in ['estimate_age', 'invdate']:
                o_dict[option] = self.top.get_object(option).get_active()
            else:
                o_dict[option] = self.top.get_object(option).get_value_as_int()

        try:
            self.v_r = VerifyResults(self.dbstate, self.uistate, self.track,
                                     self.top)
            self.add_results = self.v_r.add_results
            self.v_r.load_ignored(self.db.full_name)
        except WindowActiveError:
            # the results window is already open; keep using it
            pass
        except AttributeError: # VerifyResults.load_ignored was not run
            self.v_r.ignores = {}

        self.uistate.set_busy_cursor(True)
        self.uistate.progress.show()
        busy_cursor = Gdk.Cursor.new_for_display(Gdk.Display.get_default(),
                                                 Gdk.CursorType.WATCH)
        self.window.get_window().set_cursor(busy_cursor)
        try:
            self.v_r.window.get_window().set_cursor(busy_cursor)
        except AttributeError:
            pass

        self.run_the_tool(cli=False)

        self.uistate.progress.hide()
        self.uistate.set_busy_cursor(False)
        try:
            self.window.get_window().set_cursor(None)
            self.v_r.window.get_window().set_cursor(None)
        except AttributeError:
            pass
        run_button.set_sensitive(True)
        close_button.set_sensitive(True)
        self.reset()

        # Save options
        self.options.handler.save_options()

    def run_the_tool(self, cli=False):
        """ run the tool """

        person_handles = self.db.iter_person_handles()

        # inject every option as a module-level global, so the rule
        # constructors below can reference the limits by bare name
        for option, value in self.options.handler.options_dict.items():
            exec('%s = %s' % (option, value), globals())
        # TODO my pylint doesn't seem to understand these variables really
        # are defined here, so I have disabled the undefined-variable error

        if self.v_r:
            self.v_r.real_model.clear()

        self.set_total(self.db.get_number_of_people() +
                       self.db.get_number_of_families())

        # Person-based rules
        for person_handle in person_handles:
            person = find_person(self.db, person_handle)

            rule_list = [
                BirthAfterBapt(self.db, person),
                DeathBeforeBapt(self.db, person),
                BirthAfterBury(self.db, person),
                DeathAfterBury(self.db, person),
                BirthAfterDeath(self.db, person),
                BaptAfterBury(self.db, person),
                OldAge(self.db, person, oldage, estimate_age),
                OldAgeButNoDeath(self.db, person, oldage, estimate_age),
                UnknownGender(self.db, person),
                MultipleParents(self.db, person),
                MarriedOften(self.db, person, wedder),
                OldUnmarried(self.db, person, oldunm, estimate_age),
                TooManyChildren(self.db, person, mxchilddad, mxchildmom),
                Disconnected(self.db, person),
                InvalidBirthDate(self.db, person, invdate),
                InvalidDeathDate(self.db, person, invdate),
            ]

            for rule in rule_list:
                if rule.broken():
                    self.add_results(rule.report_itself())

            clear_cache()
            if not cli:
                self.update()

        # Family-based rules
        for family_handle in self.db.iter_family_handles():
            family = find_family(self.db, family_handle)

            rule_list = [
                SameSexFamily(self.db, family),
                FemaleHusband(self.db, family),
                MaleWife(self.db, family),
                SameSurnameFamily(self.db, family),
                LargeAgeGapFamily(self.db, family, hwdif, estimate_age),
                MarriageBeforeBirth(self.db, family, estimate_age),
                MarriageAfterDeath(self.db, family, estimate_age),
                EarlyMarriage(self.db, family, yngmar, estimate_age),
                LateMarriage(self.db, family, oldmar, estimate_age),
                OldParent(self.db, family, oldmom, olddad, estimate_age),
                YoungParent(self.db, family, yngmom, yngdad, estimate_age),
                UnbornParent(self.db, family, estimate_age),
                DeadParent(self.db, family, estimate_age),
                LargeChildrenSpan(self.db, family, cbspan, estimate_age),
                LargeChildrenAgeDiff(self.db, family, cspace, estimate_age),
                MarriedRelation(self.db, family),
            ]

            for rule in rule_list:
                if rule.broken():
                    self.add_results(rule.report_itself())

            clear_cache()
            if not cli:
                self.update()
#-------------------------------------------------------------------------
#
# Display the results
#
#-------------------------------------------------------------------------
class VerifyResults(ManagedWindow):
    """ GUI class to show the results in another dialog """
    # column indices of self.real_model (the backing Gtk.ListStore)
    IGNORE_COL = 0      # bool: user marked this row as ignored
    WARNING_COL = 1     # str: warning text
    OBJ_ID_COL = 2      # str: gramps_id of the flagged object
    OBJ_NAME_COL = 3    # str: display name of the flagged object
    OBJ_TYPE_COL = 4    # str: type of the flagged object
    RULE_ID_COL = 5     # object: id of the rule that fired
    OBJ_HANDLE_COL = 6  # str: database handle of the flagged object
    FG_COLOR_COL = 7    # str: foreground color used for the row
    TRUE_COL = 8        # bool: visibility column for the "show all" view
    SHOW_COL = 9        # bool: visibility column for "hide marked" view
    def __init__(self, dbstate, uistate, track, glade):
        """ initialize things """
        self.title = _('Data Verification Results')

        ManagedWindow.__init__(self, uistate, track, self.__class__)

        self.dbstate = dbstate
        # compute the per-database filename used to persist ignore marks
        self._set_filename()
        self.top = glade
        window = self.top.get_object("verify_result")
        self.set_window(window, self.top.get_object('title2'), self.title)
        self.setup_configs('interface.verifyresults', 500, 300)
        # this window shares its Glade file with the settings dialog, so
        # the dialog's callbacks must also be present (as no-ops) here
        self.top.connect_signals({
            "destroy_passed_object" : self.close,
            "on_verify_ok_clicked" : self.__dummy,
            "on_help_clicked" : self.__dummy,
        })

        self.warn_tree = self.top.get_object('warn_tree')
        self.warn_tree.connect('button_press_event', self.double_click)

        self.selection = self.warn_tree.get_selection()

        self.hide_button = self.top.get_object('hide_button')
        self.hide_button.connect('toggled', self.hide_toggled)

        self.mark_button = self.top.get_object('mark_all')
        self.mark_button.connect('clicked', self.mark_clicked)

        self.unmark_button = self.top.get_object('unmark_all')
        self.unmark_button.connect('clicked', self.unmark_clicked)

        self.invert_button = self.top.get_object('invert_all')
        self.invert_button.connect('clicked', self.invert_clicked)

        # backing store; column layout is documented by the *_COL constants
        self.real_model = Gtk.ListStore(GObject.TYPE_BOOLEAN,
                                        GObject.TYPE_STRING,
                                        GObject.TYPE_STRING,
                                        GObject.TYPE_STRING,
                                        GObject.TYPE_STRING, object,
                                        GObject.TYPE_STRING,
                                        GObject.TYPE_STRING,
                                        GObject.TYPE_BOOLEAN,
                                        GObject.TYPE_BOOLEAN)
        # view chain: real_model -> filter (visibility) -> sort -> treeview
        self.filt_model = self.real_model.filter_new()
        self.filt_model.set_visible_column(VerifyResults.TRUE_COL)
        self.sort_model = self.filt_model.sort_new_with_model()
        self.warn_tree.set_model(self.sort_model)

        self.renderer = Gtk.CellRendererText()
        self.img_renderer = Gtk.CellRendererPixbuf()
        self.bool_renderer = Gtk.CellRendererToggle()
        self.bool_renderer.connect('toggled', self.selection_toggled)

        # Add ignore column
        ignore_column = Gtk.TreeViewColumn(_('Mark'), self.bool_renderer,
                                           active=VerifyResults.IGNORE_COL)
        ignore_column.set_sort_column_id(VerifyResults.IGNORE_COL)
        self.warn_tree.append_column(ignore_column)

        # Add image column
        img_column = Gtk.TreeViewColumn(None, self.img_renderer)
        img_column.set_cell_data_func(self.img_renderer, self.get_image)
        self.warn_tree.append_column(img_column)

        # Add column with the warning text
        warn_column = Gtk.TreeViewColumn(_('Warning'), self.renderer,
                                         text=VerifyResults.WARNING_COL,
                                         foreground=VerifyResults.FG_COLOR_COL)
        warn_column.set_sort_column_id(VerifyResults.WARNING_COL)
        self.warn_tree.append_column(warn_column)

        # Add column with object gramps_id
        id_column = Gtk.TreeViewColumn(_('ID'), self.renderer,
                                       text=VerifyResults.OBJ_ID_COL,
                                       foreground=VerifyResults.FG_COLOR_COL)
        id_column.set_sort_column_id(VerifyResults.OBJ_ID_COL)
        self.warn_tree.append_column(id_column)

        # Add column with object name
        name_column = Gtk.TreeViewColumn(_('Name'), self.renderer,
                                         text=VerifyResults.OBJ_NAME_COL,
                                         foreground=VerifyResults.FG_COLOR_COL)
        name_column.set_sort_column_id(VerifyResults.OBJ_NAME_COL)
        self.warn_tree.append_column(name_column)

        self.show()
        self.window_shown = False
    # connected to "on_verify_ok_clicked" and "on_help_clicked" in __init__
    def __dummy(self, obj):
        """dummy callback, needed because VerifyResults is in same glade file
        as Verify, so callbacks of Verify must be defined.
        """
        pass
def _set_filename(self):
""" set the file where people who will be ignored will be kept """
db_filename = self.dbstate.db.get_save_path()
if isinstance(db_filename, str):
db_filename = db_filename.encode('utf-8')
md5sum = md5(db_filename)
self.ignores_filename = os.path.join(
VERSION_DIR, md5sum.hexdigest() + os.path.extsep + 'vfm')
    def load_ignored(self, db_filename):
        """ get ready to load the file with the previously-ignored people """
        ## a new Gramps major version means recreating the .vfm file.
        ## User can copy over old one, with name of new one, but no guarantee
        ## that will work.
        # NOTE(review): *db_filename* is unused; the path computed by
        # _set_filename() is used instead -- confirm this is intended.
        # fall back to an empty ignore map when the file can't be read
        if not self._load_ignored(self.ignores_filename):
            self.ignores = {}
def _load_ignored(self, filename):
""" load the file with the people who were previously ignored """
try:
try:
file = open(filename, 'rb')
except IOError:
return False
self.ignores = pickle.load(file)
file.close()
return True
except (IOError, EOFError):
file.close()
return False
    def save_ignored(self, new_ignores):
        """ get ready to save the file with the ignored people """
        self.ignores = new_ignores
        # best-effort: the success flag from _save_ignored is ignored here
        self._save_ignored(self.ignores_filename)
def _save_ignored(self, filename):
""" save the file with the people the user wants to ignore """
try:
with open(filename, 'wb') as file:
pickle.dump(self.ignores, file, 1)
return True
except IOError:
return False
def get_marking(self, handle, rule_id):
if handle in self.ignores:
return rule_id in self.ignores[handle]
else:
return False
def get_new_marking(self):
new_ignores = {}
for row_num in range(len(self.real_model)):
path = (row_num,)
row = self.real_model[path]
ignore = row[VerifyResults.IGNORE_COL]
if ignore:
handle = row[VerifyResults.OBJ_HANDLE_COL]
rule_id = row[VerifyResults.RULE_ID_COL]
if handle not in new_ignores:
new_ignores[handle] = set()
new_ignores[handle].add(rule_id)
return new_ignores
def close(self, *obj):
""" close the dialog and write out the file """
new_ignores = self.get_new_marking()
self.save_ignored(new_ignores)
ManagedWindow.close(self, *obj)
def hide_toggled(self, button):
if button.get_active():
button.set_label(_("_Show all"))
self.filt_model = self.real_model.filter_new()
self.filt_model.set_visible_column(VerifyResults.SHOW_COL)
self.sort_model = self.filt_model.sort_new_with_model()
self.warn_tree.set_model(self.sort_model)
else:
self.filt_model = self.real_model.filter_new()
self.filt_model.set_visible_column(VerifyResults.TRUE_COL)
self.sort_model = self.filt_model.sort_new_with_model()
self.warn_tree.set_model(self.sort_model)
button.set_label(_("_Hide marked"))
    def selection_toggled(self, cell, path_string):
        """Toggle the ignore mark of the row whose checkbox was clicked.

        *path_string* is the colon-separated path in the *sorted* model;
        it must be mapped sort -> filter -> real before the backing row
        can be mutated.
        """
        sort_path = tuple(map(int, path_string.split(':')))
        # Map the sorted-view path down to the filtered model...
        filt_path = self.sort_model.convert_path_to_child_path(
            Gtk.TreePath(sort_path))
        # ...and from the filtered model down to the real ListStore.
        real_path = self.filt_model.convert_path_to_child_path(filt_path)
        row = self.real_model[real_path]
        row[VerifyResults.IGNORE_COL] = not row[VerifyResults.IGNORE_COL]
        # An ignored row is hidden when the "hide marked" filter is active.
        row[VerifyResults.SHOW_COL] = not row[VerifyResults.IGNORE_COL]
        # Notify views that this row changed so the toggle redraws.
        self.real_model.row_changed(real_path, row.iter)
def mark_clicked(self, mark_button):
for row_num in range(len(self.real_model)):
path = (row_num,)
row = self.real_model[path]
row[VerifyResults.IGNORE_COL] = True
row[VerifyResults.SHOW_COL] = False
self.filt_model.refilter()
def unmark_clicked(self, unmark_button):
for row_num in range(len(self.real_model)):
path = (row_num,)
row = self.real_model[path]
row[VerifyResults.IGNORE_COL] = False
row[VerifyResults.SHOW_COL] = True
self.filt_model.refilter()
def invert_clicked(self, invert_button):
for row_num in range(len(self.real_model)):
path = (row_num,)
row = self.real_model[path]
row[VerifyResults.IGNORE_COL] = not row[VerifyResults.IGNORE_COL]
row[VerifyResults.SHOW_COL] = not row[VerifyResults.SHOW_COL]
self.filt_model.refilter()
    def double_click(self, obj, event):
        """ the user wants to edit the selected person or family """
        # Only react to a left-button double click.
        if event.type == Gdk.EventType._2BUTTON_PRESS and event.button == 1:
            (model, node) = self.selection.get_selected()
            if not node:
                return
            # Map the selected node's path: sorted -> filtered -> real model.
            sort_path = self.sort_model.get_path(node)
            filt_path = self.sort_model.convert_path_to_child_path(sort_path)
            real_path = self.filt_model.convert_path_to_child_path(filt_path)
            row = self.real_model[real_path]
            the_type = row[VerifyResults.OBJ_TYPE_COL]
            handle = row[VerifyResults.OBJ_HANDLE_COL]
            # Open the matching editor; WindowActiveError means an editor
            # for this object is already open, so silently do nothing.
            if the_type == 'Person':
                try:
                    person = self.dbstate.db.get_person_from_handle(handle)
                    EditPerson(self.dbstate, self.uistate, self.track, person)
                except WindowActiveError:
                    pass
            elif the_type == 'Family':
                try:
                    family = self.dbstate.db.get_family_from_handle(handle)
                    EditFamily(self.dbstate, self.uistate, self.track, family)
                except WindowActiveError:
                    pass
def get_image(self, column, cell, model, iter_, user_data=None):
""" flag whether each line is a person or family """
the_type = model.get_value(iter_, VerifyResults.OBJ_TYPE_COL)
if the_type == 'Person':
cell.set_property('icon-name', 'gramps-person')
elif the_type == 'Family':
cell.set_property('icon-name', 'gramps-family')
def add_results(self, results):
(msg, gramps_id, name, the_type, rule_id, severity, handle) = results
ignore = self.get_marking(handle, rule_id)
if severity == Rule.ERROR:
line_color = 'red'
else:
line_color = None
self.real_model.append(row=[ignore, msg, gramps_id, name,
the_type, rule_id, handle, line_color,
True, not ignore])
if not self.window_shown:
self.window.show()
self.window_shown = True
def build_menu_names(self, obj):
""" build the menu names """
return (self.title, self.title)
#------------------------------------------------------------------------
#
# VerifyOptions
#
#------------------------------------------------------------------------
class VerifyOptions(tool.ToolOptions):
    """
    Defines options and provides handling interface.
    """

    def __init__(self, name, person_id=None):
        """ initialize the options """
        tool.ToolOptions.__init__(self, name, person_id)

        # Options specific for this report
        self.options_dict = {
            'oldage'       : 90,
            'hwdif'        : 30,
            'cspace'       : 8,
            'cbspan'       : 25,
            'yngmar'       : 17,
            'oldmar'       : 50,
            'oldmom'       : 48,
            'yngmom'       : 17,
            'yngdad'       : 18,
            'olddad'       : 65,
            'wedder'       : 3,
            'mxchildmom'   : 12,
            'mxchilddad'   : 15,
            'lngwdw'       : 30,
            'oldunm'       : 99,
            'estimate_age' : 0,
            'invdate'      : 1,
        }
        # TODO these strings are defined in the glade file (more or less, since
        # those have accelerators), and so are not translated here, but that
        # means that a CLI user who runs gramps in a non-English language and
        # says (for instance) "show=oldage" will see "Maximum age" in English
        # (but I think such a CLI use is very unlikely and so is low priority,
        # especially since the tool's normal CLI output will be translated)
        self.options_help = {
            'oldage'       : ("=num", "Maximum age", "Age in years"),
            'hwdif'        : ("=num", "Maximum husband-wife age difference",
                              "Age difference in years"),
            'cspace'       : ("=num",
                              "Maximum number of years between children",
                              "Number of years"),
            'cbspan'       : ("=num",
                              "Maximum span of years for all children",
                              "Span in years"),
            'yngmar'       : ("=num", "Minimum age to marry", "Age in years"),
            'oldmar'       : ("=num", "Maximum age to marry", "Age in years"),
            'oldmom'       : ("=num", "Maximum age to bear a child",
                              "Age in years"),
            'yngmom'       : ("=num", "Minimum age to bear a child",
                              "Age in years"),
            'yngdad'       : ("=num", "Minimum age to father a child",
                              "Age in years"),
            'olddad'       : ("=num", "Maximum age to father a child",
                              "Age in years"),
            'wedder'       : ("=num", "Maximum number of spouses for a person",
                              "Number of spouses"),
            'mxchildmom'   : ("=num", "Maximum number of children for a woman",
                              "Number of children"),
            # typo fixed: was "Number of chidlren"
            'mxchilddad'   : ("=num", "Maximum number of children for a man",
                              "Number of children"),
            'lngwdw'       : ("=num", "Maximum number of consecutive years "
                                      "of widowhood before next marriage",
                              "Number of years"),
            # missing comma fixed: description and value-help were silently
            # concatenated into one string, producing a 2-tuple
            'oldunm'       : ("=num", "Maximum age for an unmarried person",
                              "Number of years"),
            'estimate_age' : ("=0/1",
                              "Whether to estimate missing or inexact dates",
                              ["Do not estimate", "Estimate dates"],
                              True),
            # missing comma fixed: the two value labels now form a list,
            # matching the shape of the 'estimate_age' entry above
            'invdate'      : ("=0/1", "Whether to check for invalid dates",
                              ["Do not identify invalid dates",
                               "Identify invalid dates"], True),
        }
#-------------------------------------------------------------------------
#
# Base classes for different tests -- the rules
#
#-------------------------------------------------------------------------
class Rule:
    """
    Basic class for use in this tool.

    Other rules must inherit from this and override at least
    get_message() and (via PersonRule/FamilyRule) get_name().
    """
    ID = 0        # unique rule number, overridden by each concrete rule
    TYPE = ''     # 'Person' or 'Family', set by the base subclasses

    ERROR = 1     # severity: definite inconsistency in the data
    WARNING = 2   # severity: suspicious but possibly correct
    SEVERITY = WARNING

    def __init__(self, db, obj):
        """ initialize the rule with the database and the checked object """
        self.db = db
        self.obj = obj

    def broken(self):
        """
        Return boolean indicating whether this rule is violated.
        """
        return False

    def get_message(self):
        """ return the rule's error message """
        # raise instead of `assert False`: asserts are stripped under -O
        raise NotImplementedError("Needs to be overridden in the derived class")

    def get_name(self):
        """ return the person's primary name or the name of the family """
        raise NotImplementedError("Needs to be overridden in the derived class")

    def get_handle(self):
        """ return the object's handle """
        return self.obj.handle

    def get_id(self):
        """ return the object's gramps_id """
        return self.obj.gramps_id

    def get_rule_id(self):
        """ return the rule's identification number, and parameters """
        params = self._get_params()
        return (self.ID, params)

    def _get_params(self):
        """ return the rule's parameters (empty by default) """
        return tuple()

    def report_itself(self):
        """ return the details about a rule as a result tuple """
        handle = self.get_handle()
        the_type = self.TYPE
        rule_id = self.get_rule_id()
        severity = self.SEVERITY
        name = self.get_name()
        gramps_id = self.get_id()
        msg = self.get_message()
        return (msg, gramps_id, name, the_type, rule_id, severity, handle)
class PersonRule(Rule):
    """
    Base class for rules that examine a single person.
    """
    TYPE = 'Person'

    def get_name(self):
        """ return the person's primary name """
        primary_name = self.obj.get_primary_name()
        return primary_name.get_name()
class FamilyRule(Rule):
    """
    Base class for rules that examine a single family.
    """
    TYPE = 'Family'

    def get_name(self):
        """ return the displayable name of the family """
        return family_name(self.obj, self.db)
#-------------------------------------------------------------------------
#
# Actual rules for testing
#
#-------------------------------------------------------------------------
class BirthAfterBapt(PersonRule):
    """ test if a person was baptised before their birth """
    ID = 1
    SEVERITY = Rule.ERROR

    def broken(self):
        """ violated when both dates are known and baptism precedes birth """
        birth_date = get_birth_date(self.db, self.obj)
        bapt_date = get_bapt_date(self.db, self.obj)
        if birth_date is None or bapt_date is None:
            return False
        return 0 < bapt_date < birth_date

    def get_message(self):
        """ return the rule's error message """
        return _("Baptism before birth")
class DeathBeforeBapt(PersonRule):
    """ test if a person died before their baptism """
    ID = 2
    SEVERITY = Rule.ERROR

    def broken(self):
        """ violated when both dates are known and death precedes baptism """
        death_date = get_death_date(self.db, self.obj)
        bapt_date = get_bapt_date(self.db, self.obj)
        if death_date is None or bapt_date is None:
            return False
        return 0 < death_date < bapt_date

    def get_message(self):
        """ return the rule's error message """
        return _("Death before baptism")
class BirthAfterBury(PersonRule):
    """ test if a person was buried before their birth """
    ID = 3
    SEVERITY = Rule.ERROR

    def broken(self):
        """ violated when both dates are known and burial precedes birth """
        birth_date = get_birth_date(self.db, self.obj)
        bury_date = get_bury_date(self.db, self.obj)
        if birth_date is None or bury_date is None:
            return False
        return 0 < bury_date < birth_date

    def get_message(self):
        """ return the rule's error message """
        return _("Burial before birth")
class DeathAfterBury(PersonRule):
    """ test if a person was buried before their death """
    ID = 4
    SEVERITY = Rule.ERROR

    def broken(self):
        """ violated when both dates are known and burial precedes death """
        death_date = get_death_date(self.db, self.obj)
        bury_date = get_bury_date(self.db, self.obj)
        if death_date is None or bury_date is None:
            return False
        return 0 < bury_date < death_date

    def get_message(self):
        """ return the rule's error message """
        return _("Burial before death")
class BirthAfterDeath(PersonRule):
    """ test if a person died before their birth """
    ID = 5
    SEVERITY = Rule.ERROR

    def broken(self):
        """ violated when both dates are known and death precedes birth """
        birth_date = get_birth_date(self.db, self.obj)
        death_date = get_death_date(self.db, self.obj)
        if birth_date is None or death_date is None:
            return False
        return 0 < death_date < birth_date

    def get_message(self):
        """ return the rule's error message """
        return _("Death before birth")
class BaptAfterBury(PersonRule):
    """ test if a person was buried before their baptism """
    ID = 6
    SEVERITY = Rule.ERROR

    def broken(self):
        """ violated when both dates are known and burial precedes baptism """
        bapt_date = get_bapt_date(self.db, self.obj)
        bury_date = get_bury_date(self.db, self.obj)
        if bapt_date is None or bury_date is None:
            return False
        return 0 < bury_date < bapt_date

    def get_message(self):
        """ return the rule's error message """
        return _("Burial before baptism")
class OldAge(PersonRule):
    """ test if a person died beyond the age the user has set """
    ID = 7
    SEVERITY = Rule.WARNING

    def __init__(self, db, person, old_age, est):
        """ initialize with the age limit and the estimate-dates flag """
        PersonRule.__init__(self, db, person)
        self.old_age = old_age
        self.est = est

    def _get_params(self):
        """ return the rule's parameters """
        return (self.old_age, self.est)

    def broken(self):
        """ violated when the age at death (in years) exceeds the limit """
        days_at_death = get_age_at_death(self.db, self.obj, self.est)
        return days_at_death / 365 > self.old_age

    def get_message(self):
        """ return the rule's error message """
        return _("Old age at death")
class UnknownGender(PersonRule):
    """ test if a person is neither a male nor a female """
    ID = 8
    SEVERITY = Rule.WARNING

    def broken(self):
        """ violated when the gender is neither MALE nor FEMALE """
        gender = self.obj.get_gender()
        return gender not in (Person.MALE, Person.FEMALE)

    def get_message(self):
        """ return the rule's error message """
        return _("Unknown gender")
class MultipleParents(PersonRule):
    """ test if a person belongs to multiple families """
    ID = 9
    SEVERITY = Rule.WARNING

    def broken(self):
        """ violated when the person has more than one parent family """
        return len(self.obj.get_parent_family_handle_list()) > 1

    def get_message(self):
        """ return the rule's error message """
        return _("Multiple parents")
class MarriedOften(PersonRule):
    """ test if a person was married 'often' """
    ID = 10
    SEVERITY = Rule.WARNING

    def __init__(self, db, person, wedder):
        """ initialize with the maximum allowed number of spouses """
        PersonRule.__init__(self, db, person)
        self.wedder = wedder

    def _get_params(self):
        """ return the rule's parameters """
        return (self.wedder,)

    def broken(self):
        """ violated when the number of families exceeds the limit """
        return len(self.obj.get_family_handle_list()) > self.wedder

    def get_message(self):
        """ return the rule's error message """
        return _("Married often")
class OldUnmarried(PersonRule):
    """ test if a person was married when they died """
    ID = 11
    SEVERITY = Rule.WARNING

    def __init__(self, db, person, old_unm, est):
        """ initialize with the age limit and the estimate-dates flag """
        PersonRule.__init__(self, db, person)
        self.old_unm = old_unm
        self.est = est

    def _get_params(self):
        """ return the rule's parameters """
        return (self.old_unm, self.est)

    def broken(self):
        """ violated when never married yet older than the limit at death """
        if self.obj.get_family_handle_list():
            return False
        days_at_death = get_age_at_death(self.db, self.obj, self.est)
        return days_at_death / 365 > self.old_unm

    def get_message(self):
        """ return the rule's error message """
        return _("Old and unmarried")
class TooManyChildren(PersonRule):
    """ test if a person had 'too many' children """
    ID = 12
    SEVERITY = Rule.WARNING

    def __init__(self, db, obj, mx_child_dad, mx_child_mom):
        """ initialize with the per-gender child-count limits """
        PersonRule.__init__(self, db, obj)
        self.mx_child_dad = mx_child_dad
        self.mx_child_mom = mx_child_mom

    def _get_params(self):
        """ return the rule's parameters """
        return (self.mx_child_dad, self.mx_child_mom)

    def broken(self):
        """ return boolean indicating whether this rule is violated """
        n_child = get_n_children(self.db, self.obj)
        # BUG FIX: the original compared the *bound method*
        # (self.obj.get_gender == Person.MALE) instead of calling it,
        # so both conditions were always False and the rule never fired.
        gender = self.obj.get_gender()
        if gender == Person.MALE and n_child > self.mx_child_dad:
            return True
        if gender == Person.FEMALE and n_child > self.mx_child_mom:
            return True
        return False

    def get_message(self):
        """ return the rule's error message """
        return _("Too many children")
class SameSexFamily(FamilyRule):
    """ test if a family's parents are both male or both female """
    ID = 13
    SEVERITY = Rule.WARNING

    def broken(self):
        """ violated when both parents exist with equal, known genders """
        mother = get_mother(self.db, self.obj)
        father = get_father(self.db, self.obj)
        if not (mother and father):
            return False
        # A pair of UNKNOWN genders does not count as same-sex.
        if mother.get_gender() == Person.UNKNOWN:
            return False
        return mother.get_gender() == father.get_gender()

    def get_message(self):
        """ return the rule's error message """
        return _("Same sex marriage")
class FemaleHusband(FamilyRule):
    """ test if a family's 'husband' is female """
    ID = 14
    SEVERITY = Rule.WARNING

    def broken(self):
        """ violated when the father exists and is female """
        father = get_father(self.db, self.obj)
        if not father:
            return False
        return father.get_gender() == Person.FEMALE

    def get_message(self):
        """ return the rule's error message """
        return _("Female husband")
class MaleWife(FamilyRule):
    """ test if a family's 'wife' is male """
    ID = 15
    SEVERITY = Rule.WARNING

    def broken(self):
        """ violated when the mother exists and is male """
        mother = get_mother(self.db, self.obj)
        if not mother:
            return False
        return mother.get_gender() == Person.MALE

    def get_message(self):
        """ return the rule's error message """
        return _("Male wife")
class SameSurnameFamily(FamilyRule):
    """ test if a family's parents were born with the same surname """
    ID = 16
    SEVERITY = Rule.WARNING

    def broken(self):
        """ violated when both parents' non-empty birth surnames match """
        mother = get_mother(self.db, self.obj)
        father = get_father(self.db, self.obj)
        # Both parents must exist.
        if not (mother and father):
            return False
        mname = mother.get_primary_name()
        fname = father.get_primary_name()
        # Only compare birth names (not married names).
        if (mname.get_type() != NameType.BIRTH
                or fname.get_type() != NameType.BIRTH):
            return False
        # Empty names don't count.
        if not mname.get_surname() or not fname.get_surname():
            return False
        # Finally, check if the names are the same.
        return mname.get_surname() == fname.get_surname()

    def get_message(self):
        """ return the rule's error message """
        return _("Husband and wife with the same surname")
class LargeAgeGapFamily(FamilyRule):
    """ test if a family's parents were born far apart """
    ID = 17
    SEVERITY = Rule.WARNING

    def __init__(self, db, obj, hw_diff, est):
        """ initialize with the age-gap limit and estimate-dates flag """
        FamilyRule.__init__(self, db, obj)
        self.hw_diff = hw_diff
        self.est = est

    def _get_params(self):
        """ return the rule's parameters """
        return (self.hw_diff, self.est)

    def broken(self):
        """ violated when both birth dates are known and far apart """
        mother = get_mother(self.db, self.obj)
        father = get_father(self.db, self.obj)
        mother_birth = get_birth_date(self.db, mother, self.est)
        father_birth = get_birth_date(self.db, father, self.est)
        if mother_birth <= 0 or father_birth <= 0:
            return False
        return abs(father_birth - mother_birth) / 365 > self.hw_diff

    def get_message(self):
        """ return the rule's error message """
        return _("Large age difference between spouses")
class MarriageBeforeBirth(FamilyRule):
    """ test if each family's parent was born before the marriage """
    ID = 18
    SEVERITY = Rule.ERROR

    def __init__(self, db, obj, est):
        """ initialize with the estimate-dates flag """
        FamilyRule.__init__(self, db, obj)
        self.est = est

    def _get_params(self):
        """ return the rule's parameters """
        return (self.est,)

    def broken(self):
        """ violated when either parent's birth follows the marriage """
        marr_date = get_marriage_date(self.db, self.obj)
        if marr_date <= 0:
            return False
        mother = get_mother(self.db, self.obj)
        father = get_father(self.db, self.obj)
        mother_birth = get_birth_date(self.db, mother, self.est)
        father_birth = get_birth_date(self.db, father, self.est)
        father_broken = 0 < marr_date < father_birth
        mother_broken = 0 < marr_date < mother_birth
        return father_broken or mother_broken

    def get_message(self):
        """ return the rule's error message """
        return _("Marriage before birth")
class MarriageAfterDeath(FamilyRule):
    """ test if each family's parent died before the marriage """
    ID = 19
    SEVERITY = Rule.ERROR

    def __init__(self, db, obj, est):
        """ initialize with the estimate-dates flag """
        FamilyRule.__init__(self, db, obj)
        self.est = est

    def _get_params(self):
        """ return the rule's parameters """
        return (self.est,)

    def broken(self):
        """ violated when either parent died before the marriage """
        marr_date = get_marriage_date(self.db, self.obj)
        if marr_date <= 0:
            return False
        mother = get_mother(self.db, self.obj)
        father = get_father(self.db, self.obj)
        mother_death = get_death_date(self.db, mother, self.est)
        father_death = get_death_date(self.db, father, self.est)
        father_broken = 0 < father_death < marr_date
        mother_broken = 0 < mother_death < marr_date
        return father_broken or mother_broken

    def get_message(self):
        """ return the rule's error message """
        return _("Marriage after death")
class EarlyMarriage(FamilyRule):
    """ test if each family's parent was 'too young' at the marriage """
    ID = 20
    SEVERITY = Rule.WARNING

    def __init__(self, db, obj, yng_mar, est):
        """ initialize with the minimum marriage age and estimate flag """
        FamilyRule.__init__(self, db, obj)
        self.yng_mar = yng_mar
        self.est = est

    def _get_params(self):
        """ return the rule's parameters """
        return (self.yng_mar, self.est,)

    def broken(self):
        """ violated when either parent married below the minimum age """
        marr_date = get_marriage_date(self.db, self.obj)
        if marr_date <= 0:
            return False
        mother = get_mother(self.db, self.obj)
        father = get_father(self.db, self.obj)
        mother_birth = get_birth_date(self.db, mother, self.est)
        father_birth = get_birth_date(self.db, father, self.est)
        father_broken = (0 < father_birth < marr_date and
                         (marr_date - father_birth) / 365 < self.yng_mar)
        mother_broken = (0 < mother_birth < marr_date and
                         (marr_date - mother_birth) / 365 < self.yng_mar)
        return father_broken or mother_broken

    def get_message(self):
        """ return the rule's error message """
        return _("Early marriage")
class LateMarriage(FamilyRule):
    """ test if each family's parent was 'too old' at the marriage """
    ID = 21
    SEVERITY = Rule.WARNING

    def __init__(self, db, obj, old_mar, est):
        """ initialize with the maximum marriage age and estimate flag """
        FamilyRule.__init__(self, db, obj)
        self.old_mar = old_mar
        self.est = est

    def _get_params(self):
        """ return the rule's parameters """
        return (self.old_mar, self.est)

    def broken(self):
        """ violated when either parent married above the maximum age """
        marr_date = get_marriage_date(self.db, self.obj)
        if marr_date <= 0:
            return False
        mother = get_mother(self.db, self.obj)
        father = get_father(self.db, self.obj)
        mother_birth = get_birth_date(self.db, mother, self.est)
        father_birth = get_birth_date(self.db, father, self.est)
        father_broken = (father_birth > 0 and
                         (marr_date - father_birth) / 365 > self.old_mar)
        mother_broken = (mother_birth > 0 and
                         (marr_date - mother_birth) / 365 > self.old_mar)
        return father_broken or mother_broken

    def get_message(self):
        """ return the rule's error message """
        return _("Late marriage")
class OldParent(FamilyRule):
    """ test if each family's parent was 'too old' at a child's birth """
    ID = 22
    SEVERITY = Rule.WARNING

    def __init__(self, db, obj, old_mom, old_dad, est):
        """ initialize with the per-parent age limits and estimate flag """
        FamilyRule.__init__(self, db, obj)
        self.old_mom = old_mom
        self.old_dad = old_dad
        self.est = est

    def _get_params(self):
        """ return the rule's parameters """
        return (self.old_mom, self.old_dad, self.est)

    def broken(self):
        """ violated when a parent exceeds the age limit at a child's
        birth; rebinds get_message to name the offending parent """
        mother = get_mother(self.db, self.obj)
        father = get_father(self.db, self.obj)
        mother_birth = get_birth_date(self.db, mother, self.est)
        father_birth = get_birth_date(self.db, father, self.est)
        for child_ref in self.obj.get_child_ref_list():
            child = find_person(self.db, child_ref.ref)
            child_birth = get_birth_date(self.db, child, self.est)
            if child_birth <= 0:
                continue
            if (father_birth > 0 and
                    (child_birth - father_birth) / 365 > self.old_dad):
                self.get_message = self.father_message
                return True
            if (mother_birth > 0 and
                    (child_birth - mother_birth) / 365 > self.old_mom):
                self.get_message = self.mother_message
                return True
        return False

    def father_message(self):
        """ return the rule's error message """
        return _("Old father")

    def mother_message(self):
        """ return the rule's error message """
        return _("Old mother")
class YoungParent(FamilyRule):
    """ test if each family's parent was 'too young' at a child's birth """
    ID = 23
    SEVERITY = Rule.WARNING

    def __init__(self, db, obj, yng_mom, yng_dad, est):
        """ initialize with the per-parent age limits and estimate flag """
        FamilyRule.__init__(self, db, obj)
        self.yng_dad = yng_dad
        self.yng_mom = yng_mom
        self.est = est

    def _get_params(self):
        """ return the rule's parameters """
        return (self.yng_mom, self.yng_dad, self.est)

    def broken(self):
        """ violated when a parent is below the age limit at a child's
        birth; rebinds get_message to name the offending parent """
        mother = get_mother(self.db, self.obj)
        father = get_father(self.db, self.obj)
        mother_birth = get_birth_date(self.db, mother, self.est)
        father_birth = get_birth_date(self.db, father, self.est)
        for child_ref in self.obj.get_child_ref_list():
            child = find_person(self.db, child_ref.ref)
            child_birth = get_birth_date(self.db, child, self.est)
            if child_birth <= 0:
                continue
            if (father_birth > 0 and
                    (child_birth - father_birth) / 365 < self.yng_dad):
                self.get_message = self.father_message
                return True
            if (mother_birth > 0 and
                    (child_birth - mother_birth) / 365 < self.yng_mom):
                self.get_message = self.mother_message
                return True
        return False

    def father_message(self):
        """ return the rule's error message """
        return _("Young father")

    def mother_message(self):
        """ return the rule's error message """
        return _("Young mother")
class UnbornParent(FamilyRule):
    """ test if each family's parent was not yet born at a child's birth """
    ID = 24
    SEVERITY = Rule.ERROR

    def __init__(self, db, obj, est):
        """ initialize with the estimate-dates flag """
        FamilyRule.__init__(self, db, obj)
        self.est = est

    def _get_params(self):
        """ return the rule's parameters """
        return (self.est,)

    def broken(self):
        """ violated when a parent's birth follows a child's birth;
        rebinds get_message to name the offending parent """
        mother = get_mother(self.db, self.obj)
        father = get_father(self.db, self.obj)
        mother_birth_date = get_birth_date(self.db, mother, self.est)
        father_birth_date = get_birth_date(self.db, father, self.est)
        mother_birth_date_ok = mother_birth_date > 0
        father_birth_date_ok = father_birth_date > 0
        for child_ref in self.obj.get_child_ref_list():
            child = find_person(self.db, child_ref.ref)
            child_birth_date = get_birth_date(self.db, child, self.est)
            if child_birth_date <= 0:
                continue
            if (father_birth_date_ok
                    and father_birth_date > child_birth_date):
                self.get_message = self.father_message
                return True
            if (mother_birth_date_ok
                    and mother_birth_date > child_birth_date):
                self.get_message = self.mother_message
                return True
        # FIX: the original fell off the end and returned None; return an
        # explicit False for consistency with the sibling rules.
        return False

    def father_message(self):
        """ return the rule's error message """
        return _("Unborn father")

    def mother_message(self):
        """ return the rule's error message """
        return _("Unborn mother")
class DeadParent(FamilyRule):
    """ test if each family's parent was dead at a child's birth """
    ID = 25
    SEVERITY = Rule.ERROR

    def __init__(self, db, obj, est):
        """ initialize with the estimate-dates flag """
        FamilyRule.__init__(self, db, obj)
        self.est = est

    def _get_params(self):
        """ return the rule's parameters """
        return (self.est,)

    def broken(self):
        """ violated when a birth-related child was born after a parent's
        death; rebinds get_message to name the offending parent """
        mother = get_mother(self.db, self.obj)
        father = get_father(self.db, self.obj)
        mother_death_date = get_death_date(self.db, mother, self.est)
        father_death_date = get_death_date(self.db, father, self.est)
        mother_death_date_ok = mother_death_date > 0
        father_death_date_ok = father_death_date > 0
        for child_ref in self.obj.get_child_ref_list():
            child = find_person(self.db, child_ref.ref)
            child_birth_date = get_birth_date(self.db, child, self.est)
            if child_birth_date <= 0:
                continue
            # only biological (BIRTH) relationships are checked
            has_birth_rel_to_mother = child_ref.mrel == ChildRefType.BIRTH
            has_birth_rel_to_father = child_ref.frel == ChildRefType.BIRTH
            # 294 days after the father's death — presumably a grace period
            # for posthumous birth (~42 weeks gestation); TODO confirm intent
            father_broken = (
                has_birth_rel_to_father
                and father_death_date_ok
                and ((father_death_date + 294) < child_birth_date))
            if father_broken:
                self.get_message = self.father_message
                return True
            mother_broken = (has_birth_rel_to_mother
                             and mother_death_date_ok
                             and (mother_death_date < child_birth_date))
            if mother_broken:
                self.get_message = self.mother_message
                return True
        # FIX: the original fell off the end and returned None; return an
        # explicit False for consistency with the sibling rules.
        return False

    def father_message(self):
        """ return the rule's error message """
        return _("Dead father")

    def mother_message(self):
        """ return the rule's error message """
        return _("Dead mother")
class LargeChildrenSpan(FamilyRule):
    """ test if a family's first and last children were born far apart """
    ID = 26
    SEVERITY = Rule.WARNING

    def __init__(self, db, obj, cb_span, est):
        """ initialize with the maximum span and the estimate flag """
        FamilyRule.__init__(self, db, obj)
        self.cbs = cb_span
        self.est = est

    def _get_params(self):
        """ return the rule's parameters """
        return (self.cbs, self.est)

    def broken(self):
        """ violated when first/last child births span more than cbs years """
        birth_dates = get_child_birth_dates(self.db, self.obj, self.est)
        if not birth_dates:
            return False
        birth_dates.sort()
        return (birth_dates[-1] - birth_dates[0]) / 365 > self.cbs

    def get_message(self):
        """ return the rule's error message """
        return _("Large year span for all children")
class LargeChildrenAgeDiff(FamilyRule):
    """ test if any of a family's children were born far apart """
    ID = 27
    SEVERITY = Rule.WARNING

    def __init__(self, db, obj, c_space, est):
        """ initialize with the maximum gap and the estimate flag """
        FamilyRule.__init__(self, db, obj)
        self.c_space = c_space
        self.est = est

    def _get_params(self):
        """ return the rule's parameters """
        return (self.c_space, self.est)

    def broken(self):
        """ violated when two consecutive births are more than
        c_space years apart """
        birth_dates = get_child_birth_dates(self.db, self.obj, self.est)
        gaps = [later - earlier
                for earlier, later in zip(birth_dates, birth_dates[1:])]
        if not gaps:
            return False
        return max(gaps) / 365 > self.c_space

    def get_message(self):
        """ return the rule's error message """
        return _("Large age differences between children")
class Disconnected(PersonRule):
    """ test if a person has no children and no parents """
    ID = 28
    SEVERITY = Rule.WARNING

    def broken(self):
        """ violated when the person has no parent and no own families """
        parent_families = self.obj.get_parent_family_handle_list()
        own_families = self.obj.get_family_handle_list()
        return not parent_families and not own_families

    def get_message(self):
        """ return the rule's error message """
        return _("Disconnected individual")
class InvalidBirthDate(PersonRule):
    """ test if a person has an 'invalid' birth date """
    ID = 29
    SEVERITY = Rule.ERROR

    def __init__(self, db, person, invdate):
        """ initialize with the check-invalid-dates option flag """
        PersonRule.__init__(self, db, person)
        self._invdate = invdate

    def broken(self):
        """ violated when checking is enabled and the birth date exists
        but is not valid """
        if not self._invdate:  # checking disabled by the options
            return False
        birth_ref = self.obj.get_birth_ref()
        if not birth_ref:
            return False
        birth_event = self.db.get_event_from_handle(birth_ref.ref)
        birth_date = birth_event.get_date_object()
        return bool(birth_date and not birth_date.get_valid())

    def get_message(self):
        """ return the rule's error message """
        return _("Invalid birth date")
class InvalidDeathDate(PersonRule):
    """ test if a person has an 'invalid' death date """
    ID = 30
    SEVERITY = Rule.ERROR

    def __init__(self, db, person, invdate):
        """ initialize the rule """
        PersonRule.__init__(self, db, person)
        self._invdate = invdate

    def broken(self):
        """ return boolean indicating whether this rule is violated """
        # Checking invalid dates is optional; bail out when disabled.
        if not self._invdate:
            return False
        death_ref = self.obj.get_death_ref()
        if not death_ref:
            return False
        # Violated when a death event exists but its date fails validation.
        death_event = self.db.get_event_from_handle(death_ref.ref)
        death_date = death_event.get_date_object()
        return bool(death_date and not death_date.get_valid())

    def get_message(self):
        """ return the rule's error message """
        return _("Invalid death date")
class MarriedRelation(FamilyRule):
    """ test if a family has a marriage date but is not marked 'married' """
    ID = 31
    SEVERITY = Rule.WARNING

    def __init__(self, db, obj):
        """ initialize the rule """
        FamilyRule.__init__(self, db, obj)

    def broken(self):
        """ return boolean indicating whether this rule is violated """
        # Violated when a marriage date is recorded but the relationship
        # type is not MARRIED.  Return a plain boolean: the previous code
        # returned the bound ``self.get_message`` method (merely truthy)
        # on violation and an implicit None otherwise, which was
        # inconsistent with every other rule's boolean contract.
        marr_date = get_marriage_date(self.db, self.obj)
        marr_date_ok = marr_date > 0
        married = self.obj.get_relationship() == FamilyRelType.MARRIED
        return marr_date_ok and not married

    def get_message(self):
        """ return the rule's error message """
        return _("Marriage date but not married")
class OldAgeButNoDeath(PersonRule):
    """ test if a person is 'too old' but is not shown as dead """
    ID = 32
    SEVERITY = Rule.WARNING

    def __init__(self, db, person, old_age, est):
        """ initialize the rule

        :param old_age: age (in years) beyond which a seemingly living
            person is flagged
        :param est: whether estimated dates (e.g. baptism/burial) may be
            used when the exact dates are missing
        """
        PersonRule.__init__(self, db, person)
        self.old_age = old_age
        self.est = est

    def _get_params(self):
        """ return the rule's parameters """
        return (self.old_age, self.est)

    def broken(self):
        """ return boolean indicating whether this rule is violated """
        birth_date = get_birth_date(self.db, self.obj, self.est)
        dead = get_death(self.db, self.obj)
        death_date = get_death_date(self.db, self.obj, True)  # or burial date
        # Not violated when the person is known dead or the birth date is
        # unknown; return False (not 0) for consistency with other rules.
        if dead or death_date or not birth_date:
            return False
        # NOTE(review): assumes module-level ``_today`` holds today's date
        # in the same serial-day units as ``birth_date``.
        age = (_today - birth_date) / 365
        return age > self.old_age

    def get_message(self):
        """ return the rule's error message """
        return _("Old age but no death")
| gpl-2.0 |
mdpiper/dakota-experiments | experiments/beaver-creek-lsq/long_profile.py | 1 | 8531 | #! /usr/bin/env python
from __future__ import print_function
import argparse
import re
import numpy as np
import matplotlib.pyplot as plt
import seaborn as sns
def str2num(s):
    """Parse *s* as an int when possible, otherwise as a float.

    Raises ValueError when *s* is neither.
    """
    try:
        return int(s)
    except ValueError:
        pass
    return float(s)
class Dakota(object):
    """Helpers for reading Dakota parameter files and writing results files."""

    # Raw strings: '\.', '\w', etc. are invalid escape sequences in
    # ordinary string literals (SyntaxWarning since Python 3.6, stricter
    # in 3.12); raw literals make the regex intent explicit.
    FLOAT_REGEX = r'[-+]?[0-9]*\.?[0-9]*([eE][-+]?[0-9]+)?'
    KEY_REGEX = r'(?P<key>\w+)'
    VALUE_REGEX = r'(?P<value>' + FLOAT_REGEX + r')'

    @staticmethod
    def read_params(filename):
        """Read a Dakota parameters file in standard 'value name' format.

        Returns a dict mapping parameter names to int/float values.
        """
        pattern = re.compile(r'\s*' + Dakota.VALUE_REGEX + r'\s+' +
                             Dakota.KEY_REGEX)
        params = {}
        with open(filename, 'r') as fp:
            for line in fp:
                m = pattern.match(line)
                if m is not None:
                    params[m.group('key')] = str2num(m.group('value'))
        return params

    @staticmethod
    def read_aprepro(filename):
        """Read a Dakota parameters file in APREPRO '{ name = value }' format."""
        pattern = re.compile(r'\s*\{\s+' + Dakota.KEY_REGEX + r'\s+=\s+' +
                             Dakota.VALUE_REGEX + r'\s+\}')
        params = {}
        with open(filename, 'r') as fp:
            for line in fp:
                m = pattern.match(line)
                if m is not None:
                    params[m.group('key')] = str2num(m.group('value'))
        return params

    @staticmethod
    def print_gradients(fp, grads):
        """Write gradient rows to *fp* in Dakota's '[ ... ]' format."""
        for items in zip(*grads):
            format_str = '[ ' + ' '.join(['%f'] * len(items)) + ' ]'
            print(format_str % items, file=fp)

    @staticmethod
    def print_hessians(fp, hessians):
        """Write Hessian rows to *fp* in Dakota's '[[ ... ]]' format."""
        for items in zip(*hessians):
            format_str = '[[ ' + ' '.join(['%f'] * len(items)) + ' ]]'
            print(format_str % items, file=fp)

    @staticmethod
    def print_results(filename, x, gradients=None, hessians=None):
        """Write residuals *x*, then optional gradients/Hessians, to *filename*."""
        gradients = gradients or ([], )
        hessians = hessians or ([], )
        np.savetxt(filename, x)
        with open(filename, 'a+') as fp:
            Dakota.print_gradients(fp, gradients)
            Dakota.print_hessians(fp, hessians)
def main():
    """Command-line entry point.

    Reads optional Dakota parameter/result file names, fits the selected
    longitudinal-profile model to the measured data, then either writes a
    Dakota results file or displays a plot of the fit.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument('params', type=str, nargs='?',
                        help='Dakota parameters file')
    parser.add_argument('results', type=str, nargs='?',
                        help='Dakota results file')
    parser.add_argument('--model', choices=('power', 'log', 'peckham'),
                        default='power',
                        help='Model used to calculate longitudinal profile')
    parser.add_argument('--data', type=str, default='beaver_creek.npy',
                        help='Data file containing profile elevations')
    args = parser.parse_args()

    params = Dakota.read_params(args.params) if args.params else {}

    x, z = measured_elevations_from_file(args.data)
    # Anchor the model at the most upstream measured point.
    params['x0'] = x[0]
    params['z0'] = z[0]

    model_class = {'power': PowerLawModel,
                   'log': LogModel,
                   'peckham': PeckhamModel}[args.model]
    model = model_class(params=params)

    if args.results:
        Dakota.print_results(args.results, model.residual(x, z),
                             gradients=model.gradients(x))
    else:
        model.plot(x, z)
def sum_of_squares(y, f):
    """Return the sum of squared residuals between data *y* and model *f*."""
    residuals = y - f
    return np.sum(residuals ** 2.)
def r_squared(y, f):
    """Return the coefficient of determination (R^2) of model *f* vs data *y*."""
    ss_res = sum_of_squares(y, f)
    ss_tot = sum_of_squares(y, y.mean())
    return 1. - ss_res / ss_tot
def measured_elevations_from_file(filename):
    """Load distance and elevation arrays from a NumPy ``.npy`` file.

    The file is expected to hold a two-row array: distances (m) first,
    elevations (m) second.
    """
    distance, elevation = np.load(filename)
    return (distance, elevation)
class ChannelProfileModel(object):
    """Base class for longitudinal channel-profile models.

    Subclasses implement :meth:`eval` and the gradient helpers.

    :param params: dict of model parameters, or None.  Keys 'x0' and 'z0'
        anchor the profile's upstream distance and elevation.
    """

    def __init__(self, params=None):
        self._params = params or {}
        # Look up the anchors on the sanitized dict: *params* itself may
        # be None, in which case the original ``params.get(...)`` calls
        # raised AttributeError despite the ``params or {}`` guard.
        self._x0 = self._params.get('x0')
        self._z0 = self._params.get('z0')

    def eval(self, x):
        """Evaluate the model at distances *x* (subclass responsibility)."""
        raise NotImplementedError('eval')

    def residual(self, x, z):
        """Return measured elevations *z* minus modeled elevations at *x*."""
        return z - self.eval(x)

    def gradients(self, x):
        """Return gradients of the model w.r.t. each fitted parameter."""
        return (self._grad_wrt_c(x), self._grad_wrt_p(x))

    def _grad_wrt_c(self, x):
        # Default: no analytic gradient; subclasses override.
        return []

    def _grad_wrt_p(self, x):
        # Default: no analytic gradient; subclasses override.
        return []

    def plot(self, x, z):
        """Plot measured vs. modeled profile with an R^2 annotation."""
        bbox_props = dict(boxstyle='square,pad=.5', fc='none')
        sns.set_style('whitegrid')
        plt.plot(x / 1000., z)
        plt.plot(x / 1000., self.eval(x))
        annotation = '\n'.join(['R^2 = %f' % r_squared(z, self.eval(x)),
                                self.text_summary()])
        plt.annotate(annotation, xy=(.05, .95),
                     xycoords='axes fraction', ha='left', va='top',
                     bbox=bbox_props)
        plt.title('Distance (km) vs elevation (m) for main channel profile of '
                  'Beaver Creek, KY.')
        plt.show()

    def text_summary(self):
        """Return one 'name = value' line per model parameter."""
        text = []
        for item in self._params.items():
            text.append('%s = %f' % item)
        return '\n'.join(text)
class PowerLawModel(ChannelProfileModel):
    """Power-law longitudinal profile model."""

    def __init__(self, params=None):
        super(PowerLawModel, self).__init__(params=params)
        # Defaults from a prior least-squares calibration.
        self._params.setdefault('c', 3.9999968015e+01)
        self._params.setdefault('p', 6.1132405380e-02)

    def eval(self, x):
        """Modeled elevations at distances *x*."""
        c = self._params['c']
        p = self._params['p']
        x0 = self._params['x0']
        return self._z0 - (c / p) * (np.power(x, p) - np.power(x0, p))

    def _grad_wrt_c(self, x):
        """Partial derivative of the model w.r.t. parameter *c*."""
        p = self._params['p']
        x0 = self._params['x0']
        return (- 1. / p) * (np.power(x, p) - np.power(x0, p))

    def _grad_wrt_p(self, x):
        """Partial derivative of the model w.r.t. parameter *p*."""
        c = self._params['c']
        p = self._params['p']
        x0 = self._params['x0']
        return (c / p ** 2.) * (
            - np.power(x, p) + p * np.power(x, p) * np.log(x) +
            np.power(x0, p) - p * np.power(x0, p) * np.log(x0))

    def __str__(self):
        return '$f(p,x) = (1/p) \, x^p$'
class LogModel(ChannelProfileModel):
    """Logarithmic longitudinal profile model."""

    def __init__(self, params=None):
        super(LogModel, self).__init__(params=params)
        # Defaults from a prior Newton-method calibration.
        self._params.setdefault('c', 2.0785632989e+02)
        self._params.setdefault('p', 6.0921199008e-01)

    def eval(self, x):
        """Modeled elevations at distances *x*."""
        c = self._params['c']
        p = self._params['p']
        x0 = self._params['x0']
        return self._z0 - c * (np.log(x) ** p - np.log(x0) ** p)

    def _grad_wrt_c(self, x):
        """Partial derivative of the model w.r.t. parameter *c*."""
        p = self._params['p']
        x0 = self._params['x0']
        return - (np.log(x) ** p - np.log(x0) ** p)

    def _grad_wrt_p(self, x):
        """Partial derivative of the model w.r.t. parameter *p*."""
        c = self._params['c']
        p = self._params['p']
        x0 = self._params['x0']
        return - c * (np.log(np.log(x)) * np.power(np.log(x), p) -
                      np.log(np.log(x0)) * np.power(np.log(x0), p))

    def __str__(self):
        return '$f(p,x) = \log^p(x)$'
class PeckhamModel(ChannelProfileModel):
    """Slope-area longitudinal profile model (Peckham formulation)."""

    def __init__(self, params=None):
        super(PeckhamModel, self).__init__(params=params)
        self._params.setdefault('gamma', -7.6991826046e-01)
        self._params.setdefault('r', 5.2248736972e-03)
        self._params.setdefault('s0', 6.7005230518e-01)
        self._params.setdefault('x0', 0.)
        self._params.setdefault('z0', 668.33)

    def eval(self, x):
        """Modeled elevations at distances *x*."""
        z0, x0, s0 = self._params['z0'], self._params['x0'], self._params['s0']
        r_star, gamma = self._params['r'], self._params['gamma']
        p_gamma = (gamma + 1.) / gamma
        return z0 + (1. / (p_gamma * r_star)) * (
            np.power(s0, gamma + 1.) - np.power(np.power(s0, gamma) +
                                                r_star * (x - x0), p_gamma)
        )

    def gradients(self, x):
        """Analytic gradients are not implemented for this model."""
        return ([], [], [])

    def _grad_wrt_s0(self, x):
        # BUG FIX: ``raise NotImplemented(...)`` raised a TypeError,
        # because NotImplemented is a constant, not an exception class.
        raise NotImplementedError('grad_wrt_s0')

    def _grad_wrt_gamma(self, x):
        raise NotImplementedError('grad_wrt_gamma')

    def _grad_wrt_r(self, x):
        raise NotImplementedError('grad_wrt_r')

    def __str__(self):
        return '$f(x) = x$'
# Script entry point: parse CLI arguments and fit/plot the profile model.
if __name__ == '__main__':
    main()
| mit |
jorgb/airs | gui/images/icon_visit_site.py | 1 | 3226 | #----------------------------------------------------------------------
# This file was generated by D:\src\cvl_repository\wxpython\framework\images\make_images.py
#
from wx import ImageFromStream, BitmapFromImage, EmptyIcon
import cStringIO, zlib
def getData():
    """Return the raw PNG bytes of the icon (zlib-decompressed).

    The compressed literal below was emitted by the make_images.py
    generator; do not edit it by hand.
    """
    return zlib.decompress(
'x\xda\x01\x98\x03g\xfc\x89PNG\r\n\x1a\n\x00\x00\x00\rIHDR\x00\x00\x00\x10\
\x00\x00\x00\x10\x08\x06\x00\x00\x00\x1f\xf3\xffa\x00\x00\x00\x04sBIT\x08\
\x08\x08\x08|\x08d\x88\x00\x00\x03OIDAT8\x8d]\x93\xcbO\\u\x00\x85\xbf;s\xe7\
\x0c\x0c\x04\x18\xa6U\n\x95\x87\xd4\x96\x82%BI\x9aHY\xd5\xa4\x89\xe9\
\xa6&\xf5\x91\x10c\xd4\x7f\xc0Ect\xd5\x98t\xa1a\xe5\x02W\x8d!1&&\xd6h\x95\
\xb4\xc5jC\x95J-\xb5<\xd4\xe1i\x99\xcc\x0c3\xcc\xdc\xb9\x8f\xdf\xc3\x8d4\xea\
Y\x9d\x9c|9\xbb\xcf\xe0\x7fys\xf6\xe5\x93\x06\xc6[h\x061h4\xd0&\xda(`p[+5\
\xf9\xe1\xe8\'_\xfd\x9b7\xf6\xcb\xc5\x99\x8bf\xdeLOa\xf0\xe2\x99\xd4X(\x1ei\
\xa4hUa\xb9 \xa4\x8d\xe0\x11\xd7\xb6\xae\n%\xc57f@\x9c\x9b<5]z|0qg"\x10\xb6\
\xbd\x99\x81D\xef\xf0\xe9\xd4)\x8av\x08\x07\x97\x95\xa2AP\x87\xc8Z\n-\\"Xl;7\
\x99\xdb\x9c[p\x84\x7ft\xfa\xdct\xc1\x07\x10\xb2\xbd\xa9\xfe\x86\xc3\xc3#\
\xad\x03\xe4\xa4\x8b\xe73\x10\xc2GE\x04\xb1\x08\x92sB\xec\xd8\x116\n\x01\x0e\
T?\xcb`K\xd7\x11i\x97\xaf\x00\xf8\xdf\xbe\xf9\xca\x88\x81\xbet\xfe\xf0\x19\
\xff\xaf\xb2\xcc6\x15*JP\xb2\xab\xc99\x01\x8a6,o\xeeQ(\x97\t\xc5\xb3\x18\xb5\
\xcb\x0c%\xba\xf8v\xf1v{\xc7\xf8\x81_\xfc\'^;vy<9\xf4\xb4\xe1\x8f\xb0\xed\
\x13\xa0\x9bh\x91\xd5\xfc\xf4\xc8O\xc5\xf6\xb1\xb8YB\x85\xf3\x84\x0e>\xc4\
\xa9Y&\xa3\xf2\x98yI_C\xcaw\'\xfd[\xbd\xa9a0U\x93\xe4\xae\xb5E&\xec\xa3\xc7\
\x7f\x08\x85 \xbd\xe3b\xdb\x15j\x9b5\xc5\xfa\xafYQ\xbb\xe0h\xeaD\x88\xe5\x8c\
\xc3\xf3O\xf4\xe39\xa2\xcf4\xb4n\x88\x9aU\xa4\xad-v\x1c\x8bX\xb4\x9d]7F>\xaf\
\xa8m\xb4i\x8a[d\xe4Q\x92\xf3\x93H\xbbL&\xd5En7Ju$\x86\xe7\x88:\x9f\xd2\x18\
\x86\x06\xd7)\xa2\xad\x16LOr\xf5G\x0f#\x1ca{\xc5d}\xad\x86V\'\x82\xf04m\x89n\
B+\xf3xv\x05\xb4\xc6\x13\xc20\x95\x94\xb9\xa2[L6\xf8\xeb\x88\x85\x9fa#cb{\
\x01Z\xd4\x1b\xd4F%\xce\xa6\xc7^Z\x90\xac\xef\xa0\xbb\xf9\x04{v\x99\x85\x8d\
\xef\xc8\x95\xc7\x91R\x17L\xcf\x93?\xff\x99[KvDS\\\xfb\xeb{\xea\xa2\x1d\x04\
\xfd)\x84r\x19\xeb\xbd\x80\xd4\n\xa9$\n\xcdVa\x9d\xbe\xb6aJn\x85w>\x7f\x1dt\
\xe7\x03\x9f\xb4\xe5G\x9f=\xb8.\x0eV7bZ\x9b\x14\xb2\xb7\x88\xc7\x1dl\xe1"\
\xb5"\x9d]\xe6\x8f\xec\x12\xab\x99Ev+Y\xd2\xb9U\x8e\xa6N\xd2\xd9|\x9c\xbd\
\xc0\xc2\x88\x7f\xfe\xca\xfd\xd5\xae\xb3\x1d\x03N\xa5\xfc\xd4\xf1\x96\x1er\
\x96E8\xf4$\x99\xcc\x17t6\xf5\x13\r\xd6\x10\r\xd6P\x15\xaa\xc3r-\x12\xb1V\
\xe6\xd7oqci\xd6rJ2n\x02\xd8%\xe3\xa5\x99\xa5\xf9Y\xa7\xe2\x1e\x19?6\xca\xc3\
\x1d\x93{\xc2a\xea\x87\x0fp\x95\x8b-\\\x0e%z\x18j?\xcd\\z\x96\x99\xa5\xebN\
\xc4k\xeb^\xbd\xfc{\xe5\xb1L/|\xfc\\\xac\\\xf4\x7f\xea\t1va\xf8U\x13\xd9\x8c\
\xd6Q\x0c\x9fA\xa2\xb6\xc8{_\x9e\xa7\xb7e\x88\x1bK\xb3{\xd5^\xaa\xe7\xde\xa5\
\x95\x8d\xff\xd8\xb8\x9f\xa1\xf7\x07\xcf*\xa1&\xa4T}J\xc8\xf8?sa\xd7\xbc\x9f\
\x90\x8a\xc0\x9a)\x03\xbc\x8b\xd8\xe7\xff\x06\xf2\x93\xac\xef~%\xf2\xbf\x00\
\x00\x00\x00IEND\xaeB`\x82\xb3\xee\xba\x80' )
def getBitmap():
    """Return the icon as a wx Bitmap built from the embedded PNG data."""
    return BitmapFromImage(getImage())
def getImage():
    """Return the icon as a wx Image decoded from the embedded PNG data."""
    # Python 2 era code: cStringIO wraps the raw bytes as a file-like stream.
    stream = cStringIO.StringIO(getData())
    return ImageFromStream(stream)
| gpl-2.0 |
richpolis/siveinpy | env/lib/python2.7/site-packages/pip/_vendor/requests/packages/urllib3/poolmanager.py | 550 | 8977 | # urllib3/poolmanager.py
# Copyright 2008-2014 Andrey Petrov and contributors (see CONTRIBUTORS.txt)
#
# This module is part of urllib3 and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
import logging
try: # Python 3
from urllib.parse import urljoin
except ImportError:
from urlparse import urljoin
from ._collections import RecentlyUsedContainer
from .connectionpool import HTTPConnectionPool, HTTPSConnectionPool
from .connectionpool import port_by_scheme
from .request import RequestMethods
from .util import parse_url
__all__ = ['PoolManager', 'ProxyManager', 'proxy_from_url']
# Maps a URL scheme to the ConnectionPool subclass used for that scheme.
pool_classes_by_scheme = {
    'http': HTTPConnectionPool,
    'https': HTTPSConnectionPool,
}
log = logging.getLogger(__name__)
# Keyword arguments meaningful only for HTTPS pools; stripped from the
# shared kwargs when constructing plain HTTP pools.
SSL_KEYWORDS = ('key_file', 'cert_file', 'cert_reqs', 'ca_certs',
                'ssl_version')
class PoolManager(RequestMethods):
    """
    Allows for arbitrary requests while transparently keeping track of
    necessary connection pools for you.

    :param num_pools:
        Number of connection pools to cache before discarding the least
        recently used pool.

    :param headers:
        Headers to include with all requests, unless other headers are given
        explicitly.

    :param \**connection_pool_kw:
        Additional parameters are used to create fresh
        :class:`urllib3.connectionpool.ConnectionPool` instances.

    Example: ::

        >>> manager = PoolManager(num_pools=2)
        >>> r = manager.request('GET', 'http://google.com/')
        >>> r = manager.request('GET', 'http://google.com/mail')
        >>> r = manager.request('GET', 'http://yahoo.com/')
        >>> len(manager.pools)
        2

    """

    # Overridden by ProxyManager; when not None, HTTP requests are sent
    # with the absolute URL as the request target.
    proxy = None

    def __init__(self, num_pools=10, headers=None, **connection_pool_kw):
        RequestMethods.__init__(self, headers)
        self.connection_pool_kw = connection_pool_kw
        # LRU cache of ConnectionPool objects; evicted pools are closed.
        self.pools = RecentlyUsedContainer(num_pools,
                                           dispose_func=lambda p: p.close())

    def _new_pool(self, scheme, host, port):
        """
        Create a new :class:`ConnectionPool` based on host, port and scheme.

        This method is used to actually create the connection pools handed out
        by :meth:`connection_from_url` and companion methods. It is intended
        to be overridden for customization.
        """
        pool_cls = pool_classes_by_scheme[scheme]
        kwargs = self.connection_pool_kw
        if scheme == 'http':
            # SSL-only kwargs would be rejected by HTTPConnectionPool, so
            # strip them from a copy of the shared kwargs.
            kwargs = self.connection_pool_kw.copy()
            for kw in SSL_KEYWORDS:
                kwargs.pop(kw, None)

        return pool_cls(host, port, **kwargs)

    def clear(self):
        """
        Empty our store of pools and direct them all to close.

        This will not affect in-flight connections, but they will not be
        re-used after completion.
        """
        self.pools.clear()

    def connection_from_host(self, host, port=None, scheme='http'):
        """
        Get a :class:`ConnectionPool` based on the host, port, and scheme.

        If ``port`` isn't given, it will be derived from the ``scheme`` using
        ``urllib3.connectionpool.port_by_scheme``.
        """
        scheme = scheme or 'http'
        port = port or port_by_scheme.get(scheme, 80)
        # Pools are keyed and cached on the (scheme, host, port) triple.
        pool_key = (scheme, host, port)

        with self.pools.lock:
            # If the scheme, host, or port doesn't match existing open
            # connections, open a new ConnectionPool.
            pool = self.pools.get(pool_key)
            if pool:
                return pool

            # Make a fresh ConnectionPool of the desired type
            pool = self._new_pool(scheme, host, port)
            self.pools[pool_key] = pool
        return pool

    def connection_from_url(self, url):
        """
        Similar to :func:`urllib3.connectionpool.connection_from_url` but
        doesn't pass any additional parameters to the
        :class:`urllib3.connectionpool.ConnectionPool` constructor.

        Additional parameters are taken from the :class:`.PoolManager`
        constructor.
        """
        u = parse_url(url)
        return self.connection_from_host(u.host, port=u.port, scheme=u.scheme)

    def urlopen(self, method, url, redirect=True, **kw):
        """
        Same as :meth:`urllib3.connectionpool.HTTPConnectionPool.urlopen`
        with custom cross-host redirect logic and only sends the request-uri
        portion of the ``url``.

        The given ``url`` parameter must be absolute, such that an appropriate
        :class:`urllib3.connectionpool.ConnectionPool` can be chosen for it.
        """
        u = parse_url(url)
        conn = self.connection_from_host(u.host, port=u.port, scheme=u.scheme)

        # Redirects are followed here (cross-host aware), so disable the
        # pool's own same-host assertion and redirect handling.
        kw['assert_same_host'] = False
        kw['redirect'] = False
        if 'headers' not in kw:
            kw['headers'] = self.headers

        if self.proxy is not None and u.scheme == "http":
            # Proxied plain-HTTP requests must carry the absolute URL.
            response = conn.urlopen(method, url, **kw)
        else:
            response = conn.urlopen(method, u.request_uri, **kw)

        redirect_location = redirect and response.get_redirect_location()
        if not redirect_location:
            return response

        # Support relative URLs for redirecting.
        redirect_location = urljoin(url, redirect_location)

        # RFC 2616, Section 10.3.4
        if response.status == 303:
            method = 'GET'

        log.info("Redirecting %s -> %s" % (url, redirect_location))
        kw['retries'] = kw.get('retries', 3) - 1  # Persist retries countdown
        kw['redirect'] = redirect
        # Recurse to follow the redirect chain until retries run out.
        return self.urlopen(method, redirect_location, **kw)
class ProxyManager(PoolManager):
    """
    Behaves just like :class:`PoolManager`, but sends all requests through
    the defined proxy, using the CONNECT method for HTTPS URLs.

    :param proxy_url:
        The URL of the proxy to be used.

    :param proxy_headers:
        A dictionary containing headers that will be sent to the proxy. In case
        of HTTP they are being sent with each request, while in the
        HTTPS/CONNECT case they are sent only once. Could be used for proxy
        authentication.

    Example:
        >>> proxy = urllib3.ProxyManager('http://localhost:3128/')
        >>> r1 = proxy.request('GET', 'http://google.com/')
        >>> r2 = proxy.request('GET', 'http://httpbin.org/')
        >>> len(proxy.pools)
        1
        >>> r3 = proxy.request('GET', 'https://httpbin.org/')
        >>> r4 = proxy.request('GET', 'https://twitter.com/')
        >>> len(proxy.pools)
        3

    """

    def __init__(self, proxy_url, num_pools=10, headers=None,
                 proxy_headers=None, **connection_pool_kw):
        if isinstance(proxy_url, HTTPConnectionPool):
            # Accept an existing pool object in place of a URL string.
            proxy_url = '%s://%s:%i' % (proxy_url.scheme, proxy_url.host,
                                        proxy_url.port)
        proxy = parse_url(proxy_url)
        if not proxy.port:
            # Default the proxy port from its scheme (e.g. http -> 80).
            port = port_by_scheme.get(proxy.scheme, 80)
            proxy = proxy._replace(port=port)

        self.proxy = proxy
        self.proxy_headers = proxy_headers or {}
        assert self.proxy.scheme in ("http", "https"), \
            'Not supported proxy scheme %s' % self.proxy.scheme
        # Every pool created by this manager shares the proxy settings.
        connection_pool_kw['_proxy'] = self.proxy
        connection_pool_kw['_proxy_headers'] = self.proxy_headers
        super(ProxyManager, self).__init__(
            num_pools, headers, **connection_pool_kw)

    def connection_from_host(self, host, port=None, scheme='http'):
        if scheme == "https":
            # HTTPS tunnels via CONNECT: one pool per target host.
            return super(ProxyManager, self).connection_from_host(
                host, port, scheme)

        # Plain HTTP is sent to the proxy itself: one shared pool.
        return super(ProxyManager, self).connection_from_host(
            self.proxy.host, self.proxy.port, self.proxy.scheme)

    def _set_proxy_headers(self, url, headers=None):
        """
        Sets headers needed by proxies: specifically, the Accept and Host
        headers. Only sets headers not provided by the user.
        """
        headers_ = {'Accept': '*/*'}

        netloc = parse_url(url).netloc
        if netloc:
            headers_['Host'] = netloc

        if headers:
            headers_.update(headers)
        return headers_

    def urlopen(self, method, url, redirect=True, **kw):
        "Same as HTTP(S)ConnectionPool.urlopen, ``url`` must be absolute."
        u = parse_url(url)

        if u.scheme == "http":
            # For proxied HTTPS requests, httplib sets the necessary headers
            # on the CONNECT to the proxy. For HTTP, we'll definitely
            # need to set 'Host' at the very least.
            kw['headers'] = self._set_proxy_headers(url, kw.get('headers',
                                                                self.headers))

        return super(ProxyManager, self).urlopen(method, url, redirect, **kw)
def proxy_from_url(url, **kw):
    """Return a :class:`ProxyManager` that routes requests through *url*."""
    return ProxyManager(proxy_url=url, **kw)
| mit |
ssssam/ansible-modules-core | utilities/logic/fail.py | 86 | 1443 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# Copyright 2012 Dag Wieers <dag@wieers.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Module documentation consumed by ansible-doc; kept as YAML-in-string per
# Ansible convention.  Typo fix: "simple bail out" -> "simply bail out".
DOCUMENTATION = '''
---
module: fail
short_description: Fail with custom message
description:
     - This module fails the progress with a custom message. It can be
       useful for bailing out when a certain condition is met using C(when).
version_added: "0.8"
options:
  msg:
    description:
      - The customized message used for failing execution. If omitted,
        fail will simply bail out with a generic message.
    required: false
    default: "'Failed as requested from task'"
author: Dag Wieers
'''

EXAMPLES = '''
# Example playbook using fail and when together
- fail: msg="The system may not be provisioned according to the CMDB status."
  when: cmdb_status != "to-be-staged"
'''
| gpl-3.0 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.